WIP: still chasing the bug...

Gregory Burd 2012-06-23 09:38:03 +01:00
parent a79dde264f
commit 102c518269
4 changed files with 51 additions and 57 deletions

hanoidb.erl

@@ -346,7 +346,6 @@ terminate(normal, _State) ->
     ok;
 terminate(_Reason, _State) ->
     error_logger:info_msg("got terminate(~p, ~p)~n", [_Reason, _State]),
-    % flush_nursery(State),
     ok.

 code_change(_OldVsn, State, _Extra) ->
@@ -386,11 +385,11 @@ handle_call({get, Key}, From, State=#state{ top=Top, nursery=Nursery } ) when is
             {noreply, State}
     end;
-handle_call(close, _From, State=#state{top=Top}) ->
+handle_call(close, _From, State=#state{ nursery=Nursery, top=Top, dir=Dir, max_level=MaxLevel, opt=Config }) ->
     try
-        {ok, State2} = flush_nursery(State),
+        ok = hanoidb_nursery:finish(Nursery, Top),
         ok = hanoidb_level:close(Top),
-        {stop, normal, ok, State2}
+        {stop, normal, ok, State#state{ nursery=hanoidb_nursery:new(Dir, MaxLevel, Config)}}
     catch
         E:R ->
             error_logger:info_msg("exception from close ~p:~p~n", [E,R]),
@@ -417,11 +416,6 @@ do_transact(TransactionSpec, State=#state{ nursery=Nursery, top=Top }) ->
     {ok, Nursery2} = hanoidb_nursery:transact(TransactionSpec, Nursery, Top),
     {ok, State#state{ nursery=Nursery2 }}.

-flush_nursery(State=#state{ nursery=Nursery, top=Top, dir=Dir, max_level=MaxLevel, opt=Config }) ->
-    ok = hanoidb_nursery:finish(Nursery, Top),
-    {ok, Nursery2} = hanoidb_nursery:new(Dir, MaxLevel, Config),
-    {ok, State#state{ nursery=Nursery2 }}.
-
 start_app() ->
     case application:start(?MODULE) of
         ok ->
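
Note: these two hunks fold the deleted flush_nursery/1 into the close
handler itself. One thing worth flagging: the deleted helper unwrapped the
{ok, Nursery2} result of hanoidb_nursery:new/3 before storing it, while the
new close clause stores the call's return value directly in #state.nursery.
That is harmless on a {stop, normal, ok, ...} reply since the server is
going down anyway, but if new/3 still returns a tagged tuple, the unwrapped
equivalent would look like this (a sketch only, using the same calls as the
diff above, without the surrounding try):

    handle_call(close, _From, State=#state{ nursery=Nursery, top=Top,
                                            dir=Dir, max_level=MaxLevel, opt=Config }) ->
        ok = hanoidb_nursery:finish(Nursery, Top),
        ok = hanoidb_level:close(Top),
        {ok, Nursery2} = hanoidb_nursery:new(Dir, MaxLevel, Config),
        {stop, normal, ok, State#state{ nursery=Nursery2 }}.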

hanoidb_nursery.erl

@@ -131,9 +131,9 @@ do_add(Nursery=#nursery{log_file=File, cache=Cache, total_size=TotalSize, count=
     ok = file:write(File, Data),
     Nursery1 = do_sync(File, Nursery),
-    {ok, Nursery2} =
-        do_inc_merge(Nursery1#nursery{ cache=Cache2, total_size=TotalSize+erlang:iolist_size(Data),
-                                       count=Count+1 }, 1, Top),
+    {ok, Nursery2} = do_inc_merge(Nursery1#nursery{ cache=Cache2,
+                                                    total_size=TotalSize + erlang:iolist_size(Data),
+                                                    count=Count + 1 }, 1, Top),
     if Count+1 >= ?BTREE_SIZE(?TOP_LEVEL) ->
             {full, Nursery2};
@@ -199,10 +199,10 @@ finish(#nursery{ dir=Dir, cache=Cache, log_file=LogFile,
     {ok, BT} = hanoidb_writer:open(BTreeFileName, [{size, ?BTREE_SIZE(?TOP_LEVEL)},
                                                    {compress, none} | Config]),
     try
-        [] = gb_trees_ext:fold(fun(Key, Value, Acc) ->
+        ok = gb_trees_ext:fold(fun(Key, Value, Acc) ->
                                        ok = hanoidb_writer:add(BT, Key, Value),
                                        Acc
-                               end, [], Cache)
+                               end, ok, Cache)
     after
         ok = hanoidb_writer:close(BT)
     end,
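
Note: the fold's fun returns Acc untouched, so the whole expression
evaluates to whatever seed it is given; switching the seed and the asserted
result from [] to ok only stops the code from suggesting a list is being
built. A minimal sketch of the assumed gb_trees_ext:fold/3 semantics (that
module is not shown in this diff):

    %% Assumption: gb_trees_ext:fold/3 threads an accumulator over all
    %% key/value pairs, like lists:foldl/3 over gb_trees:to_list/1.
    fold(Fun, Acc0, Tree) ->
        lists:foldl(fun({K, V}, Acc) -> Fun(K, V, Acc) end,
                    Acc0, gb_trees:to_list(Tree)).
    %% The fun in finish/2 returns Acc unchanged, so the fold returns its
    %% seed: `ok = fold(..., ok, Cache)` asserts exactly what
    %% `[] = fold(..., [], Cache)` did.
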
@@ -286,18 +286,15 @@ transact(Spec, Nursery=#nursery{ log_file=File, cache=Cache0, total_size=TotalSi
     Count = gb_trees:size(Cache2),
-    do_inc_merge(Nursery2#nursery{ cache=Cache2, total_size=TotalSize+erlang:iolist_size(Data), count=Count },
-                 length(Spec), Top).
+    do_inc_merge(Nursery2#nursery{ cache=Cache2, total_size=TotalSize+erlang:iolist_size(Data), count=Count }, length(Spec), Top).

 do_inc_merge(Nursery=#nursery{ step=Step, merge_done=Done }, N, TopLevel) ->
     case Step+N >= ?INC_MERGE_STEP of
         true ->
-            io:format("do_inc_merge: true ~p ~p ~p~n", [Step, N, ?INC_MERGE_STEP]),
-            hanoidb_level:begin_incremental_merge(TopLevel, Step+N),
-            {ok, Nursery#nursery{ step=0, merge_done=Done+Step+N }};
+            hanoidb_level:begin_incremental_merge(TopLevel, Step + N),
+            {ok, Nursery#nursery{ step=0, merge_done=Done + Step + N }};
         false ->
-            io:format("do_inc_merge: false ~p ~p ~p~n", [Step, N, ?INC_MERGE_STEP]),
-            {ok, Nursery#nursery{ step=Step+N }}
+            {ok, Nursery#nursery{ step=Step + N }}
     end.

 do_level_fold(#nursery{cache=Cache}, FoldWorkerPID, KeyRange) ->
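
Note: apart from dropping the io:format tracing, do_inc_merge/3 is
unchanged: step accumulates per-write work until it crosses
?INC_MERGE_STEP, then Step + N units are handed to
hanoidb_level:begin_incremental_merge/2 and the counter resets. A worked
example with a hypothetical threshold:

    %% Hypothetical value; the real ?INC_MERGE_STEP is defined elsewhere.
    -define(INC_MERGE_STEP, 32).
    %% step = 30, then a 4-op transact (N = 4): 30 + 4 >= 32, so the nursery
    %% calls begin_incremental_merge(TopLevel, 34), resets step to 0, and
    %% credits merge_done with 34. With step = 20 instead, it would simply
    %% carry step = 24 forward and trigger no merge yet.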

hanoidb_util.erl

@@ -40,9 +40,7 @@
 -define(TAG_DELETED2, 16#85).
 -define(TAG_END, 16#FF).

--compile({inline, [
-          crc_encapsulate/1, crc_encapsulate_kv_entry/2
-         ]}).
+-compile({inline, [crc_encapsulate/1, crc_encapsulate_kv_entry/2 ]}).

 index_file_name(Name) ->
index_file_name(Name) ->
@@ -56,22 +54,22 @@ file_exists(FileName) ->
         false
     end.

-estimate_node_size_increment(_KVList,Key,Value) ->
-    byte_size(Key)
-        + 10
-        + if
-              is_integer(Value) ->
-                  5;
-              is_binary(Value) ->
-                  5 + byte_size(Value);
-              is_atom(Value) ->
-                  8;
-              is_tuple(Value) ->
-                  13
-          end.
+estimate_node_size_increment(_KVList, Key, {Value, _TStamp})
+  when is_integer(Value) -> byte_size(Key) + 5 + 4;
+estimate_node_size_increment(_KVList, Key, {Value, _TStamp})
+  when is_binary(Value) -> byte_size(Key) + 5 + 4 + byte_size(Value);
+estimate_node_size_increment(_KVList, Key, {Value, _TStamp})
+  when is_atom(Value) -> byte_size(Key) + 8 + 4;
+estimate_node_size_increment(_KVList, Key, {Value, _TStamp})
+  when is_tuple(Value) -> byte_size(Key) + 13 + 4;
+estimate_node_size_increment(_KVList, Key, Value)
+  when is_integer(Value) -> byte_size(Key) + 5 + 4;
+estimate_node_size_increment(_KVList, Key, Value)
+  when is_binary(Value) -> byte_size(Key) + 5 + 4 + byte_size(Value);
+estimate_node_size_increment(_KVList, Key, Value)
+  when is_atom(Value) -> byte_size(Key) + 8 + 4;
+estimate_node_size_increment(_KVList, Key, Value)
+  when is_tuple(Value) -> byte_size(Key) + 13 + 4.

 -define(NO_COMPRESSION, 0).
 -define(SNAPPY_COMPRESSION, 1).
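
Note: the single if-based clause becomes eight guarded clauses so that
{Value, _TStamp} pairs are costed as well; the flat +10 overhead is gone
and every clause now charges a flat +4, presumably for the encoded
timestamp/trailer field. Worked numbers with hypothetical inputs:

    size_example() ->
        Key = <<"key">>, Value = <<"val">>,               %% 3 bytes each
        Old = byte_size(Key) + 10 + 5 + byte_size(Value), %% = 21
        New = byte_size(Key) + 5 + 4 + byte_size(Value),  %% = 15
        {Old, New}.
    %% A timestamped {<<"val">>, TStamp} is also costed 15 now, where the
    %% old code would have hit the generic is_tuple branch: 3 + 10 + 13 = 26.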

hanoidb_writer.erl

@@ -81,12 +81,12 @@ add(Ref, Key, Value) ->
 count(Ref) ->
     gen_server:call(Ref, count, infinity).

 %% @doc Close the btree index file
 close(Ref) ->
     gen_server:call(Ref, close, infinity).

 %%%

 init([Name, Options]) ->
     hanoidb_util:ensure_expiry(Options),
     Size = proplists:get_value(size, Options, 2048),
@@ -94,7 +94,7 @@ init([Name, Options]) ->
     case do_open(Name, Options, [exclusive]) of
         {ok, IdxFile} ->
             file:write(IdxFile, ?FILE_FORMAT),
-            BloomFilter = bloom:new(erlang:min(Size, 16#ffffffff), 0.001),
+            BloomFilter = bloom:new(erlang:min(Size, 16#ffffffff), 0.01),
             BlockSize = hanoidb:get_opt(block_size, Options, ?NODE_SIZE),
             {ok, #state{ name=Name,
                          index_file_pos=?FIRST_BLOCK_POS, index_file=IdxFile,
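
Note: the Bloom filter's target false-positive rate is relaxed from 0.001
to 0.01. Assuming bloom:new/2 sizes a standard Bloom filter, bits per key
are roughly -ln(P) / (ln 2)^2, so this trades a 10x higher false-positive
rate for about a third less memory per key:

    %% Standard Bloom sizing; an assumption about bloom:new/2's internals.
    bits_per_key(P) -> -math:log(P) / math:pow(math:log(2), 2).
    %% bits_per_key(0.001) ~ 14.4 bits/key
    %% bits_per_key(0.01)  ~  9.6 bits/key  (about 33% smaller)
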
@@ -109,34 +109,39 @@ init([Name, Options]) ->
     end.

-handle_cast({add, Key, Value}, State)
-  when is_binary(Key), (is_binary(Value) orelse Value == ?TOMBSTONE)->
-    {ok, State2} = add_record(0, Key, Value, State),
+handle_cast({add, Key, {?TOMBSTONE, TStamp}}, State)
+  when is_binary(Key) ->
+    {ok, State2} = add_record(0, Key, {?TOMBSTONE, TStamp}, State),
+    {noreply, State2};
+handle_cast({add, Key, ?TOMBSTONE}, State)
+  when is_binary(Key) ->
+    {ok, State2} = add_record(0, Key, ?TOMBSTONE, State),
     {noreply, State2};
 handle_cast({add, Key, {Value, TStamp}}, State)
-  when is_binary(Key), (is_binary(Value) orelse Value == ?TOMBSTONE)->
+  when is_binary(Key), is_binary(Value) ->
     {ok, State2} = add_record(0, Key, {Value, TStamp}, State),
-    {noreply, State2}.
+    {noreply, State2};
+handle_cast({add, Key, Value}, State)
+  when is_binary(Key), is_binary(Value) ->
+    {ok, State2} = add_record(0, Key, Value, State),
+    {noreply, State2}.

 handle_call(count, _From, State = #state{ value_count=VC, tombstone_count=TC }) ->
     {ok, VC+TC, State};
 handle_call(close, _From, State) ->
     {ok, State2} = flush_nodes(State),
-    {stop,normal,ok,State2}.
+    {stop, normal, ok, State2}.

-handle_info(Info,State) ->
+handle_info(Info, State) ->
     error_logger:error_msg("Unknown info ~p~n", [Info]),
-    {stop,bad_msg,State}.
+    {stop, bad_msg, State}.

 terminate(normal,_State) ->
     ok;
-%% premature delete -> cleanup
 terminate(_Reason, State) ->
-    file:close( State#state.index_file ),
-    file:delete( hanoidb_util:index_file_name(State#state.name) ).
+    %% premature delete -> cleanup
+    file:close(State#state.index_file),
+    file:delete(hanoidb_util:index_file_name(State#state.name)).

 code_change(_OldVsn, State, _Extra) ->
     {ok, State}.
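
Note: the two catch-all add clauses with orelse guards are split into four
explicit shapes, most specific first, so each cast now hits exactly one
clause (hypothetical arguments):

    gen_server:cast(Ref, {add, <<"k">>, {?TOMBSTONE, TStamp}}),  %% clause 1
    gen_server:cast(Ref, {add, <<"k">>, ?TOMBSTONE}),            %% clause 2
    gen_server:cast(Ref, {add, <<"k">>, {<<"v">>, TStamp}}),     %% clause 3
    gen_server:cast(Ref, {add, <<"k">>, <<"v">>}),               %% clause 4

Unrelated to this change but visible in the same hunk: handle_call(count,
...) returns {ok, VC+TC, State}, which is not a valid gen_server
handle_call return; {reply, VC+TC, State} is presumably intended, and it
looks like a plausible suspect while "chasing the bug".
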
@@ -174,7 +179,6 @@ do_open(Name, Options, OpenOpts) ->
 %% @doc flush pending nodes and write trailer
 flush_nodes(#state{ nodes=[], last_node_pos=LastNodePos, last_node_size=_LastNodeSize, bloom=Bloom }=State) ->
     BloomBin = term_to_binary(Bloom, [compressed]),
     BloomSize = byte_size(BloomBin),
@@ -213,7 +217,8 @@ add_record(Level, Key, Value,
     %% Assert that keys are increasing:
     case List of
-        [] -> ok;
+        [] ->
+            ok;
         [{PrevKey,_}|_] ->
             if
                 (Key >= PrevKey) -> ok;
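
Note: this last hunk is formatting only, but the assertion it touches is
the writer's ordering invariant: nodes are streamed out in key order, so
add_record/4 insists every new key is >= the last one buffered. With
hypothetical keys:

    %% List = [{<<"b">>, _} | _] and Key = <<"a">>: (Key >= PrevKey) is
    %% false, and the failure branch (cut off just below this hunk) takes
    %% over instead of silently writing an out-of-order index.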