Set default page size to 8k
Also reduce the read-ahead / delayed-write parameters so merge processes don't need too much memory.
parent 15fc05634a
commit f0833de3fc

3 changed files with 6 additions and 6 deletions
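For context, the hunks below shrink two per-file buffers. A back-of-envelope sketch of what that saves per open file, using only the literal values from the diff (the module and function names are made up for illustration and are not part of this commit):

```erlang
%% Rough arithmetic only: per-file buffer budget before and after this
%% change, using the literal values from the hunks below. Process and
%% heap overhead are not included.
-module(buffer_budget).          % hypothetical module, illustration only
-export([per_file/0]).

per_file() ->
    OldReadAhead    = 1024 * 512,   % 524288 bytes (512 KiB), sequential reader
    NewReadAhead    = 1024 * 32,    %  32768 bytes ( 32 KiB)
    OldDelayedWrite = 512 * 1024,   % 524288 bytes (512 KiB), index writer
    NewDelayedWrite = 8192 * 4,     %  32768 bytes ( 32 KiB) = BlockSize * 4 at 8k pages
    {OldReadAhead + OldDelayedWrite,    % ~1 MiB of driver buffers per open file before
     NewReadAhead + NewDelayedWrite}.   % ~64 KiB per open file after
```

A merge process that holds several btree files open thus drops from roughly a megabyte of driver buffers per file to a few tens of kilobytes.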
@@ -38,7 +38,7 @@ Put these values in your `app.config` in the `hanoi` section
     {data_root, "./data/hanoi"},
     {compress, none | snappy | gzip},
     {sync_strategy, none | sync | {seconds, N}},
-    {page_size, 32768}
+    {page_size, 8192}
    ]},
 ```
 
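For reference, a minimal `app.config` sketch with the new default. The outer wrapper and the concrete `compress`/`sync_strategy` choices are illustrative; only the keys and the 8192 page size come from the config snippet above:

```erlang
%% Sketch of an app.config using the new default page size.
[
 {hanoi, [
    {data_root, "./data/hanoi"},
    {compress, none},                 % none | snappy | gzip
    {sync_strategy, {seconds, 5}},    % none | sync | {seconds, N}
    {page_size, 8192}
 ]}
].
```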
@@ -46,7 +46,7 @@ open(Name) ->
 
 %% this is how to open a btree for sequential scanning (merge, fold)
 open(Name, sequential) ->
-    {ok, File} = file:open(Name, [raw,read,{read_ahead, 1024 * 512},binary]),
+    {ok, File} = file:open(Name, [raw,read,{read_ahead, 1024 * 32},binary]),
     open2(Name, File);
 
 %% this is how to open a btree for random access
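The smaller read_ahead only changes how much the file driver buffers ahead of a sequential scan. A self-contained sketch of the same `file:open/2` pattern (the module and fold helper are hypothetical, not hanoi's API):

```erlang
%% Sketch: fold over a file sequentially with a 32 KiB read-ahead buffer,
%% mirroring the option list in the hunk above.
-module(seq_scan).              % hypothetical module
-export([fold_chunks/3]).

fold_chunks(Name, Fun, Acc0) ->
    {ok, File} = file:open(Name, [raw, read, {read_ahead, 1024 * 32}, binary]),
    try loop(File, Fun, Acc0)
    after file:close(File)
    end.

loop(File, Fun, Acc) ->
    case file:read(File, 8192) of   % read page-sized chunks
        {ok, Bin} -> loop(File, Fun, Fun(Bin, Acc));
        eof       -> Acc
    end.
```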
@@ -31,7 +31,7 @@
 %% Streaming btree writer. Accepts only monotonically increasing keys for put.
 %%
 
--define(NODE_SIZE, 32*1024).
+-define(NODE_SIZE, 8*1024).
 
 -behavior(gen_server).
@@ -82,15 +82,15 @@ init([Name,Options]) ->
     Size = proplists:get_value(size, Options, 2048),
 
     % io:format("got name: ~p~n", [Name]),
 
-
+    BlockSize = hanoi:get_opt(block_size, Options, ?NODE_SIZE),
     case file:open( hanoi_util:index_file_name(Name),
-                    [raw, exclusive, write, {delayed_write, 512 * 1024, 2000}]) of
+                    [raw, exclusive, write, {delayed_write, BlockSize * 4, 2000}]) of
         {ok, IdxFile} ->
             {ok, BloomFilter} = ebloom:new(erlang:min(Size,16#ffffffff), 0.01, 123),
             {ok, #state{ name=Name,
                          index_file_pos=0, index_file=IdxFile,
                          bloom = BloomFilter,
-                         block_size = hanoi:get_opt(block_size, Options, ?NODE_SIZE),
+                         block_size = BlockSize,
                          compress = hanoi:get_opt(compress, Options, none)
                        }};
         {error, _}=Error ->
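On the write side, the delayed_write buffer is now sized as four blocks instead of a fixed 512 KiB. A standalone sketch of that open/write pattern (hypothetical module, not the hanoi writer itself):

```erlang
%% Sketch: append blocks through a delayed_write buffer of 4 blocks,
%% as in {delayed_write, BlockSize * 4, 2000} above.
-module(block_writer).          % hypothetical module
-export([write_blocks/2]).

-define(BLOCK_SIZE, 8 * 1024).  % matches the new NODE_SIZE default

write_blocks(Name, Blocks) ->
    {ok, File} = file:open(Name, [raw, exclusive, write,
                                  {delayed_write, ?BLOCK_SIZE * 4, 2000}]),
    try
        lists:foreach(fun(Block) -> ok = file:write(File, Block) end, Blocks)
    after
        file:close(File)
    end.
```

With the 2000 ms flush delay unchanged, small writes still coalesce; only the amount held in the driver before a flush shrinks along with the page size.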