Rename hanoi -> hanoidb

This commit is contained in:
Kresten Krab Thorup 2012-05-07 17:22:55 +02:00
parent 3f5a8a7792
commit e315b92faf
25 changed files with 641 additions and 331 deletions

View file

@ -1,12 +1,12 @@
# Hanoi Ordered Key/Value Storage
# HanoiDB Ordered Key/Value Storage
Hanoi implements an ordered key/value storage engine, implemented
HanoiDB implements an ordered key/value storage engine, implemented
using "doubling sizes" persistent ordered sets of key/value pairs,
much like LevelDB.
Here's the bullet list:
- Insert, Delete and Read all have worst case log<sub>2</sub>(N) latency.
- Insert, Delete and Read all have worst case *O*(log<sub>2</sub>(*N*)) latency.
- Incremental space reclamation: The cost of evicting stale key/values
is amortized into insertion
- you don't need a separate eviction thread to keep memory use low
@ -26,23 +26,44 @@ Here's the bullet list:
- Low CPU overhead
- ~2000 lines of pure Erlang code in src/*.erl
Hanoi is developed by Trifork, a Riak expert solutions provider. You're most
HanoiDB is developed by Trifork, a Riak expert solutions provider. You're most
welcome to contact us if you want help optimizing your Riak setup.
### Configuration options
Put these values in your `app.config` in the `hanoi` section
Put these values in your `app.config` in the `hanoidb` section
```erlang
{hanoi, [
{data_root, "./data/hanoi"},
{compress, none | snappy | gzip},
{hanoidb, [
{data_root, "./data/hanoidb"},
%% Enable/disable on-disk compression.
%%
{compress, none | gzip},
%% Sync strategy `none' only syncs every time the
%% nursery runs full, which is currently hard coded
%% to be every 256 inserts or deletes.
%%
%% Sync strategy `sync' will sync the nursery log
%% for every insert or delete operation.
%%
{sync_strategy, none | sync | {seconds, N}},
%% The page size is a minimum page size, when a page fills
%% up to beyond this size, it is written to disk.
%% Compression is applied to such page-sized units.
%%
{page_size, 8192},
%% Read/write buffer sizes apply to merge processes.
%% A merge process has two read buffers and a write
%% buffer, and there is a merge process *per level* in
%% the database.
%%
{write_buffer_size, 524288}, % 512kB
{read_buffer_size, 524288}, % 512kB
%%
%% The merge strategy is one of `fast' or `predictable'.
%% Both have same log2(N) worst case, but `fast' is
%% sometimes faster, yielding latency fluctuations.
@ -51,18 +72,19 @@ Put these values in your `app.config` in the `hanoi` section
]},
```
### How to deploy Hanoi as a Riak/KV backend
### How to deploy HanoiDB as a Riak/KV backend
This storage engine can function as an alternative backend for Basho's Riak/KV.
You can deploy `hanoi` into a Riak devrel cluster using the `enable-hanoi`
You can deploy `hanoidb` into a Riak devrel cluster using the `enable-hanoidb`
script. Clone the `riak` repo, change your working directory to it, and then
execute the `enable-hanoi` script. It adds `hanoi` as a dependency, runs `make
execute the `enable-hanoidb` script. It adds `hanoidb` as a dependency, runs `make
all devrel`, and then modifies the configuration settings of the resulting dev
nodes to use the hanoi storage backend.
nodes to use the hanoidb storage backend.
1. `git clone git://github.com/basho/riak.git`
1. `mkdir riak/deps`
1. `cd riak/deps`
1. `git clone git://github.com/basho/hanoi.git`
1. `git clone git://github.com/basho/hanoidb.git`
1. `cd ..`
1. `./deps/hanoi/enable-hanoi`
1. `./deps/hanoidb/enable-hanoidb`

18
enable-hanoi → enable-hanoidb Executable file → Normal file
View file

@ -1,12 +1,12 @@
#!/bin/sh
# This script adds hanoi to a riak github repo. Run it in the riak repo
# This script adds hanoidb to a riak github repo. Run it in the riak repo
# directory.
#
# First it adds hanoi, then runs "make all devrel" and then enables the
# hanoi storage backend in the resulting dev nodes.
# First it adds hanoidb, then runs "make all devrel" and then enables the
# hanoidb storage backend in the resulting dev nodes.
#
# This script is intended to be temporary. Once hanoi is made into a proper
# This script is intended to be temporary. Once hanoidb is made into a proper
# riak citizen, this script will no longer be needed.
set -e
@ -35,17 +35,17 @@ fi
./rebar get-deps
file=./deps/riak_kv/src/riak_kv.app.src
if ! grep -q hanoi $file ; then
if ! grep -q hanoidb $file ; then
echo
echo "Modifying $file, saving the original as ${file}.orig ..."
perl -i.orig -pe '/\bos_mon,/ && print qq( hanoi,\n)' $file
perl -i.orig -pe '/\bos_mon,/ && print qq( hanoidb,\n)' $file
fi
file=./deps/riak_kv/rebar.config
if ! grep -q hanoi $file ; then
if ! grep -q hanoidb $file ; then
echo
echo "Modifying $file, saving the original as ${file}.orig ..."
perl -i.orig -pe '/\bsext\b/ && print qq( {hanoi, ".*", {git, "git\@github.com:basho/hanoi.git", "master"}},\n)' $file
perl -i.orig -pe '/\bsext\b/ && print qq( {hanoidb, ".*", {git, "git\@github.com:basho/hanoidb.git", "master"}},\n)' $file
fi
./rebar get-deps
@ -55,6 +55,6 @@ make all devrel
echo
echo 'Modifying all dev/dev*/etc/app.config files, saving originals with .orig suffix...'
perl -i.orig -ne 'if (/\bstorage_backend,/) { s/(storage_backend, )[^\}]+/\1riak_kv_hanoi_backend/; print } elsif (/\{eleveldb,/) { $eleveldb++; print } elsif ($eleveldb && /^\s+\]\},/) { $eleveldb = 0; print; print qq(\n {hanoi, [\n {data_root, "./data/hanoi"}\n ]},\n\n) } else { print }' dev/dev*/etc/app.config
perl -i.orig -ne 'if (/\bstorage_backend,/) { s/(storage_backend, )[^\}]+/\1riak_kv_hanoidb_backend/; print } elsif (/\{eleveldb,/) { $eleveldb++; print } elsif ($eleveldb && /^\s+\]\},/) { $eleveldb = 0; print; print qq(\n {hanoidb, [\n {data_root, "./data/hanoidb"}\n ]},\n\n) } else { print }' dev/dev*/etc/app.config
exit 0

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -30,7 +30,7 @@
-define(BTREE_ASYNC_CHUNK_SIZE, 100).
%%
%% The btree_range structure is a bit assymetric, here is why:
%% The key_range structure is a bit asymmetric; here is why:
%%
%% from_key=<<>> is "less than" any other key, hence we don't need to
%% handle from_key=undefined to support an open-ended start of the
@ -38,7 +38,7 @@
%% which is > any possible key, hence we need to allow to_key=undefined
%% as a token of an interval that has no upper limit.
%%
-record(btree_range, { from_key = <<>> :: binary(),
-record(key_range, { from_key = <<>> :: binary(),
from_inclusive = true :: boolean(),
to_key :: binary() | undefined,
to_inclusive = false :: boolean(),

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,7 +22,7 @@
%%
%% ----------------------------------------------------------------------------
-module(basho_bench_driver_hanoi).
-module(basho_bench_driver_hanoidb).
-record(state, { tree,
filename,
@ -33,10 +33,10 @@
-export([new/1,
run/4]).
-include("hanoi.hrl").
-include("hanoidb.hrl").
-include_lib("basho_bench/include/basho_bench.hrl").
-record(btree_range, { from_key = <<>> :: binary(),
-record(key_range, { from_key = <<>> :: binary(),
from_inclusive = true :: boolean(),
to_key :: binary() | undefined,
to_inclusive = false :: boolean(),
@ -48,20 +48,20 @@
new(_Id) ->
%% Make sure hanoidb is available
case code:which(hanoi) of
case code:which(hanoidb) of
non_existing ->
?FAIL_MSG("~s requires hanoi to be available on code path.\n",
?FAIL_MSG("~s requires hanoidb to be available on code path.\n",
[?MODULE]);
_ ->
ok
end,
%% Get the target directory
Dir = basho_bench_config:get(hanoi_dir, "."),
Filename = filename:join(Dir, "test.hanoi"),
Dir = basho_bench_config:get(hanoidb_dir, "."),
Filename = filename:join(Dir, "test.hanoidb"),
%% Look for sync interval config
case basho_bench_config:get(hanoi_sync_interval, infinity) of
case basho_bench_config:get(hanoidb_sync_interval, infinity) of
Value when is_integer(Value) ->
SyncInterval = Value;
infinity ->
@ -69,9 +69,9 @@ new(_Id) ->
end,
%% Open the hanoidb store
case hanoi:open(Filename) of
case hanoidb:open(Filename) of
{error, Reason} ->
?FAIL_MSG("Failed to open hanoi in ~s: ~p\n", [Filename, Reason]);
?FAIL_MSG("Failed to open hanoidb in ~s: ~p\n", [Filename, Reason]);
{ok, FBTree} ->
{ok, #state { tree = FBTree,
filename = Filename,
@ -80,7 +80,7 @@ new(_Id) ->
end.
run(get, KeyGen, _ValueGen, State) ->
case hanoi:lookup(State#state.tree, KeyGen()) of
case hanoidb:lookup(State#state.tree, KeyGen()) of
{ok, _Value} ->
{ok, State};
not_found ->
@ -89,14 +89,14 @@ run(get, KeyGen, _ValueGen, State) ->
{error, Reason}
end;
run(put, KeyGen, ValueGen, State) ->
case hanoi:put(State#state.tree, KeyGen(), ValueGen()) of
case hanoidb:put(State#state.tree, KeyGen(), ValueGen()) of
ok ->
{ok, State};
{error, Reason} ->
{error, Reason}
end;
run(delete, KeyGen, _ValueGen, State) ->
case hanoi:delete(State#state.tree, KeyGen()) of
case hanoidb:delete(State#state.tree, KeyGen()) of
ok ->
{ok, State};
{error, Reason} ->
@ -105,12 +105,12 @@ run(delete, KeyGen, _ValueGen, State) ->
run(fold_100, KeyGen, _ValueGen, State) ->
[From,To] = lists:usort([KeyGen(), KeyGen()]),
case hanoi:sync_fold_range(State#state.tree,
case hanoidb:sync_fold_range(State#state.tree,
fun(_Key,_Value,Count) ->
Count+1
end,
0,
#btree_range{ from_key=From,
#key_range{ from_key=From,
to_key=To,
limit=100 }) of
Count when Count >= 0; Count =< 100 ->

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,7 +22,7 @@
%%
%% ----------------------------------------------------------------------------
{application, hanoi,
{application, hanoidb,
[
{description, ""},
{vsn, "1.0.0"},
@ -31,6 +31,6 @@
kernel,
stdlib
]},
{mod, {hanoi_app, []}},
{mod, {hanoidb_app, []}},
{env, []}
]}.

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,7 +22,7 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi).
-module(hanoidb).
-author('Kresten Krab Thorup <krab@trifork.com>').
@ -36,9 +36,9 @@
-export([get_opt/2, get_opt/3]).
-include("hanoi.hrl").
-include("hanoidb.hrl").
-include_lib("kernel/include/file.hrl").
-include_lib("include/hanoi.hrl").
-include_lib("include/hanoidb.hrl").
-include_lib("include/plain_rpc.hrl").
-record(state, { top, nursery, dir, opt, max_level }).
@ -53,19 +53,25 @@
%% PUBLIC API
-type hanoi() :: pid().
-type key_range() :: #btree_range{}.
-type hanoidb() :: pid().
-type key_range() :: #key_range{}.
-type config_option() :: {compress, none | gzip | snappy}
| {page_size, pos_integer()}
| {read_buffer_size, pos_integer()}
| {write_buffer_size, pos_integer()}
| {merge_strategy, fast | predictable }
.
% @doc
% Create or open existing hanoi store. Argument `Dir' names a
% Create or open existing hanoidb store. Argument `Dir' names a
% directory in which to keep the data files. By convention, we
% name hanoi data directories with extension ".hanoi".
% name hanoidb data directories with extension ".hanoidb".
% @spec open(Dir::string()) -> pid().
-spec open(Dir::string()) -> pid().
open(Dir) ->
open(Dir, []).
- spec open(Dir::string(), Opts::[_]) -> pid().
-spec open(Dir::string(), Opts::[config_option()]) -> pid().
open(Dir, Opts) ->
ok = start_app(),
gen_server:start(?MODULE, [Dir, Opts], []).
@ -102,39 +108,39 @@ get(Ref,Key) when is_binary(Key) ->
lookup(Ref,Key) when is_binary(Key) ->
gen_server:call(Ref, {get, Key}, infinity).
-spec delete(hanoi(), binary()) ->
-spec delete(hanoidb(), binary()) ->
ok | {error, term()}.
delete(Ref,Key) when is_binary(Key) ->
gen_server:call(Ref, {delete, Key}, infinity).
-spec put(hanoi(), binary(), binary()) ->
-spec put(hanoidb(), binary(), binary()) ->
ok | {error, term()}.
put(Ref,Key,Value) when is_binary(Key), is_binary(Value) ->
gen_server:call(Ref, {put, Key, Value}, infinity).
-type transact_spec() :: {put, binary(), binary()} | {delete, binary()}.
-spec transact(hanoi(), [transact_spec()]) ->
-spec transact(hanoidb(), [transact_spec()]) ->
ok | {error, term()}.
transact(Ref, TransactionSpec) ->
gen_server:call(Ref, {transact, TransactionSpec}, infinity).
-type kv_fold_fun() :: fun((binary(),binary(),any())->any()).
-spec fold(hanoi(),kv_fold_fun(),any()) -> any().
-spec fold(hanoidb(),kv_fold_fun(),any()) -> any().
fold(Ref,Fun,Acc0) ->
fold_range(Ref,Fun,Acc0,#btree_range{from_key= <<>>, to_key=undefined}).
fold_range(Ref,Fun,Acc0,#key_range{from_key= <<>>, to_key=undefined}).
-spec fold_range(hanoi(),kv_fold_fun(),any(),key_range()) -> any().
-spec fold_range(hanoidb(),kv_fold_fun(),any(),key_range()) -> any().
fold_range(Ref,Fun,Acc0,Range) ->
{ok, FoldWorkerPID} = hanoi_fold_worker:start(self()),
if Range#btree_range.limit < 10 ->
{ok, FoldWorkerPID} = hanoidb_fold_worker:start(self()),
if Range#key_range.limit < 10 ->
ok = gen_server:call(Ref, {blocking_range, FoldWorkerPID, Range}, infinity);
true ->
ok = gen_server:call(Ref, {snapshot_range, FoldWorkerPID, Range}, infinity)
end,
MRef = erlang:monitor(process, FoldWorkerPID),
?log("fold_range begin: self=~p, worker=~p~n", [self(), FoldWorkerPID]),
Result = receive_fold_range(MRef, FoldWorkerPID, Fun, Acc0, Range#btree_range.limit),
Result = receive_fold_range(MRef, FoldWorkerPID, Fun, Acc0, Range#key_range.limit),
?log("fold_range done: self:~p, result=~P~n", [self(), Result, 20]),
Result.
@ -154,8 +160,8 @@ receive_fold_range(MRef,PID,Fun,Acc0, Limit) ->
{ok, Fun(K,V,Acc0)}
catch
Class:Exception ->
% ?log("Exception in hanoi fold: ~p ~p", [Exception, erlang:get_stacktrace()]),
% lager:warn("Exception in hanoi fold: ~p", [Exception]),
% ?log("Exception in hanoidb fold: ~p ~p", [Exception, erlang:get_stacktrace()]),
% lager:warn("Exception in hanoidb fold: ~p", [Exception]),
{'EXIT', Class, Exception, erlang:get_stacktrace()}
end
of
@ -236,13 +242,13 @@ init([Dir, Opts]) ->
case file:read_file_info(Dir) of
{ok, #file_info{ type=directory }} ->
{ok, TopLevel, MaxLevel} = open_levels(Dir,Opts),
{ok, Nursery} = hanoi_nursery:recover(Dir, TopLevel, MaxLevel);
{ok, Nursery} = hanoidb_nursery:recover(Dir, TopLevel, MaxLevel);
{error, E} when E =:= enoent ->
ok = file:make_dir(Dir),
{ok, TopLevel} = hanoi_level:open(Dir, ?TOP_LEVEL, undefined, Opts, self()),
{ok, TopLevel} = hanoidb_level:open(Dir, ?TOP_LEVEL, undefined, Opts, self()),
MaxLevel = ?TOP_LEVEL,
{ok, Nursery} = hanoi_nursery:new(Dir, MaxLevel)
{ok, Nursery} = hanoidb_nursery:new(Dir, MaxLevel)
end,
{ok, #state{ top=TopLevel, dir=Dir, nursery=Nursery, opt=Opts, max_level=MaxLevel }}.
@ -276,9 +282,9 @@ open_levels(Dir,Options) ->
%%
{TopLevel, MaxMerge} =
lists:foldl( fun(LevelNo, {NextLevel, MergeWork0}) ->
{ok, Level} = hanoi_level:open(Dir,LevelNo,NextLevel,Options,self()),
{ok, Level} = hanoidb_level:open(Dir,LevelNo,NextLevel,Options,self()),
MergeWork = MergeWork0 + hanoi_level:unmerged_count(Level),
MergeWork = MergeWork0 + hanoidb_level:unmerged_count(Level),
{Level, MergeWork}
end,
@ -291,10 +297,10 @@ open_levels(Dir,Options) ->
{ok, TopLevel, MaxLevel}.
do_merge(TopLevel, _Inc, N) when N =< 0 ->
ok = hanoi_level:await_incremental_merge(TopLevel);
ok = hanoidb_level:await_incremental_merge(TopLevel);
do_merge(TopLevel, Inc, N) ->
ok = hanoi_level:begin_incremental_merge(TopLevel),
ok = hanoidb_level:begin_incremental_merge(TopLevel),
do_merge(TopLevel, Inc, N-Inc).
@ -311,9 +317,9 @@ parse_level(FileName) ->
handle_info({bottom_level, N}, #state{ nursery=Nursery, top=TopLevel }=State)
when N > State#state.max_level ->
State2 = State#state{ max_level = N,
nursery= hanoi_nursery:set_max_level(Nursery, N) },
nursery= hanoidb_nursery:set_max_level(Nursery, N) },
hanoi_level:set_max_level(TopLevel, N),
hanoidb_level:set_max_level(TopLevel, N),
{noreply, State2};
@ -340,13 +346,13 @@ code_change(_OldVsn, State, _Extra) ->
handle_call({snapshot_range, FoldWorkerPID, Range}, _From, State=#state{ top=TopLevel, nursery=Nursery }) ->
hanoi_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
Result = hanoi_level:snapshot_range(TopLevel, FoldWorkerPID, Range),
hanoidb_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
Result = hanoidb_level:snapshot_range(TopLevel, FoldWorkerPID, Range),
{reply, Result, State};
handle_call({blocking_range, FoldWorkerPID, Range}, _From, State=#state{ top=TopLevel, nursery=Nursery }) ->
hanoi_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
Result = hanoi_level:blocking_range(TopLevel, FoldWorkerPID, Range),
hanoidb_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
Result = hanoidb_level:blocking_range(TopLevel, FoldWorkerPID, Range),
{reply, Result, State};
handle_call({put, Key, Value}, _From, State) when is_binary(Key), is_binary(Value) ->
@ -362,20 +368,20 @@ handle_call({delete, Key}, _From, State) when is_binary(Key) ->
{reply, ok, State2};
handle_call({get, Key}, _From, State=#state{ top=Top, nursery=Nursery } ) when is_binary(Key) ->
case hanoi_nursery:lookup(Key, Nursery) of
case hanoidb_nursery:lookup(Key, Nursery) of
{value, ?TOMBSTONE} ->
{reply, not_found, State};
{value, Value} when is_binary(Value) ->
{reply, {ok, Value}, State};
none ->
Reply = hanoi_level:lookup(Top, Key),
Reply = hanoidb_level:lookup(Top, Key),
{reply, Reply, State}
end;
handle_call(close, _From, State=#state{top=Top}) ->
try
{ok, State2} = flush_nursery(State),
ok = hanoi_level:close(Top),
ok = hanoidb_level:close(Top),
{stop, normal, ok, State2}
catch
E:R ->
@ -384,13 +390,13 @@ handle_call(close, _From, State=#state{top=Top}) ->
end;
handle_call(destroy, _From, State=#state{top=Top, nursery=Nursery }) ->
ok = hanoi_nursery:destroy(Nursery),
ok = hanoi_level:destroy(Top),
ok = hanoidb_nursery:destroy(Nursery),
ok = hanoidb_level:destroy(Top),
{stop, normal, ok, State#state{ top=undefined, nursery=undefined, max_level=?TOP_LEVEL }}.
do_put(Key, Value, State=#state{ nursery=Nursery, top=Top }) ->
{ok, Nursery2} = hanoi_nursery:add_maybe_flush(Key, Value, Nursery, Top),
{ok, Nursery2} = hanoidb_nursery:add_maybe_flush(Key, Value, Nursery, Top),
{ok, State#state{ nursery=Nursery2 }}.
do_transact([{put, Key, Value}], State) ->
@ -400,12 +406,12 @@ do_transact([{delete, Key}], State) ->
do_transact([], _State) ->
ok;
do_transact(TransactionSpec, State=#state{ nursery=Nursery, top=Top }) ->
{ok, Nursery2} = hanoi_nursery:transact(TransactionSpec, Nursery, Top),
{ok, Nursery2} = hanoidb_nursery:transact(TransactionSpec, Nursery, Top),
{ok, State#state{ nursery=Nursery2 }}.
flush_nursery(State=#state{nursery=Nursery, top=Top, dir=Dir, max_level=MaxLevel}) ->
ok = hanoi_nursery:finish(Nursery, Top),
{ok, Nursery2} = hanoi_nursery:new(Dir, MaxLevel),
ok = hanoidb_nursery:finish(Nursery, Top),
{ok, Nursery2} = hanoidb_nursery:new(Dir, MaxLevel),
{ok, State#state{ nursery=Nursery2 }}.
start_app() ->

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -30,18 +30,18 @@
-define(TOMBSTONE, 'deleted').
-define(KEY_IN_FROM_RANGE(Key,Range),
((Range#btree_range.from_inclusive andalso
(Range#btree_range.from_key =< Key))
((Range#key_range.from_inclusive andalso
(Range#key_range.from_key =< Key))
orelse
(Range#btree_range.from_key < Key))).
(Range#key_range.from_key < Key))).
-define(KEY_IN_TO_RANGE(Key,Range),
((Range#btree_range.to_key == undefined)
((Range#key_range.to_key == undefined)
orelse
((Range#btree_range.to_inclusive andalso
(Key =< Range#btree_range.to_key))
((Range#key_range.to_inclusive andalso
(Key =< Range#key_range.to_key))
orelse
(Key < Range#btree_range.to_key)))).
(Key < Range#key_range.to_key)))).
-define(KEY_IN_RANGE(Key,Range),
(?KEY_IN_FROM_RANGE(Key,Range) andalso ?KEY_IN_TO_RANGE(Key,Range))).

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,7 +22,7 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_app).
-module(hanoidb_app).
-author('Kresten Krab Thorup <krab@trifork.com>').
-behaviour(application).
@ -35,7 +35,7 @@
%% ===================================================================
start(_StartType, _StartArgs) ->
hanoi_sup:start_link().
hanoidb_sup:start_link().
stop(_State) ->
ok.

View file

@ -5,7 +5,7 @@
{concurrent, 1}.
{driver, basho_bench_driver_hanoi}.
{driver, basho_bench_driver_hanoidb}.
{key_generator, {int_to_bin,{uniform_int, 5000000}}}.
@ -16,11 +16,11 @@
%% the second element in the list below (e.g., "../../public/bitcask") must point to
the relevant directory of a hanoidb installation
{code_paths, ["deps/stats",
"../hanoi/ebin",
"../hanoi/deps/plain_fsm/ebin",
"../hanoi/deps/ebloom/ebin"
"../hanoidb/ebin",
"../hanoidb/deps/plain_fsm/ebin",
"../hanoidb/deps/ebloom/ebin"
]}.
{bitcask_dir, "/tmp/hanoi.bench"}.
{bitcask_dir, "/tmp/hanoidb.bench"}.
{bitcask_flags, [o_sync]}.

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,7 +22,7 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_fold_worker).
-module(hanoidb_fold_worker).
-author('Kresten Krab Thorup <krab@trifork.com>').
-ifdef(DEBUG).
@ -65,7 +65,7 @@
-behavior(plain_fsm).
-export([data_vsn/0, code_change/3]).
-include("hanoi.hrl").
-include("hanoidb.hrl").
-include("plain_rpc.hrl").
-record(state, {sendto, sendto_ref}).

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,16 +22,16 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_level).
-module(hanoidb_level).
-author('Kresten Krab Thorup <krab@trifork.com>').
-include("include/plain_rpc.hrl").
-include("include/hanoi.hrl").
-include("src/hanoi.hrl").
-include("include/hanoidb.hrl").
-include("src/hanoidb.hrl").
%%
%% Manages 0..2 of hanoi index file, and governs all aspects of
%% Manages 0..2 hanoidb index files, and governs all aspects of
%% merging, lookup, folding, etc. for these files
%%
@ -182,12 +182,12 @@ initialize2(State) ->
file:delete(BFileName),
ok = file:rename(MFileName, AFileName),
{ok, BTA} = hanoi_reader:open(AFileName, [random|State#state.opts]),
{ok, BTA} = hanoidb_reader:open(AFileName, [random|State#state.opts]),
case file:read_file_info(CFileName) of
{ok, _} ->
file:rename(CFileName, BFileName),
{ok, BTB} = hanoi_reader:open(BFileName, [random|State#state.opts]),
{ok, BTB} = hanoidb_reader:open(BFileName, [random|State#state.opts]),
check_begin_merge_then_loop0(init_state(State#state{ a= BTA, b=BTB }));
{error, enoent} ->
@ -197,12 +197,12 @@ initialize2(State) ->
{error, enoent} ->
case file:read_file_info(BFileName) of
{ok, _} ->
{ok, BTA} = hanoi_reader:open(AFileName, [random|State#state.opts]),
{ok, BTB} = hanoi_reader:open(BFileName, [random|State#state.opts]),
{ok, BTA} = hanoidb_reader:open(AFileName, [random|State#state.opts]),
{ok, BTB} = hanoidb_reader:open(BFileName, [random|State#state.opts]),
case file:read_file_info(CFileName) of
{ok, _} ->
{ok, BTC} = hanoi_reader:open(CFileName, [random|State#state.opts]);
{ok, BTC} = hanoidb_reader:open(CFileName, [random|State#state.opts]);
{error, enoent} ->
BTC = undefined
end,
@ -216,7 +216,7 @@ initialize2(State) ->
case file:read_file_info(AFileName) of
{ok, _} ->
{ok, BTA} = hanoi_reader:open(AFileName, [random|State#state.opts]),
{ok, BTA} = hanoidb_reader:open(AFileName, [random|State#state.opts]),
main_loop(init_state(State#state{ a=BTA }));
{error, enoent} ->
@ -292,7 +292,7 @@ main_loop(State = #state{ next=Next }) ->
plain_rpc:send_reply(From, ok),
case hanoi_reader:open(ToFileName, [random|State#state.opts]) of
case hanoidb_reader:open(ToFileName, [random|State#state.opts]) of
{ok, BT} ->
if SetPos == #state.b ->
check_begin_merge_then_loop(setelement(SetPos, State, BT));
@ -396,7 +396,7 @@ main_loop(State = #state{ next=Next }) ->
%% rpc would fail when we fall off the cliff
if Next == undefined -> ok;
true ->
hanoi_level:close(Next)
hanoidb_level:close(Next)
end,
plain_rpc:send_reply(From, ok),
{ok, closing};
@ -412,7 +412,7 @@ main_loop(State = #state{ next=Next }) ->
%% rpc would fail when we fall off the cliff
if Next == undefined -> ok;
true ->
hanoi_level:destroy(Next)
hanoidb_level:destroy(Next)
end,
plain_rpc:send_reply(From, ok),
{ok, destroying};
@ -531,7 +531,7 @@ main_loop(State = #state{ next=Next }) ->
% then, rename M to A, and open it
AFileName = filename("A",State2),
ok = file:rename(MFileName, AFileName),
{ok, BT} = hanoi_reader:open(AFileName, [random|State#state.opts]),
{ok, BT} = hanoidb_reader:open(AFileName, [random|State#state.opts]),
% iff there is a C file, then move it to B position
% TODO: consider recovery for this
@ -629,7 +629,7 @@ do_step(StepFrom, PreviousWork, State) ->
TotalWork = (MaxLevel-?TOP_LEVEL+1) * WorkUnit,
WorkUnitsLeft = max(0, TotalWork-PreviousWork),
case hanoi:get_opt( merge_strategy, State#state.opts, fast) of
case hanoidb:get_opt( merge_strategy, State#state.opts, fast) of
fast ->
WorkToDoHere = min(WorkLeftHere, WorkUnitsLeft);
predictable ->
@ -696,7 +696,7 @@ do_lookup(_Key, [Pid]) when is_pid(Pid) ->
do_lookup(Key, [undefined|Rest]) ->
do_lookup(Key, Rest);
do_lookup(Key, [BT|Rest]) ->
case hanoi_reader:lookup(BT, Key) of
case hanoidb_reader:lookup(BT, Key) of
{ok, ?TOMBSTONE} ->
not_found;
{ok, Result} ->
@ -706,10 +706,10 @@ do_lookup(Key, [BT|Rest]) ->
end.
close_if_defined(undefined) -> ok;
close_if_defined(BT) -> hanoi_reader:close(BT).
close_if_defined(BT) -> hanoidb_reader:close(BT).
destroy_if_defined(undefined) -> ok;
destroy_if_defined(BT) -> hanoi_reader:destroy(BT).
destroy_if_defined(BT) -> hanoidb_reader:destroy(BT).
stop_if_defined(undefined) -> ok;
stop_if_defined(MergePid) when is_pid(MergePid) ->
@ -736,7 +736,7 @@ begin_merge(State) ->
try
?log("merge begun~n", []),
{ok, OutCount} = hanoi_merger:merge(AFileName, BFileName, XFileName,
{ok, OutCount} = hanoidb_merger:merge(AFileName, BFileName, XFileName,
?BTREE_SIZE(State#state.level + 1),
State#state.next =:= undefined,
State#state.opts ),
@ -757,8 +757,8 @@ close_and_delete_a_and_b(State) ->
AFileName = filename("A",State),
BFileName = filename("B",State),
ok = hanoi_reader:close(State#state.a),
ok = hanoi_reader:close(State#state.b),
ok = hanoidb_reader:close(State#state.a),
ok = hanoidb_reader:close(State#state.b),
ok = file:delete(AFileName),
ok = file:delete(BFileName),
@ -777,10 +777,10 @@ start_range_fold(FileName, WorkerPID, Range, State) ->
try
?log("start_range_fold ~p on ~p -> ~p", [self, FileName, WorkerPID]),
erlang:link(WorkerPID),
{ok, File} = hanoi_reader:open(FileName, [folding|State#state.opts]),
{ok, File} = hanoidb_reader:open(FileName, [folding|State#state.opts]),
do_range_fold2(File, WorkerPID, self(), Range),
erlang:unlink(WorkerPID),
hanoi_reader:close(File),
hanoidb_reader:close(File),
%% this will release the pinning of the fold file
Owner ! {range_fold_done, self(), FileName},
@ -792,12 +792,12 @@ end
end ),
{ok, PID}.
-spec do_range_fold(BT :: hanoi_reader:read_file(),
-spec do_range_fold(BT :: hanoidb_reader:read_file(),
WorkerPID :: pid(),
SelfOrRef :: pid() | reference(),
Range :: #btree_range{} ) -> ok.
Range :: #key_range{} ) -> ok.
do_range_fold(BT, WorkerPID, SelfOrRef, Range) ->
case hanoi_reader:range_fold(fun(Key,Value,_) ->
case hanoidb_reader:range_fold(fun(Key,Value,_) ->
WorkerPID ! {level_result, SelfOrRef, Key, Value},
ok
end,
@ -815,12 +815,12 @@ do_range_fold(BT, WorkerPID, SelfOrRef, Range) ->
-define(FOLD_CHUNK_SIZE, 100).
-spec do_range_fold2(BT :: hanoi_reader:read_file(),
-spec do_range_fold2(BT :: hanoidb_reader:read_file(),
WorkerPID :: pid(),
SelfOrRef :: pid() | reference(),
Range :: #btree_range{} ) -> ok.
Range :: #key_range{} ) -> ok.
do_range_fold2(BT, WorkerPID, SelfOrRef, Range) ->
try hanoi_reader:range_fold(fun(Key,Value,{0,KVs}) ->
try hanoidb_reader:range_fold(fun(Key,Value,{0,KVs}) ->
send(WorkerPID, SelfOrRef, [{Key,Value}|KVs]),
{?FOLD_CHUNK_SIZE-1, []};
(Key,Value,{N,KVs}) ->

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,7 +22,7 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_merger).
-module(hanoidb_merger).
-author('Kresten Krab Thorup <krab@trifork.com>').
%%
@ -31,7 +31,7 @@
-export([merge/6]).
-include("hanoi.hrl").
-include("hanoidb.hrl").
%% A merger which is inactive for this long will sleep
%% which means that it will close open files, and compress
@ -48,17 +48,17 @@
-define(LOCAL_WRITER, true).
merge(A,B,C, Size, IsLastLevel, Options) ->
{ok, BT1} = hanoi_reader:open(A, [sequential|Options]),
{ok, BT2} = hanoi_reader:open(B, [sequential|Options]),
{ok, BT1} = hanoidb_reader:open(A, [sequential|Options]),
{ok, BT2} = hanoidb_reader:open(B, [sequential|Options]),
case ?LOCAL_WRITER of
true ->
{ok, Out} = hanoi_writer:init([C, [{size,Size} | Options]]);
{ok, Out} = hanoidb_writer:init([C, [{size,Size} | Options]]);
false ->
{ok, Out} = hanoi_writer:open(C, [{size,Size} | Options])
{ok, Out} = hanoidb_writer:open(C, [{size,Size} | Options])
end,
{node, AKVs} = hanoi_reader:first_node(BT1),
{node, BKVs} = hanoi_reader:first_node(BT2),
{node, AKVs} = hanoidb_reader:first_node(BT1),
{node, BKVs} = hanoidb_reader:first_node(BT2),
scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, 0, {0, none}).
@ -66,9 +66,9 @@ terminate(Count, Out) ->
case ?LOCAL_WRITER of
true ->
{stop, normal, ok, _} = hanoi_writer:handle_call(close, self(), Out);
{stop, normal, ok, _} = hanoidb_writer:handle_call(close, self(), Out);
false ->
ok = hanoi_writer:close(Out)
ok = hanoidb_writer:close(Out)
end,
{ok, Count}.
@ -84,9 +84,9 @@ hibernate_scan(Keep) ->
receive
{step, From, HowMany} ->
{BT1, BT2, OutBin, IsLastLevel, AKVs, BKVs, Count, N} = erlang:binary_to_term( zlib:gunzip( Keep ) ),
scan(hanoi_reader:deserialize(BT1),
hanoi_reader:deserialize(BT2),
hanoi_writer:deserialize(OutBin),
scan(hanoidb_reader:deserialize(BT1),
hanoidb_reader:deserialize(BT2),
hanoidb_writer:deserialize(OutBin),
IsLastLevel, AKVs, BKVs, Count, {N+HowMany, From})
end.
@ -104,9 +104,9 @@ scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, Count, {N, FromPID}) when N < 1, AK
after ?HIBERNATE_TIMEOUT ->
case ?LOCAL_WRITER of
true ->
Args = {hanoi_reader:serialize(BT1),
hanoi_reader:serialize(BT2),
hanoi_writer:serialize(Out), IsLastLevel, AKVs, BKVs, Count, N},
Args = {hanoidb_reader:serialize(BT1),
hanoidb_reader:serialize(BT2),
hanoidb_writer:serialize(Out), IsLastLevel, AKVs, BKVs, Count, N},
Keep = zlib:gzip ( erlang:term_to_binary( Args ) ),
hibernate_scan(Keep);
false ->
@ -115,20 +115,20 @@ scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, Count, {N, FromPID}) when N < 1, AK
end;
scan(BT1, BT2, Out, IsLastLevel, [], BKVs, Count, Step) ->
case hanoi_reader:next_node(BT1) of
case hanoidb_reader:next_node(BT1) of
{node, AKVs} ->
scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, Count, Step);
end_of_data ->
hanoi_reader:close(BT1),
hanoidb_reader:close(BT1),
scan_only(BT2, Out, IsLastLevel, BKVs, Count, Step)
end;
scan(BT1, BT2, Out, IsLastLevel, AKVs, [], Count, Step) ->
case hanoi_reader:next_node(BT2) of
case hanoidb_reader:next_node(BT2) of
{node, BKVs} ->
scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, Count, Step);
end_of_data ->
hanoi_reader:close(BT2),
hanoidb_reader:close(BT2),
scan_only(BT1, Out, IsLastLevel, AKVs, Count, Step)
end;
@ -136,9 +136,9 @@ scan(BT1, BT2, Out, IsLastLevel, [{Key1,Value1}|AT]=AKVs, [{Key2,Value2}|BT]=BKV
if Key1 < Key2 ->
case ?LOCAL_WRITER of
true ->
{noreply, Out2} = hanoi_writer:handle_cast({add, Key1, Value1}, Out);
{noreply, Out2} = hanoidb_writer:handle_cast({add, Key1, Value1}, Out);
false ->
ok = hanoi_writer:add(Out2=Out, Key1, Value1)
ok = hanoidb_writer:add(Out2=Out, Key1, Value1)
end,
scan(BT1, BT2, Out2, IsLastLevel, AT, BKVs, Count+1, step(Step));
@ -146,9 +146,9 @@ scan(BT1, BT2, Out, IsLastLevel, [{Key1,Value1}|AT]=AKVs, [{Key2,Value2}|BT]=BKV
Key2 < Key1 ->
case ?LOCAL_WRITER of
true ->
{noreply, Out2} = hanoi_writer:handle_cast({add, Key2, Value2}, Out);
{noreply, Out2} = hanoidb_writer:handle_cast({add, Key2, Value2}, Out);
false ->
ok = hanoi_writer:add(Out2=Out, Key2, Value2)
ok = hanoidb_writer:add(Out2=Out, Key2, Value2)
end,
scan(BT1, BT2, Out2, IsLastLevel, AKVs, BT, Count+1, step(Step));
@ -160,9 +160,9 @@ scan(BT1, BT2, Out, IsLastLevel, [{Key1,Value1}|AT]=AKVs, [{Key2,Value2}|BT]=BKV
true ->
case ?LOCAL_WRITER of
true ->
{noreply, Out2} = hanoi_writer:handle_cast({add, Key2, Value2}, Out);
{noreply, Out2} = hanoidb_writer:handle_cast({add, Key2, Value2}, Out);
false ->
ok = hanoi_writer:add(Out2=Out, Key2, Value2)
ok = hanoidb_writer:add(Out2=Out, Key2, Value2)
end,
scan(BT1, BT2, Out2, IsLastLevel, AT, BT, Count+1, step(Step, 2))
end.
@ -173,8 +173,8 @@ hibernate_scan_only(Keep) ->
receive
{step, From, HowMany} ->
{BT, OutBin, IsLastLevel, KVs, Count, N} = erlang:binary_to_term( zlib:gunzip( Keep ) ),
scan_only(hanoi_reader:deserialize(BT),
hanoi_writer:deserialize(OutBin),
scan_only(hanoidb_reader:deserialize(BT),
hanoidb_writer:deserialize(OutBin),
IsLastLevel, KVs, Count, {N+HowMany, From})
end.
@ -191,14 +191,14 @@ scan_only(BT, Out, IsLastLevel, KVs, Count, {N, FromPID}) when N < 1, KVs =/= []
{step, From, HowMany} ->
scan_only(BT, Out, IsLastLevel, KVs, Count, {N+HowMany, From})
after ?HIBERNATE_TIMEOUT ->
Args = {hanoi_reader:serialize(BT),
hanoi_writer:serialize(Out), IsLastLevel, KVs, Count, N},
Args = {hanoidb_reader:serialize(BT),
hanoidb_writer:serialize(Out), IsLastLevel, KVs, Count, N},
Keep = zlib:gzip ( erlang:term_to_binary( Args ) ),
hibernate_scan_only(Keep)
end;
scan_only(BT, Out, IsLastLevel, [], Count, {_, FromPID}=Step) ->
case hanoi_reader:next_node(BT) of
case hanoidb_reader:next_node(BT) of
{node, KVs} ->
scan_only(BT, Out, IsLastLevel, KVs, Count, Step);
end_of_data ->
@ -208,7 +208,7 @@ scan_only(BT, Out, IsLastLevel, [], Count, {_, FromPID}=Step) ->
{PID, Ref} ->
PID ! {Ref, step_done}
end,
hanoi_reader:close(BT),
hanoidb_reader:close(BT),
terminate(Count, Out)
end;
@ -218,8 +218,8 @@ scan_only(BT, Out, true, [{_,?TOMBSTONE}|Rest], Count, Step) ->
scan_only(BT, Out, IsLastLevel, [{Key,Value}|Rest], Count, Step) ->
case ?LOCAL_WRITER of
true ->
{noreply, Out2} = hanoi_writer:handle_cast({add, Key, Value}, Out);
{noreply, Out2} = hanoidb_writer:handle_cast({add, Key, Value}, Out);
false ->
ok = hanoi_writer:add(Out2=Out, Key, Value)
ok = hanoidb_writer:add(Out2=Out, Key, Value)
end,
scan_only(BT, Out2, IsLastLevel, Rest, Count+1, step(Step)).

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,14 +22,14 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_nursery).
-module(hanoidb_nursery).
-author('Kresten Krab Thorup <krab@trifork.com>').
-export([new/2, recover/3, add/3, finish/2, lookup/2, add_maybe_flush/4]).
-export([do_level_fold/3, set_max_level/2, transact/3, destroy/1]).
-include("include/hanoi.hrl").
-include("hanoi.hrl").
-include("include/hanoidb.hrl").
-include("hanoidb.hrl").
-include_lib("kernel/include/file.hrl").
-record(nursery, { log_file, dir, cache, total_size=0, count=0,
@ -75,7 +75,7 @@ fill_cache(Transaction, Cache) when is_list(Transaction) ->
read_nursery_from_log(Directory, MaxLevel) ->
{ok, LogBinary} = file:read_file( ?LOGFILENAME(Directory) ),
KVs = hanoi_util:decode_crc_data( LogBinary, [] ),
KVs = hanoidb_util:decode_crc_data( LogBinary, [] ),
Cache = fill_cache(KVs, gb_trees:empty()),
{ok, #nursery{ dir=Directory, cache=Cache, count=gb_trees:size(Cache), max_level=MaxLevel }}.
@ -86,7 +86,7 @@ read_nursery_from_log(Directory, MaxLevel) ->
-spec add(#nursery{}, binary(), binary()|?TOMBSTONE) -> {ok, #nursery{}}.
add(Nursery=#nursery{ log_file=File, cache=Cache, total_size=TotalSize, count=Count }, Key, Value) ->
Data = hanoi_util:crc_encapsulate_kv_entry( Key, Value ),
Data = hanoidb_util:crc_encapsulate_kv_entry( Key, Value ),
ok = file:write(File, Data),
Nursery1 = do_sync(File, Nursery),
@ -101,7 +101,7 @@ add(Nursery=#nursery{ log_file=File, cache=Cache, total_size=TotalSize, count=Co
end.
do_sync(File, Nursery) ->
case application:get_env(hanoi, sync_strategy) of
case application:get_env(hanoidb, sync_strategy) of
{ok, sync} ->
file:datasync(File),
LastSync = now();
@ -143,15 +143,15 @@ finish(#nursery{ dir=Dir, cache=Cache, log_file=LogFile,
N when N>0 ->
%% next, flush cache to a new BTree
BTreeFileName = filename:join(Dir, "nursery.data"),
{ok, BT} = hanoi_writer:open(BTreeFileName, [{size,?BTREE_SIZE(?TOP_LEVEL)},
{ok, BT} = hanoidb_writer:open(BTreeFileName, [{size,?BTREE_SIZE(?TOP_LEVEL)},
{compress, none}]),
try
lists:foreach( fun({Key,Value}) ->
ok = hanoi_writer:add(BT, Key, Value)
ok = hanoidb_writer:add(BT, Key, Value)
end,
gb_trees:to_list(Cache))
after
ok = hanoi_writer:close(BT)
ok = hanoidb_writer:close(BT)
end,
% {ok, FileInfo} = file:read_file_info(BTreeFileName),
@ -160,11 +160,11 @@ finish(#nursery{ dir=Dir, cache=Cache, log_file=LogFile,
%% inject the B-Tree (blocking RPC)
ok = hanoi_level:inject(TopLevel, BTreeFileName),
ok = hanoidb_level:inject(TopLevel, BTreeFileName),
%% issue some work if this is a top-level inject (blocks until previous such
%% incremental merge is finished).
hanoi_level:begin_incremental_merge(TopLevel),
hanoidb_level:begin_incremental_merge(TopLevel),
ok;
@ -201,7 +201,7 @@ add_maybe_flush(Key, Value, Nursery, Top) ->
flush(Nursery=#nursery{ dir=Dir, max_level=MaxLevel }, Top) ->
ok = finish(Nursery, Top),
{error, enoent} = file:read_file_info( filename:join(Dir, "nursery.log")),
hanoi_nursery:new(Dir, MaxLevel).
hanoidb_nursery:new(Dir, MaxLevel).
has_room(#nursery{ count=Count }, N) ->
(Count+N) < ?BTREE_SIZE(?TOP_LEVEL).
@ -217,7 +217,7 @@ ensure_space(Nursery, NeededRoom, Top) ->
transact(Spec, Nursery=#nursery{ log_file=File, cache=Cache0, total_size=TotalSize }, Top) ->
Nursery1 = ensure_space(Nursery, length(Spec), Top),
Data = hanoi_util:crc_encapsulate_transaction( Spec ),
Data = hanoidb_util:crc_encapsulate_transaction( Spec ),
ok = file:write(File, Data),
Nursery2 = do_sync(File, Nursery1),
@ -254,7 +254,7 @@ do_level_fold(#nursery{ cache=Cache }, FoldWorkerPID, KeyRange) ->
{LastKey, Count}
end
end,
{undefined, KeyRange#btree_range.limit},
{undefined, KeyRange#key_range.limit},
gb_trees:to_list(Cache))
of
{LastKey, limit} when LastKey =/= undefined ->

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,12 +22,12 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_reader).
-module(hanoidb_reader).
-author('Kresten Krab Thorup <krab@trifork.com>').
-include_lib("kernel/include/file.hrl").
-include("include/hanoi.hrl").
-include("hanoi.hrl").
-include("include/hanoidb.hrl").
-include("hanoidb.hrl").
-include("include/plain_rpc.hrl").
-export([open/1, open/2,close/1,lookup/2,fold/3,range_fold/4, destroy/1]).
@ -50,14 +50,14 @@ open(Name) ->
open(Name, Config) ->
case proplists:get_bool(sequential, Config) of
true ->
ReadBufferSize = hanoi:get_opt(read_buffer_size, Config, 512 * 1024),
ReadBufferSize = hanoidb:get_opt(read_buffer_size, Config, 512 * 1024),
{ok, File} = file:open(Name, [raw,read,{read_ahead, ReadBufferSize},binary]),
{ok, #index{file=File, name=Name, config=Config}};
false ->
case proplists:get_bool(folding, Config) of
true ->
ReadBufferSize = hanoi:get_opt(read_buffer_size, Config, 512 * 1024),
ReadBufferSize = hanoidb:get_opt(read_buffer_size, Config, 512 * 1024),
{ok, File} = file:open(Name, [read,{read_ahead, ReadBufferSize},binary]);
false ->
{ok, File} = file:open(Name, [read,binary])
@ -113,13 +113,13 @@ fold1(File,Fun,Acc0) ->
end.
range_fold(Fun, Acc0, #index{file=File,root=Root}, Range) ->
case lookup_node(File,Range#btree_range.from_key,Root,0) of
case lookup_node(File,Range#key_range.from_key,Root,0) of
{ok, {Pos,_}} ->
file:position(File, Pos),
do_range_fold(Fun, Acc0, File, Range, Range#btree_range.limit);
do_range_fold(Fun, Acc0, File, Range, Range#key_range.limit);
{ok, Pos} ->
file:position(File, Pos),
do_range_fold(Fun, Acc0, File, Range, Range#btree_range.limit);
do_range_fold(Fun, Acc0, File, Range, Range#key_range.limit);
none ->
{done, Acc0}
end.
@ -301,7 +301,7 @@ find_start(K, KVs) ->
read_node(File,{Pos,Size}) ->
% error_logger:info_msg("read_node ~p ~p ~p~n", [File, Pos, Size]),
{ok, <<_:32, Level:16/unsigned, Data/binary>>} = file:pread(File, Pos, Size),
hanoi_util:decode_index_node(Level, Data);
hanoidb_util:decode_index_node(Level, Data);
read_node(File,Pos) ->
{ok, Pos} = file:position(File, Pos),
@ -315,7 +315,7 @@ read_node(File) ->
0 -> eof;
_ ->
{ok, Data} = file:read(File, Len-2),
hanoi_util:decode_index_node(Level, Data)
hanoidb_util:decode_index_node(Level, Data)
end.
@ -328,7 +328,7 @@ next_leaf_node(File) ->
eof;
{ok, <<Len:32, 0:16>>} ->
{ok, Data} = file:read(File, Len-2),
hanoi_util:decode_index_node(0, Data);
hanoidb_util:decode_index_node(0, Data);
{ok, <<Len:32, _:16>>} ->
{ok, _} = file:position(File, {cur,Len-2}),
next_leaf_node(File)

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,7 +22,7 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_sup).
-module(hanoidb_sup).
-author('Kresten Krab Thorup <krab@trifork.com>').
-behaviour(supervisor).

View file

@ -20,14 +20,14 @@
%%% NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE
%%%
%%% This is a temporary copy of riak_kv_backend, just here to keep hanoi
%%% development private for now. When riak_kv_hanoi_backend is moved to
%%% This is a temporary copy of riak_kv_backend, just here to keep hanoidb
%%% development private for now. When riak_kv_hanoidb_backend is moved to
%%% riak_kv, delete this file.
%%%
%%% NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE
-module(hanoi_temp_riak_kv_backend).
-module(hanoidb_temp_riak_kv_backend).
-export([behaviour_info/1]).
-export([callback_after/3]).

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,12 +22,12 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_util).
-module(hanoidb_util).
-author('Kresten Krab Thorup <krab@trifork.com>').
-compile(export_all).
-include("src/hanoi.hrl").
-include("src/hanoidb.hrl").
-define(ERLANG_ENCODED, 131).
-define(CRC_ENCODED, 127).

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,10 +22,10 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_writer).
-module(hanoidb_writer).
-author('Kresten Krab Thorup <krab@trifork.com>').
-include("hanoi.hrl").
-include("hanoidb.hrl").
%%
%% Streaming btree writer. Accepts only monotonically increasing keys for put.
@ -87,16 +87,16 @@ init([Name,Options]) ->
case do_open(Name, Options, [exclusive]) of
{ok, IdxFile} ->
{ok, BloomFilter} = ebloom:new(erlang:min(Size,16#ffffffff), 0.01, 123),
BlockSize = hanoi:get_opt(block_size, Options, ?NODE_SIZE),
BlockSize = hanoidb:get_opt(block_size, Options, ?NODE_SIZE),
{ok, #state{ name=Name,
index_file_pos=0, index_file=IdxFile,
bloom = BloomFilter,
block_size = BlockSize,
compress = hanoi:get_opt(compress, Options, none),
compress = hanoidb:get_opt(compress, Options, none),
opts = Options
}};
{error, _}=Error ->
error_logger:error_msg("hanoi_writer cannot open ~p: ~p~n", [Name, Error]),
error_logger:error_msg("hanoidb_writer cannot open ~p: ~p~n", [Name, Error]),
{stop, Error}
end.
@ -120,7 +120,7 @@ terminate(normal,_State) ->
%% premature delete -> cleanup
terminate(_Reason,State) ->
file:close( State#state.index_file ),
file:delete( hanoi_util:index_file_name(State#state.name) ).
file:delete( hanoidb_util:index_file_name(State#state.name) ).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
@ -149,8 +149,8 @@ deserialize(Binary) ->
do_open(Name, Options, OpenOpts) ->
WriteBufferSize = hanoi:get_opt(write_buffer_size, Options, 512 * 1024),
file:open( hanoi_util:index_file_name(Name),
WriteBufferSize = hanoidb:get_opt(write_buffer_size, Options, 512 * 1024),
file:open( hanoidb_util:index_file_name(Name),
[raw, append, {delayed_write, WriteBufferSize, 2000} | OpenOpts]).
@ -196,7 +196,7 @@ add_record(Level, Key, Value,
end
end,
NewSize = NodeSize + hanoi_util:estimate_node_size_increment(List, Key, Value),
NewSize = NodeSize + hanoidb_util:estimate_node_size_increment(List, Key, Value),
ok = ebloom:insert( State#state.bloom, Key ),
@ -219,7 +219,7 @@ add_record(Level, Key, Value, State=#state{ nodes=[ #node{level=Level2 } |_]=Sta
close_node(#state{nodes=[#node{ level=Level, members=NodeMembers }|RestNodes], compress=Compress} = State) ->
OrderedMembers = lists:reverse(NodeMembers),
{ok, BlockData} = hanoi_util:encode_index_node(OrderedMembers, Compress),
{ok, BlockData} = hanoidb_util:encode_index_node(OrderedMembers, Compress),
NodePos = State#state.index_file_pos,
BlockSize = erlang:iolist_size(BlockData),

241
src/lsm_btree.erl.orig Normal file
View file

@ -0,0 +1,241 @@
%% ----------------------------------------------------------------------------
%%
%% lsm_btree: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
%%
%% Copyright 2012 (c) Basho Technologies, Inc. All Rights Reserved.
%% http://basho.com/ info@basho.com
%%
%% This file is provided to you under the Apache License, Version 2.0 (the
%% "License"); you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
%% License for the specific language governing permissions and limitations
%% under the License.
%%
%% ----------------------------------------------------------------------------
-module(lsm_btree).
-author('Kresten Krab Thorup <krab@trifork.com>').
-behavior(gen_server).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-export([open/1, close/1, get/2, lookup/2, delete/2, put/3,
async_range/2, async_fold_range/4, sync_range/2, sync_fold_range/4]).
-include("lsm_btree.hrl").
-include_lib("kernel/include/file.hrl").
-include_lib("include/lsm_btree.hrl").
-record(state, { top, nursery, dir }).
%% PUBLIC API
%% Open (or create) an lsm_btree store rooted at directory Dir.
%% Starts an unlinked gen_server; returns {ok, Pid} on success.
open(Dir) ->
    gen_server:start(?MODULE, [Dir], []).
%% Close the store.  The server flushes the nursery before stopping
%% (see the close clause of handle_call/3).  An already-dead server
%% is treated as successfully closed, so shutdown paths are
%% idempotent.
close(Ref) ->
    try
        gen_server:call(Ref, close)
    catch
        exit:{noproc,_} -> ok;
        exit:noproc -> ok;
        %% Handle the case where the monitor triggers
        exit:{normal, _} -> ok
    end.
%% Look up Key (a binary); the server replies {ok, Value} or
%% not_found (see the get clause of handle_call/3).
get(Ref,Key) when is_binary(Key) ->
    gen_server:call(Ref, {get, Key}).

%% for compatibility with original code
lookup(Ref,Key) when is_binary(Key) ->
    gen_server:call(Ref, {get, Key}).

%% Delete Key.  Internally this writes a tombstone; physical removal
%% happens later during level merges.
delete(Ref,Key) when is_binary(Key) ->
    gen_server:call(Ref, {delete, Key}).

%% Store Value under Key; both must be binaries.
put(Ref,Key,Value) when is_binary(Key), is_binary(Value) ->
    gen_server:call(Ref, {put, Key, Value}).
%% Start a synchronous range fold over Range; returns {ok, WorkerPid}.
%% Results arrive in the caller's mailbox as fold_result / fold_limit
%% / fold_done messages tagged with WorkerPid.
sync_range(Ref, #btree_range{}=Range) ->
    gen_server:call(Ref, {sync_range, self(), Range}).
%% Fold Fun(Key, Value, Acc) over every pair in Range, blocking the
%% caller until the fold worker reports completion.  Returns the
%% final accumulator.
sync_fold_range(Ref,Fun,Acc0,Range) ->
    {ok, PID} = sync_range(Ref, Range),
    sync_receive_fold_range(PID,Fun,Acc0).
%% Receive loop for sync_fold_range/4: drain fold messages coming
%% from the fold worker Pid.  Each {fold_result, Pid, K, V} is folded
%% into the accumulator with Fun; both the fold_done and fold_limit
%% signals end the loop and return the accumulator as-is.
sync_receive_fold_range(Pid, Fun, Acc) ->
    receive
        {fold_result, Pid, Key, Value} ->
            NewAcc = Fun(Key, Value, Acc),
            sync_receive_fold_range(Pid, Fun, NewAcc);
        {fold_done, Pid} ->
            Acc;
        {fold_limit, Pid, _LastKey} ->
            Acc
    end.
%% Start an asynchronous range fold over Range; returns
%% {ok, WorkerPid}.  The worker streams fold_result / fold_limit /
%% fold_done messages to the calling process.
async_range(Ref, #btree_range{}=Range) ->
    gen_server:call(Ref, {async_range, self(), Range}).
%% Chunked fold over Range.  Returns {async, Fun} where calling Fun
%% fetches at most ?BTREE_ASYNC_CHUNK_SIZE results; when a chunk is
%% exhausted the receive loop re-issues the fold from the last seen
%% key (see async_receive_fold_range/5).
async_fold_range(Ref,Fun,Acc0,Range) ->
    Range2 = Range#btree_range{ limit=?BTREE_ASYNC_CHUNK_SIZE },
    FoldMoreFun = fun() ->
                          %% BUGFIX: issue the server call with the
                          %% chunk-limited Range2.  The original passed
                          %% the unlimited Range, so the worker streamed
                          %% everything in one go and the fold_limit
                          %% continuation never fired, defeating the
                          %% chunking this function exists for.
                          {ok, PID} = gen_server:call(Ref, {sync_range, self(), Range2}),
                          async_receive_fold_range(PID,Fun,Acc0,Ref,Range2)
                  end,
    {async, FoldMoreFun}.
%% Receive loop for the chunked async fold: folds worker results into
%% Acc0 until fold_done, or until fold_limit reports the last key of
%% a chunk, in which case the fold is re-issued starting (inclusively)
%% from that key.
%% NOTE(review): this returns {ok, Acc0} while the sync variant
%% returns the bare accumulator — confirm callers expect the tagged
%% form here.
async_receive_fold_range(PID,Fun,Acc0,Ref,Range) ->
    receive
        {fold_result, PID, K,V} ->
            async_receive_fold_range(PID, Fun, Fun(K,V,Acc0), Ref, Range);
        {fold_limit, PID, Key} ->
            %% Chunk exhausted: continue from the reported key.
            Range2 = Range#btree_range{ from_key = Key, from_inclusive=true },
            async_fold_range(Ref, Fun, Acc0, Range2);
        {fold_done, PID} ->
            {ok, Acc0}
    end.
%% gen_server init callback: open an existing store directory or
%% create a fresh one.
%%
%% For an existing directory the on-disk levels are opened and the
%% nursery is recovered from its log.  If the directory is missing it
%% is created together with a top level and an empty nursery.  Any
%% other file-system error crashes init (let-it-crash).
init([Dir]) ->
    case file:read_file_info(Dir) of
        {ok, #file_info{ type=directory }} ->
            %% Existing store: open levels, then replay the nursery log.
            {ok, TopLevel} = open_levels(Dir),
            {ok, Nursery} = lsm_btree_nursery:recover(Dir, TopLevel);
        {error, enoent} ->
            %% First run: create the directory and start empty.
            ok = file:make_dir(Dir),
            {ok, TopLevel} = lsm_btree_level:open(Dir, ?TOP_LEVEL, undefined),
            {ok, Nursery} = lsm_btree_nursery:new(Dir)
    end,
    {ok, #state{ top=TopLevel, dir=Dir, nursery=Nursery }}.
%% Scan Dir for "<name>-<N>.data" level files, determine the span of
%% level numbers present, and open every level from the deepest up to
%% ?TOP_LEVEL, chaining each level to the previously opened (deeper)
%% one.  Returns {ok, TopLevel}.
open_levels(Dir) ->
    {ok, Files} = file:list_dir(Dir),

    %% parse file names and find max level
    %% (the fun head intentionally shadows MinLevel/MaxLevel)
    {MinLevel,MaxLevel} =
        lists:foldl(fun(FileName, {MinLevel,MaxLevel}) ->
                            case parse_level(FileName) of
                                {ok, Level} ->
                                    { erlang:min(MinLevel, Level),
                                      erlang:max(MaxLevel, Level) };
                                _ ->
                                    {MinLevel,MaxLevel}
                            end
                    end,
                    {?TOP_LEVEL, ?TOP_LEVEL},
                    Files),

%    error_logger:info_msg("found level files ... {~p,~p}~n", [MinLevel, MaxLevel]),

    %% remove old nursery file
    file:delete(filename:join(Dir,"nursery.data")),

    %% Open levels deepest-first so each one can link to the level
    %% below it; the last level opened becomes the top level.
    TopLevel =
        lists:foldl( fun(LevelNo, Prev) ->
                             {ok, Level} = lsm_btree_level:open(Dir,LevelNo,Prev),
                             Level
                     end,
                     undefined,
                     lists:seq(MaxLevel, MinLevel, -1)),

    {ok, TopLevel}.
%% Extract the level number from a level data file name such as
%% "A-2.data".  Returns {ok, Level} when the name matches the
%% "<non-digits>-<digits>.data" pattern, otherwise the atom nomatch.
parse_level(FileName) ->
    LevelRegex = "^[^\\d]+-(\\d+)\\.data$",
    Captured = re:run(FileName, LevelRegex, [{capture,all_but_first,list}]),
    case Captured of
        {match, [Digits]} -> {ok, list_to_integer(Digits)};
        _Other            -> nomatch
    end.
%% gen_server callback: no out-of-band messages are expected, so any
%% stray message is logged and stops the server.
handle_info(Info,State) ->
    error_logger:error_msg("Unknown info ~p~n", [Info]),
    {stop,bad_msg,State}.
%% gen_server callback: no casts are part of the protocol, so any
%% cast is logged and stops the server.
handle_cast(Info,State) ->
    error_logger:error_msg("Unknown cast ~p~n", [Info]),
    {stop,bad_msg,State}.
%% premature delete -> cleanup
%% gen_server callback: nothing is cleaned up here; the nursery is
%% flushed explicitly via the close call path, not on terminate.
terminate(_Reason,_State) ->
%    error_logger:info_msg("got terminate(~p,~p)~n", [Reason,State]),
    % flush_nursery(State),
    ok.
%% gen_server callback: no state migration needed across upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% gen_server call dispatch: one clause per client request.
%%
%% Range folds start a fold worker, then feed it first from the
%% in-memory nursery and then from the on-disk level hierarchy.
handle_call({async_range, Sender, Range}, _From, State=#state{ top=TopLevel, nursery=Nursery }) ->
    {ok, FoldWorkerPID} = lsm_btree_fold_worker:start(Sender),
    lsm_btree_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
    Result = lsm_btree_level:async_range(TopLevel, FoldWorkerPID, Range),
    {reply, Result, State};

handle_call({sync_range, Sender, Range}, _From, State=#state{ top=TopLevel, nursery=Nursery }) ->
    {ok, FoldWorkerPID} = lsm_btree_fold_worker:start(Sender),
    lsm_btree_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
    Result = lsm_btree_level:sync_range(TopLevel, FoldWorkerPID, Range),
    {reply, Result, State};

handle_call({put, Key, Value}, _From, State) when is_binary(Key), is_binary(Value) ->
    {ok, State2} = do_put(Key, Value, State),
    {reply, ok, State2};

%% A delete is a write of a tombstone marker; physical removal
%% happens later during level merges.
handle_call({delete, Key}, _From, State) when is_binary(Key) ->
    {ok, State2} = do_put(Key, ?TOMBSTONE, State),
    {reply, ok, State2};

%% Reads consult the nursery first (newest data); a tombstone there
%% hides any older on-disk value.  Only on a nursery miss do we
%% descend into the level hierarchy.
handle_call({get, Key}, _From, State=#state{ top=Top, nursery=Nursery } ) when is_binary(Key) ->
    case lsm_btree_nursery:lookup(Key, Nursery) of
        {value, ?TOMBSTONE} ->
            {reply, not_found, State};
        {value, Value} when is_binary(Value) ->
            {reply, {ok, Value}, State};
        none ->
            Reply = lsm_btree_level:lookup(Top, Key),
            {reply, Reply, State}
    end;

%% Flush the nursery and close all levels; any exception is logged
%% and the server still stops normally (best-effort close).
handle_call(close, _From, State=#state{top=Top}) ->
    try
        {ok, State2} = flush_nursery(State),
        ok = lsm_btree_level:close(Top),
        {stop, normal, ok, State2}
    catch
        E:R ->
            error_logger:info_msg("exception from close ~p:~p~n", [E,R]),
            {stop, normal, ok, State}
    end.
%% Write one Key/Value (or tombstone) into the nursery, which flushes
%% itself into the top level when it becomes full.
do_put(Key, Value, State=#state{ nursery=Nursery, top=Top }) ->
    {ok, Nursery2} = lsm_btree_nursery:add_maybe_flush(Key, Value, Nursery, Top),
    {ok, State#state{ nursery=Nursery2 }}.
%% Push the entire nursery contents into the top level, then start a
%% fresh, empty nursery in the same directory.
flush_nursery(State=#state{nursery=Nursery, top=Top, dir=Dir}) ->
    ok = lsm_btree_nursery:finish(Nursery, Top),
    {ok, Nursery2} = lsm_btree_nursery:new(Dir),
    {ok, State#state{ nursery=Nursery2 }}.

40
src/qtop.erl Normal file
View file

@ -0,0 +1,40 @@
-module(qtop).
-export([max/0, max/1, queue/2, queue/1]).
%% Print the first 5 queued messages of the process with the longest
%% message queue.
max() ->
    max(5).
%% Find the process with the longest message queue and print its
%% first N queued messages.  Processes that die between the
%% erlang:processes/0 snapshot and the process_info/2 call (which
%% then returns undefined) are silently skipped.
max(N) ->
    Pairs = [{Len, Pid} ||
                Pid <- erlang:processes(),
                {message_queue_len, Len} <-
                    [erlang:process_info(Pid, message_queue_len)]],
    %% {Len, Pid} tuples are totally ordered (pids are unique), so the
    %% head of the descending sort is the busiest process.
    [{_, Busiest} | _] = lists:reverse(lists:sort(Pairs)),
    queue(Busiest, N).
%% Print the first 5 queued messages of the given process.
queue(PID) ->
    queue(PID, 5).
%% Print a process's message-queue length and its first N queued
%% messages (each truncated to print depth 30).  A pid given as a
%% string, e.g. "<0.42.0>", is converted before inspection.
queue(Pid, N) when is_list(Pid) ->
    queue(erlang:list_to_pid(Pid), N);
queue(Pid, N) ->
    {message_queue_len, QLen} = erlang:process_info(Pid, message_queue_len),
    {messages, Msgs} = erlang:process_info(Pid, messages),
    {Shown, _Rest} = lists:split(min(N, length(Msgs)), Msgs),
    io:format("==== PID: ~p, qlen:~p~n", [Pid, QLen]),
    _ = lists:foldl(fun(Msg, Index) ->
                            io:format("[~p]: ~P~n", [Index, Msg, 30]),
                            Index + 1
                    end,
                    1,
                    Shown),
    ok.

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2012 (c) Basho Technologies, Inc. All Rights Reserved.
%% http://basho.com/ info@basho.com
@ -19,8 +19,8 @@
%%
%% ----------------------------------------------------------------------------
-module(riak_kv_hanoi_backend).
-behavior(hanoi_temp_riak_kv_backend).
-module(riak_kv_hanoidb_backend).
-behavior(hanoidb_temp_riak_kv_backend).
-author('Steve Vinoski <steve@basho.com>').
-author('Greg Burd <greg@basho.com>').
@ -51,7 +51,7 @@
to_key_range/1]).
-endif.
-include("include/hanoi.hrl").
-include("include/hanoidb.hrl").
-define(API_VERSION, 1).
%% TODO: for when this backend supports 2i
@ -63,7 +63,8 @@
config :: config() }).
-type state() :: #state{}.
-type config() :: [{atom(), term()}].
-type config_option() :: {data_root, string()} | hanoidb:config_option().
-type config() :: [config_option()].
%% ===================================================================
%% Public API
@ -85,37 +86,37 @@ capabilities(_) ->
capabilities(_, _) ->
{ok, ?CAPABILITIES}.
%% @doc Start the hanoi backend
%% @doc Start the hanoidb backend
-spec start(integer(), config()) -> {ok, state()} | {error, term()}.
start(Partition, Config) ->
%% Get the data root directory
case app_helper:get_prop_or_env(data_root, Config, hanoi) of
case app_helper:get_prop_or_env(data_root, Config, hanoidb) of
undefined ->
lager:error("Failed to create hanoi dir: data_root is not set"),
lager:error("Failed to create hanoidb dir: data_root is not set"),
{error, data_root_unset};
DataRoot ->
AppStart = case application:start(hanoi) of
AppStart = case application:start(hanoidb) of
ok ->
ok;
{error, {already_started, _}} ->
ok;
{error, StartReason} ->
lager:error("Failed to init the hanoi backend: ~p", [StartReason]),
lager:error("Failed to init the hanoidb backend: ~p", [StartReason]),
{error, StartReason}
end,
case AppStart of
ok ->
case get_data_dir(DataRoot, integer_to_list(Partition)) of
{ok, DataDir} ->
case hanoi:open(DataDir, Config) of
case hanoidb:open(DataDir, Config) of
{ok, Tree} ->
{ok, #state{tree=Tree, partition=Partition, config=Config }};
{error, OpenReason}=OpenError ->
lager:error("Failed to open hanoi: ~p\n", [OpenReason]),
lager:error("Failed to open hanoidb: ~p\n", [OpenReason]),
OpenError
end;
{error, Reason} ->
lager:error("Failed to start hanoi backend: ~p\n", [Reason]),
lager:error("Failed to start hanoidb backend: ~p\n", [Reason]),
{error, Reason}
end;
Error ->
@ -123,19 +124,19 @@ start(Partition, Config) ->
end
end.
%% @doc Stop the hanoi backend
%% @doc Stop the hanoidb backend
-spec stop(state()) -> ok.
stop(#state{tree=Tree}) ->
ok = hanoi:close(Tree).
ok = hanoidb:close(Tree).
%% @doc Retrieve an object from the hanoi backend
%% @doc Retrieve an object from the hanoidb backend
-spec get(riak_object:bucket(), riak_object:key(), state()) ->
{ok, any(), state()} |
{ok, not_found, state()} |
{error, term(), state()}.
get(Bucket, Key, #state{tree=Tree}=State) ->
BKey = to_object_key(Bucket, Key),
case hanoi:get(Tree, BKey) of
case hanoidb:get(Tree, BKey) of
{ok, Value} ->
{ok, Value, State};
not_found ->
@ -144,7 +145,7 @@ get(Bucket, Key, #state{tree=Tree}=State) ->
{error, Reason, State}
end.
%% @doc Insert an object into the hanoi backend.
%% @doc Insert an object into the hanoidb backend.
-type index_spec() :: {add, Index, SecondaryKey} | {remove, Index, SecondaryKey}.
-spec put(riak_object:bucket(), riak_object:key(), [index_spec()], binary(), state()) ->
{ok, state()} |
@ -162,10 +163,10 @@ put(Bucket, PrimaryKey, IndexSpecs, Val, #state{tree=Tree}=State) ->
end,
Updates2 = [F(X) || X <- IndexSpecs],
ok = hanoi:transact(Tree, Updates1 ++ Updates2),
ok = hanoidb:transact(Tree, Updates1 ++ Updates2),
{ok, State}.
%% @doc Delete an object from the hanoi backend
%% @doc Delete an object from the hanoidb backend
-spec delete(riak_object:bucket(), riak_object:key(), [index_spec()], state()) ->
{ok, state()} |
{error, term(), state()}.
@ -181,7 +182,7 @@ delete(Bucket, PrimaryKey, IndexSpecs, #state{tree=Tree}=State) ->
end,
Updates2 = [F(X) || X <- IndexSpecs],
case hanoi:transact(Tree, Updates1 ++ Updates2) of
case hanoidb:transact(Tree, Updates1 ++ Updates2) of
ok ->
{ok, State};
{error, Reason} ->
@ -215,12 +216,12 @@ fold_list_buckets(PrevBucket, Tree, FoldBucketsFun, Acc) ->
RangeStart = to_object_key(<<PrevBucket/binary, 0>>, '_')
end,
Range = #btree_range{ from_key=RangeStart, from_inclusive=true,
Range = #key_range{ from_key=RangeStart, from_inclusive=true,
to_key=undefined, to_inclusive=undefined,
limit=1 },
%% grab next bucket, it's a limit=1 range query :-)
case hanoi:fold_range(Tree,
case hanoidb:fold_range(Tree,
fun(BucketKey,_Value,none) ->
?log( "IN_FOLDER ~p~n", [BucketKey]),
case from_object_key(BucketKey) of
@ -265,9 +266,9 @@ fold_keys(FoldKeysFun, Acc, Opts, #state{tree=Tree}) ->
Range = to_key_range(Limiter),
case proplists:get_bool(async_fold, Opts) of
true ->
{async, fun() -> hanoi:fold_range(Tree, FoldFun, Acc, Range) end};
{async, fun() -> hanoidb:fold_range(Tree, FoldFun, Acc, Range) end};
false ->
{ok, hanoi:fold_range(Tree, FoldFun, Acc, Range)}
{ok, hanoidb:fold_range(Tree, FoldFun, Acc, Range)}
end.
%% @doc Fold over all the objects for one or all buckets.
@ -281,7 +282,7 @@ fold_objects(FoldObjectsFun, Acc, Opts, #state{tree=Tree}) ->
ObjectFolder =
fun() ->
% io:format(user, "starting fold_objects in ~p~n", [self()]),
Result = hanoi:fold_range(Tree, FoldFun, Acc, to_key_range(Bucket)),
Result = hanoidb:fold_range(Tree, FoldFun, Acc, to_key_range(Bucket)),
% io:format(user, "ended fold_objects in ~p => ~P~n", [self(),Result,20]),
Result
end,
@ -292,30 +293,30 @@ fold_objects(FoldObjectsFun, Acc, Opts, #state{tree=Tree}) ->
{ok, ObjectFolder()}
end.
%% @doc Delete all objects from this hanoi backend
%% @doc Delete all objects from this hanoidb backend
-spec drop(state()) -> {ok, state()} | {error, term(), state()}.
drop(#state{ tree=Tree, partition=Partition, config=Config }=State) ->
case hanoi:destroy(Tree) of
case hanoidb:destroy(Tree) of
ok ->
start(Partition, Config);
{error, Term} ->
{error, Term, State}
end.
%% @doc Returns true if this hanoi backend contains any
%% @doc Returns true if this hanoidb backend contains any
%% non-tombstone values; otherwise returns false.
-spec is_empty(state()) -> boolean().
is_empty(#state{tree=Tree}) ->
FoldFun = fun(K, _V, Acc) -> [K|Acc] end,
try
Range = to_key_range(undefined),
[] =:= hanoi:fold_range(Tree, FoldFun, [], Range#btree_range{ limit=1 })
[] =:= hanoidb:fold_range(Tree, FoldFun, [], Range#key_range{ limit=1 })
catch
_:ok ->
false
end.
%% @doc Get the status information for this hanoi backend
%% @doc Get the status information for this hanoidb backend
-spec status(state()) -> [{atom(), term()}].
status(#state{}) ->
%% TODO: not yet implemented
@ -339,7 +340,7 @@ get_data_dir(DataRoot, Partition) ->
ok ->
{ok, PartitionDir};
{error, Reason} ->
lager:error("Failed to create hanoi dir ~s: ~p", [PartitionDir, Reason]),
lager:error("Failed to create hanoidb dir ~s: ~p", [PartitionDir, Reason]),
{error, Reason}
end.
@ -412,13 +413,13 @@ fold_objects_fun(FoldObjectsFun, FilterBucket) ->
-define(MAX_INDEX_KEY, <<16,0,0,0,6>>).
to_key_range(undefined) ->
#btree_range{ from_key = to_object_key(<<>>, <<>>),
#key_range{ from_key = to_object_key(<<>>, <<>>),
from_inclusive = true,
to_key = ?MAX_OBJECT_KEY,
to_inclusive = false
};
to_key_range({bucket, Bucket}) ->
#btree_range{ from_key = to_object_key(Bucket, <<>>),
#key_range{ from_key = to_object_key(Bucket, <<>>),
from_inclusive = true,
to_key = to_object_key(<<Bucket/binary, 0>>, <<>>),
to_inclusive = false };
@ -427,12 +428,12 @@ to_key_range({index, Bucket, {eq, <<"$bucket">>, _Term}}) ->
to_key_range({index, Bucket, {eq, Field, Term}}) ->
to_key_range({index, Bucket, {range, Field, Term, Term}});
to_key_range({index, Bucket, {range, <<"$key">>, StartTerm, EndTerm}}) ->
#btree_range{ from_key = to_object_key(Bucket, StartTerm),
#key_range{ from_key = to_object_key(Bucket, StartTerm),
from_inclusive = true,
to_key = to_object_key(Bucket, EndTerm),
to_inclusive = true };
to_key_range({index, Bucket, {range, Field, StartTerm, EndTerm}}) ->
#btree_range{ from_key = to_index_key(Bucket, <<>>, Field, StartTerm),
#key_range{ from_key = to_index_key(Bucket, <<>>, Field, StartTerm),
from_inclusive = true,
to_key = to_index_key(Bucket, <<16#ff,16#ff,16#ff,16#ff,
16#ff,16#ff,16#ff,16#ff,
@ -476,7 +477,7 @@ from_index_key(LKey) ->
%% ===================================================================
-ifdef(TEST).
-include("src/hanoi.hrl").
-include("src/hanoidb.hrl").
key_range_test() ->
Range = to_key_range({bucket, <<"a">>}),
@ -497,14 +498,14 @@ index_range_test() ->
simple_test_() ->
?assertCmd("rm -rf test/hanoi-backend"),
application:set_env(hanoi, data_root, "test/hanoid-backend"),
hanoi_temp_riak_kv_backend:standard_test(?MODULE, []).
?assertCmd("rm -rf test/hanoidb-backend"),
application:set_env(hanoidb, data_root, "test/hanoidbd-backend"),
hanoidb_temp_riak_kv_backend:standard_test(?MODULE, []).
custom_config_test_() ->
?assertCmd("rm -rf test/hanoi-backend"),
application:set_env(hanoi, data_root, ""),
hanoi_temp_riak_kv_backend:standard_test(?MODULE, [{data_root, "test/hanoi-backend"}]).
?assertCmd("rm -rf test/hanoidb-backend"),
application:set_env(hanoidb, data_root, ""),
hanoidb_temp_riak_kv_backend:standard_test(?MODULE, [{data_root, "test/hanoidb-backend"}]).
-ifdef(PROPER).
@ -519,25 +520,25 @@ eqc_test_() ->
[?_assertEqual(true,
backend_eqc:test(?MODULE, false,
[{data_root,
"test/hanoidb-backend"},
"test/hanoidbdb-backend"},
{async_fold, false}]))]},
{timeout, 60,
[?_assertEqual(true,
backend_eqc:test(?MODULE, false,
[{data_root,
"test/hanoidb-backend"}]))]}
"test/hanoidbdb-backend"}]))]}
]}]}]}.
setup() ->
application:load(sasl),
application:set_env(sasl, sasl_error_logger, {file, "riak_kv_hanoidb_backend_eqc_sasl.log"}),
application:set_env(sasl, sasl_error_logger, {file, "riak_kv_hanoidbdb_backend_eqc_sasl.log"}),
error_logger:tty(false),
error_logger:logfile({open, "riak_kv_hanoidb_backend_eqc.log"}),
error_logger:logfile({open, "riak_kv_hanoidbdb_backend_eqc.log"}),
ok.
cleanup(_) ->
?_assertCmd("rm -rf test/hanoidb-backend").
?_assertCmd("rm -rf test/hanoidbdb-backend").
-endif. % EQC

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -23,7 +23,7 @@
%% ----------------------------------------------------------------------------
%% @Doc Drive a set of LSM BTrees
-module(hanoi_drv).
-module(hanoidb_drv).
-behaviour(gen_server).
@ -86,7 +86,7 @@ init([]) ->
{ok, #state{}}.
handle_call({open, N}, _, #state { btrees = D} = State) ->
case hanoi:open(N) of
case hanoidb:open(N) of
{ok, Tree} ->
{reply, ok, State#state { btrees = dict:store(N, Tree, D)}};
Otherwise ->
@ -94,7 +94,7 @@ handle_call({open, N}, _, #state { btrees = D} = State) ->
end;
handle_call({close, N}, _, #state { btrees = D} = State) ->
Tree = dict:fetch(N, D),
case hanoi:close(Tree) of
case hanoidb:close(Tree) of
ok ->
{reply, ok, State#state { btrees = dict:erase(N, D)}};
Otherwise ->
@ -104,11 +104,11 @@ handle_call({fold_range, Name, Fun, Acc0, Range},
_From,
#state { btrees = D } = State) ->
Tree = dict:fetch(Name, D),
Result = hanoi:fold_range(Tree, Fun, Acc0, Range),
Result = hanoidb:fold_range(Tree, Fun, Acc0, Range),
{reply, Result, State};
handle_call({put, N, K, V}, _, #state { btrees = D} = State) ->
Tree = dict:fetch(N, D),
case hanoi:put(Tree, K, V) of
case hanoidb:put(Tree, K, V) of
ok ->
{reply, ok, State};
Other ->
@ -116,14 +116,14 @@ handle_call({put, N, K, V}, _, #state { btrees = D} = State) ->
end;
handle_call({delete_exist, N, K}, _, #state { btrees = D} = State) ->
Tree = dict:fetch(N, D),
Reply = hanoi:delete(Tree, K),
Reply = hanoidb:delete(Tree, K),
{reply, Reply, State};
handle_call({get, N, K}, _, #state { btrees = D} = State) ->
Tree = dict:fetch(N, D),
Reply = hanoi:get(Tree, K),
Reply = hanoidb:get(Tree, K),
{reply, Reply, State};
handle_call(stop, _, #state{ btrees = D } = State ) ->
[ hanoi:close(Tree) || {_,Tree} <- dict:to_list(D) ],
[ hanoidb:close(Tree) || {_,Tree} <- dict:to_list(D) ],
{stop, normal, ok, State};
handle_call(_Request, _From, State) ->
Reply = ok,

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,7 +22,7 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_merger_tests).
-module(hanoidb_merger_tests).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
@ -37,26 +37,26 @@ merge_test() ->
file:delete("test2"),
file:delete("test3"),
{ok, BT1} = hanoi_writer:open("test1"),
{ok, BT1} = hanoidb_writer:open("test1"),
lists:foldl(fun(N,_) ->
ok = hanoi_writer:add(BT1, <<N:128>>, <<"data",N:128>>)
ok = hanoidb_writer:add(BT1, <<N:128>>, <<"data",N:128>>)
end,
ok,
lists:seq(1,10000,2)),
ok = hanoi_writer:close(BT1),
ok = hanoidb_writer:close(BT1),
{ok, BT2} = hanoi_writer:open("test2"),
{ok, BT2} = hanoidb_writer:open("test2"),
lists:foldl(fun(N,_) ->
ok = hanoi_writer:add(BT2, <<N:128>>, <<"data",N:128>>)
ok = hanoidb_writer:add(BT2, <<N:128>>, <<"data",N:128>>)
end,
ok,
lists:seq(2,5001,1)),
ok = hanoi_writer:close(BT2),
ok = hanoidb_writer:close(BT2),
self() ! {step, {self(), none}, 2000000000},
{Time,{ok,Count}} = timer:tc(hanoi_merger, merge, ["test1", "test2", "test3", 10000, true, []]),
{Time,{ok,Count}} = timer:tc(hanoidb_merger, merge, ["test1", "test2", "test3", 10000, true, []]),
error_logger:info_msg("time to merge: ~p/sec (time=~p, count=~p)~n", [1000000/(Time/Count), Time/1000000, Count]),

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,10 +22,10 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_tests).
-module(hanoidb_tests).
-include("include/hanoi.hrl").
-include("src/hanoi.hrl").
-include("include/hanoidb.hrl").
-include("src/hanoidb.hrl").
-ifdef(TEST).
-ifdef(TRIQ).
@ -50,7 +50,7 @@
-record(tree, { elements = dict:new() }).
-record(state, { open = dict:new(),
closed = dict:new() }).
-define(SERVER, hanoi_drv).
-define(SERVER, hanoidb_drv).
full_test_() ->
{setup,
@ -182,7 +182,7 @@ precondition(#state { open = Open, closed = Closed },
{call, ?SERVER, close, [Name]}) ->
(dict:is_key(Name, Open)) and (not dict:is_key(Name, Closed)).
is_valid_range(#btree_range{ from_key=FromKey, from_inclusive=FromIncl,
is_valid_range(#key_range{ from_key=FromKey, from_inclusive=FromIncl,
to_key=ToKey, to_inclusive=ToIncl,
limit=Limit })
when
@ -268,9 +268,9 @@ prop_dict_agree() ->
?FORALL(Cmds, commands(?MODULE),
?TRAPEXIT(
begin
hanoi_drv:start_link(),
hanoidb_drv:start_link(),
{History,State,Result} = run_commands(?MODULE, Cmds),
hanoi_drv:stop(),
hanoidb_drv:stop(),
cleanup_test_trees(State),
?WHENFAIL(io:format("History: ~w\nState: ~w\nResult: ~w\n",
[History,State,Result]),
@ -280,31 +280,31 @@ prop_dict_agree() ->
%% UNIT TESTS
%% ----------------------------------------------------------------------
test_tree_simple_1() ->
{ok, Tree} = hanoi:open("simple"),
ok = hanoi:put(Tree, <<>>, <<"data", 77:128>>),
{ok, <<"data", 77:128>>} = hanoi:get(Tree, <<>>),
ok = hanoi:close(Tree).
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, <<>>, <<"data", 77:128>>),
{ok, <<"data", 77:128>>} = hanoidb:get(Tree, <<>>),
ok = hanoidb:close(Tree).
test_tree_simple_2() ->
{ok, Tree} = hanoi:open("simple"),
ok = hanoi:put(Tree, <<"ã">>, <<"µ">>),
ok = hanoi:delete(Tree, <<"ã">>),
ok = hanoi:close(Tree).
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, <<"ã">>, <<"µ">>),
ok = hanoidb:delete(Tree, <<"ã">>),
ok = hanoidb:close(Tree).
test_tree_simple_4() ->
Key = <<56,11,62,42,35,163,16,100,9,224,8,228,130,94,198,2,126,117,243,
1,122,175,79,159,212,177,30,153,71,91,85,233,41,199,190,58,3,
173,220,9>>,
Value = <<212,167,12,6,105,152,17,80,243>>,
{ok, Tree} = hanoi:open("simple"),
ok = hanoi:put(Tree, Key, Value),
?assertEqual({ok, Value}, hanoi:get(Tree, Key)),
ok = hanoi:close(Tree).
{ok, Tree} = hanoidb:open("simple"),
ok = hanoidb:put(Tree, Key, Value),
?assertEqual({ok, Value}, hanoidb:get(Tree, Key)),
ok = hanoidb:close(Tree).
test_tree() ->
{ok, Tree} = hanoi:open("simple2"),
{ok, Tree} = hanoidb:open("simple2"),
lists:foldl(fun(N,_) ->
ok = hanoi:put(Tree,
ok = hanoidb:put(Tree,
<<N:128>>, <<"data",N:128>>)
end,
ok,
@ -312,7 +312,7 @@ test_tree() ->
io:format(user, "INSERT DONE 1~n", []),
lists:foldl(fun(N,_) ->
ok = hanoi:put(Tree,
ok = hanoidb:put(Tree,
<<N:128>>, <<"data",N:128>>)
end,
ok,
@ -321,7 +321,7 @@ test_tree() ->
io:format(user, "INSERT DONE 2~n", []),
hanoi:delete(Tree, <<1500:128>>),
hanoidb:delete(Tree, <<1500:128>>),
io:format(user, "INSERT DONE 3~n", []),
@ -330,17 +330,17 @@ test_tree() ->
error_logger:info_msg("time to fold: ~p/sec (time=~p, count=~p)~n", [1000000/(Time/Count), Time/1000000, Count]),
ok = hanoi:close(Tree).
ok = hanoidb:close(Tree).
run_fold(Tree,From,To) ->
{_, Count} = hanoi:fold_range(Tree,
{_, Count} = hanoidb:fold_range(Tree,
fun(<<N:128>>,_Value, {N, C}) ->
{N + 1, C + 1};
(<<1501:128>>,_Value, {1500, C}) ->
{1502, C + 1}
end,
{From, 0},
#btree_range{from_key= <<From:128>>, to_key= <<(To+1):128>>}),
#key_range{from_key= <<From:128>>, to_key= <<(To+1):128>>}),
{ok, Count}.
@ -376,7 +376,7 @@ cmd_sync_range_args(#state { open = Open }) ->
?LET(Tree, g_non_empty_btree(Open),
?LET({K1, K2}, {g_existing_key(Tree, Open),
g_existing_key(Tree, Open)},
[Tree, #btree_range{from_key=K1, to_key=K2}])).
[Tree, #key_range{from_key=K1, to_key=K2}])).
cmd_sync_fold_range_args(State) ->
?LET([Tree, Range], cmd_sync_range_args(State),

View file

@ -1,6 +1,6 @@
%% ----------------------------------------------------------------------------
%%
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
%%
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
%% http://trifork.com/ info@trifork.com
@ -22,28 +22,28 @@
%%
%% ----------------------------------------------------------------------------
-module(hanoi_writer_tests).
-module(hanoidb_writer_tests).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-endif.
-include("include/hanoi.hrl").
-include("include/hanoidb.hrl").
-compile(export_all).
simple_test() ->
file:delete("testdata"),
{ok, BT} = hanoi_writer:open("testdata"),
ok = hanoi_writer:add(BT, <<"A">>, <<"Avalue">>),
ok = hanoi_writer:add(BT, <<"B">>, <<"Bvalue">>),
ok = hanoi_writer:close(BT),
{ok, BT} = hanoidb_writer:open("testdata"),
ok = hanoidb_writer:add(BT, <<"A">>, <<"Avalue">>),
ok = hanoidb_writer:add(BT, <<"B">>, <<"Bvalue">>),
ok = hanoidb_writer:close(BT),
{ok, IN} = hanoi_reader:open("testdata"),
{ok, <<"Avalue">>} = hanoi_reader:lookup(IN, <<"A">>),
ok = hanoi_reader:close(IN),
{ok, IN} = hanoidb_reader:open("testdata"),
{ok, <<"Avalue">>} = hanoidb_reader:lookup(IN, <<"A">>),
ok = hanoidb_reader:close(IN),
ok = file:delete("testdata").
@ -51,7 +51,7 @@ simple_test() ->
simple1_test() ->
file:delete("testdata"),
{ok, BT} = hanoi_writer:open("testdata", [{block_size, 1024}]),
{ok, BT} = hanoidb_writer:open("testdata", [{block_size, 1024}]),
Max = 1024,
Seq = lists:seq(0, Max),
@ -60,22 +60,22 @@ simple1_test() ->
fun() ->
lists:foreach(
fun(Int) ->
ok = hanoi_writer:add(BT, <<Int:128>>, <<"valuevalue/", Int:128>>)
ok = hanoidb_writer:add(BT, <<Int:128>>, <<"valuevalue/", Int:128>>)
end,
Seq),
ok = hanoi_writer:close(BT)
ok = hanoidb_writer:close(BT)
end,
[]),
error_logger:info_msg("time to insert: ~p/sec~n", [1000000/(Time1/Max)]),
{ok, IN} = hanoi_reader:open("testdata"),
{ok, IN} = hanoidb_reader:open("testdata"),
Middle = Max div 2,
{ok, <<"valuevalue/", Middle:128>>} = hanoi_reader:lookup(IN, <<Middle:128>>),
{ok, <<"valuevalue/", Middle:128>>} = hanoidb_reader:lookup(IN, <<Middle:128>>),
{Time2,Count} = timer:tc(
fun() -> hanoi_reader:fold(fun(Key, <<"valuevalue/", Key/binary>>, N) ->
fun() -> hanoidb_reader:fold(fun(Key, <<"valuevalue/", Key/binary>>, N) ->
N+1
end,
0,
@ -88,12 +88,12 @@ simple1_test() ->
Max = Count-1,
{Time3,{done,Count2}} = timer:tc(
fun() -> hanoi_reader:range_fold(fun(Key, <<"valuevalue/", Key/binary>>, N) ->
fun() -> hanoidb_reader:range_fold(fun(Key, <<"valuevalue/", Key/binary>>, N) ->
N+1
end,
0,
IN,
#btree_range{ from_key= <<>>, to_key=undefined })
#key_range{ from_key= <<>>, to_key=undefined })
end,
[]),
@ -103,5 +103,5 @@ simple1_test() ->
Max = Count2-1,
ok = hanoi_reader:close(IN).
ok = hanoidb_reader:close(IN).