Rename hanoi -> hanoidb

commit e315b92faf (parent 3f5a8a7792)

25 changed files with 641 additions and 331 deletions
README.md (52 changes)
@@ -1,12 +1,12 @@
-# Hanoi Ordered Key/Value Storage
+# HanoiDB Ordered Key/Value Storage
 
-Hanoi implements an ordered key/value storage engine, implemented
+HanoiDB implements an ordered key/value storage engine, implemented
 using "doubling sizes" persistent ordered sets of key/value pairs,
 much like LevelDB.
 
 Here's the bullet list:
 
-- Insert, Delete and Read all have worst case log<sub>2</sub>(N) latency.
+- Insert, Delete and Read all have worst case *O*(log<sub>2</sub>(*N*)) latency.
 - Incremental space reclamation: The cost of evicting stale key/values
   is amortized into insertion
   - you don't need a separate eviction thread to keep memory use low
@@ -26,23 +26,44 @@ Here's the bullet list:
 - Low CPU overhead
 - ~2000 lines of pure Erlang code in src/*.erl
 
-Hanoi is developed by Trifork, a Riak expert solutions provider. You're most
+HanoiDB is developed by Trifork, a Riak expert solutions provider. You're most
 welcome to contact us if you want help optimizing your Riak setup.
 
 ### Configuration options
 
-Put these values in your `app.config` in the `hanoi` section
+Put these values in your `app.config` in the `hanoidb` section
 
 ```erlang
-{hanoi, [
-  {data_root, "./data/hanoi"},
-  {compress, none | snappy | gzip},
+{hanoidb, [
+  {data_root, "./data/hanoidb"},
+
+  %% Enable/disable on-disk compression.
+  %%
+  {compress, none | gzip},
+
+  %% Sync strategy `none' only syncs every time the
+  %% nursery runs full, which is currently hard coded
+  %% to be every 256 inserts or deletes.
+  %%
+  %% Sync strategy `sync' will sync the nursery log
+  %% for every insert or delete operation.
+  %%
   {sync_strategy, none | sync | {seconds, N}},
+
+  %% The page size is a minimum page size; when a page fills
+  %% up beyond this size, it is written to disk.
+  %% Compression applies to such units of page size.
+  %%
   {page_size, 8192},
+
+  %% Read/write buffer sizes apply to merge processes.
+  %% A merge process has two read buffers and a write
+  %% buffer, and there is a merge process *per level* in
+  %% the database.
+  %%
   {write_buffer_size, 524288},  % 512kB
   {read_buffer_size, 524288},   % 512kB
 
-  %%
   %% The merge strategy is one of `fast' or `predictable'.
   %% Both have same log2(N) worst case, but `fast' is
   %% sometimes faster; yielding latency fluctuations.
@@ -51,18 +72,19 @@ Put these values in your `app.config` in the `hanoi` section
 ]},
 ```
 
-### How to deploy Hanoi as a Riak/KV backend
+### How to deploy HanoiDB as a Riak/KV backend
 
 This storage engine can function as an alternative backend for Basho's Riak/KV.
 
-You can deploy `hanoi` into a Riak devrel cluster using the `enable-hanoi`
+You can deploy `hanoidb` into a Riak devrel cluster using the `enable-hanoidb`
 script. Clone the `riak` repo, change your working directory to it, and then
-execute the `enable-hanoi` script. It adds `hanoi` as a dependency, runs `make
+execute the `enable-hanoidb` script. It adds `hanoidb` as a dependency, runs `make
 all devrel`, and then modifies the configuration settings of the resulting dev
-nodes to use the hanoi storage backend.
+nodes to use the hanoidb storage backend.
 
 1. `git clone git://github.com/basho/riak.git`
+1. `mkdir riak/deps`
 1. `cd riak/deps`
-1. `git clone git://github.com/basho/hanoi.git`
+1. `git clone git://github.com/basho/hanoidb.git`
 1. `cd ..`
-1. `./deps/hanoi/enable-hanoi`
+1. `./deps/hanoidb/enable-hanoidb`
 
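For reference, opening a store with these options from application code might look like the following sketch; it is built from the `config_option()` type this commit adds to `hanoidb.erl`, and the directory name and option values are illustrative:

```erlang
%% Illustrative sketch only; option names come from the config_option()
%% type introduced for hanoidb:open/2 further down in this commit.
{ok, Db} = hanoidb:open("./data/example.hanoidb",
                        [{compress, gzip},
                         {page_size, 8192},
                         {write_buffer_size, 524288},  % 512kB
                         {merge_strategy, fast}]),
ok = hanoidb:put(Db, <<"key">>, <<"value">>).
```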
enable-hanoi → enable-hanoidb (18 changes; executable file → normal file)
@@ -1,12 +1,12 @@
 #!/bin/sh
 
-# This script adds hanoi to a riak github repo. Run it in the riak repo
+# This script adds hanoidb to a riak github repo. Run it in the riak repo
 # directory.
 #
-# First it adds hanoi, then runs "make all devrel" and then enables the
-# hanoi storage backend in the resulting dev nodes.
+# First it adds hanoidb, then runs "make all devrel" and then enables the
+# hanoidb storage backend in the resulting dev nodes.
 #
-# This script is intended to be temporary. Once hanoi is made into a proper
+# This script is intended to be temporary. Once hanoidb is made into a proper
 # riak citizen, this script will no longer be needed.
 
 set -e
@@ -35,17 +35,17 @@ fi
 ./rebar get-deps
 
 file=./deps/riak_kv/src/riak_kv.app.src
-if ! grep -q hanoi $file ; then
+if ! grep -q hanoidb $file ; then
     echo
     echo "Modifying $file, saving the original as ${file}.orig ..."
-    perl -i.orig -pe '/\bos_mon,/ && print qq( hanoi,\n)' $file
+    perl -i.orig -pe '/\bos_mon,/ && print qq( hanoidb,\n)' $file
 fi
 
 file=./deps/riak_kv/rebar.config
-if ! grep -q hanoi $file ; then
+if ! grep -q hanoidb $file ; then
     echo
     echo "Modifying $file, saving the original as ${file}.orig ..."
-    perl -i.orig -pe '/\bsext\b/ && print qq( {hanoi, ".*", {git, "git\@github.com:basho/hanoi.git", "master"}},\n)' $file
+    perl -i.orig -pe '/\bsext\b/ && print qq( {hanoidb, ".*", {git, "git\@github.com:basho/hanoidb.git", "master"}},\n)' $file
 fi
 
 ./rebar get-deps
@@ -55,6 +55,6 @@ make all devrel
 
 echo
 echo 'Modifying all dev/dev*/etc/app.config files, saving originals with .orig suffix...'
-perl -i.orig -ne 'if (/\bstorage_backend,/) { s/(storage_backend, )[^\}]+/\1riak_kv_hanoi_backend/; print } elsif (/\{eleveldb,/) { $eleveldb++; print } elsif ($eleveldb && /^\s+\]\},/) { $eleveldb = 0; print; print qq(\n {hanoi, [\n {data_root, "./data/hanoi"}\n ]},\n\n) } else { print }' dev/dev*/etc/app.config
+perl -i.orig -ne 'if (/\bstorage_backend,/) { s/(storage_backend, )[^\}]+/\1riak_kv_hanoidb_backend/; print } elsif (/\{eleveldb,/) { $eleveldb++; print } elsif ($eleveldb && /^\s+\]\},/) { $eleveldb = 0; print; print qq(\n {hanoidb, [\n {data_root, "./data/hanoidb"}\n ]},\n\n) } else { print }' dev/dev*/etc/app.config
 
 exit 0

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -30,7 +30,7 @@
 -define(BTREE_ASYNC_CHUNK_SIZE, 100).
 
 %%
-%% The btree_range structure is a bit assymetric, here is why:
+%% The key_range structure is a bit asymmetric, here is why:
 %%
 %% from_key=<<>> is "less than" any other key, hence we don't need to
 %% handle from_key=undefined to support an open-ended start of the
@@ -38,7 +38,7 @@
 %% which is > any possible key, hence we need to allow to_key=undefined
 %% as a token of an interval that has no upper limit.
 %%
--record(btree_range, { from_key       = <<>>  :: binary(),
+-record(key_range,   { from_key       = <<>>  :: binary(),
                        from_inclusive = true  :: boolean(),
                        to_key                 :: binary() | undefined,
                        to_inclusive   = false :: boolean(),
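To make the asymmetry concrete, here is a small illustration (not part of the commit) of how the two open-ended cases described above are written with the renamed record:

```erlang
%% Hypothetical illustration of the renamed record; assumes the
%% hanoidb.hrl shown above is on the include path.
-include("include/hanoidb.hrl").

%% Everything from <<"a">> (inclusive, the default) up to <<"b">>
%% (exclusive, the default):
half_open() -> #key_range{ from_key = <<"a">>, to_key = <<"b">> }.

%% from_key = <<>> sorts before any key, and to_key = undefined means
%% "no upper limit", so this range covers the whole store:
whole_store() -> #key_range{ from_key = <<>>, to_key = undefined }.
```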

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,7 +22,7 @@
 %%
 %% ----------------------------------------------------------------------------
 
--module(basho_bench_driver_hanoi).
+-module(basho_bench_driver_hanoidb).
 
 -record(state, { tree,
                  filename,
@@ -33,10 +33,10 @@
 -export([new/1,
          run/4]).
 
--include("hanoi.hrl").
+-include("hanoidb.hrl").
 -include_lib("basho_bench/include/basho_bench.hrl").
 
--record(btree_range, { from_key       = <<>>  :: binary(),
+-record(key_range,   { from_key       = <<>>  :: binary(),
                        from_inclusive = true  :: boolean(),
                        to_key                 :: binary() | undefined,
                        to_inclusive   = false :: boolean(),
@@ -48,20 +48,20 @@
 
 new(_Id) ->
     %% Make sure bitcask is available
-    case code:which(hanoi) of
+    case code:which(hanoidb) of
         non_existing ->
-            ?FAIL_MSG("~s requires hanoi to be available on code path.\n",
+            ?FAIL_MSG("~s requires hanoidb to be available on code path.\n",
                       [?MODULE]);
         _ ->
             ok
     end,
 
     %% Get the target directory
-    Dir = basho_bench_config:get(hanoi_dir, "."),
-    Filename = filename:join(Dir, "test.hanoi"),
+    Dir = basho_bench_config:get(hanoidb_dir, "."),
+    Filename = filename:join(Dir, "test.hanoidb"),
 
     %% Look for sync interval config
-    case basho_bench_config:get(hanoi_sync_interval, infinity) of
+    case basho_bench_config:get(hanoidb_sync_interval, infinity) of
         Value when is_integer(Value) ->
             SyncInterval = Value;
         infinity ->
@@ -69,9 +69,9 @@ new(_Id) ->
     end,
 
     %% Get any bitcask flags
-    case hanoi:open(Filename) of
+    case hanoidb:open(Filename) of
         {error, Reason} ->
-            ?FAIL_MSG("Failed to open hanoi in ~s: ~p\n", [Filename, Reason]);
+            ?FAIL_MSG("Failed to open hanoidb in ~s: ~p\n", [Filename, Reason]);
         {ok, FBTree} ->
             {ok, #state { tree = FBTree,
                           filename = Filename,
@@ -80,7 +80,7 @@ new(_Id) ->
     end.
 
 run(get, KeyGen, _ValueGen, State) ->
-    case hanoi:lookup(State#state.tree, KeyGen()) of
+    case hanoidb:lookup(State#state.tree, KeyGen()) of
         {ok, _Value} ->
             {ok, State};
         not_found ->
@@ -89,14 +89,14 @@ run(get, KeyGen, _ValueGen, State) ->
             {error, Reason}
     end;
 run(put, KeyGen, ValueGen, State) ->
-    case hanoi:put(State#state.tree, KeyGen(), ValueGen()) of
+    case hanoidb:put(State#state.tree, KeyGen(), ValueGen()) of
         ok ->
             {ok, State};
         {error, Reason} ->
             {error, Reason}
     end;
 run(delete, KeyGen, _ValueGen, State) ->
-    case hanoi:delete(State#state.tree, KeyGen()) of
+    case hanoidb:delete(State#state.tree, KeyGen()) of
         ok ->
             {ok, State};
         {error, Reason} ->
@@ -105,12 +105,12 @@ run(delete, KeyGen, _ValueGen, State) ->
 
 run(fold_100, KeyGen, _ValueGen, State) ->
     [From,To] = lists:usort([KeyGen(), KeyGen()]),
-    case hanoi:sync_fold_range(State#state.tree,
+    case hanoidb:sync_fold_range(State#state.tree,
                                fun(_Key,_Value,Count) ->
                                        Count+1
                                end,
                                0,
-                               #btree_range{ from_key=From,
+                               #key_range{ from_key=From,
                                            to_key=To,
                                            limit=100 }) of
         Count when Count >= 0; Count =< 100 ->
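For anyone wiring this driver into basho_bench, a minimal configuration fragment matching the keys the driver reads above could look like this; the values are illustrative assumptions, not part of the commit:

```erlang
%% Illustrative only; keys taken from the basho_bench_config:get/2
%% calls in the driver above.
{driver, basho_bench_driver_hanoidb}.
{hanoidb_dir, "/tmp"}.
{hanoidb_sync_interval, 60}.   % integer, or omit for the infinity default
```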

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,7 +22,7 @@
 %%
 %% ----------------------------------------------------------------------------
 
-{application, hanoi,
+{application, hanoidb,
  [
   {description, ""},
   {vsn, "1.0.0"},
@@ -31,6 +31,6 @@
    kernel,
    stdlib
   ]},
-  {mod, {hanoi_app, []}},
+  {mod, {hanoidb_app, []}},
   {env, []}
 ]}.

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,7 +22,7 @@
 %%
 %% ----------------------------------------------------------------------------
 
--module(hanoi).
+-module(hanoidb).
 -author('Kresten Krab Thorup <krab@trifork.com>').
 
 
@@ -36,9 +36,9 @@
 -export([get_opt/2, get_opt/3]).
 
--include("hanoi.hrl").
+-include("hanoidb.hrl").
 -include_lib("kernel/include/file.hrl").
--include_lib("include/hanoi.hrl").
+-include_lib("include/hanoidb.hrl").
 -include_lib("include/plain_rpc.hrl").
 
 -record(state, { top, nursery, dir, opt, max_level }).
@@ -53,19 +53,25 @@
 
 %% PUBLIC API
 
--type hanoi() :: pid().
--type key_range() :: #btree_range{}.
+-type hanoidb() :: pid().
+-type key_range() :: #key_range{}.
+-type config_option() :: {compress, none | gzip | snappy}
+                       | {page_size, pos_integer()}
+                       | {read_buffer_size, pos_integer()}
+                       | {write_buffer_size, pos_integer()}
+                       | {merge_strategy, fast | predictable }
+                       .
 
 % @doc
-% Create or open existing hanoi store. Argument `Dir' names a
+% Create or open existing hanoidb store. Argument `Dir' names a
 % directory in which to keep the data files. By convention, we
-% name hanoi data directories with extension ".hanoi".
+% name hanoidb data directories with extension ".hanoidb".
 % @spec open(Dir::string()) -> pid().
 - spec open(Dir::string()) -> pid().
 open(Dir) ->
     open(Dir, []).
 
-- spec open(Dir::string(), Opts::[_]) -> pid().
+- spec open(Dir::string(), Opts::[config_option()]) -> pid().
 open(Dir, Opts) ->
     ok = start_app(),
     gen_server:start(?MODULE, [Dir, Opts], []).
@@ -102,39 +108,39 @@ get(Ref,Key) when is_binary(Key) ->
 lookup(Ref,Key) when is_binary(Key) ->
     gen_server:call(Ref, {get, Key}, infinity).
 
--spec delete(hanoi(), binary()) ->
+-spec delete(hanoidb(), binary()) ->
     ok | {error, term()}.
 delete(Ref,Key) when is_binary(Key) ->
     gen_server:call(Ref, {delete, Key}, infinity).
 
--spec put(hanoi(), binary(), binary()) ->
+-spec put(hanoidb(), binary(), binary()) ->
     ok | {error, term()}.
 put(Ref,Key,Value) when is_binary(Key), is_binary(Value) ->
     gen_server:call(Ref, {put, Key, Value}, infinity).
 
 -type transact_spec() :: {put, binary(), binary()} | {delete, binary()}.
--spec transact(hanoi(), [transact_spec()]) ->
+-spec transact(hanoidb(), [transact_spec()]) ->
     ok | {error, term()}.
 transact(Ref, TransactionSpec) ->
     gen_server:call(Ref, {transact, TransactionSpec}, infinity).
 
 -type kv_fold_fun() :: fun((binary(),binary(),any())->any()).
 
--spec fold(hanoi(),kv_fold_fun(),any()) -> any().
+-spec fold(hanoidb(),kv_fold_fun(),any()) -> any().
 fold(Ref,Fun,Acc0) ->
-    fold_range(Ref,Fun,Acc0,#btree_range{from_key= <<>>, to_key=undefined}).
+    fold_range(Ref,Fun,Acc0,#key_range{from_key= <<>>, to_key=undefined}).
 
--spec fold_range(hanoi(),kv_fold_fun(),any(),key_range()) -> any().
+-spec fold_range(hanoidb(),kv_fold_fun(),any(),key_range()) -> any().
 fold_range(Ref,Fun,Acc0,Range) ->
-    {ok, FoldWorkerPID} = hanoi_fold_worker:start(self()),
-    if Range#btree_range.limit < 10 ->
+    {ok, FoldWorkerPID} = hanoidb_fold_worker:start(self()),
+    if Range#key_range.limit < 10 ->
            ok = gen_server:call(Ref, {blocking_range, FoldWorkerPID, Range}, infinity);
       true ->
            ok = gen_server:call(Ref, {snapshot_range, FoldWorkerPID, Range}, infinity)
    end,
    MRef = erlang:monitor(process, FoldWorkerPID),
    ?log("fold_range begin: self=~p, worker=~p~n", [self(), FoldWorkerPID]),
-    Result = receive_fold_range(MRef, FoldWorkerPID, Fun, Acc0, Range#btree_range.limit),
+    Result = receive_fold_range(MRef, FoldWorkerPID, Fun, Acc0, Range#key_range.limit),
    ?log("fold_range done: self:~p, result=~P~n", [self(), Result, 20]),
    Result.
 
@@ -154,8 +160,8 @@ receive_fold_range(MRef,PID,Fun,Acc0, Limit) ->
             {ok, Fun(K,V,Acc0)}
         catch
             Class:Exception ->
-                % ?log("Exception in hanoi fold: ~p ~p", [Exception, erlang:get_stacktrace()]),
-                % lager:warn("Exception in hanoi fold: ~p", [Exception]),
+                % ?log("Exception in hanoidb fold: ~p ~p", [Exception, erlang:get_stacktrace()]),
+                % lager:warn("Exception in hanoidb fold: ~p", [Exception]),
                 {'EXIT', Class, Exception, erlang:get_stacktrace()}
         end
     of
@@ -236,13 +242,13 @@ init([Dir, Opts]) ->
     case file:read_file_info(Dir) of
         {ok, #file_info{ type=directory }} ->
             {ok, TopLevel, MaxLevel} = open_levels(Dir,Opts),
-            {ok, Nursery} = hanoi_nursery:recover(Dir, TopLevel, MaxLevel);
+            {ok, Nursery} = hanoidb_nursery:recover(Dir, TopLevel, MaxLevel);
 
         {error, E} when E =:= enoent ->
             ok = file:make_dir(Dir),
-            {ok, TopLevel} = hanoi_level:open(Dir, ?TOP_LEVEL, undefined, Opts, self()),
+            {ok, TopLevel} = hanoidb_level:open(Dir, ?TOP_LEVEL, undefined, Opts, self()),
             MaxLevel = ?TOP_LEVEL,
-            {ok, Nursery} = hanoi_nursery:new(Dir, MaxLevel)
+            {ok, Nursery} = hanoidb_nursery:new(Dir, MaxLevel)
     end,
 
     {ok, #state{ top=TopLevel, dir=Dir, nursery=Nursery, opt=Opts, max_level=MaxLevel }}.
@@ -276,9 +282,9 @@ open_levels(Dir,Options) ->
     %%
     {TopLevel, MaxMerge} =
         lists:foldl( fun(LevelNo, {NextLevel, MergeWork0}) ->
-                             {ok, Level} = hanoi_level:open(Dir,LevelNo,NextLevel,Options,self()),
+                             {ok, Level} = hanoidb_level:open(Dir,LevelNo,NextLevel,Options,self()),
 
-                             MergeWork = MergeWork0 + hanoi_level:unmerged_count(Level),
+                             MergeWork = MergeWork0 + hanoidb_level:unmerged_count(Level),
 
                              {Level, MergeWork}
                      end,
@@ -291,10 +297,10 @@ open_levels(Dir,Options) ->
     {ok, TopLevel, MaxLevel}.
 
 do_merge(TopLevel, _Inc, N) when N =< 0 ->
-    ok = hanoi_level:await_incremental_merge(TopLevel);
+    ok = hanoidb_level:await_incremental_merge(TopLevel);
 
 do_merge(TopLevel, Inc, N) ->
-    ok = hanoi_level:begin_incremental_merge(TopLevel),
+    ok = hanoidb_level:begin_incremental_merge(TopLevel),
     do_merge(TopLevel, Inc, N-Inc).
 
 
@@ -311,9 +317,9 @@ parse_level(FileName) ->
 handle_info({bottom_level, N}, #state{ nursery=Nursery, top=TopLevel }=State)
   when N > State#state.max_level ->
     State2 = State#state{ max_level = N,
-                          nursery= hanoi_nursery:set_max_level(Nursery, N) },
+                          nursery= hanoidb_nursery:set_max_level(Nursery, N) },
 
-    hanoi_level:set_max_level(TopLevel, N),
+    hanoidb_level:set_max_level(TopLevel, N),
 
     {noreply, State2};
 
@@ -340,13 +346,13 @@ code_change(_OldVsn, State, _Extra) ->
 
 
 handle_call({snapshot_range, FoldWorkerPID, Range}, _From, State=#state{ top=TopLevel, nursery=Nursery }) ->
-    hanoi_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
-    Result = hanoi_level:snapshot_range(TopLevel, FoldWorkerPID, Range),
+    hanoidb_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
+    Result = hanoidb_level:snapshot_range(TopLevel, FoldWorkerPID, Range),
     {reply, Result, State};
 
 handle_call({blocking_range, FoldWorkerPID, Range}, _From, State=#state{ top=TopLevel, nursery=Nursery }) ->
-    hanoi_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
-    Result = hanoi_level:blocking_range(TopLevel, FoldWorkerPID, Range),
+    hanoidb_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
+    Result = hanoidb_level:blocking_range(TopLevel, FoldWorkerPID, Range),
     {reply, Result, State};
 
 handle_call({put, Key, Value}, _From, State) when is_binary(Key), is_binary(Value) ->
@@ -362,20 +368,20 @@ handle_call({delete, Key}, _From, State) when is_binary(Key) ->
     {reply, ok, State2};
 
 handle_call({get, Key}, _From, State=#state{ top=Top, nursery=Nursery } ) when is_binary(Key) ->
-    case hanoi_nursery:lookup(Key, Nursery) of
+    case hanoidb_nursery:lookup(Key, Nursery) of
         {value, ?TOMBSTONE} ->
             {reply, not_found, State};
         {value, Value} when is_binary(Value) ->
             {reply, {ok, Value}, State};
         none ->
-            Reply = hanoi_level:lookup(Top, Key),
+            Reply = hanoidb_level:lookup(Top, Key),
             {reply, Reply, State}
     end;
 
 handle_call(close, _From, State=#state{top=Top}) ->
     try
         {ok, State2} = flush_nursery(State),
-        ok = hanoi_level:close(Top),
+        ok = hanoidb_level:close(Top),
         {stop, normal, ok, State2}
     catch
         E:R ->
@@ -384,13 +390,13 @@ handle_call(close, _From, State=#state{top=Top}) ->
     end;
 
 handle_call(destroy, _From, State=#state{top=Top, nursery=Nursery }) ->
-    ok = hanoi_nursery:destroy(Nursery),
-    ok = hanoi_level:destroy(Top),
+    ok = hanoidb_nursery:destroy(Nursery),
+    ok = hanoidb_level:destroy(Top),
     {stop, normal, ok, State#state{ top=undefined, nursery=undefined, max_level=?TOP_LEVEL }}.
 
 
 do_put(Key, Value, State=#state{ nursery=Nursery, top=Top }) ->
-    {ok, Nursery2} = hanoi_nursery:add_maybe_flush(Key, Value, Nursery, Top),
+    {ok, Nursery2} = hanoidb_nursery:add_maybe_flush(Key, Value, Nursery, Top),
     {ok, State#state{ nursery=Nursery2 }}.
 
 do_transact([{put, Key, Value}], State) ->
@@ -400,12 +406,12 @@ do_transact([{delete, Key}], State) ->
 do_transact([], _State) ->
     ok;
 do_transact(TransactionSpec, State=#state{ nursery=Nursery, top=Top }) ->
-    {ok, Nursery2} = hanoi_nursery:transact(TransactionSpec, Nursery, Top),
+    {ok, Nursery2} = hanoidb_nursery:transact(TransactionSpec, Nursery, Top),
     {ok, State#state{ nursery=Nursery2 }}.
 
 flush_nursery(State=#state{nursery=Nursery, top=Top, dir=Dir, max_level=MaxLevel}) ->
-    ok = hanoi_nursery:finish(Nursery, Top),
-    {ok, Nursery2} = hanoi_nursery:new(Dir, MaxLevel),
+    ok = hanoidb_nursery:finish(Nursery, Top),
+    {ok, Nursery2} = hanoidb_nursery:new(Dir, MaxLevel),
     {ok, State#state{ nursery=Nursery2 }}.
 
 start_app() ->
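Taken together, the renamed public API can be exercised as in the following sketch, based on the specs shown above; keys, values, and the directory name are illustrative:

```erlang
%% Illustrative sketch only; calls mirror the -specs in this diff.
%% Calling code needs hanoidb.hrl for the #key_range{} record.
{ok, Db} = hanoidb:open("./data/example.hanoidb"),
ok = hanoidb:put(Db, <<"apple">>, <<"red">>),
{ok, <<"red">>} = hanoidb:lookup(Db, <<"apple">>),
ok = hanoidb:transact(Db, [{put, <<"pear">>, <<"green">>},
                           {delete, <<"apple">>}]),
%% Fold over everything from <<"a">> up to (not including) <<"q">>:
Keys = hanoidb:fold_range(Db,
                          fun(K, _V, Acc) -> [K|Acc] end,
                          [],
                          #key_range{ from_key = <<"a">>, to_key = <<"q">> }).
```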

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -30,18 +30,18 @@
 -define(TOMBSTONE, 'deleted').
 
 -define(KEY_IN_FROM_RANGE(Key,Range),
-        ((Range#btree_range.from_inclusive andalso
-          (Range#btree_range.from_key =< Key))
+        ((Range#key_range.from_inclusive andalso
+          (Range#key_range.from_key =< Key))
          orelse
-         (Range#btree_range.from_key < Key))).
+         (Range#key_range.from_key < Key))).
 
 -define(KEY_IN_TO_RANGE(Key,Range),
-        ((Range#btree_range.to_key == undefined)
+        ((Range#key_range.to_key == undefined)
          orelse
-         ((Range#btree_range.to_inclusive andalso
-           (Key =< Range#btree_range.to_key))
+         ((Range#key_range.to_inclusive andalso
+           (Key =< Range#key_range.to_key))
           orelse
-          (Key < Range#btree_range.to_key)))).
+          (Key < Range#key_range.to_key)))).
 
 -define(KEY_IN_RANGE(Key,Range),
         (?KEY_IN_FROM_RANGE(Key,Range) andalso ?KEY_IN_TO_RANGE(Key,Range))).
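As a sanity check on the guard logic above, a hypothetical helper (not in the commit) shows how the macros evaluate against a concrete range:

```erlang
%% Hypothetical illustration of the range macros above.
-include("hanoidb.hrl").

in_range(Key, Range) -> ?KEY_IN_RANGE(Key, Range).

%% With R = #key_range{from_key = <<"b">>, to_key = <<"d">>} and the
%% record defaults (from_inclusive = true, to_inclusive = false):
%%   in_range(<<"b">>, R) -> true   (inclusive lower bound)
%%   in_range(<<"c">>, R) -> true
%%   in_range(<<"d">>, R) -> false  (exclusive upper bound)
```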

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,7 +22,7 @@
 %%
 %% ----------------------------------------------------------------------------
 
--module(hanoi_app).
+-module(hanoidb_app).
 -author('Kresten Krab Thorup <krab@trifork.com>').
 
 -behaviour(application).
@@ -35,7 +35,7 @@
 %% ===================================================================
 
 start(_StartType, _StartArgs) ->
-    hanoi_sup:start_link().
+    hanoidb_sup:start_link().
 
 stop(_State) ->
     ok.

@@ -5,7 +5,7 @@
 
 {concurrent, 1}.
 
-{driver, basho_bench_driver_hanoi}.
+{driver, basho_bench_driver_hanoidb}.
 
 {key_generator, {int_to_bin,{uniform_int, 5000000}}}.
 
@@ -16,11 +16,11 @@
 %% the second element in the list below (e.g., "../../public/bitcask") must point to
 %% the relevant directory of a hanoi installation
 {code_paths, ["deps/stats",
-              "../hanoi/ebin",
-              "../hanoi/deps/plain_fsm/ebin",
-              "../hanoi/deps/ebloom/ebin"
+              "../hanoidb/ebin",
+              "../hanoidb/deps/plain_fsm/ebin",
+              "../hanoidb/deps/ebloom/ebin"
              ]}.
 
-{bitcask_dir, "/tmp/hanoi.bench"}.
+{bitcask_dir, "/tmp/hanoidb.bench"}.
 
 {bitcask_flags, [o_sync]}.

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,7 +22,7 @@
 %%
 %% ----------------------------------------------------------------------------
 
--module(hanoi_fold_worker).
+-module(hanoidb_fold_worker).
 -author('Kresten Krab Thorup <krab@trifork.com>').
 
 -ifdef(DEBUG).
@@ -65,7 +65,7 @@
 -behavior(plain_fsm).
 -export([data_vsn/0, code_change/3]).
 
--include("hanoi.hrl").
+-include("hanoidb.hrl").
 -include("plain_rpc.hrl").
 
 -record(state, {sendto, sendto_ref}).

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,16 +22,16 @@
 %%
 %% ----------------------------------------------------------------------------
 
--module(hanoi_level).
+-module(hanoidb_level).
 -author('Kresten Krab Thorup <krab@trifork.com>').
 
 -include("include/plain_rpc.hrl").
 
--include("include/hanoi.hrl").
--include("src/hanoi.hrl").
+-include("include/hanoidb.hrl").
+-include("src/hanoidb.hrl").
 
 %%
-%% Manages 0..2 of hanoi index file, and governs all aspects of
+%% Manages 0..2 of hanoidb index file, and governs all aspects of
 %% merging, lookup, folding, etc. for these files
 %%
 
@@ -182,12 +182,12 @@ initialize2(State) ->
     file:delete(BFileName),
     ok = file:rename(MFileName, AFileName),
 
-    {ok, BTA} = hanoi_reader:open(AFileName, [random|State#state.opts]),
+    {ok, BTA} = hanoidb_reader:open(AFileName, [random|State#state.opts]),
 
     case file:read_file_info(CFileName) of
         {ok, _} ->
             file:rename(CFileName, BFileName),
-            {ok, BTB} = hanoi_reader:open(BFileName, [random|State#state.opts]),
+            {ok, BTB} = hanoidb_reader:open(BFileName, [random|State#state.opts]),
             check_begin_merge_then_loop0(init_state(State#state{ a= BTA, b=BTB }));
 
         {error, enoent} ->
@@ -197,12 +197,12 @@ initialize2(State) ->
         {error, enoent} ->
             case file:read_file_info(BFileName) of
                 {ok, _} ->
-                    {ok, BTA} = hanoi_reader:open(AFileName, [random|State#state.opts]),
-                    {ok, BTB} = hanoi_reader:open(BFileName, [random|State#state.opts]),
+                    {ok, BTA} = hanoidb_reader:open(AFileName, [random|State#state.opts]),
+                    {ok, BTB} = hanoidb_reader:open(BFileName, [random|State#state.opts]),
 
                     case file:read_file_info(CFileName) of
                         {ok, _} ->
-                            {ok, BTC} = hanoi_reader:open(CFileName, [random|State#state.opts]);
+                            {ok, BTC} = hanoidb_reader:open(CFileName, [random|State#state.opts]);
                         {error, enoent} ->
                             BTC = undefined
                     end,
@@ -216,7 +216,7 @@ initialize2(State) ->
 
                     case file:read_file_info(AFileName) of
                         {ok, _} ->
-                            {ok, BTA} = hanoi_reader:open(AFileName, [random|State#state.opts]),
+                            {ok, BTA} = hanoidb_reader:open(AFileName, [random|State#state.opts]),
                             main_loop(init_state(State#state{ a=BTA }));
 
                         {error, enoent} ->
@@ -292,7 +292,7 @@ main_loop(State = #state{ next=Next }) ->
 
             plain_rpc:send_reply(From, ok),
 
-            case hanoi_reader:open(ToFileName, [random|State#state.opts]) of
+            case hanoidb_reader:open(ToFileName, [random|State#state.opts]) of
                 {ok, BT} ->
                     if SetPos == #state.b ->
                             check_begin_merge_then_loop(setelement(SetPos, State, BT));
@@ -396,7 +396,7 @@ main_loop(State = #state{ next=Next }) ->
             %% rpc would fail when we fall off the cliff
             if Next == undefined -> ok;
                true ->
-                    hanoi_level:close(Next)
+                    hanoidb_level:close(Next)
             end,
             plain_rpc:send_reply(From, ok),
             {ok, closing};
@@ -412,7 +412,7 @@ main_loop(State = #state{ next=Next }) ->
             %% rpc would fail when we fall off the cliff
             if Next == undefined -> ok;
                true ->
-                    hanoi_level:destroy(Next)
+                    hanoidb_level:destroy(Next)
             end,
             plain_rpc:send_reply(From, ok),
             {ok, destroying};
@@ -531,7 +531,7 @@ main_loop(State = #state{ next=Next }) ->
             % then, rename M to A, and open it
             AFileName = filename("A",State2),
             ok = file:rename(MFileName, AFileName),
-            {ok, BT} = hanoi_reader:open(AFileName, [random|State#state.opts]),
+            {ok, BT} = hanoidb_reader:open(AFileName, [random|State#state.opts]),
 
             % iff there is a C file, then move it to B position
             % TODO: consider recovery for this
@@ -629,7 +629,7 @@ do_step(StepFrom, PreviousWork, State) ->
     TotalWork = (MaxLevel-?TOP_LEVEL+1) * WorkUnit,
     WorkUnitsLeft = max(0, TotalWork-PreviousWork),
 
-    case hanoi:get_opt( merge_strategy, State#state.opts, fast) of
+    case hanoidb:get_opt( merge_strategy, State#state.opts, fast) of
         fast ->
             WorkToDoHere = min(WorkLeftHere, WorkUnitsLeft);
         predictable ->
@@ -696,7 +696,7 @@ do_lookup(_Key, [Pid]) when is_pid(Pid) ->
 do_lookup(Key, [undefined|Rest]) ->
     do_lookup(Key, Rest);
 do_lookup(Key, [BT|Rest]) ->
-    case hanoi_reader:lookup(BT, Key) of
+    case hanoidb_reader:lookup(BT, Key) of
         {ok, ?TOMBSTONE} ->
             not_found;
         {ok, Result} ->
@@ -706,10 +706,10 @@ do_lookup(Key, [BT|Rest]) ->
     end.
 
 close_if_defined(undefined) -> ok;
-close_if_defined(BT) -> hanoi_reader:close(BT).
+close_if_defined(BT) -> hanoidb_reader:close(BT).
 
 destroy_if_defined(undefined) -> ok;
-destroy_if_defined(BT) -> hanoi_reader:destroy(BT).
+destroy_if_defined(BT) -> hanoidb_reader:destroy(BT).
 
 stop_if_defined(undefined) -> ok;
 stop_if_defined(MergePid) when is_pid(MergePid) ->
@@ -736,7 +736,7 @@ begin_merge(State) ->
     try
         ?log("merge begun~n", []),
 
-        {ok, OutCount} = hanoi_merger:merge(AFileName, BFileName, XFileName,
+        {ok, OutCount} = hanoidb_merger:merge(AFileName, BFileName, XFileName,
                                             ?BTREE_SIZE(State#state.level + 1),
                                             State#state.next =:= undefined,
                                             State#state.opts ),
@@ -757,8 +757,8 @@ close_and_delete_a_and_b(State) ->
     AFileName = filename("A",State),
     BFileName = filename("B",State),
 
-    ok = hanoi_reader:close(State#state.a),
-    ok = hanoi_reader:close(State#state.b),
+    ok = hanoidb_reader:close(State#state.a),
+    ok = hanoidb_reader:close(State#state.b),
 
     ok = file:delete(AFileName),
     ok = file:delete(BFileName),
@@ -777,10 +777,10 @@ start_range_fold(FileName, WorkerPID, Range, State) ->
     try
         ?log("start_range_fold ~p on ~p -> ~p", [self, FileName, WorkerPID]),
         erlang:link(WorkerPID),
-        {ok, File} = hanoi_reader:open(FileName, [folding|State#state.opts]),
+        {ok, File} = hanoidb_reader:open(FileName, [folding|State#state.opts]),
         do_range_fold2(File, WorkerPID, self(), Range),
         erlang:unlink(WorkerPID),
-        hanoi_reader:close(File),
+        hanoidb_reader:close(File),
 
         %% this will release the pinning of the fold file
         Owner ! {range_fold_done, self(), FileName},
@@ -792,12 +792,12 @@ end
     end ),
     {ok, PID}.
 
--spec do_range_fold(BT        :: hanoi_reader:read_file(),
+-spec do_range_fold(BT        :: hanoidb_reader:read_file(),
                     WorkerPID :: pid(),
                     SelfOrRef :: pid() | reference(),
-                    Range     :: #btree_range{} ) -> ok.
+                    Range     :: #key_range{} ) -> ok.
 do_range_fold(BT, WorkerPID, SelfOrRef, Range) ->
-    case hanoi_reader:range_fold(fun(Key,Value,_) ->
+    case hanoidb_reader:range_fold(fun(Key,Value,_) ->
                                      WorkerPID ! {level_result, SelfOrRef, Key, Value},
                                      ok
                                  end,
@@ -815,12 +815,12 @@ do_range_fold(BT, WorkerPID, SelfOrRef, Range) ->
 
 -define(FOLD_CHUNK_SIZE, 100).
 
--spec do_range_fold2(BT        :: hanoi_reader:read_file(),
+-spec do_range_fold2(BT        :: hanoidb_reader:read_file(),
                      WorkerPID :: pid(),
                      SelfOrRef :: pid() | reference(),
-                     Range     :: #btree_range{} ) -> ok.
+                     Range     :: #key_range{} ) -> ok.
 do_range_fold2(BT, WorkerPID, SelfOrRef, Range) ->
-    try hanoi_reader:range_fold(fun(Key,Value,{0,KVs}) ->
+    try hanoidb_reader:range_fold(fun(Key,Value,{0,KVs}) ->
                                     send(WorkerPID, SelfOrRef, [{Key,Value}|KVs]),
                                     {?FOLD_CHUNK_SIZE-1, []};
                                 (Key,Value,{N,KVs}) ->

@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,7 +22,7 @@
 %%
 %% ----------------------------------------------------------------------------
 
--module(hanoi_merger).
+-module(hanoidb_merger).
 -author('Kresten Krab Thorup <krab@trifork.com>').
 
 %%
@@ -31,7 +31,7 @@
 
 -export([merge/6]).
 
--include("hanoi.hrl").
+-include("hanoidb.hrl").
 
 %% A merger which is inactive for this long will sleep
 %% which means that it will close open files, and compress
@@ -48,17 +48,17 @@
 -define(LOCAL_WRITER, true).
 
 merge(A,B,C, Size, IsLastLevel, Options) ->
-    {ok, BT1} = hanoi_reader:open(A, [sequential|Options]),
-    {ok, BT2} = hanoi_reader:open(B, [sequential|Options]),
+    {ok, BT1} = hanoidb_reader:open(A, [sequential|Options]),
+    {ok, BT2} = hanoidb_reader:open(B, [sequential|Options]),
     case ?LOCAL_WRITER of
         true ->
-            {ok, Out} = hanoi_writer:init([C, [{size,Size} | Options]]);
+            {ok, Out} = hanoidb_writer:init([C, [{size,Size} | Options]]);
         false ->
-            {ok, Out} = hanoi_writer:open(C, [{size,Size} | Options])
+            {ok, Out} = hanoidb_writer:open(C, [{size,Size} | Options])
     end,
 
-    {node, AKVs} = hanoi_reader:first_node(BT1),
-    {node, BKVs} = hanoi_reader:first_node(BT2),
+    {node, AKVs} = hanoidb_reader:first_node(BT1),
+    {node, BKVs} = hanoidb_reader:first_node(BT2),
 
     scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, 0, {0, none}).
 
@@ -66,9 +66,9 @@ terminate(Count, Out) ->
 
     case ?LOCAL_WRITER of
         true ->
-            {stop, normal, ok, _} = hanoi_writer:handle_call(close, self(), Out);
+            {stop, normal, ok, _} = hanoidb_writer:handle_call(close, self(), Out);
         false ->
-            ok = hanoi_writer:close(Out)
+            ok = hanoidb_writer:close(Out)
     end,
 
     {ok, Count}.
@@ -84,9 +84,9 @@ hibernate_scan(Keep) ->
     receive
         {step, From, HowMany} ->
             {BT1, BT2, OutBin, IsLastLevel, AKVs, BKVs, Count, N} = erlang:binary_to_term( zlib:gunzip( Keep ) ),
-            scan(hanoi_reader:deserialize(BT1),
-                 hanoi_reader:deserialize(BT2),
-                 hanoi_writer:deserialize(OutBin),
+            scan(hanoidb_reader:deserialize(BT1),
+                 hanoidb_reader:deserialize(BT2),
+                 hanoidb_writer:deserialize(OutBin),
                  IsLastLevel, AKVs, BKVs, Count, {N+HowMany, From})
     end.
 
@@ -104,9 +104,9 @@ scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, Count, {N, FromPID}) when N < 1, AK
     after ?HIBERNATE_TIMEOUT ->
         case ?LOCAL_WRITER of
             true ->
-                Args = {hanoi_reader:serialize(BT1),
-                        hanoi_reader:serialize(BT2),
-                        hanoi_writer:serialize(Out), IsLastLevel, AKVs, BKVs, Count, N},
+                Args = {hanoidb_reader:serialize(BT1),
+                        hanoidb_reader:serialize(BT2),
+                        hanoidb_writer:serialize(Out), IsLastLevel, AKVs, BKVs, Count, N},
                 Keep = zlib:gzip ( erlang:term_to_binary( Args ) ),
                 hibernate_scan(Keep);
             false ->
@@ -115,20 +115,20 @@ scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, Count, {N, FromPID}) when N < 1, AK
     end;
 
 scan(BT1, BT2, Out, IsLastLevel, [], BKVs, Count, Step) ->
-    case hanoi_reader:next_node(BT1) of
+    case hanoidb_reader:next_node(BT1) of
        {node, AKVs} ->
            scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, Count, Step);
        end_of_data ->
-            hanoi_reader:close(BT1),
+            hanoidb_reader:close(BT1),
            scan_only(BT2, Out, IsLastLevel, BKVs, Count, Step)
    end;
 
 scan(BT1, BT2, Out, IsLastLevel, AKVs, [], Count, Step) ->
-    case hanoi_reader:next_node(BT2) of
+    case hanoidb_reader:next_node(BT2) of
        {node, BKVs} ->
            scan(BT1, BT2, Out, IsLastLevel, AKVs, BKVs, Count, Step);
        end_of_data ->
-            hanoi_reader:close(BT2),
+            hanoidb_reader:close(BT2),
            scan_only(BT1, Out, IsLastLevel, AKVs, Count, Step)
    end;
 
@@ -136,9 +136,9 @@ scan(BT1, BT2, Out, IsLastLevel, [{Key1,Value1}|AT]=AKVs, [{Key2,Value2}|BT]=BKV
    if Key1 < Key2 ->
            case ?LOCAL_WRITER of
                true ->
-                    {noreply, Out2} = hanoi_writer:handle_cast({add, Key1, Value1}, Out);
+                    {noreply, Out2} = hanoidb_writer:handle_cast({add, Key1, Value1}, Out);
                false ->
-                    ok = hanoi_writer:add(Out2=Out, Key1, Value1)
+                    ok = hanoidb_writer:add(Out2=Out, Key1, Value1)
            end,
 
            scan(BT1, BT2, Out2, IsLastLevel, AT, BKVs, Count+1, step(Step));
@@ -146,9 +146,9 @@ scan(BT1, BT2, Out, IsLastLevel, [{Key1,Value1}|AT]=AKVs, [{Key2,Value2}|BT]=BKV
       Key2 < Key1 ->
           case ?LOCAL_WRITER of
               true ->
-                   {noreply, Out2} = hanoi_writer:handle_cast({add, Key2, Value2}, Out);
+                   {noreply, Out2} = hanoidb_writer:handle_cast({add, Key2, Value2}, Out);
               false ->
-                   ok = hanoi_writer:add(Out2=Out, Key2, Value2)
+                   ok = hanoidb_writer:add(Out2=Out, Key2, Value2)
           end,
           scan(BT1, BT2, Out2, IsLastLevel, AKVs, BT, Count+1, step(Step));
 
@@ -160,9 +160,9 @@ scan(BT1, BT2, Out, IsLastLevel, [{Key1,Value1}|AT]=AKVs, [{Key2,Value2}|BT]=BKV
       true ->
           case ?LOCAL_WRITER of
              true ->
-                  {noreply, Out2} = hanoi_writer:handle_cast({add, Key2, Value2}, Out);
+                  {noreply, Out2} = hanoidb_writer:handle_cast({add, Key2, Value2}, Out);
             false ->
-                  ok = hanoi_writer:add(Out2=Out, Key2, Value2)
+                  ok = hanoidb_writer:add(Out2=Out, Key2, Value2)
          end,
          scan(BT1, BT2, Out2, IsLastLevel, AT, BT, Count+1, step(Step, 2))
    end.
@@ -173,8 +173,8 @@ hibernate_scan_only(Keep) ->
    receive
        {step, From, HowMany} ->
            {BT, OutBin, IsLastLevel, KVs, Count, N} = erlang:binary_to_term( zlib:gunzip( Keep ) ),
-            scan_only(hanoi_reader:deserialize(BT),
-                      hanoi_writer:deserialize(OutBin),
+            scan_only(hanoidb_reader:deserialize(BT),
+                      hanoidb_writer:deserialize(OutBin),
                      IsLastLevel, KVs, Count, {N+HowMany, From})
    end.
 
@@ -191,14 +191,14 @@ scan_only(BT, Out, IsLastLevel, KVs, Count, {N, FromPID}) when N < 1, KVs =/= []
        {step, From, HowMany} ->
            scan_only(BT, Out, IsLastLevel, KVs, Count, {N+HowMany, From})
    after ?HIBERNATE_TIMEOUT ->
-        Args = {hanoi_reader:serialize(BT),
-                hanoi_writer:serialize(Out), IsLastLevel, KVs, Count, N},
+        Args = {hanoidb_reader:serialize(BT),
+                hanoidb_writer:serialize(Out), IsLastLevel, KVs, Count, N},
        Keep = zlib:gzip ( erlang:term_to_binary( Args ) ),
        hibernate_scan_only(Keep)
    end;
 
 scan_only(BT, Out, IsLastLevel, [], Count, {_, FromPID}=Step) ->
-    case hanoi_reader:next_node(BT) of
+    case hanoidb_reader:next_node(BT) of
        {node, KVs} ->
            scan_only(BT, Out, IsLastLevel, KVs, Count, Step);
        end_of_data ->
@@ -208,7 +208,7 @@ scan_only(BT, Out, IsLastLevel, [], Count, {_, FromPID}=Step) ->
                {PID, Ref} ->
                    PID ! {Ref, step_done}
            end,
-            hanoi_reader:close(BT),
+            hanoidb_reader:close(BT),
            terminate(Count, Out)
    end;
 
@@ -218,8 +218,8 @@ scan_only(BT, Out, true, [{_,?TOMBSTONE}|Rest], Count, Step) ->
 scan_only(BT, Out, IsLastLevel, [{Key,Value}|Rest], Count, Step) ->
    case ?LOCAL_WRITER of
        true ->
{noreply, Out2} = hanoi_writer:handle_cast({add, Key, Value}, Out);
|
{noreply, Out2} = hanoidb_writer:handle_cast({add, Key, Value}, Out);
|
||||||
false ->
|
false ->
|
||||||
ok = hanoi_writer:add(Out2=Out, Key, Value)
|
ok = hanoidb_writer:add(Out2=Out, Key, Value)
|
||||||
end,
|
end,
|
||||||
scan_only(BT, Out2, IsLastLevel, Rest, Count+1, step(Step)).
|
scan_only(BT, Out2, IsLastLevel, Rest, Count+1, step(Step)).
|
|
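Worth noting in the hunks above: an idle merge process does not just sit on its heap. After `?HIBERNATE_TIMEOUT` it serializes both readers and the writer, gzips the resulting term, and parks until the next `{step, From, HowMany}` message inflates the state and resumes the scan. A stripped-down sketch of that idiom (`idle/1` and `resume/3` are illustrative names, not part of the module):

```erlang
%% Sketch: park an idle worker on a compressed copy of its state.
%% resume/3 stands in for the scan/8 re-entry above; illustrative only.
idle(State) ->
    Keep = zlib:gzip(erlang:term_to_binary(State)),   % shrink the idle footprint
    receive
        {step, From, HowMany} ->
            resume(erlang:binary_to_term(zlib:gunzip(Keep)), From, HowMany)
    end.
```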
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,14 +22,14 @@
 %%
 %% ----------------------------------------------------------------------------

--module(hanoi_nursery).
+-module(hanoidb_nursery).
 -author('Kresten Krab Thorup <krab@trifork.com>').

 -export([new/2, recover/3, add/3, finish/2, lookup/2, add_maybe_flush/4]).
 -export([do_level_fold/3, set_max_level/2, transact/3, destroy/1]).

--include("include/hanoi.hrl").
+-include("include/hanoidb.hrl").
--include("hanoi.hrl").
+-include("hanoidb.hrl").
 -include_lib("kernel/include/file.hrl").

 -record(nursery, { log_file, dir, cache, total_size=0, count=0,
@@ -75,7 +75,7 @@ fill_cache(Transaction, Cache) when is_list(Transaction) ->

 read_nursery_from_log(Directory, MaxLevel) ->
     {ok, LogBinary} = file:read_file( ?LOGFILENAME(Directory) ),
-    KVs = hanoi_util:decode_crc_data( LogBinary, [] ),
+    KVs = hanoidb_util:decode_crc_data( LogBinary, [] ),
     Cache = fill_cache(KVs, gb_trees:empty()),
     {ok, #nursery{ dir=Directory, cache=Cache, count=gb_trees:size(Cache), max_level=MaxLevel }}.

@@ -86,7 +86,7 @@ read_nursery_from_log(Directory, MaxLevel) ->
 -spec add(#nursery{}, binary(), binary()|?TOMBSTONE) -> {ok, #nursery{}}.
 add(Nursery=#nursery{ log_file=File, cache=Cache, total_size=TotalSize, count=Count }, Key, Value) ->

-    Data = hanoi_util:crc_encapsulate_kv_entry( Key, Value ),
+    Data = hanoidb_util:crc_encapsulate_kv_entry( Key, Value ),
     ok = file:write(File, Data),

     Nursery1 = do_sync(File, Nursery),
@@ -101,7 +101,7 @@ add(Nursery=#nursery{ log_file=File, cache=Cache, total_size=TotalSize, count=Co
     end.

 do_sync(File, Nursery) ->
-    case application:get_env(hanoi, sync_strategy) of
+    case application:get_env(hanoidb, sync_strategy) of
         {ok, sync} ->
             file:datasync(File),
             LastSync = now();
@@ -143,15 +143,15 @@ finish(#nursery{ dir=Dir, cache=Cache, log_file=LogFile,
         N when N>0 ->
             %% next, flush cache to a new BTree
             BTreeFileName = filename:join(Dir, "nursery.data"),
-            {ok, BT} = hanoi_writer:open(BTreeFileName, [{size,?BTREE_SIZE(?TOP_LEVEL)},
+            {ok, BT} = hanoidb_writer:open(BTreeFileName, [{size,?BTREE_SIZE(?TOP_LEVEL)},
                                                          {compress, none}]),
             try
                 lists:foreach( fun({Key,Value}) ->
-                                   ok = hanoi_writer:add(BT, Key, Value)
+                                   ok = hanoidb_writer:add(BT, Key, Value)
                                end,
                                gb_trees:to_list(Cache))
             after
-                ok = hanoi_writer:close(BT)
+                ok = hanoidb_writer:close(BT)
             end,

             % {ok, FileInfo} = file:read_file_info(BTreeFileName),
@@ -160,11 +160,11 @@ finish(#nursery{ dir=Dir, cache=Cache, log_file=LogFile,


             %% inject the B-Tree (blocking RPC)
-            ok = hanoi_level:inject(TopLevel, BTreeFileName),
+            ok = hanoidb_level:inject(TopLevel, BTreeFileName),

             %% issue some work if this is a top-level inject (blocks until previous such
             %% incremental merge is finished).
-            hanoi_level:begin_incremental_merge(TopLevel),
+            hanoidb_level:begin_incremental_merge(TopLevel),

             ok;

@@ -201,7 +201,7 @@ add_maybe_flush(Key, Value, Nursery, Top) ->
 flush(Nursery=#nursery{ dir=Dir, max_level=MaxLevel }, Top) ->
     ok = finish(Nursery, Top),
     {error, enoent} = file:read_file_info( filename:join(Dir, "nursery.log")),
-    hanoi_nursery:new(Dir, MaxLevel).
+    hanoidb_nursery:new(Dir, MaxLevel).

 has_room(#nursery{ count=Count }, N) ->
     (Count+N) < ?BTREE_SIZE(?TOP_LEVEL).
@@ -217,7 +217,7 @@ ensure_space(Nursery, NeededRoom, Top) ->
 transact(Spec, Nursery=#nursery{ log_file=File, cache=Cache0, total_size=TotalSize }, Top) ->
     Nursery1 = ensure_space(Nursery, length(Spec), Top),

-    Data = hanoi_util:crc_encapsulate_transaction( Spec ),
+    Data = hanoidb_util:crc_encapsulate_transaction( Spec ),
     ok = file:write(File, Data),

     Nursery2 = do_sync(File, Nursery1),
@@ -254,7 +254,7 @@ do_level_fold(#nursery{ cache=Cache }, FoldWorkerPID, KeyRange) ->
                     {LastKey, Count}
                 end
             end,
-            {undefined, KeyRange#btree_range.limit},
+            {undefined, KeyRange#key_range.limit},
             gb_trees:to_list(Cache))
     of
         {LastKey, limit} when LastKey =/= undefined ->
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,12 +22,12 @@
 %%
 %% ----------------------------------------------------------------------------

--module(hanoi_reader).
+-module(hanoidb_reader).
 -author('Kresten Krab Thorup <krab@trifork.com>').

 -include_lib("kernel/include/file.hrl").
--include("include/hanoi.hrl").
+-include("include/hanoidb.hrl").
--include("hanoi.hrl").
+-include("hanoidb.hrl").
 -include("include/plain_rpc.hrl").

 -export([open/1, open/2,close/1,lookup/2,fold/3,range_fold/4, destroy/1]).
@@ -50,14 +50,14 @@ open(Name) ->
 open(Name, Config) ->
     case proplists:get_bool(sequential, Config) of
         true ->
-            ReadBufferSize = hanoi:get_opt(read_buffer_size, Config, 512 * 1024),
+            ReadBufferSize = hanoidb:get_opt(read_buffer_size, Config, 512 * 1024),
             {ok, File} = file:open(Name, [raw,read,{read_ahead, ReadBufferSize},binary]),
             {ok, #index{file=File, name=Name, config=Config}};

         false ->
             case proplists:get_bool(folding, Config) of
                 true ->
-                    ReadBufferSize = hanoi:get_opt(read_buffer_size, Config, 512 * 1024),
+                    ReadBufferSize = hanoidb:get_opt(read_buffer_size, Config, 512 * 1024),
                     {ok, File} = file:open(Name, [read,{read_ahead, ReadBufferSize},binary]);
                 false ->
                     {ok, File} = file:open(Name, [read,binary])
@@ -113,13 +113,13 @@ fold1(File,Fun,Acc0) ->
     end.

 range_fold(Fun, Acc0, #index{file=File,root=Root}, Range) ->
-    case lookup_node(File,Range#btree_range.from_key,Root,0) of
+    case lookup_node(File,Range#key_range.from_key,Root,0) of
         {ok, {Pos,_}} ->
             file:position(File, Pos),
-            do_range_fold(Fun, Acc0, File, Range, Range#btree_range.limit);
+            do_range_fold(Fun, Acc0, File, Range, Range#key_range.limit);
         {ok, Pos} ->
             file:position(File, Pos),
-            do_range_fold(Fun, Acc0, File, Range, Range#btree_range.limit);
+            do_range_fold(Fun, Acc0, File, Range, Range#key_range.limit);
         none ->
             {done, Acc0}
     end.
@@ -301,7 +301,7 @@ find_start(K, KVs) ->
 read_node(File,{Pos,Size}) ->
     % error_logger:info_msg("read_node ~p ~p ~p~n", [File, Pos, Size]),
     {ok, <<_:32, Level:16/unsigned, Data/binary>>} = file:pread(File, Pos, Size),
-    hanoi_util:decode_index_node(Level, Data);
+    hanoidb_util:decode_index_node(Level, Data);

 read_node(File,Pos) ->
     {ok, Pos} = file:position(File, Pos),
@@ -315,7 +315,7 @@ read_node(File) ->
         0 -> eof;
         _ ->
             {ok, Data} = file:read(File, Len-2),
-            hanoi_util:decode_index_node(Level, Data)
+            hanoidb_util:decode_index_node(Level, Data)
     end.


@@ -328,7 +328,7 @@ next_leaf_node(File) ->
         eof;
     {ok, <<Len:32, 0:16>>} ->
         {ok, Data} = file:read(File, Len-2),
-        hanoi_util:decode_index_node(0, Data);
+        hanoidb_util:decode_index_node(0, Data);
     {ok, <<Len:32, _:16>>} ->
         {ok, _} = file:position(File, {cur,Len-2}),
         next_leaf_node(File)
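A detail worth keeping in mind when reading these hunks: on disk, every index node is framed as a 32-bit length followed by a 16-bit level (0 marks a leaf node), and the length covers the level field plus the payload, which is why the code reads `Len-2` bytes after the header. A minimal sketch of that framing (`decode_block/1` is a hypothetical helper, not in the commit):

```erlang
%% Sketch: one on-disk node is <<Len:32, Level:16, Body/binary>>;
%% Len counts the 2-byte level plus the body, matching the Len-2 reads above.
decode_block(<<Len:32, Level:16/unsigned, Rest/binary>>) ->
    BodySize = Len - 2,
    <<Body:BodySize/binary, Tail/binary>> = Rest,
    {Level, Body, Tail}.
```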
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,7 +22,7 @@
 %%
 %% ----------------------------------------------------------------------------

--module(hanoi_sup).
+-module(hanoidb_sup).
 -author('Kresten Krab Thorup <krab@trifork.com>').

 -behaviour(supervisor).
@@ -20,14 +20,14 @@

 %%% NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE
 %%%
-%%% This is a temporary copy of riak_kv_backend, just here to keep hanoi
+%%% This is a temporary copy of riak_kv_backend, just here to keep hanoidb
-%%% development private for now. When riak_kv_hanoi_backend is moved to
+%%% development private for now. When riak_kv_hanoidb_backend is moved to
 %%% riak_kv, delete this file.
 %%%
 %%% NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE


--module(hanoi_temp_riak_kv_backend).
+-module(hanoidb_temp_riak_kv_backend).

 -export([behaviour_info/1]).
 -export([callback_after/3]).
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,12 +22,12 @@
 %%
 %% ----------------------------------------------------------------------------

--module(hanoi_util).
+-module(hanoidb_util).
 -author('Kresten Krab Thorup <krab@trifork.com>').

 -compile(export_all).

--include("src/hanoi.hrl").
+-include("src/hanoidb.hrl").

 -define(ERLANG_ENCODED, 131).
 -define(CRC_ENCODED, 127).
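The two tag values just above are not arbitrary: everything `erlang:term_to_binary/1` produces begins with the external-term-format version byte 131, so 127 can never collide with it and is free to mark CRC-encapsulated records in the same log stream. For example:

```erlang
%% Output of term_to_binary/1 always starts with version byte 131
%% (?ERLANG_ENCODED), leaving 127 (?CRC_ENCODED) free as a distinct tag.
<<131, _/binary>> = erlang:term_to_binary({<<"key">>, <<"value">>}).
```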
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,10 +22,10 @@
 %%
 %% ----------------------------------------------------------------------------

--module(hanoi_writer).
+-module(hanoidb_writer).
 -author('Kresten Krab Thorup <krab@trifork.com>').

--include("hanoi.hrl").
+-include("hanoidb.hrl").

 %%
 %% Streaming btree writer. Accepts only monotonically increasing keys for put.
@@ -87,16 +87,16 @@ init([Name,Options]) ->
     case do_open(Name, Options, [exclusive]) of
         {ok, IdxFile} ->
             {ok, BloomFilter} = ebloom:new(erlang:min(Size,16#ffffffff), 0.01, 123),
-            BlockSize = hanoi:get_opt(block_size, Options, ?NODE_SIZE),
+            BlockSize = hanoidb:get_opt(block_size, Options, ?NODE_SIZE),
             {ok, #state{ name=Name,
                          index_file_pos=0, index_file=IdxFile,
                          bloom = BloomFilter,
                          block_size = BlockSize,
-                         compress = hanoi:get_opt(compress, Options, none),
+                         compress = hanoidb:get_opt(compress, Options, none),
                          opts = Options
                        }};
         {error, _}=Error ->
-            error_logger:error_msg("hanoi_writer cannot open ~p: ~p~n", [Name, Error]),
+            error_logger:error_msg("hanoidb_writer cannot open ~p: ~p~n", [Name, Error]),
             {stop, Error}
     end.

@@ -120,7 +120,7 @@ terminate(normal,_State) ->
 %% premature delete -> cleanup
 terminate(_Reason,State) ->
     file:close( State#state.index_file ),
-    file:delete( hanoi_util:index_file_name(State#state.name) ).
+    file:delete( hanoidb_util:index_file_name(State#state.name) ).

 code_change(_OldVsn, State, _Extra) ->
     {ok, State}.
@@ -149,8 +149,8 @@ deserialize(Binary) ->


 do_open(Name, Options, OpenOpts) ->
-    WriteBufferSize = hanoi:get_opt(write_buffer_size, Options, 512 * 1024),
+    WriteBufferSize = hanoidb:get_opt(write_buffer_size, Options, 512 * 1024),
-    file:open( hanoi_util:index_file_name(Name),
+    file:open( hanoidb_util:index_file_name(Name),
                [raw, append, {delayed_write, WriteBufferSize, 2000} | OpenOpts]).


@@ -196,7 +196,7 @@ add_record(Level, Key, Value,
             end
     end,

-    NewSize = NodeSize + hanoi_util:estimate_node_size_increment(List, Key, Value),
+    NewSize = NodeSize + hanoidb_util:estimate_node_size_increment(List, Key, Value),

     ok = ebloom:insert( State#state.bloom, Key ),

@@ -219,7 +219,7 @@ add_record(Level, Key, Value, State=#state{ nodes=[ #node{level=Level2 } |_]=Sta

 close_node(#state{nodes=[#node{ level=Level, members=NodeMembers }|RestNodes], compress=Compress} = State) ->
     OrderedMembers = lists:reverse(NodeMembers),
-    {ok, BlockData} = hanoi_util:encode_index_node(OrderedMembers, Compress),
+    {ok, BlockData} = hanoidb_util:encode_index_node(OrderedMembers, Compress),
     NodePos = State#state.index_file_pos,

     BlockSize = erlang:iolist_size(BlockData),
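As the module comment in this hunk says, the writer is a streaming btree writer that only accepts monotonically increasing keys, which is what lets it build the file append-only. A minimal usage sketch along the lines of the nursery flush and the merger tests (the `write_sorted/2` wrapper is illustrative):

```erlang
%% Sketch: stream an already-sorted list of {Key, Value} pairs to disk.
write_sorted(FileName, SortedKVs) ->
    {ok, BT} = hanoidb_writer:open(FileName, [{compress, none}]),
    try
        lists:foreach(fun({Key, Value}) ->
                          ok = hanoidb_writer:add(BT, Key, Value)  % keys must ascend
                      end, SortedKVs)
    after
        ok = hanoidb_writer:close(BT)
    end.
```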
src/lsm_btree.erl.orig (new file, 241 lines)
@@ -0,0 +1,241 @@
+%% ----------------------------------------------------------------------------
+%%
+%% lsm_btree: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%%
+%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
+%% http://trifork.com/ info@trifork.com
+%%
+%% Copyright 2012 (c) Basho Technologies, Inc. All Rights Reserved.
+%% http://basho.com/ info@basho.com
+%%
+%% This file is provided to you under the Apache License, Version 2.0 (the
+%% "License"); you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%   http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+%% License for the specific language governing permissions and limitations
+%% under the License.
+%%
+%% ----------------------------------------------------------------------------
+
+-module(lsm_btree).
+-author('Kresten Krab Thorup <krab@trifork.com>').
+
+
+-behavior(gen_server).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+         terminate/2, code_change/3]).
+
+-export([open/1, close/1, get/2, lookup/2, delete/2, put/3,
+         async_range/2, async_fold_range/4, sync_range/2, sync_fold_range/4]).
+
+-include("lsm_btree.hrl").
+-include_lib("kernel/include/file.hrl").
+-include_lib("include/lsm_btree.hrl").
+
+-record(state, { top, nursery, dir }).
+
+
+%% PUBLIC API
+
+open(Dir) ->
+    gen_server:start(?MODULE, [Dir], []).
+
+close(Ref) ->
+    try
+        gen_server:call(Ref, close)
+    catch
+        exit:{noproc,_} -> ok;
+        exit:noproc -> ok;
+        %% Handle the case where the monitor triggers
+        exit:{normal, _} -> ok
+    end.
+
+get(Ref,Key) when is_binary(Key) ->
+    gen_server:call(Ref, {get, Key}).
+
+%% for compatibility with original code
+lookup(Ref,Key) when is_binary(Key) ->
+    gen_server:call(Ref, {get, Key}).
+
+delete(Ref,Key) when is_binary(Key) ->
+    gen_server:call(Ref, {delete, Key}).
+
+put(Ref,Key,Value) when is_binary(Key), is_binary(Value) ->
+    gen_server:call(Ref, {put, Key, Value}).
+
+sync_range(Ref, #btree_range{}=Range) ->
+    gen_server:call(Ref, {sync_range, self(), Range}).
+
+sync_fold_range(Ref,Fun,Acc0,Range) ->
+    {ok, PID} = sync_range(Ref, Range),
+    sync_receive_fold_range(PID,Fun,Acc0).
+
+sync_receive_fold_range(PID,Fun,Acc0) ->
+    receive
+        {fold_result, PID, K,V} ->
+            sync_receive_fold_range(PID, Fun, Fun(K,V,Acc0));
+        {fold_limit, PID, _} ->
+            Acc0;
+        {fold_done, PID} ->
+            Acc0
+    end.
+
+
+async_range(Ref, #btree_range{}=Range) ->
+    gen_server:call(Ref, {async_range, self(), Range}).
+
+async_fold_range(Ref,Fun,Acc0,Range) ->
+    Range2 = Range#btree_range{ limit=?BTREE_ASYNC_CHUNK_SIZE },
+    FoldMoreFun = fun() ->
+                          {ok, PID} = gen_server:call(Ref, {sync_range, self(), Range}),
+                          async_receive_fold_range(PID,Fun,Acc0,Ref,Range2)
+                  end,
+    {async, FoldMoreFun}.
+
+async_receive_fold_range(PID,Fun,Acc0,Ref,Range) ->
+    receive
+        {fold_result, PID, K,V} ->
+            async_receive_fold_range(PID, Fun, Fun(K,V,Acc0), Ref, Range);
+        {fold_limit, PID, Key} ->
+            Range2 = Range#btree_range{ from_key = Key, from_inclusive=true },
+            async_fold_range(Ref, Fun, Acc0, Range2);
+        {fold_done, PID} ->
+            {ok, Acc0}
+    end.
+
+
+init([Dir]) ->
+
+    case file:read_file_info(Dir) of
+        {ok, #file_info{ type=directory }} ->
+            {ok, TopLevel} = open_levels(Dir),
+            {ok, Nursery} = lsm_btree_nursery:recover(Dir, TopLevel);
+
+        {error, E} when E =:= enoent ->
+            ok = file:make_dir(Dir),
+            {ok, TopLevel} = lsm_btree_level:open(Dir, ?TOP_LEVEL, undefined),
+            {ok, Nursery} = lsm_btree_nursery:new(Dir)
+    end,
+
+    {ok, #state{ top=TopLevel, dir=Dir, nursery=Nursery }}.
+
+
+open_levels(Dir) ->
+    {ok, Files} = file:list_dir(Dir),
+
+    %% parse file names and find max level
+    {MinLevel,MaxLevel} =
+        lists:foldl(fun(FileName, {MinLevel,MaxLevel}) ->
+                            case parse_level(FileName) of
+                                {ok, Level} ->
+                                    { erlang:min(MinLevel, Level),
+                                      erlang:max(MaxLevel, Level) };
+                                _ ->
+                                    {MinLevel,MaxLevel}
+                            end
+                    end,
+                    {?TOP_LEVEL, ?TOP_LEVEL},
+                    Files),
+
+    % error_logger:info_msg("found level files ... {~p,~p}~n", [MinLevel, MaxLevel]),
+
+    %% remove old nursery file
+    file:delete(filename:join(Dir,"nursery.data")),
+
+    TopLevel =
+        lists:foldl( fun(LevelNo, Prev) ->
+                             {ok, Level} = lsm_btree_level:open(Dir,LevelNo,Prev),
+                             Level
+                     end,
+                     undefined,
+                     lists:seq(MaxLevel, MinLevel, -1)),
+
+    {ok, TopLevel}.
+
+parse_level(FileName) ->
+    case re:run(FileName, "^[^\\d]+-(\\d+)\\.data$", [{capture,all_but_first,list}]) of
+        {match,[StringVal]} ->
+            {ok, list_to_integer(StringVal)};
+        _ ->
+            nomatch
+    end.
+
+
+handle_info(Info,State) ->
+    error_logger:error_msg("Unknown info ~p~n", [Info]),
+    {stop,bad_msg,State}.
+
+handle_cast(Info,State) ->
+    error_logger:error_msg("Unknown cast ~p~n", [Info]),
+    {stop,bad_msg,State}.
+
+
+%% premature delete -> cleanup
+terminate(_Reason,_State) ->
+    % error_logger:info_msg("got terminate(~p,~p)~n", [Reason,State]),
+    % flush_nursery(State),
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+
+handle_call({async_range, Sender, Range}, _From, State=#state{ top=TopLevel, nursery=Nursery }) ->
+    {ok, FoldWorkerPID} = lsm_btree_fold_worker:start(Sender),
+    lsm_btree_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
+    Result = lsm_btree_level:async_range(TopLevel, FoldWorkerPID, Range),
+    {reply, Result, State};
+
+handle_call({sync_range, Sender, Range}, _From, State=#state{ top=TopLevel, nursery=Nursery }) ->
+    {ok, FoldWorkerPID} = lsm_btree_fold_worker:start(Sender),
+    lsm_btree_nursery:do_level_fold(Nursery, FoldWorkerPID, Range),
+    Result = lsm_btree_level:sync_range(TopLevel, FoldWorkerPID, Range),
+    {reply, Result, State};
+
+handle_call({put, Key, Value}, _From, State) when is_binary(Key), is_binary(Value) ->
+    {ok, State2} = do_put(Key, Value, State),
+    {reply, ok, State2};
+
+handle_call({delete, Key}, _From, State) when is_binary(Key) ->
+    {ok, State2} = do_put(Key, ?TOMBSTONE, State),
+    {reply, ok, State2};
+
+handle_call({get, Key}, _From, State=#state{ top=Top, nursery=Nursery } ) when is_binary(Key) ->
+    case lsm_btree_nursery:lookup(Key, Nursery) of
+        {value, ?TOMBSTONE} ->
+            {reply, not_found, State};
+        {value, Value} when is_binary(Value) ->
+            {reply, {ok, Value}, State};
+        none ->
+            Reply = lsm_btree_level:lookup(Top, Key),
+            {reply, Reply, State}
+    end;
+
+handle_call(close, _From, State=#state{top=Top}) ->
+    try
+        {ok, State2} = flush_nursery(State),
+        ok = lsm_btree_level:close(Top),
+        {stop, normal, ok, State2}
+    catch
+        E:R ->
+            error_logger:info_msg("exception from close ~p:~p~n", [E,R]),
+            {stop, normal, ok, State}
+    end.
+
+do_put(Key, Value, State=#state{ nursery=Nursery, top=Top }) ->
+    {ok, Nursery2} = lsm_btree_nursery:add_maybe_flush(Key, Value, Nursery, Top),
+    {ok, State#state{ nursery=Nursery2 }}.
+
+flush_nursery(State=#state{nursery=Nursery, top=Top, dir=Dir}) ->
+    ok = lsm_btree_nursery:finish(Nursery, Top),
+    {ok, Nursery2} = lsm_btree_nursery:new(Dir),
+    {ok, State#state{ nursery=Nursery2 }}.
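The `.orig` file above is a snapshot of the module as it looked before the rename, kept for reference. Its public face is the small gen_server wrapper shown at the top; a round-trip against that API would look roughly like this (the `demo/1` wrapper is illustrative):

```erlang
%% Sketch: open/put/get/delete round-trip with the pre-rename lsm_btree API.
demo(Dir) ->
    {ok, Tree} = lsm_btree:open(Dir),
    ok = lsm_btree:put(Tree, <<"k">>, <<"v">>),
    {ok, <<"v">>} = lsm_btree:get(Tree, <<"k">>),
    ok = lsm_btree:delete(Tree, <<"k">>),
    not_found = lsm_btree:get(Tree, <<"k">>),   % deletes write a tombstone
    ok = lsm_btree:close(Tree).
```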
src/qtop.erl (new file, 40 lines)
@@ -0,0 +1,40 @@
+-module(qtop).
+
+-export([max/0, max/1, queue/2, queue/1]).
+
+max() ->
+    max(5).
+
+max(N) ->
+    PIDs = erlang:processes(),
+    Pairs = lists:foldl(fun(PID,Acc) ->
+                            case erlang:process_info(PID, message_queue_len) of
+                                {message_queue_len, Len} ->
+                                    [{Len, PID}|Acc];
+                                _ ->
+                                    Acc
+                            end
+                        end,
+                        [],
+                        PIDs),
+    [{_, MaxPID}|_] = lists:reverse(lists:sort(Pairs)),
+    queue(MaxPID,N).
+
+queue(PID) ->
+    queue(PID, 5).
+
+queue(PID, N) when is_list(PID) ->
+    queue(erlang:list_to_pid(PID), N);
+queue(MaxPID, N) ->
+    {message_queue_len, MaxLen} = erlang:process_info(MaxPID, message_queue_len),
+    {messages, Msgs} = erlang:process_info(MaxPID, messages),
+    {Front30,_} = lists:split(min(N,length(Msgs)), Msgs),
+    io:format("==== PID: ~p, qlen:~p~n", [MaxPID,MaxLen]),
+    lists:foldl( fun(Msg,M) ->
+                     io:format("[~p]: ~P~n", [M, Msg,30]),
+                     M+1
+                 end,
+                 1,
+                 Front30),
+    ok.
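`qtop` is a small debugging aid added by this commit: `max/0` finds the process with the longest message queue and prints its first few messages, while `queue/1,2` inspects a specific pid, accepting either a pid term or a pid string. Typical use from an attached shell:

```erlang
%% Inspect the busiest mailbox in the node, then a specific process.
1> qtop:max().                     % top offender, first 5 messages
2> qtop:queue("<0.123.0>", 10).    % pid strings go through list_to_pid/1
```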
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2012 (c) Basho Technologies, Inc. All Rights Reserved.
 %% http://basho.com/ info@basho.com
@@ -19,8 +19,8 @@
 %%
 %% ----------------------------------------------------------------------------

--module(riak_kv_hanoi_backend).
+-module(riak_kv_hanoidb_backend).
--behavior(hanoi_temp_riak_kv_backend).
+-behavior(hanoidb_temp_riak_kv_backend).
 -author('Steve Vinoski <steve@basho.com>').
 -author('Greg Burd <greg@basho.com>').

@@ -51,7 +51,7 @@
               to_key_range/1]).
 -endif.

--include("include/hanoi.hrl").
+-include("include/hanoidb.hrl").

 -define(API_VERSION, 1).
 %% TODO: for when this backend supports 2i
@@ -63,7 +63,8 @@
                 config :: config() }).

 -type state() :: #state{}.
--type config() :: [{atom(), term()}].
+-type config_option() :: {data_root, string()} | hanoidb:config_option().
+-type config() :: [config_option()].

 %% ===================================================================
 %% Public API
@@ -85,37 +86,37 @@ capabilities(_) ->
 capabilities(_, _) ->
     {ok, ?CAPABILITIES}.

-%% @doc Start the hanoi backend
+%% @doc Start the hanoidb backend
 -spec start(integer(), config()) -> {ok, state()} | {error, term()}.
 start(Partition, Config) ->
     %% Get the data root directory
-    case app_helper:get_prop_or_env(data_root, Config, hanoi) of
+    case app_helper:get_prop_or_env(data_root, Config, hanoidb) of
         undefined ->
-            lager:error("Failed to create hanoi dir: data_root is not set"),
+            lager:error("Failed to create hanoidb dir: data_root is not set"),
             {error, data_root_unset};
         DataRoot ->
-            AppStart = case application:start(hanoi) of
+            AppStart = case application:start(hanoidb) of
                            ok ->
                                ok;
                            {error, {already_started, _}} ->
                                ok;
                            {error, StartReason} ->
-                               lager:error("Failed to init the hanoi backend: ~p", [StartReason]),
+                               lager:error("Failed to init the hanoidb backend: ~p", [StartReason]),
                                {error, StartReason}
                        end,
            case AppStart of
                ok ->
                    case get_data_dir(DataRoot, integer_to_list(Partition)) of
                        {ok, DataDir} ->
-                           case hanoi:open(DataDir, Config) of
+                           case hanoidb:open(DataDir, Config) of
                                {ok, Tree} ->
                                    {ok, #state{tree=Tree, partition=Partition, config=Config }};
                                {error, OpenReason}=OpenError ->
-                                   lager:error("Failed to open hanoi: ~p\n", [OpenReason]),
+                                   lager:error("Failed to open hanoidb: ~p\n", [OpenReason]),
                                    OpenError
                            end;
                        {error, Reason} ->
-                           lager:error("Failed to start hanoi backend: ~p\n", [Reason]),
+                           lager:error("Failed to start hanoidb backend: ~p\n", [Reason]),
                            {error, Reason}
                    end;
                Error ->
@@ -123,19 +124,19 @@ start(Partition, Config) ->
            end
     end.

-%% @doc Stop the hanoi backend
+%% @doc Stop the hanoidb backend
 -spec stop(state()) -> ok.
 stop(#state{tree=Tree}) ->
-    ok = hanoi:close(Tree).
+    ok = hanoidb:close(Tree).

-%% @doc Retrieve an object from the hanoi backend
+%% @doc Retrieve an object from the hanoidb backend
 -spec get(riak_object:bucket(), riak_object:key(), state()) ->
                  {ok, any(), state()} |
                  {ok, not_found, state()} |
                  {error, term(), state()}.
 get(Bucket, Key, #state{tree=Tree}=State) ->
     BKey = to_object_key(Bucket, Key),
-    case hanoi:get(Tree, BKey) of
+    case hanoidb:get(Tree, BKey) of
         {ok, Value} ->
             {ok, Value, State};
         not_found ->
@@ -144,7 +145,7 @@ get(Bucket, Key, #state{tree=Tree}=State) ->
             {error, Reason, State}
     end.

-%% @doc Insert an object into the hanoi backend.
+%% @doc Insert an object into the hanoidb backend.
 -type index_spec() :: {add, Index, SecondaryKey} | {remove, Index, SecondaryKey}.
 -spec put(riak_object:bucket(), riak_object:key(), [index_spec()], binary(), state()) ->
                  {ok, state()} |
@@ -162,10 +163,10 @@ put(Bucket, PrimaryKey, IndexSpecs, Val, #state{tree=Tree}=State) ->
         end,
     Updates2 = [F(X) || X <- IndexSpecs],

-    ok = hanoi:transact(Tree, Updates1 ++ Updates2),
+    ok = hanoidb:transact(Tree, Updates1 ++ Updates2),
     {ok, State}.

-%% @doc Delete an object from the hanoi backend
+%% @doc Delete an object from the hanoidb backend
 -spec delete(riak_object:bucket(), riak_object:key(), [index_spec()], state()) ->
                  {ok, state()} |
                  {error, term(), state()}.
@@ -181,7 +182,7 @@ delete(Bucket, PrimaryKey, IndexSpecs, #state{tree=Tree}=State) ->
         end,
     Updates2 = [F(X) || X <- IndexSpecs],

-    case hanoi:transact(Tree, Updates1 ++ Updates2) of
+    case hanoidb:transact(Tree, Updates1 ++ Updates2) of
         ok ->
             {ok, State};
         {error, Reason} ->
@@ -215,12 +216,12 @@ fold_list_buckets(PrevBucket, Tree, FoldBucketsFun, Acc) ->
             RangeStart = to_object_key(<<PrevBucket/binary, 0>>, '_')
     end,

-    Range = #btree_range{ from_key=RangeStart, from_inclusive=true,
+    Range = #key_range{ from_key=RangeStart, from_inclusive=true,
                          to_key=undefined, to_inclusive=undefined,
                          limit=1 },

     %% grab next bucket, it's a limit=1 range query :-)
-    case hanoi:fold_range(Tree,
+    case hanoidb:fold_range(Tree,
                           fun(BucketKey,_Value,none) ->
                                   ?log( "IN_FOLDER ~p~n", [BucketKey]),
                                   case from_object_key(BucketKey) of
@@ -265,9 +266,9 @@ fold_keys(FoldKeysFun, Acc, Opts, #state{tree=Tree}) ->
     Range = to_key_range(Limiter),
     case proplists:get_bool(async_fold, Opts) of
         true ->
-            {async, fun() -> hanoi:fold_range(Tree, FoldFun, Acc, Range) end};
+            {async, fun() -> hanoidb:fold_range(Tree, FoldFun, Acc, Range) end};
         false ->
-            {ok, hanoi:fold_range(Tree, FoldFun, Acc, Range)}
+            {ok, hanoidb:fold_range(Tree, FoldFun, Acc, Range)}
     end.

 %% @doc Fold over all the objects for one or all buckets.
@@ -281,7 +282,7 @@ fold_objects(FoldObjectsFun, Acc, Opts, #state{tree=Tree}) ->
     ObjectFolder =
         fun() ->
                 % io:format(user, "starting fold_objects in ~p~n", [self()]),
-                Result = hanoi:fold_range(Tree, FoldFun, Acc, to_key_range(Bucket)),
+                Result = hanoidb:fold_range(Tree, FoldFun, Acc, to_key_range(Bucket)),
                 % io:format(user, "ended fold_objects in ~p => ~P~n", [self(),Result,20]),
                 Result
         end,
@@ -292,30 +293,30 @@ fold_objects(FoldObjectsFun, Acc, Opts, #state{tree=Tree}) ->
             {ok, ObjectFolder()}
     end.

-%% @doc Delete all objects from this hanoi backend
+%% @doc Delete all objects from this hanoidb backend
 -spec drop(state()) -> {ok, state()} | {error, term(), state()}.
 drop(#state{ tree=Tree, partition=Partition, config=Config }=State) ->
-    case hanoi:destroy(Tree) of
+    case hanoidb:destroy(Tree) of
         ok ->
             start(Partition, Config);
         {error, Term} ->
             {error, Term, State}
     end.

-%% @doc Returns true if this hanoi backend contains any
+%% @doc Returns true if this hanoidb backend contains any
 %% non-tombstone values; otherwise returns false.
 -spec is_empty(state()) -> boolean().
 is_empty(#state{tree=Tree}) ->
     FoldFun = fun(K, _V, Acc) -> [K|Acc] end,
     try
         Range = to_key_range(undefined),
-        [] =:= hanoi:fold_range(Tree, FoldFun, [], Range#btree_range{ limit=1 })
+        [] =:= hanoidb:fold_range(Tree, FoldFun, [], Range#key_range{ limit=1 })
     catch
         _:ok ->
             false
     end.

-%% @doc Get the status information for this hanoi backend
+%% @doc Get the status information for this hanoidb backend
 -spec status(state()) -> [{atom(), term()}].
 status(#state{}) ->
     %% TODO: not yet implemented
@@ -339,7 +340,7 @@ get_data_dir(DataRoot, Partition) ->
         ok ->
             {ok, PartitionDir};
         {error, Reason} ->
-            lager:error("Failed to create hanoi dir ~s: ~p", [PartitionDir, Reason]),
+            lager:error("Failed to create hanoidb dir ~s: ~p", [PartitionDir, Reason]),
             {error, Reason}
     end.

@@ -412,13 +413,13 @@ fold_objects_fun(FoldObjectsFun, FilterBucket) ->
 -define(MAX_INDEX_KEY, <<16,0,0,0,6>>).

 to_key_range(undefined) ->
-    #btree_range{ from_key = to_object_key(<<>>, <<>>),
+    #key_range{ from_key = to_object_key(<<>>, <<>>),
                 from_inclusive = true,
                 to_key = ?MAX_OBJECT_KEY,
                 to_inclusive = false
               };
 to_key_range({bucket, Bucket}) ->
-    #btree_range{ from_key = to_object_key(Bucket, <<>>),
+    #key_range{ from_key = to_object_key(Bucket, <<>>),
                 from_inclusive = true,
                 to_key = to_object_key(<<Bucket/binary, 0>>, <<>>),
                 to_inclusive = false };
@@ -427,12 +428,12 @@ to_key_range({index, Bucket, {eq, <<"$bucket">>, _Term}}) ->
 to_key_range({index, Bucket, {eq, Field, Term}}) ->
     to_key_range({index, Bucket, {range, Field, Term, Term}});
 to_key_range({index, Bucket, {range, <<"$key">>, StartTerm, EndTerm}}) ->
-    #btree_range{ from_key = to_object_key(Bucket, StartTerm),
+    #key_range{ from_key = to_object_key(Bucket, StartTerm),
                 from_inclusive = true,
                 to_key = to_object_key(Bucket, EndTerm),
                 to_inclusive = true };
 to_key_range({index, Bucket, {range, Field, StartTerm, EndTerm}}) ->
-    #btree_range{ from_key = to_index_key(Bucket, <<>>, Field, StartTerm),
+    #key_range{ from_key = to_index_key(Bucket, <<>>, Field, StartTerm),
                 from_inclusive = true,
                 to_key = to_index_key(Bucket, <<16#ff,16#ff,16#ff,16#ff,
                                                 16#ff,16#ff,16#ff,16#ff,
@@ -476,7 +477,7 @@ from_index_key(LKey) ->
 %% ===================================================================
 -ifdef(TEST).

--include("src/hanoi.hrl").
+-include("src/hanoidb.hrl").

 key_range_test() ->
     Range = to_key_range({bucket, <<"a">>}),
@@ -497,14 +498,14 @@ index_range_test() ->


 simple_test_() ->
-    ?assertCmd("rm -rf test/hanoi-backend"),
+    ?assertCmd("rm -rf test/hanoidb-backend"),
-    application:set_env(hanoi, data_root, "test/hanoid-backend"),
+    application:set_env(hanoidb, data_root, "test/hanoidbd-backend"),
-    hanoi_temp_riak_kv_backend:standard_test(?MODULE, []).
+    hanoidb_temp_riak_kv_backend:standard_test(?MODULE, []).

 custom_config_test_() ->
-    ?assertCmd("rm -rf test/hanoi-backend"),
+    ?assertCmd("rm -rf test/hanoidb-backend"),
-    application:set_env(hanoi, data_root, ""),
+    application:set_env(hanoidb, data_root, ""),
-    hanoi_temp_riak_kv_backend:standard_test(?MODULE, [{data_root, "test/hanoi-backend"}]).
+    hanoidb_temp_riak_kv_backend:standard_test(?MODULE, [{data_root, "test/hanoidb-backend"}]).

 -ifdef(PROPER).

@@ -519,25 +520,25 @@ eqc_test_() ->
       [?_assertEqual(true,
                      backend_eqc:test(?MODULE, false,
                                       [{data_root,
-                                        "test/hanoidb-backend"},
+                                        "test/hanoidbdb-backend"},
                                        {async_fold, false}]))]},
      {timeout, 60,
       [?_assertEqual(true,
                      backend_eqc:test(?MODULE, false,
                                       [{data_root,
-                                        "test/hanoidb-backend"}]))]}
+                                        "test/hanoidbdb-backend"}]))]}
     ]}]}]}.

 setup() ->
     application:load(sasl),
-    application:set_env(sasl, sasl_error_logger, {file, "riak_kv_hanoidb_backend_eqc_sasl.log"}),
+    application:set_env(sasl, sasl_error_logger, {file, "riak_kv_hanoidbdb_backend_eqc_sasl.log"}),
     error_logger:tty(false),
-    error_logger:logfile({open, "riak_kv_hanoidb_backend_eqc.log"}),
+    error_logger:logfile({open, "riak_kv_hanoidbdb_backend_eqc.log"}),

     ok.

 cleanup(_) ->
-    ?_assertCmd("rm -rf test/hanoidb-backend").
+    ?_assertCmd("rm -rf test/hanoidbdb-backend").

 -endif. % EQC
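The other systematic change in this file is the record rename from `#btree_range{}` to `#key_range{}`, which runs through every range query the backend builds. Given an open `Tree` handle, a bucket scan under the new name looks roughly like this (a sketch mirroring `to_key_range({bucket, ...})` above):

```erlang
%% Sketch: fold all object keys of one bucket with the renamed record.
Range = #key_range{ from_key       = to_object_key(<<"abc">>, <<>>),
                    from_inclusive = true,
                    to_key         = to_object_key(<<"abc", 0>>, <<>>),
                    to_inclusive   = false },
hanoidb:fold_range(Tree, fun(K, _V, Acc) -> [K | Acc] end, [], Range).
```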
@ -1,6 +1,6 @@
|
||||||
%% ----------------------------------------------------------------------------
|
%% ----------------------------------------------------------------------------
|
||||||
%%
|
%%
|
||||||
%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
|
%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
|
||||||
%%
|
%%
|
||||||
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
|
%% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
|
||||||
%% http://trifork.com/ info@trifork.com
|
%% http://trifork.com/ info@trifork.com
|
||||||
|
@ -23,7 +23,7 @@
|
||||||
%% ----------------------------------------------------------------------------
|
%% ----------------------------------------------------------------------------
|
||||||
|
|
||||||
%% @Doc Drive a set of LSM BTrees
|
%% @Doc Drive a set of LSM BTrees
|
||||||
-module(hanoi_drv).
|
-module(hanoidb_drv).
|
||||||
|
|
||||||
-behaviour(gen_server).
|
-behaviour(gen_server).
|
||||||
|
|
||||||
|
@ -86,7 +86,7 @@ init([]) ->
|
||||||
{ok, #state{}}.
|
{ok, #state{}}.
|
||||||
|
|
||||||
handle_call({open, N}, _, #state { btrees = D} = State) ->
|
handle_call({open, N}, _, #state { btrees = D} = State) ->
|
||||||
case hanoi:open(N) of
|
case hanoidb:open(N) of
|
||||||
{ok, Tree} ->
|
{ok, Tree} ->
|
||||||
{reply, ok, State#state { btrees = dict:store(N, Tree, D)}};
|
{reply, ok, State#state { btrees = dict:store(N, Tree, D)}};
|
||||||
Otherwise ->
|
Otherwise ->
|
||||||
|
@ -94,7 +94,7 @@ handle_call({open, N}, _, #state { btrees = D} = State) ->
|
||||||
end;
|
end;
|
||||||
handle_call({close, N}, _, #state { btrees = D} = State) ->
|
handle_call({close, N}, _, #state { btrees = D} = State) ->
|
||||||
Tree = dict:fetch(N, D),
|
Tree = dict:fetch(N, D),
|
||||||
case hanoi:close(Tree) of
|
case hanoidb:close(Tree) of
|
||||||
ok ->
|
ok ->
|
||||||
{reply, ok, State#state { btrees = dict:erase(N, D)}};
|
{reply, ok, State#state { btrees = dict:erase(N, D)}};
|
||||||
Otherwise ->
|
Otherwise ->
|
||||||
|
@ -104,11 +104,11 @@ handle_call({fold_range, Name, Fun, Acc0, Range},
|
||||||
_From,
|
_From,
|
||||||
#state { btrees = D } = State) ->
|
#state { btrees = D } = State) ->
|
||||||
Tree = dict:fetch(Name, D),
|
Tree = dict:fetch(Name, D),
|
||||||
Result = hanoi:fold_range(Tree, Fun, Acc0, Range),
|
Result = hanoidb:fold_range(Tree, Fun, Acc0, Range),
|
||||||
{reply, Result, State};
|
{reply, Result, State};
|
||||||
handle_call({put, N, K, V}, _, #state { btrees = D} = State) ->
|
handle_call({put, N, K, V}, _, #state { btrees = D} = State) ->
|
||||||
Tree = dict:fetch(N, D),
|
Tree = dict:fetch(N, D),
|
||||||
case hanoi:put(Tree, K, V) of
|
case hanoidb:put(Tree, K, V) of
|
||||||
ok ->
|
ok ->
|
||||||
{reply, ok, State};
|
{reply, ok, State};
|
||||||
Other ->
|
Other ->
|
||||||
|
@@ -116,14 +116,14 @@ handle_call({put, N, K, V}, _, #state { btrees = D} = State) ->
     end;
 handle_call({delete_exist, N, K}, _, #state { btrees = D} = State) ->
     Tree = dict:fetch(N, D),
-    Reply = hanoi:delete(Tree, K),
+    Reply = hanoidb:delete(Tree, K),
     {reply, Reply, State};
 handle_call({get, N, K}, _, #state { btrees = D} = State) ->
     Tree = dict:fetch(N, D),
-    Reply = hanoi:get(Tree, K),
+    Reply = hanoidb:get(Tree, K),
     {reply, Reply, State};
 handle_call(stop, _, #state{ btrees = D } = State ) ->
-    [ hanoi:close(Tree) || {_,Tree} <- dict:to_list(D) ],
+    [ hanoidb:close(Tree) || {_,Tree} <- dict:to_list(D) ],
     {stop, normal, ok, State};
 handle_call(_Request, _From, State) ->
     Reply = ok,
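The hunks above complete the rename inside the driver: every delegation now targets `hanoidb` instead of `hanoi`, keyed by tree name. A minimal usage sketch follows; it assumes `hanoidb_drv` exports one-line wrappers (`open/1`, `put/3`, `get/2`) around the `handle_call` clauses shown here, since only `start_link/0`, `stop/0` and `close/1` are visible elsewhere in this changeset:

```erlang
%% Usage sketch under stated assumptions: open/1, put/3 and get/2 are
%% presumed wrappers for the handle_call clauses above; only start_link/0,
%% stop/0 and close/1 appear in the test suites below.
{ok, _Pid} = hanoidb_drv:start_link(),
ok = hanoidb_drv:open("sketch-tree"),
ok = hanoidb_drv:put("sketch-tree", <<"key">>, <<"value">>),
{ok, <<"value">>} = hanoidb_drv:get("sketch-tree", <<"key">>),
ok = hanoidb_drv:close("sketch-tree"),
hanoidb_drv:stop().
```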
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,7 +22,7 @@
 %%
 %% ----------------------------------------------------------------------------

--module(hanoi_merger_tests).
+-module(hanoidb_merger_tests).

 -ifdef(TEST).
 -include_lib("proper/include/proper.hrl").
@@ -37,26 +37,26 @@ merge_test() ->
     file:delete("test2"),
     file:delete("test3"),

-    {ok, BT1} = hanoi_writer:open("test1"),
+    {ok, BT1} = hanoidb_writer:open("test1"),
     lists:foldl(fun(N,_) ->
-                        ok = hanoi_writer:add(BT1, <<N:128>>, <<"data",N:128>>)
+                        ok = hanoidb_writer:add(BT1, <<N:128>>, <<"data",N:128>>)
                 end,
                 ok,
                 lists:seq(1,10000,2)),
-    ok = hanoi_writer:close(BT1),
+    ok = hanoidb_writer:close(BT1),


-    {ok, BT2} = hanoi_writer:open("test2"),
+    {ok, BT2} = hanoidb_writer:open("test2"),
     lists:foldl(fun(N,_) ->
-                        ok = hanoi_writer:add(BT2, <<N:128>>, <<"data",N:128>>)
+                        ok = hanoidb_writer:add(BT2, <<N:128>>, <<"data",N:128>>)
                 end,
                 ok,
                 lists:seq(2,5001,1)),
-    ok = hanoi_writer:close(BT2),
+    ok = hanoidb_writer:close(BT2),


     self() ! {step, {self(), none}, 2000000000},
-    {Time,{ok,Count}} = timer:tc(hanoi_merger, merge, ["test1", "test2", "test3", 10000, true, []]),
+    {Time,{ok,Count}} = timer:tc(hanoidb_merger, merge, ["test1", "test2", "test3", 10000, true, []]),

     error_logger:info_msg("time to merge: ~p/sec (time=~p, count=~p)~n", [1000000/(Time/Count), Time/1000000, Count]),
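For readers of the timing line above: `timer:tc/3` returns `{MicroSeconds, Result}`, so the log message works out to merged key/value pairs per second and elapsed wall-clock seconds. With illustrative numbers:

```erlang
%% Illustrative arithmetic only, not measured output. timer:tc/3 reports
%% microseconds, e.g. Time = 5000000 and Count = 10000 merged pairs:
1000000 / (5000000 / 10000).  %% = 2000.0 pairs per second
5000000 / 1000000.            %% = 5.0 seconds of wall-clock time
```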
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,10 +22,10 @@
 %%
 %% ----------------------------------------------------------------------------

--module(hanoi_tests).
+-module(hanoidb_tests).

--include("include/hanoi.hrl").
--include("src/hanoi.hrl").
+-include("include/hanoidb.hrl").
+-include("src/hanoidb.hrl").

 -ifdef(TEST).
 -ifdef(TRIQ).
@@ -50,7 +50,7 @@
 -record(tree, { elements = dict:new() }).
 -record(state, { open = dict:new(),
                  closed = dict:new() }).
--define(SERVER, hanoi_drv).
+-define(SERVER, hanoidb_drv).

 full_test_() ->
     {setup,
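`full_test_/0` is an eunit test generator; its `{setup, ...}` tuple pairs a setup/teardown with the tests it wraps. The concrete funs and test list sit outside the lines changed here, so the following is a shape sketch only:

```erlang
%% Shape sketch -- the real setup/teardown funs and test list are not part
%% of this diff. ?_test/1 is eunit's deferred-test macro.
full_test_() ->
    {setup,
     fun() -> hanoidb_drv:start_link() end,   % setup
     fun(_) -> hanoidb_drv:stop() end,        % teardown
     [?_test(test_tree_simple_1())]}.         % wrapped tests
```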
@@ -182,7 +182,7 @@ precondition(#state { open = Open, closed = Closed },
              {call, ?SERVER, close, [Name]}) ->
     (dict:is_key(Name, Open)) and (not dict:is_key(Name, Closed)).

-is_valid_range(#btree_range{ from_key=FromKey, from_inclusive=FromIncl,
+is_valid_range(#key_range{ from_key=FromKey, from_inclusive=FromIncl,
                              to_key=ToKey, to_inclusive=ToIncl,
                              limit=Limit })
   when
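The switch from `#btree_range{}` to `#key_range{}` is the one record-level API change in this commit. Spelled out with the fields `is_valid_range/1` matches on (values are illustrative; the record and its defaults live in the included `hanoidb.hrl`):

```erlang
%% Illustrative values; the field names are exactly those matched by
%% is_valid_range/1 above. Defaults come from the included hanoidb.hrl.
Range = #key_range{ from_key       = <<"a">>,
                    from_inclusive = true,
                    to_key         = <<"z">>,
                    to_inclusive   = false,
                    limit          = 100 }.
```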
@@ -268,9 +268,9 @@ prop_dict_agree() ->
     ?FORALL(Cmds, commands(?MODULE),
             ?TRAPEXIT(
                begin
-                   hanoi_drv:start_link(),
+                   hanoidb_drv:start_link(),
                    {History,State,Result} = run_commands(?MODULE, Cmds),
-                   hanoi_drv:stop(),
+                   hanoidb_drv:stop(),
                    cleanup_test_trees(State),
                    ?WHENFAIL(io:format("History: ~w\nState: ~w\nResult: ~w\n",
                                        [History,State,Result]),
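This property drives the renamed `hanoidb_drv` server with generated command sequences and checks each run against the model dict. Outside the eunit wrapper it can be run directly through PropEr's usual entry point, assuming the property is exported:

```erlang
%% Run the stateful property for 100 generated command sequences;
%% proper:quickcheck/2 returns true when all runs pass.
true = proper:quickcheck(hanoidb_tests:prop_dict_agree(), 100).
```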
@@ -280,31 +280,31 @@ prop_dict_agree() ->
 %% UNIT TESTS
 %% ----------------------------------------------------------------------
 test_tree_simple_1() ->
-    {ok, Tree} = hanoi:open("simple"),
-    ok = hanoi:put(Tree, <<>>, <<"data", 77:128>>),
-    {ok, <<"data", 77:128>>} = hanoi:get(Tree, <<>>),
-    ok = hanoi:close(Tree).
+    {ok, Tree} = hanoidb:open("simple"),
+    ok = hanoidb:put(Tree, <<>>, <<"data", 77:128>>),
+    {ok, <<"data", 77:128>>} = hanoidb:get(Tree, <<>>),
+    ok = hanoidb:close(Tree).

 test_tree_simple_2() ->
-    {ok, Tree} = hanoi:open("simple"),
-    ok = hanoi:put(Tree, <<"ã">>, <<"µ">>),
-    ok = hanoi:delete(Tree, <<"ã">>),
-    ok = hanoi:close(Tree).
+    {ok, Tree} = hanoidb:open("simple"),
+    ok = hanoidb:put(Tree, <<"ã">>, <<"µ">>),
+    ok = hanoidb:delete(Tree, <<"ã">>),
+    ok = hanoidb:close(Tree).

 test_tree_simple_4() ->
     Key = <<56,11,62,42,35,163,16,100,9,224,8,228,130,94,198,2,126,117,243,
             1,122,175,79,159,212,177,30,153,71,91,85,233,41,199,190,58,3,
             173,220,9>>,
     Value = <<212,167,12,6,105,152,17,80,243>>,
-    {ok, Tree} = hanoi:open("simple"),
-    ok = hanoi:put(Tree, Key, Value),
-    ?assertEqual({ok, Value}, hanoi:get(Tree, Key)),
-    ok = hanoi:close(Tree).
+    {ok, Tree} = hanoidb:open("simple"),
+    ok = hanoidb:put(Tree, Key, Value),
+    ?assertEqual({ok, Value}, hanoidb:get(Tree, Key)),
+    ok = hanoidb:close(Tree).

 test_tree() ->
-    {ok, Tree} = hanoi:open("simple2"),
+    {ok, Tree} = hanoidb:open("simple2"),
     lists:foldl(fun(N,_) ->
-                        ok = hanoi:put(Tree,
+                        ok = hanoidb:put(Tree,
                                        <<N:128>>, <<"data",N:128>>)
                 end,
                 ok,
@@ -312,7 +312,7 @@ test_tree() ->
     io:format(user, "INSERT DONE 1~n", []),

     lists:foldl(fun(N,_) ->
-                        ok = hanoi:put(Tree,
+                        ok = hanoidb:put(Tree,
                                        <<N:128>>, <<"data",N:128>>)
                 end,
                 ok,
@@ -321,7 +321,7 @@ test_tree() ->
     io:format(user, "INSERT DONE 2~n", []),


-    hanoi:delete(Tree, <<1500:128>>),
+    hanoidb:delete(Tree, <<1500:128>>),

     io:format(user, "INSERT DONE 3~n", []),

@@ -330,17 +330,17 @@ test_tree() ->
     error_logger:info_msg("time to fold: ~p/sec (time=~p, count=~p)~n", [1000000/(Time/Count), Time/1000000, Count]),


-    ok = hanoi:close(Tree).
+    ok = hanoidb:close(Tree).

 run_fold(Tree,From,To) ->
-    {_, Count} = hanoi:fold_range(Tree,
+    {_, Count} = hanoidb:fold_range(Tree,
                           fun(<<N:128>>,_Value, {N, C}) ->
                                   {N + 1, C + 1};
                              (<<1501:128>>,_Value, {1500, C}) ->
                                   {1502, C + 1}
                           end,
                           {From, 0},
-                          #btree_range{from_key= <<From:128>>, to_key= <<(To+1):128>>}),
+                          #key_range{from_key= <<From:128>>, to_key= <<(To+1):128>>}),
     {ok, Count}.

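One detail of `run_fold/3` worth calling out: the upper bound is passed as `<<(To+1):128>>`, which yields an inclusive scan of `[From, To]` only if `to_key` behaves as an exclusive bound by default; the record defaults that decide this live in `hanoidb.hrl`, outside this diff. A counting fold in the same style, as a sketch under that assumption:

```erlang
%% Hypothetical helper, not part of this changeset: counts pairs in
%% [From, To], assuming to_key is an exclusive bound by default.
count_range(Tree, From, To) ->
    hanoidb:fold_range(Tree,
                       fun(_Key, _Value, Acc) -> Acc + 1 end,
                       0,
                       #key_range{ from_key = <<From:128>>,
                                   to_key   = <<(To + 1):128>> }).
```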
@@ -376,7 +376,7 @@ cmd_sync_range_args(#state { open = Open }) ->
     ?LET(Tree, g_non_empty_btree(Open),
          ?LET({K1, K2}, {g_existing_key(Tree, Open),
                          g_existing_key(Tree, Open)},
-              [Tree, #btree_range{from_key=K1, to_key=K2}])).
+              [Tree, #key_range{from_key=K1, to_key=K2}])).

 cmd_sync_fold_range_args(State) ->
     ?LET([Tree, Range], cmd_sync_range_args(State),
@@ -1,6 +1,6 @@
 %% ----------------------------------------------------------------------------
 %%
-%% hanoi: LSM-trees (Log-Structured Merge Trees) Indexed Storage
+%% hanoidb: LSM-trees (Log-Structured Merge Trees) Indexed Storage
 %%
 %% Copyright 2011-2012 (c) Trifork A/S. All Rights Reserved.
 %% http://trifork.com/ info@trifork.com
@@ -22,28 +22,28 @@
 %%
 %% ----------------------------------------------------------------------------

--module(hanoi_writer_tests).
+-module(hanoidb_writer_tests).

 -ifdef(TEST).
 -include_lib("proper/include/proper.hrl").
 -include_lib("eunit/include/eunit.hrl").
 -endif.

--include("include/hanoi.hrl").
+-include("include/hanoidb.hrl").

 -compile(export_all).

 simple_test() ->

     file:delete("testdata"),
-    {ok, BT} = hanoi_writer:open("testdata"),
-    ok = hanoi_writer:add(BT, <<"A">>, <<"Avalue">>),
-    ok = hanoi_writer:add(BT, <<"B">>, <<"Bvalue">>),
-    ok = hanoi_writer:close(BT),
+    {ok, BT} = hanoidb_writer:open("testdata"),
+    ok = hanoidb_writer:add(BT, <<"A">>, <<"Avalue">>),
+    ok = hanoidb_writer:add(BT, <<"B">>, <<"Bvalue">>),
+    ok = hanoidb_writer:close(BT),

-    {ok, IN} = hanoi_reader:open("testdata"),
-    {ok, <<"Avalue">>} = hanoi_reader:lookup(IN, <<"A">>),
-    ok = hanoi_reader:close(IN),
+    {ok, IN} = hanoidb_reader:open("testdata"),
+    {ok, <<"Avalue">>} = hanoidb_reader:lookup(IN, <<"A">>),
+    ok = hanoidb_reader:close(IN),

     ok = file:delete("testdata").

@@ -51,7 +51,7 @@ simple_test() ->
 simple1_test() ->

     file:delete("testdata"),
-    {ok, BT} = hanoi_writer:open("testdata", [{block_size, 1024}]),
+    {ok, BT} = hanoidb_writer:open("testdata", [{block_size, 1024}]),

     Max = 1024,
     Seq = lists:seq(0, Max),
@@ -60,22 +60,22 @@ simple1_test() ->
           fun() ->
                   lists:foreach(
                     fun(Int) ->
-                            ok = hanoi_writer:add(BT, <<Int:128>>, <<"valuevalue/", Int:128>>)
+                            ok = hanoidb_writer:add(BT, <<Int:128>>, <<"valuevalue/", Int:128>>)
                     end,
                     Seq),
-                  ok = hanoi_writer:close(BT)
+                  ok = hanoidb_writer:close(BT)
           end,
           []),

     error_logger:info_msg("time to insert: ~p/sec~n", [1000000/(Time1/Max)]),

-    {ok, IN} = hanoi_reader:open("testdata"),
+    {ok, IN} = hanoidb_reader:open("testdata"),
     Middle = Max div 2,
-    {ok, <<"valuevalue/", Middle:128>>} = hanoi_reader:lookup(IN, <<Middle:128>>),
+    {ok, <<"valuevalue/", Middle:128>>} = hanoidb_reader:lookup(IN, <<Middle:128>>),


     {Time2,Count} = timer:tc(
-                      fun() -> hanoi_reader:fold(fun(Key, <<"valuevalue/", Key/binary>>, N) ->
+                      fun() -> hanoidb_reader:fold(fun(Key, <<"valuevalue/", Key/binary>>, N) ->
                                                          N+1
                                                  end,
                                                  0,
@@ -88,12 +88,12 @@ simple1_test() ->
     Max = Count-1,

     {Time3,{done,Count2}} = timer:tc(
-                      fun() -> hanoi_reader:range_fold(fun(Key, <<"valuevalue/", Key/binary>>, N) ->
+                      fun() -> hanoidb_reader:range_fold(fun(Key, <<"valuevalue/", Key/binary>>, N) ->
                                                                N+1
                                                        end,
                                                        0,
                                                        IN,
-                                                       #btree_range{ from_key= <<>>, to_key=undefined })
+                                                       #key_range{ from_key= <<>>, to_key=undefined })
                       end,
                       []),

@@ -103,5 +103,5 @@ simple1_test() ->

     Max = Count2-1,

-    ok = hanoi_reader:close(IN).
+    ok = hanoidb_reader:close(IN).
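Taken together, the rename is mechanical but breaking for embedders: module prefixes move from `hanoi*` to `hanoidb*`, and `#btree_range{}` becomes `#key_range{}`. A minimal before/after sketch, using only calls exercised in the test suites above (tree and variable names are illustrative):

```erlang
%% Before this commit (illustrative):
{ok, T} = hanoi:open("data"),
Count = hanoi:fold_range(T, fun(_K, _V, N) -> N + 1 end, 0,
                         #btree_range{ from_key = <<>>, to_key = undefined }),
ok = hanoi:close(T).

%% After this commit, the same code reads:
{ok, T} = hanoidb:open("data"),
Count = hanoidb:fold_range(T, fun(_K, _V, N) -> N + 1 end, 0,
                           #key_range{ from_key = <<>>, to_key = undefined }),
ok = hanoidb:close(T).
```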