Avoid name collisions and do some other minor cleanup.

This commit is contained in:
Gregory Burd 2012-07-24 16:39:28 -04:00
parent edf083e156
commit e59c3d7add
7 changed files with 32 additions and 21 deletions

View file

@@ -3,7 +3,7 @@
{clean_files, ["*.eunit", "ebin/*.beam"]}.
{eunit_opts, [verbose, {report, {eunit_surefire, [{dir, "."}]}}]}.
-{erl_opts, [{d,'TRIQ',true}, %{d,'DEBUG',true},
+{erl_opts, [%{d,'TRIQ',true}, %{d,'DEBUG',true},
{parse_transform, lager_transform},
fail_on_warning,
warn_unused_vars,

View file

@@ -25,7 +25,7 @@
{application, hanoidb,
[
{description, ""},
-{vsn, "1.2.0"},
+{vsn, "1.3.0"},
{registered, []},
{applications, [
kernel,

View file

@@ -22,7 +22,7 @@
%% Modified slightly by Justin Sheehy to make it a single file (incorporated
%% the array-based bitarray internally).
--module(bloom).
+-module(hanoidb_bloom).
-author("Paulo Sergio Almeida <psa@di.uminho.pt>").
-export([sbf/1, sbf/2, sbf/3, sbf/4,

View file

@@ -81,7 +81,7 @@ open(Name, Config) ->
{ok, <<RootPos:64/unsigned>>} = file:pread(File, FileInfo#file_info.size - 8, 8),
{ok, <<BloomSize:32/unsigned>>} = file:pread(File, FileInfo#file_info.size - 12, 4),
{ok, BloomData} = file:pread(File, (FileInfo#file_info.size - 12 - BloomSize), BloomSize),
-Bloom = bloom:decode(BloomData),
+Bloom = hanoidb_bloom:decode(BloomData),
%% read in the root node
Root =
@@ -268,7 +268,7 @@ close(#index{file=File}) ->
lookup(#index{file=File, root=Node, bloom=Bloom}, Key) ->
-case bloom:member(Key, Bloom) of
+case hanoidb_bloom:member(Key, Bloom) of
true ->
case lookup_in_node(File, Node, Key) of
not_found ->

View file

@@ -87,7 +87,7 @@ estimate_node_size_increment(_KVList, Key, Value)
-define(NO_COMPRESSION, 0).
-define(SNAPPY_COMPRESSION, 1).
-define(GZIP_COMPRESSION, 2).
-%-define(LZ4_COMPRESSION, 3).
+%%-define(LZ4_COMPRESSION, 3).
use_compressed(UncompressedSize, CompressedSize) when CompressedSize < UncompressedSize ->
true;
@@ -102,14 +102,14 @@ compress(snappy, Bin) ->
false ->
{?NO_COMPRESSION, Bin}
end;
-%compress(lz4, Bin) ->
-% lz4:compress(Bin)
-% case use_compressed(erlang:iolist_size(Bin), erlang:iolist_size(CompressedBin)) of
-% true ->
-% {?LZ4_COMPRESSION, CompressedBin};
-% false ->
-% {?NO_COMPRESSION, Bin}
-% end;
+%% compress(lz4, Bin) ->
+%% {ok, CompressedBin} = lz4:compress(Bin),
+%% case use_compressed(erlang:iolist_size(Bin), erlang:iolist_size(CompressedBin)) of
+%% true ->
+%% {?LZ4_COMPRESSION, CompressedBin};
+%% false ->
+%% {?NO_COMPRESSION, Bin}
+%% end;
compress(gzip, Bin) ->
CompressedBin = zlib:gzip(Bin),
case use_compressed(erlang:iolist_size(Bin), erlang:iolist_size(CompressedBin)) of
@@ -126,8 +126,8 @@ uncompress(<<?NO_COMPRESSION, Data/binary>>) ->
uncompress(<<?SNAPPY_COMPRESSION, Data/binary>>) ->
{ok, UncompressedData} = snappy:decompress(Data),
UncompressedData;
-%uncompress(<<?LZ4_COMPRESSION, Data/binary>>) ->
-% lz4:uncompress(Data);
+%%uncompress(<<?LZ4_COMPRESSION, Data/binary>>) ->
+%% lz4:uncompress(Data);
uncompress(<<?GZIP_COMPRESSION, Data/binary>>) ->
zlib:gunzip(Data).

View file

@@ -94,7 +94,7 @@ init([Name, Options]) ->
case do_open(Name, Options, [exclusive]) of
{ok, IdxFile} ->
ok = file:write(IdxFile, ?FILE_FORMAT),
-Bloom = bloom:bloom(Size),
+Bloom = hanoidb_bloom:bloom(Size),
BlockSize = hanoidb:get_opt(block_size, Options, ?NODE_SIZE),
{ok, #state{ name=Name,
index_file_pos=?FIRST_BLOCK_POS, index_file=IdxFile,
@@ -170,11 +170,11 @@ serialize(#state{ bloom=Bloom, index_file=File, index_file_pos=Position }=State)
exit({bad_position, Position, WrongPosition})
end,
ok = file:close(File),
-erlang:term_to_binary( { State#state{ index_file=closed }, bloom:encode(Bloom) } ).
+erlang:term_to_binary( { State#state{ index_file=closed }, hanoidb_bloom:encode(Bloom) } ).
deserialize(Binary) ->
{State, Bin} = erlang:binary_to_term(Binary),
-Bloom = bloom:decode(Bin),
+Bloom = hanoidb_bloom:decode(Bin),
{ok, IdxFile} = do_open(State#state.name, State#state.opts, []),
State#state{ bloom=Bloom, index_file=IdxFile }.
@@ -188,7 +188,7 @@ do_open(Name, Options, OpenOpts) ->
%% @doc flush pending nodes and write trailer
archive_nodes(#state{ nodes=[], last_node_pos=LastNodePos, last_node_size=_LastNodeSize, bloom=Bloom, index_file=IdxFile }=State) ->
-BloomBin = bloom:encode(Bloom),
+BloomBin = hanoidb_bloom:encode(Bloom),
BloomSize = byte_size(BloomBin),
RootPos =
case LastNodePos of
@@ -239,7 +239,7 @@ append_node(Level, Key, Value, #state{ nodes=[ #node{level=Level, members=List,
NewSize = NodeSize + hanoidb_util:estimate_node_size_increment(List, Key, Value),
-NewBloom = bloom:add(Key, Bloom),
+NewBloom = hanoidb_bloom:add(Key, Bloom),
{TC1, VC1} =
case Level of

View file

@@ -25,10 +25,21 @@
-module(hanoidb_writer_tests).
-ifdef(TEST).
-ifdef(TEST).
-ifdef(TRIQ).
-include_lib("triq/include/triq.hrl").
-include_lib("triq/include/triq_statem.hrl").
-else.
-include_lib("proper/include/proper.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
-ifdef(PROPER).
-behaviour(proper_statem).
-endif.
-endif.
-include("include/hanoidb.hrl").
-compile(export_all).