Hooray, all eunit tests including EQC pass!

Scott Lystig Fritchie 2016-02-10 19:35:52 +09:00
parent 7c39af5bb7
commit 943e23e050
10 changed files with 37 additions and 105 deletions

@ -1937,8 +1937,7 @@ react_to_env_C103(#projection_v1{epoch_number=_Epoch_newprop} = _P_newprop,
?REACT({c103, ?LINE,
[{current_epoch, P_current#projection_v1.epoch_number},
{none_projection_epoch, P_none#projection_v1.epoch_number}]}),
io:format(user, "SET add_admin_down(~w) at ~w TODO current_epoch ~w none_proj_epoch ~w =====================================\n", [MyName, time(), P_current#projection_v1.epoch_number, P_none#projection_v1.epoch_number]),
%% io:format(user, "SET add_admin_down(~w) at ~w =====================================\n", [MyName, time()]),
io:format(user, "SET add_admin_down(~w) at ~w current_epoch ~w none_proj_epoch ~w =====================================\n", [MyName, time(), P_current#projection_v1.epoch_number, P_none#projection_v1.epoch_number]),
machi_fitness:add_admin_down(S#ch_mgr.fitness_svr, MyName, []),
timer:sleep(5*1000),
io:format(user, "SET delete_admin_down(~w) at ~w =====================================\n", [MyName, time()]),
@ -2968,8 +2967,7 @@ zerf_find_last_annotated(FLU, MajoritySize, S) ->
end.
perhaps_verbose_c111(P_latest2, S) ->
case true of
%%TODO put me back: case proplists:get_value(private_write_verbose, S#ch_mgr.opts) of
case proplists:get_value(private_write_verbose, S#ch_mgr.opts) of
true ->
Dbg2X = lists:keydelete(react, 1,
P_latest2#projection_v1.dbg2) ++
@ -2977,9 +2975,8 @@ perhaps_verbose_c111(P_latest2, S) ->
P_latest2x = P_latest2#projection_v1{dbg2=Dbg2X}, % limit verbose len.
Last2 = get(last_verbose),
Summ2 = machi_projection:make_summary(P_latest2x),
%% if P_latest2#projection_v1.upi == [],
%% (S#ch_mgr.proj)#projection_v1.upi /= [] ->
if true ->
if P_latest2#projection_v1.upi == [],
(S#ch_mgr.proj)#projection_v1.upi /= [] ->
<<CSumRep:4/binary,_/binary>> =
P_latest2#projection_v1.epoch_csum,
io:format(user, "~s CONFIRM epoch ~w ~w upi ~w rep ~w by ~w\n", [machi_util:pretty_time(), (S#ch_mgr.proj)#projection_v1.epoch_number, CSumRep, P_latest2#projection_v1.upi, P_latest2#projection_v1.repairing, S#ch_mgr.name]);

@ -105,8 +105,6 @@ repair(ap_mode=ConsistencyMode, Src, Repairing, UPI, MembersDict, ETS, Opts) ->
RepairMode = proplists:get_value(repair_mode, Opts, repair),
Verb = proplists:get_value(verbose, Opts, false),
RepairId = proplists:get_value(repair_id, Opts, id1),
erlang:display(wtf),
%% io:format(user, "TODO: ~p\n", [{error, {What, Why, Stack}}]),
Res = try
_ = [begin
{ok, Proxy} = machi_proxy_flu1_client:start_link(P),
@ -129,7 +127,6 @@ erlang:display(wtf),
{ok, EpochID} = machi_proxy_flu1_client:get_epoch_id(
SrcProxy, ?SHORT_TIMEOUT),
%% ?VERB("Make repair directives: "),
erlang:display(yo1),
Ds =
[{File, make_repair_directives(
ConsistencyMode, RepairMode, File, Size, EpochID,
@ -149,21 +146,16 @@ erlang:display(yo1),
end || FLU <- OurFLUs],
%% ?VERB("Execute repair directives: "),
erlang:display(yo1),
ok = execute_repair_directives(ConsistencyMode, Ds, Src, EpochID,
Verb, OurFLUs, ProxiesDict, ETS),
erlang:display(yo2),
%% ?VERB(" done\n"),
lager:info("Repair ~w repair directives finished\n", [RepairId]),
ok
catch
What:Why ->
io:format(user, "yo3 ~p ~p\n", [What,Why]),
Stack = erlang:get_stacktrace(),
io:format(user, "yo3 ~p\n", [Stack]),
{error, {What, Why, Stack}}
after
erlang:display(yo4),
[(catch machi_proxy_flu1_client:quit(Pid)) ||
Pid <- orddict:to_list(get(proxies_dict))]
end,
@ -244,7 +236,6 @@ make_repair_directives(ConsistencyMode, RepairMode, File, Size, _EpochID,
make_repair_directives2(C2, ConsistencyMode, RepairMode,
File, Verb, Src, FLUs, ProxiesDict, ETS) ->
?VERB(".1"),
make_repair_directives3(C2, ConsistencyMode, RepairMode,
File, Verb, Src, FLUs, ProxiesDict, ETS, []).
@ -286,7 +277,6 @@ make_repair_directives3([{Offset, Size, CSum, _FLU}=A|Rest0],
end || {__Offset, __Size, __CSum, FLU} <- As],
exit({todo_repair_sanity_check, ?LINE, File, Offset, {as,As}, {qq,QQ}})
%% exit({todo_repair_sanity_check, ?LINE, File, Offset, As})
end,
%% List construction guarantees us that there's at least one ?MAX_OFFSET
%% item remains. Sort order + our "taking" of all exact Offset+Size
@ -339,17 +329,17 @@ execute_repair_directives(ap_mode=_ConsistencyMode, Ds, _Src, EpochID, Verb,
{ProxiesDict, EpochID, Verb, ETS}, Ds),
ok.
execute_repair_directive({File, Cmds}, {ProxiesDict, EpochID, Verb, ETS}=Acc) ->
execute_repair_directive({File, Cmds}, {ProxiesDict, EpochID, _Verb, ETS}=Acc) ->
EtsKeys = [{in_files, t_in_files}, {in_chunks, t_in_chunks},
{in_bytes, t_in_bytes}, {out_files, t_out_files},
{out_chunks, t_out_chunks}, {out_bytes, t_out_bytes}],
[ets:insert(ETS, {L_K, 0}) || {L_K, _T_K} <- EtsKeys],
F = fun({copy, {Offset, Size, TaggedCSum, MySrc}, MyDsts}, Acc2) ->
SrcP = orddict:fetch(MySrc, ProxiesDict),
case ets:lookup_element(ETS, in_chunks, 2) rem 100 of
0 -> ?VERB(".2", []);
_ -> ok
end,
%% case ets:lookup_element(ETS, in_chunks, 2) rem 100 of
%% 0 -> ?VERB(".2", []);
%% _ -> ok
%% end,
_T1 = os:timestamp(),
%% TODO: support case multiple written or trimmed chunks returned
NSInfo = undefined,
@ -391,9 +381,7 @@ execute_repair_directive({File, Cmds}, {ProxiesDict, EpochID, Verb, ETS}=Acc) ->
Acc2
end
end,
erlang:display({yo,?LINE}),
ok = lists:foldl(F, ok, Cmds),
erlang:display({yo,?LINE}),
%% Copy this file's stats to the total counts.
_ = [ets:update_counter(ETS, T_K, ets:lookup_element(ETS, L_K, 2)) ||
{L_K, T_K} <- EtsKeys],

@ -299,20 +299,16 @@ do_append_head3(NSInfo, Prefix,
case ?FLU_PC:append_chunk(Proxy, NSInfo, EpochID,
Prefix, Chunk, CSum, Opts, ?TIMEOUT) of
{ok, {Offset, _Size, File}=_X} ->
io:format(user, "CLNT append_chunk: head ~w ok\n ~p\n hd ~p rest ~p epoch ~P\n", [HeadFLU, _X, HeadFLU, RestFLUs, EpochID, 8]),
do_wr_app_midtail(RestFLUs, NSInfo, Prefix,
File, Offset, Chunk, CSum, Opts,
[HeadFLU], 0, STime, TO, append, S);
{error, bad_checksum}=BadCS ->
io:format(user, "CLNT append_chunk: head ~w BAD CS\n", [HeadFLU]),
{reply, BadCS, S};
{error, Retry}
when Retry == partition; Retry == bad_epoch; Retry == wedged ->
io:format(user, "CLNT append_chunk: head ~w error ~p\n", [HeadFLU, Retry]),
do_append_head(NSInfo, Prefix,
Chunk, CSum, Opts, Depth, STime, TO, S);
{error, written} ->
io:format(user, "CLNT append_chunk: head ~w Written\n", [HeadFLU]),
%% Implicit sequencing + this error = we don't know where this
%% written block is. But we lost a race. Repeat, with a new
%% sequencer assignment.
@ -391,32 +387,26 @@ do_wr_app_midtail2([FLU|RestFLUs]=FLUs, NSInfo,
CSum, Opts, Ws, Depth, STime, TO, MyOp,
#state{epoch_id=EpochID, proxies_dict=PD}=S) ->
Proxy = orddict:fetch(FLU, PD),
io:format(user, "CLNT append_chunk: mid/tail ~w\n", [FLU]),
case ?FLU_PC:write_chunk(Proxy, NSInfo, EpochID, File, Offset, Chunk, CSum, ?TIMEOUT) of
ok ->
io:format(user, "CLNT append_chunk: mid/tail ~w ok\n", [FLU]),
do_wr_app_midtail2(RestFLUs, NSInfo, Prefix,
File, Offset, Chunk,
CSum, Opts, [FLU|Ws], Depth, STime, TO, MyOp, S);
{error, bad_checksum}=BadCS ->
io:format(user, "CLNT append_chunk: mid/tail ~w BAD CS\n", [FLU]),
%% TODO: alternate strategy?
{reply, BadCS, S};
{error, Retry}
when Retry == partition; Retry == bad_epoch; Retry == wedged ->
io:format(user, "CLNT append_chunk: mid/tail ~w error ~p\n", [FLU, Retry]),
do_wr_app_midtail(FLUs, NSInfo, Prefix,
File, Offset, Chunk,
CSum, Opts, Ws, Depth, STime, TO, MyOp, S);
{error, written} ->
io:format(user, "CLNT append_chunk: mid/tail ~w WRITTEN\n", [FLU]),
%% We know what the chunk ought to be, so jump to the
%% middle of read-repair.
Resume = {append, Offset, iolist_size(Chunk), File},
do_repair_chunk(FLUs, Resume, Chunk, CSum, [], NSInfo, File, Offset,
iolist_size(Chunk), Depth, STime, S);
{error, trimmed} = Err ->
io:format(user, "CLNT append_chunk: mid/tail ~w TRIMMED\n", [FLU]),
%% TODO: nothing can be done
{reply, Err, S};
{error, not_written} ->
@ -735,10 +725,8 @@ read_repair2(ap_mode=ConsistencyMode,
{ok, {Chunks, _Trimmed}, GotItFrom} when is_list(Chunks) ->
%% TODO: Repair trimmed chunks
ToRepair = mutation_flus(P) -- [GotItFrom],
{Reply0, S1} = do_repair_chunks(Chunks, ToRepair, ReturnMode, [GotItFrom],
{reply, Reply, S1} = do_repair_chunks(Chunks, ToRepair, ReturnMode, [GotItFrom],
NSInfo, File, Depth, STime, S, {ok, Chunks}),
{ok, Chunks} = Reply0,
Reply = {ok, {Chunks, _Trimmed}},
{reply, Reply, S1};
{error, bad_checksum}=BadCS ->
%% TODO: alternate strategy?
@ -761,7 +749,7 @@ do_repair_chunks([], _, _, _, _, _, _, _, S, Reply) ->
{Reply, S};
do_repair_chunks([{_, Offset, Chunk, CSum}|T],
ToRepair, ReturnMode, [GotItFrom], NSInfo, File, Depth, STime, S, Reply) ->
true = _TODO_fixme = not is_atom(CSum),
true = not is_atom(CSum),
Size = iolist_size(Chunk),
case do_repair_chunk(ToRepair, ReturnMode, Chunk, CSum, [GotItFrom], NSInfo, File, Offset,
Size, Depth, STime, S) of
@ -791,12 +779,12 @@ do_repair_chunk(ToRepair, ReturnMode, Chunk, CSum, Repaired, NSInfo, File, Offse
end
end.
do_repair_chunk2([], ReturnMode, Chunk, _CSum, _Repaired, _NSInfo, File, Offset,
do_repair_chunk2([], ReturnMode, Chunk, CSum, _Repaired, _NSInfo, File, Offset,
_IgnoreSize, _Depth, _STime, S) ->
%% TODO: add stats for # of repairs, length(_Repaired)-1, etc etc?
case ReturnMode of
read ->
{reply, {ok, {[Chunk], []}}, S};
{reply, {ok, {[{File, Offset, Chunk, CSum}], []}}, S};
{append, Offset, Size, File} ->
{reply, {ok, {[{Offset, Size, File}], []}}, S}
end;
@ -943,7 +931,6 @@ update_proj2(Count, #state{bad_proj=BadProj, proxies_dict=ProxiesDict,
NewProxiesDict = ?FLU_PC:start_proxies(NewMembersDict),
%% Make crash reports shorter by getting rid of 'react' history.
P2 = P#projection_v1{dbg2=lists:keydelete(react, 1, Dbg2)},
io:format(user, "CLNT PROJ: epoch ~p ~P upi ~w ~w\n", [P2#projection_v1.epoch_number, P2#projection_v1.epoch_csum, 6, P2#projection_v1.upi, P2#projection_v1.repairing]),
S#state{bad_proj=undefined, proj=P2, epoch_id=EpochID,
members_dict=NewMembersDict, proxies_dict=NewProxiesDict};
_P ->

@ -120,7 +120,6 @@ handle_call(Else, From, S) ->
handle_cast({wedge_myself, WedgeEpochId},
#state{flu_name=FluName, wedged=Wedged_p, epoch_id=OldEpochId}=S) ->
if not Wedged_p andalso WedgeEpochId == OldEpochId ->
io:format(user, "FLU WEDGE 2: ~w : ~w ~P\n", [S#state.flu_name, true, OldEpochId, 6]),
true = ets:insert(S#state.etstab,
{epoch, {true, OldEpochId}}),
%% Tell my chain manager that it might want to react to
@ -139,7 +138,6 @@ handle_cast({wedge_state_change, Boolean, {NewEpoch, _}=NewEpochId},
undefined -> -1
end,
if NewEpoch >= OldEpoch ->
io:format(user, "FLU WEDGE 1: ~w : ~w ~P\n", [S#state.flu_name, Boolean, NewEpochId, 6]),
true = ets:insert(S#state.etstab,
{epoch, {Boolean, NewEpochId}}),
{noreply, S#state{wedged=Boolean, epoch_id=NewEpochId}};
@ -179,13 +177,8 @@ handle_append(NSInfo,
Prefix, Chunk, TCSum, Opts, FluName, EpochId) ->
Res = machi_flu_filename_mgr:find_or_make_filename_from_prefix(
FluName, EpochId, {prefix, Prefix}, NSInfo),
io:format(user, "FLU NAME: ~w + ~p got ~p\n", [FluName, Prefix, Res]),
case Res of
{file, F} ->
case re:run(F, atom_to_list(FluName) ++ ",") of
nomatch ->
io:format(user, "\n\n\t\tBAAAAAAA\n\n", []), timer:sleep(50), erlang:halt(0);
_ -> ok end,
case machi_flu_metadata_mgr:start_proxy_pid(FluName, {file, F}) of
{ok, Pid} ->
{Tag, CS} = machi_util:unmake_tagged_csum(TCSum),
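The wedge-handling hunks only lose their io:format tracing and the "BAAAAAAA" filename sanity halt; the wedge state itself still lives in the FLU's ETS table as {epoch, {Boolean, EpochId}}. A reader-side sketch of that layout (function name and the default for an unset entry are illustrative):

    is_wedged(EtsTab) ->
        case ets:lookup(EtsTab, epoch) of
            [{epoch, {Wedged_p, _EpochId}}] -> Wedged_p;
            []                              -> true    %% treat "unset" as wedged
        end.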

@ -146,16 +146,14 @@ handle_cast(Req, State) ->
handle_call({find_filename, FluName, EpochId, NSInfo, Prefix}, _From,
S = #state{ datadir = DataDir, epoch = EpochId, tid = Tid }) ->
%% Our state and the caller's epoch ids are the same. Business as usual.
io:format(user, "FMGR ~w LINE ~p\n", [FluName, ?LINE]),
File = handle_find_file(FluName, Tid, NSInfo, Prefix, DataDir),
{reply, {file, File}, S};
handle_call({find_filename, FluName, EpochId, NSInfo, Prefix}, _From, S = #state{ datadir = DataDir, tid = Tid }) ->
handle_call({find_filename, _FluName, EpochId, NSInfo, Prefix}, _From, S = #state{ datadir = DataDir, tid = Tid }) ->
%% If the epoch id in our state and the caller's epoch id were the same, it would've
%% matched the above clause. Since we're here, we know that they are different.
%% If epoch ids between our state and the caller's are different, we must increment the
%% sequence number, generate a filename and then cache it.
io:format(user, "FMGR ~w LINE ~p\n", [FluName, ?LINE]),
File = increment_and_cache_filename(Tid, DataDir, NSInfo, Prefix),
{reply, {file, File}, S#state{epoch = EpochId}};
@ -206,58 +204,31 @@ list_files(DataDir, Prefix) ->
make_filename_mgr_name(FluName) when is_atom(FluName) ->
list_to_atom(atom_to_list(FluName) ++ "_filename_mgr").
handle_find_file(FluName, Tid, #ns_info{name=NS, locator=NSLocator}=NSInfo, Prefix, DataDir) ->
N = machi_util:read_max_filenum(DataDir, NS, NSLocator, Prefix),
{File, Cleanup} = case find_file(DataDir, NSInfo, Prefix, N) of
[] ->
{find_or_make_filename(Tid, DataDir, NS, NSLocator, Prefix, N), false};
[H] -> {H, true};
[Fn | _ ] = L ->
lager:debug(
"Searching for a matching file to prefix ~p and sequence number ~p gave multiples: ~p",
[Prefix, N, L]),
{Fn, true}
end,
maybe_cleanup(Tid, {NS, NSLocator, Prefix, N}, Cleanup),
filename:basename(File).
find_or_make_filename(Tid, DataDir, NS, NSLocator, Prefix, N) ->
case ets:lookup(Tid, {NS, NSLocator, Prefix, N}) of
handle_find_file(_FluName, Tid, #ns_info{name=NS, locator=NSLocator}, Prefix, DataDir) ->
case ets:lookup(Tid, {NS, NSLocator, Prefix}) of
[] ->
N = machi_util:read_max_filenum(DataDir, NS, NSLocator, Prefix),
F = generate_filename(DataDir, NS, NSLocator, Prefix, N),
true = ets:insert_new(Tid, {{NS, NSLocator, Prefix, N}, F}),
true = ets:insert(Tid, {{NS, NSLocator, Prefix}, F}),
F;
[{_Key, File}] ->
File
end.
generate_filename(DataDir, NS, NSLocator, Prefix, N) ->
{A,B,C} = erlang:now(),
RN = case process_info(self(), registered_name) of
[] -> [];
{_,X} -> re:replace(atom_to_list(X), "_.*", "", [{return, binary}])
end,
TODO = lists:flatten([RN, ",", io_lib:format("~w,~w,~w", [A,B,C])]),
{F, _} = machi_util:make_data_filename(
{F, _Q} = machi_util:make_data_filename(
DataDir,
NS, NSLocator, Prefix,
TODO,
%% TODO put me back!!
%% generate_uuid_v4_str(),
generate_uuid_v4_str(),
N),
binary_to_list(F).
maybe_cleanup(_Tid, _Key, false) ->
ok;
maybe_cleanup(Tid, Key, true) ->
true = ets:delete(Tid, Key).
increment_and_cache_filename(Tid, DataDir, #ns_info{name=NS,locator=NSLocator}, Prefix) ->
ok = machi_util:increment_max_filenum(DataDir, NS, NSLocator, Prefix),
N = machi_util:read_max_filenum(DataDir, NS, NSLocator, Prefix),
F = generate_filename(DataDir, NS, NSLocator, Prefix, N),
true = ets:insert_new(Tid, {{NS, NSLocator, Prefix, N}, F}),
filename:basename(F).
true = ets:insert(Tid, {{NS, NSLocator, Prefix}, F}),
F.
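The filename manager's cache key shrinks from {NS, NSLocator, Prefix, N} to {NS, NSLocator, Prefix}, ets:insert/2 replaces insert_new/2 so a sequence-number rollover simply overwrites the cached name, the maybe_cleanup bookkeeping disappears, and generated names go back to generate_uuid_v4_str() instead of the registered-name-plus-now() debugging hack. Roughly, the lookup-or-generate path now behaves like this sketch (same machi_util helpers as the diff; the wrapper name is illustrative):

    cached_filename(Tid, DataDir, NS, NSLocator, Prefix) ->
        Key = {NS, NSLocator, Prefix},
        case ets:lookup(Tid, Key) of
            [{Key, File}] ->
                File;
            [] ->
                N = machi_util:read_max_filenum(DataDir, NS, NSLocator, Prefix),
                File = generate_filename(DataDir, NS, NSLocator, Prefix, N),
                true = ets:insert(Tid, {Key, File}),
                File
        end.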

@ -501,12 +501,12 @@ convert_read_chunk_resp(#mpb_readchunkresp{status='OK', chunks=PB_Chunks, trimme
csum=#mpb_chunkcsum{type=T, csum=Ck}}) ->
%% TODO: cleanup export
Csum = <<(machi_pb_translate:conv_to_csum_tag(T)):8, Ck/binary>>,
{File, Offset, Chunk, Csum}
{list_to_binary(File), Offset, Chunk, Csum}
end, PB_Chunks),
Trimmed = lists:map(fun(#mpb_chunkpos{file_name=File,
offset=Offset,
chunk_size=Size}) ->
{File, Offset, Size}
{list_to_binary(File), Offset, Size}
end, PB_Trimmed),
{ok, {Chunks, Trimmed}};
convert_read_chunk_resp(#mpb_readchunkresp{status=Status}) ->

@ -274,12 +274,12 @@ from_pb_response(#mpb_ll_response{
chunk=Bytes,
csum=#mpb_chunkcsum{type=T,csum=Ck}}) ->
Csum = <<(conv_to_csum_tag(T)):8, Ck/binary>>,
{File, Offset, Bytes, Csum}
{list_to_binary(File), Offset, Bytes, Csum}
end, PB_Chunks),
Trimmed = lists:map(fun(#mpb_chunkpos{file_name=File,
offset=Offset,
chunk_size=Size}) ->
{File, Offset, Size}
{list_to_binary(File), Offset, Size}
end, PB_Trimmed),
{ReqID, {ok, {Chunks, Trimmed}}};
_ ->

@ -121,9 +121,7 @@ append(CRIndex, Bin, #state{verbose=V}=S) ->
NSInfo = #ns_info{},
NoCSum = <<>>,
Opts1 = #append_opts{},
io:format(user, "append_chunk ~p ~P ->\n", [Prefix, Bin, 6]),
Res = (catch machi_cr_client:append_chunk(C, NSInfo, Prefix, Bin, NoCSum, Opts1, sec(1))),
io:format(user, "append_chunk ~p ~P ->\n ~p\n", [Prefix, Bin, 6, Res]),
case Res of
{ok, {_Off, Len, _FileName}=Key} ->
case ets:insert_new(?WRITTEN_TAB, {Key, Bin}) of
@ -190,7 +188,6 @@ change_partition(Partition,
[] -> ?V("## Turn OFF partition: ~w~n", [Partition]);
_ -> ?V("## Turn ON partition: ~w~n", [Partition])
end || Verbose],
io:format(user, "partition ~p\n", [Partition]),
machi_partition_simulator:always_these_partitions(Partition),
_ = machi_partition_simulator:get(FLUNames),
%% Don't wait for stable chain, tick will be executed on demand
@ -459,15 +456,14 @@ confirm_written(C) ->
assert_chunk(C, {Off, Len, FileName}=Key, Bin) ->
%% TODO: This probably a bug, read_chunk respnds with filename of `string()' type
FileNameStr = binary_to_list(FileName),
%% TODO : Use CSum instead of binary (after disuccsion about CSum is calmed down?)
NSInfo = undefined,
case (catch machi_cr_client:read_chunk(C, NSInfo, FileName, Off, Len, undefined, sec(3))) of
{ok, {[{FileNameStr, Off, Bin, _}], []}} ->
{ok, {[{FileName, Off, Bin, _}], []}} ->
ok;
{ok, Got} ->
?V("read_chunk got different binary for Key=~p~n", [Key]),
?V(" Expected: ~p~n", [{[{FileNameStr, Off, Bin, <<"CSum-NYI">>}], []}]),
?V(" Expected: ~p~n", [{[{FileName, Off, Bin, <<"CSum-NYI">>}], []}]),
?V(" Got: ~p~n", [Got]),
{error, different_binary};
{error, Reason} ->

@ -119,7 +119,7 @@ smoke_test2() ->
machi_cr_client:append_chunk(C1, NSInfo, Prefix, Chunk1, NoCSum),
{ok, {Off1,Size1,File1}} =
machi_cr_client:append_chunk(C1, NSInfo, Prefix, Chunk1, NoCSum),
BadCSum = {?CSUM_TAG_CLIENT_SHA, crypto:sha("foo")},
BadCSum = {?CSUM_TAG_CLIENT_SHA, crypto:hash(sha, "foo")},
{error, bad_checksum} =
machi_cr_client:append_chunk(C1, NSInfo, Prefix, Chunk1, BadCSum),
{ok, {[{_, Off1, Chunk1, _}], []}} =
@ -140,10 +140,10 @@ smoke_test2() ->
File1, FooOff1, Size1, undefined) || X <- [0,1,2] ],
ok = machi_flu1_client:write_chunk(Host, PortBase+0, NSInfo, EpochID,
File1, FooOff1, Chunk1, NoCSum),
{ok, {[{_, FooOff1, Chunk1, _}], []}} =
{ok, {[{File1, FooOff1, Chunk1, _}=_YY], []}} =
machi_flu1_client:read_chunk(Host, PortBase+0, NSInfo, EpochID,
File1, FooOff1, Size1, undefined),
{ok, {[{_, FooOff1, Chunk1, _}], []}} =
{ok, {[{File1, FooOff1, Chunk1, _}], []}} =
machi_cr_client:read_chunk(C1, NSInfo, File1, FooOff1, Size1, undefined),
[?assertMatch({X,{ok, {[{_, FooOff1, Chunk1, _}], []}}},
{X,machi_flu1_client:read_chunk(
@ -157,9 +157,9 @@ smoke_test2() ->
Size2 = size(Chunk2),
ok = machi_flu1_client:write_chunk(Host, PortBase+1, NSInfo, EpochID,
File1, FooOff2, Chunk2, NoCSum),
{ok, {[{_, FooOff2, Chunk2, _}], []}} =
{ok, {[{File1, FooOff2, Chunk2, _}], []}} =
machi_cr_client:read_chunk(C1, NSInfo, File1, FooOff2, Size2, undefined),
[{X,{ok, {[{_, FooOff2, Chunk2, _}], []}}} =
[{X,{ok, {[{File1, FooOff2, Chunk2, _}], []}}} =
{X,machi_flu1_client:read_chunk(
Host, PortBase+X, NSInfo, EpochID,
File1, FooOff2, Size2, undefined)} || X <- [0,1,2] ],
@ -167,7 +167,7 @@ smoke_test2() ->
%% Misc API smoke & minor regression checks
{error, bad_arg} = machi_cr_client:read_chunk(C1, NSInfo, <<"no">>,
999999999, 1, undefined),
{ok, {[{_,Off1,Chunk1,_}, {_,FooOff1,Chunk1,_}, {_,FooOff2,Chunk2,_}],
{ok, {[{File1,Off1,Chunk1,_}, {File1,FooOff1,Chunk1,_}, {File1,FooOff2,Chunk2,_}],
[]}} =
machi_cr_client:read_chunk(C1, NSInfo, File1, Off1, 88888888, undefined),
%% Checksum list return value is a primitive binary().
@ -242,7 +242,7 @@ witness_smoke_test2() ->
Chunk1, NoCSum),
{ok, {Off1,Size1,File1}} =
machi_cr_client:append_chunk(C1, NSInfo, Prefix, Chunk1, NoCSum),
BadCSum = {?CSUM_TAG_CLIENT_SHA, crypto:sha("foo")},
BadCSum = {?CSUM_TAG_CLIENT_SHA, crypto:hash(sha, "foo")},
{error, bad_checksum} =
machi_cr_client:append_chunk(C1, NSInfo, Prefix, Chunk1, BadCSum),
{ok, {[{_, Off1, Chunk1, _}], []}} =
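crypto:sha/1 was deprecated and later removed from OTP, so both smoke tests now build their deliberately bad checksum with the portable crypto:hash/2 call. A one-line sketch of the replacement (helper name is hypothetical; assumes the Machi header that defines ?CSUM_TAG_CLIENT_SHA is included):

    bad_csum(Data) ->
        {?CSUM_TAG_CLIENT_SHA, crypto:hash(sha, Data)}.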

@ -78,7 +78,7 @@ smoke_test2() ->
{iolist_to_binary(Chunk2), File2, Off2, Size2},
{iolist_to_binary(Chunk3), File3, Off3, Size3}],
[begin
File = binary_to_list(Fl),
File = Fl,
?assertMatch({ok, {[{File, Off, Ch, _}], []}},
?C:read_chunk(Clnt, Fl, Off, Sz, undefined))
end || {Ch, Fl, Off, Sz} <- Reads],
@ -105,7 +105,7 @@ smoke_test2() ->
[begin
{ok, {[], Trimmed}} =
?C:read_chunk(Clnt, Fl, Off, Sz, #read_opts{needs_trimmed=true}),
Filename = binary_to_list(Fl),
Filename = Fl,
?assertEqual([{Filename, Off, Sz}], Trimmed)
end || {_Ch, Fl, Off, Sz} <- Reads],