WIP: stuck, fix think-o in unanimous_report()

Scott Lystig Fritchie 2014-11-05 17:10:50 +09:00
parent c69a206039
commit 873aa749f9
2 changed files with 43 additions and 22 deletions


@@ -379,7 +379,7 @@ calc_projection(#ch_mgr{proj=LastProj, runenv=RunEnv} = S, RelativeToServer,
calc_projection(_OldThreshold, _NoPartitionThreshold, LastProj,
RelativeToServer, Dbg,
#ch_mgr{name=MyName, myflu=MyFLU, runenv=RunEnv1}=S) ->
#ch_mgr{name=MyName, runenv=RunEnv1}=S) ->
#projection{epoch_number=OldEpochNum,
all_members=All_list,
upi=OldUPI_list,
@@ -401,26 +401,22 @@ calc_projection(_OldThreshold, _NoPartitionThreshold, LastProj,
D_foo=[],
{NewUPI_list, [], RunEnv2};
{[], [H|T]} when RelativeToServer == hd(NewUPI_list) ->
%% The author is head of the UPI list. Let's see if the
%% head of the repairing list is using our projection.
%% The author is head of the UPI list. Let's see if
%% *everyone* in the UPI+repairing lists is using our
%% projection. This is to simulate a requirement that
%% a real repair process cannot take place until the chain is
%% stable, i.e. everyone is in the same epoch.
%% TODO create a real API call for fetching this info.
F = fun() -> machi_flu0:proj_read_latest(H, private) end,
case perhaps_call_t(S, Partitions, MyFLU, F) of
{ok, RemotePrivateProj} ->
if (S#ch_mgr.proj)#projection.epoch_number ==
RemotePrivateProj#projection.epoch_number
andalso
(S#ch_mgr.proj)#projection.epoch_csum ==
RemotePrivateProj#projection.epoch_csum ->
D_foo=[{repair_airquote_done, {we_agree, RemotePrivateProj#projection.epoch_number}}],
{NewUPI_list ++ [H], T, RunEnv2};
true ->
SameEpoch_p = check_latest_private_projections(
tl(NewUPI_list) ++ Repairing_list2,
S#ch_mgr.proj, Partitions, S),
if not SameEpoch_p ->
D_foo=[],
{NewUPI_list, OldRepairing_list, RunEnv2}
end;
_ ->
D_foo=[],
{NewUPI_list, OldRepairing_list, RunEnv2}
{NewUPI_list, OldRepairing_list, RunEnv2};
true ->
D_foo=[{repair_airquote_done, {we_agree, (S#ch_mgr.proj)#projection.epoch_number}}],
{NewUPI_list ++ [H], T, RunEnv2}
end;
{_, _} ->
D_foo=[],
@@ -449,6 +445,30 @@ D_foo=[],
Dbg ++ [{nodes_up, Up}]),
{P, S#ch_mgr{runenv=RunEnv3}}.

check_latest_private_projections(FLUs, MyProj, Partitions, S) ->
FoldFun = fun(_FLU, false) ->
false;
(FLU, true) ->
F = fun() ->
machi_flu0:proj_read_latest(FLU, private)
end,
case perhaps_call_t(S, Partitions, FLU, F) of
{ok, RemotePrivateProj} ->
if MyProj#projection.epoch_number ==
RemotePrivateProj#projection.epoch_number
andalso
MyProj#projection.epoch_csum ==
RemotePrivateProj#projection.epoch_csum ->
true;
true ->
false
end;
_ ->
false
end
end,
lists:foldl(FoldFun, true, FLUs).

calc_up_nodes(#ch_mgr{name=MyName, proj=Proj, runenv=RunEnv1}=S) ->
AllMembers = Proj#projection.all_members,
{UpNodes, Partitions, RunEnv2} =

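Note on the new helper above: check_latest_private_projections/4 folds over the given FLUs, reads each one's latest private projection through perhaps_call_t/4 and machi_flu0:proj_read_latest/2, and compares its epoch_number and epoch_csum against the local projection; the accumulator flips to false on the first disagreement or unreachable FLU, and the false-accumulator clause skips any further remote calls. A minimal stand-alone sketch of the same "everyone is in my epoch" check, with a hypothetical read_epoch/1 placeholder standing in for the real projection-store call:

    %% Sketch only, not part of this commit.  read_epoch/1 is a hypothetical
    %% stand-in for the perhaps_call_t/4 + machi_flu0:proj_read_latest/2 pair.
    -module(epoch_check_sketch).
    -export([all_in_same_epoch/2]).

    all_in_same_epoch({MyEpochNum, MyEpochCsum}, FLUs) ->
        lists:all(fun(FLU) ->
                          case read_epoch(FLU) of
                              %% Bound variables in the pattern force equality
                              %% with the local epoch number and checksum.
                              {ok, {MyEpochNum, MyEpochCsum}} -> true;
                              _                               -> false
                          end
                  end, FLUs).

    %% Placeholder so the sketch compiles; always reports epoch {0, <<>>}.
    read_epoch(_FLU) ->
        {ok, {0, <<>>}}.

lists:all/2 expresses the same short-circuit intent as the commit's foldl with a boolean accumulator: iteration stops (or, in the foldl version, remote calls stop) as soon as one member disagrees.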

@@ -297,9 +297,10 @@ unanimous_report(Epoch, Namez) ->
{UPI, not_unique, Epoch, _Else}
end
end || UPI <- UniqueUPIs],
UniqueResUPIs = [UPI || {unique, UPI} <- Res],
Tag = case lists:usort(lists:flatten(UniqueResUPIs)) ==
lists:sort(lists:flatten(UniqueResUPIs)) of
AgreedResUPI_Rs = [UPI++Repairing ||
{agreed_membership, {UPI, Repairing}} <- Res],
Tag = case lists:usort(lists:flatten(AgreedResUPI_Rs)) ==
lists:sort(lists:flatten(AgreedResUPI_Rs)) of
true ->
ok_disjoint;
false ->
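The think-o named in the commit title: judging from this hunk, Res entries are tagged either {agreed_membership, {UPI, Repairing}} or {UPI, not_unique, Epoch, _Else}, so the old comprehension [UPI || {unique, UPI} <- Res] matched nothing and the disjointness check ran over an empty list. The replacement gathers UPI ++ Repairing for every agreed membership, so the check also catches a FLU that shows up in one chain's UPI and another chain's repairing list. The tag itself keeps the usort-versus-sort idiom: flatten all memberships into one list and compare lists:usort/1 (duplicates removed) with lists:sort/1 (duplicates kept); they are equal exactly when no FLU appears twice. A toy shell illustration of that idiom (hypothetical member lists, not data from the test suite):

    1> L = lists:flatten([[a,b,c], [d,e]]), lists:usort(L) == lists:sort(L).
    true
    2> M = lists:flatten([[a,b,c], [c,d]]), lists:usort(M) == lists:sort(M).
    false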