Add read_approx() to the PULSE model, only 5% correctness checks done
parent 572d1803d0
commit c14e1facf4

1 changed file with 34 additions and 11 deletions
@@ -45,6 +45,8 @@
 -define(QC_OUT(P),
         eqc:on_output(fun(Str, Args) -> ?QC_FMT(Str, Args) end, P)).
 
+-define(MAX_PAGES, 50000).
+
 -record(run, {
           seq,              % Sequencer
           proj,             % Projection
@@ -76,6 +78,10 @@ gen_sequencer() ->
     frequency([{100, standard},
                {50, {gen_seed(), gen_sequencer_percent(), choose(1, 2)}}]).
 
+gen_approx_page() ->
+    %% EQC can't know what pages are perhaps-written, so pick something big.
+    ?LET(I, largeint(), abs(I)).
+
 command(#state{run=Run} = S) ->
     ?LET({NumChains, ChainLen, PageSize},
          {parameter(num_chains), parameter(chain_len), parameter(page_size)},
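For contrast, a bounded generator such as the hypothetical sketch below would tie the generated page numbers to a fixed, compile-time range. The hunk above instead draws an unbounded non-negative integer and lets read_approx() (added in the final hunk) fold it onto whatever range the sequencer reports at runtime.

    %% Hypothetical alternative, NOT what this commit does: it presupposes
    %% that the interesting page range is known up front, which the comment
    %% above says EQC cannot assume.
    gen_valid_page() ->
        choose(1, ?MAX_PAGES).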
@@ -84,6 +90,8 @@ command(#state{run=Run} = S) ->
           || not S#state.is_setup] ++
          [{10, {call, ?MODULE, append, [Run, gen_page(PageSize)]}}
           || S#state.is_setup] ++
+         [{10, {call, ?MODULE, read_approx, [Run, gen_approx_page()]}}
+          || S#state.is_setup] ++
          [])).
 
 %% Precondition, checked before a command is added to the command sequence.
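With this addition, once the model has been set up, append and read_approx commands carry equal weight (10 each), while setup is only offered before the model is set up. frequency/1 picks each candidate with probability proportional to its weight; a tiny stand-alone sketch (illustrative name, assuming the EQC generators this module already uses):

    %% 'a' is chosen roughly twice as often as 'b'. Plain terms act as
    %% constant generators, just like the atom 'standard' in gen_sequencer().
    gen_ab() ->
        frequency([{20, a}, {10, b}]).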
@@ -101,6 +109,8 @@ next_state(S, Res, {call, _, setup, [NumChains, ChainLen, PageSize, _SeqType]})
                  page_size=PageSize,
                  run=Res};
 next_state(S, _, {call, _, append, _}) ->
+    S;
+next_state(S, _, {call, _, read_approx, _}) ->
     S.
 
 eqeq(X, X) -> true;
@@ -111,7 +121,14 @@ postcondition(_S, {call, _, setup, _}, #run{} = _V) ->
 postcondition(_S, {call, _, append, _}, {ok, LPN}) when is_integer(LPN) ->
     true;
 postcondition(_S, {call, _, append, _}, V) ->
-    eqeq(V, todoTODO_fixit).
+    eqeq(V, todoTODO_fixit);
+postcondition(_S, {call, _, read_approx, _}, V) ->
+    case V of
+        {ok, Pg} when is_binary(Pg) -> true;
+        error_unwritten             -> true;
+        error_trimmed               -> true;
+        _                           -> eqeq(V, todoTODO_fixit)
+    end.
 
 run_commands_on_node(_LocalOrSlave, Cmds, Seed) ->
     %% AfterTime = if LocalOrSlave == local -> 50000;
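The todoTODO_fixit atom is a deliberate placeholder: on an unexpected result the clause calls eqeq/2 rather than returning a bare false, so the offending value appears in the EQC counterexample. Only the first clause of eqeq/2 is visible in these hunks; the companion clause is presumably along these lines (a sketch, not taken from this diff):

    %% First clause (shown above): identical terms are equal.
    eqeq(X, X) -> true;
    %% Presumed second clause: return a descriptive term instead of false,
    %% so EQC prints both sides when a postcondition fails.
    eqeq(X, Y) -> {X, '/=', Y}.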
@@ -270,11 +287,10 @@ check_trace(Trace0, _Cmds, _Seed) ->
 
     %% StartMod contains {m_start, LPN, V} when a modification finished.
     %% DoneMod contains {m_end, LPN, V} when a modification finished.
 
     %% This is a clever trick: Mods contains the start & end timestamp
     %% for each modification. Use shift() by 1 usec to move all timestamps
-    %% backward 1 usec, then subtract away the original time range to leave
-    %% a 1 usec relation in time, then map() to convert it to a {m_end,...}.
+    %% forward/backward 1 usec, then subtract away the original time range to
+    %% leave a 1 usec relation in time.
     DoneMod = eqc_temporal:map(
                 fun({lpn, LPN, Pg, _Pid}) -> {m_end, LPN, Pg} end,
                 eqc_temporal:subtract(eqc_temporal:shift(1, Mods), Mods)),
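The shift-and-subtract trick can be pictured on a toy relation. The sketch below models a "relation" as a plain list of {Usec, Fact} pairs, one entry per microsecond the fact holds; it illustrates the idea only and is not the eqc_temporal representation or API.

    -module(shift_subtract_demo).
    -export([demo/0]).

    %% Move every fact forward by N microseconds.
    shift(N, Rel) -> [{T + N, F} || {T, F} <- Rel].

    %% Keep the {Usec, Fact} pairs of A that are not also in B.
    subtract(A, B) -> [X || X <- A, not lists:member(X, B)].

    demo() ->
        Pid = self(),
        %% A modification of LPN 7 is in flight from usec 10 through usec 12.
        Mods = [{T, {lpn, 7, <<"pg">>, Pid}} || T <- [10, 11, 12]],
        %% Shifting forward 1 usec and removing the originals leaves only
        %% usec 13, the single instant just after the modification finished;
        %% the real code then maps that fact to {m_end, LPN, Pg}.
        [{13, {lpn, 7, <<"pg">>, Pid}}] = subtract(shift(1, Mods), Mods),
        ok.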
@@ -427,11 +443,11 @@ make_chains(ChainLen, [H|T], SmallAcc, BigAcc) ->
 
 setup(NumChains, ChainLen, PageSize, SeqType) ->
     N = NumChains * ChainLen,
-    FLUs = corfurl_test:setup_basic_flus(N, PageSize, 50000),
+    FLUs = corfurl_test:setup_basic_flus(N, PageSize, ?MAX_PAGES),
     {ok, Seq} = corfurl_sequencer:start_link(FLUs, SeqType),
     Chains = make_chains(ChainLen, FLUs),
     %% io:format(user, "Cs = ~p\n", [Chains]),
-    Proj = corfurl:new_simple_projection(1, 1, 50000, Chains),
+    Proj = corfurl:new_simple_projection(1, 1, ?MAX_PAGES, Chains),
     #run{seq=Seq, proj=Proj, flus=FLUs}.
 
 range_ify([]) ->
@@ -474,6 +490,13 @@ append(#run{seq=Seq,proj=Proj}, Page) ->
     end).
 -endif. % TEST_TRIP_no_append_duplicates
 
+read_approx(#run{seq=Seq, proj=Proj}, SeedInt) ->
+    Max = corfurl_sequencer:get(Seq, 0),
+    %% The sequencer may be lying to us, shouganai.
+    LPN = (SeedInt rem Max) + 1,
+    ?LOG({read, LPN},
+         corfurl:read_page(Proj, LPN)).
+
 -endif. % PULSE
 -endif. % TEST
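The fold-back from an arbitrary seed to a real log position is plain modular arithmetic: the value produced by gen_approx_page() is reduced modulo the sequencer's current counter and shifted into the 1-based LPN range. A quick Erlang shell illustration, with Max standing in for the value returned by corfurl_sequencer:get(Seq, 0):

    1> Max = 5, [(Seed rem Max) + 1 || Seed <- [0, 4, 5, 12, 999999]].
    [1,5,1,3,5]

Every result lands in 1..Max, so the read always targets a page the sequencer has at least nominally handed out, even though, as the in-code comment notes, the sequencer's answer may not be trustworthy.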