This repository has been archived by the owner on Mar 5, 2024. It is now read-only.

Commit

Merge pull request #608 from helium/rg/eqc-update
Update EQC
evanmcc authored Oct 22, 2020
2 parents 6249f7b + 599459b commit 575338f
Showing 3 changed files with 120 additions and 8 deletions.
18 changes: 14 additions & 4 deletions eqc/eqc_utils.erl
@@ -5,7 +5,8 @@
ledger/1,
name/1,
maybe_output_paths/3,
ledger_vars/1
ledger_vars/1,
big_witness_hotspots/0
]).

find_challenger(ChallengerIndex, ActiveGateways) ->
@@ -47,8 +48,8 @@ name(PubkeyBin) ->
Name.

ledger(ExtraVars) ->
%% Ledger at height: 194196
%% ActiveGateway Count: 3023
%% Ledger at height: 481929
%% ActiveGateway Count: 8000
{ok, Dir} = file:get_cwd(),
%% Ensure priv dir exists
PrivDir = filename:join([Dir, "priv"]),
@@ -72,6 +73,15 @@ ledger(ExtraVars) ->
ledger_vars(Ledger) ->
blockchain_utils:vars_binary_keys_to_atoms(maps:from_list(blockchain_ledger_v1:snapshot_vars(Ledger))).

big_witness_hotspots() ->
{ok, Dir} = file:get_cwd(),
%% Ensure priv dir exists
PrivDir = filename:join([Dir, "priv"]),
ok = filelib:ensure_dir(PrivDir ++ "/"),
BigWitnessHotspotFile = filename:join([PrivDir, "big_witness_hotspots"]),
{ok, LBin} = file:read_file(BigWitnessHotspotFile),
binary_to_term(LBin).
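
Note: priv/big_witness_hotspots (added as a binary file in this commit) is read back with binary_to_term/1 and fed to the elements/1 generator below, so it is evidently a term-encoded list of hotspot addresses. A minimal sketch of how such a fixture could be regenerated from a ledger snapshot (not part of this commit; it assumes blockchain_ledger_gateway_v2:witnesses/1 returns a gateway's witness map):

%% Hypothetical helper, not in this commit: rank active gateways by witness
%% count and persist the top N addresses in the format big_witness_hotspots/0
%% expects (term_to_binary of a list of addresses).
write_big_witness_hotspots(Ledger, Count) ->
    ActiveGateways = blockchain_ledger_v1:active_gateways(Ledger),
    Ranked = lists:reverse(
                 lists:sort([{maps:size(blockchain_ledger_gateway_v2:witnesses(GW)), Addr}
                             || {Addr, GW} <- maps:to_list(ActiveGateways)])),
    Top = [Addr || {_WitnessCount, Addr} <- lists:sublist(Ranked, Count)],
    file:write_file("priv/big_witness_hotspots", term_to_binary(Top)).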

extract_ledger_tar(PrivDir, LedgerTar) ->
case filelib:is_file(LedgerTar) of
true ->
@@ -87,7 +97,7 @@ extract_ledger_tar(PrivDir, LedgerTar) ->
false ->
%% ledger tar file not found, download & extract
ok = ssl:start(),
{ok, {{_, 200, "OK"}, _, Body}} = httpc:request("https://blockchain-core.s3-us-west-1.amazonaws.com/ledger.tar.gz"),
{ok, {{_, 200, "OK"}, _, Body}} = httpc:request("https://blockchain-core.s3-us-west-1.amazonaws.com/ledger-481929.tar.gz"),
ok = file:write_file(filename:join([PrivDir, "ledger.tar.gz"]), Body),
erl_tar:extract(LedgerTar, [compressed, {cwd, PrivDir}])
end.
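
The download branch above starts ssl explicitly but also relies on the inets application (which provides httpc) being up. A standalone sketch of the same fetch-and-extract flow, assuming neither application has been started yet; it is not part of this commit:

%% Hypothetical standalone fetch, mirroring the false branch above.
fetch_ledger_snapshot(PrivDir) ->
    ok = application:ensure_started(inets),
    ok = ssl:start(),
    Url = "https://blockchain-core.s3-us-west-1.amazonaws.com/ledger-481929.tar.gz",
    {ok, {{_, 200, "OK"}, _, Body}} = httpc:request(Url),
    LedgerTar = filename:join([PrivDir, "ledger.tar.gz"]),
    ok = file:write_file(LedgerTar, Body),
    erl_tar:extract(LedgerTar, [compressed, {cwd, PrivDir}]).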
110 changes: 106 additions & 4 deletions eqc/path_v4_eqc.erl
@@ -9,11 +9,12 @@
[ledger/1,
dead_hotspots/0,
find_challenger/2,
ledger_vars/1
%% maybe_output_paths/3
ledger_vars/1,
big_witness_hotspots/0,
maybe_output_paths/3
]).

-export([prop_path_check/0]).
-export([prop_path_check/0, prop_path_top_200_witness_check/0]).

prop_path_check() ->
?FORALL({Hash, PathLimit, ChallengerIndex},
@@ -77,7 +78,7 @@
end,
PathLength = length(Path),

%% ok = maybe_output_paths(TargetPubkeyBin, Path, Time),
ok = maybe_output_paths(TargetPubkeyBin, Path, Time),

%% Checks:
%% - honor path limit
@@ -113,6 +114,107 @@
)
end).

prop_path_top_200_witness_check() ->
?FORALL({Hash, PathLimit, ChallengerIndex, TargetPubkeyBin},
{gen_hash(), gen_path_limit(), gen_challenger_index(), gen_big_witness_target()},
begin
{ok, GWCache} = blockchain_gateway_cache:start_link(),
Ledger = ledger(poc_v8_vars()),
application:set_env(blockchain, disable_score_cache, true),
{ok, _Pid} = blockchain_score_cache:start_link(),
ActiveGateways = blockchain_ledger_v1:active_gateways(Ledger),
LedgerVars = ledger_vars(Ledger),

%% use this to artificially constrain path limit for testing
%% PathLimit = 5,

%% use this to make timings more realistic for pi
%% disks, set to 1 or 2
application:set_env(blockchain, find_gateway_sim_delay, 0),

%% Overwrite poc_path_limit for checking generated path limits
Vars = maps:put(poc_path_limit, PathLimit, LedgerVars),

%% Find some challenger
{ChallengerPubkeyBin, _ChallengerLoc} = find_challenger(ChallengerIndex, ActiveGateways),

{ok, {_, TargetRandState}} = blockchain_poc_target_v3:target(ChallengerPubkeyBin, Hash, Ledger, Vars),
{Time, Path} = timer:tc(fun() ->
blockchain_poc_path_v4:build(TargetPubkeyBin,
TargetRandState,
Ledger,
block_time(),
Vars)
end),

%% change the default to true to collect and display
%% time stats at each run
case application:get_env(blockchain, collect_stats, false) of
true ->
Runs =
case get(runs) of
undefined ->
put(runs, [Time]),
[Time];
R ->
R1 = [Time | R],
put(runs, R1),
R1
end,

L = length(Runs),
Avg = trunc(lists:sum(Runs) / L),
Med = case L < 5 of
true -> no_med;
_ -> lists:nth((L div 2), lists:sort(Runs))
end,
Max = lists:max(Runs),

io:fwrite(standard_error, "build took avg ~p med ~p max ~p time ~p us\n", [Avg, Med, Max, Time]);
false ->
ok
end,
PathLength = length(Path),

ok = maybe_output_paths(TargetPubkeyBin, Path, Time),

%% Checks:
%% - honor path limit
%% - at least one element in path
%% - target is always in path
%% - we never go back to the same h3 index in path
%% - check next hop is a witness of previous gateway
C1 = PathLength =< PathLimit andalso PathLength >= 1,
C2 = length(Path) == length(lists:usort(Path)),
C3 = lists:member(TargetPubkeyBin, Path),
C4 = check_path_h3_indices(Path, ActiveGateways),
C5 = check_next_hop(Path, ActiveGateways),
C6 = check_target_and_path_members_not_dead(TargetPubkeyBin, Path),

blockchain_ledger_v1:close(Ledger),
blockchain_score_cache:stop(),
gen_server:stop(GWCache),

?WHENFAIL(begin
blockchain_ledger_v1:close(Ledger),
blockchain_score_cache:stop(),
gen_server:stop(GWCache)
end,
%% TODO: split into multiple verifiers instead of a single consolidated one
conjunction([
{verify_path_limit, C1},
{verify_minimum_one_element, C2},
{verify_target_in_path, C3},
{verify_non_vw_path, C4},
{verify_next_hop_is_witness, C5},
{verify_not_dead, C6}
])
)
end).

gen_big_witness_target() ->
elements(big_witness_hotspots()).

gen_path_limit() ->
elements([3, 4, 5, 6, 7]).
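
A minimal sketch of running the new property from an EQC-enabled Erlang shell; the module and property names come from this diff, while eqc:quickcheck/1 and eqc:numtests/2 are the standard Quviq EQC entry points, and collect_stats is the application env flag checked inside the property:

1> eqc:quickcheck(path_v4_eqc:prop_path_top_200_witness_check()).
2> application:set_env(blockchain, collect_stats, true).   %% print per-run timing stats
3> eqc:quickcheck(eqc:numtests(50, path_v4_eqc:prop_path_top_200_witness_check())).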

Binary file added priv/big_witness_hotspots

