From 471cc870c30bf294b939e94e58f9340b2ca2c298 Mon Sep 17 00:00:00 2001
From: Alex Stokes
Date: Mon, 16 Aug 2021 13:10:58 -0600
Subject: [PATCH 1/4] remove print statements

---
 tests/core/pyspec/eth2spec/test/helpers/block_processing.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/tests/core/pyspec/eth2spec/test/helpers/block_processing.py b/tests/core/pyspec/eth2spec/test/helpers/block_processing.py
index e82f62ed0..8721a772e 100644
--- a/tests/core/pyspec/eth2spec/test/helpers/block_processing.py
+++ b/tests/core/pyspec/eth2spec/test/helpers/block_processing.py
@@ -48,11 +48,9 @@ def run_block_processing_to(spec, state, block, process_name: str):
     A test prepares a pre-state by calling this function, output the pre-state,
     and it can then proceed to run the returned callable, and output a post-state.
     """
-    print(f"state.slot {state.slot} block.slot {block.slot}")
     # transition state to slot before block state transition
     if state.slot < block.slot:
         spec.process_slots(state, block.slot)
-    print(f"state.slot {state.slot} block.slot {block.slot} A")
 
     # process components of block transition
     for name, call in get_process_calls(spec).items():

From 817d2ee7bf86922329345334dcbb6ac285d58e5d Mon Sep 17 00:00:00 2001
From: Alex Stokes
Date: Mon, 16 Aug 2021 13:11:31 -0600
Subject: [PATCH 2/4] Refactor sync committee tests so rewards are verified for
 all test cases

---
 .../test_process_sync_aggregate.py            | 249 +-----------------
 .../test_process_sync_aggregate_random.py     | 165 ++++++++++++
 .../eth2spec/test/helpers/sync_committee.py   | 108 +++++++-
 3 files changed, 274 insertions(+), 248 deletions(-)
 create mode 100644 tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate_random.py

diff --git a/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate.py b/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate.py
index e4176ee58..4be6737e2 100644
--- a/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate.py
+++ b/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate.py
@@ -2,7 +2,6 @@ import random
 from eth2spec.test.helpers.block import (
     build_empty_block_for_next_slot,
 )
-from eth2spec.test.helpers.block_processing import run_block_processing_to
 from eth2spec.test.helpers.state import (
     state_transition_and_sign_block,
     transition_to,
@@ -12,60 +11,17 @@ from eth2spec.test.helpers.constants import (
 )
 from eth2spec.test.helpers.sync_committee import (
     compute_aggregate_sync_committee_signature,
-    compute_sync_committee_participant_reward_and_penalty,
-    compute_sync_committee_proposer_reward,
     compute_committee_indices,
+    get_committee_indices,
+    run_sync_committee_processing,
+    run_successful_sync_committee_test,
 )
 from eth2spec.test.context import (
-    default_activation_threshold,
-    expect_assertion_error,
-    misc_balances,
-    single_phase,
     with_altair_and_later,
-    with_custom_state,
     with_presets,
     spec_state_test,
     always_bls,
-    spec_test,
 )
-from eth2spec.utils.hash_function import hash
-
-
-def run_sync_committee_processing(spec, state, block, expect_exception=False):
-    """
-    Processes everything up to the sync committee work, then runs the sync committee work in isolation, and
-    produces a pre-state and post-state (None if exception) specifically for sync-committee processing changes.
-    """
-    # process up to the sync committee work
-    call = run_block_processing_to(spec, state, block, 'process_sync_aggregate')
-    yield 'pre', state
-    yield 'sync_aggregate', block.body.sync_aggregate
-    if expect_exception:
-        expect_assertion_error(lambda: call(state, block))
-        yield 'post', None
-    else:
-        call(state, block)
-        yield 'post', state
-
-
-def get_committee_indices(spec, state, duplicates=False):
-    """
-    This utility function allows the caller to ensure there are or are not
-    duplicate validator indices in the returned committee based on
-    the boolean ``duplicates``.
-    """
-    state = state.copy()
-    current_epoch = spec.get_current_epoch(state)
-    randao_index = (current_epoch + 1) % spec.EPOCHS_PER_HISTORICAL_VECTOR
-    while True:
-        committee = spec.get_next_sync_committee_indices(state)
-        if duplicates:
-            if len(committee) != len(set(committee)):
-                return committee
-        else:
-            if len(committee) == len(set(committee)):
-                return committee
-        state.randao_mixes[randao_index] = hash(state.randao_mixes[randao_index])
 
 
 @with_altair_and_later
@@ -177,58 +133,6 @@ def test_invalid_signature_extra_participant(spec, state):
     yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
 
 
-def validate_sync_committee_rewards(spec, pre_state, post_state, committee_indices, committee_bits, proposer_index):
-    for index in range(len(post_state.validators)):
-        reward = 0
-        penalty = 0
-        if index in committee_indices:
-            _reward, _penalty = compute_sync_committee_participant_reward_and_penalty(
-                spec,
-                pre_state,
-                index,
-                committee_indices,
-                committee_bits,
-            )
-            reward += _reward
-            penalty += _penalty
-
-        if proposer_index == index:
-            reward += compute_sync_committee_proposer_reward(
-                spec,
-                pre_state,
-                committee_indices,
-                committee_bits,
-            )
-
-        assert post_state.balances[index] == pre_state.balances[index] + reward - penalty
-
-
-def run_successful_sync_committee_test(spec, state, committee_indices, committee_bits):
-    pre_state = state.copy()
-
-    block = build_empty_block_for_next_slot(spec, state)
-    block.body.sync_aggregate = spec.SyncAggregate(
-        sync_committee_bits=committee_bits,
-        sync_committee_signature=compute_aggregate_sync_committee_signature(
-            spec,
-            state,
-            block.slot - 1,
-            [index for index, bit in zip(committee_indices, committee_bits) if bit],
-        )
-    )
-
-    yield from run_sync_committee_processing(spec, state, block)
-
-    validate_sync_committee_rewards(
-        spec,
-        pre_state,
-        state,
-        committee_indices,
-        committee_bits,
-        block.proposer_index,
-    )
-
-
 @with_altair_and_later
 @with_presets([MINIMAL], reason="to create nonduplicate committee")
 @spec_state_test
@@ -502,150 +406,3 @@ def test_proposer_in_committee_with_participation(spec, state):
         else:
             state_transition_and_sign_block(spec, state, block)
     raise AssertionError("failed to find a proposer in the sync committee set; check test setup")
-
-
-def _test_harness_for_randomized_test_case(spec, state, duplicates=False, participation_fn=None):
-    committee_indices = get_committee_indices(spec, state, duplicates=duplicates)
-
-    if participation_fn:
-        participating_indices = participation_fn(committee_indices)
-    else:
-        participating_indices = committee_indices
-
-    committee_bits = [index in participating_indices for index in committee_indices]
-    committee_size = len(committee_indices)
-    if duplicates:
-        assert committee_size > len(set(committee_indices))
-    else:
-        assert committee_size == len(set(committee_indices))
-
-    yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)
-
-
-@with_altair_and_later
-@with_presets([MAINNET], reason="to create duplicate committee")
-@spec_state_test
-def test_random_only_one_participant_with_duplicates(spec, state):
-    rng = random.Random(101)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        duplicates=True,
-        participation_fn=lambda comm: [rng.choice(comm)],
-    )
-
-
-@with_altair_and_later
-@with_presets([MAINNET], reason="to create duplicate committee")
-@spec_state_test
-def test_random_low_participation_with_duplicates(spec, state):
-    rng = random.Random(201)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        duplicates=True,
-        participation_fn=lambda comm: rng.sample(comm, int(len(comm) * 0.25)),
-    )
-
-
-@with_altair_and_later
-@with_presets([MAINNET], reason="to create duplicate committee")
-@spec_state_test
-def test_random_high_participation_with_duplicates(spec, state):
-    rng = random.Random(301)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        duplicates=True,
-        participation_fn=lambda comm: rng.sample(comm, int(len(comm) * 0.75)),
-    )
-
-
-@with_altair_and_later
-@with_presets([MAINNET], reason="to create duplicate committee")
-@spec_state_test
-def test_random_all_but_one_participating_with_duplicates(spec, state):
-    rng = random.Random(401)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        duplicates=True,
-        participation_fn=lambda comm: rng.sample(comm, len(comm) - 1),
-    )
-
-
-@with_altair_and_later
-@with_presets([MAINNET], reason="to create duplicate committee")
-@spec_test
-@with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold)
-@single_phase
-def test_random_misc_balances_and_half_participation_with_duplicates(spec, state):
-    rng = random.Random(1401)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        duplicates=True,
-        participation_fn=lambda comm: rng.sample(comm, len(comm) // 2),
-    )
-
-
-@with_altair_and_later
-@with_presets([MINIMAL], reason="to create nonduplicate committee")
-@spec_state_test
-def test_random_only_one_participant_without_duplicates(spec, state):
-    rng = random.Random(501)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        participation_fn=lambda comm: [rng.choice(comm)],
-    )
-
-
-@with_altair_and_later
-@with_presets([MINIMAL], reason="to create nonduplicate committee")
-@spec_state_test
-def test_random_low_participation_without_duplicates(spec, state):
-    rng = random.Random(601)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        participation_fn=lambda comm: rng.sample(comm, int(len(comm) * 0.25)),
-    )
-
-
-@with_altair_and_later
-@with_presets([MINIMAL], reason="to create nonduplicate committee")
-@spec_state_test
-def test_random_high_participation_without_duplicates(spec, state):
-    rng = random.Random(701)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        participation_fn=lambda comm: rng.sample(comm, int(len(comm) * 0.75)),
-    )
-
-
-@with_altair_and_later
-@with_presets([MINIMAL], reason="to create nonduplicate committee")
-@spec_state_test
-def test_random_all_but_one_participating_without_duplicates(spec, state):
-    rng = random.Random(801)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        participation_fn=lambda comm: rng.sample(comm, len(comm) - 1),
-    )
-
-
-@with_altair_and_later
-@with_presets([MINIMAL], reason="to create nonduplicate committee")
-@spec_test
-@with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold)
-@single_phase
-def test_random_misc_balances_and_half_participation_without_duplicates(spec, state):
-    rng = random.Random(1501)
-    yield from _test_harness_for_randomized_test_case(
-        spec,
-        state,
-        participation_fn=lambda comm: rng.sample(comm, len(comm) // 2),
-    )
diff --git a/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate_random.py b/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate_random.py
new file mode 100644
index 000000000..75845e060
--- /dev/null
+++ b/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate_random.py
@@ -0,0 +1,165 @@
+import random
+from eth2spec.test.helpers.constants import (
+    MAINNET, MINIMAL,
+)
+from eth2spec.test.helpers.sync_committee import (
+    get_committee_indices,
+    run_successful_sync_committee_test,
+)
+from eth2spec.test.context import (
+    with_altair_and_later,
+    spec_state_test,
+    default_activation_threshold,
+    misc_balances,
+    single_phase,
+    with_custom_state,
+    with_presets,
+    spec_test,
+)
+
+
+def _test_harness_for_randomized_test_case(spec, state, duplicates=False, participation_fn=None):
+    committee_indices = get_committee_indices(spec, state, duplicates=duplicates)
+
+    if participation_fn:
+        participating_indices = participation_fn(committee_indices)
+    else:
+        participating_indices = committee_indices
+
+    committee_bits = [index in participating_indices for index in committee_indices]
+    committee_size = len(committee_indices)
+    if duplicates:
+        assert committee_size > len(set(committee_indices))
+    else:
+        assert committee_size == len(set(committee_indices))
+
+    yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)
+
+
+@with_altair_and_later
+@with_presets([MAINNET], reason="to create duplicate committee")
+@spec_state_test
+def test_random_only_one_participant_with_duplicates(spec, state):
+    rng = random.Random(101)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        duplicates=True,
+        participation_fn=lambda comm: [rng.choice(comm)],
+    )
+
+
+@with_altair_and_later
+@with_presets([MAINNET], reason="to create duplicate committee")
+@spec_state_test
+def test_random_low_participation_with_duplicates(spec, state):
+    rng = random.Random(201)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        duplicates=True,
+        participation_fn=lambda comm: rng.sample(comm, int(len(comm) * 0.25)),
+    )
+
+
+@with_altair_and_later
+@with_presets([MAINNET], reason="to create duplicate committee")
+@spec_state_test
+def test_random_high_participation_with_duplicates(spec, state):
+    rng = random.Random(301)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        duplicates=True,
+        participation_fn=lambda comm: rng.sample(comm, int(len(comm) * 0.75)),
+    )
+
+
+@with_altair_and_later
+@with_presets([MAINNET], reason="to create duplicate committee")
+@spec_state_test
+def test_random_all_but_one_participating_with_duplicates(spec, state):
+    rng = random.Random(401)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        duplicates=True,
+        participation_fn=lambda comm: rng.sample(comm, len(comm) - 1),
+    )
+
+
+@with_altair_and_later
+@with_presets([MAINNET], reason="to create duplicate committee")
+@spec_test
+@with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold)
+@single_phase
+def test_random_misc_balances_and_half_participation_with_duplicates(spec, state):
+    rng = random.Random(1401)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        duplicates=True,
+        participation_fn=lambda comm: rng.sample(comm, len(comm) // 2),
+    )
+
+
+@with_altair_and_later
+@with_presets([MINIMAL], reason="to create nonduplicate committee")
+@spec_state_test
+def test_random_only_one_participant_without_duplicates(spec, state):
+    rng = random.Random(501)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        participation_fn=lambda comm: [rng.choice(comm)],
+    )
+
+
+@with_altair_and_later
+@with_presets([MINIMAL], reason="to create nonduplicate committee")
+@spec_state_test
+def test_random_low_participation_without_duplicates(spec, state):
+    rng = random.Random(601)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        participation_fn=lambda comm: rng.sample(comm, int(len(comm) * 0.25)),
+    )
+
+
+@with_altair_and_later
+@with_presets([MINIMAL], reason="to create nonduplicate committee")
+@spec_state_test
+def test_random_high_participation_without_duplicates(spec, state):
+    rng = random.Random(701)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        participation_fn=lambda comm: rng.sample(comm, int(len(comm) * 0.75)),
+    )
+
+
+@with_altair_and_later
+@with_presets([MINIMAL], reason="to create nonduplicate committee")
+@spec_state_test
+def test_random_all_but_one_participating_without_duplicates(spec, state):
+    rng = random.Random(801)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        participation_fn=lambda comm: rng.sample(comm, len(comm) - 1),
+    )
+
+
+@with_altair_and_later
+@with_presets([MINIMAL], reason="to create nonduplicate committee")
+@spec_test
+@with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold)
+@single_phase
+def test_random_misc_balances_and_half_participation_without_duplicates(spec, state):
+    rng = random.Random(1501)
+    yield from _test_harness_for_randomized_test_case(
+        spec,
+        state,
+        participation_fn=lambda comm: rng.sample(comm, len(comm) // 2),
+    )
diff --git a/tests/core/pyspec/eth2spec/test/helpers/sync_committee.py b/tests/core/pyspec/eth2spec/test/helpers/sync_committee.py
index 71be65044..e59f679e1 100644
--- a/tests/core/pyspec/eth2spec/test/helpers/sync_committee.py
+++ b/tests/core/pyspec/eth2spec/test/helpers/sync_committee.py
@@ -1,10 +1,15 @@
 from collections import Counter
 
+from eth2spec.test.context import (
+    expect_assertion_error,
+)
 from eth2spec.test.helpers.keys import privkeys
 from eth2spec.test.helpers.block import (
     build_empty_block_for_next_slot,
 )
+from eth2spec.test.helpers.block_processing import run_block_processing_to
 from eth2spec.utils import bls
+from eth2spec.utils.hash_function import hash
 
 
 def compute_sync_committee_signature(spec, state, slot, privkey, block_root=None, domain_type=None):
@@ -75,5 +80,104 @@ def compute_committee_indices(spec, state, committee):
     Given a ``committee``, calculate and return the related indices
     """
     all_pubkeys = [v.pubkey for v in state.validators]
-    committee_indices = [all_pubkeys.index(pubkey) for pubkey in committee.pubkeys]
-    return committee_indices
+    return [all_pubkeys.index(pubkey) for pubkey in committee.pubkeys]
+
+
+def validate_sync_committee_rewards(spec, pre_state, post_state, committee_indices, committee_bits, proposer_index):
+    for index in range(len(post_state.validators)):
+        reward = 0
+        penalty = 0
+        if index in committee_indices:
+            _reward, _penalty = compute_sync_committee_participant_reward_and_penalty(
+                spec,
+                pre_state,
+                index,
+                committee_indices,
+                committee_bits,
+            )
+            reward += _reward
+            penalty += _penalty
+
+        if proposer_index == index:
+            reward += compute_sync_committee_proposer_reward(
+                spec,
+                pre_state,
+                committee_indices,
+                committee_bits,
+            )
+
+        assert post_state.balances[index] == pre_state.balances[index] + reward - penalty
+
+
+def run_sync_committee_processing(spec, state, block, expect_exception=False):
+    """
+    Processes everything up to the sync committee work, then runs the sync committee work in isolation, and
+    produces a pre-state and post-state (None if exception) specifically for sync-committee processing changes.
+    """
+    pre_state = state.copy()
+    # process up to the sync committee work
+    call = run_block_processing_to(spec, state, block, 'process_sync_aggregate')
+    yield 'pre', state
+    yield 'sync_aggregate', block.body.sync_aggregate
+    if expect_exception:
+        expect_assertion_error(lambda: call(state, block))
+        yield 'post', None
+    else:
+        call(state, block)
+        yield 'post', state
+    if expect_exception:
+        assert pre_state.balances == state.balances
+    else:
+        committee_indices = compute_committee_indices(
+            spec,
+            state,
+            state.current_sync_committee,
+        )
+        committee_bits = block.body.sync_aggregate.sync_committee_bits
+        validate_sync_committee_rewards(
+            spec,
+            pre_state,
+            state,
+            committee_indices,
+            committee_bits,
+            block.proposer_index
+        )
+
+
+def _build_block_for_next_slot_with_sync_participation(spec, state, committee_indices, committee_bits):
+    block = build_empty_block_for_next_slot(spec, state)
+    block.body.sync_aggregate = spec.SyncAggregate(
+        sync_committee_bits=committee_bits,
+        sync_committee_signature=compute_aggregate_sync_committee_signature(
+            spec,
+            state,
+            block.slot - 1,
+            [index for index, bit in zip(committee_indices, committee_bits) if bit],
+        )
+    )
+    return block
+
+
+def run_successful_sync_committee_test(spec, state, committee_indices, committee_bits):
+    block = _build_block_for_next_slot_with_sync_participation(spec, state, committee_indices, committee_bits)
+    yield from run_sync_committee_processing(spec, state, block)
+
+
+def get_committee_indices(spec, state, duplicates=False):
+    """
+    This utility function allows the caller to ensure there are or are not
+    duplicate validator indices in the returned committee based on
+    the boolean ``duplicates``.
+    """
+    state = state.copy()
+    current_epoch = spec.get_current_epoch(state)
+    randao_index = (current_epoch + 1) % spec.EPOCHS_PER_HISTORICAL_VECTOR
+    while True:
+        committee = spec.get_next_sync_committee_indices(state)
+        if duplicates:
+            if len(committee) != len(set(committee)):
+                return committee
+        else:
+            if len(committee) == len(set(committee)):
+                return committee
+        state.randao_mixes[randao_index] = hash(state.randao_mixes[randao_index])

From 5a17fa65b238eb1507f16a7d6db447075de1774d Mon Sep 17 00:00:00 2001
From: Alex Stokes
Date: Tue, 17 Aug 2021 08:28:45 -0600
Subject: [PATCH 3/4] group test files into subdirectory

---
 .../test/altair/block_processing/sync_aggregate/__init__.py    | 0
 .../{ => sync_aggregate}/test_process_sync_aggregate.py        | 0
 .../{ => sync_aggregate}/test_process_sync_aggregate_random.py | 0
 tests/generators/operations/main.py                            | 3 ++-
 4 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/__init__.py
 rename tests/core/pyspec/eth2spec/test/altair/block_processing/{ => sync_aggregate}/test_process_sync_aggregate.py (100%)
 rename tests/core/pyspec/eth2spec/test/altair/block_processing/{ => sync_aggregate}/test_process_sync_aggregate_random.py (100%)

diff --git a/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/__init__.py b/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate.py b/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate.py
similarity index 100%
rename from tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate.py
rename to tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate.py
diff --git a/tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate_random.py b/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate_random.py
similarity index 100%
rename from tests/core/pyspec/eth2spec/test/altair/block_processing/test_process_sync_aggregate_random.py
rename to tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate_random.py
diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py
index 57fc6dd96..d2653d87d 100644
--- a/tests/generators/operations/main.py
+++ b/tests/generators/operations/main.py
@@ -12,8 +12,9 @@ if __name__ == "__main__":
         'voluntary_exit',
     ]}
     altair_mods = {
-        **{key: 'eth2spec.test.altair.block_processing.test_process_' + key for key in [
+        **{key: 'eth2spec.test.altair.block_processing.sync_aggregate.test_process_' + key for key in [
             'sync_aggregate',
+            'sync_aggregate_random',
         ]},
         **phase_0_mods,
     }  # also run the previous phase 0 tests

From 5a918dbdf3dbb591b415903e297dcfa6863abba7 Mon Sep 17 00:00:00 2001
From: Alex Stokes
Date: Wed, 18 Aug 2021 08:55:22 -0600
Subject: [PATCH 4/4] Add test count to test gen diagnostics

---
 .../pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py b/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py
index 2b02d1b5c..be5720265 100644
--- a/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py
+++ b/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py
@@ -96,6 +96,8 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
     if len(presets) != 0:
         print(f"Filtering test-generator runs to only include presets: {', '.join(presets)}")
 
+    generated_test_count = 0
+    skipped_test_count = 0
     for tprov in test_providers:
         # runs anything that we don't want to repeat for every test case.
         tprov.prepare()
@@ -149,6 +151,7 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
                     output_part("ssz", name, dump_ssz_fn(data, name, file_mode))
             except SkippedTest as e:
                 print(e)
+                skipped_test_count += 1
                 shutil.rmtree(case_dir)
                 continue
 
@@ -172,10 +175,13 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
             if not written_part:
                 shutil.rmtree(case_dir)
             else:
+                generated_test_count += 1
                 # Only remove `INCOMPLETE` tag file
                 os.remove(incomplete_tag_file)
 
-    print(f"completed {generator_name}")
+    summary_message = f"completed generation of {generator_name} with {generated_test_count} tests"
+    summary_message += f" ({skipped_test_count} skipped tests)"
+    print(summary_message)
 
 
 def dump_yaml_fn(data: Any, name: str, file_mode: str, yaml_encoder: YAML):
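
Note (illustrative sketch, not part of the patch series above): after PATCH 2, reward and penalty checks run inside run_sync_committee_processing for every case, so a new randomized scenario only has to choose its participants and yield through the shared helpers now exported from eth2spec.test.helpers.sync_committee. The test name and the seed 901 below are hypothetical, chosen only for illustration:

    import random

    from eth2spec.test.context import spec_state_test, with_altair_and_later, with_presets
    from eth2spec.test.helpers.constants import MINIMAL
    from eth2spec.test.helpers.sync_committee import (
        get_committee_indices,
        run_successful_sync_committee_test,
    )


    @with_altair_and_later
    @with_presets([MINIMAL], reason="to create nonduplicate committee")
    @spec_state_test
    def test_random_two_thirds_participation_without_duplicates(spec, state):
        # hypothetical case: pick roughly two thirds of the committee at random;
        # the helpers build the block, run processing, and verify rewards
        rng = random.Random(901)
        committee_indices = get_committee_indices(spec, state, duplicates=False)
        participants = rng.sample(committee_indices, len(committee_indices) * 2 // 3)
        committee_bits = [index in participants for index in committee_indices]
        yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)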