Introduce unittest2 and junit reports (#2522)

* Introduce unittest2 and junit reports

* fix XML path

* don't combine multiple CI runs

* fixup

* public combined report also

Co-authored-by: Ștefan Talpalaru <stefantalpalaru@yahoo.com>
This commit is contained in:
Jacek Sieka 2021-04-28 18:41:02 +02:00 committed by GitHub
parent 4cdbc17e39
commit ce49da6c0a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
57 changed files with 681 additions and 678 deletions

View File

@@ -1,4 +1,4 @@
name: Nimbus nim-beacon-chain CI
name: CI
on: [push, pull_request]
jobs:
@@ -273,6 +273,32 @@ jobs:
run: |
make -j$ncpu ARCH_OVERRIDE=$PLATFORM DISABLE_TEST_FIXTURES_SCRIPT=1 test
# The upload creates a combined report that gets posted as a comment on the PR
- name: Upload combined results
if: matrix.target.TEST_KIND == 'unit-tests'
uses: actions/upload-artifact@v2
with:
name: Unit Test Results ${{ matrix.target.os }}-${{ matrix.target.cpu }}
path: nim-beacon-chain/build/*.xml
# Linux
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@v1
if: always() && matrix.target.TEST_KIND == 'unit-tests' && runner.os == 'Linux'
with:
files: nim-beacon-chain/build/*.xml
check_name: Unit Test Results ${{ matrix.target.os }}-${{ matrix.target.cpu }}
comment_on_pr: false
# Windows and macOS
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action/composite@v1
if: always() && matrix.target.TEST_KIND == 'unit-tests' && runner.os != 'Linux'
with:
files: nim-beacon-chain/build/*.xml
check_name: Unit Test Results ${{ matrix.target.os }}-${{ matrix.target.cpu }}
comment_on_pr: false
- name: Run nim-beacon-chain testnet0 (minimal)
if: matrix.target.TEST_KIND == 'finalization-minimal'
shell: bash
@@ -286,3 +312,21 @@ jobs:
working-directory: nim-beacon-chain
run: |
./scripts/launch_local_testnet.sh --testnet 1 --nodes 4 --stop-at-epoch 5 --log-level DEBUG --disable-htop --enable-logtrace --data-dir local_testnet0_data --base-port 9000 --base-rpc-port 7000 --base-metrics-port 8008 --timeout 2400 -- --verify-finalization --discv5:no
publish-test-results:
name: "Publish Unit Tests Results"
needs: build
runs-on: ubuntu-latest
# the build job might be skipped, we don't need to run this job then
if: success() || failure()
steps:
- name: Download Artifacts
uses: actions/download-artifact@v2
with:
path: artifacts
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
files: artifacts/**/*.xml

View File

@@ -1,118 +1,6 @@
AllTests-mainnet
===
## Attestation pool processing [Preset: mainnet]
```diff
+ Attestations may arrive in any order [Preset: mainnet] OK
+ Attestations may overlap, bigger first [Preset: mainnet] OK
+ Attestations may overlap, smaller first [Preset: mainnet] OK
+ Attestations should be combined [Preset: mainnet] OK
+ Can add and retrieve simple attestations [Preset: mainnet] OK
+ Everyone voting for something different [Preset: mainnet] OK
+ Fork choice returns block with attestation OK
+ Fork choice returns latest block with no attestations OK
+ Trying to add a block twice tags the second as an error OK
+ Trying to add a duplicate block from an old pruned epoch is tagged as an error OK
+ Working with aggregates [Preset: mainnet] OK
```
OK: 11/11 Fail: 0/11 Skip: 0/11
## Beacon chain DB [Preset: mainnet]
```diff
+ empty database [Preset: mainnet] OK
+ find ancestors [Preset: mainnet] OK
+ sanity check blocks [Preset: mainnet] OK
+ sanity check full states [Preset: mainnet] OK
+ sanity check full states, reusing buffers [Preset: mainnet] OK
+ sanity check genesis roundtrip [Preset: mainnet] OK
+ sanity check state diff roundtrip [Preset: mainnet] OK
+ sanity check states [Preset: mainnet] OK
+ sanity check states, reusing buffers [Preset: mainnet] OK
```
OK: 9/9 Fail: 0/9 Skip: 0/9
## Beacon node
```diff
+ Compile OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## Beacon state [Preset: mainnet]
```diff
+ Smoke test initialize_beacon_state [Preset: mainnet] OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## Block pool processing [Preset: mainnet]
```diff
+ Adding the same block twice returns a Duplicate error [Preset: mainnet] OK
+ Reverse order block add & get [Preset: mainnet] OK
+ Simple block add&get [Preset: mainnet] OK
+ getRef returns nil for missing blocks OK
+ loading tail block works [Preset: mainnet] OK
+ updateHead updates head and headState [Preset: mainnet] OK
+ updateStateData sanity [Preset: mainnet] OK
```
OK: 7/7 Fail: 0/7 Skip: 0/7
## Block processing [Preset: mainnet]
```diff
+ Attestation gets processed at epoch [Preset: mainnet] OK
+ Passes from genesis state, empty block [Preset: mainnet] OK
+ Passes from genesis state, no block [Preset: mainnet] OK
+ Passes through epoch update, empty block [Preset: mainnet] OK
+ Passes through epoch update, no block [Preset: mainnet] OK
```
OK: 5/5 Fail: 0/5 Skip: 0/5
## BlockRef and helpers [Preset: mainnet]
```diff
+ epochAncestor sanity [Preset: mainnet] OK
+ get_ancestor sanity [Preset: mainnet] OK
+ isAncestorOf sanity [Preset: mainnet] OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## BlockSlot and helpers [Preset: mainnet]
```diff
+ atSlot sanity [Preset: mainnet] OK
+ parent sanity [Preset: mainnet] OK
```
OK: 2/2 Fail: 0/2 Skip: 0/2
## Eth2 specific discovery tests
```diff
+ Invalid attnets field OK
+ Subnet query OK
+ Subnet query after ENR update OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Exit pool testing suite
```diff
+ addExitMessage/getAttesterSlashingMessage OK
+ addExitMessage/getProposerSlashingMessage OK
+ addExitMessage/getVoluntaryExitMessage OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Fork Choice + Finality [Preset: mainnet]
```diff
+ fork_choice - testing finality #01 OK
+ fork_choice - testing finality #02 OK
+ fork_choice - testing no votes OK
+ fork_choice - testing with votes OK
```
OK: 4/4 Fail: 0/4 Skip: 0/4
## Gossip validation [Preset: mainnet]
```diff
+ Validation sanity OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## Honest validator
```diff
+ General pubsub topics OK
+ Mainnet attestation topics OK
+ is_aggregator OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Interop
```diff
+ Interop genesis OK
+ Interop signatures OK
+ Mocked start private key OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Official - constants & config [Preset: mainnet]
##
```diff
+ BASE_REWARD_FACTOR 64 [Preset: mainnet] OK
+ BLS_WITHDRAWAL_PREFIX "0x00" [Preset: mainnet] OK
@@ -176,6 +64,125 @@ OK: 3/3 Fail: 0/3 Skip: 0/3
+ WHISTLEBLOWER_REWARD_QUOTIENT 512 [Preset: mainnet] OK
```
OK: 51/60 Fail: 0/60 Skip: 9/60
## Attestation pool processing [Preset: mainnet]
```diff
+ Attestations may arrive in any order [Preset: mainnet] OK
+ Attestations may overlap, bigger first [Preset: mainnet] OK
+ Attestations may overlap, smaller first [Preset: mainnet] OK
+ Attestations should be combined [Preset: mainnet] OK
+ Can add and retrieve simple attestations [Preset: mainnet] OK
+ Everyone voting for something different [Preset: mainnet] OK
+ Fork choice returns block with attestation OK
+ Fork choice returns latest block with no attestations OK
+ Trying to add a block twice tags the second as an error OK
+ Trying to add a duplicate block from an old pruned epoch is tagged as an error OK
+ Working with aggregates [Preset: mainnet] OK
```
OK: 11/11 Fail: 0/11 Skip: 0/11
## Beacon chain DB [Preset: mainnet]
```diff
+ empty database [Preset: mainnet] OK
+ find ancestors [Preset: mainnet] OK
+ sanity check blocks [Preset: mainnet] OK
+ sanity check full states [Preset: mainnet] OK
+ sanity check full states, reusing buffers [Preset: mainnet] OK
+ sanity check genesis roundtrip [Preset: mainnet] OK
+ sanity check state diff roundtrip [Preset: mainnet] OK
+ sanity check states [Preset: mainnet] OK
+ sanity check states, reusing buffers [Preset: mainnet] OK
```
OK: 9/9 Fail: 0/9 Skip: 0/9
## Beacon state [Preset: mainnet]
```diff
+ Smoke test initialize_beacon_state [Preset: mainnet] OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## Bit fields
```diff
+ iterating words OK
+ overlaps OK
+ roundtrips OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Block pool processing [Preset: mainnet]
```diff
+ Adding the same block twice returns a Duplicate error [Preset: mainnet] OK
+ Reverse order block add & get [Preset: mainnet] OK
+ Simple block add&get [Preset: mainnet] OK
+ getRef returns nil for missing blocks OK
+ loading tail block works [Preset: mainnet] OK
+ updateHead updates head and headState [Preset: mainnet] OK
+ updateStateData sanity [Preset: mainnet] OK
```
OK: 7/7 Fail: 0/7 Skip: 0/7
## Block processing [Preset: mainnet]
```diff
+ Attestation gets processed at epoch [Preset: mainnet] OK
+ Passes from genesis state, empty block [Preset: mainnet] OK
+ Passes from genesis state, no block [Preset: mainnet] OK
+ Passes through epoch update, empty block [Preset: mainnet] OK
+ Passes through epoch update, no block [Preset: mainnet] OK
```
OK: 5/5 Fail: 0/5 Skip: 0/5
## BlockRef and helpers [Preset: mainnet]
```diff
+ epochAncestor sanity [Preset: mainnet] OK
+ get_ancestor sanity [Preset: mainnet] OK
+ isAncestorOf sanity [Preset: mainnet] OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## BlockSlot and helpers [Preset: mainnet]
```diff
+ atSlot sanity [Preset: mainnet] OK
+ parent sanity [Preset: mainnet] OK
```
OK: 2/2 Fail: 0/2 Skip: 0/2
## Eth1 monitor
```diff
+ Rewrite HTTPS Infura URLs OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## Eth2 specific discovery tests
```diff
+ Invalid attnets field OK
+ Subnet query OK
+ Subnet query after ENR update OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Exit pool testing suite
```diff
+ addExitMessage/getAttesterSlashingMessage OK
+ addExitMessage/getProposerSlashingMessage OK
+ addExitMessage/getVoluntaryExitMessage OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Fork Choice + Finality [Preset: mainnet]
```diff
+ fork_choice - testing finality #01 OK
+ fork_choice - testing finality #02 OK
+ fork_choice - testing no votes OK
+ fork_choice - testing with votes OK
```
OK: 4/4 Fail: 0/4 Skip: 0/4
## Gossip validation [Preset: mainnet]
```diff
+ Validation sanity OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## Honest validator
```diff
+ General pubsub topics OK
+ Mainnet attestation topics OK
+ is_aggregator OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Interop
```diff
+ Interop genesis OK
+ Interop signatures OK
+ Mocked start private key OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## PeerPool testing suite
```diff
+ Access peers by key test OK
@@ -242,6 +249,23 @@ OK: 1/1 Fail: 0/1 Skip: 0/1
+ Compile OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## SyncManager test suite
```diff
+ [SyncQueue] Async pending and resetWait() test OK
+ [SyncQueue] Async unordered push start from zero OK
+ [SyncQueue] Async unordered push with not full start from non-zero OK
+ [SyncQueue] Full and incomplete success/fail start from non-zero OK
+ [SyncQueue] Full and incomplete success/fail start from zero OK
+ [SyncQueue] One smart and one stupid + debt split + empty OK
+ [SyncQueue] Smart and stupid success/fail OK
+ [SyncQueue] Start and finish slots equal OK
+ [SyncQueue] Two full requests success/fail OK
+ [SyncQueue] checkResponse() test OK
+ [SyncQueue] contains() test OK
+ [SyncQueue] getLastNonEmptySlot() test OK
+ [SyncQueue] hasEndGap() test OK
```
OK: 13/13 Fail: 0/13 Skip: 0/13
## Zero signature sanity checks
```diff
+ SSZ serialization roundtrip of SignedBeaconBlockHeader OK
@@ -277,8 +301,10 @@ OK: 8/8 Fail: 0/8 Skip: 0/8
## chain DAG finalization tests [Preset: mainnet]
```diff
+ init with gaps [Preset: mainnet] OK
+ orphaned epoch block [Preset: mainnet] OK
+ prune heads on finalization [Preset: mainnet] OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
OK: 3/3 Fail: 0/3 Skip: 0/3
## hash
```diff
+ HashArray OK
@@ -292,4 +318,4 @@ OK: 2/2 Fail: 0/2 Skip: 0/2
OK: 1/1 Fail: 0/1 Skip: 0/1
---TOTAL---
OK: 157/166 Fail: 0/166 Skip: 9/166
OK: 175/184 Fail: 0/184 Skip: 9/184

View File

@@ -1,168 +1,6 @@
FixtureAll-mainnet
===
## Official - Epoch Processing - Final updates [Preset: mainnet]
```diff
+ Final updates - effective_balance_hysteresis [Preset: mainnet] OK
+ Final updates - eth1_vote_no_reset [Preset: mainnet] OK
+ Final updates - eth1_vote_reset [Preset: mainnet] OK
+ Final updates - historical_root_accumulator [Preset: mainnet] OK
```
OK: 4/4 Fail: 0/4 Skip: 0/4
## Official - Epoch Processing - Justification & Finalization [Preset: mainnet]
```diff
+ Justification & Finalization - 123_ok_support [Preset: mainnet] OK
+ Justification & Finalization - 123_poor_support [Preset: mainnet] OK
+ Justification & Finalization - 12_ok_support [Preset: mainnet] OK
+ Justification & Finalization - 12_ok_support_messed_target [Preset: mainnet] OK
+ Justification & Finalization - 12_poor_support [Preset: mainnet] OK
+ Justification & Finalization - 234_ok_support [Preset: mainnet] OK
+ Justification & Finalization - 234_poor_support [Preset: mainnet] OK
+ Justification & Finalization - 23_ok_support [Preset: mainnet] OK
+ Justification & Finalization - 23_poor_support [Preset: mainnet] OK
```
OK: 9/9 Fail: 0/9 Skip: 0/9
## Official - Epoch Processing - Registry updates [Preset: mainnet]
```diff
+ Registry updates - activation_queue_activation_and_ejection [Preset: mainnet] OK
+ Registry updates - activation_queue_efficiency [Preset: mainnet] OK
+ Registry updates - activation_queue_no_activation_no_finality [Preset: mainnet] OK
+ Registry updates - activation_queue_sorting [Preset: mainnet] OK
+ Registry updates - activation_queue_to_activated_if_finalized [Preset: mainnet] OK
+ Registry updates - add_to_activation_queue [Preset: mainnet] OK
+ Registry updates - ejection [Preset: mainnet] OK
+ Registry updates - ejection_past_churn_limit [Preset: mainnet] OK
```
OK: 8/8 Fail: 0/8 Skip: 0/8
## Official - Epoch Processing - Slashings [Preset: mainnet]
```diff
+ Slashings - max_penalties [Preset: mainnet] OK
+ Slashings - scaled_penalties [Preset: mainnet] OK
+ Slashings - small_penalty [Preset: mainnet] OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
## Official - Finality [Preset: mainnet]
```diff
+ [Valid] Official - Finality - finality_no_updates_at_genesis [Preset: mainnet] OK
+ [Valid] Official - Finality - finality_rule_1 [Preset: mainnet] OK
+ [Valid] Official - Finality - finality_rule_2 [Preset: mainnet] OK
+ [Valid] Official - Finality - finality_rule_3 [Preset: mainnet] OK
+ [Valid] Official - Finality - finality_rule_4 [Preset: mainnet] OK
```
OK: 5/5 Fail: 0/5 Skip: 0/5
## Official - Operations - Attestations [Preset: mainnet]
```diff
+ [Invalid] after_epoch_slots OK
+ [Invalid] bad_source_root OK
+ [Invalid] before_inclusion_delay OK
+ [Invalid] empty_participants_seemingly_valid_sig OK
+ [Invalid] empty_participants_zeroes_sig OK
+ [Invalid] future_target_epoch OK
+ [Invalid] invalid_attestation_signature OK
+ [Invalid] invalid_current_source_root OK
+ [Invalid] invalid_index OK
+ [Invalid] mismatched_target_and_slot OK
+ [Invalid] new_source_epoch OK
+ [Invalid] old_source_epoch OK
+ [Invalid] old_target_epoch OK
+ [Invalid] source_root_is_target_root OK
+ [Invalid] too_few_aggregation_bits OK
+ [Invalid] too_many_aggregation_bits OK
+ [Invalid] wrong_index_for_committee_signature OK
+ [Invalid] wrong_index_for_slot_0 OK
+ [Invalid] wrong_index_for_slot_1 OK
+ [Valid] success OK
+ [Valid] success_multi_proposer_index_iterations OK
+ [Valid] success_previous_epoch OK
```
OK: 22/22 Fail: 0/22 Skip: 0/22
## Official - Operations - Attester slashing [Preset: mainnet]
```diff
+ [Invalid] all_empty_indices OK
+ [Invalid] att1_bad_extra_index OK
+ [Invalid] att1_bad_replaced_index OK
+ [Invalid] att1_duplicate_index_double_signed OK
+ [Invalid] att1_duplicate_index_normal_signed OK
+ [Invalid] att1_empty_indices OK
+ [Invalid] att1_high_index OK
+ [Invalid] att2_bad_extra_index OK
+ [Invalid] att2_bad_replaced_index OK
+ [Invalid] att2_duplicate_index_double_signed OK
+ [Invalid] att2_duplicate_index_normal_signed OK
+ [Invalid] att2_empty_indices OK
+ [Invalid] att2_high_index OK
+ [Invalid] invalid_sig_1 OK
+ [Invalid] invalid_sig_1_and_2 OK
+ [Invalid] invalid_sig_2 OK
+ [Invalid] no_double_or_surround OK
+ [Invalid] participants_already_slashed OK
+ [Invalid] same_data OK
+ [Invalid] unsorted_att_1 OK
+ [Invalid] unsorted_att_2 OK
+ [Valid] success_already_exited_long_ago OK
+ [Valid] success_already_exited_recent OK
+ [Valid] success_double OK
+ [Valid] success_surround OK
```
OK: 25/25 Fail: 0/25 Skip: 0/25
## Official - Operations - Block header [Preset: mainnet]
```diff
+ [Invalid] invalid_multiple_blocks_single_slot OK
+ [Invalid] invalid_parent_root OK
+ [Invalid] invalid_proposer_index OK
+ [Invalid] invalid_slot_block_header OK
+ [Invalid] proposer_slashed OK
+ [Valid] success_block_header OK
```
OK: 6/6 Fail: 0/6 Skip: 0/6
## Official - Operations - Deposits [Preset: mainnet]
```diff
+ [Invalid] bad_merkle_proof OK
+ [Invalid] wrong_deposit_for_deposit_count OK
+ [Valid] invalid_sig_new_deposit OK
+ [Valid] invalid_sig_other_version OK
+ [Valid] invalid_sig_top_up OK
+ [Valid] invalid_withdrawal_credentials_top_up OK
+ [Valid] new_deposit_eth1_withdrawal_credentials OK
+ [Valid] new_deposit_max OK
+ [Valid] new_deposit_non_versioned_withdrawal_credentials OK
+ [Valid] new_deposit_over_max OK
+ [Valid] new_deposit_under_max OK
+ [Valid] success_top_up OK
+ [Valid] valid_sig_but_forked_state OK
```
OK: 13/13 Fail: 0/13 Skip: 0/13
## Official - Operations - Proposer slashing [Preset: mainnet]
```diff
+ [Invalid] epochs_are_different OK
+ [Invalid] headers_are_same_sigs_are_different OK
+ [Invalid] headers_are_same_sigs_are_same OK
+ [Invalid] invalid_different_proposer_indices OK
+ [Invalid] invalid_proposer_index OK
+ [Invalid] invalid_sig_1 OK
+ [Invalid] invalid_sig_1_and_2 OK
+ [Invalid] invalid_sig_1_and_2_swap OK
+ [Invalid] invalid_sig_2 OK
+ [Invalid] proposer_is_not_activated OK
+ [Invalid] proposer_is_slashed OK
+ [Invalid] proposer_is_withdrawn OK
+ [Valid] success OK
+ [Valid] success_slashed_and_proposer_index_the_same OK
```
OK: 14/14 Fail: 0/14 Skip: 0/14
## Official - Operations - Voluntary exit [Preset: mainnet]
```diff
+ [Invalid] invalid_signature OK
+ [Invalid] validator_already_exited OK
+ [Invalid] validator_exit_in_future OK
+ [Invalid] validator_invalid_validator_index OK
+ [Invalid] validator_not_active OK
+ [Invalid] validator_not_active_long_enough OK
+ [Valid] default_exit_epoch_subsequent_exit OK
+ [Valid] success OK
+ [Valid] success_exit_queue OK
```
OK: 9/9 Fail: 0/9 Skip: 0/9
## Official - Rewards [Preset: mainnet]
##
```diff
+ Rewards - all_balances_too_low_for_reward [Preset: mainnet] OK
+ Rewards - duplicate_attestations_at_later_slots [Preset: mainnet] OK
@@ -208,10 +46,13 @@ OK: 9/9 Fail: 0/9 Skip: 0/9
+ Rewards - with_not_yet_activated_validators_leak [Preset: mainnet] OK
+ Rewards - with_slashed_validators [Preset: mainnet] OK
+ Rewards - with_slashed_validators_leak [Preset: mainnet] OK
```
OK: 44/44 Fail: 0/44 Skip: 0/44
## Official - Sanity - Blocks [Preset: mainnet]
```diff
+ Slots - double_empty_epoch OK
+ Slots - empty_epoch OK
+ Slots - over_epoch_boundary OK
+ Slots - slots_1 OK
+ Slots - slots_2 OK
+ [Invalid] bad_merkle_proof OK
+ [Invalid] wrong_deposit_for_deposit_count OK
+ [Invalid] Official - Sanity - Blocks - double_same_proposer_slashings_same_block [Preset: OK
+ [Invalid] Official - Sanity - Blocks - double_similar_proposer_slashings_same_block [Prese OK
+ [Invalid] Official - Sanity - Blocks - double_validator_exit_same_block [Preset: mainnet] OK
@@ -227,6 +68,81 @@ OK: 44/44 Fail: 0/44 Skip: 0/44
+ [Invalid] Official - Sanity - Blocks - same_slot_block_transition [Preset: mainnet] OK
+ [Invalid] Official - Sanity - Blocks - slash_and_exit_same_index [Preset: mainnet] OK
+ [Invalid] Official - Sanity - Blocks - zero_block_sig [Preset: mainnet] OK
+ [Invalid] after_epoch_slots OK
+ [Invalid] all_empty_indices OK
+ [Invalid] att1_bad_extra_index OK
+ [Invalid] att1_bad_replaced_index OK
+ [Invalid] att1_duplicate_index_double_signed OK
+ [Invalid] att1_duplicate_index_normal_signed OK
+ [Invalid] att1_empty_indices OK
+ [Invalid] att1_high_index OK
+ [Invalid] att2_bad_extra_index OK
+ [Invalid] att2_bad_replaced_index OK
+ [Invalid] att2_duplicate_index_double_signed OK
+ [Invalid] att2_duplicate_index_normal_signed OK
+ [Invalid] att2_empty_indices OK
+ [Invalid] att2_high_index OK
+ [Invalid] bad_source_root OK
+ [Invalid] before_inclusion_delay OK
+ [Invalid] empty_participants_seemingly_valid_sig OK
+ [Invalid] empty_participants_zeroes_sig OK
+ [Invalid] epochs_are_different OK
+ [Invalid] future_target_epoch OK
+ [Invalid] headers_are_same_sigs_are_different OK
+ [Invalid] headers_are_same_sigs_are_same OK
+ [Invalid] invalid_attestation_signature OK
+ [Invalid] invalid_current_source_root OK
+ [Invalid] invalid_different_proposer_indices OK
+ [Invalid] invalid_index OK
+ [Invalid] invalid_multiple_blocks_single_slot OK
+ [Invalid] invalid_parent_root OK
+ [Invalid] invalid_proposer_index OK
+ [Invalid] invalid_sig_1 OK
+ [Invalid] invalid_sig_1_and_2 OK
+ [Invalid] invalid_sig_1_and_2_swap OK
+ [Invalid] invalid_sig_2 OK
+ [Invalid] invalid_signature OK
+ [Invalid] invalid_slot_block_header OK
+ [Invalid] mismatched_target_and_slot OK
+ [Invalid] new_source_epoch OK
+ [Invalid] no_double_or_surround OK
+ [Invalid] old_source_epoch OK
+ [Invalid] old_target_epoch OK
+ [Invalid] participants_already_slashed OK
+ [Invalid] proposer_is_not_activated OK
+ [Invalid] proposer_is_slashed OK
+ [Invalid] proposer_is_withdrawn OK
+ [Invalid] proposer_slashed OK
+ [Invalid] same_data OK
+ [Invalid] source_root_is_target_root OK
+ [Invalid] too_few_aggregation_bits OK
+ [Invalid] too_many_aggregation_bits OK
+ [Invalid] unsorted_att_1 OK
+ [Invalid] unsorted_att_2 OK
+ [Invalid] validator_already_exited OK
+ [Invalid] validator_exit_in_future OK
+ [Invalid] validator_invalid_validator_index OK
+ [Invalid] validator_not_active OK
+ [Invalid] validator_not_active_long_enough OK
+ [Invalid] wrong_index_for_committee_signature OK
+ [Invalid] wrong_index_for_slot_0 OK
+ [Invalid] wrong_index_for_slot_1 OK
+ [Valid] invalid_sig_new_deposit OK
+ [Valid] invalid_sig_other_version OK
+ [Valid] invalid_sig_top_up OK
+ [Valid] invalid_withdrawal_credentials_top_up OK
+ [Valid] new_deposit_eth1_withdrawal_credentials OK
+ [Valid] new_deposit_max OK
+ [Valid] new_deposit_non_versioned_withdrawal_credentials OK
+ [Valid] new_deposit_over_max OK
+ [Valid] new_deposit_under_max OK
+ [Valid] success_top_up OK
+ [Valid] valid_sig_but_forked_state OK
+ [Valid] Official - Finality - finality_no_updates_at_genesis [Preset: mainnet] OK
+ [Valid] Official - Finality - finality_rule_1 [Preset: mainnet] OK
+ [Valid] Official - Finality - finality_rule_2 [Preset: mainnet] OK
+ [Valid] Official - Finality - finality_rule_3 [Preset: mainnet] OK
+ [Valid] Official - Finality - finality_rule_4 [Preset: mainnet] OK
+ [Valid] Official - Sanity - Blocks - attestation [Preset: mainnet] OK
+ [Valid] Official - Sanity - Blocks - attester_slashing [Preset: mainnet] OK
+ [Valid] Official - Sanity - Blocks - balance_driven_status_transitions [Preset: mainnet] OK
@@ -250,17 +166,59 @@ OK: 44/44 Fail: 0/44 Skip: 0/44
+ [Valid] Official - Sanity - Blocks - skipped_slots [Preset: mainnet] OK
+ [Valid] Official - Sanity - Blocks - slash_and_exit_diff_index [Preset: mainnet] OK
+ [Valid] Official - Sanity - Blocks - voluntary_exit [Preset: mainnet] OK
+ [Valid] default_exit_epoch_subsequent_exit OK
+ [Valid] success OK
+ [Valid] success_already_exited_long_ago OK
+ [Valid] success_already_exited_recent OK
+ [Valid] success_block_header OK
+ [Valid] success_double OK
+ [Valid] success_exit_queue OK
+ [Valid] success_multi_proposer_index_iterations OK
+ [Valid] success_previous_epoch OK
+ [Valid] success_slashed_and_proposer_index_the_same OK
+ [Valid] success_surround OK
```
OK: 38/38 Fail: 0/38 Skip: 0/38
## Official - Sanity - Slots [Preset: mainnet]
OK: 175/175 Fail: 0/175 Skip: 0/175
## Official - Epoch Processing - Final updates [Preset: mainnet]
```diff
+ Slots - double_empty_epoch OK
+ Slots - empty_epoch OK
+ Slots - over_epoch_boundary OK
+ Slots - slots_1 OK
+ Slots - slots_2 OK
+ Final updates - effective_balance_hysteresis [Preset: mainnet] OK
+ Final updates - eth1_vote_no_reset [Preset: mainnet] OK
+ Final updates - eth1_vote_reset [Preset: mainnet] OK
+ Final updates - historical_root_accumulator [Preset: mainnet] OK
```
OK: 5/5 Fail: 0/5 Skip: 0/5
OK: 4/4 Fail: 0/4 Skip: 0/4
## Official - Epoch Processing - Justification & Finalization [Preset: mainnet]
```diff
+ Justification & Finalization - 123_ok_support [Preset: mainnet] OK
+ Justification & Finalization - 123_poor_support [Preset: mainnet] OK
+ Justification & Finalization - 12_ok_support [Preset: mainnet] OK
+ Justification & Finalization - 12_ok_support_messed_target [Preset: mainnet] OK
+ Justification & Finalization - 12_poor_support [Preset: mainnet] OK
+ Justification & Finalization - 234_ok_support [Preset: mainnet] OK
+ Justification & Finalization - 234_poor_support [Preset: mainnet] OK
+ Justification & Finalization - 23_ok_support [Preset: mainnet] OK
+ Justification & Finalization - 23_poor_support [Preset: mainnet] OK
```
OK: 9/9 Fail: 0/9 Skip: 0/9
## Official - Epoch Processing - Registry updates [Preset: mainnet]
```diff
+ Registry updates - activation_queue_activation_and_ejection [Preset: mainnet] OK
+ Registry updates - activation_queue_efficiency [Preset: mainnet] OK
+ Registry updates - activation_queue_no_activation_no_finality [Preset: mainnet] OK
+ Registry updates - activation_queue_sorting [Preset: mainnet] OK
+ Registry updates - activation_queue_to_activated_if_finalized [Preset: mainnet] OK
+ Registry updates - add_to_activation_queue [Preset: mainnet] OK
+ Registry updates - ejection [Preset: mainnet] OK
+ Registry updates - ejection_past_churn_limit [Preset: mainnet] OK
```
OK: 8/8 Fail: 0/8 Skip: 0/8
## Official - Epoch Processing - Slashings [Preset: mainnet]
```diff
+ Slashings - max_penalties [Preset: mainnet] OK
+ Slashings - scaled_penalties [Preset: mainnet] OK
+ Slashings - small_penalty [Preset: mainnet] OK
```
OK: 3/3 Fail: 0/3 Skip: 0/3
---TOTAL---
OK: 205/205 Fail: 0/205 Skip: 0/205
OK: 199/199 Fail: 0/199 Skip: 0/199

View File

@@ -144,21 +144,25 @@ update: | update-common
libbacktrace:
+ "$(MAKE)" -C vendor/nim-libbacktrace --no-print-directory BUILD_CXX_LIB=0
# test suite
TEST_BINARIES := \
# test binaries that can output an XML report
XML_TEST_BINARIES := \
test_fixture_const_sanity_check_minimal \
test_fixture_const_sanity_check_mainnet \
test_fixture_ssz_generic_types \
test_fixture_ssz_consensus_objects \
all_fixtures_require_ssz \
test_official_interchange_vectors \
all_tests \
test_keystore
# test suite
TEST_BINARIES := \
proto_array \
fork_choice \
all_tests \
test_keystore \
test_ssz_roundtrip \
state_sim \
block_sim
.PHONY: $(TEST_BINARIES)
.PHONY: $(TEST_BINARIES) $(XML_TEST_BINARIES)
test_fixture_const_sanity_check_minimal: | build deps
+ echo -e $(BUILD_MSG) "build/$@" && \
@@ -237,6 +241,15 @@ all_tests: | build deps
$(NIM_PARAMS) -d:chronicles_log_level=TRACE -d:const_preset=mainnet -d:chronicles_sinks="json[file]" && \
echo -e $(BUILD_END_MSG) "build/$@"
# TODO `test_ssz_roundtrip` is extracted from the rest of the tests because it's incompatible with unittest2
test_ssz_roundtrip: | build deps
+ echo -e $(BUILD_MSG) "build/$@" && \
MAKE="$(MAKE)" V="$(V)" $(ENV_SCRIPT) scripts/compile_nim_program.sh \
$@ \
"tests/$@.nim" \
$(NIM_PARAMS) -d:chronicles_log_level=TRACE -d:const_preset=mainnet -d:chronicles_sinks="json[file]" && \
echo -e $(BUILD_END_MSG) "build/$@"
# TODO `test_keystore` is extracted from the rest of the tests because it uses conflicting BLST headers
test_keystore: | build deps
+ echo -e $(BUILD_MSG) "build/$@" && \
@@ -265,11 +278,17 @@ block_sim: | build deps
DISABLE_TEST_FIXTURES_SCRIPT := 0
# This parameter passing scheme is ugly, but short.
test: | $(TEST_BINARIES)
test: | $(XML_TEST_BINARIES) $(TEST_BINARIES)
ifeq ($(DISABLE_TEST_FIXTURES_SCRIPT), 0)
V=$(V) scripts/setup_official_tests.sh
endif
tests/simulation/restapi.sh
for TEST_BINARY in $(XML_TEST_BINARIES); do \
PARAMS="--xml:build/$${TEST_BINARY}.xml --console"; \
echo -e "\nRunning $${TEST_BINARY} $${PARAMS}\n"; \
build/$${TEST_BINARY} $${PARAMS} || { echo -e "\n$${TEST_BINARY} $${PARAMS} failed; Aborting."; exit 1; }; \
done; \
rm -rf 0000-*.json t_slashprot_migration.* *.log block_sim_db
for TEST_BINARY in $(TEST_BINARIES); do \
PARAMS=""; \
if [[ "$${TEST_BINARY}" == "state_sim" ]]; then PARAMS="--validators=6000 --slots=128"; \

View File

@@ -18,7 +18,7 @@ import
../beacon_clock,
"."/[block_pools_types, block_quarantine]
export block_pools_types, helpers
export block_pools_types, helpers, datatypes
# https://github.com/ethereum/eth2.0-metrics/blob/master/metrics.md#interop-metrics
declareGauge beacon_head_root, "Root of the head block of the beacon chain"

View File

@@ -13,7 +13,6 @@ import # Official constants
import # Unit test
./test_attestation_pool,
./test_beacon_chain_db,
./test_beacon_node,
./test_beaconstate,
./test_bitseqs,
./test_block_pool,

View File

@@ -5,7 +5,7 @@
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
# import ../interpreter # included to be able to use "suiteReport"
# import ../interpreter # included to be able to use "suite"
proc setup_finality_01(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operation]] =
var balances = @[Gwei(1), Gwei(1)]
@@ -116,7 +116,7 @@ proc setup_finality_01(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operati
)
proc test_ffg01() =
timedTest "fork_choice - testing finality #01":
test "fork_choice - testing finality #01":
# for i in 0 ..< 4:
# echo " block (", i, ") hash: ", fakeHash(i)
# echo " ------------------------------------------------------"

View File

@@ -5,7 +5,7 @@
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
# import ../interpreter # included to be able to use "suiteReport"
# import ../interpreter # included to be able to use "suite"
proc setup_finality_02(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operation]] =
var balances = @[Gwei(1), Gwei(1)]
@@ -378,7 +378,7 @@ proc setup_finality_02(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operati
)
proc test_ffg02() =
timedTest "fork_choice - testing finality #02":
test "fork_choice - testing finality #02":
# for i in 0 ..< 12:
# echo " block (", i, ") hash: ", fakeHash(i)
# echo " ------------------------------------------------------"

View File

@@ -5,7 +5,7 @@
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
# import ../interpreter # included to be able to use "suiteReport"
# import ../interpreter # included to be able to use "suite"
proc setup_no_votes(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operation]] =
let balances = newSeq[Gwei](16)
@@ -253,7 +253,7 @@ proc setup_no_votes(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operation]
)
proc test_no_votes() =
timedTest "fork_choice - testing no votes":
test "fork_choice - testing no votes":
# for i in 0 ..< 6:
# echo " block (", i, ") hash: ", fakeHash(i)
# echo " ------------------------------------------------------"

View File

@@ -5,7 +5,7 @@
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
# import ../interpreter # included to be able to use "suiteReport"
# import ../interpreter # included to be able to use "suite"
proc setup_votes(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operation]] =
var balances = @[Gwei(1), Gwei(1)]
@ -689,7 +689,7 @@ proc setup_votes(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operation]] =
)
proc test_votes() =
timedTest "fork_choice - testing with votes":
test "fork_choice - testing with votes":
# for i in 0 ..< 12:
# echo " block (", i, ") hash: ", fakeHash(i)
# echo " ------------------------------------------------------"

View File

@ -4,9 +4,9 @@
{.used.}
import ../testutil, std/unittest
import ../testutil
# include to be able to use "suiteReport"
# include to be able to use "suite"
import ./interpreter
suiteReport "Fork Choice + Finality " & preset():
suite "Fork Choice + Finality " & preset():
include scenarios/[no_votes, votes, ffg_01, ffg_02]

View File

@ -9,8 +9,9 @@
import
# Standard libs
os, unittest,
os,
# Status libs
unittest2,
blscurve, stew/byteutils,
# Beacon chain internals
../../beacon_chain/spec/crypto,
@ -49,14 +50,14 @@ proc readValue*(r: var JsonReader, a: var Eth2Domain) =
# TODO: json tests were removed
const BLSDir = JsonTestsDir/"general"/"phase0"/"bls"
suiteReport "Official - BLS tests":
timedTest "Private to public key conversion":
suite "Official - BLS tests":
test "Private to public key conversion":
for file in walkDirRec(BLSDir/"priv_to_pub"):
let t = parseTest(file, Json, BLSPrivToPub)
let implResult = t.input.pubkey()
check: implResult == t.output
timedTest "Message signing":
test "Message signing":
for file in walkDirRec(BLSDir/"sign_msg"):
let t = parseTest(file, Json, BLSSignMsg)
let implResult = t.input.privkey.bls_sign(
@ -65,13 +66,13 @@ suiteReport "Official - BLS tests":
)
check: implResult == t.output
timedTest "Aggregating signatures":
test "Aggregating signatures":
for file in walkDirRec(BLSDir/"aggregate_sigs"):
let t = parseTest(file, Json, BLSAggSig)
let implResult = t.input.combine()
check: implResult == t.output
timedTest "Aggregating public keys":
test "Aggregating public keys":
for file in walkDirRec(BLSDir/"aggregate_pubkeys"):
let t = parseTest(file, Json, BLSAggPubKey)
let implResult = t.input.combine()

View File

@ -6,10 +6,11 @@
import
# Standard library
macros, os, strutils, tables, math, json, streams,
strformat, unittest,
strformat,
# Third party
yaml,
# Status libraries
unittest2,
stew/[byteutils, endians2],
# Internals
../../beacon_chain/spec/[datatypes, presets],
@ -128,7 +129,7 @@ proc checkConfig() =
var config = yamlStream.loadToJson()
doAssert config.len == 1
for constant, value in config[0]:
timedTest &"{constant:<50}{value:<20}{preset()}":
test &"{constant:<50}{value:<20}{preset()}":
if constant in IgnoreKeys:
echo &" ↶↶ Skipping {constant}"
skip()
@ -142,5 +143,5 @@ proc checkConfig() =
else:
check: ConstsToCheck[constant] == value.getBiggestInt().uint64()
suiteReport "Official - constants & config " & preset():
suite "Official - constants & config " & preset():
checkConfig()

View File

@ -9,8 +9,9 @@
import
# Standard library
os, unittest,
os,
# Utilities
unittest2,
stew/results,
# Beacon chain internals
../../beacon_chain/spec/[datatypes, beaconstate],
@ -38,7 +39,7 @@ proc runTest(identifier: string) =
else:
prefix = "[Invalid] "
timedTest prefix & identifier:
test prefix & identifier:
var cache = StateCache()
let attestation = parseTest(testDir/"attestation.ssz", SSZ, Attestation)
@ -56,6 +57,6 @@ proc runTest(identifier: string) =
`testImpl _ operations_attestations _ identifier`()
suiteReport "Official - Operations - Attestations " & preset():
suite "Official - Operations - Attestations " & preset():
for kind, path in walkDir(OperationsAttestationsDir, true):
runTest(path)

View File

@ -9,7 +9,7 @@
import
# Standard library
os, unittest,
os,
# Utilities
stew/results,
# Beacon chain internals
@ -38,7 +38,7 @@ proc runTest(identifier: string) =
else:
prefix = "[Invalid] "
timedTest prefix & identifier:
test prefix & identifier:
var cache = StateCache()
let attesterSlashing = parseTest(testDir/"attester_slashing.ssz", SSZ, AttesterSlashing)
@ -58,6 +58,6 @@ proc runTest(identifier: string) =
`testImpl _ operations_attester_slashing _ identifier`()
suiteReport "Official - Operations - Attester slashing " & preset():
suite "Official - Operations - Attester slashing " & preset():
for kind, path in walkDir(OpAttSlashingDir, true):
runTest(path)

View File

@ -9,7 +9,7 @@
import
# Standard library
os, unittest,
os,
# Utilities
stew/results,
# Beacon chain internals
@ -38,7 +38,7 @@ proc runTest(identifier: string) =
else:
prefix = "[Invalid] "
timedTest prefix & identifier:
test prefix & identifier:
var cache = StateCache()
let blck = parseTest(testDir/"block.ssz", SSZ, BeaconBlock)
@ -56,6 +56,6 @@ proc runTest(identifier: string) =
`testImpl _ blockheader _ identifier`()
suiteReport "Official - Operations - Block header " & preset():
suite "Official - Operations - Block header " & preset():
for kind, path in walkDir(OpBlockHeaderDir, true):
runTest(path)

View File

@ -9,7 +9,7 @@
import
# Standard library
os, unittest,
os,
# Utilities
stew/results,
# Beacon chain internals
@ -38,7 +38,7 @@ proc runTest(identifier: string) =
else:
prefix = "[Invalid] "
timedTest prefix & " " & identifier:
test prefix & " " & identifier:
let deposit = parseTest(testDir/"deposit.ssz", SSZ, Deposit)
var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
@ -51,6 +51,6 @@ proc runTest(identifier: string) =
`testImpl _ operations_deposits _ identifier`()
suiteReport "Official - Operations - Deposits " & preset():
suite "Official - Operations - Deposits " & preset():
for kind, path in walkDir(OperationsDepositsDir, true):
runTest(path)

View File

@ -9,7 +9,7 @@
import
# Standard library
os, unittest,
os,
# Utilities
stew/results,
# Beacon chain internals
@ -38,7 +38,7 @@ proc runTest(identifier: string) =
else:
prefix = "[Invalid] "
timedTest prefix & identifier:
test prefix & identifier:
let proposerSlashing = parseTest(testDir/"proposer_slashing.ssz", SSZ, ProposerSlashing)
var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
@ -56,6 +56,6 @@ proc runTest(identifier: string) =
`testImpl_proposer_slashing _ identifier`()
suiteReport "Official - Operations - Proposer slashing " & preset():
suite "Official - Operations - Proposer slashing " & preset():
for kind, path in walkDir(OpProposerSlashingDir, true):
runTest(path)

View File

@ -9,7 +9,7 @@
import
# Standard library
os, unittest,
os,
# Utilities
stew/results,
# Beacon chain internals
@ -38,7 +38,7 @@ proc runTest(identifier: string) =
else:
prefix = "[Invalid] "
timedTest prefix & identifier:
test prefix & identifier:
let voluntaryExit = parseTest(testDir/"voluntary_exit.ssz", SSZ, SignedVoluntaryExit)
var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
@ -58,6 +58,6 @@ proc runTest(identifier: string) =
`testImpl _ voluntary_exit _ identifier`()
suiteReport "Official - Operations - Voluntary exit " & preset():
suite "Official - Operations - Voluntary exit " & preset():
for kind, path in walkDir(OpVoluntaryExitDir, true):
runTest(path)

View File

@ -9,7 +9,7 @@
import
# Standard library
os, unittest,
os,
# Utilities
stew/results,
# Beacon chain internals
@ -48,7 +48,7 @@ proc runTest(rewardsDir, identifier: string) =
let testDir = rewardsDir / identifier
proc `testImpl _ rewards _ identifier`() =
timedTest "Rewards" & " - " & identifier & preset():
test "Rewards" & " - " & identifier & preset():
var
state = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
cache = StateCache()
@ -123,7 +123,7 @@ proc runTest(rewardsDir, identifier: string) =
`testImpl _ rewards _ identifier`()
suiteReport "Official - Rewards " & preset():
suite "Official - Rewards " & preset():
for rewardsDir in [RewardsDirBasic, RewardsDirLeak, RewardsDirRandom]:
for kind, path in walkDir(rewardsDir, true):
runTest(rewardsDir, path)

View File

@ -9,7 +9,7 @@
import
# Standard library
os, sequtils, unittest, chronicles,
os, sequtils, chronicles,
# Beacon chain internals
../../beacon_chain/spec/[crypto, datatypes, state_transition, presets],
../../beacon_chain/ssz,
@ -34,7 +34,7 @@ proc runTest(testName, testDir, unitTestName: string) =
hasPostState = existsFile(testPath/"post.ssz")
prefix = if hasPostState: "[Valid] " else: "[Invalid] "
timedTest prefix & testName & " - " & unitTestName & preset():
test prefix & testName & " - " & unitTestName & preset():
var
preState = newClone(parseTest(testPath/"pre.ssz", SSZ, BeaconState))
hashedPreState = (ref HashedBeaconState)(
@ -67,10 +67,10 @@ proc runTest(testName, testDir, unitTestName: string) =
`testImpl _ blck _ testName`()
suiteReport "Official - Sanity - Blocks " & preset():
suite "Official - Sanity - Blocks " & preset():
for kind, path in walkDir(SanityBlocksDir, true):
runTest("Official - Sanity - Blocks", SanityBlocksDir, path)
suiteReport "Official - Finality " & preset():
suite "Official - Finality " & preset():
for kind, path in walkDir(FinalityDir, true):
runTest("Official - Finality", FinalityDir, path)

View File

@ -9,7 +9,7 @@
import
# Standard library
os, strutils, unittest,
os, strutils,
# Beacon chain internals
../../beacon_chain/spec/[datatypes, state_transition],
# Test utilities
@ -29,7 +29,7 @@ proc runTest(identifier: string) =
num_slots = readLines(testDir / "slots.yaml", 2)[0].parseInt.uint64
proc `testImpl _ slots _ identifier`() =
timedTest "Slots - " & identifier:
test "Slots - " & identifier:
var
preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
hashedPreState = (ref HashedBeaconState)(
@ -47,6 +47,6 @@ proc runTest(identifier: string) =
`testImpl _ slots _ identifier`()
suiteReport "Official - Sanity - Slots " & preset():
suite "Official - Sanity - Slots " & preset():
for kind, path in walkDir(SanitySlotsDir, true):
runTest(path)

View File

@ -7,7 +7,7 @@
import
# Standard library
os, unittest, strutils, streams, strformat,
os, strutils, streams, strformat,
macros, sets,
# Third-party
yaml,
@ -80,12 +80,12 @@ proc loadExpectedHashTreeRoot(dir: string): SSZHashTreeRoot =
# Test runner
# ----------------------------------------------------------------
proc runSSZtests() =
suite "Official - SSZ consensus objects " & preset():
doAssert existsDir(SSZDir), "You need to run the \"download_test_vectors.sh\" script to retrieve the official test vectors."
for pathKind, sszType in walkDir(SSZDir, relative = true):
doAssert pathKind == pcDir
timedTest &" Testing {sszType}":
test &" Testing {sszType}":
let path = SSZDir/sszType
for pathKind, sszTestKind in walkDir(path, relative = true):
doAssert pathKind == pcDir
@ -128,7 +128,4 @@ proc runSSZtests() =
else:
raise newException(ValueError, "Unsupported test: " & sszType)
suiteReport "Official - SSZ consensus objects " & preset():
runSSZtests()
summarizeLongTests("FixtureSSZConsensus")

View File

@ -7,7 +7,7 @@
import
# Standard library
os, unittest, strutils, streams, strformat, strscans,
os, strutils, streams, strformat, strscans,
macros, typetraits,
# Status libraries
faststreams, ../testutil,
@ -245,7 +245,7 @@ proc sszCheck(baseDir, sszType, sszSubType: string) =
# Test runner
# ------------------------------------------------------------------------
proc runSSZtests() =
suite "Official - SSZ generic types":
doAssert existsDir(SSZDir), "You need to run the \"download_test_vectors.sh\" script to retrieve the official test vectors."
for pathKind, sszType in walkDir(SSZDir, relative = true):
doAssert pathKind == pcDir
@ -259,13 +259,13 @@ proc runSSZtests() =
of "containers":
skipped = " - skipping BitsStruct"
timedTest &"Testing {sszType:12} inputs - valid" & skipped:
test &"Testing {sszType:12} inputs - valid" & skipped:
let path = SSZDir/sszType/"valid"
for pathKind, sszSubType in walkDir(path, relative = true):
if pathKind != pcDir: continue
sszCheck(path, sszType, sszSubType)
timedTest &"Testing {sszType:12} inputs - invalid" & skipped:
test &"Testing {sszType:12} inputs - invalid" & skipped:
let path = SSZDir/sszType/"invalid"
for pathKind, sszSubType in walkDir(path, relative = true):
if pathKind != pcDir: continue
@ -281,7 +281,4 @@ proc runSSZtests() =
checkpoint getCurrentExceptionMsg()
check false
suiteReport "Official - SSZ generic types":
runSSZtests()
summarizeLongTests("FixtureSSZGeneric")

View File

@ -9,7 +9,7 @@
import
# Standard library
os, unittest, strutils,
os, strutils,
# Beacon chain internals
../../beacon_chain/spec/[datatypes, state_transition_epoch],
# Test utilities
@ -22,31 +22,23 @@ from ../../beacon_chain/spec/beaconstate import process_registry_updates
# XXX: move to state_transition_epoch?
template runSuite(suiteDir, testName: string, transitionProc: untyped{ident}, useCache: static bool): untyped =
# We wrap the tests in a proc to avoid running out of globals
# in the future: Nim supports up to 3500 globals
# but unittest with the macro/templates put everything as globals
# https://github.com/nim-lang/Nim/issues/12084#issue-486866402
suite "Official - Epoch Processing - " & testName & preset():
doAssert dirExists(suiteDir)
for testDir in walkDirRec(suiteDir, yieldFilter = {pcDir}):
proc `suiteImpl _ transitionProc`() =
suiteReport "Official - Epoch Processing - " & testName & preset():
doAssert dirExists(suiteDir)
for testDir in walkDirRec(suiteDir, yieldFilter = {pcDir}):
let unitTestName = testDir.rsplit(DirSep, 1)[1]
test testName & " - " & unitTestName & preset():
# BeaconState objects are stored on the heap to avoid stack overflow
var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
let unitTestName = testDir.rsplit(DirSep, 1)[1]
timedTest testName & " - " & unitTestName & preset():
# BeaconState objects are stored on the heap to avoid stack overflow
var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
when useCache:
var cache = StateCache()
transitionProc(preState[], cache)
else:
transitionProc(preState[])
when useCache:
var cache = StateCache()
transitionProc(preState[], cache)
else:
transitionProc(preState[])
reportDiff(preState, postState)
`suiteImpl _ transitionProc`()
reportDiff(preState, postState)
# Justification & Finalization
# ---------------------------------------------------------------

View File

@ -9,7 +9,7 @@
import
# Standard library
std/[unittest, os],
std/[os],
# Status lib
eth/db/kvstore,
stew/results,
@ -29,7 +29,7 @@ template wrappedTimedTest(name: string, body: untyped) =
# `check` macro takes a copy of whatever it's checking, on the stack!
block: # Symbol namespacing
proc wrappedTest() =
timedTest name:
test name:
body
wrappedTest()
@ -55,7 +55,7 @@ proc sqlite3db_delete(basepath, dbname: string) =
const TestDir = ""
const TestDbName = "t_slashprot_migration"
suiteReport "Slashing Protection DB - v1 and v2 migration" & preset():
suite "Slashing Protection DB - v1 and v2 migration" & preset():
# https://eips.ethereum.org/EIPS/eip-3076
sqlite3db_delete(TestDir, TestDbName)

View File

@ -7,7 +7,7 @@
import
# Standard library
std/[unittest, os],
std/[os],
# Status lib
stew/[results, byteutils],
nimcrypto/utils,
@ -16,7 +16,7 @@ import
../../beacon_chain/validators/slashing_protection,
../../beacon_chain/spec/[datatypes, digest, crypto, presets],
# Test utilies
../testutil,
../testutil, ../testdbutil,
../official/fixtures_utils
type
@ -141,7 +141,7 @@ proc runTest(identifier: string) =
let testCase = InterchangeTestsDir / identifier
timedTest "Slashing test: " & identifier:
test "Slashing test: " & identifier:
let t = parseTest(InterchangeTestsDir/identifier, Json, TestInterchange)
# Create a test specific DB
@ -211,6 +211,6 @@ proc runTest(identifier: string) =
sqlite3db_delete(TestDir, dbname)
suiteReport "Slashing Interchange tests " & preset():
suite "Slashing Interchange tests " & preset():
for kind, path in walkDir(InterchangeTestsDir, true):
runTest(path)

View File

@ -9,7 +9,7 @@
import
# Standard library
std/[unittest, os],
std/[os],
# Status lib
eth/db/kvstore,
stew/results,
@ -20,14 +20,6 @@ import
# Test utilies
../testutil
template wrappedTimedTest(name: string, body: untyped) =
# `check` macro takes a copy of whatever it's checking, on the stack!
block: # Symbol namespacing
proc wrappedTest() =
timedTest name:
body
wrappedTest()
func fakeRoot(index: SomeInteger): Eth2Digest =
## Create fake roots
## Those are just the value serialized in big-endian
@ -50,12 +42,12 @@ proc sqlite3db_delete(basepath, dbname: string) =
const TestDir = ""
const TestDbName = "test_slashprot"
suiteReport "Slashing Protection DB - Interchange" & preset():
suite "Slashing Protection DB - Interchange" & preset():
# https://hackmd.io/@sproul/Bk0Y0qdGD#Format-1-Complete
# https://eips.ethereum.org/EIPS/eip-3076
sqlite3db_delete(TestDir, TestDbName)
wrappedTimedTest "Smoke test - Complete format" & preset():
test "Smoke test - Complete format" & preset():
let genesis_validators_root = hexToDigest"0x04700007fabc8282644aed6d1c7c9e21d38a03a0c4ba193f3afe428824b3a673"
block: # export
let db = SlashingProtectionDB.init(
@ -122,7 +114,7 @@ suiteReport "Slashing Protection DB - Interchange" & preset():
doAssert siSuccess == db3.importSlashingInterchange(currentSourcePath.parentDir/"test_complete_export_slashing_protection.json")
db3.exportSlashingInterchange(currentSourcePath.parentDir/"test_complete_export_slashing_protection_roundtrip2.json")
wrappedTimedTest "Smoke test - Complete format - Invalid database is refused" & preset():
test "Smoke test - Complete format - Invalid database is refused" & preset():
block: # import - invalid root db
let invalid_genvalroot = hexToDigest"0x1234"
let db4 = SlashingProtectionDB.init(

View File

@ -9,7 +9,7 @@
import
# Standard library
std/[unittest, os],
std/[os],
# Status lib
eth/db/kvstore,
stew/results,
@ -23,7 +23,7 @@ template wrappedTimedTest(name: string, body: untyped) =
# `check` macro takes a copy of whatever it's checking, on the stack!
block: # Symbol namespacing
proc wrappedTest() =
timedTest name:
test name:
body
wrappedTest()
@ -56,7 +56,7 @@ const TestDbName = "test_slashprot"
# - block_root is unique
# - (validator_id, slot)
suiteReport "Slashing Protection DB" & preset():
suite "Slashing Protection DB" & preset():
wrappedTimedTest "Empty database" & preset():
sqlite3db_delete(TestDir, TestDbName)
let db = SlashingProtectionDB.init(

View File

@ -12,8 +12,6 @@
{.used.}
import
# Standard library
unittest,
stew/results,
# Specs
../../beacon_chain/spec/[beaconstate, datatypes, helpers],
@ -21,7 +19,7 @@ import
../mocking/[mock_genesis, mock_attestations, mock_state],
../testutil
suiteReport "[Unit - Spec - Block processing] Attestations " & preset():
suite "[Unit - Spec - Block processing] Attestations " & preset():
const NumValidators = uint64(8) * SLOTS_PER_EPOCH
let genesisState = newClone(initGenesisState(NumValidators))
@ -33,7 +31,7 @@ suiteReport "[Unit - Spec - Block processing] Attestations " & preset():
# The BeaconState is exposed as "state" in the calling context
# The attestation to process must be named "attestation" in the calling context
timedTest name:
test name:
var state {.inject.} = newClone(genesisState[])
# Attestation setup body

View File

@ -14,7 +14,7 @@
import
# Standard library
unittest, math,
std/math,
# Specs
../../beacon_chain/spec/[beaconstate, datatypes, crypto, presets],
# Internals
@ -24,14 +24,14 @@ import
../mocking/[mock_deposits, mock_genesis],
../testutil, ../helpers/math_helpers
suiteReport "[Unit - Spec - Block processing] Deposits " & preset():
suite "[Unit - Spec - Block processing] Deposits " & preset():
const NumValidators = uint64 5 * SLOTS_PER_EPOCH
let genesisState = newClone(initGenesisState(NumValidators))
doAssert genesisState.data.validators.lenu64 == NumValidators
template valid_deposit(deposit_amount: uint64, name: string): untyped =
timedTest "Deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
test "Deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
$(MAX_EFFECTIVE_BALANCE div 10'u64^9) & " ETH)":
var state = assignClone(genesisState[])
@ -73,7 +73,7 @@ suiteReport "[Unit - Spec - Block processing] Deposits " & preset():
valid_deposit(MAX_EFFECTIVE_BALANCE, "at")
valid_deposit(MAX_EFFECTIVE_BALANCE + 1, "over")
timedTest "Validator top-up":
test "Validator top-up":
var state = assignClone(genesisState[])
# Test configuration
@ -112,7 +112,7 @@ suiteReport "[Unit - Spec - Block processing] Deposits " & preset():
)
template invalid_signature(deposit_amount: uint64, name: string): untyped =
timedTest "Invalid deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
test "Invalid deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
$(MAX_EFFECTIVE_BALANCE div 10'u64^9) & " ETH)":
var state = assignClone(genesisState[])

View File

@ -9,7 +9,6 @@
import
# Standard library
unittest,
# Vendored packages
stew/bitops2,
# Specs
@ -213,7 +212,7 @@ proc finalizeOn12(state: var HashedBeaconState, epoch: Epoch, sufficient_support
doAssert state.data.finalized_checkpoint == old_finalized # no new finalized checkpoint
proc payload =
suiteReport "[Unit - Spec - Epoch processing] Justification and Finalization " & preset():
suite "[Unit - Spec - Epoch processing] Justification and Finalization " & preset():
echo " Finalization rules are detailed at https://github.com/protolambda/eth2-docs#justification-and-finalization"
const NumValidators = uint64(8) * SLOTS_PER_EPOCH
@ -223,28 +222,28 @@ proc payload =
setup:
var state = assignClone(genesisState[])
timedTest " Rule I - 234 finalization with enough support":
test " Rule I - 234 finalization with enough support":
finalizeOn234(state[], Epoch 5, sufficient_support = true)
timedTest " Rule I - 234 finalization without support":
test " Rule I - 234 finalization without support":
finalizeOn234(state[], Epoch 5, sufficient_support = false)
timedTest " Rule II - 23 finalization with enough support":
test " Rule II - 23 finalization with enough support":
finalizeOn23(state[], Epoch 4, sufficient_support = true)
timedTest " Rule II - 23 finalization without support":
test " Rule II - 23 finalization without support":
finalizeOn23(state[], Epoch 4, sufficient_support = false)
timedTest " Rule III - 123 finalization with enough support":
test " Rule III - 123 finalization with enough support":
finalizeOn123(state[], Epoch 6, sufficient_support = true)
timedTest " Rule III - 123 finalization without support":
test " Rule III - 123 finalization without support":
finalizeOn123(state[], Epoch 6, sufficient_support = false)
timedTest " Rule IV - 12 finalization with enough support":
test " Rule IV - 12 finalization with enough support":
finalizeOn12(state[], Epoch 3, sufficient_support = true)
timedTest " Rule IV - 12 finalization without support":
test " Rule IV - 12 finalization without support":
finalizeOn12(state[], Epoch 3, sufficient_support = false)
payload()

View File

@ -24,7 +24,7 @@ import
../beacon_chain/spec/[crypto, datatypes, digest, validator, state_transition,
helpers, beaconstate, presets],
# Test utilities
./testutil, ./testblockutil
./testutil, ./testdbutil, ./testblockutil
func combine(tgt: var Attestation, src: Attestation) =
## Combine the signature and participation bitfield, with the assumption that
@ -53,7 +53,7 @@ proc pruneAtFinalization(dag: ChainDAGRef, attPool: AttestationPool) =
dag.pruneStateCachesDAG()
# pool[].prune() # We test logic without attestation pool / fork choice pruning
suiteReport "Attestation pool processing" & preset():
suite "Attestation pool processing" & preset():
## For now just test that we can compile and execute block processing with
## mock data.
@ -69,7 +69,7 @@ suiteReport "Attestation pool processing" & preset():
check:
process_slots(state.data, getStateField(state, slot) + 1, cache)
timedTest "Can add and retrieve simple attestations" & preset():
test "Can add and retrieve simple attestations" & preset():
let
# Create an attestation for slot 1!
bc0 = get_beacon_committee(
@ -174,7 +174,7 @@ suiteReport "Attestation pool processing" & preset():
pool[].addAttestation(
att4, @[bc1[2]], att3.loadSig, att3.data.slot)
timedTest "Working with aggregates" & preset():
test "Working with aggregates" & preset():
let
# Create an attestation for slot 1!
bc0 = get_beacon_committee(
@ -226,7 +226,7 @@ suiteReport "Attestation pool processing" & preset():
attestations[0].aggregation_bits.countOnes() == 4
pool[].getAggregatedAttestation(1.Slot, 0.CommitteeIndex).isSome()
timedTest "Everyone voting for something different" & preset():
test "Everyone voting for something different" & preset():
var attestations: int
for i in 0..<SLOTS_PER_EPOCH:
var root: Eth2Digest
@ -253,7 +253,7 @@ suiteReport "Attestation pool processing" & preset():
pool[].getAggregatedAttestation(
getStateField(state, slot) - 1, 0.CommitteeIndex).isSome()
timedTest "Attestations may arrive in any order" & preset():
test "Attestations may arrive in any order" & preset():
var cache = StateCache()
let
# Create an attestation for slot 1!
@ -285,7 +285,7 @@ suiteReport "Attestation pool processing" & preset():
check:
attestations.len == 1
timedTest "Attestations should be combined" & preset():
test "Attestations should be combined" & preset():
var cache = StateCache()
let
# Create an attestation for slot 1!
@ -309,7 +309,7 @@ suiteReport "Attestation pool processing" & preset():
check:
attestations.len == 1
timedTest "Attestations may overlap, bigger first" & preset():
test "Attestations may overlap, bigger first" & preset():
var cache = StateCache()
var
@ -336,7 +336,7 @@ suiteReport "Attestation pool processing" & preset():
check:
attestations.len == 1
timedTest "Attestations may overlap, smaller first" & preset():
test "Attestations may overlap, smaller first" & preset():
var cache = StateCache()
var
# Create an attestation for slot 1!
@ -362,7 +362,7 @@ suiteReport "Attestation pool processing" & preset():
check:
attestations.len == 1
timedTest "Fork choice returns latest block with no attestations":
test "Fork choice returns latest block with no attestations":
var cache = StateCache()
let
b1 = addTestBlock(state.data, chainDag.tail.root, cache)
@ -390,7 +390,7 @@ suiteReport "Attestation pool processing" & preset():
check:
head2 == b2Add[]
timedTest "Fork choice returns block with attestation":
test "Fork choice returns block with attestation":
var cache = StateCache()
let
b10 = makeTestBlock(state.data, chainDag.tail.root, cache)
@ -450,7 +450,7 @@ suiteReport "Attestation pool processing" & preset():
# Two votes for b11
head4 == b11Add[]
timedTest "Trying to add a block twice tags the second as an error":
test "Trying to add a block twice tags the second as an error":
var cache = StateCache()
let
b10 = makeTestBlock(state.data, chainDag.tail.root, cache)
@ -476,7 +476,7 @@ suiteReport "Attestation pool processing" & preset():
doAssert: b10Add_clone.error == (ValidationResult.Ignore, Duplicate)
timedTest "Trying to add a duplicate block from an old pruned epoch is tagged as an error":
test "Trying to add a duplicate block from an old pruned epoch is tagged as an error":
# Note: very sensitive to stack usage
chainDag.updateFlags.incl {skipBLSValidation}

View File

@ -7,14 +7,16 @@
{.used.}
import algorithm, options, sequtils, unittest,
import
std/[algorithm, options, sequtils],
unittest2,
../beacon_chain/[beacon_chain_db, extras, interop, ssz],
../beacon_chain/spec/[
beaconstate, datatypes, digest, crypto, state_transition, presets],
../beacon_chain/consensus_object_pools/blockchain_dag,
eth/db/kvstore,
# test utilies
./testutil, ./testblockutil, ./teststateutil
./testutil, ./testdbutil, ./testblockutil, ./teststateutil
when isMainModule:
import chronicles # or some random compile error happens...
@ -25,29 +27,21 @@ proc getStateRef(db: BeaconChainDB, root: Eth2Digest): NilableBeaconStateRef =
if db.getState(root, res[], noRollback):
return res
template wrappedTimedTest(name: string, body: untyped) =
# `check` macro takes a copy of whatever it's checking, on the stack!
block: # Symbol namespacing
proc wrappedTest() =
timedTest name:
body
wrappedTest()
func withDigest(blck: TrustedBeaconBlock): TrustedSignedBeaconBlock =
TrustedSignedBeaconBlock(
message: blck,
root: hash_tree_root(blck)
)
suiteReport "Beacon chain DB" & preset():
wrappedTimedTest "empty database" & preset():
suite "Beacon chain DB" & preset():
test "empty database" & preset():
var
db = BeaconChainDB.new(defaultRuntimePreset, "", inMemory = true)
check:
db.getStateRef(Eth2Digest()).isNil
db.getBlock(Eth2Digest()).isNone
wrappedTimedTest "sanity check blocks" & preset():
test "sanity check blocks" & preset():
var
db = BeaconChainDB.new(defaultRuntimePreset, "", inMemory = true)
@ -72,7 +66,7 @@ suiteReport "Beacon chain DB" & preset():
db.close()
wrappedTimedTest "sanity check states" & preset():
test "sanity check states" & preset():
var
db = makeTestDB(SLOTS_PER_EPOCH)
dag = init(ChainDAGRef, defaultRuntimePreset, db)
@ -95,7 +89,7 @@ suiteReport "Beacon chain DB" & preset():
db.close()
wrappedTimedTest "sanity check states, reusing buffers" & preset():
test "sanity check states, reusing buffers" & preset():
var
db = makeTestDB(SLOTS_PER_EPOCH)
dag = init(ChainDAGRef, defaultRuntimePreset, db)
@ -121,7 +115,7 @@ suiteReport "Beacon chain DB" & preset():
db.close()
wrappedTimedTest "sanity check full states" & preset():
test "sanity check full states" & preset():
var
db = makeTestDB(SLOTS_PER_EPOCH)
dag = init(ChainDAGRef, defaultRuntimePreset, db)
@ -144,7 +138,7 @@ suiteReport "Beacon chain DB" & preset():
db.close()
wrappedTimedTest "sanity check full states, reusing buffers" & preset():
test "sanity check full states, reusing buffers" & preset():
var
db = makeTestDB(SLOTS_PER_EPOCH)
dag = init(ChainDAGRef, defaultRuntimePreset, db)
@ -170,7 +164,7 @@ suiteReport "Beacon chain DB" & preset():
db.close()
wrappedTimedTest "find ancestors" & preset():
test "find ancestors" & preset():
var
db = BeaconChainDB.new(defaultRuntimePreset, "", inMemory = true)
@ -212,7 +206,7 @@ suiteReport "Beacon chain DB" & preset():
doAssert toSeq(db.getAncestorSummaries(a0.root)).len == 1
doAssert toSeq(db.getAncestorSummaries(a2.root)).len == 3
wrappedTimedTest "sanity check genesis roundtrip" & preset():
test "sanity check genesis roundtrip" & preset():
# This is a really dumb way of checking that we can roundtrip a genesis
# state. We've been bit by this because we've had a bug in the BLS
# serialization where an all-zero default-initialized bls signature could
@ -237,7 +231,7 @@ suiteReport "Beacon chain DB" & preset():
check:
hash_tree_root(state2[]) == root
wrappedTimedTest "sanity check state diff roundtrip" & preset():
test "sanity check state diff roundtrip" & preset():
var
db = BeaconChainDB.new(defaultRuntimePreset, "", inMemory = true)

View File

@ -8,12 +8,12 @@
{.used.}
import
times, unittest,
./testutil, ./testblockutil,
../beacon_chain/spec/[beaconstate, datatypes, digest, presets]
unittest2,
../beacon_chain/spec/[beaconstate, datatypes, digest, presets],
./testutil, ./testblockutil
suiteReport "Beacon state" & preset():
timedTest "Smoke test initialize_beacon_state" & preset():
suite "Beacon state" & preset():
test "Smoke test initialize_beacon_state" & preset():
let state = initialize_beacon_state(
defaultRuntimePreset, Eth2Digest(), 0, makeInitialDeposits(SLOTS_PER_EPOCH, {}), {})
check: state.validators.lenu64 == SLOTS_PER_EPOCH

View File

@ -1,8 +1,10 @@
{.used.}
import
unittest, strformat,
../beacon_chain/ssz/bitseqs
unittest2,
strformat,
../beacon_chain/ssz/bitseqs,
./testutil
suite "Bit fields":
test "roundtrips":

View File

@ -8,15 +8,16 @@
{.used.}
import
std/[options, sequtils, unittest],
std/[options, sequtils],
unittest2,
stew/assign2,
eth/keys,
./testutil, ./testblockutil,
../beacon_chain/spec/[datatypes, digest, helpers, state_transition, presets],
../beacon_chain/beacon_node_types,
../beacon_chain/ssz,
../beacon_chain/consensus_object_pools/[
blockchain_dag, block_quarantine, block_clearance, statedata_helpers]
blockchain_dag, block_quarantine, block_clearance, statedata_helpers],
./testutil, ./testdbutil, ./testblockutil
when isMainModule:
import chronicles # or some random compile error happens...
@ -24,22 +25,12 @@ when isMainModule:
proc `$`(x: BlockRef): string =
$x.root
template wrappedTimedTest(name: string, body: untyped) =
# `check` macro takes a copy of whatever it's checking, on the stack!
# This leads to stack overflow
# We can mitigate that by wrapping checks in proc
block: # Symbol namespacing
proc wrappedTest() =
timedTest name:
body
wrappedTest()
proc pruneAtFinalization(dag: ChainDAGRef) =
if dag.needStateCachesAndForkChoicePruning():
dag.pruneStateCachesDAG()
suiteReport "BlockRef and helpers" & preset():
wrappedTimedTest "isAncestorOf sanity" & preset():
suite "BlockRef and helpers" & preset():
test "isAncestorOf sanity" & preset():
let
s0 = BlockRef(slot: Slot(0))
s1 = BlockRef(slot: Slot(1), parent: s0)
@ -56,7 +47,7 @@ suiteReport "BlockRef and helpers" & preset():
not s2.isAncestorOf(s1)
not s1.isAncestorOf(s0)
wrappedTimedTest "get_ancestor sanity" & preset():
test "get_ancestor sanity" & preset():
let
s0 = BlockRef(slot: Slot(0))
s1 = BlockRef(slot: Slot(1), parent: s0)
@ -76,7 +67,7 @@ suiteReport "BlockRef and helpers" & preset():
s4.get_ancestor(Slot(3)) == s2
s4.get_ancestor(Slot(4)) == s4
wrappedTimedTest "epochAncestor sanity" & preset():
test "epochAncestor sanity" & preset():
let
s0 = BlockRef(slot: Slot(0))
var cur = s0
@ -92,8 +83,8 @@ suiteReport "BlockRef and helpers" & preset():
ancestor.blck.epochAncestor(cur.slot.epoch) == ancestor
ancestor.blck.epochAncestor(ancestor.blck.slot.epoch) != ancestor
suiteReport "BlockSlot and helpers" & preset():
wrappedTimedTest "atSlot sanity" & preset():
suite "BlockSlot and helpers" & preset():
test "atSlot sanity" & preset():
let
s0 = BlockRef(slot: Slot(0))
s1 = BlockRef(slot: Slot(1), parent: s0)
@ -107,7 +98,7 @@ suiteReport "BlockSlot and helpers" & preset():
s4.atSlot(Slot(0)).blck == s0
wrappedTimedTest "parent sanity" & preset():
test "parent sanity" & preset():
let
s0 = BlockRef(slot: Slot(0))
s00 = BlockSlot(blck: s0, slot: Slot(0))
@ -125,7 +116,7 @@ suiteReport "BlockSlot and helpers" & preset():
s24.parent == BlockSlot(blck: s2, slot: Slot(3))
s24.parent.parent == s22
suiteReport "Block pool processing" & preset():
suite "Block pool processing" & preset():
setup:
var
db = makeTestDB(SLOTS_PER_EPOCH)
@ -135,18 +126,18 @@ suiteReport "Block pool processing" & preset():
cache = StateCache()
b1 = addTestBlock(stateData.data, dag.tail.root, cache)
b2 = addTestBlock(stateData.data, b1.root, cache)
wrappedTimedTest "getRef returns nil for missing blocks":
test "getRef returns nil for missing blocks":
check:
dag.getRef(default Eth2Digest) == nil
wrappedTimedTest "loading tail block works" & preset():
test "loading tail block works" & preset():
let
b0 = dag.get(dag.tail.root)
check:
b0.isSome()
wrappedTimedTest "Simple block add&get" & preset():
test "Simple block add&get" & preset():
let
b1Add = dag.addRawBlock(quarantine, b1, nil)
b1Get = dag.get(b1.root)
@ -226,7 +217,7 @@ suiteReport "Block pool processing" & preset():
dag.getBlockRange(Slot(3), 2, blocks.toOpenArray(0, 1)) == 2
blocks[2..<2].len == 0
wrappedTimedTest "Reverse order block add & get" & preset():
test "Reverse order block add & get" & preset():
let missing = dag.addRawBlock(quarantine, b2, nil)
check: missing.error == (ValidationResult.Ignore, MissingParent)
@ -269,7 +260,7 @@ suiteReport "Block pool processing" & preset():
dag2.heads.len == 1
dag2.heads[0].root == b2.root
wrappedTimedTest "Adding the same block twice returns a Duplicate error" & preset():
test "Adding the same block twice returns a Duplicate error" & preset():
let
b10 = dag.addRawBlock(quarantine, b1, nil)
b11 = dag.addRawBlock(quarantine, b1, nil)
@ -278,7 +269,7 @@ suiteReport "Block pool processing" & preset():
b11.error == (ValidationResult.Ignore, Duplicate)
not b10[].isNil
wrappedTimedTest "updateHead updates head and headState" & preset():
test "updateHead updates head and headState" & preset():
let
b1Add = dag.addRawBlock(quarantine, b1, nil)
@ -289,7 +280,7 @@ suiteReport "Block pool processing" & preset():
dag.head == b1Add[]
getStateField(dag.headState, slot) == b1Add[].slot
wrappedTimedTest "updateStateData sanity" & preset():
test "updateStateData sanity" & preset():
let
b1Add = dag.addRawBlock(quarantine, b1, nil)
b2Add = dag.addRawBlock(quarantine, b2, nil)
@ -338,7 +329,7 @@ suiteReport "Block pool processing" & preset():
tmpState.blck == b1Add[].parent
getStateField(tmpState, slot) == bs1.parent.slot
suiteReport "chain DAG finalization tests" & preset():
suite "chain DAG finalization tests" & preset():
setup:
var
db = makeTestDB(SLOTS_PER_EPOCH)
@ -346,7 +337,7 @@ suiteReport "chain DAG finalization tests" & preset():
quarantine = QuarantineRef.init(keys.newRng())
cache = StateCache()
wrappedTimedTest "prune heads on finalization" & preset():
test "prune heads on finalization" & preset():
# Create a fork that will not be taken
var
blck = makeTestBlock(dag.headState.data, dag.head.root, cache)
@ -434,7 +425,7 @@ suiteReport "chain DAG finalization tests" & preset():
dag2.finalizedHead.slot == dag.finalizedHead.slot
hash_tree_root(dag2.headState) == hash_tree_root(dag.headState)
wrappedTimedTest "orphaned epoch block" & preset():
test "orphaned epoch block" & preset():
var prestate = (ref HashedBeaconState)()
for i in 0 ..< SLOTS_PER_EPOCH:
if i == SLOTS_PER_EPOCH - 1:
@ -470,7 +461,7 @@ suiteReport "chain DAG finalization tests" & preset():
let added2 = dag2.addRawBlock(quarantine, blck, nil)
check: added2.isOk()
suiteReport "chain DAG finalization tests" & preset():
suite "chain DAG finalization tests" & preset():
setup:
var
db = makeTestDB(SLOTS_PER_EPOCH)
@ -478,7 +469,7 @@ suiteReport "chain DAG finalization tests" & preset():
quarantine = QuarantineRef.init(keys.newRng())
cache = StateCache()
timedTest "init with gaps" & preset():
test "init with gaps" & preset():
for blck in makeTestBlocks(
dag.headState.data, dag.head.root, cache, int(SLOTS_PER_EPOCH * 6 - 2),
true):

View File

@ -8,12 +8,13 @@
{.used.}
import
unittest, typetraits,
std/typetraits,
unittest2,
../beacon_chain/spec/datatypes,
./testutil
suiteReport "Spec datatypes":
timedTest "Graffiti bytes":
suite "Spec datatypes":
test "Graffiti bytes":
var
g1 = GraffitiBytes.init "Hello"
g2 = default(GraffitiBytes)
@ -28,4 +29,3 @@ suiteReport "Spec datatypes":
g2 == GraffitiBytes.init("")
g3 == GraffitiBytes.init($g3)

View File

@ -1,16 +1,16 @@
{.used.}
import
std/unittest,
unittest2,
chronos, stew/shims/net, eth/keys, eth/p2p/discoveryv5/enr,
../beacon_chain/conf,
../beacon_chain/spec/datatypes,
../beacon_chain/networking/[eth2_network, eth2_discovery],
./testutil
template timedAsyncTest*(name, body: untyped) =
timedTest name:
proc scenario {.async.} = body
template asyncTest*(name, body: untyped) =
test name:
proc scenario {.async.} = {.gcsafe.}: body
waitFor scenario()
proc new*(T: type Eth2DiscoveryProtocol,
@ -30,7 +30,7 @@ proc generateNode(rng: ref BrHmacDrbgContext, port: Port,
Eth2DiscoveryProtocol.new(keys.PrivateKey.random(rng[]),
some(ip), some(port), some(port), port, ip, enrFields, rng = rng)
suiteReport "Eth2 specific discovery tests":
suite "Eth2 specific discovery tests":
let
rng = keys.newRng()
enrForkId = ENRForkID(
@ -38,7 +38,7 @@ suiteReport "Eth2 specific discovery tests":
next_fork_version: Version([byte 0, 0, 0, 0]),
next_fork_epoch: Epoch(0))
timedAsyncTest "Subnet query":
asyncTest "Subnet query":
var attnets: BitArray[ATTESTATION_SUBNET_COUNT]
attnets.setBit(34)
@ -63,7 +63,7 @@ suiteReport "Eth2 specific discovery tests":
await node1.closeWait()
await node2.closeWait()
timedAsyncTest "Invalid attnets field":
asyncTest "Invalid attnets field":
var invalidAttnets: BitArray[ATTESTATION_SUBNET_COUNT div 2]
invalidAttnets.setBit(15)
# TODO: This doesn't fail actually.
@ -97,7 +97,7 @@ suiteReport "Eth2 specific discovery tests":
await node2.closeWait()
await node3.closeWait()
timedAsyncTest "Subnet query after ENR update":
asyncTest "Subnet query after ENR update":
var attnets: BitArray[ATTESTATION_SUBNET_COUNT]
attnets.setBit(1)

View File

@ -1,9 +1,10 @@
{.used.}
import
unittest,
unittest2,
chronos, web3/ethtypes,
../beacon_chain/eth1/eth1_monitor
../beacon_chain/eth1/eth1_monitor,
./testutil
suite "Eth1 Chain":
discard

View File

@ -7,21 +7,21 @@
{.used.}
import std/unittest
import chronicles, chronos, testutil
import chronicles, chronos
import eth/keys
import ../beacon_chain/spec/[datatypes, presets]
import ../beacon_chain/consensus_object_pools/[block_quarantine, blockchain_dag, exit_pool]
import "."/[testutil, testdbutil]
proc getExitPool(): auto =
let chainDag =
init(ChainDAGRef, defaultRuntimePreset, makeTestDB(SLOTS_PER_EPOCH * 3))
newClone(ExitPool.init(chainDag, QuarantineRef.init(keys.newRng())))
suiteReport "Exit pool testing suite":
suite "Exit pool testing suite":
setup:
let pool = getExitPool()
timedTest "addExitMessage/getProposerSlashingMessage":
test "addExitMessage/getProposerSlashingMessage":
for i in 0'u64 .. MAX_PROPOSER_SLASHINGS + 5:
for j in 0'u64 .. i:
pool.proposer_slashings.addExitMessage(
@ -31,7 +31,7 @@ suiteReport "Exit pool testing suite":
min(i + 1, MAX_PROPOSER_SLASHINGS)
pool[].getProposerSlashingsForBlock().len == 0
timedTest "addExitMessage/getAttesterSlashingMessage":
test "addExitMessage/getAttesterSlashingMessage":
for i in 0'u64 .. MAX_ATTESTER_SLASHINGS + 5:
for j in 0'u64 .. i:
pool.attester_slashings.addExitMessage(
@ -46,7 +46,7 @@ suiteReport "Exit pool testing suite":
min(i + 1, MAX_ATTESTER_SLASHINGS)
pool[].getAttesterSlashingsForBlock().len == 0
timedTest "addExitMessage/getVoluntaryExitMessage":
test "addExitMessage/getVoluntaryExitMessage":
for i in 0'u64 .. MAX_VOLUNTARY_EXITS + 5:
for j in 0'u64 .. i:
pool.voluntary_exits.addExitMessage(

View File

@ -22,14 +22,14 @@ import
../beacon_chain/spec/[crypto, datatypes, digest, validator, state_transition,
helpers, presets, network],
# Test utilities
./testutil, ./testblockutil
./testutil, ./testdbutil, ./testblockutil
proc pruneAtFinalization(dag: ChainDAGRef, attPool: AttestationPool) =
if dag.needStateCachesAndForkChoicePruning():
dag.pruneStateCachesDAG()
# pool[].prune() # We test logic without att_1_0 pool / fork choice pruning
suiteReport "Gossip validation " & preset():
suite "Gossip validation " & preset():
setup:
# Genesis state that results in 3 members per committee
var
@ -43,7 +43,7 @@ suiteReport "Gossip validation " & preset():
check:
process_slots(state.data, getStateField(state, slot) + 1, cache)
timedTest "Validation sanity":
test "Validation sanity":
# TODO: refactor tests to avoid skipping BLS validation
chainDag.updateFlags.incl {skipBLSValidation}

View File

@ -8,11 +8,11 @@
{.used.}
import
unittest, ./testutil,
./unittest2,
../beacon_chain/spec/[helpers]
suiteReport "Spec helpers":
timedTest "integer_squareroot":
suite "Spec helpers":
test "integer_squareroot":
check:
integer_squareroot(0'u64) == 0'u64
integer_squareroot(1'u64) == 1'u64

View File

@ -1,14 +1,15 @@
{.used.}
import
unittest, ./testutil,
unittest2,
./testutil,
../beacon_chain/spec/[crypto, datatypes, network],
../beacon_chain/validators/attestation_aggregation
suiteReport "Honest validator":
suite "Honest validator":
var forkDigest: ForkDigest
timedTest "General pubsub topics":
test "General pubsub topics":
check:
getBeaconBlocksTopic(forkDigest) == "/eth2/00000000/beacon_block/ssz"
getVoluntaryExitsTopic(forkDigest) == "/eth2/00000000/voluntary_exit/ssz"
@ -16,7 +17,7 @@ suiteReport "Honest validator":
getAttesterSlashingsTopic(forkDigest) == "/eth2/00000000/attester_slashing/ssz"
getAggregateAndProofsTopic(forkDigest) == "/eth2/00000000/beacon_aggregate_and_proof/ssz"
timedTest "Mainnet attestation topics":
test "Mainnet attestation topics":
check:
getAttestationTopic(forkDigest, 0) ==
"/eth2/00000000/beacon_attestation_0/ssz"
@ -59,7 +60,7 @@ suiteReport "Honest validator":
getAttestationTopic(forkDigest, 63) ==
"/eth2/00000000/beacon_attestation_63/ssz"
timedTest "is_aggregator":
test "is_aggregator":
check:
not is_aggregator(146, ValidatorSig.fromHex(
"aa176502f0a5e954e4c6b452d0e11a03513c19b6d189f125f07b6c5c120df011c31da4c4a9c4a52a5a48fcba5b14d7b316b986a146187966d2341388bbf1f86c42e90553ba009ba10edc6b5544a6e945ce6d2419197f66ab2b9df2b0a0c89987")[])

View File

@ -1,7 +1,7 @@
{.used.}
import
unittest, stint, ./testutil, stew/byteutils,
stint, ./testutil, stew/byteutils,
../beacon_chain/interop,
../beacon_chain/ssz,
../beacon_chain/spec/[beaconstate, crypto, datatypes, presets]
@ -116,8 +116,8 @@ let depositsConfig = [
)
]
suiteReport "Interop":
timedTest "Mocked start private key":
suite "Interop":
test "Mocked start private key":
for i, k in privateKeys:
let
key = makeInteropPrivKey(i)
@ -127,7 +127,7 @@ suiteReport "Interop":
# getBytes is bigendian and returns full 48 bytes of key..
Uint256.fromBytesBE(key.toRaw()) == v
timedTest "Interop signatures":
test "Interop signatures":
for dep in depositsConfig:
let computed_sig = bls_sign(
privkey = dep.privkey,
@ -138,7 +138,7 @@ suiteReport "Interop":
# TODO re-enable
true or dep.sig == computed_sig.toValidatorSig()
timedTest "Interop genesis":
test "Interop genesis":
# Check against https://github.com/protolambda/zcli:
# zcli keys generate --to 64 | zcli genesis mock --genesis-time 1570500000 > /tmp/state.ssz
# zcli hash-tree-root state /tmp/state.ssz

View File

@ -8,7 +8,8 @@
{.used.}
import
json, unittest, typetraits,
std/[json, typetraits],
unittest2,
stew/byteutils, blscurve, eth/keys, json_serialization,
libp2p/crypto/crypto as lcrypto,
nimcrypto/utils as ncrutils,
@ -166,12 +167,12 @@ const
let
rng = keys.newRng()
suiteReport "KeyStorage testing suite":
suite "KeyStorage testing suite":
setup:
let secret = ValidatorPrivKey.fromRaw(secretBytes).get
let nsecret = init(lcrypto.PrivateKey, secretNetBytes).get
timedTest "[PBKDF2] Keystore decryption":
test "[PBKDF2] Keystore decryption":
let
keystore = Json.decode(pbkdf2Vector, Keystore)
decrypt = decryptKeystore(keystore, KeystorePass.init password)
@ -179,7 +180,7 @@ suiteReport "KeyStorage testing suite":
check decrypt.isOk
check secret.isEqual(decrypt.get())
timedTest "[SCRYPT] Keystore decryption":
test "[SCRYPT] Keystore decryption":
let
keystore = Json.decode(scryptVector, Keystore)
decrypt = decryptKeystore(keystore, KeystorePass.init password)
@ -187,7 +188,7 @@ suiteReport "KeyStorage testing suite":
check decrypt.isOk
check secret.isEqual(decrypt.get())
timedTest "[PBKDF2] Network Keystore decryption":
test "[PBKDF2] Network Keystore decryption":
let
keystore = Json.decode(pbkdf2NetVector, NetKeystore)
decrypt = decryptNetKeystore(keystore, KeystorePass.init password)
@ -195,7 +196,7 @@ suiteReport "KeyStorage testing suite":
check decrypt.isOk
check nsecret == decrypt.get()
timedTest "[SCRYPT] Network Keystore decryption":
test "[SCRYPT] Network Keystore decryption":
let
keystore = Json.decode(scryptNetVector, NetKeystore)
decrypt = decryptNetKeystore(keystore, KeystorePass.init password)
@ -203,7 +204,7 @@ suiteReport "KeyStorage testing suite":
check decrypt.isOk
check nsecret == decrypt.get()
timedTest "[PBKDF2] Keystore encryption":
test "[PBKDF2] Keystore encryption":
let keystore = createKeystore(kdfPbkdf2, rng[], secret,
KeystorePass.init password,
salt=salt, iv=iv,
@ -217,7 +218,7 @@ suiteReport "KeyStorage testing suite":
check encryptJson == pbkdf2Json
timedTest "[PBKDF2] Network Keystore encryption":
test "[PBKDF2] Network Keystore encryption":
let nkeystore = createNetKeystore(kdfPbkdf2, rng[], nsecret,
KeystorePass.init password,
salt = salt, iv = iv,
@ -230,7 +231,7 @@ suiteReport "KeyStorage testing suite":
pbkdf2Json{"uuid"} = %""
check encryptJson == pbkdf2Json
timedTest "[SCRYPT] Keystore encryption":
test "[SCRYPT] Keystore encryption":
let keystore = createKeystore(kdfScrypt, rng[], secret,
KeystorePass.init password,
salt=salt, iv=iv,
@ -244,7 +245,7 @@ suiteReport "KeyStorage testing suite":
check encryptJson == scryptJson
timedTest "[SCRYPT] Network Keystore encryption":
test "[SCRYPT] Network Keystore encryption":
let nkeystore = createNetKeystore(kdfScrypt, rng[], nsecret,
KeystorePass.init password,
salt = salt, iv = iv,
@ -257,7 +258,7 @@ suiteReport "KeyStorage testing suite":
pbkdf2Json{"uuid"} = %""
check encryptJson == pbkdf2Json
timedTest "Pbkdf2 errors":
test "Pbkdf2 errors":
expect Defect:
echo createKeystore(kdfPbkdf2, rng[], secret, salt = [byte 1])

View File

@ -7,10 +7,10 @@
{.used.}
import std/[unittest, random, heapqueue, tables]
import std/[random, heapqueue, tables]
import chronos
import ../beacon_chain/networking/peer_pool
import testutil
import ./testutil
type
PeerTestID* = string
@ -35,8 +35,8 @@ proc init*(t: typedesc[PeerTest], id: string = "",
proc close*(peer: PeerTest) =
peer.future.complete()
suiteReport "PeerPool testing suite":
timedTest "addPeerNoWait() test":
suite "PeerPool testing suite":
test "addPeerNoWait() test":
const peersCount = [
[10, 5, 5, 10, 5, 5],
[-1, 5, 5, 10, 5, 5],
@ -66,7 +66,7 @@ suiteReport "PeerPool testing suite":
pool.lenAvailable({PeerType.Incoming}) == item[4]
pool.lenAvailable({PeerType.Outgoing}) == item[5]
timedTest "addPeer() test":
test "addPeer() test":
proc testAddPeer1(): Future[bool] {.async.} =
var pool = newPeerPool[PeerTest, PeerTestID](maxPeers = 1,
maxIncomingPeers = 1,
@ -185,7 +185,7 @@ suiteReport "PeerPool testing suite":
waitFor(testAddPeer3()) == true
waitFor(testAddPeer4()) == true
timedTest "Acquire from empty pool":
test "Acquire from empty pool":
var pool0 = newPeerPool[PeerTest, PeerTestID]()
var pool1 = newPeerPool[PeerTest, PeerTestID]()
var pool2 = newPeerPool[PeerTest, PeerTestID]()
@ -237,7 +237,7 @@ suiteReport "PeerPool testing suite":
itemFut23.finished == false
itemFut24.finished == false
timedTest "Acquire/Sorting and consistency test": closureScope:
test "Acquire/Sorting and consistency test": closureScope:
const
TestsCount = 1000
MaxNumber = 1_000_000
@ -306,7 +306,7 @@ suiteReport "PeerPool testing suite":
check waitFor(testAcquireRelease()) == TestsCount
timedTest "deletePeer() test":
test "deletePeer() test":
proc testDeletePeer(): Future[bool] {.async.} =
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer = PeerTest.init("deletePeer")
@ -362,7 +362,7 @@ suiteReport "PeerPool testing suite":
result = true
check waitFor(testDeletePeer()) == true
timedTest "Peer lifetime test":
test "Peer lifetime test":
proc testPeerLifetime(): Future[bool] {.async.} =
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer = PeerTest.init("closingPeer")
@ -411,7 +411,7 @@ suiteReport "PeerPool testing suite":
check waitFor(testPeerLifetime()) == true
timedTest "Safe/Clear test": closureScope:
test "Safe/Clear test": closureScope:
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)
@ -458,7 +458,7 @@ suiteReport "PeerPool testing suite":
asyncCheck testConsumer()
check waitFor(testClose()) == true
timedTest "Access peers by key test": closureScope:
test "Access peers by key test": closureScope:
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)
@ -487,7 +487,7 @@ suiteReport "PeerPool testing suite":
ppeer[].weight = 100
check pool["peer1"].weight == 100
timedTest "Iterators test":
test "Iterators test":
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)
@ -587,7 +587,7 @@ suiteReport "PeerPool testing suite":
len(acqui2) == 2
len(acqui3) == 1
timedTest "Score check test":
test "Score check test":
var pool = newPeerPool[PeerTest, PeerTestID]()
proc scoreCheck(peer: PeerTest): bool =
if peer.weight >= 0:
@ -649,7 +649,7 @@ suiteReport "PeerPool testing suite":
lenAcquired(pool) == 0
len(pool) == 0
timedTest "Delete peer on release text":
test "Delete peer on release text":
proc testDeleteOnRelease(): Future[bool] {.async.} =
proc scoreCheck(peer: PeerTest): bool =
if peer.weight >= 0:
@ -683,7 +683,7 @@ suiteReport "PeerPool testing suite":
check waitFor(testDeleteOnRelease()) == true
timedTest "Space tests":
test "Space tests":
var pool1 = newPeerPool[PeerTest, PeerTestID](maxPeers = 79)
var pool2 = newPeerPool[PeerTest, PeerTestID](maxPeers = 79,
maxIncomingPeers = 39)

View File

@ -8,11 +8,10 @@
{.used.}
import
std/[unittest, options],
std/[options],
unittest2,
nimcrypto/hash,
json_serialization,
serialization/testing/generic_suite,
./testutil,
../beacon_chain/spec/[datatypes, digest],
../beacon_chain/ssz, ../beacon_chain/ssz/[navigator, dynamic_navigator]
@ -55,8 +54,6 @@ static:
doAssert fixedPortionSize(ObjWithFields) ==
1 + 4 + sizeof(array[20, byte]) + (256 div 8) + 4 + 8
executeRoundTripTests SSZ
type
Foo = object
bar: Bar
@ -73,8 +70,8 @@ type
proc toDigest[N: static int](x: array[N, byte]): Eth2Digest =
result.data[0 .. N-1] = x
suiteReport "SSZ navigator":
timedTest "simple object fields":
suite "SSZ navigator":
test "simple object fields":
var foo = Foo(bar: Bar(b: BarList @[1'u64, 2, 3], baz: Baz(i: 10'u64)))
let encoded = SSZ.encode(foo)
@ -86,7 +83,7 @@ suiteReport "SSZ navigator":
let mountedBar = mountedFoo.bar
check mountedBar.baz.i == 10'u64
timedTest "lists with max size":
test "lists with max size":
let a = [byte 0x01, 0x02, 0x03].toDigest
let b = [byte 0x04, 0x05, 0x06].toDigest
let c = [byte 0x07, 0x08, 0x09].toDigest
@ -147,15 +144,15 @@ suiteReport "SSZ navigator":
leaves3.add c
hash_tree_root(leaves3) == hash_tree_root(leaves3.data)
timedTest "basictype":
test "basictype":
var leaves = HashList[uint64, 1'i64 shl 3]()
while leaves.len < leaves.maxLen:
check:
leaves.add leaves.lenu64
hash_tree_root(leaves) == hash_tree_root(leaves.data)
suiteReport "SSZ dynamic navigator":
timedTest "navigating fields":
suite "SSZ dynamic navigator":
test "navigating fields":
var fooOrig = Foo(bar: Bar(b: BarList @[1'u64, 2, 3], baz: Baz(i: 10'u64)))
let fooEncoded = SSZ.encode(fooOrig)
@ -185,8 +182,8 @@ type
li: HashList[Eth2Digest, 8]
suiteReport "hash":
timedTest "HashArray":
suite "hash":
test "HashArray":
var
o = Obj()
ho = HashObj()
@ -217,7 +214,7 @@ suiteReport "hash":
y[i] = 42'u64
doAssert hash_tree_root(y) == hash_tree_root(y.data)
timedTest "HashList":
test "HashList":
type MyList = HashList[uint64, 1024]
var
small, large: MyList

View File

@ -7,13 +7,10 @@
{.used.}
import unittest, ./testutil
# this is not part of test_ssz because the roundtrip tests are incompatible
# with unittest2 as of writing
import
serialization/testing/generic_suite,
../beacon_chain/ssz
when false:
import ../beacon_chain/nimbus_beacon_node
suiteReport "Beacon node":
# Compile test
timedTest "Compile":
discard
executeRoundTripTests SSZ

View File

@ -8,13 +8,14 @@
{.used.}
import
unittest, chronicles,
chronicles,
unittest2,
./testutil, ./testblockutil,
../beacon_chain/spec/[beaconstate, datatypes, digest, crypto,
validator, state_transition, presets],
../beacon_chain/ssz
suiteReport "Block processing" & preset():
suite "Block processing" & preset():
## For now just test that we can compile and execute block processing with
## mock data.
@ -31,12 +32,12 @@ suiteReport "Block processing" & preset():
state = newClone(genesisState[])
cache = StateCache()
timedTest "Passes from genesis state, no block" & preset():
test "Passes from genesis state, no block" & preset():
check:
process_slots(state[], state.data.slot + 1, cache)
state.data.slot == genesisState.data.slot + 1
timedTest "Passes from genesis state, empty block" & preset():
test "Passes from genesis state, empty block" & preset():
var
previous_block_root = genesisBlock.root
new_block = makeTestBlock(state[], previous_block_root, cache)
@ -49,12 +50,12 @@ suiteReport "Block processing" & preset():
state.data.slot == genesisState.data.slot + 1
timedTest "Passes through epoch update, no block" & preset():
test "Passes through epoch update, no block" & preset():
check:
process_slots(state[], Slot(SLOTS_PER_EPOCH), cache)
state.data.slot == genesisState.data.slot + SLOTS_PER_EPOCH
timedTest "Passes through epoch update, empty block" & preset():
test "Passes through epoch update, empty block" & preset():
var
previous_block_root = genesisRoot
cache = StateCache()
@ -73,7 +74,7 @@ suiteReport "Block processing" & preset():
check:
state.data.slot == genesisState.data.slot + SLOTS_PER_EPOCH
timedTest "Attestation gets processed at epoch" & preset():
test "Attestation gets processed at epoch" & preset():
var
previous_block_root = genesisRoot
cache = StateCache()

View File

@ -8,8 +8,9 @@
{.used.}
import
options, sequtils, unittest,
./testutil, ./teststateutil,
options, sequtils,
unittest2,
./testutil, ./testdbutil, ./teststateutil,
../beacon_chain/spec/[datatypes, digest, helpers, presets],
../beacon_chain/[beacon_node_types, statediff],
../beacon_chain/ssz,
@ -18,23 +19,13 @@ import
when isMainModule:
import chronicles # or some random compile error happens...
template wrappedTimedTest(name: string, body: untyped) =
# `check` macro takes a copy of whatever it's checking, on the stack!
# This leads to stack overflow
# We can mitigate that by wrapping checks in proc
block: # Symbol namespacing
proc wrappedTest() =
timedTest name:
body
wrappedTest()
suiteReport "state diff tests" & preset():
suite "state diff tests" & preset():
setup:
var
db = makeTestDB(SLOTS_PER_EPOCH)
dag = init(ChainDAGRef, defaultRuntimePreset, db)
wrappedTimedTest "random slot differences" & preset():
test "random slot differences" & preset():
let testStates = getTestStates(dag.headState.data)
for i in 0 ..< testStates.len:

View File

@ -1,6 +1,6 @@
{.used.}
import unittest
import unittest2
import chronos
import ../beacon_chain/gossip_processing/gossip_to_consensus,
../beacon_chain/sync/sync_manager

View File

@ -7,13 +7,12 @@
{.used.}
import unittest, ./testutil
import unittest2
when false:
import ../beacon_chain/sync/sync_protocol
import ../beacon_chain/sync/sync_protocol
suiteReport "Sync protocol":
suite "Sync protocol":
# Compile test
timedTest "Compile":
test "Compile":
discard

View File

@ -8,7 +8,7 @@
{.used.}
import
unittest, ./testutil,
unittest2,
../beacon_chain/spec/[datatypes, crypto],
../beacon_chain/ssz
@ -17,7 +17,7 @@ import
# and https://github.com/ethereum/eth2.0-specs/issues/1396
# don't blow up.
suiteReport "Zero signature sanity checks":
suite "Zero signature sanity checks":
# Using signature directly triggers a bug
# in object_serialization/stew: https://github.com/status-im/nimbus-eth2/issues/396
@ -36,7 +36,7 @@ suiteReport "Zero signature sanity checks":
# check(zeroSIg == deserZeroSig)
timedTest "SSZ serialization roundtrip of SignedBeaconBlockHeader":
test "SSZ serialization roundtrip of SignedBeaconBlockHeader":
let defaultBlockHeader = SignedBeaconBlockHeader()

31
tests/testdbutil.nim Normal file
View File

@ -0,0 +1,31 @@
# beacon_chain
# Copyright (c) 2018-2019 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import
chronicles,
../beacon_chain/[beacon_chain_db, extras],
../beacon_chain/consensus_object_pools/blockchain_dag,
../beacon_chain/spec/[beaconstate, digest],
eth/db/[kvstore, kvstore_sqlite3],
./testblockutil
export beacon_chain_db, testblockutil, kvstore, kvstore_sqlite3
proc makeTestDB*(tailState: var BeaconState, tailBlock: SignedBeaconBlock): BeaconChainDB =
result = BeaconChainDB.new(defaultRuntimePreset, "", inMemory = true)
ChainDAGRef.preInit(result, tailState, tailState, tailBlock)
proc makeTestDB*(validators: Natural): BeaconChainDB =
let
genState = initialize_beacon_state(
defaultRuntimePreset,
Eth2Digest(),
0,
makeInitialDeposits(validators.uint64, flags = {skipBlsValidation}),
{skipBlsValidation})
genBlock = get_initial_beacon_block(genState[])
makeTestDB(genState[], genBlock)

View File

@ -5,17 +5,16 @@
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import
os, algorithm, strformat, stats, times, tables, std/monotimes, stew/endians2,
testutils/markdown_reports, chronicles,
../beacon_chain/[beacon_chain_db, extras],
../beacon_chain/ssz,
../beacon_chain/spec/[digest, beaconstate, datatypes, presets],
../beacon_chain/consensus_object_pools/blockchain_dag,
eth/db/[kvstore, kvstore_sqlite3],
testblockutil
{.push raises: [Defect].}
{.used.}
export beacon_chain_db
import
std/[algorithm, strformat, stats, tables, times],
testutils/markdown_reports,
unittest2,
../beacon_chain/spec/presets
export unittest2
type
TestDuration = tuple[duration: float, label: string]
@ -46,64 +45,38 @@ var testTimes: seq[TestDuration]
var status = initOrderedTable[string, OrderedTable[string, Status]]()
var last: string
type TimingCollector = ref object of OutputFormatter
func toFloatSeconds(duration: Duration): float =
duration.inNanoseconds().float / 1_000_000_000.0
method testEnded*(formatter: TimingCollector, testResult: TestResult) =
{.gcsafe.}: # Lie!
status.mGetOrPut(testResult.suiteName, initOrderedTable[string, Status]())[testResult.testName] =
case testResult.status
of TestStatus.OK: Status.OK
of TestStatus.FAILED: Status.Fail
of TestStatus.SKIPPED: Status.Skip
testTimes.add (testResult.duration.toFloatSeconds, testResult.testName)
proc summarizeLongTests*(name: string) =
# TODO clean-up and make machine-readable/storable the output
# TODO this is too hard-coded and mostly a demo for using the
# timedTest wrapper template for unittest
sort(testTimes, system.cmp, SortOrder.Descending)
echo ""
echo "10 longest individual test durations"
echo "------------------------------------"
for i, item in testTimes:
echo &"{item.duration:6.2f}s for {item.label}"
if i >= 10:
break
try:
echo ""
echo "10 longest individual test durations"
echo "------------------------------------"
for i, item in testTimes:
echo &"{item.duration:6.2f}s for {item.label}"
if i >= 10:
break
status.sort do (a: (string, OrderedTable[string, Status]),
b: (string, OrderedTable[string, Status])) -> int: cmp(a[0], b[0])
status.sort do (a: (string, OrderedTable[string, Status]),
b: (string, OrderedTable[string, Status])) -> int: cmp(a[0], b[0])
generateReport(name & "-" & const_preset, status, width=90)
generateReport(name & "-" & const_preset, status, width=90)
except CatchableError as exc:
raiseAssert exc.msg
template suiteReport*(name, body) =
last = name
status[last] = initOrderedTable[string, Status]()
block: # namespacing
proc runSuite() =
suite name:
body
runSuite()
template timedTest*(name, body) =
var f: float
test name:
status[last][name] = Status.Fail
withTimer f:
body
status[last][name] = case testStatusIMPL
of OK: Status.OK
of FAILED: Status.Fail
of SKIPPED: Status.Skip
# TODO reached for a failed test; maybe defer or similar
# TODO noto thread-safe as-is
testTimes.add (f, name)
proc makeTestDB*(tailState: var BeaconState, tailBlock: SignedBeaconBlock): BeaconChainDB =
result = BeaconChainDB.new(defaultRuntimePreset, "", inMemory = true)
ChainDAGRef.preInit(result, tailState, tailState, tailBlock)
proc makeTestDB*(validators: Natural): BeaconChainDB =
let
genState = initialize_beacon_state(
defaultRuntimePreset,
Eth2Digest(),
0,
makeInitialDeposits(validators.uint64, flags = {skipBlsValidation}),
{skipBlsValidation})
genBlock = get_initial_beacon_block(genState[])
makeTestDB(genState[], genBlock)
export inMicroseconds
addOutputFormatter(new TimingCollector)

@ -1 +1 @@
Subproject commit e788deab3d59ff8a4fe103aeb5d82d3d82fcac7d
Subproject commit c847fc3ec77fd2c41c8897b7722580d403f35cfb