measure/find slow tests (#624)

This commit is contained in:
Dustin Brody 2019-12-05 10:27:00 +00:00 committed by Mamy Ratsimbazafy
parent 5599c76d46
commit 570de0839d
33 changed files with 136 additions and 85 deletions

View File

@ -5,6 +5,8 @@
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import ./testutil
import # Official constants
./official/test_fixture_const_sanity_check
@ -35,3 +37,5 @@ import # Refactor state transition unit tests
# # https://github.com/status-im/nim-beacon-chain/issues/374
# ./official/test_fixture_shuffling,
# ./official/test_fixture_bls
summarizeLongTests()

View File

@ -9,6 +9,8 @@
# to ignore invalid BLS signature in EF test vectors
# https://github.com/status-im/nim-beacon-chain/issues/374
import ../testutil
import
./test_fixture_sanity_slots,
./test_fixture_sanity_blocks,
@ -19,3 +21,5 @@ import
./test_fixture_operations_block_header,
./test_fixture_operations_proposer_slashings,
./test_fixture_operations_voluntary_exit
summarizeLongTests()

View File

@ -50,13 +50,13 @@ proc readValue*(r: var JsonReader, a: var Domain) {.inline.} =
const BLSDir = JsonTestsDir/"general"/"phase0"/"bls"
suite "Official - BLS tests":
test "Private to public key conversion":
timedTest "Private to public key conversion":
for file in walkDirRec(BLSDir/"priv_to_pub"):
let t = parseTest(file, Json, BLSPrivToPub)
let implResult = t.input.pubkey()
check: implResult == t.output
test "Message signing":
timedTest "Message signing":
for file in walkDirRec(BLSDir/"sign_msg"):
let t = parseTest(file, Json, BLSSignMsg)
let implResult = t.input.privkey.bls_sign(
@ -65,13 +65,13 @@ suite "Official - BLS tests":
)
check: implResult == t.output
test "Aggregating signatures":
timedTest "Aggregating signatures":
for file in walkDirRec(BLSDir/"aggregate_sigs"):
let t = parseTest(file, Json, BLSAggSig)
let implResult = t.input.combine()
check: implResult == t.output
test "Aggregating public keys":
timedTest "Aggregating public keys":
for file in walkDirRec(BLSDir/"aggregate_pubkeys"):
let t = parseTest(file, Json, BLSAggPubKey)
let implResult = t.input.combine()

View File

@ -108,7 +108,7 @@ proc checkConfig() =
var config = yamlStream.loadToJson()
doAssert config.len == 1
for constant, value in config[0]:
test &"{constant:<50}{value:<20}{preset()}":
timedTest &"{constant:<50}{value:<20}{preset()}":
if constant in IgnoreKeys:
echo &" ↶↶ Skipping {constant}"
continue

View File

@ -39,7 +39,7 @@ template runTest(testName: string, identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & testName & " (" & astToStr(identifier) & ")":
timedTest prefix & testName & " (" & astToStr(identifier) & ")":
var stateRef, postRef: ref BeaconState
var attestationRef: ref Attestation
new attestationRef

View File

@ -39,7 +39,7 @@ template runTest(identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & astToStr(identifier):
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var attesterSlashingRef: ref AttesterSlashing
new attesterSlashingRef

View File

@ -39,7 +39,7 @@ template runTest(identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & astToStr(identifier):
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var blck: ref BeaconBlock
new blck

View File

@ -39,7 +39,7 @@ template runTest(testName: string, identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & testName & " (" & astToStr(identifier) & ")":
timedTest prefix & testName & " (" & astToStr(identifier) & ")":
var stateRef, postRef: ref BeaconState
var depositRef: ref Deposit
new depositRef

View File

@ -39,7 +39,7 @@ template runTest(identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & astToStr(identifier):
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var proposerSlashing: ref ProposerSlashing
new proposerSlashing

View File

@ -39,7 +39,7 @@ template runTest(identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & astToStr(identifier):
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var voluntaryExit: ref VoluntaryExit
new voluntaryExit

View File

@ -29,7 +29,7 @@ template runValidTest(testName: string, identifier: untyped, num_blocks: int): u
const testDir = SanityBlocksDir / astToStr(identifier)
proc `testImpl _ blck _ identifier`() =
test "[Valid] " & testName & " (" & astToStr(identifier) & ")":
timedTest "[Valid] " & testName & " (" & astToStr(identifier) & ")":
var stateRef, postRef: ref BeaconState
new stateRef
new postRef
@ -50,7 +50,7 @@ template runValidTest(testName: string, identifier: untyped, num_blocks: int): u
`testImpl _ blck _ identifier`()
suite "Official - Sanity - Blocks " & preset():
test "[Invalid] Previous slot block transition (prev_slot_block_transition)":
timedTest "[Invalid] Previous slot block transition (prev_slot_block_transition)":
const testDir = SanityBlocksDir/"prev_slot_block_transition"
var stateRef: ref BeaconState
new stateRef
@ -68,7 +68,7 @@ suite "Official - Sanity - Blocks " & preset():
runValidTest("Empty block transition", empty_block_transition, 1)
when false: # TODO: we need more granular skipValidation
test "[Invalid] Invalid state root":
timedTest "[Invalid] Invalid state root":
const testDir = SanityBlocksDir/"invalid_state_root"
var stateRef: ref BeaconState
new stateRef

View File

@ -29,7 +29,7 @@ template runTest(testName: string, identifier: untyped, num_slots: uint64): unty
const testDir = SanitySlotsDir / astToStr(identifier)
proc `testImpl _ slots _ identifier`() =
test "Slots - " & testName & " (" & astToStr(identifier) & ")":
timedTest "Slots - " & testName & " (" & astToStr(identifier) & ")":
var stateRef, postRef: ref BeaconState
new stateRef
new postRef

View File

@ -26,7 +26,7 @@ type
const ShufflingDir = JsonTestsDir/const_preset/"phase0"/"shuffling"/"core"/"shuffle"
suite "Official - Shuffling tests [Preset: " & preset():
test "Shuffling a sequence of N validators" & preset():
timedTest "Shuffling a sequence of N validators" & preset():
for file in walkDirRec(ShufflingDir):
let t = parseTest(file, Json, Shuffling)
let implResult = get_shuffled_seq(t.seed, t.count)

View File

@ -70,11 +70,11 @@ proc runSSZtests() =
for pathKind, sszType in walkDir(SSZDir, relative = true):
doAssert pathKind == pcDir
if sszType in Unsupported:
test &" Skipping {sszType:20} ✗✗✗":
timedTest &" Skipping {sszType:20} ✗✗✗":
discard
continue
test &" Testing {sszType}":
timedTest &" Testing {sszType}":
let path = SSZDir/sszType
for pathKind, sszTestKind in walkDir(path, relative = true):
doAssert pathKind == pcDir

View File

@ -10,7 +10,7 @@ import
os, unittest, strutils, streams, strformat, strscans,
macros,
# Status libraries
stint, stew/bitseqs,
stint, stew/bitseqs, ../testutil,
# Third-party
yaml,
# Beacon chain internals
@ -251,7 +251,7 @@ proc runSSZtests() =
for pathKind, sszType in walkDir(SSZDir, relative = true):
doAssert pathKind == pcDir
if sszType == "bitlist":
test &"**Skipping** {sszType} inputs - valid - skipped altogether":
timedTest &"**Skipping** {sszType} inputs - valid - skipped altogether":
# TODO: serialization of "type BitList[maxLen] = distinct BitSeq is not supported"
# https://github.com/status-im/nim-beacon-chain/issues/518
discard
@ -266,7 +266,7 @@ proc runSSZtests() =
of "containers":
skipped = " - skipping VarTestStruct, ComplexTestStruct, BitsStruct"
test &"Testing {sszType:12} inputs - valid" & skipped:
timedTest &"Testing {sszType:12} inputs - valid" & skipped:
let path = SSZDir/sszType/"valid"
for pathKind, sszSubType in walkDir(path, relative = true):
doAssert pathKind == pcDir

View File

@ -37,7 +37,7 @@ template runSuite(suiteDir, testName: string, transitionProc: untyped{ident}, us
for testDir in walkDirRec(suiteDir, yieldFilter = {pcDir}):
let unitTestName = testDir.rsplit(DirSep, 1)[1]
test testName & " - " & unitTestName & preset():
timedTest testName & " - " & unitTestName & preset():
var stateRef, postRef: ref BeaconState
new stateRef
new postRef

View File

@ -28,29 +28,29 @@ import
# - is_valid_genesis_state is not implemented
suite "[Unit - Spec - Genesis] Genesis block checks " & preset():
test "is_valid_genesis_state for a valid state":
timedTest "is_valid_genesis_state for a valid state":
discard initGenesisState(
num_validators = MIN_GENESIS_ACTIVE_VALIDATOR_COUNT,
genesis_time = MIN_GENESIS_TIME
)
discard "TODO"
test "Invalid genesis time":
timedTest "Invalid genesis time":
discard initGenesisState(
num_validators = MIN_GENESIS_ACTIVE_VALIDATOR_COUNT,
genesis_time = MIN_GENESIS_TIME.uint64 - 1
)
discard "TODO"
test "Validators with more than 32 ETH":
timedTest "Validators with more than 32 ETH":
discard "TODO"
test "More validators than minimum":
timedTest "More validators than minimum":
discard "TODO"
when false:
# TODO causes possible stack overflow in mainnet
test "Not enough validators":
timedTest "Not enough validators":
discard initGenesisState(
num_validators = MIN_GENESIS_ACTIVE_VALIDATOR_COUNT.uint64 - 1,
genesis_time = MIN_GENESIS_TIME.uint64 - 1

View File

@ -32,7 +32,7 @@ suite "[Unit - Spec - Block processing] Attestations " & preset():
# The BeaconState is exposed as "state" in the calling context
# The attestation to process must be named "attestation" in the calling context
test name:
timedTest name:
var state{.inject.}: BeaconState
deepCopy(state, genesisState)
@ -72,7 +72,7 @@ suite "[Unit - Spec - Block processing] Attestations " & preset():
# TODO check if this should be replaced
when false:
when MAX_EPOCHS_PER_CROSSLINK > 4'u64:
test "Valid attestation since max epochs per crosslinks [Skipped for preset: " & const_preset & ']':
timedTest "Valid attestation since max epochs per crosslinks [Skipped for preset: " & const_preset & ']':
discard
else:
valid_attestation("Valid attestation since max epochs per crosslinks"):

View File

@ -31,7 +31,7 @@ suite "[Unit - Spec - Block processing] Deposits " & preset():
template valid_deposit(deposit_amount: uint64, name: string): untyped =
# TODO: BLS signature
test "Deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
timedTest "Deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
$(MAX_EFFECTIVE_BALANCE div 10'u64^9) & " ETH)":
var state: BeaconState
deepCopy(state, genesisState)
@ -74,7 +74,7 @@ suite "[Unit - Spec - Block processing] Deposits " & preset():
valid_deposit(MAX_EFFECTIVE_BALANCE, "at")
valid_deposit(MAX_EFFECTIVE_BALANCE + 1, "over")
test "Validator top-up":
timedTest "Validator top-up":
var state: BeaconState
deepCopy(state, genesisState)

View File

@ -223,35 +223,35 @@ suite "[Unit - Spec - Epoch processing] Justification and Finalization " & prese
template resetState: untyped =
deepCopy(state, genesisState)
test " Rule I - 234 finalization with enough support":
timedTest " Rule I - 234 finalization with enough support":
resetState()
finalizeOn234(state, Epoch 5, sufficient_support = true)
test " Rule I - 234 finalization without support":
timedTest " Rule I - 234 finalization without support":
resetState()
finalizeOn234(state, Epoch 5, sufficient_support = false)
test " Rule II - 23 finalization with enough support":
timedTest " Rule II - 23 finalization with enough support":
resetState()
finalizeOn23(state, Epoch 4, sufficient_support = true)
test " Rule II - 23 finalization without support":
timedTest " Rule II - 23 finalization without support":
resetState()
finalizeOn23(state, Epoch 4, sufficient_support = false)
test " Rule III - 123 finalization with enough support":
timedTest " Rule III - 123 finalization with enough support":
resetState()
finalizeOn123(state, Epoch 6, sufficient_support = true)
test " Rule III - 123 finalization without support":
timedTest " Rule III - 123 finalization without support":
resetState()
finalizeOn123(state, Epoch 6, sufficient_support = false)
test " Rule IV - 12 finalization with enough support":
timedTest " Rule IV - 12 finalization with enough support":
resetState()
finalizeOn12(state, Epoch 3, sufficient_support = true)
test " Rule IV - 12 finalization without support":
timedTest " Rule IV - 12 finalization without support":
resetState()
finalizeOn12(state, Epoch 3, sufficient_support = false)

View File

@ -38,7 +38,7 @@ suite "Attestation pool processing" & preset():
{skipValidation})
genBlock = get_initial_beacon_block(genState)
test "Can add and retrieve simple attestation" & preset():
timedTest "Can add and retrieve simple attestation" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
let
@ -58,7 +58,7 @@ suite "Attestation pool processing" & preset():
check:
attestations.len == 1
test "Attestations may arrive in any order" & preset():
timedTest "Attestations may arrive in any order" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
let
@ -88,7 +88,7 @@ suite "Attestation pool processing" & preset():
check:
attestations.len == 1
test "Attestations should be combined" & preset():
timedTest "Attestations should be combined" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
let
@ -111,7 +111,7 @@ suite "Attestation pool processing" & preset():
check:
attestations.len == 1
test "Attestations may overlap, bigger first" & preset():
timedTest "Attestations may overlap, bigger first" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
@ -137,7 +137,7 @@ suite "Attestation pool processing" & preset():
check:
attestations.len == 1
test "Attestations may overlap, smaller first" & preset():
timedTest "Attestations may overlap, smaller first" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
var

View File

@ -15,7 +15,7 @@ import options, unittest, sequtils, eth/trie/[db],
suite "Beacon chain DB" & preset():
test "empty database" & preset():
timedTest "empty database" & preset():
var
db = init(BeaconChainDB, newMemoryDB())
@ -26,7 +26,7 @@ suite "Beacon chain DB" & preset():
# TODO re-check crash here in mainnet
true
test "sanity check blocks" & preset():
timedTest "sanity check blocks" & preset():
var
db = init(BeaconChainDB, newMemoryDB())
@ -44,7 +44,7 @@ suite "Beacon chain DB" & preset():
check:
db.getStateRoot(root, blck.slot).get() == root
test "sanity check states" & preset():
timedTest "sanity check states" & preset():
var
db = init(BeaconChainDB, newMemoryDB())
@ -58,7 +58,7 @@ suite "Beacon chain DB" & preset():
db.containsState(root)
db.getState(root).get() == state
test "find ancestors" & preset():
timedTest "find ancestors" & preset():
var
db = init(BeaconChainDB, newMemoryDB())
x: ValidatorSig
@ -93,7 +93,7 @@ suite "Beacon chain DB" & preset():
doAssert toSeq(db.getAncestors(a0r)) == [(a0r, a0)]
doAssert toSeq(db.getAncestors(a2r)) == [(a2r, a2), (a1r, a1), (a0r, a0)]
test "sanity check genesis roundtrip" & preset():
timedTest "sanity check genesis roundtrip" & preset():
# This is a really dumb way of checking that we can roundtrip a genesis
# state. We've been bit by this because we've had a bug in the BLS
# serialization where an all-zero default-initialized bls signature could

View File

@ -7,7 +7,7 @@
{.used.}
import unittest
import unittest, ./testutil
when false:
import ../beacon_chain/beacon_node
@ -15,5 +15,5 @@ when false:
suite "Beacon node":
# Compile test
test "Compile":
timedTest "Compile":
discard

View File

@ -8,12 +8,12 @@
{.used.}
import
unittest,
times, unittest,
./testutil, ./testblockutil,
../beacon_chain/spec/[beaconstate, datatypes, digest]
suite "Beacon state" & preset():
test "Smoke test initialize_beacon_state_from_eth1" & preset():
timedTest "Smoke test initialize_beacon_state_from_eth1" & preset():
let state = initialize_beacon_state_from_eth1(
Eth2Digest(), 0,
makeInitialDeposits(SLOTS_PER_EPOCH, {}), {})

View File

@ -26,11 +26,11 @@ suite "Block pool processing" & preset():
pool = BlockPool.init(db)
state = pool.loadTailState()
test "getRef returns nil for missing blocks":
timedTest "getRef returns nil for missing blocks":
check:
pool.getRef(default Eth2Digest) == nil
test "loadTailState gets genesis block on first load" & preset():
timedTest "loadTailState gets genesis block on first load" & preset():
var
b0 = pool.get(state.blck.root)
@ -39,7 +39,7 @@ suite "Block pool processing" & preset():
b0.isSome()
toSeq(pool.blockRootsForSlot(GENESIS_SLOT)) == @[state.blck.root]
test "Simple block add&get" & preset():
timedTest "Simple block add&get" & preset():
let
b1 = makeBlock(state.data.data, state.blck.root, BeaconBlockBody())
b1Root = signing_root(b1)
@ -54,7 +54,7 @@ suite "Block pool processing" & preset():
b1Ref.get().refs.root == b1Root
hash_tree_root(state.data.data) == state.data.root
test "Reverse order block add & get" & preset():
timedTest "Reverse order block add & get" & preset():
let
b1 = addBlock(state.data.data, state.blck.root, BeaconBlockBody(), {})
b1Root = signing_root(b1)
@ -95,7 +95,7 @@ suite "Block pool processing" & preset():
pool2.get(b1Root).isSome()
pool2.get(b2Root).isSome()
test "isAncestorOf sanity" & preset():
timedTest "isAncestorOf sanity" & preset():
let
a = BlockRef(slot: Slot(1))
b = BlockRef(slot: Slot(2), parent: a)

View File

@ -8,11 +8,11 @@
{.used.}
import
unittest,
unittest, ./testutil,
../beacon_chain/spec/[helpers]
suite "Spec helpers":
test "integer_squareroot":
timedTest "integer_squareroot":
check:
integer_squareroot(0'u64) == 0'u64
integer_squareroot(1'u64) == 1'u64

View File

@ -1,7 +1,7 @@
{.used.}
import
unittest, stint, blscurve, stew/byteutils,
unittest, stint, blscurve, ./testutil, stew/byteutils,
../beacon_chain/[extras, interop, ssz],
../beacon_chain/spec/[beaconstate, crypto, helpers, datatypes]
@ -116,7 +116,7 @@ let depositsConfig = [
]
suite "Interop":
test "Mocked start private key":
timedTest "Mocked start private key":
for i, k in privateKeys:
let
key = makeInteropPrivKey(i)
@ -126,7 +126,7 @@ suite "Interop":
# getBytes is bigendian and returns full 48 bytes of key..
Uint256.fromBytesBE(key.getBytes()[48-32..<48]) == v
test "Interop signatures":
timedTest "Interop signatures":
for dep in depositsConfig:
let computed_sig = bls_sign(
key = dep.privkey,
@ -137,7 +137,7 @@ suite "Interop":
check:
dep.sig == computed_sig
test "Interop genesis":
timedTest "Interop genesis":
# Check against https://github.com/protolambda/zcli:
# zcli keys generate --to 64 | zcli genesis mock --genesis-time 1570500000 > /tmp/state.ssz
# zcli hash-tree-root /tmp.state.ssz

View File

@ -9,6 +9,7 @@
import
unittest, random, heapqueue, tables, strutils,
./testutil,
chronos,
../beacon_chain/peer_pool
@ -36,7 +37,7 @@ proc close*(peer: PeerTest) =
peer.future.complete()
suite "PeerPool testing suite":
test "addPeer() test":
timedTest "addPeer() test":
const peersCount = [
[10, 5, 5, 10, 5, 5],
[-1, 5, 5, 10, 5, 5],
@ -63,7 +64,7 @@ suite "PeerPool testing suite":
pool.lenAvailable == item[3]
pool.lenAvailable({PeerType.Incoming}) == item[4]
pool.lenAvailable({PeerType.Outgoing}) == item[5]
test "Acquire from empty pool":
timedTest "Acquire from empty pool":
var pool0 = newPeerPool[PeerTest, PeerTestID]()
var pool1 = newPeerPool[PeerTest, PeerTestID]()
var pool2 = newPeerPool[PeerTest, PeerTestID]()
@ -115,7 +116,7 @@ suite "PeerPool testing suite":
itemFut23.finished == false
itemFut24.finished == false
test "Acquire/Sorting and consistency test":
timedTest "Acquire/Sorting and consistency test":
const
TestsCount = 1000
MaxNumber = 1_000_000
@ -184,7 +185,7 @@ suite "PeerPool testing suite":
check waitFor(testAcquireRelease()) == TestsCount
test "deletePeer() test":
timedTest "deletePeer() test":
proc testDeletePeer(): Future[bool] {.async.} =
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer = PeerTest.init("deletePeer")
@ -237,7 +238,7 @@ suite "PeerPool testing suite":
result = true
check waitFor(testDeletePeer()) == true
test "Peer lifetime test":
timedTest "Peer lifetime test":
proc testPeerLifetime(): Future[bool] {.async.} =
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer = PeerTest.init("closingPeer")
@ -284,7 +285,7 @@ suite "PeerPool testing suite":
check waitFor(testPeerLifetime()) == true
test "Safe/Clear test":
timedTest "Safe/Clear test":
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)
@ -331,7 +332,7 @@ suite "PeerPool testing suite":
asyncCheck testConsumer()
check waitFor(testClose()) == true
test "Access peers by key test":
timedTest "Access peers by key test":
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)
@ -360,7 +361,7 @@ suite "PeerPool testing suite":
ppeer[].weight = 100
check pool["peer1"].weight == 100
test "Iterators test":
timedTest "Iterators test":
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)

View File

@ -10,6 +10,7 @@
import
unittest, options,
stint, nimcrypto, eth/common, serialization/testing/generic_suite,
./testutil,
../beacon_chain/spec/[datatypes, digest],
../beacon_chain/ssz, ../beacon_chain/ssz/[navigator, dynamic_navigator]
@ -75,7 +76,7 @@ proc toDigest[N: static int](x: array[N, byte]): Eth2Digest =
result.data[0 .. N-1] = x
suite "SSZ navigator":
test "simple object fields":
timedTest "simple object fields":
var foo = Foo(bar: Bar(b: "bar", baz: Baz(i: 10'u64)))
let encoded = SSZ.encode(foo)
@ -87,7 +88,7 @@ suite "SSZ navigator":
let mountedBar = mountedFoo.bar
check mountedBar.baz.i == 10'u64
test "lists with max size":
timedTest "lists with max size":
let a = [byte 0x01, 0x02, 0x03].toDigest
let b = [byte 0x04, 0x05, 0x06].toDigest
let c = [byte 0x07, 0x08, 0x09].toDigest
@ -101,7 +102,7 @@ suite "SSZ navigator":
check $root2 == "9FB7D518368DC14E8CC588FB3FD2749BEEF9F493FEF70AE34AF5721543C67173"
suite "SSZ dynamic navigator":
test "navigating fields":
timedTest "navigating fields":
var fooOrig = Foo(bar: Bar(b: "bar", baz: Baz(i: 10'u64)))
let fooEncoded = SSZ.encode(fooOrig)

View File

@ -26,7 +26,7 @@ suite "Block processing" & preset():
genesisBlock = get_initial_beacon_block(genesisState)
genesisRoot = signing_root(genesisBlock)
test "Passes from genesis state, no block" & preset():
timedTest "Passes from genesis state, no block" & preset():
var
state = genesisState
@ -34,7 +34,7 @@ suite "Block processing" & preset():
check:
state.slot == genesisState.slot + 1
test "Passes from genesis state, empty block" & preset():
timedTest "Passes from genesis state, empty block" & preset():
var
state = genesisState
previous_block_root = signing_root(genesisBlock)
@ -47,7 +47,7 @@ suite "Block processing" & preset():
state.slot == genesisState.slot + 1
test "Passes through epoch update, no block" & preset():
timedTest "Passes through epoch update, no block" & preset():
var
state = genesisState
@ -56,7 +56,7 @@ suite "Block processing" & preset():
check:
state.slot == genesisState.slot + SLOTS_PER_EPOCH
test "Passes through epoch update, empty block" & preset():
timedTest "Passes through epoch update, empty block" & preset():
var
state = genesisState
previous_block_root = genesisRoot
@ -74,7 +74,7 @@ suite "Block processing" & preset():
check:
state.slot == genesisState.slot + SLOTS_PER_EPOCH
test "Attestation gets processed at epoch" & preset():
timedTest "Attestation gets processed at epoch" & preset():
var
state = genesisState
previous_block_root = genesisRoot

View File

@ -7,7 +7,7 @@
{.used.}
import unittest
import unittest, ./testutil
when false:
import ../beacon_chain/sync_protocol
@ -15,5 +15,5 @@ when false:
suite "Sync protocol":
# Compile test
test "Compile":
timedTest "Compile":
discard

View File

@ -8,7 +8,7 @@
{.used.}
import
unittest,
unittest, ./testutil,
../beacon_chain/spec/[datatypes, crypto],
../beacon_chain/ssz
@ -36,7 +36,7 @@ suite "Zero signature sanity checks":
# check(zeroSIg == deserZeroSig)
test "SSZ serialization roundtrip of BeaconBlockHeader":
timedTest "SSZ serialization roundtrip of BeaconBlockHeader":
let defaultBlockHeader = BeaconBlockHeader(
signature: BlsValue[Signature](kind: OpaqueBlob)

View File

@ -6,14 +6,18 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import
stats, stew/endians2,
algorithm, strformat, stats, times, std/monotimes, stew/endians2,
chronicles, eth/trie/[db],
../beacon_chain/[beacon_chain_db, block_pool, ssz, beacon_node_types],
../beacon_chain/spec/datatypes
type
TestDuration = tuple[duration: float, label: string]
func preset*(): string =
" [Preset: " & const_preset & ']'
# For state_sim
template withTimer*(stats: var RunningStat, body: untyped) =
let start = getMonoTime()
@ -23,6 +27,15 @@ template withTimer*(stats: var RunningStat, body: untyped) =
let stop = getMonoTime()
stats.push (stop - start).inMicroseconds.float / 1000000.0
template withTimer*(duration: var float, body: untyped) =
let start = getMonoTime()
block:
body
duration = (getMonoTime() - start).inMicroseconds.float / 1000000.0
# For state_sim
template withTimerRet*(stats: var RunningStat, body: untyped): untyped =
let start = getMonoTime()
let tmp = block:
@ -32,6 +45,34 @@ template withTimerRet*(stats: var RunningStat, body: untyped): untyped =
tmp
var testTimes: seq[TestDuration]
proc summarizeLongTests*() =
# TODO clean-up and make machine-readable/storable the output
# TODO this is too hard-coded and mostly a demo for using the
# timedTest wrapper template for unittest
sort(testTimes, system.cmp, SortOrder.Descending)
echo ""
echo "10 longest individual test durations"
echo "------------------------------------"
for i, item in testTimes:
echo &"{item.duration:6.2f}s for {item.label}"
if i >= 10:
break
template timedTest*(name, body) =
var f: float
test name:
withTimer f:
body
# TODO reached for a failed test; maybe defer or similar
# TODO not thread-safe as-is
testTimes.add (f, name)
proc makeTestDB*(tailState: BeaconState, tailBlock: BeaconBlock): BeaconChainDB =
result = init(BeaconChainDB, newMemoryDB())
BlockPool.preInit(result, tailState, tailBlock)
export inMicroseconds