Merge pull request #633 from status-im/devel

Testnet0 release 2019-12-09
This commit is contained in:
zah 2019-12-10 01:17:08 +02:00 committed by GitHub
commit 1597f0c943
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
50 changed files with 281 additions and 192 deletions

View File

@ -13,11 +13,39 @@ BUILD_SYSTEM_DIR := vendor/nimbus-build-system
# we don't want an error here, so we can handle things later, in the build-system-checks target
-include $(BUILD_SYSTEM_DIR)/makefiles/variables.mk
TOOLS := beacon_node bench_bls_sig_agggregation ncli_hash_tree_root ncli_pretty ncli_signing_root ncli_transition process_dashboard deposit_contract
TOOLS_DIRS := beacon_chain benchmarks research ncli tests/simulation
# unconditionally built by the default Make target
TOOLS := \
beacon_node \
bench_bls_sig_agggregation \
deposit_contract \
ncli_hash_tree_root \
ncli_pretty \
ncli_signing_root \
ncli_transition \
process_dashboard
TOOLS_DIRS := \
beacon_chain \
benchmarks \
ncli \
research \
tests/simulation
TOOLS_CSV := $(subst $(SPACE),$(COMMA),$(TOOLS))
.PHONY: all build-system-checks deps update p2pd test $(TOOLS) clean_eth2_network_simulation_files eth2_network_simulation clean-testnet0 testnet0 clean-testnet1 testnet1 clean
.PHONY: \
all \
build-system-checks \
deps \
update \
p2pd \
test \
$(TOOLS) \
clean_eth2_network_simulation_files \
eth2_network_simulation \
clean-testnet0 \
testnet0 \
clean-testnet1 \
testnet1 \
clean
ifeq ($(NIM_PARAMS),)
# "variables.mk" was not included. We can only execute one target in this state.
@ -29,6 +57,8 @@ endif
# must be included after the default target
-include $(BUILD_SYSTEM_DIR)/makefiles/targets.mk
#- the Windows build fails on Azure Pipelines if we have Unicode symbols copy/pasted here,
# so we encode them in ASCII
GIT_SUBMODULE_UPDATE := git submodule update --init --recursive
build-system-checks:
@[[ -e "$(BUILD_SYSTEM_DIR)/makefiles" ]] || { \
@ -44,7 +74,7 @@ deps: | deps-common beacon_chain.nims p2pd
#- deletes and recreates "beacon_chain.nims" which on Windows is a copy instead of a proper symlink
update: | update-common
rm -rf beacon_chain.nims && \
rm -f beacon_chain.nims && \
$(MAKE) beacon_chain.nims
# symlink
@ -77,26 +107,20 @@ clean_eth2_network_simulation_files:
eth2_network_simulation: | build deps p2pd clean_eth2_network_simulation_files process_dashboard
GIT_ROOT="$$PWD" tests/simulation/start.sh
testnet0: | build deps
+ $(MAKE) testnet0-no-clean
testnet1: | build deps
+ $(MAKE) testnet1-no-clean
clean-testnet0:
rm -rf build/data/testnet0
clean-testnet1:
rm -rf build/data/testnet1
testnet0-no-clean: | build deps
testnet0: | build deps
NIM_PARAMS="$(NIM_PARAMS)" $(ENV_SCRIPT) nim $(NIM_PARAMS) scripts/connect_to_testnet.nims testnet0
testnet1-no-clean: | build deps
testnet1: | build deps
NIM_PARAMS="$(NIM_PARAMS)" $(ENV_SCRIPT) nim $(NIM_PARAMS) scripts/connect_to_testnet.nims testnet1
clean: | clean-common
rm -rf build/{$(TOOLS_CSV),all_tests,*_node}
rm -rf build/{$(TOOLS_CSV),all_tests,*_node,*ssz*,beacon_node_testnet*,state_sim,transition*}
libnfuzz.so: | build deps-common beacon_chain.nims
echo -e $(BUILD_MSG) "build/$@" && \
@ -109,3 +133,4 @@ libnfuzz.a: | build deps-common beacon_chain.nims
rm -f build/$@ && \
$(ENV_SCRIPT) nim c -d:release --app:staticlib --noMain --nimcache:nimcache/libnfuzz_static $(NIM_PARAMS) -o:build/$@ nfuzz/libnfuzz.nim && \
[[ -e "$@" ]] && mv "$@" build/ # workaround for https://github.com/nim-lang/Nim/issues/12745

View File

@ -1,6 +0,0 @@
# beacon_chain
# Copyright (c) 2018 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at http://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at http://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

View File

@ -13,7 +13,6 @@ skipDirs = @["nfuzz"]
bin = @[
"beacon_chain/beacon_node",
"research/serialized_sizes",
"research/state_sim",
]
### Dependencies

View File

@ -645,9 +645,19 @@ proc handleProposal(node: BeaconNode, head: BlockRef, slot: Slot):
# revisit this - we should be able to advance behind
var cache = get_empty_per_epoch_cache()
node.blockPool.withState(node.stateCache, BlockSlot(blck: head, slot: slot)):
let
proposerIdx = get_beacon_proposer_index(state, cache)
validator = node.getAttachedValidator(state, proposerIdx)
let proposerIdx = get_beacon_proposer_index(state, cache)
if proposerIdx.isNone:
notice "Missing proposer index",
slot=slot,
epoch=slot.compute_epoch_at_slot,
num_validators=state.validators.len,
active_validators=
get_active_validator_indices(state, slot.compute_epoch_at_slot),
balances=state.balances
return head
let validator = node.getAttachedValidator(state, proposerIdx.get)
if validator != nil:
return await proposeBlock(node, validator, head, slot)
@ -655,7 +665,7 @@ proc handleProposal(node: BeaconNode, head: BlockRef, slot: Slot):
trace "Expecting block proposal",
headRoot = shortLog(head.root),
slot = shortLog(slot),
proposer = shortLog(state.validators[proposerIdx].pubKey),
proposer = shortLog(state.validators[proposerIdx.get].pubKey),
cat = "consensus",
pcs = "wait_for_proposal"

View File

@ -1,6 +1,6 @@
import
os, strutils, options, json,
chronos, nimcrypto, confutils, web3, stint,
chronos, nimcrypto/utils, confutils, web3, stint,
eth/keys
# Compiled version of /scripts/depositContract.v.py in this repo

View File

@ -144,12 +144,6 @@ proc disconnectAndRaise(peer: Peer,
await peer.disconnect(r)
raisePeerDisconnected(msg, r)
template reraiseAsPeerDisconnected(peer: Peer, errMsgExpr: static string,
reason = FaultOrError): auto =
const errMsg = errMsgExpr
debug errMsg
disconnectAndRaise(peer, reason, errMsg)
proc registerProtocol(protocol: ProtocolInfo) =
# TODO: This can be done at compile-time in the future
let pos = lowerBound(gProtocols, protocol)
@ -326,16 +320,6 @@ proc sendErrorResponse(peer: Peer,
discard await stream.transp.write(responseBytes)
await stream.close()
proc writeSizePrefix(transp: StreamTransport, size: uint64) {.async.} =
var
varintBuf: array[10, byte]
varintSize = vsizeof(size)
cursor = createWriteCursor(varintBuf)
cursor.appendVarint size
var sent = await transp.write(varintBuf[0 ..< varintSize])
if sent != varintSize:
raise newException(TransmissionError, "Failed to deliver size prefix")
proc sendNotificationMsg(peer: Peer, protocolId: string, requestBytes: Bytes) {.async} =
var deadline = sleepAsync RESP_TIMEOUT
var streamFut = peer.network.daemon.openStream(peer.id, @[protocolId])
@ -422,10 +406,6 @@ proc makeEth2Request(peer: Peer, protocolId: string, requestBytes: Bytes,
# Read the response
return await stream.readResponse(ResponseMsg, deadline)
proc p2pStreamName(MsgType: type): string =
mixin msgProtocol, protocolInfo, msgId
MsgType.msgProtocol.protocolInfo.messages[MsgType.msgId].libp2pProtocol
proc init*(T: type Peer, network: Eth2Node, id: PeerID): Peer =
new result
result.id = id
@ -635,7 +615,7 @@ proc p2pProtocolBackendImpl*(p: P2PProtocol): Backend =
except CatchableError as `errVar`:
try:
`await` sendErrorResponse(`peerVar`, `streamVar`, ServerError, `errVar`.msg)
except CatchableError as err:
except CatchableError:
debug "Failed to deliver error response", peer = `peerVar`
##

View File

@ -47,6 +47,5 @@ proc fetchAncestorBlocks*(requestManager: RequestManager,
const ParallelRequests = 2
var fetchComplete = false
for peer in requestManager.network.randomPeers(ParallelRequests, BeaconSync):
traceAsyncErrors peer.fetchAncestorBlocksFromPeer(roots.sample(), responseHandler)

View File

@ -6,7 +6,7 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import
tables, algorithm, math, sequtils,
tables, algorithm, math, sequtils, options,
json_serialization/std/sets, chronicles, stew/bitseqs,
../extras, ../ssz,
./crypto, ./datatypes, ./digest, ./helpers, ./validator
@ -171,14 +171,19 @@ proc slash_validator*(state: var BeaconState, slashed_index: ValidatorIndex,
decrease_balance(state, slashed_index,
validator.effective_balance div MIN_SLASHING_PENALTY_QUOTIENT)
# The rest doesn't make sense without there being any proposer index, so skip
let proposer_index = get_beacon_proposer_index(state, stateCache)
if proposer_index.isNone:
debug "No beacon proposer index and probably no active validators"
return
let
proposer_index = get_beacon_proposer_index(state, stateCache)
# Spec has whistleblower_index as optional param, but it's never used.
whistleblower_index = proposer_index
whistleblower_index = proposer_index.get
whistleblowing_reward =
(validator.effective_balance div WHISTLEBLOWER_REWARD_QUOTIENT).Gwei
proposer_reward = whistleblowing_reward div PROPOSER_REWARD_QUOTIENT
increase_balance(state, proposer_index, proposer_reward)
increase_balance(state, proposer_index.get, proposer_reward)
# TODO: evaluate if spec bug / underflow can be triggered
doAssert(whistleblowing_reward >= proposer_reward, "Spec bug: underflow in slash_validator")
increase_balance(
@ -492,6 +497,12 @@ proc process_attestation*(
# reused when looking for suitable blocks to include in attestations.
# TODO don't log warnings when looking for attestations (return
# Result[void, cstring] instead of logging in check_attestation?)
let proposer_index = get_beacon_proposer_index(state, stateCache)
if proposer_index.isNone:
debug "No beacon proposer index and probably no active validators"
return false
if check_attestation(state, attestation, flags, stateCache):
let
attestation_slot = attestation.data.slot
@ -499,7 +510,7 @@ proc process_attestation*(
data: attestation.data,
aggregation_bits: attestation.aggregation_bits,
inclusion_delay: state.slot - attestation_slot,
proposer_index: get_beacon_proposer_index(state, stateCache).uint64,
proposer_index: proposer_index.get.uint64,
)
if attestation.data.target.epoch == get_current_epoch(state):

View File

@ -32,8 +32,8 @@
# improvements to be made - other than that, keep things similar to spec for
# now.
import # TODO - cleanup imports
algorithm, collections/sets, chronicles, sequtils, sets, tables,
import
algorithm, collections/sets, chronicles, options, sequtils, sets, tables,
../extras, ../ssz, metrics,
beaconstate, crypto, datatypes, digest, helpers, validator
@ -76,10 +76,13 @@ proc process_block_header*(
signature: BlsValue[Signature](kind: OpaqueBlob)
)
# Verify proposer is not slashed
let proposer =
state.validators[get_beacon_proposer_index(state, stateCache)]
let proposer_index = get_beacon_proposer_index(state, stateCache)
if proposer_index.isNone:
debug "Block header: proposer missing"
return false
let proposer = state.validators[proposer_index.get]
if proposer.slashed:
notice "Block header: proposer slashed"
return false
@ -105,7 +108,12 @@ proc process_randao(
let
epoch = state.get_current_epoch()
proposer_index = get_beacon_proposer_index(state, stateCache)
proposer = addr state.validators[proposer_index]
if proposer_index.isNone:
debug "Proposer index missing, probably along with any active validators"
return false
let proposer = addr state.validators[proposer_index.get]
# Verify that the provided randao value is valid
if skipValidation notin flags:

View File

@ -7,8 +7,8 @@
# Helpers and functions pertaining to managing the validator set
import
options, nimcrypto, sequtils, math, tables,
./datatypes, ./digest, ./helpers
options, sequtils, math, tables,
./crypto, ./datatypes, ./digest, ./helpers
# TODO: Proceed to renaming and signature changes
# https://github.com/ethereum/eth2.0-specs/blob/v0.9.2/specs/core/0_beacon-chain.md#compute_shuffled_index
@ -148,11 +148,12 @@ func get_empty_per_epoch_cache*(): StateCache =
# https://github.com/ethereum/eth2.0-specs/blob/v0.9.2/specs/core/0_beacon-chain.md#compute_proposer_index
func compute_proposer_index(state: BeaconState, indices: seq[ValidatorIndex],
seed: Eth2Digest, stateCache: var StateCache): ValidatorIndex =
seed: Eth2Digest, stateCache: var StateCache): Option[ValidatorIndex] =
# Return from ``indices`` a random index sampled by effective balance.
const MAX_RANDOM_BYTE = 255
doAssert len(indices) > 0
if len(indices) == 0:
return none(ValidatorIndex)
# TODO fixme; should only be run once per slot and cached
# There's exactly one beacon proposer per slot.
@ -175,12 +176,12 @@ func compute_proposer_index(state: BeaconState, indices: seq[ValidatorIndex],
state.validators[candidate_index].effective_balance
if effective_balance * MAX_RANDOM_BYTE >=
MAX_EFFECTIVE_BALANCE * random_byte:
return candidate_index
return some(candidate_index)
i += 1
# https://github.com/ethereum/eth2.0-specs/blob/v0.9.2/specs/core/0_beacon-chain.md#get_beacon_proposer_index
func get_beacon_proposer_index*(state: BeaconState, stateCache: var StateCache):
ValidatorIndex =
Option[ValidatorIndex] =
# Return the beacon proposer index at the current slot.
let epoch = get_current_epoch(state)

View File

@ -598,9 +598,6 @@ func lastFieldName(RecordType: type): string {.compileTime.} =
enumAllSerializedFields(RecordType):
result = fieldName
func hasSigningRoot(T: type): bool {.compileTime.} =
lastFieldName(T) == "signature"
func signingRoot*(obj: object): Eth2Digest =
const lastField = lastFieldName(obj.type)
merkelizeFields:

View File

@ -1,6 +1,6 @@
import
os, strutils,
chronicles, chronos, blscurve, nimcrypto, json_serialization, serialization,
chronicles, chronos, blscurve, json_serialization, serialization,
web3, stint, eth/keys,
spec/[datatypes, digest, crypto], conf, ssz, interop

View File

@ -19,6 +19,12 @@ make libnfuzz.a
make libnfuzz.so
```
By default, the library is built with the `minimal` config. To select a specific config you can instead run:
```bash
# build with mainnet config
make libnfuzz.a NIMFLAGS="-d:const_preset=mainnet"
```
For the library to be useful for fuzzing with libFuzzer (e.g. for
integration with [beacon-fuzz](https://github.com/sigp/beacon-fuzz)) we can pass
additional Nim arguments, e.g.:

View File

@ -78,31 +78,34 @@ cli do (testnetName {.argument.}: string):
mkDir dumpDir
proc execIgnoringExitCode(s: string) =
# reduces the error output when interrupting an external command with Ctrl+C
try:
exec s
except OsError:
discard
if depositContractOpt.len > 0 and not system.dirExists(validatorsDir):
mode = Silent
echo "Would you like to become a validator (you'll need access to 32 GoETH)? [Yn]"
while true:
let answer = readLineFromStdin()
if answer in ["y", "Y", "yes", ""]:
echo "Please enter your Eth1 private key in hex form (e.g. 0x1a2...f3c). Hit Enter to cancel."
let privKey = readLineFromStdin()
if privKey.len > 0:
mkDir validatorsDir
exec replace(&"""{beaconNodeBinary} makeDeposits
--random-deposits=1
--deposits-dir="{validatorsDir}"
--deposit-private-key={privKey}
--web3-url=wss://goerli.infura.io/ws/v3/809a18497dd74102b5f37d25aae3c85a
{depositContractOpt}
""", "\n", " ")
break
elif answer in ["n", "N", "no"]:
break
else:
echo "Please answer 'yes' or 'no'"
echo "\nPlease enter your Goerli Eth1 private key in hex form (e.g. 0x1a2...f3c) in order to become a validator (you'll need access to 32 GoETH)."
echo "Hit Enter to skip this."
# is there no other way to print without a trailing newline?
exec "printf '> '"
let privKey = readLineFromStdin()
if privKey.len > 0:
mkDir validatorsDir
mode = Verbose
execIgnoringExitCode replace(&"""{beaconNodeBinary} makeDeposits
--random-deposits=1
--deposits-dir="{validatorsDir}"
--deposit-private-key={privKey}
--web3-url=wss://goerli.infura.io/ws/v3/809a18497dd74102b5f37d25aae3c85a
{depositContractOpt}
""", "\n", " ")
quit()
mode = Verbose
exec replace(&"""{beaconNodeBinary}
execIgnoringExitCode replace(&"""{beaconNodeBinary}
--data-dir="{dataDir}"
--dump=true
--bootstrap-file="{testnetDir/bootstrapFile}"

View File

@ -5,6 +5,8 @@
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import ./testutil
import # Official constants
./official/test_fixture_const_sanity_check
@ -24,7 +26,9 @@ import # Unit test
./test_peer_pool
import # Refactor state transition unit tests
./spec_block_processing/test_genesis,
# TODO re-enable when useful
# ./spec_block_processing/test_genesis,
# In mainnet these take 2 minutes and are empty TODOs
./spec_block_processing/test_process_deposits,
./spec_block_processing/test_process_attestation,
./spec_epoch_processing/test_process_justification_and_finalization
@ -35,3 +39,5 @@ import # Refactor state transition unit tests
# # https://github.com/status-im/nim-beacon-chain/issues/374
# ./official/test_fixture_shuffling,
# ./official/test_fixture_bls
summarizeLongTests()

View File

@ -6,6 +6,7 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import
options,
# Specs
../../beacon_chain/spec/[datatypes, crypto, helpers, validator],
# Internals
@ -60,20 +61,21 @@ proc signMockBlock*(
blck: var BeaconBlock
) =
var proposer_index: ValidatorIndex
var emptyCache = get_empty_per_epoch_cache()
if blck.slot == state.slot:
proposer_index = get_beacon_proposer_index(state, emptyCache)
else:
# Stub to get proposer index of future slot
# Note: this relies on ``let`` deep-copying the state
# i.e. BeaconState should have value semantics
# and not contain ref objects or pointers
var stubState = state
process_slots(stub_state, blck.slot)
proposer_index = get_beacon_proposer_index(stub_state, emptyCache)
let proposer_index =
if blck.slot == state.slot:
get_beacon_proposer_index(state, emptyCache)
else:
# Stub to get proposer index of future slot
# Note: this relies on ``let`` deep-copying the state
# i.e. BeaconState should have value semantics
# and not contain ref objects or pointers
var stubState = state
process_slots(stub_state, blck.slot)
get_beacon_proposer_index(stub_state, emptyCache)
signMockBlockImpl(state, blck, proposer_index)
# In tests, just let this throw if appropriate
signMockBlockImpl(state, blck, proposer_index.get)
proc mockBlock*(
state: BeaconState,

View File

@ -9,6 +9,8 @@
# to ignore invalid BLS signature in EF test vectors
# https://github.com/status-im/nim-beacon-chain/issues/374
import ../testutil
import
./test_fixture_sanity_slots,
./test_fixture_sanity_blocks,
@ -19,3 +21,5 @@ import
./test_fixture_operations_block_header,
./test_fixture_operations_proposer_slashings,
./test_fixture_operations_voluntary_exit
summarizeLongTests()

View File

@ -50,13 +50,13 @@ proc readValue*(r: var JsonReader, a: var Domain) {.inline.} =
const BLSDir = JsonTestsDir/"general"/"phase0"/"bls"
suite "Official - BLS tests":
test "Private to public key conversion":
timedTest "Private to public key conversion":
for file in walkDirRec(BLSDir/"priv_to_pub"):
let t = parseTest(file, Json, BLSPrivToPub)
let implResult = t.input.pubkey()
check: implResult == t.output
test "Message signing":
timedTest "Message signing":
for file in walkDirRec(BLSDir/"sign_msg"):
let t = parseTest(file, Json, BLSSignMsg)
let implResult = t.input.privkey.bls_sign(
@ -65,13 +65,13 @@ suite "Official - BLS tests":
)
check: implResult == t.output
test "Aggregating signatures":
timedTest "Aggregating signatures":
for file in walkDirRec(BLSDir/"aggregate_sigs"):
let t = parseTest(file, Json, BLSAggSig)
let implResult = t.input.combine()
check: implResult == t.output
test "Aggregating public keys":
timedTest "Aggregating public keys":
for file in walkDirRec(BLSDir/"aggregate_pubkeys"):
let t = parseTest(file, Json, BLSAggPubKey)
let implResult = t.input.combine()

View File

@ -108,7 +108,7 @@ proc checkConfig() =
var config = yamlStream.loadToJson()
doAssert config.len == 1
for constant, value in config[0]:
test &"{constant:<50}{value:<20}{preset()}":
timedTest &"{constant:<50}{value:<20}{preset()}":
if constant in IgnoreKeys:
echo &" ↶↶ Skipping {constant}"
continue

View File

@ -39,7 +39,7 @@ template runTest(testName: string, identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & testName & " (" & astToStr(identifier) & ")":
timedTest prefix & testName & " (" & astToStr(identifier) & ")":
var stateRef, postRef: ref BeaconState
var attestationRef: ref Attestation
new attestationRef

View File

@ -39,7 +39,7 @@ template runTest(identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & astToStr(identifier):
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var attesterSlashingRef: ref AttesterSlashing
new attesterSlashingRef

View File

@ -39,7 +39,7 @@ template runTest(identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & astToStr(identifier):
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var blck: ref BeaconBlock
new blck

View File

@ -39,7 +39,7 @@ template runTest(testName: string, identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & testName & " (" & astToStr(identifier) & ")":
timedTest prefix & testName & " (" & astToStr(identifier) & ")":
var stateRef, postRef: ref BeaconState
var depositRef: ref Deposit
new depositRef

View File

@ -39,7 +39,7 @@ template runTest(identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & astToStr(identifier):
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var proposerSlashing: ref ProposerSlashing
new proposerSlashing

View File

@ -39,7 +39,7 @@ template runTest(identifier: untyped) =
else:
prefix = "[Invalid] "
test prefix & astToStr(identifier):
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var voluntaryExit: ref VoluntaryExit
new voluntaryExit

View File

@ -29,7 +29,7 @@ template runValidTest(testName: string, identifier: untyped, num_blocks: int): u
const testDir = SanityBlocksDir / astToStr(identifier)
proc `testImpl _ blck _ identifier`() =
test "[Valid] " & testName & " (" & astToStr(identifier) & ")":
timedTest "[Valid] " & testName & " (" & astToStr(identifier) & ")":
var stateRef, postRef: ref BeaconState
new stateRef
new postRef
@ -50,7 +50,7 @@ template runValidTest(testName: string, identifier: untyped, num_blocks: int): u
`testImpl _ blck _ identifier`()
suite "Official - Sanity - Blocks " & preset():
test "[Invalid] Previous slot block transition (prev_slot_block_transition)":
timedTest "[Invalid] Previous slot block transition (prev_slot_block_transition)":
const testDir = SanityBlocksDir/"prev_slot_block_transition"
var stateRef: ref BeaconState
new stateRef
@ -68,7 +68,7 @@ suite "Official - Sanity - Blocks " & preset():
runValidTest("Empty block transition", empty_block_transition, 1)
when false: # TODO: we need more granular skipValidation
test "[Invalid] Invalid state root":
timedTest "[Invalid] Invalid state root":
const testDir = SanityBlocksDir/"invalid_state_root"
var stateRef: ref BeaconState
new stateRef

View File

@ -29,7 +29,7 @@ template runTest(testName: string, identifier: untyped, num_slots: uint64): unty
const testDir = SanitySlotsDir / astToStr(identifier)
proc `testImpl _ slots _ identifier`() =
test "Slots - " & testName & " (" & astToStr(identifier) & ")":
timedTest "Slots - " & testName & " (" & astToStr(identifier) & ")":
var stateRef, postRef: ref BeaconState
new stateRef
new postRef

View File

@ -26,7 +26,7 @@ type
const ShufflingDir = JsonTestsDir/const_preset/"phase0"/"shuffling"/"core"/"shuffle"
suite "Official - Shuffling tests [Preset: " & preset():
test "Shuffling a sequence of N validators" & preset():
timedTest "Shuffling a sequence of N validators" & preset():
for file in walkDirRec(ShufflingDir):
let t = parseTest(file, Json, Shuffling)
let implResult = get_shuffled_seq(t.seed, t.count)

View File

@ -70,11 +70,11 @@ proc runSSZtests() =
for pathKind, sszType in walkDir(SSZDir, relative = true):
doAssert pathKind == pcDir
if sszType in Unsupported:
test &" Skipping {sszType:20} ✗✗✗":
timedTest &" Skipping {sszType:20} ✗✗✗":
discard
continue
test &" Testing {sszType}":
timedTest &" Testing {sszType}":
let path = SSZDir/sszType
for pathKind, sszTestKind in walkDir(path, relative = true):
doAssert pathKind == pcDir

View File

@ -10,7 +10,7 @@ import
os, unittest, strutils, streams, strformat, strscans,
macros,
# Status libraries
stint, stew/bitseqs,
stint, stew/bitseqs, ../testutil,
# Third-party
yaml,
# Beacon chain internals
@ -251,7 +251,7 @@ proc runSSZtests() =
for pathKind, sszType in walkDir(SSZDir, relative = true):
doAssert pathKind == pcDir
if sszType == "bitlist":
test &"**Skipping** {sszType} inputs - valid - skipped altogether":
timedTest &"**Skipping** {sszType} inputs - valid - skipped altogether":
# TODO: serialization of "type BitList[maxLen] = distinct BitSeq is not supported"
# https://github.com/status-im/nim-beacon-chain/issues/518
discard
@ -266,7 +266,7 @@ proc runSSZtests() =
of "containers":
skipped = " - skipping VarTestStruct, ComplexTestStruct, BitsStruct"
test &"Testing {sszType:12} inputs - valid" & skipped:
timedTest &"Testing {sszType:12} inputs - valid" & skipped:
let path = SSZDir/sszType/"valid"
for pathKind, sszSubType in walkDir(path, relative = true):
doAssert pathKind == pcDir

View File

@ -37,7 +37,7 @@ template runSuite(suiteDir, testName: string, transitionProc: untyped{ident}, us
for testDir in walkDirRec(suiteDir, yieldFilter = {pcDir}):
let unitTestName = testDir.rsplit(DirSep, 1)[1]
test testName & " - " & unitTestName & preset():
timedTest testName & " - " & unitTestName & preset():
var stateRef, postRef: ref BeaconState
new stateRef
new postRef

View File

@ -28,29 +28,29 @@ import
# - is_valid_genesis_state is not implemented
suite "[Unit - Spec - Genesis] Genesis block checks " & preset():
test "is_valid_genesis_state for a valid state":
timedTest "is_valid_genesis_state for a valid state":
discard initGenesisState(
num_validators = MIN_GENESIS_ACTIVE_VALIDATOR_COUNT,
genesis_time = MIN_GENESIS_TIME
)
discard "TODO"
test "Invalid genesis time":
timedTest "Invalid genesis time":
discard initGenesisState(
num_validators = MIN_GENESIS_ACTIVE_VALIDATOR_COUNT,
genesis_time = MIN_GENESIS_TIME.uint64 - 1
)
discard "TODO"
test "Validators with more than 32 ETH":
timedTest "Validators with more than 32 ETH":
discard "TODO"
test "More validators than minimum":
timedTest "More validators than minimum":
discard "TODO"
when false:
# TODO causes possible stack overflow in mainnet
test "Not enough validators":
timedTest "Not enough validators":
discard initGenesisState(
num_validators = MIN_GENESIS_ACTIVE_VALIDATOR_COUNT.uint64 - 1,
genesis_time = MIN_GENESIS_TIME.uint64 - 1

View File

@ -32,7 +32,7 @@ suite "[Unit - Spec - Block processing] Attestations " & preset():
# The BeaconState is exposed as "state" in the calling context
# The attestation to process must be named "attestation" in the calling context
test name:
timedTest name:
var state{.inject.}: BeaconState
deepCopy(state, genesisState)
@ -72,7 +72,7 @@ suite "[Unit - Spec - Block processing] Attestations " & preset():
# TODO check if this should be replaced
when false:
when MAX_EPOCHS_PER_CROSSLINK > 4'u64:
test "Valid attestation since max epochs per crosslinks [Skipped for preset: " & const_preset & ']':
timedTest "Valid attestation since max epochs per crosslinks [Skipped for preset: " & const_preset & ']':
discard
else:
valid_attestation("Valid attestation since max epochs per crosslinks"):

View File

@ -31,7 +31,7 @@ suite "[Unit - Spec - Block processing] Deposits " & preset():
template valid_deposit(deposit_amount: uint64, name: string): untyped =
# TODO: BLS signature
test "Deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
timedTest "Deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
$(MAX_EFFECTIVE_BALANCE div 10'u64^9) & " ETH)":
var state: BeaconState
deepCopy(state, genesisState)
@ -74,7 +74,7 @@ suite "[Unit - Spec - Block processing] Deposits " & preset():
valid_deposit(MAX_EFFECTIVE_BALANCE, "at")
valid_deposit(MAX_EFFECTIVE_BALANCE + 1, "over")
test "Validator top-up":
timedTest "Validator top-up":
var state: BeaconState
deepCopy(state, genesisState)

View File

@ -223,35 +223,35 @@ suite "[Unit - Spec - Epoch processing] Justification and Finalization " & prese
template resetState: untyped =
deepCopy(state, genesisState)
test " Rule I - 234 finalization with enough support":
timedTest " Rule I - 234 finalization with enough support":
resetState()
finalizeOn234(state, Epoch 5, sufficient_support = true)
test " Rule I - 234 finalization without support":
timedTest " Rule I - 234 finalization without support":
resetState()
finalizeOn234(state, Epoch 5, sufficient_support = false)
test " Rule II - 23 finalization with enough support":
timedTest " Rule II - 23 finalization with enough support":
resetState()
finalizeOn23(state, Epoch 4, sufficient_support = true)
test " Rule II - 23 finalization without support":
timedTest " Rule II - 23 finalization without support":
resetState()
finalizeOn23(state, Epoch 4, sufficient_support = false)
test " Rule III - 123 finalization with enough support":
timedTest " Rule III - 123 finalization with enough support":
resetState()
finalizeOn123(state, Epoch 6, sufficient_support = true)
test " Rule III - 123 finalization without support":
timedTest " Rule III - 123 finalization without support":
resetState()
finalizeOn123(state, Epoch 6, sufficient_support = false)
test " Rule IV - 12 finalization with enough support":
timedTest " Rule IV - 12 finalization with enough support":
resetState()
finalizeOn12(state, Epoch 3, sufficient_support = true)
test " Rule IV - 12 finalization without support":
timedTest " Rule IV - 12 finalization without support":
resetState()
finalizeOn12(state, Epoch 3, sufficient_support = false)

View File

@ -38,7 +38,7 @@ suite "Attestation pool processing" & preset():
{skipValidation})
genBlock = get_initial_beacon_block(genState)
test "Can add and retrieve simple attestation" & preset():
timedTest "Can add and retrieve simple attestation" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
let
@ -58,7 +58,7 @@ suite "Attestation pool processing" & preset():
check:
attestations.len == 1
test "Attestations may arrive in any order" & preset():
timedTest "Attestations may arrive in any order" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
let
@ -88,7 +88,7 @@ suite "Attestation pool processing" & preset():
check:
attestations.len == 1
test "Attestations should be combined" & preset():
timedTest "Attestations should be combined" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
let
@ -111,7 +111,7 @@ suite "Attestation pool processing" & preset():
check:
attestations.len == 1
test "Attestations may overlap, bigger first" & preset():
timedTest "Attestations may overlap, bigger first" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
@ -137,7 +137,7 @@ suite "Attestation pool processing" & preset():
check:
attestations.len == 1
test "Attestations may overlap, smaller first" & preset():
timedTest "Attestations may overlap, smaller first" & preset():
var cache = get_empty_per_epoch_cache()
withPool:
var

View File

@ -15,7 +15,7 @@ import options, unittest, sequtils, eth/trie/[db],
suite "Beacon chain DB" & preset():
test "empty database" & preset():
timedTest "empty database" & preset():
var
db = init(BeaconChainDB, newMemoryDB())
@ -26,7 +26,7 @@ suite "Beacon chain DB" & preset():
# TODO re-check crash here in mainnet
true
test "sanity check blocks" & preset():
timedTest "sanity check blocks" & preset():
var
db = init(BeaconChainDB, newMemoryDB())
@ -44,7 +44,7 @@ suite "Beacon chain DB" & preset():
check:
db.getStateRoot(root, blck.slot).get() == root
test "sanity check states" & preset():
timedTest "sanity check states" & preset():
var
db = init(BeaconChainDB, newMemoryDB())
@ -58,7 +58,7 @@ suite "Beacon chain DB" & preset():
db.containsState(root)
db.getState(root).get() == state
test "find ancestors" & preset():
timedTest "find ancestors" & preset():
var
db = init(BeaconChainDB, newMemoryDB())
x: ValidatorSig
@ -93,7 +93,7 @@ suite "Beacon chain DB" & preset():
doAssert toSeq(db.getAncestors(a0r)) == [(a0r, a0)]
doAssert toSeq(db.getAncestors(a2r)) == [(a2r, a2), (a1r, a1), (a0r, a0)]
test "sanity check genesis roundtrip" & preset():
timedTest "sanity check genesis roundtrip" & preset():
# This is a really dumb way of checking that we can roundtrip a genesis
# state. We've been bit by this because we've had a bug in the BLS
# serialization where an all-zero default-initialized bls signature could

View File

@ -7,7 +7,7 @@
{.used.}
import unittest
import unittest, ./testutil
when false:
import ../beacon_chain/beacon_node
@ -15,5 +15,5 @@ when false:
suite "Beacon node":
# Compile test
test "Compile":
timedTest "Compile":
discard

View File

@ -8,12 +8,12 @@
{.used.}
import
unittest,
times, unittest,
./testutil, ./testblockutil,
../beacon_chain/spec/[beaconstate, datatypes, digest]
suite "Beacon state" & preset():
test "Smoke test initialize_beacon_state_from_eth1" & preset():
timedTest "Smoke test initialize_beacon_state_from_eth1" & preset():
let state = initialize_beacon_state_from_eth1(
Eth2Digest(), 0,
makeInitialDeposits(SLOTS_PER_EPOCH, {}), {})

View File

@ -26,11 +26,11 @@ suite "Block pool processing" & preset():
pool = BlockPool.init(db)
state = pool.loadTailState()
test "getRef returns nil for missing blocks":
timedTest "getRef returns nil for missing blocks":
check:
pool.getRef(default Eth2Digest) == nil
test "loadTailState gets genesis block on first load" & preset():
timedTest "loadTailState gets genesis block on first load" & preset():
var
b0 = pool.get(state.blck.root)
@ -39,7 +39,7 @@ suite "Block pool processing" & preset():
b0.isSome()
toSeq(pool.blockRootsForSlot(GENESIS_SLOT)) == @[state.blck.root]
test "Simple block add&get" & preset():
timedTest "Simple block add&get" & preset():
let
b1 = makeBlock(state.data.data, state.blck.root, BeaconBlockBody())
b1Root = signing_root(b1)
@ -54,7 +54,7 @@ suite "Block pool processing" & preset():
b1Ref.get().refs.root == b1Root
hash_tree_root(state.data.data) == state.data.root
test "Reverse order block add & get" & preset():
timedTest "Reverse order block add & get" & preset():
let
b1 = addBlock(state.data.data, state.blck.root, BeaconBlockBody(), {})
b1Root = signing_root(b1)
@ -95,7 +95,7 @@ suite "Block pool processing" & preset():
pool2.get(b1Root).isSome()
pool2.get(b2Root).isSome()
test "isAncestorOf sanity" & preset():
timedTest "isAncestorOf sanity" & preset():
let
a = BlockRef(slot: Slot(1))
b = BlockRef(slot: Slot(2), parent: a)

View File

@ -8,11 +8,11 @@
{.used.}
import
unittest,
unittest, ./testutil,
../beacon_chain/spec/[helpers]
suite "Spec helpers":
test "integer_squareroot":
timedTest "integer_squareroot":
check:
integer_squareroot(0'u64) == 0'u64
integer_squareroot(1'u64) == 1'u64

View File

@ -1,7 +1,7 @@
{.used.}
import
unittest, stint, blscurve, stew/byteutils,
unittest, stint, blscurve, ./testutil, stew/byteutils,
../beacon_chain/[extras, interop, ssz],
../beacon_chain/spec/[beaconstate, crypto, helpers, datatypes]
@ -116,7 +116,7 @@ let depositsConfig = [
]
suite "Interop":
test "Mocked start private key":
timedTest "Mocked start private key":
for i, k in privateKeys:
let
key = makeInteropPrivKey(i)
@ -126,7 +126,7 @@ suite "Interop":
# getBytes is bigendian and returns full 48 bytes of key..
Uint256.fromBytesBE(key.getBytes()[48-32..<48]) == v
test "Interop signatures":
timedTest "Interop signatures":
for dep in depositsConfig:
let computed_sig = bls_sign(
key = dep.privkey,
@ -137,7 +137,7 @@ suite "Interop":
check:
dep.sig == computed_sig
test "Interop genesis":
timedTest "Interop genesis":
# Check against https://github.com/protolambda/zcli:
# zcli keys generate --to 64 | zcli genesis mock --genesis-time 1570500000 > /tmp/state.ssz
# zcli hash-tree-root /tmp.state.ssz

View File

@ -4,7 +4,7 @@ import
../beacon_chain/[conf, eth2_network]
template asyncTest*(name, body: untyped) =
test name:
timedTest name:
proc scenario {.async.} = body
waitFor scenario()

View File

@ -9,6 +9,7 @@
import
unittest, random, heapqueue, tables, strutils,
./testutil,
chronos,
../beacon_chain/peer_pool
@ -36,7 +37,7 @@ proc close*(peer: PeerTest) =
peer.future.complete()
suite "PeerPool testing suite":
test "addPeer() test":
timedTest "addPeer() test":
const peersCount = [
[10, 5, 5, 10, 5, 5],
[-1, 5, 5, 10, 5, 5],
@ -63,7 +64,7 @@ suite "PeerPool testing suite":
pool.lenAvailable == item[3]
pool.lenAvailable({PeerType.Incoming}) == item[4]
pool.lenAvailable({PeerType.Outgoing}) == item[5]
test "Acquire from empty pool":
timedTest "Acquire from empty pool":
var pool0 = newPeerPool[PeerTest, PeerTestID]()
var pool1 = newPeerPool[PeerTest, PeerTestID]()
var pool2 = newPeerPool[PeerTest, PeerTestID]()
@ -115,7 +116,7 @@ suite "PeerPool testing suite":
itemFut23.finished == false
itemFut24.finished == false
test "Acquire/Sorting and consistency test":
timedTest "Acquire/Sorting and consistency test":
const
TestsCount = 1000
MaxNumber = 1_000_000
@ -184,7 +185,7 @@ suite "PeerPool testing suite":
check waitFor(testAcquireRelease()) == TestsCount
test "deletePeer() test":
timedTest "deletePeer() test":
proc testDeletePeer(): Future[bool] {.async.} =
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer = PeerTest.init("deletePeer")
@ -237,7 +238,7 @@ suite "PeerPool testing suite":
result = true
check waitFor(testDeletePeer()) == true
test "Peer lifetime test":
timedTest "Peer lifetime test":
proc testPeerLifetime(): Future[bool] {.async.} =
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer = PeerTest.init("closingPeer")
@ -284,7 +285,7 @@ suite "PeerPool testing suite":
check waitFor(testPeerLifetime()) == true
test "Safe/Clear test":
timedTest "Safe/Clear test":
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)
@ -331,7 +332,7 @@ suite "PeerPool testing suite":
asyncCheck testConsumer()
check waitFor(testClose()) == true
test "Access peers by key test":
timedTest "Access peers by key test":
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)
@ -360,7 +361,7 @@ suite "PeerPool testing suite":
ppeer[].weight = 100
check pool["peer1"].weight == 100
test "Iterators test":
timedTest "Iterators test":
var pool = newPeerPool[PeerTest, PeerTestID]()
var peer1 = PeerTest.init("peer1", 10)
var peer2 = PeerTest.init("peer2", 9)

View File

@ -9,7 +9,8 @@
import
unittest, options,
stint, nimcrypto, eth/common, serialization/testing/generic_suite,
stint, eth/common, serialization/testing/generic_suite,
./testutil,
../beacon_chain/spec/[datatypes, digest],
../beacon_chain/ssz, ../beacon_chain/ssz/[navigator, dynamic_navigator]
@ -50,7 +51,7 @@ type
f0: uint8
f1: uint32
f2: EthAddress
f3: MDigest[256]
f3: Eth2Digest
f4: seq[byte]
f5: ValidatorIndex
@ -75,7 +76,7 @@ proc toDigest[N: static int](x: array[N, byte]): Eth2Digest =
result.data[0 .. N-1] = x
suite "SSZ navigator":
test "simple object fields":
timedTest "simple object fields":
var foo = Foo(bar: Bar(b: "bar", baz: Baz(i: 10'u64)))
let encoded = SSZ.encode(foo)
@ -87,7 +88,7 @@ suite "SSZ navigator":
let mountedBar = mountedFoo.bar
check mountedBar.baz.i == 10'u64
test "lists with max size":
timedTest "lists with max size":
let a = [byte 0x01, 0x02, 0x03].toDigest
let b = [byte 0x04, 0x05, 0x06].toDigest
let c = [byte 0x07, 0x08, 0x09].toDigest
@ -101,7 +102,7 @@ suite "SSZ navigator":
check $root2 == "9FB7D518368DC14E8CC588FB3FD2749BEEF9F493FEF70AE34AF5721543C67173"
suite "SSZ dynamic navigator":
test "navigating fields":
timedTest "navigating fields":
var fooOrig = Foo(bar: Bar(b: "bar", baz: Baz(i: 10'u64)))
let fooEncoded = SSZ.encode(fooOrig)

View File

@ -26,7 +26,7 @@ suite "Block processing" & preset():
genesisBlock = get_initial_beacon_block(genesisState)
genesisRoot = signing_root(genesisBlock)
test "Passes from genesis state, no block" & preset():
timedTest "Passes from genesis state, no block" & preset():
var
state = genesisState
@ -34,7 +34,7 @@ suite "Block processing" & preset():
check:
state.slot == genesisState.slot + 1
test "Passes from genesis state, empty block" & preset():
timedTest "Passes from genesis state, empty block" & preset():
var
state = genesisState
previous_block_root = signing_root(genesisBlock)
@ -47,7 +47,7 @@ suite "Block processing" & preset():
state.slot == genesisState.slot + 1
test "Passes through epoch update, no block" & preset():
timedTest "Passes through epoch update, no block" & preset():
var
state = genesisState
@ -56,7 +56,7 @@ suite "Block processing" & preset():
check:
state.slot == genesisState.slot + SLOTS_PER_EPOCH
test "Passes through epoch update, empty block" & preset():
timedTest "Passes through epoch update, empty block" & preset():
var
state = genesisState
previous_block_root = genesisRoot
@ -74,7 +74,7 @@ suite "Block processing" & preset():
check:
state.slot == genesisState.slot + SLOTS_PER_EPOCH
test "Attestation gets processed at epoch" & preset():
timedTest "Attestation gets processed at epoch" & preset():
var
state = genesisState
previous_block_root = genesisRoot

View File

@ -7,7 +7,7 @@
{.used.}
import unittest
import unittest, ./testutil
when false:
import ../beacon_chain/sync_protocol
@ -15,5 +15,5 @@ when false:
suite "Sync protocol":
# Compile test
test "Compile":
timedTest "Compile":
discard

View File

@ -8,7 +8,7 @@
{.used.}
import
unittest,
unittest, ./testutil,
../beacon_chain/spec/[datatypes, crypto],
../beacon_chain/ssz
@ -36,7 +36,7 @@ suite "Zero signature sanity checks":
# check(zeroSIg == deserZeroSig)
test "SSZ serialization roundtrip of BeaconBlockHeader":
timedTest "SSZ serialization roundtrip of BeaconBlockHeader":
let defaultBlockHeader = BeaconBlockHeader(
signature: BlsValue[Signature](kind: OpaqueBlob)

View File

@ -6,7 +6,7 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import
stew/endians2,
options, stew/endians2,
chronicles, eth/trie/[db],
../beacon_chain/[beacon_chain_db, block_pool, extras, ssz, state_transition,
validator_pool],
@ -87,7 +87,8 @@ proc addBlock*(
let
# Index from the new state, but registry from the old state.. hmm...
proposer = state.validators[proposer_index]
# In tests, let this throw
proposer = state.validators[proposer_index.get]
privKey = hackPrivKey(proposer)
# TODO ugly hack; API needs rethinking

View File

@ -6,14 +6,18 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import
stats, stew/endians2,
algorithm, strformat, stats, times, std/monotimes, stew/endians2,
chronicles, eth/trie/[db],
../beacon_chain/[beacon_chain_db, block_pool, ssz, beacon_node_types],
../beacon_chain/spec/datatypes
type
TestDuration = tuple[duration: float, label: string]
func preset*(): string =
" [Preset: " & const_preset & ']'
# For state_sim
template withTimer*(stats: var RunningStat, body: untyped) =
let start = getMonoTime()
@ -23,6 +27,15 @@ template withTimer*(stats: var RunningStat, body: untyped) =
let stop = getMonoTime()
stats.push (stop - start).inMicroseconds.float / 1000000.0
template withTimer*(duration: var float, body: untyped) =
let start = getMonoTime()
block:
body
duration = (getMonoTime() - start).inMicroseconds.float / 1000000.0
# For state_sim
template withTimerRet*(stats: var RunningStat, body: untyped): untyped =
let start = getMonoTime()
let tmp = block:
@ -32,6 +45,34 @@ template withTimerRet*(stats: var RunningStat, body: untyped): untyped =
tmp
var testTimes: seq[TestDuration]
proc summarizeLongTests*() =
# TODO clean-up and make machine-readable/storable the output
# TODO this is too hard-coded and mostly a demo for using the
# timedTest wrapper template for unittest
sort(testTimes, system.cmp, SortOrder.Descending)
echo ""
echo "10 longest individual test durations"
echo "------------------------------------"
for i, item in testTimes:
echo &"{item.duration:6.2f}s for {item.label}"
if i >= 10:
break
template timedTest*(name, body) =
var f: float
test name:
withTimer f:
body
# TODO reached for a failed test; maybe defer or similar
# TODO noto thread-safe as-is
testTimes.add (f, name)
proc makeTestDB*(tailState: BeaconState, tailBlock: BeaconBlock): BeaconChainDB =
result = init(BeaconChainDB, newMemoryDB())
BlockPool.preInit(result, tailState, tailBlock)
export inMicroseconds