nimbus-eth2/tests/consensus_spec/fixtures_utils.nim


# beacon_chain
# Copyright (c) 2018-2024 Status Research & Development GmbH
# Licensed and distributed under either of
#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
{.push raises: [].}

import
  # Standard library
  std/[strutils, typetraits],
  # Internals
  ./os_ops,
  ../../beacon_chain/spec/datatypes/[phase0, altair, bellatrix],
  ../../beacon_chain/spec/[
    eth2_merkleization, eth2_ssz_serialization, forks],
  # Status libs
  snappy,
  stew/byteutils
export
  eth2_merkleization, eth2_ssz_serialization

# Process current EF test format
# ---------------------------------------------
# #######################
# Path parsing

func forkForPathComponent*(forkPath: string): Opt[ConsensusFork] =
  for fork in ConsensusFork:
    if ($fork).toLowerAscii() == forkPath:
      return ok fork
  err()
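
# Usage sketch (illustrative): map an EF test directory component such as
# "deneb" back to its `ConsensusFork`:
#
#   doAssert forkForPathComponent("deneb").get() == ConsensusFork.Deneb
#   doAssert forkForPathComponent("not-a-fork").isNone()
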
# #######################
# JSON deserialization

func readValue*(r: var JsonReader, a: var seq[byte]) =
  ## Custom deserializer for seq[byte]
  a = hexToSeqByte(r.readValue(string))
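
# Usage sketch (illustrative): the deserializer above lets EF fixtures supply
# byte blobs as "0x"-prefixed hex strings, assuming json_serialization's
# `Json.decode` entry point:
#
#   doAssert Json.decode("\"0x0102\"", seq[byte]) == @[byte 0x01, 0x02]
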
# #######################
# Mock RuntimeConfig

func genesisTestRuntimeConfig*(consensusFork: ConsensusFork): RuntimeConfig =
  var res = defaultRuntimeConfig
  case consensusFork
  of ConsensusFork.Electra:
    res.ELECTRA_FORK_EPOCH = GENESIS_EPOCH
    res.DENEB_FORK_EPOCH = GENESIS_EPOCH
    res.CAPELLA_FORK_EPOCH = GENESIS_EPOCH
    res.BELLATRIX_FORK_EPOCH = GENESIS_EPOCH
    res.ALTAIR_FORK_EPOCH = GENESIS_EPOCH
  of ConsensusFork.Deneb:
    res.DENEB_FORK_EPOCH = GENESIS_EPOCH
    res.CAPELLA_FORK_EPOCH = GENESIS_EPOCH
    res.BELLATRIX_FORK_EPOCH = GENESIS_EPOCH
    res.ALTAIR_FORK_EPOCH = GENESIS_EPOCH
  of ConsensusFork.Capella:
    res.CAPELLA_FORK_EPOCH = GENESIS_EPOCH
    res.BELLATRIX_FORK_EPOCH = GENESIS_EPOCH
    res.ALTAIR_FORK_EPOCH = GENESIS_EPOCH
  of ConsensusFork.Bellatrix:
    res.BELLATRIX_FORK_EPOCH = GENESIS_EPOCH
    res.ALTAIR_FORK_EPOCH = GENESIS_EPOCH
  of ConsensusFork.Altair:
    res.ALTAIR_FORK_EPOCH = GENESIS_EPOCH
  of ConsensusFork.Phase0:
    discard
  res
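
# Usage sketch (illustrative): activate every fork up to and including the
# requested one at genesis, leaving later forks at their defaults:
#
#   let cfg = genesisTestRuntimeConfig(ConsensusFork.Capella)
#   doAssert cfg.CAPELLA_FORK_EPOCH == GENESIS_EPOCH
#   doAssert cfg.DENEB_FORK_EPOCH == defaultRuntimeConfig.DENEB_FORK_EPOCH
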
# #######################
# Test helpers

type
  UnconsumedInput* = object of CatchableError
  TestSizeError* = object of ValueError

  # https://github.com/ethereum/consensus-specs/tree/v1.3.0/tests/formats/rewards#rewards-tests
  Deltas* = object
    rewards*: List[Gwei, Limit VALIDATOR_REGISTRY_LIMIT]
    penalties*: List[Gwei, Limit VALIDATOR_REGISTRY_LIMIT]

  # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.8/specs/phase0/validator.md#eth1block
  Eth1Block* = object
    timestamp*: uint64
    deposit_root*: Eth2Digest
    deposit_count*: uint64
    # All other eth1 block fields

const
  FixturesDir* =
    currentSourcePath.rsplit(DirSep, 1)[0] / ".." / ".." / "vendor" / "nim-eth2-scenarios"
  SszTestsDir* = FixturesDir / "tests-v" & SPEC_VERSION
  MaxObjectSize* = 10_000_000
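
# For illustration: FixturesDir points at the vendored EF test vectors
# (vendor/nim-eth2-scenarios), and SszTestsDir at the subdirectory matching the
# compiled-in SPEC_VERSION, e.g. ".../tests-v1.4.0" for a hypothetical
# SPEC_VERSION of "1.4.0".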

proc relativeTestPathComponent*(path: string, suitePath = SszTestsDir): string =
  try:
    path.relativePath(suitePath)
  except Exception as exc:
    raiseAssert "relativePath failed unexpectedly: " & $exc.msg
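
# Usage sketch (illustrative): turn an absolute fixture path into the
# suite-relative component used in test names:
#
#   doAssert relativeTestPathComponent(SszTestsDir / "mainnet" / "phase0") ==
#     "mainnet" / "phase0"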

proc parseTest*(path: string, Format: typedesc[Json], T: typedesc): T =
  try:
    # debugEcho " [Debug] Loading file: \"", path, '\"'
    result = Format.decode(readFileBytes(path), T)
  except SerializationError as err:
    writeStackTrace()
    try:
      stderr.write $Format & " load issue for file \"", path, "\"\n"
      stderr.write err.formatMsg(path), "\n"
    except IOError:
      discard
    quit 1
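
# Usage sketch (illustrative; `dir` and the file name are hypothetical): decode
# a JSON fixture straight into a test type:
#
#   let deltas = parseTest(dir / "deltas.json", Json, Deltas)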

proc sszDecodeEntireInput*(
    input: openArray[byte],
    Decoded: type
): Decoded {.raises: [IOError, SerializationError, UnconsumedInput].} =
  let stream = unsafeMemoryInput(input)
  var reader = init(SszReader, stream)
  reader.readValue(result)

  if stream.readable:
    raise newException(UnconsumedInput, "Remaining bytes in the input")
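
# Usage sketch (illustrative; `sszBytes` is a hypothetical byte sequence):
# decode a blob and reject trailing garbage:
#
#   let deltas = sszDecodeEntireInput(sszBytes, Deltas)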

iterator walkTests*(dir: static string): string {.raises: [OSError].} =
  for kind, path in walkDir(
      dir/"pyspec_tests", relative = true, checkDir = true):
    yield path
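
# Usage sketch (illustrative; `OpDir` is a hypothetical compile-time path
# constant, as required by the `static string` parameter):
#
#   for testCase in walkTests(OpDir):
#     echo testCase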

proc parseTest*(path: string, Format: typedesc[SSZ], T: typedesc): T =
  try:
    # debugEcho " [Debug] Loading file: \"", path, '\"'
    sszDecodeEntireInput(snappy.decode(readFileBytes(path), MaxObjectSize), T)
  except IOError as err:
    writeStackTrace()
    try:
      stderr.write $Format & " load issue for file \"", path, "\"\n"
      stderr.write "IOError: " & err.msg, "\n"
    except IOError:
      discard
    quit 1
  except SerializationError as err:
    writeStackTrace()
    try:
      stderr.write $Format & " load issue for file \"", path, "\"\n"
      stderr.write err.formatMsg(path), "\n"
    except IOError:
      discard
    quit 1
  except UnconsumedInput as err:
    writeStackTrace()
    try:
      stderr.write $Format & " load issue for file \"", path, "\"\n"
      stderr.write "UnconsumedInput: " & err.msg, "\n"
    except IOError:
      discard
    quit 1
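
# Usage sketch (illustrative; `testDir` is a hypothetical case directory):
# decode a snappy-compressed SSZ fixture into a spec type:
#
#   let pre = parseTest(testDir / "pre.ssz_snappy", SSZ, phase0.BeaconState)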

proc loadForkedState*(
    path: string, consensusFork: ConsensusFork): ref ForkedHashedBeaconState =
  let state = (ref ForkedHashedBeaconState)(kind: consensusFork)
  withState(state[]):
    forkyState.data = parseTest(path, SSZ, consensusFork.BeaconState)
    forkyState.root = hash_tree_root(forkyState.data)
  state
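
# Usage sketch (illustrative; `testDir` is a hypothetical case directory): load
# a state fixture for a given fork into a ForkedHashedBeaconState:
#
#   let state = loadForkedState(testDir / "pre.ssz_snappy", ConsensusFork.Deneb)
#   doAssert state[].kind == ConsensusFork.Deneb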