nimbus-eth2/tests/consensus_spec/fixtures_utils.nim

# beacon_chain
# Copyright (c) 2018-2022 Status Research & Development GmbH
# Licensed and distributed under either of
#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import
  # Standard library
  std/[os, strutils, typetraits],
  # Internals
  ../../beacon_chain/spec/datatypes/[phase0, altair, bellatrix],
  ../../beacon_chain/spec/[
    eth2_merkleization, eth2_ssz_serialization, forks],
  # Status libs,
  snappy,
  stew/byteutils

export
  eth2_merkleization, eth2_ssz_serialization

# Process current EF test format
# ---------------------------------------------
# #######################
# Path parsing
func forkForPathComponent*(forkPath: string): Opt[BeaconStateFork] =
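  ## Map a test directory name such as "phase0" or "altair" to the
  ## corresponding `BeaconStateFork`, if recognised.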
  for fork in BeaconStateFork:
    if ($fork).toLowerAscii() == forkPath:
      return ok fork
  err()

# #######################
# JSON deserialization
func readValue*(r: var JsonReader, a: var seq[byte]) =
  ## Custom deserializer for seq[byte]
  a = hexToSeqByte(r.readValue(string))

# #######################
# Mock RuntimeConfig
func genesisTestRuntimeConfig*(stateFork: BeaconStateFork): RuntimeConfig =
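  ## Return a `RuntimeConfig` in which `stateFork` and all earlier forks
  ## are activated at genesis.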
  var res = defaultRuntimeConfig
  case stateFork
  of BeaconStateFork.Capella:
    res.CAPELLA_FORK_EPOCH = GENESIS_EPOCH
    res.BELLATRIX_FORK_EPOCH = GENESIS_EPOCH
    res.ALTAIR_FORK_EPOCH = GENESIS_EPOCH
  of BeaconStateFork.Bellatrix:
    res.BELLATRIX_FORK_EPOCH = GENESIS_EPOCH
    res.ALTAIR_FORK_EPOCH = GENESIS_EPOCH
  of BeaconStateFork.Altair:
    res.ALTAIR_FORK_EPOCH = GENESIS_EPOCH
  of BeaconStateFork.Phase0:
    discard

  res

# #######################
# Test helpers
type
  UnconsumedInput* = object of CatchableError
  TestSizeError* = object of ValueError

  # https://github.com/ethereum/consensus-specs/tree/v1.2.0-rc.1/tests/formats/rewards#rewards-tests
  Deltas* = object
    rewards*: List[uint64, Limit VALIDATOR_REGISTRY_LIMIT]
    penalties*: List[uint64, Limit VALIDATOR_REGISTRY_LIMIT]

  # https://github.com/ethereum/consensus-specs/blob/v1.3.0-alpha.0/specs/phase0/validator.md#eth1block
  Eth1Block* = object
    timestamp*: uint64
    deposit_root*: Eth2Digest
    deposit_count*: uint64
    # All other eth1 block fields

const
  FixturesDir* =
    currentSourcePath.rsplit(DirSep, 1)[0] / ".." / ".." / "vendor" / "nim-eth2-scenarios"
  SszTestsDir* = FixturesDir / "tests-v" & SPEC_VERSION
  MaxObjectSize* = 3_000_000

proc parseTest*(path: string, Format: typedesc[Json], T: typedesc): T =
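  ## Load a JSON test vector, aborting the test run with an error message if
  ## it cannot be decoded as `T`.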
  try:
    # debugEcho " [Debug] Loading file: \"", path, '\"'
    result = Format.loadFile(path, T)
  except SerializationError as err:
    writeStackTrace()
    stderr.write $Format & " load issue for file \"", path, "\"\n"
    stderr.write err.formatMsg(path), "\n"
    quit 1

template readFileBytes*(path: string): seq[byte] =
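  ## Read a file's contents as raw bytes.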
  cast[seq[byte]](readFile(path))

proc sszDecodeEntireInput*(input: openArray[byte], Decoded: type): Decoded =
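  ## Decode `input` as SSZ, requiring that every byte of the input is consumed.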
  let stream = unsafeMemoryInput(input)
  var reader = init(SszReader, stream)
  reader.readValue(result)

  if stream.readable:
    raise newException(UnconsumedInput, "Remaining bytes in the input")

iterator walkTests*(dir: static string): string =
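  ## Yield the name of each pyspec test case found under `dir`.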
  for kind, path in walkDir(
      dir/"pyspec_tests", relative = true, checkDir = true):
    yield path

proc parseTest*(path: string, Format: typedesc[SSZ], T: typedesc): T =
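  ## Load a snappy-compressed SSZ test vector, aborting the test run with an
  ## error message if it cannot be decoded as `T`.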
  try:
    # debugEcho " [Debug] Loading file: \"", path, '\"'
    sszDecodeEntireInput(snappy.decode(readFileBytes(path), MaxObjectSize), T)
  except SerializationError as err:
    writeStackTrace()
    stderr.write $Format & " load issue for file \"", path, "\"\n"
    stderr.write err.formatMsg(path), "\n"
    quit 1

from ../../beacon_chain/spec/datatypes/capella import BeaconState
proc loadForkedState*(
    path: string, fork: BeaconStateFork): ref ForkedHashedBeaconState =
  # TODO stack usage. newClone and assignClone do not seem to
  # prevent temporaries created by case objects
  let forkedState = new ForkedHashedBeaconState
  case fork
  of BeaconStateFork.Capella:
    let state = newClone(parseTest(path, SSZ, capella.BeaconState))
    forkedState.kind = BeaconStateFork.Capella
    forkedState.capellaData.data = state[]
    forkedState.capellaData.root = hash_tree_root(state[])
  of BeaconStateFork.Bellatrix:
    let state = newClone(parseTest(path, SSZ, bellatrix.BeaconState))
    forkedState.kind = BeaconStateFork.Bellatrix
    forkedState.bellatrixData.data = state[]
    forkedState.bellatrixData.root = hash_tree_root(state[])
  of BeaconStateFork.Altair:
    let state = newClone(parseTest(path, SSZ, altair.BeaconState))
    forkedState.kind = BeaconStateFork.Altair
    forkedState.altairData.data = state[]
    forkedState.altairData.root = hash_tree_root(state[])
  of BeaconStateFork.Phase0:
    let state = newClone(parseTest(path, SSZ, phase0.BeaconState))
    forkedState.kind = BeaconStateFork.Phase0
    forkedState.phase0Data.data = state[]
    forkedState.phase0Data.root = hash_tree_root(state[])

  forkedState