disentangle eth2 types from the ssz library (#2785)
* reorganize ssz dependencies

This PR continues the work in https://github.com/status-im/nimbus-eth2/pull/2646 and https://github.com/status-im/nimbus-eth2/pull/2779, as well as past issues with serialization and types, to disentangle SSZ from eth2 and at the same time simplify imports and exports with a structured approach.

The principal idea here is that when a library wants to introduce SSZ support, it does so via 3 files (see the sketch after this list):

* `ssz_codecs` which imports and reexports `codecs` - this covers the basic byte conversions and ensures no overloads get lost
* `xxx_merkleization` imports and exports `merkleization` to specialize and get access to `hash_tree_root` and friends
* `xxx_ssz_serialization` imports and exports `ssz_serialization` to specialize ssz for a specific library

Those that need to interact with SSZ always import the `xxx_` versions of the modules and never `ssz` itself, so as to keep imports simple and safe.

This is similar to how the REST / JSON-RPC serializers are structured, in that someone wanting to serialize spec types to REST-JSON will import `eth2_rest_serialization` and nothing else.

* split up ssz into a core library that is independent of eth2 types
* rename `bytes_reader` to `codec` to highlight that it contains coding and decoding of bytes and native ssz types
* remove tricky List init overload that causes compile issues
* get rid of top-level ssz import
* reenable merkleization tests
* move some "standard" json serializers to spec
* remove `ValidatorIndex` serialization for now
* remove test_ssz_merkleization
* add tests for over/underlong byte sequences
* fix broken seq[byte] test - seq[byte] is not an SSZ type

There are a few things this PR doesn't solve:

* like #2646, this PR is weak on how to handle root and other dontSerialize fields that "sometimes" should be computed - the same problem appears in REST / JSON-RPC etc

* Fix a build problem on macOS
* Another way to fix the macOS builds

Co-authored-by: Zahary Karadjov <zahary@gmail.com>
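As a minimal sketch of the layering described above (not part of this commit - the `myspec_*` module names are hypothetical stand-ins for the `eth2_*` modules added below):

```nim
# myspec_ssz_serialization.nim - a hypothetical spec library opting into SSZ
# via the three-file pattern; module names are illustrative only.
import
  ../ssz/ssz_serialization, # core SSZ reader/writer, independent of spec types
  ./myspec_codec,           # byte-level conversions for the spec's distinct types
  ./myspec_merkleization    # hash_tree_root and friends, specialized for the spec

# Re-export everything so that consumers import only this module and never
# reach into the core `ssz` modules directly.
export ssz_serialization, myspec_codec, myspec_merkleization

# A consumer then imports only the specialized module:
#   import ./myspec_ssz_serialization
#   let bytes = SSZ.encode(someSpecValue)
#   let value = SSZ.decode(bytes, SomeSpecType)
```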
Parent: a060985abc
Commit: a7a65bce42
@@ -153,6 +153,17 @@ OK: 12/12 Fail: 0/12 Skip: 0/12
+ navigating fields OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## SSZ generic roundtrip tests
```diff
+ case objects OK
+ lists OK
+ objects OK
+ sets OK
+ simple values OK
+ tables OK
+ tuple OK
```
OK: 7/7 Fail: 0/7 Skip: 0/7
## SSZ navigator
```diff
+ basictype OK
@@ -197,6 +208,12 @@ OK: 1/1 Fail: 0/1 Skip: 0/1
+ integer_squareroot OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## Specific field types
```diff
+ root update OK
+ roundtrip OK
```
OK: 2/2 Fail: 0/2 Skip: 0/2
## SyncManager test suite
```diff
+ [SyncQueue] Async pending and resetWait() test OK
@@ -268,6 +285,58 @@ OK: 3/3 Fail: 0/3 Skip: 0/3
+ random slot differences [Preset: mainnet] OK
```
OK: 1/1 Fail: 0/1 Skip: 0/1
## underlong values
```diff
Overlong SSZ.decode: BitArray[32] Skip
Overlong SSZ.decode: BitList[32] Skip
Overlong SSZ.decode: HashArray[32, system.uint8] Skip
+ Overlong SSZ.decode: HashList[system.uint64, 32] OK
+ Overlong SSZ.decode: List[system.uint64, 32] OK
Overlong SSZ.decode: Simple Skip
Overlong SSZ.decode: array[0..31, byte] Skip
Overlong SSZ.decode: bool Skip
Overlong SSZ.decode: limb_t Skip
Overlong SSZ.decode: uint16 Skip
Overlong SSZ.decode: uint32 Skip
Overlong SSZ.decode: uint8 Skip
+ Overlong readSszBytes: BitArray[32] OK
Overlong readSszBytes: BitList[32] Skip
+ Overlong readSszBytes: HashArray[32, system.uint8] OK
+ Overlong readSszBytes: HashList[system.uint64, 32] OK
+ Overlong readSszBytes: List[system.uint64, 32] OK
Overlong readSszBytes: Simple Skip
+ Overlong readSszBytes: array[0..31, byte] OK
+ Overlong readSszBytes: bool OK
+ Overlong readSszBytes: limb_t OK
+ Overlong readSszBytes: uint16 OK
+ Overlong readSszBytes: uint32 OK
+ Overlong readSszBytes: uint8 OK
+ Underlong SSZ.decode: BitArray[32] OK
+ Underlong SSZ.decode: BitList[32] OK
+ Underlong SSZ.decode: HashArray[32, system.uint8] OK
+ Underlong SSZ.decode: HashList[system.uint64, 32] OK
+ Underlong SSZ.decode: List[system.uint64, 32] OK
+ Underlong SSZ.decode: Simple OK
+ Underlong SSZ.decode: array[0..31, byte] OK
+ Underlong SSZ.decode: bool OK
+ Underlong SSZ.decode: limb_t OK
+ Underlong SSZ.decode: uint16 OK
+ Underlong SSZ.decode: uint32 OK
+ Underlong SSZ.decode: uint8 OK
+ Underlong readSszBytes: BitArray[32] OK
+ Underlong readSszBytes: BitList[32] OK
+ Underlong readSszBytes: HashArray[32, system.uint8] OK
+ Underlong readSszBytes: HashList[system.uint64, 32] OK
+ Underlong readSszBytes: List[system.uint64, 32] OK
+ Underlong readSszBytes: Simple OK
+ Underlong readSszBytes: array[0..31, byte] OK
+ Underlong readSszBytes: bool OK
+ Underlong readSszBytes: limb_t OK
+ Underlong readSszBytes: uint16 OK
+ Underlong readSszBytes: uint32 OK
+ Underlong readSszBytes: uint8 OK
```
OK: 36/48 Fail: 0/48 Skip: 12/48

---TOTAL---
OK: 140/140 Fail: 0/140 Skip: 0/140
OK: 185/197 Fail: 0/197 Skip: 12/197
Makefile
@@ -158,7 +158,6 @@ XML_TEST_BINARIES := \
TEST_BINARIES := \
proto_array \
fork_choice \
test_ssz_roundtrip \
state_sim \
block_sim
.PHONY: $(TEST_BINARIES) $(XML_TEST_BINARIES)
@@ -223,15 +222,6 @@ all_tests: | build deps
$(NIM_PARAMS) -d:chronicles_log_level=TRACE -d:const_preset=mainnet -d:chronicles_sinks="json[file]" && \
echo -e $(BUILD_END_MSG) "build/$@"

# TODO `test_ssz_roundtrip` is extracted from the rest of the tests because it's incompatible with unittest2
test_ssz_roundtrip: | build deps
+ echo -e $(BUILD_MSG) "build/$@" && \
MAKE="$(MAKE)" V="$(V)" $(ENV_SCRIPT) scripts/compile_nim_program.sh \
$@ \
"tests/$@.nim" \
$(NIM_PARAMS) -d:chronicles_log_level=TRACE -d:const_preset=mainnet -d:chronicles_sinks="json[file]" && \
echo -e $(BUILD_END_MSG) "build/$@"

# TODO `test_keystore` is extracted from the rest of the tests because it uses conflicting BLST headers
test_keystore: | build deps
+ echo -e $(BUILD_MSG) "build/$@" && \
@@ -13,12 +13,11 @@ import
serialization, chronicles, snappy,
eth/db/[kvstore, kvstore_sqlite3],
./networking/network_metadata, ./beacon_chain_db_immutable,
./spec/state_transition,
./spec/[eth2_ssz_serialization, eth2_merkleization, state_transition],
./spec/datatypes/[phase0, altair],
./ssz/[ssz_serialization, merkleization],
./filepath

export phase0, altair
export phase0, altair, eth2_ssz_serialization, eth2_merkleization

logScope: topics = "bc_db"

@@ -12,7 +12,7 @@ import
serialization,
eth/db/kvstore,
./spec/datatypes/[base, altair],
./ssz/[ssz_serialization, merkleization]
./spec/[eth2_ssz_serialization, eth2_merkleization]

type
# https://github.com/ethereum/eth2.0-specs/blob/v1.0.1/specs/phase0/beacon-chain.md#beaconstate

@@ -14,9 +14,8 @@ import
metrics,
chronicles, stew/byteutils, json_serialization/std/sets as jsonSets,
# Internal
../spec/[beaconstate, forks, helpers, validator],
../spec/[beaconstate, eth2_merkleization, forks, helpers, validator],
../spec/datatypes/[phase0, altair],
../ssz/[merkleization, types],
"."/[spec_cache, blockchain_dag, block_quarantine],
".."/[beacon_clock, beacon_node_types],
../fork_choice/fork_choice
@@ -13,7 +13,7 @@ import
stew/[assign2, results],
eth/keys,
".."/[beacon_clock],
../spec/[forks, helpers, signatures, signatures_batch, state_transition],
../spec/[eth2_merkleization, forks, helpers, signatures, signatures_batch, state_transition],
../spec/datatypes/[phase0, altair],
"."/[block_pools_types, blockchain_dag, block_quarantine]

@@ -11,8 +11,9 @@ import
std/[options, sequtils, tables, sets],
stew/[assign2, byteutils],
metrics, snappy, chronicles,
../ssz/[ssz_serialization, merkleization],
../spec/[helpers, validator, state_transition, beaconstate, forks],
../spec/[
beaconstate, eth2_merkleization, eth2_ssz_serialization, forks, helpers,
state_transition, validator],
../spec/datatypes/[phase0, altair],
".."/[beacon_clock, beacon_chain_db],
"."/[block_pools_types, block_quarantine, forkedbeaconstate_dbhelpers]
@@ -14,7 +14,8 @@ import
../networking/network_metadata,
web3, web3/confutils_defs, eth/keys, eth/p2p/discoveryv5/random2,
stew/io2,
../spec/datatypes/base, ../ssz/merkleization,
../spec/eth2_merkleization,
../spec/datatypes/base,
../validators/keystore_management

# Compiled version of /scripts/depositContract.v.py in this repo

@@ -15,11 +15,10 @@ import
web3, web3/ethtypes as web3Types, web3/ethhexstrings, eth/common/eth_types,
eth/async_utils, stew/byteutils,
# Local modules:
../spec/[forks, helpers],
../spec/[eth2_merkleization, forks, helpers],
../spec/datatypes/[base, merge],
../networking/network_metadata,
../consensus_object_pools/block_pools_types,
../ssz,
../rpc/eth_merge_web3,
".."/[beacon_chain_db, beacon_node_status],
./merkle_minimal
@@ -16,9 +16,8 @@ import
sequtils,
stew/endians2,
# Specs
../../beacon_chain/spec/digest,
../../beacon_chain/spec/datatypes/base,
../../beacon_chain/ssz/merkleization
../spec/[eth2_merkleization, digest],
../spec/datatypes/base

const depositContractLimit* = Limit(1'u64 shl DEPOSIT_CONTRACT_TREE_DEPTH)

@@ -16,11 +16,10 @@ import
../spec/[helpers, signatures_batch],
../spec/datatypes/base,
../consensus_object_pools/[
blockchain_dag, block_quarantine,
attestation_pool, exit_pool,
blockchain_dag, block_quarantine, attestation_pool, exit_pool,
block_pools_types, spec_cache
],
".."/[beacon_node_types, ssz, beacon_clock]
".."/[beacon_node_types, beacon_clock]

export BrHmacDrbgContext
@@ -16,7 +16,7 @@ import
../consensus_object_pools/[block_clearance, blockchain_dag, attestation_pool],
./consensus_manager,
".."/[beacon_clock, beacon_node_types],
../ssz/sszdump
../sszdump

export sszdump

@@ -8,7 +8,7 @@
{.push raises: [Defect].}

import
chronicles,
chronicles, chronos,
../spec/datatypes/base,
../consensus_object_pools/[blockchain_dag, attestation_pool]

@@ -21,7 +21,7 @@ import
spec_cache, blockchain_dag, block_quarantine, spec_cache,
attestation_pool, exit_pool
],
".."/[beacon_node_types, ssz, beacon_clock],
".."/[beacon_node_types, beacon_clock],
../validators/attestation_aggregation,
./batch_validation
@@ -9,9 +9,8 @@

import
stew/endians2, stint,
./ssz/merkleization,
./extras,
spec/[keystore, signatures],
spec/[eth2_merkleization, keystore, signatures],
spec/datatypes/base

func get_eth1data_stub*(deposit_count: uint64, current_epoch: Epoch): Eth1Data =

@@ -32,11 +32,9 @@ import
libp2p/utils/semaphore,
eth/[keys, async_utils], eth/p2p/p2p_protocol_dsl,
eth/net/nat, eth/p2p/discoveryv5/[enr, node, random2],
".."/[
version, conf,
ssz/ssz_serialization, beacon_clock],
".."/[version, conf, beacon_clock],
../spec/datatypes/[phase0, altair],
../spec/[network, helpers, forks],
../spec/[eth2_ssz_serialization, network, helpers, forks],
../validators/keystore_management,
./eth2_discovery, ./peer_pool, ./libp2p_json_serialization
@@ -45,7 +43,7 @@ when chronicles.enabledLogLevel == LogLevel.TRACE:

export
version, multiaddress, peer_pool, peerinfo, p2pProtocol, connection,
libp2p_json_serialization, ssz_serialization, results, eth2_discovery
libp2p_json_serialization, eth2_ssz_serialization, results, eth2_discovery

logScope:
topics = "networking"
@@ -13,10 +13,9 @@ import
eth/common/eth_types as commonEthTypes,
web3/[ethtypes, conversions],
chronicles,
json_serialization,
json_serialization/std/[options, sets, net], serialization/errors,
../ssz/[navigator, spec_types],
eth/common/eth_types_json_serialization,
../ssz/[navigator],
../spec/eth2_ssz_serialization,
../spec/datatypes/phase0

# ATTENTION! This file will produce a large C file, because we are inlining

@@ -36,7 +36,6 @@ import
./spec/datatypes/[altair, phase0],
./spec/eth2_apis/rpc_beacon_client,
./spec/[beaconstate, forks, helpers, network, weak_subjectivity, signatures],
./ssz/merkleization,
./consensus_object_pools/[
blockchain_dag, block_quarantine, block_clearance, block_pools_types,
attestation_pool, exit_pool, spec_cache],
@@ -12,9 +12,8 @@ import
../consensus_object_pools/[blockchain_dag, exit_pool],
../gossip_processing/gossip_validation,
../validators/validator_duties,
../spec/[forks, network],
../spec/[eth2_merkleization, forks, network],
../spec/datatypes/[phase0, altair],
../ssz/merkleization,
./rest_utils

logScope: topics = "rest_beaconapi"

@@ -8,13 +8,12 @@ import
stew/[results, base10],
chronicles,
nimcrypto/utils as ncrutils,
../spec/datatypes/[phase0],
../beacon_node_common, ../networking/eth2_network,
../consensus_object_pools/[blockchain_dag, spec_cache, attestation_pool],
../gossip_processing/gossip_validation,
../validators/validator_duties,
../spec/[forks, network],
../spec/datatypes/[phase0],
../ssz/merkleization,
./rest_utils

logScope: topics = "rest_validatorapi"

@@ -18,9 +18,8 @@ import
../validators/validator_duties,
../gossip_processing/gossip_validation,
../consensus_object_pools/blockchain_dag,
../spec/[forks, network],
../spec/[eth2_merkleization, forks, network],
../spec/datatypes/[phase0],
../ssz/merkleization,
./rpc_utils

logScope: topics = "beaconapi"
@@ -20,7 +20,7 @@ import
../spec/[forks, helpers, network, signatures],
../spec/datatypes/phase0,
../spec/eth2_apis/rpc_types,
../consensus_object_pools/[blockchain_dag, spec_cache, attestation_pool], ../ssz/merkleization,
../consensus_object_pools/[blockchain_dag, spec_cache, attestation_pool],
../beacon_node_common, ../beacon_node_types,
../validators/validator_duties,
../networking/eth2_network,

@@ -12,9 +12,9 @@ import
stew/assign2,
json_serialization/std/sets,
chronicles,
../extras, ../ssz/merkleization,
../extras,
./datatypes/[phase0, altair, merge],
"."/[helpers, signatures, validator],
"."/[eth2_merkleization, helpers, signatures, validator],
../../nbench/bench_lab

export extras, phase0, altair, merge
@@ -24,15 +24,22 @@

{.push raises: [Defect].}

import
json_serialization

export
json_serialization

import
std/[macros, hashes, intsets, strutils, tables, typetraits],
stew/[assign2, byteutils], chronicles,
json_serialization,
stew/[assign2, byteutils],
chronicles,
chronos/timer,
../../version, ../../ssz/types as sszTypes,
".."/[crypto, digest, presets]

export
crypto, digest, sszTypes, presets, json_serialization
timer, crypto, digest, sszTypes, presets

# Presently, we're reusing the data types from the serialization (uint64) in the
# objects we pass around to the beacon chain logic, thus keeping the two
@@ -627,15 +634,6 @@ proc readValue*(reader: var JsonReader, value: var SubnetId)
reader, "Subnet id must be <= " & $ATTESTATION_SUBNET_COUNT)
value = SubnetId(v)

proc writeValue*(writer: var JsonWriter, value: HashList)
{.raises: [IOError, SerializationError, Defect].} =
writeValue(writer, value.data)

proc readValue*(reader: var JsonReader, value: var HashList)
{.raises: [IOError, SerializationError, Defect].} =
value.resetCache()
readValue(reader, value.data)

template writeValue*(writer: var JsonWriter, value: Version | ForkDigest) =
writeValue(writer, $value)
@@ -731,29 +729,6 @@ func `as`*(d: DepositData, T: type DepositMessage): T =
ethTimeUnit Slot
ethTimeUnit Epoch

Json.useCustomSerialization(BitSeq):
read:
try:
BitSeq reader.readValue(string).hexToSeqByte
except ValueError:
raiseUnexpectedValue(reader, "A BitSeq value should be a valid hex string")

write:
writer.writeValue "0x" & seq[byte](value).toHex

template readValue*(reader: var JsonReader, value: var List) =
value = type(value)(readValue(reader, seq[type value[0]]))

template writeValue*(writer: var JsonWriter, value: List) =
writeValue(writer, asSeq value)

template readValue*(reader: var JsonReader, value: var BitList) =
type T = type(value)
value = T readValue(reader, BitSeq)

template writeValue*(writer: var JsonWriter, value: BitList) =
writeValue(writer, BitSeq value)

template newClone*[T: not ref](x: T): ref T =
# TODO not nil in return type: https://github.com/nim-lang/Nim/issues/14146
# TODO use only when x is a function call that returns a new instance!
@@ -24,14 +24,14 @@

{.push raises: [Defect].}

import ./base
export base

import
std/[macros, intsets, json, strutils, tables],
stew/[assign2, byteutils], chronicles,
json_serialization/types as jsonTypes

import ./base
export base

type
# https://github.com/ethereum/eth2.0-specs/blob/v1.0.1/specs/phase0/beacon-chain.md#beaconstate
BeaconState* = object

@@ -14,7 +14,8 @@ import
../datatypes/[phase0, altair, merge],
./rest_types

export results, peerid, presto, json_serialization, options, net, rest_types
export
results, peerid, presto, json_serialization, options, net, rest_types

Json.createFlavor RestJson
@@ -1,21 +1,16 @@
# beacon_chain
# Copyright (c) 2018-2020 Status Research & Development GmbH
# Copyright (c) 2018-2021 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

# SSZ Serialization (simple serialize)
# See https://github.com/ethereum/eth2.0-specs/blob/master/specs/simple-serialize.md

{.push raises: [Defect].}

# TODO Many RVO bugs, careful
# https://github.com/nim-lang/Nim/issues/14470
# https://github.com/nim-lang/Nim/issues/14126
# Import this module to get access to `hash_tree_root` for spec types

import
ssz/[merkleization, ssz_serialization, types]
./ssz_codec,
../ssz/merkleization

export
merkleization, ssz_serialization, types
export ssz_codec, merkleization
@@ -0,0 +1,40 @@
# beacon_chain
# Copyright (c) 2018-2021 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [Defect].}

# Import this module to get access to `SSZ.encode` and `SSZ.decode` for spec types

import
./ssz_codec,
../ssz/ssz_serialization,
./datatypes/[phase0, altair],
./eth2_merkleization

export phase0, altair, ssz_codec, ssz_serialization, eth2_merkleization

proc readAndUpdateRoot(data: openArray[byte], val: var auto, updateRoot = true) {.
raises: [Defect, MalformedSszError, SszSizeMismatchError].} =
readSszValue(data, val)
if updateRoot:
val.root = hash_tree_root(val.message)

# TODO this is an ugly way to get a stronger match than the generic readSszBytes
# and avoid ambiguities - `var` + typeclasses are problematic

template readSszBytes*(
data: openArray[byte], val: var phase0.SignedBeaconBlock, updateRoot = true) =
readAndUpdateRoot(data, val, updateRoot)
template readSszBytes*(
data: openArray[byte], val: var phase0.TrustedSignedBeaconBlock, updateRoot = true) =
readAndUpdateRoot(data, val, updateRoot)
template readSszBytes*(
data: openArray[byte], val: var altair.SignedBeaconBlock, updateRoot = true) =
readAndUpdateRoot(data, val, updateRoot)
template readSszBytes*(
data: openArray[byte], val: var altair.TrustedSignedBeaconBlock, updateRoot = true) =
readAndUpdateRoot(data, val, updateRoot)
@@ -16,9 +16,12 @@ import
stew/[byteutils, endians2],
# Internal
./datatypes/[phase0, altair],
../ssz/merkleization
./eth2_merkleization, ./ssz_codec

export phase0, altair
# TODO although eth2_merkleization already exports ssz_codec, *sometimes* code
# fails to compile if the export is not done here also
export
phase0, altair, eth2_merkleization, ssz_codec

# https://github.com/ethereum/eth2.0-specs/blob/v1.0.1/specs/phase0/beacon-chain.md#integer_squareroot
func integer_squareroot*(n: SomeInteger): SomeInteger =

@@ -8,8 +8,7 @@
{.push raises: [Defect].}

import
../ssz/merkleization,
./datatypes/[phase0, altair], ./helpers
./datatypes/[phase0, altair], ./helpers, ./eth2_merkleization

export phase0, altair

@@ -12,7 +12,6 @@ import
blscurve,
stew/[byteutils, results],
# Internal
../ssz/merkleization,
"."/[helpers, beaconstate, forks],
"."/datatypes/[altair, phase0]
@@ -0,0 +1,45 @@
# beacon_chain
# Copyright (c) 2018-2021 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [Defect].}
{.pragma: raisesssz, raises: [Defect, MalformedSszError, SszSizeMismatchError].}

import
std/[typetraits],
../ssz/codec,
../spec/datatypes/[phase0, altair],
./eth2_merkleization

export codec, phase0, altair, typetraits, eth2_merkleization

# Coding and decoding of SSZ to spec-specific types

template toSszType*(v: Slot|Epoch): auto = uint64(v)
template toSszType*(v: BlsCurveType): auto = toRaw(v)
template toSszType*(v: ForkDigest|GraffitiBytes): auto = distinctBase(v)
template toSszType*(v: Version): auto = distinctBase(v)

func fromSszBytes*(T: type GraffitiBytes, data: openArray[byte]): T {.raisesssz.} =
if data.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr data[0], sizeof(result))

template fromSszBytes*(T: type Slot, bytes: openArray[byte]): T =
T fromSszBytes(uint64, bytes)

template fromSszBytes*(T: type Epoch, bytes: openArray[byte]): T =
T fromSszBytes(uint64, bytes)

func fromSszBytes*(T: type ForkDigest, bytes: openArray[byte]): T {.raisesssz.} =
if bytes.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr bytes[0], sizeof(result))

func fromSszBytes*(T: type Version, bytes: openArray[byte]): T {.raisesssz.} =
if bytes.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr bytes[0], sizeof(result))
@@ -46,11 +46,10 @@ import
stew/results,
metrics,
../extras,
../ssz/merkleization,
./datatypes/[phase0, altair],
"."/[
helpers, signatures, validator, beaconstate, state_transition_block,
state_transition_epoch, forks],
beaconstate, eth2_merkleization, forks, helpers, signatures,
state_transition_block, state_transition_epoch, validator],
../../nbench/bench_lab

export extras, phase0, altair

@@ -21,10 +21,10 @@

import
std/[algorithm, intsets, options, sequtils, sets, tables],
chronicles,
../extras, ../ssz/merkleization, metrics,
chronicles, metrics,
../extras,
./datatypes/[phase0, altair],
"."/[beaconstate, helpers, validator, signatures],
"."/[beaconstate, eth2_merkleization, helpers, validator, signatures],
../../nbench/bench_lab

export extras, phase0, altair

@@ -23,9 +23,8 @@ import
std/[math, sequtils, sets, tables, algorithm],
stew/[bitops2], chronicles,
../extras,
../ssz/merkleization,
./datatypes/[phase0, altair],
"."/[beaconstate, helpers, validator],
"."/[beaconstate, eth2_merkleization, helpers, validator],
../../nbench/bench_lab

export extras, phase0, altair
@@ -8,7 +8,10 @@
{.push raises: [Defect].}

import
stew/[bitops2, endians2, ptrops]
stew/[bitops2, endians2, byteutils, ptrops],
json_serialization

export json_serialization

type
Bytes = seq[byte]
@@ -319,3 +322,12 @@ func countOnes*(a: BitArray): int =
for bit in a:
if bit: inc result

Json.useCustomSerialization(BitSeq):
read:
try:
BitSeq reader.readValue(string).hexToSeqByte
except ValueError:
raiseUnexpectedValue(reader, "A BitSeq value should be a valid hex string")

write:
writer.writeValue "0x" & seq[byte](value).toHex
@@ -6,16 +6,20 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [Defect].}

{.pragma: raisesssz, raises: [Defect, MalformedSszError, SszSizeMismatchError].}

import
std/[typetraits, options],
stew/[endians2, objects],
../spec/digest, ./types, ./spec_types, ./merkleization,
../spec/datatypes/[phase0, altair]
# Coding and decoding of primitive SSZ types - every "simple" type passed to
# and from the SSZ library must have a `fromSssBytes` and `toSszType` overload.

template raiseIncorrectSize(T: type) =
import
std/typetraits,
stew/[endians2, objects],
../spec/digest, ./types

export
digest, types

template raiseIncorrectSize*(T: type) =
const typeName = name(T)
raise newException(MalformedSszError,
"SSZ " & typeName & " input of incorrect size")
@@ -49,30 +53,6 @@ func fromSszBytes*(T: type Eth2Digest, data: openArray[byte]): T {.raisesssz.} =
raiseIncorrectSize T
copyMem(result.data.addr, unsafeAddr data[0], sizeof(result.data))

func fromSszBytes*(T: type GraffitiBytes, data: openArray[byte]): T {.raisesssz.} =
if data.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr data[0], sizeof(result))

template fromSszBytes*(T: type Slot, bytes: openArray[byte]): T =
T fromSszBytes(uint64, bytes)

template fromSszBytes*(T: type Epoch, bytes: openArray[byte]): T =
T fromSszBytes(uint64, bytes)

template fromSszBytes*(T: type ParticipationFlags, bytes: openArray[byte]): T =
T fromSszBytes(uint8, bytes)

func fromSszBytes*(T: type ForkDigest, bytes: openArray[byte]): T {.raisesssz.} =
if bytes.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr bytes[0], sizeof(result))

func fromSszBytes*(T: type Version, bytes: openArray[byte]): T {.raisesssz.} =
if bytes.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr bytes[0], sizeof(result))

template fromSszBytes*(T: type BitSeq, bytes: openArray[byte]): auto =
BitSeq @bytes

@@ -96,7 +76,7 @@ template checkForForbiddenBits(ResulType: type,
raiseIncorrectSize ResulType

func readSszValue*[T](input: openArray[byte],
val: var T, updateRoot: bool = true) {.raisesssz.} =
val: var T) {.raisesssz.} =
mixin fromSszBytes, toSszType

template readOffsetUnchecked(n: int): uint32 {.used.}=
@@ -108,14 +88,6 @@ func readSszValue*[T](input: openArray[byte],
raise newException(MalformedSszError, "SSZ list element offset points past the end of the input")
int(offset)

#when result is List:
# result.setOutputSize input.len
# readOpenArray(toSeq result, input)

#elif result is array:
# result.checkOutputSize input.len
# readOpenArray(result, input)

when val is BitList:
if input.len == 0:
raise newException(MalformedSszError, "Invalid empty SSZ BitList value")
@@ -256,9 +228,21 @@ func readSszValue*[T](input: openArray[byte],
type(field),
input.toOpenArray(int(startOffset), int(endOffset - 1)))

when val is phase0.SignedBeaconBlock | phase0.TrustedSignedBeaconBlock |
altair.SignedBeaconBlock | altair.TrustedSignedBeaconBlock:
if updateRoot:
val.root = hash_tree_root(val.message)
else:
unsupported T

# Identity conversions for core SSZ types

template toSszType*(v: auto): auto =
## toSszType converts a given value into one of the primitive types supported
## by SSZ - to add support for a custom type (for example a `distinct` type),
## add an overload for `toSszType` which converts it to one of the `SszType`
## types, as well as a `fromSszBytes`.
type T = type(v)
when T is SszType:
when T is Eth2Digest:
v.data
else:
v
else:
unsupported T
@@ -12,10 +12,10 @@ import
std/[strutils, parseutils],
stew/objects, faststreams/outputs, json_serialization/writer,
../spec/datatypes/base,
./bytes_reader, ./types, ./navigator, ./spec_types
./codec, ./types, ./navigator

export
bytes_reader, navigator, types
codec, navigator, types

type
ObjKind = enum

@@ -16,10 +16,10 @@ import
stew/ranges/ptr_arith,
serialization/testing/tracing,
../spec/digest,
"."/[bitseqs, spec_types, types]
"."/[bitseqs, codec, types]

export
spec_types, types
codec, bitseqs, digest, types

when hasSerializationTracing:
import stew/byteutils, typetraits
@@ -676,7 +676,7 @@ func hashTreeRootCached*(x: HashList): Eth2Digest =

x.hashes[0]

func hash_tree_root*(x: auto): Eth2Digest {.raises: [Defect].} =
func hash_tree_root*(x: auto): Eth2Digest =
trs "STARTING HASH TREE ROOT FOR TYPE ", name(type(x))
mixin toSszType

@@ -689,4 +689,3 @@ func hash_tree_root*(x: auto): Eth2Digest {.raises: [Defect].} =
hashTreeRootAux toSszType(x)

trs "HASH TREE ROOT FOR ", name(type x), " = ", "0x", $result
@@ -10,9 +10,9 @@

import
stew/[ptrops, objects], stew/ranges/ptr_arith,
./bytes_reader, ./types, ./spec_types
./codec, ./types

export bytes_reader, types
export codec, types

type
MemRange* = object

@@ -1,30 +0,0 @@
# beacon_chain
# Copyright (c) 2018-2021 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [Defect].}

import
std/[typetraits],
../spec/datatypes/[phase0, altair]

export phase0, altair, typetraits

# Eth2-spec-specific type handling that is not generic to SSZ

template toSszType*(x: auto): auto =
mixin toSszType

# Please note that BitArray doesn't need any special treatment here
# because it can be considered a regular fixed-size object type.

# enum should not be added here as nim will raise Defect when value is out
# of range
when x is Slot|Epoch|ValidatorIndex: uint64(x)
elif x is Eth2Digest: x.data
elif x is BlsCurveType: toRaw(x)
elif x is ForkDigest|Version|GraffitiBytes: distinctBase(x)
else: x
@@ -12,13 +12,13 @@
# https://github.com/ethereum/eth2.0-specs/blob/v1.0.1/ssz/simple-serialize.md#serialization

import
std/[typetraits, options],
std/typetraits,
stew/[endians2, leb128, objects],
serialization, serialization/testing/tracing,
./bytes_reader, ./bitseqs, ./types, ./spec_types
./codec, ./bitseqs, ./types

export
serialization, types, spec_types, bitseqs
serialization, codec, types, bitseqs

type
SszReader* = object
@@ -48,7 +48,7 @@ template sizePrefixed*[TT](x: TT): untyped =

proc init*(T: type SszReader,
stream: InputStream,
updateRoot: bool = true): T {.raises: [Defect].} =
updateRoot: bool = true): T =
T(stream: stream, updateRoot: updateRoot)

proc writeFixedSized(s: var (OutputStream|WriteCursor), x: auto) {.raises: [Defect, IOError].} =
@@ -85,12 +85,12 @@ template supports*(_: type SSZ, T: type): bool =
mixin toSszType
anonConst compiles(fixedPortionSize toSszType(declval T))

func init*(T: type SszWriter, stream: OutputStream): T {.raises: [Defect].} =
func init*(T: type SszWriter, stream: OutputStream): T =
result.stream = stream

proc writeVarSizeType(w: var SszWriter, value: auto) {.gcsafe, raises: [Defect, IOError].}

proc beginRecord*(w: var SszWriter, TT: type): auto {.raises: [Defect].} =
proc beginRecord*(w: var SszWriter, TT: type): auto =
type T = TT
when isFixedSize(T):
FixedSizedWriterCtx()
@@ -222,23 +222,23 @@ proc writeValue*[T](w: var SszWriter, x: SizePrefixed[T]) {.raises: [Defect, IOE
let length = toBytes(uint64(w.stream.pos - initPos), Leb128)
cursor.finalWrite length.toOpenArray()

proc readValue*[T](r: var SszReader, val: var T) {.raises: [Defect, MalformedSszError, SszSizeMismatchError, IOError].} =
proc readValue*(r: var SszReader, val: var auto) {.raises: [Defect, MalformedSszError, SszSizeMismatchError, IOError].} =
mixin readSszBytes
type T = type val
when isFixedSize(T):
const minimalSize = fixedPortionSize(T)
if r.stream.readable(minimalSize):
readSszValue(r.stream.read(minimalSize), val, r.updateRoot)
readSszBytes(r.stream.read(minimalSize), val, r.updateRoot)
else:
raise newException(MalformedSszError, "SSZ input of insufficient size")
else:
# TODO(zah) Read the fixed portion first and precisely measure the
# size of the dynamic portion to consume the right number of bytes.
readSszValue(r.stream.read(r.stream.len.get), val, r.updateRoot)
readSszBytes(r.stream.read(r.stream.len.get), val, r.updateRoot)

proc readSszBytes*[T](data: openArray[byte], val: var T, updateRoot = true) {.
raises: [Defect, MalformedSszError, SszSizeMismatchError].} =
when isFixedSize(T):
const minimalSize = fixedPortionSize(T)
if data.len < minimalSize:
raise newException(MalformedSszError, "SSZ input of insufficient size")

readSszValue(data, val, updateRoot)
# Overload `readSszBytes` to perform custom operations on T after
# deserialization
mixin readSszValue
readSszValue(data, val)
@@ -8,13 +8,14 @@
{.push raises: [Defect].}

import
std/[tables, options, typetraits, strformat],
std/[tables, typetraits, strformat],
stew/shims/macros, stew/[byteutils, bitops2, objects],
serialization/[object_serialization, errors],
json_serialization,
"."/[bitseqs],
../spec/digest

export bitseqs
export bitseqs, json_serialization

const
offsetSize* = 4
@@ -131,10 +132,13 @@ type
actualSszSize*: int
elementSize*: int

template asSeq*(x: List): auto = distinctBase(x)
# These are supported by the SSZ library - anything that's not covered here
# needs to overload toSszType and fromSszBytes
SszType* =
BasicType | array | HashArray | List | HashList | BitArray | BitList |
object | tuple

template init*[T](L: type List, x: seq[T], N: static Limit): auto =
List[T, N](x)
template asSeq*(x: List): auto = distinctBase(x)

template init*[T, N](L: type List[T, N], x: seq[T]): auto =
List[T, N](x)
@@ -326,6 +330,11 @@ proc addDefault*(x: var HashList): ptr x.T =
clearCaches(x, x.data.len() - 1)
addr x.data[^1]

template init*[T, N](L: type HashList[T, N], x: seq[T]): auto =
var tmp = HashList[T, N](data: List[T, N].init(x))
tmp.growHashes()
tmp

template len*(x: HashList|HashArray): auto = len(x.data)
template low*(x: HashList|HashArray): auto = low(x.data)
template high*(x: HashList|HashArray): auto = high(x.data)
@@ -379,7 +388,10 @@ macro unsupported*(T: typed): untyped =
# so we use this macro instead. It's also much better at figuring
# out the actual type that was used in the instantiation.
# File both problems as issues.
error "SSZ serialization of the type " & humaneTypeName(T) & " is not supported"
when T is enum:
error "Nim `enum` types map poorly to SSZ and make it easy to introduce security issues because of spurious Defect's"
else:
error "SSZ serialization of the type " & humaneTypeName(T) & " is not supported, overload toSszType and fromSszBytes"

template ElemType*(T: type HashArray): untyped =
T.T
@@ -529,3 +541,25 @@ method formatMsg*(
&"SSZ size mismatch, element {err.elementSize}, actual {err.actualSszSize}, type {err.deserializedType}, file {filename}"
except CatchableError:
"SSZ size mismatch"

template readValue*(reader: var JsonReader, value: var List) =
value = type(value)(readValue(reader, seq[type value[0]]))

template writeValue*(writer: var JsonWriter, value: List) =
writeValue(writer, asSeq value)

proc writeValue*(writer: var JsonWriter, value: HashList)
{.raises: [IOError, SerializationError, Defect].} =
writeValue(writer, value.data)

proc readValue*(reader: var JsonReader, value: var HashList)
{.raises: [IOError, SerializationError, Defect].} =
value.resetCache()
readValue(reader, value.data)

template readValue*(reader: var JsonReader, value: var BitList) =
type T = type(value)
value = T readValue(reader, BitSeq)

template writeValue*(writer: var JsonWriter, value: BitList) =
writeValue(writer, BitSeq value)
@@ -8,10 +8,11 @@
{.push raises: [Defect].}

import
os, strformat, chronicles,
./ssz_serialization,
../spec/datatypes/[phase0, altair],
../consensus_object_pools/block_pools_types
std/[os, strformat],
chronicles,
./spec/[eth2_ssz_serialization, eth2_merkleization],
./spec/datatypes/[phase0, altair],
./consensus_object_pools/block_pools_types

# Dump errors are generally not fatal where used currently - the code calling
# these functions, like most code, is not exception safe

@@ -14,7 +14,6 @@ import
../spec/forks,
../networking/eth2_network,
../beacon_node_types,
../ssz/merkleization,
../gossip_processing/block_processor,
"."/sync_protocol, "."/sync_manager
export sync_manager
@@ -7,20 +7,19 @@ import chronos, presto, presto/client as presto_client, chronicles, confutils,
# Local modules
import
../spec/datatypes/[phase0, altair],
../spec/[helpers, signatures],
../spec/[eth2_merkleization, helpers, signatures],
../spec/eth2_apis/rest_beacon_client,
../validators/[attestation_aggregation, keystore_management,
validator_pool, slashing_protection],
".."/[conf, beacon_clock, version, beacon_node_types,
nimbus_binary_common],
".."/ssz/merkleization
nimbus_binary_common]

export os, tables, sequtils, sequtils, chronos, presto, chronicles, confutils,
nimbus_binary_common, version, conf, options, tables, results, base10,
byteutils, presto_client

export rest_beacon_client,
phase0, altair, helpers, signatures, merkleization,
phase0, altair, helpers, signatures, eth2_merkleization,
beacon_clock,
kvstore, kvstore_sqlite3,
keystore_management, slashing_protection, validator_pool,

@@ -11,11 +11,11 @@ import
std/[os, strutils, terminal, wordwrap, unicode],
chronicles, chronos, web3, stint, json_serialization, zxcvbn,
serialization, blscurve, eth/common/eth_types, eth/keys, confutils, bearssl,
../spec/[keystore],
../spec/[eth2_merkleization, keystore],
../spec/datatypes/base,
stew/io2, libp2p/crypto/crypto as lcrypto,
nimcrypto/utils as ncrutils,
".."/[conf, ssz/merkleization, filepath],
".."/[conf, filepath],
../networking/network_metadata

export
@@ -14,11 +14,9 @@ import
eth/db/[kvstore, kvstore_sqlite3],
chronicles,
nimcrypto/hash,
serialization,
json_serialization,
# Internal
../spec/datatypes/base,
../ssz,
../spec/eth2_ssz_serialization,
./slashing_protection_common

# Requirements

@@ -18,7 +18,6 @@ import
# Internal
../spec/datatypes/base,
../spec/helpers,
../ssz,
./slashing_protection_common

# Requirements
@@ -21,12 +21,13 @@ import

# Local modules
../spec/datatypes/[phase0, altair, merge],
../spec/[forks, helpers, network, signatures, state_transition],
../spec/[
eth2_merkleization, forks, helpers, network, signatures, state_transition],
../consensus_object_pools/[
spec_cache, blockchain_dag, block_clearance, attestation_pool, exit_pool],
../eth1/eth1_monitor,
../networking/eth2_network,
../ssz, ../ssz/sszdump, ../sync/sync_manager,
../sszdump, ../sync/sync_manager,
../gossip_processing/consensus_manager,
".."/[conf, beacon_clock, beacon_node_common, beacon_node_types, version],
"."/[
@@ -14,7 +14,6 @@ import
../beacon_chain/spec/datatypes/[phase0],
../beacon_chain/spec/[
beaconstate, forks, helpers, state_transition, state_transition_block],
../beacon_chain/ssz/[merkleization, ssz_serialization],
../tests/official/fixtures_utils

# Nimbus Bench - Scenario configuration

@@ -4,7 +4,7 @@ import
stew/[endians2, results],
snappy, snappy/framing,
../beacon_chain/spec/datatypes/phase0,
../beacon_chain/ssz/ssz_serialization
../beacon_chain/spec/eth2_ssz_serialization

const
E2Version = [byte 0x65, 0x32]

@@ -4,9 +4,9 @@ import
stew/byteutils,
../research/simutils,
../beacon_chain/spec/datatypes/[phase0],
../beacon_chain/spec/[forks, helpers, state_transition],
../beacon_chain/networking/network_metadata,
../beacon_chain/ssz/[merkleization, ssz_serialization]
../beacon_chain/spec/[
eth2_ssz_serialization, forks, helpers, state_transition],
../beacon_chain/networking/network_metadata

type
Cmd* = enum
@@ -8,7 +8,7 @@ import
../beacon_chain/spec/datatypes/phase0,
../beacon_chain/spec/[
forks, helpers, state_transition, state_transition_epoch],
../beacon_chain/ssz, ../beacon_chain/ssz/sszdump,
../beacon_chain/sszdump,
../research/simutils, ./e2store

type Timers = enum

@@ -5,8 +5,8 @@ import
stew/ptrops, stew/ranges/ptr_arith, chronicles,
../beacon_chain/spec/datatypes/phase0,
../beacon_chain/spec/[
beaconstate, forks, validator, state_transition, state_transition_block],
../beacon_chain/ssz/[merkleization, ssz_serialization]
beaconstate, eth2_ssz_serialization, forks, validator, state_transition,
state_transition_block]

type
AttestationInput = object

@@ -28,7 +28,6 @@ import
../beacon_chain/validators/validator_pool,
../beacon_chain/consensus_object_pools/[blockchain_dag, block_quarantine,
block_clearance, attestation_pool],
../beacon_chain/ssz/[merkleization, ssz_serialization],
./simutils

type Timers = enum
@@ -2,7 +2,6 @@ import
stats, os, strformat, times,
../tests/testblockutil,
../beacon_chain/beacon_chain_db,
../beacon_chain/ssz/[merkleization, ssz_serialization],
../beacon_chain/spec/datatypes/[phase0, altair],
../beacon_chain/spec/[beaconstate, forks, helpers],
../beacon_chain/consensus_object_pools/[blockchain_dag, block_pools_types],

@@ -15,7 +15,6 @@ import
../tests/testblockutil,
../beacon_chain/spec/datatypes/phase0,
../beacon_chain/spec/[beaconstate, forks, helpers],
../beacon_chain/ssz/[merkleization, ssz_serialization],
./simutils

type Timers = enum
@@ -8,6 +8,7 @@
import ./testutil

import # Unit test
./ssz/all_tests,
./test_attestation_pool,
./test_beacon_chain_db,
./test_beaconstate,
@@ -16,6 +17,7 @@ import # Unit test
./test_datatypes,
./test_discovery,
./test_eth1_monitor,
./test_eth2_ssz_serialization,
./test_exit_pool,
./test_gossip_validation,
./test_helpers,
@@ -23,7 +25,6 @@ import # Unit test
./test_interop,
./test_message_signatures,
./test_peer_pool,
./test_ssz,
./test_statediff,
./test_sync_manager,
./test_zero_signature,
@@ -1,6 +1,5 @@
import
testutils/fuzzing, faststreams/inputs, serialization/testing/tracing,
../../beacon_chain/ssz,
../../beacon_chain/spec/datatypes/base

export

@@ -16,8 +16,6 @@ import
# Specs
../../beacon_chain/spec/datatypes/phase0,
../../beacon_chain/spec/[beaconstate, helpers, validator, signatures],
# Internals
../../beacon_chain/ssz,
# Mocking procs
./mock_blocks,
./mock_validator_keys

@@ -10,8 +10,6 @@ import
# Specs
../../beacon_chain/spec/datatypes/phase0,
../../beacon_chain/spec/[helpers, signatures, validator],
# Internals
../../beacon_chain/ssz,
# Mock helpers
./mock_validator_keys
@@ -13,12 +13,11 @@ import
math,

# Specs
../../beacon_chain/spec/[keystore, signatures],
../../beacon_chain/spec/[eth2_merkleization, keystore, signatures],
../../beacon_chain/spec/datatypes/base,

# Internals
../../beacon_chain/extras,
../../beacon_chain/ssz,
../../beacon_chain/eth1/merkle_minimal,

# Mocking procs

@@ -13,7 +13,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/[beaconstate, helpers],
../../../beacon_chain/spec/datatypes/[phase0, altair],
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,
@@ -16,7 +16,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/beaconstate,
../../../beacon_chain/spec/datatypes/altair,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/state_transition_block,
../../../beacon_chain/spec/datatypes/altair,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/[state_transition_block],
../../../beacon_chain/spec/datatypes/altair,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/[beaconstate, presets, state_transition_block],
../../../beacon_chain/spec/datatypes/altair,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/state_transition_block,
../../../beacon_chain/spec/datatypes/altair,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/state_transition_block,
../../../beacon_chain/spec/datatypes/altair,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/state_transition_block,
../../../beacon_chain/spec/datatypes/altair,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -13,7 +13,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/[forks, state_transition],
../../../beacon_chain/spec/datatypes/altair,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils
@@ -15,7 +15,6 @@ import
yaml,
# Beacon chain internals
../../beacon_chain/spec/datatypes/altair,
../../beacon_chain/ssz,
# Status libraries
snappy,
# Test utilities

@@ -16,7 +16,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/[state_transition, forks, helpers],
../../../beacon_chain/spec/datatypes/[phase0, altair],
../../../beacon_chain/[ssz],
# Test utilities
../../testutil,
../fixtures_utils
@@ -7,22 +7,17 @@

import
# Standard library
os, strutils, typetraits,
std/[os, strutils, typetraits],
# Internals
../../beacon_chain/ssz,
../../beacon_chain/spec/datatypes/[phase0, altair],
../../beacon_chain/spec/[state_transition_epoch],
../../beacon_chain/spec/[
eth2_merkleization, eth2_ssz_serialization, state_transition_epoch],
# Status libs,
snappy,
stew/byteutils,
serialization, json_serialization
stew/byteutils

export # Workaround:
# - https://github.com/status-im/nim-serialization/issues/4
# - https://github.com/status-im/nim-serialization/issues/5
# - https://github.com/nim-lang/Nim/issues/11225
serialization.readValue,
Json, ssz, phase0, altair
export
eth2_merkleization, eth2_ssz_serialization

# Process current EF test format
# ---------------------------------------------
@@ -16,7 +16,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/beaconstate,
../../../beacon_chain/spec/datatypes/phase0,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/state_transition_block,
../../../beacon_chain/spec/datatypes/phase0,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/[state_transition_block],
../../../beacon_chain/spec/datatypes/phase0,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/[state_transition_block, presets],
../../../beacon_chain/spec/datatypes/phase0,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/state_transition_block,
../../../beacon_chain/spec/datatypes/phase0,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/state_transition_block,
../../../beacon_chain/spec/datatypes/phase0,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils,

@@ -13,7 +13,6 @@ import
# Beacon chain internals
../../../beacon_chain/spec/[forks, state_transition],
../../../beacon_chain/spec/datatypes/phase0,
../../../beacon_chain/ssz,
# Test utilities
../../testutil,
../fixtures_utils
@@ -15,7 +15,6 @@ import
yaml,
# Beacon chain internals
../../beacon_chain/spec/datatypes/phase0,
../../beacon_chain/ssz,
# Status libraries
snappy,
# Test utilities

@@ -15,7 +15,6 @@ import
# Beacon chain internals
../../beacon_chain/spec/datatypes/phase0,
../../beacon_chain/spec/[validator, helpers, state_transition_epoch],
../../beacon_chain/ssz,
# Test utilities
../testutil,
./fixtures_utils

@@ -16,7 +16,6 @@ import
# Beacon chain internals
../../beacon_chain/spec/digest,
../../beacon_chain/spec/datatypes/base,
../../beacon_chain/ssz,
# Test utilities
./fixtures_utils
|
|||
../../beacon_chain/spec/[state_transition_block],
|
||||
../../beacon_chain/spec/datatypes/base,
|
||||
# Internals
|
||||
../../beacon_chain/ssz,
|
||||
# Mock helpers
|
||||
../mocking/[mock_deposits, mock_genesis],
|
||||
../testutil, ../helpers/math_helpers
|
||||
|
|
|
@@ -0,0 +1,5 @@
+{.used.}
+
+import
+  ./test_ssz_roundtrip,
+  ./test_ssz_serialization
@@ -11,6 +11,6 @@
 # with unittest2 as of writing
 import
   serialization/testing/generic_suite,
-  ../beacon_chain/ssz
+  ../../beacon_chain/ssz/ssz_serialization

 executeRoundTripTests SSZ
@@ -8,13 +8,10 @@
 {.used.}

 import
-  std/[options],
+  std/typetraits,
   unittest2,
-  nimcrypto/hash,
-  json_serialization,
-  ../beacon_chain/spec/datatypes/base,
-  ../beacon_chain/ssz,
-  ../beacon_chain/ssz/[navigator, dynamic_navigator]
+  ../../beacon_chain/ssz/[ssz_serialization, merkleization],
+  ../../beacon_chain/ssz/[navigator, dynamic_navigator]

 type
   SomeEnum = enum

@@ -52,13 +49,11 @@ type
     f0: uint8
     f1: uint32
     f2: array[20, byte]
-    f3: MDigest[256]
-    f4: seq[byte]
-    f5: ValidatorIndex
+    f3: Eth2Digest

 static:
   doAssert fixedPortionSize(ObjWithFields) ==
-    1 + 4 + sizeof(array[20, byte]) + (256 div 8) + 4 + 8
+    1 + 4 + sizeof(array[20, byte]) + (256 div 8)

 type
   Foo = object

@@ -154,7 +149,7 @@ suite "SSZ navigator":
     var leaves = HashList[uint64, 1'i64 shl 3]()
     while leaves.len < leaves.maxLen:
       check:
-        leaves.add leaves.lenu64
+        leaves.add leaves.len.uint64
         hash_tree_root(leaves) == hash_tree_root(leaves.data)

 suite "SSZ dynamic navigator":
@@ -311,3 +306,48 @@ suite "hash":
     readSszValue(emptyBytes, sloaded)
     check:
       emptyRoot == hash_tree_root(sloaded)
+
+suite "underlong values":
+  template testit(t: auto) =
+    test "Underlong SSZ.decode: " & type(t).name():
+      let encoded = SSZ.encode(t)
+      expect(SszError):
+        discard SSZ.decode(encoded[0..^2], type t)
+
+    test "Underlong readSszBytes: " & type(t).name():
+      let encoded = SSZ.encode(t)
+      var t2: type t
+      expect(SszError):
+        readSszBytes(encoded[0..^2], t2)
+
+    test "Overlong SSZ.decode: " & type(t).name():
+      when not (t is BasicType | BitArray | array | HashArray | BitList | Simple):
+        let encoded = SSZ.encode(t)
+        expect(SszError):
+          discard SSZ.decode(encoded & @[32'u8], type t)
+      else:
+        skip # TODO Difference between decode and readSszBytes needs revisiting
+
+    test "Overlong readSszBytes: " & type(t).name():
+      when not (t is BitList | Simple):
+        let encoded = SSZ.encode(t)
+        var t2: type t
+        expect(SszError):
+          readSszBytes(encoded & @[32'u8], t2)
+      else:
+        skip # TODO Difference between decode and readSszBytes needs revisiting
+
+  # All SszType types
+  testit(default(bool))
+  testit(default(uint8))
+  testit(default(uint16))
+  testit(default(uint32))
+  testit(default(uint64))
+  testit(default(array[32, uint8]))
+  testit(default(HashArray[32, uint8]))
+  testit(List[uint64, 32].init(@[42'u64]))
+  testit(HashList[uint64, 32].init(@[42'u64]))
+  testit(default(BitArray[32]))
+  testit(BitList[32].init(10))
+  testit(default(Simple))
+  # TODO testit((32'u8, )) fails with a semcheck bug
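For readers skimming the new suite, the property under test condenses to a standalone check like the sketch below. It reuses only calls that appear in the tests above (`SSZ.encode`, `SSZ.decode`, `SszError`); the import path is an assumption about where such a snippet would sit relative to `beacon_chain/ssz`.

```nim
import ../../beacon_chain/ssz/ssz_serialization

let encoded = SSZ.encode(42'u64)  # a uint64 always encodes to exactly 8 bytes
doAssert encoded.len == 8

try:
  # A truncated buffer must be rejected, not silently zero-padded.
  discard SSZ.decode(encoded[0..^2], uint64)
  doAssert false, "decoding an underlong buffer should raise SszError"
except SszError:
  discard
```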
@@ -20,7 +20,6 @@ import
   ../beacon_chain/fork_choice/[fork_choice_types, fork_choice],
   ../beacon_chain/consensus_object_pools/[
     block_quarantine, blockchain_dag, block_clearance, attestation_pool],
-  ../beacon_chain/ssz/merkleization,
   ../beacon_chain/spec/datatypes/phase0,
   ../beacon_chain/spec/[forks, state_transition, helpers],
   # Test utilities

@@ -10,7 +10,7 @@
 import
   std/[algorithm, options, sequtils],
   unittest2,
-  ../beacon_chain/[beacon_chain_db, interop, ssz],
+  ../beacon_chain/[beacon_chain_db, interop],
   ../beacon_chain/spec/[beaconstate, forks, state_transition],
   ../beacon_chain/spec/datatypes/[phase0, altair],
   ../beacon_chain/consensus_object_pools/blockchain_dag,

@@ -16,7 +16,7 @@ import
   ../beacon_chain/spec/datatypes/base,
   ../beacon_chain/spec/[beaconstate, forks, helpers, state_transition],
   ../beacon_chain/beacon_node_types,
-  ../beacon_chain/[beacon_chain_db, ssz],
+  ../beacon_chain/[beacon_chain_db],
   ../beacon_chain/consensus_object_pools/[
     blockchain_dag, block_quarantine, block_clearance],
   ./testutil, ./testdbutil, ./testblockutil
@@ -0,0 +1,48 @@
+# beacon_chain
+# Copyright (c) 2018-2021 Status Research & Development GmbH
+# Licensed and distributed under either of
+#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.used.}
+
+import
+  unittest2,
+  ../beacon_chain/spec/datatypes/[phase0, altair],
+  ../beacon_chain/spec/eth2_ssz_serialization
+
+template reject(stmt) =
+  doAssert(not compiles(stmt))
+
+static:
+  doAssert isFixedSize(Slot) == true
+
+type
+  Specific = object
+    f1: Slot
+    f2: Epoch
+
+  Primitive = object # Same as above, but using primitive fields
+    f1: uint64
+    f2: uint64
+
+suite "Specific field types":
+  test "roundtrip":
+    let encoded = SSZ.encode(Specific(f1: Slot(1), f2: Epoch(2)))
+    check SSZ.decode(encoded, Primitive) == Primitive(f1: 1, f2: 2)
+
+  test "root update":
+    template testit(T: type) =
+      var t: T
+      t.root = hash_tree_root(t.message)
+      let encoded = SSZ.encode(t)
+      let decoded = SSZ.decode(encoded, T)
+      check:
+        t.message == decoded.message
+        t.root == decoded.root
+
+    testit(phase0.SignedBeaconBlock)
+    testit(phase0.TrustedSignedBeaconBlock)
+    testit(altair.SignedBeaconBlock)
+    testit(altair.TrustedSignedBeaconBlock)
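The "roundtrip" test above relies on `Slot` and `Epoch` being distinct `uint64` wrappers whose SSZ encoding is byte-identical to the primitive, which is what lets `Specific` bytes decode as `Primitive`. A small sketch of that equivalence follows; `WithSpecific` and `WithPrimitive` are hypothetical stand-ins defined here only for illustration, and the import paths are assumed.

```nim
import
  ../beacon_chain/spec/datatypes/base,
  ../beacon_chain/spec/eth2_ssz_serialization

type
  WithSpecific = object   # hypothetical mirror of the test's `Specific`
    f1: Slot
    f2: Epoch

  WithPrimitive = object  # hypothetical mirror of the test's `Primitive`
    f1: uint64
    f2: uint64

# Distinct uint64 wrappers serialize to the same bytes as plain uint64 fields.
doAssert SSZ.encode(WithSpecific(f1: Slot(1), f2: Epoch(2))) ==
  SSZ.encode(WithPrimitive(f1: 1, f2: 2))
```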
@@ -10,7 +10,7 @@
 import
   # Status lib
   unittest2,
-  chronicles,
+  chronicles, chronos,
   eth/keys,
   # Internal
   ../beacon_chain/[beacon_node_types, beacon_clock],

@@ -18,7 +18,6 @@ import
   ../beacon_chain/fork_choice/[fork_choice_types, fork_choice],
   ../beacon_chain/consensus_object_pools/[
     block_quarantine, blockchain_dag, block_clearance, attestation_pool],
-  ../beacon_chain/ssz/merkleization,
   ../beacon_chain/spec/datatypes/phase0,
   ../beacon_chain/spec/[forks, state_transition, helpers, network],
   # Test utilities

@@ -3,8 +3,7 @@
 import
   stint, ./testutil, stew/byteutils,
   ../beacon_chain/interop,
-  ../beacon_chain/ssz,
-  ../beacon_chain/spec/[beaconstate],
+  ../beacon_chain/spec/[eth2_merkleization, beaconstate],
   ../beacon_chain/spec/datatypes/base

 # Interop test yaml, found here:
@@ -1,269 +0,0 @@
-import
-  std/[strutils, sequtils, macros, bitops],
-  stew/[bitops2, endians2],
-  ../beacon_chain/spec/datatypes/phase0,
-  ../beacon_chain/spec/[beaconstate, helpers],
-  ../beacon_chain/eth1/merkle_minimal,
-  ../beacon_chain/ssz,
-  mocking/mock_deposits
-
-func round_step_down(x: Natural, step: static Natural): int =
-  ## Round the input to the previous multiple of "step"
-  when (step and (step - 1)) == 0:
-    # Step is a power of 2. (If compiler cannot prove that x>0 it does not make the optim)
-    x and not(step - 1)
-  else:
-    x - x mod step
-
-type SparseMerkleTree[Depth: static int] = object
-  ## Sparse Merkle tree
-  # There is an extra "depth" layer to store leaf nodes
-  # This stores leaves at depth = 0
-  # and the root hash at the last depth
-  nnznodes: array[Depth+1, seq[Eth2Digest]] # nodes that leads to non-zero leaves
-
-func merkleTreeFromLeaves(
-       values: openArray[Eth2Digest],
-       Depth: static[int] = DEPOSIT_CONTRACT_TREE_DEPTH
-     ): SparseMerkleTree[Depth] =
-  ## Depth should be the same as is_valid_merkle_branch
-
-  result.nnznodes[0] = @values
-
-  for depth in 1 .. Depth: # Inclusive range
-    let prev_depth_len = result.nnznodes[depth-1].len
-    let stop = round_step_down(prev_depth_len, 2)
-    for i in countup(0, stop-1, 2):
-      # hash by pair of previous nodes
-      let nodeHash = withEth2Hash:
-        h.update result.nnznodes[depth-1][i]
-        h.update result.nnznodes[depth-1][i+1]
-      result.nnznodes[depth].add nodeHash
-
-    if prev_depth_len != stop:
-      # If length is odd, the last one was skipped,
-      # we need to combine it
-      # with the zeroHash corresponding to the current depth
-      let nodeHash = withEth2Hash:
-        h.update result.nnznodes[depth-1][^1]
-        h.update zeroHashes[depth-1]
-      result.nnznodes[depth].add nodeHash
-
-func getMerkleProof[Depth: static int](tree: SparseMerkleTree[Depth],
-                                       index: int,
-                                       depositMode = false): array[Depth, Eth2Digest] =
-  # Descend down the tree according to the bit representation
-  # of the index:
-  # - 0 --> go left
-  # - 1 --> go right
-  let path = uint32(index)
-
-  # This is what the nnznodes[depth].len would be if `index` had been the last
-  # deposit on the Merkle tree
-  var depthLen = index + 1
-
-  for depth in 0 ..< Depth:
-    let nodeIdx = int((path shr depth) xor 1)
-
-    # depositMode simulates only having constructed SparseMerkleTree[Depth]
-    # through exactly deposit specified.
-    if nodeIdx < tree.nnznodes[depth].len and
-        (nodeIdx < depthLen or not depositMode):
-      result[depth] = tree.nnznodes[depth][nodeIdx]
-    else:
-      result[depth] = zeroHashes[depth]
-
-    # Round up, i.e. a half-pair of Merkle nodes/leaves still requires a node
-    # in the next Merkle tree layer calculated
-    depthLen = (depthLen + 1) div 2
-
-proc testMerkleMinimal(): bool =
-  proc toDigest[N: static int](x: array[N, byte]): Eth2Digest =
-    result.data[0 .. N-1] = x
-
-  let a = [byte 0x01, 0x02, 0x03].toDigest
-  let b = [byte 0x04, 0x05, 0x06].toDigest
-  let c = [byte 0x07, 0x08, 0x09].toDigest
-
-  block: # SSZ Sanity checks vs Python impl
-    block: # 3 leaves
-      let leaves = List[Eth2Digest, 3](@[a, b, c])
-      let root = hash_tree_root(leaves)
-      doAssert $root == "9ff412e827b7c9d40fc7df2725021fd579ab762581d1ff5c270316682868456e".toUpperAscii
-
-    block: # 2^3 leaves
-      let leaves = List[Eth2Digest, int64(1 shl 3)](@[a, b, c])
-      let root = hash_tree_root(leaves)
-      doAssert $root == "5248085b588fab1dd1e03f3cd62201602b12e6560665935964f46e805977e8c5".toUpperAscii
-
-    block: # 2^10 leaves
-      let leaves = List[Eth2Digest, int64(1 shl 10)](@[a, b, c])
-      let root = hash_tree_root(leaves)
-      doAssert $root == "9fb7d518368dc14e8cc588fb3fd2749beef9f493fef70ae34af5721543c67173".toUpperAscii
-
-  block:
-    macro roundTrips(): untyped =
-      result = newStmtList()
-
-      # compile-time unrolled test
-      for nleaves in [3, 4, 5, 7, 8, 1 shl 10, 1 shl 32]:
-        let depth = fastLog2(nleaves-1) + 1
-
-        result.add quote do:
-          block:
-            let tree = merkleTreeFromLeaves([a, b, c], Depth = `depth`)
-            #echo "Tree: ", tree
-
-            doAssert tree.nnznodes[`depth`].len == 1
-            let root = tree.nnznodes[`depth`][0]
-            #echo "Root: ", root
-
-            block: # proof for a
-              let index = 0
-
-              doAssert is_valid_merkle_branch(
-                a, tree.getMerkleProof(index = index),
-                depth = `depth`,
-                index = index.uint64,
-                root = root
-              ), "Failed (depth: " & $`depth` &
-                ", nleaves: " & $`nleaves` & ')'
-
-            block: # proof for b
-              let index = 1
-
-              doAssert is_valid_merkle_branch(
-                b, tree.getMerkleProof(index = index),
-                depth = `depth`,
-                index = index.uint64,
-                root = root
-              ), "Failed (depth: " & $`depth` &
-                ", nleaves: " & $`nleaves` & ')'
-
-            block: # proof for c
-              let index = 2
-
-              doAssert is_valid_merkle_branch(
-                c, tree.getMerkleProof(index = index),
-                depth = `depth`,
-                index = index.uint64,
-                root = root
-              ), "Failed (depth: " & $`depth` &
-                ", nleaves: " & $`nleaves` & ')'
-
-    roundTrips()
-    true
-
-doAssert testMerkleMinimal()
-
-proc compareTreeVsMerkleizer(hashes: openArray[Eth2Digest], limit: static Limit) =
-  const treeHeight = binaryTreeHeight(limit)
-  let tree = merkleTreeFromLeaves(hashes, treeHeight)
-
-  var merkleizer = createMerkleizer(limit)
-  for hash in hashes:
-    merkleizer.addChunk hash.data
-
-  doAssert merkleizer.getFinalHash() == tree.nnznodes[treeHeight - 1][0]
-
-proc testMultiProofsGeneration(preludeRecords: int,
-                               totalProofs: int,
-                               followUpRecords: int,
-                               limit: static Limit) =
-  var
-    m1 = createMerkleizer(limit)
-    m2 = createMerkleizer(limit)
-
-  var preludeHashes = newSeq[Eth2Digest]()
-  for i in 0 ..< preludeRecords:
-    let hash = eth2digest toBytesLE(uint64(100000000 + i))
-    m1.addChunk hash.data
-    m2.addChunk hash.data
-    preludeHashes.add hash
-
-  var proofsHashes = newSeq[Eth2Digest]()
-  for i in 0 ..< totalProofs:
-    let hash = eth2digest toBytesLE(uint64(200000000 + i))
-    m1.addChunk hash.data
-    proofsHashes.add hash
-
-  var proofs = addChunksAndGenMerkleProofs(m2, proofsHashes)
-
-  const treeHeight = binaryTreeHeight(limit)
-  let merkleTree = merkleTreeFromLeaves(preludeHashes & proofsHashes,
-                                        treeHeight)
-
-  doAssert m1.getFinalHash == merkleTree.nnznodes[treeHeight - 1][0]
-  doAssert m1.getFinalHash == m2.getFinalHash
-
-  for i in 0 ..< totalProofs:
-    let
-      referenceProof = merkle_tree.getMerkleProof(preludeRecords + i, false)
-      startPos = i * treeHeight
-      endPos = startPos + treeHeight - 1
-
-    doAssert referenceProof == proofs.toOpenArray(startPos, endPos)
-
-  for i in 0 ..< followUpRecords:
-    let hash = eth2digest toBytesLE(uint64(300000000 + i))
-    m1.addChunk hash.data
-    m2.addChunk hash.data
-
-    doAssert m1.getFinalHash == m2.getFinalHash
-
-for prelude in [0, 1, 2, 5, 6, 12, 13, 16]:
-  for proofs in [1, 2, 4, 17, 64]:
-    for followUpHashes in [0, 1, 2, 5, 7, 8, 15, 48]:
-      testMultiProofsGeneration(prelude, proofs, followUpHashes, 128)
-      testMultiProofsGeneration(prelude, proofs, followUpHashes, 5000)
-
-iterator hash_tree_roots_prefix[T](lst: openArray[T],
-                                   limit: static Limit): Eth2Digest =
-  # This is a particular type's instantiation of a general fold, reduce,
-  # accumulation, prefix sums, etc family of operations. As long as that
-  # Eth1 deposit case is the only notable example -- the usual uses of a
-  # list involve, at some point, tree-hashing it -- finalized hashes are
-  # the only abstraction that escapes from this module this way.
-  var merkleizer = createMerkleizer(limit)
-  for i, elem in lst:
-    merkleizer.addChunk(hash_tree_root(elem).data)
-    yield mixInLength(merkleizer.getFinalHash(), i + 1)
-
-func attachMerkleProofsReferenceImpl(deposits: var openArray[Deposit]) =
-  let
-    deposit_data_roots = mapIt(deposits, it.data.hash_tree_root)
-    merkle_tree = merkleTreeFromLeaves(deposit_data_roots)
-  var
-    deposit_data_sums: seq[Eth2Digest]
-  for prefix_root in hash_tree_roots_prefix(
-      deposit_data_roots, 1'i64 shl DEPOSIT_CONTRACT_TREE_DEPTH):
-    deposit_data_sums.add prefix_root
-
-  for val_idx in 0 ..< deposits.len:
-    deposits[val_idx].proof[0..31] = merkle_tree.getMerkleProof(val_idx, true)
-    deposits[val_idx].proof[32] = default(Eth2Digest)
-    deposits[val_idx].proof[32].data[0..7] = uint_to_bytes8((val_idx + 1).uint64)
-
-    doAssert is_valid_merkle_branch(
-      deposit_data_roots[val_idx], deposits[val_idx].proof,
-      DEPOSIT_CONTRACT_TREE_DEPTH + 1, val_idx.uint64,
-      deposit_data_sums[val_idx])
-
-let
-  digests = mapIt(1..65, eth2digest toBytesLE(uint64 it))
-
-proc testMerkleizer =
-  for i in 0 ..< digests.len:
-    compareTreeVsMerkleizer(digests.toOpenArray(0, i), 128)
-    compareTreeVsMerkleizer(digests.toOpenArray(0, i), 5000)
-
-  var deposits = mockGenesisBalancedDeposits(65, 100000)
-  var depositsCopy = deposits
-
-  attachMerkleProofsReferenceImpl(deposits)
-  attachMerkleProofs(depositsCopy)
-
-  for i in 0 ..< deposits.len:
-    doAssert deposits[i].proof == depositsCopy[i].proof
-
-testMerkleizer()
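The deleted file compared a reference sparse-tree implementation against the incremental merkleizer. Below is a condensed sketch of the core relationship it exercised, using only calls that occur in the file above (`createMerkleizer`, `addChunk`, `getFinalHash`, `mixInLength`, `hash_tree_root`); the single `eth2_merkleization`/`eth2_ssz_serialization` import and the availability of `List`/`Eth2Digest` through it are assumptions.

```nim
import
  ../beacon_chain/spec/[eth2_merkleization, eth2_ssz_serialization]

# Three arbitrary 32-byte chunks.
let chunks = @[hash_tree_root(1'u64), hash_tree_root(2'u64), hash_tree_root(3'u64)]

# Feed them to an incremental merkleizer with the same limit as the List below.
var m = createMerkleizer(128)
for c in chunks:
  m.addChunk c.data

# A List's hash_tree_root is the limit-padded chunk-tree root with the element
# count mixed in, which is the relationship the reference tree was checked against.
doAssert mixInLength(m.getFinalHash(), chunks.len) ==
  hash_tree_root(List[Eth2Digest, 128](chunks))
```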
@@ -14,7 +14,6 @@ import
   ../beacon_chain/spec/datatypes/phase0,
   ../beacon_chain/spec/[forks, helpers],
   ../beacon_chain/[beacon_node_types, statediff],
-  ../beacon_chain/ssz,
   ../beacon_chain/consensus_object_pools/[blockchain_dag, block_quarantine]

 when isMainModule:

@@ -10,9 +10,8 @@
 import
   std/strutils,
   unittest2,
-  ../beacon_chain/spec/crypto,
-  ../beacon_chain/spec/datatypes/base,
-  ../beacon_chain/ssz
+  ../beacon_chain/spec/[eth2_ssz_serialization, crypto],
+  ../beacon_chain/spec/datatypes/base

 # Sanity checks to make sure all the workarounds introduced
 # to deal with https://github.com/status-im/nimbus-eth2/issues/374

@@ -10,7 +10,6 @@ import
   options, stew/endians2,
   ../beacon_chain/[beacon_node_types],
   ../beacon_chain/validators/validator_pool,
-  ../beacon_chain/ssz/merkleization,
   ../beacon_chain/spec/datatypes/[phase0, altair, merge],
   ../beacon_chain/spec/[helpers, signatures, state_transition, forks],
   ../beacon_chain/consensus_object_pools/attestation_pool

@@ -11,7 +11,6 @@ import
   options, stew/endians2,
   ./mocking/mock_deposits,
   ./helpers/math_helpers,
-  ../beacon_chain/ssz/merkleization,
   ../beacon_chain/spec/[
     forks, helpers, state_transition, state_transition_block]