Refactor portal test directory structure (#2650)
This commit is contained in:
parent 38d651c9c8
commit e603952df3

Makefile (+10 −10)
@@ -281,17 +281,17 @@ fluffy-test-reproducibility:
 		{ echo -e "\e[91mFailure: the binary changed between builds.\e[39m"; exit 1; }
 
 # fluffy tests
-all_fluffy_portal_spec_tests: | build deps
+all_history_network_custom_chain_tests: | build deps
 	echo -e $(BUILD_MSG) "build/$@" && \
-		$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/portal_spec_tests/mainnet/$@.nim"
+		$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130 -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/history_network_tests/$@.nim"
 
 all_fluffy_tests: | build deps
 	echo -e $(BUILD_MSG) "build/$@" && \
-		$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -d:mergeBlockNumber:38130 -o:build/$@ "fluffy/tests/$@.nim"
+		$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/$@.nim"
 
 # builds and runs the fluffy test suite
-fluffy-test: | all_fluffy_portal_spec_tests all_fluffy_tests
+fluffy-test: | all_fluffy_tests all_history_network_custom_chain_tests
 
 # builds the fluffy tools, wherever they are
 $(FLUFFY_TOOLS): | build deps rocksdb

@@ -357,7 +357,7 @@ txparse: | build deps
 
 # usual cleaning
 clean: | clean-common
-	rm -rf build/{nimbus,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_fluffy_portal_spec_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM}
+	rm -rf build/{nimbus,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_history_network_custom_chain_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM}
 	rm -rf tools/t8n/{t8n,t8n_test}
 	rm -rf tools/evmstate/{evmstate,evmstate_test}
 ifneq ($(USE_LIBBACKTRACE), 0)
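With this change the aggregate test targets split along compile-time needs; a quick sketch of invoking them (target names as defined above, commands assumed to run from the repository root):

    # Build and run the whole fluffy test suite (both aggregate targets):
    make fluffy-test

    # Build and run only the bucket that needs a custom merge block number
    # (compiled with -d:mergeBlockNumber:38130, see the recipe above):
    make all_history_network_custom_chain_tests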
@@ -17,11 +17,8 @@ import
   ../../database/content_db,
   ../../network_metadata,
   ../wire/[portal_protocol, portal_stream, portal_protocol_config],
-  "."/[
-    history_content,
-    beacon_chain_historical_roots,
-    validation/historical_hashes_accumulator,
-  ],
+  "."/[history_content, validation/historical_hashes_accumulator],
+  ../beacon/beacon_chain_historical_roots,
   ./content/content_deprecated
 
 logScope:
@@ -20,7 +20,7 @@ import
   ../rpc/eth_rpc_client,
   ../eth_data/[history_data_seeding, history_data_json_store, history_data_ssz_e2s],
   ../network/history/[history_content, validation/historical_hashes_accumulator],
-  ../tests/test_history_util
+  ../tests/history_network_tests/test_history_util
 
 type
   FutureCallback[A] = proc(): Future[A] {.gcsafe, raises: [].}
@@ -8,13 +8,9 @@
 {.warning[UnusedImport]: off.}
 
 import
-  ./test_portal_wire_protocol,
-  ./test_accumulator,
-  ./test_history_network,
   ./test_content_db,
   ./test_discovery_rpc,
-  ./test_beacon_chain_historical_roots,
-  ./test_beacon_chain_historical_summaries,
+  ./wire_protocol_tests/all_wire_protocol_tests,
+  ./history_network_tests/all_history_network_tests,
   ./beacon_network_tests/all_beacon_network_tests,
   ./state_network_tests/all_state_network_tests
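As reflected in the aggregate imports above and the Makefile change earlier, the per-subsystem suites now live in their own directories; a sketch of the layout visible in this diff:

    fluffy/tests/
      all_fluffy_tests.nim
      test_content_db.nim, test_discovery_rpc.nim, test_helpers.nim
      wire_protocol_tests/all_wire_protocol_tests.nim
      history_network_tests/all_history_network_tests.nim
      history_network_tests/all_history_network_custom_chain_tests.nim
      beacon_network_tests/all_beacon_network_tests.nim
      state_network_tests/all_state_network_tests.nim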
@@ -9,6 +9,8 @@
 
 import
   ./test_beacon_content,
+  ./test_beacon_historical_roots,
   ./test_beacon_historical_summaries,
+  ./test_beacon_historical_summaries_vectors,
   ./test_beacon_network,
   ./test_beacon_light_client
@@ -19,7 +19,7 @@ import
   ../../eth_data/yaml_utils,
   "."/light_client_test_data
 
-suite "Beacon Content Encodings - Mainnet":
+suite "Beacon Content Keys and Values - Test Vectors":
   # These test vectors are generated by eth_data_exporter. The content is taken
   # from mainnet and encoded as it would be transmitted on Portal Network,
   # including also the content key.
@@ -170,7 +170,7 @@ suite "Beacon Content Encodings - Mainnet":
       check encoded == contentValueEncoded
       check encode(key).asSeq() == contentKeyEncoded
 
-suite "Beacon Content Encodings":
+suite "Beacon Content Keys and Values":
   # TODO: These tests are less useful now and should instead be altered to
   # use the consensus test vectors to simply test if encoding / decoding works
   # fine for the different forks.
@@ -277,7 +277,7 @@ suite "Beacon Content Encodings":
       decodeLightClientBootstrapForked(forkDigests, encodedTooEarlyFork).isErr()
       decodeLightClientBootstrapForked(forkDigests, encodedUnknownFork).isErr()
 
-suite "Beacon ContentKey Encodings ":
+suite "Beacon Content Keys - Invalid Cases":
   test "Invalid prefix - 0 value":
     let encoded = ContentKeyByteList.init(@[byte 0x00])
     let decoded = decode(encoded)
@@ -17,7 +17,7 @@ import
   beacon_chain /../ tests/testblockutil,
   beacon_chain /../ tests/mocking/mock_genesis,
   beacon_chain /../ tests/consensus_spec/fixtures_utils,
-  ../network/history/beacon_chain_historical_roots
+  ../../network/beacon/beacon_chain_historical_roots
 
 suite "Beacon Chain Historical Roots":
   let
@@ -1,5 +1,5 @@
 # fluffy
-# Copyright (c) 2024 Status Research & Development GmbH
+# Copyright (c) 2023-2024 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -7,78 +7,42 @@
 
 {.used.}
 
+{.push raises: [].}
+
 import
   unittest2,
-  stew/byteutils,
-  results,
-  beacon_chain/networking/network_metadata,
   beacon_chain/spec/forks,
-  ../../network/beacon/[beacon_chain_historical_summaries, beacon_content],
-  ../../eth_data/yaml_utils
-
-type YamlHistoricalSummariesWithProof* = object
-  content_key*: string
-  content_value*: string
-  beacon_state_root*: string
-  historical_summaries_root*: string
-  historical_summaries_state_proof*: array[5, string]
-  epoch*: uint64
-
-suite "Beacon HistoricalSummariesWithProof":
-  const testVectorDir =
-    "./vendor/portal-spec-tests/tests/mainnet/beacon_chain/historical_summaries_with_proof/deneb/"
+  beacon_chain/spec/datatypes/capella,
+  # Test helpers
+  beacon_chain /../ tests/testblockutil,
+  beacon_chain /../ tests/mocking/mock_genesis,
+  beacon_chain /../ tests/consensus_spec/fixtures_utils,
+  ../../network/beacon/beacon_chain_historical_summaries
 
+suite "Beacon Chain Historical Summaries":
   let
-    metadata = getMetadataForNetwork("mainnet")
-    genesisState =
-      try:
-        template genesisData(): auto =
-          metadata.genesis.bakedBytes
+    cfg = genesisTestRuntimeConfig(ConsensusFork.Capella)
+    state = newClone(initGenesisState(cfg = cfg))
+  var cache = StateCache()
 
-        newClone(
-          readSszForkedHashedBeaconState(
-            metadata.cfg, genesisData.toOpenArray(genesisData.low, genesisData.high)
-          )
-        )
-      except CatchableError as err:
-        raiseAssert "Invalid baked-in state: " & err.msg
+  var blocks: seq[capella.SignedBeaconBlock]
+  # Note:
+  # Adding 8192 blocks. First block is genesis block and not one of these.
+  # Then one extra block is needed to get the historical summaries, block
+  # roots and state roots processed.
+  # index i = 0 is second block.
+  # index i = 8190 is 8192th block and last one that is part of the first
+  # historical root
+  for i in 0 ..< SLOTS_PER_HISTORICAL_ROOT:
+    blocks.add(addTestBlock(state[], cache, cfg = cfg).capellaData)
 
-    # Although the test data is generated from a test state, we need to use the
-    # forkDigests of mainnet as apparently these are used in the generated test vector.
-    genesis_validators_root = getStateField(genesisState[], genesis_validators_root)
-    # genesis_validators_root = Digest.fromHex(
-    #   "0x2170688a9e92595fb353c0a2ad6733431a8066c7ecb48ab3b2aaf9091a1722b1"
-    # )
-    forkDigests = newClone ForkDigests.init(metadata.cfg, genesis_validators_root)
+  test "Historical Summaries Proof":
+    withState(state[]):
+      when consensusFork >= ConsensusFork.Capella:
+        let historical_summaries = forkyState.data.historical_summaries
+        let res = buildProof(state[])
+        check res.isOk()
+        let proof = res.get()
 
-  test "HistoricalSummaries Encoding/Decoding and Verification":
-    const file = testVectorDir & "historical_summaries_with_proof.yaml"
-    let
-      testCase = YamlHistoricalSummariesWithProof.loadFromYaml(file).valueOr:
-        raiseAssert "Invalid test vector file: " & error
-
-      contentKeyEncoded = testCase.content_key.hexToSeqByte()
-      contentValueEncoded = testCase.content_value.hexToSeqByte()
-
-      # Decode content and content key
-      contentKey = decodeSsz(contentKeyEncoded, ContentKey)
-      contentValue =
-        decodeSsz(forkDigests[], contentValueEncoded, HistoricalSummariesWithProof)
-    check:
-      contentKey.isOk()
-      contentValue.isOk()
-
-    let summariesWithProof = contentValue.value()
-    let root = hash_tree_root(summariesWithProof.historical_summaries)
-
-    check:
-      root.data == testCase.historical_summaries_root.hexToSeqByte()
-      summariesWithProof.epoch == testCase.epoch
-      verifyProof(summariesWithProof, Digest.fromHex(testCase.beacon_state_root))
-
-    # Encode content and content key
-    let consensusFork = consensusForkAtEpoch(metadata.cfg, summariesWithProof.epoch)
-    let forkDigest = atConsensusFork(forkDigests[], consensusFork)
-    check:
-      encodeSsz(summariesWithProof, forkDigest) == contentValueEncoded
-      encode(contentKey.value()).asSeq() == contentKeyEncoded
+        withState(state[]):
+          check verifyProof(historical_summaries, proof, forkyState.root)
@@ -0,0 +1,84 @@
+# fluffy
+# Copyright (c) 2024 Status Research & Development GmbH
+# Licensed and distributed under either of
+# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.used.}
+
+import
+  unittest2,
+  stew/byteutils,
+  results,
+  beacon_chain/networking/network_metadata,
+  beacon_chain/spec/forks,
+  ../../network/beacon/[beacon_chain_historical_summaries, beacon_content],
+  ../../eth_data/yaml_utils
+
+type YamlHistoricalSummariesWithProof* = object
+  content_key*: string
+  content_value*: string
+  beacon_state_root*: string
+  historical_summaries_root*: string
+  historical_summaries_state_proof*: array[5, string]
+  epoch*: uint64
+
+suite "Beacon Chain Historical Summaries With Proof - Test Vectors":
+  const testVectorDir =
+    "./vendor/portal-spec-tests/tests/mainnet/beacon_chain/historical_summaries_with_proof/deneb/"
+
+  let
+    metadata = getMetadataForNetwork("mainnet")
+    genesisState =
+      try:
+        template genesisData(): auto =
+          metadata.genesis.bakedBytes
+
+        newClone(
+          readSszForkedHashedBeaconState(
+            metadata.cfg, genesisData.toOpenArray(genesisData.low, genesisData.high)
+          )
+        )
+      except CatchableError as err:
+        raiseAssert "Invalid baked-in state: " & err.msg
+
+    # Although the test data is generated from a test state, we need to use the
+    # forkDigests of mainnet as apparently these are used in the generated test vector.
+    genesis_validators_root = getStateField(genesisState[], genesis_validators_root)
+    # genesis_validators_root = Digest.fromHex(
+    #   "0x2170688a9e92595fb353c0a2ad6733431a8066c7ecb48ab3b2aaf9091a1722b1"
+    # )
+    forkDigests = newClone ForkDigests.init(metadata.cfg, genesis_validators_root)
+
+  test "HistoricalSummaries Encoding/Decoding and Verification":
+    const file = testVectorDir & "historical_summaries_with_proof.yaml"
+    let
+      testCase = YamlHistoricalSummariesWithProof.loadFromYaml(file).valueOr:
+        raiseAssert "Invalid test vector file: " & error
+
+      contentKeyEncoded = testCase.content_key.hexToSeqByte()
+      contentValueEncoded = testCase.content_value.hexToSeqByte()
+
+      # Decode content and content key
+      contentKey = decodeSsz(contentKeyEncoded, ContentKey)
+      contentValue =
+        decodeSsz(forkDigests[], contentValueEncoded, HistoricalSummariesWithProof)
+    check:
+      contentKey.isOk()
+      contentValue.isOk()
+
+    let summariesWithProof = contentValue.value()
+    let root = hash_tree_root(summariesWithProof.historical_summaries)
+
+    check:
+      root.data == testCase.historical_summaries_root.hexToSeqByte()
+      summariesWithProof.epoch == testCase.epoch
+      verifyProof(summariesWithProof, Digest.fromHex(testCase.beacon_state_root))
+
+    # Encode content and content key
+    let consensusFork = consensusForkAtEpoch(metadata.cfg, summariesWithProof.epoch)
+    let forkDigest = atConsensusFork(forkDigests[], consensusFork)
+    check:
+      encodeSsz(summariesWithProof, forkDigest) == contentValueEncoded
+      encode(contentKey.value()).asSeq() == contentKeyEncoded
@@ -18,7 +18,7 @@ import
   ../../network/beacon/[beacon_init_loader, beacon_light_client],
   "."/[light_client_test_data, beacon_test_helpers]
 
-procSuite "Portal Beacon Light Client":
+procSuite "Beacon Light Client":
   let rng = newRng()
 
   proc headerCallback(
@@ -20,7 +20,7 @@ import
     [beacon_network, beacon_init_loader, beacon_chain_historical_summaries],
   "."/[light_client_test_data, beacon_test_helpers]
 
-procSuite "Beacon Content Network":
+procSuite "Beacon Network":
   let rng = newRng()
 
   asyncTest "Get bootstrap by trusted block hash":
@@ -0,0 +1,13 @@
+# Nimbus
+# Copyright (c) 2024 Status Research & Development GmbH
+# Licensed under either of
+# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
+# * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.warning[UnusedImport]: off.}
+
+# Note: These tests are separated because they require a custom merge block
+# number defined at compile time. Once runtime chain config gets added these
+# tests can be compiled together with all the other portal tests.
+import ./test_historical_hashes_accumulator, ./test_history_network
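The note above concerns a constant overridden via `-d:` at compile time; a minimal Nim sketch of that mechanism (the constant name matches the Makefile define, the default value is illustrative and not taken from this diff):

    # A compile-time define: `nim c -d:mergeBlockNumber:38130 ...` overrides
    # the default given here (illustrative value only).
    const mergeBlockNumber* {.intdefine.}: int = 15_537_394

    # Tests compiled into this bucket then see the overridden value as a
    # compile-time constant, e.g. for sizing pre-merge header fixtures.
    static: doAssert mergeBlockNumber > 0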
@@ -8,6 +8,10 @@
 {.warning[UnusedImport]: off.}
 
 import
+  ./test_history_content_keys,
+  ./test_history_content,
+  ./test_history_content_validation,
+  ./test_historical_hashes_accumulator_root,
   ./test_block_proof_historical_roots,
   ./test_block_proof_historical_roots_vectors,
   ./test_block_proof_historical_summaries,
@@ -35,7 +35,7 @@ import
 # TODO: Add more blocks to reach 1+ historical roots, to make sure that indexing
 # is properly tested.
 
-suite "Beacon Chain Block Proofs - Bellatrix":
+suite "History Block Proofs - Historical Roots":
   let
     cfg = block:
       var res = defaultRuntimeConfig
@@ -19,7 +19,7 @@ import
   ../../network/history/validation/block_proof_historical_roots,
   ../../eth_data/[yaml_utils, yaml_eth_types]
 
-suite "History Block Proofs - Historical Roots":
+suite "History Block Proofs - Historical Roots - Test Vectors":
   test "BlockProofHistoricalRoots for Execution BlockHeader":
     let
       testsPath =
@@ -38,7 +38,7 @@ import
 # - Adjust tests to test usage of historical_summaries and historical_roots
 # together.
 
-suite "Beacon Chain Block Proofs - Capella":
+suite "History Block Proofs - Historical Summaries":
   let
     cfg = block:
      var res = defaultRuntimeConfig
@@ -44,7 +44,7 @@ proc readHistoricalSummaries(
   except SerializationError as err:
     err("Failed decoding historical_summaries: " & err.msg)
 
-suite "History Block Proofs - Historical Summaries":
+suite "History Block Proofs - Historical Summaries - Test Vectors":
   test "BlockProofHistoricalSummaries for Execution BlockHeader":
     let
       testsPath =
@@ -13,12 +13,12 @@ import
   unittest2,
   stint,
   eth/common/eth_types_rlp,
-  ../eth_data/history_data_json_store,
-  ../network/history/[history_content, validation/historical_hashes_accumulator],
-  ./test_helpers
+  ../../eth_data/history_data_json_store,
+  ../../network/history/[history_content, validation/historical_hashes_accumulator],
+  ./test_history_util
 
-suite "Header Accumulator":
-  test "Header Accumulator Canonical Verification":
+suite "Historical Hashes Accumulator":
+  test "Historical Hashes Accumulator Canonical Verification":
     const
       # Amount of headers to be created and added to the accumulator
       amount = mergeBlockNumber
@@ -77,7 +77,7 @@ suite "Header Accumulator":
     for i in headersToTest:
       check verifyAccumulatorProof(accumulator, headers[i], proof).isErr()
 
-  test "Header Accumulator - Not Finished":
+  test "Historical Hashes Accumulator - Not Finished":
     # Less headers than needed to finish the accumulator
     const amount = mergeBlockNumber - 1
@@ -14,10 +14,10 @@ import
   stint,
   stew/byteutils,
   eth/common/eth_types_rlp,
-  ../../../eth_data/history_data_json_store,
-  ../../../network/history/[history_content, validation/historical_hashes_accumulator]
+  ../../eth_data/history_data_json_store,
+  ../../network/history/[history_content, validation/historical_hashes_accumulator]
 
-suite "Header Accumulator Root":
+suite "Historical Hashes Accumulator Root":
   test "Header Accumulator Update":
     const
       hashTreeRoots = [
@@ -13,16 +13,16 @@ import
   unittest2,
   stew/byteutils,
   eth/common/eth_types_rlp,
-  ../../../network_metadata,
-  ../../../eth_data/[history_data_json_store, history_data_ssz_e2s],
-  ../../../network/history/
+  ../../network_metadata,
+  ../../eth_data/[history_data_json_store, history_data_ssz_e2s],
+  ../../network/history/
     [history_content, history_network, validation/historical_hashes_accumulator],
-  ../../test_history_util,
-  ../../../eth_data/yaml_utils
+  ../../eth_data/yaml_utils,
+  ./test_history_util
 
 from std/os import walkDir, splitFile, PathComponent
 
-suite "History Content Encodings":
+suite "History Content Values":
   test "HeaderWithProof Building and Encoding":
     const
       headerFile =
@@ -13,12 +13,12 @@ import
   stint,
   ssz_serialization,
   ssz_serialization/[proofs, merkleization],
-  ../../../network/history/history_content
+  ../../network/history/history_content
 
 # According to test vectors:
 # https://github.com/ethereum/portal-network-specs/blob/master/content-keys-test-vectors.md#history-network-keys
 
-suite "History ContentKey Encodings":
+suite "History Content Keys":
   test "BlockHeader":
     # Input
     const blockHash = BlockHash.fromHex(
@@ -15,16 +15,16 @@ import
   stew/byteutils,
   results,
   eth/[common/eth_types, rlp],
-  ../../../common/common_types,
-  ../../../eth_data/history_data_json_store,
-  ../../../network/history/history_network
+  ../../common/common_types,
+  ../../eth_data/history_data_json_store,
+  ../../network/history/history_network
 
 const
   dataFile = "./fluffy/tests/blocks/mainnet_blocks_selected.json"
   # Block that will be validated
   blockHashStr = "0xce8f770a56203e10afe19c7dd7e2deafc356e6cce0a560a30a85add03da56137"
 
-suite "History Network Content Validation":
+suite "History Content Values Validation":
   let blockDataTable =
     readJsonType(dataFile, BlockDataTable).expect("Valid data file should parse")
@@ -12,11 +12,12 @@ import
   eth/p2p/discoveryv5/routing_table,
   eth/common/eth_types_rlp,
   eth/rlp,
-  ../network/wire/[portal_protocol, portal_stream, portal_protocol_config],
-  ../network/history/
+  ../../network/wire/[portal_protocol, portal_stream, portal_protocol_config],
+  ../../network/history/
     [history_network, history_content, validation/historical_hashes_accumulator],
-  ../database/content_db,
-  ./test_helpers
+  ../../database/content_db,
+  ../test_helpers,
+  ./test_history_util
 
 type HistoryNode = ref object
   discoveryProtocol*: discv5_protocol.Protocol
@@ -0,0 +1,92 @@
+# Nimbus
+# Copyright (c) 2023-2024 Status Research & Development GmbH
+# Licensed and distributed under either of
+# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.push raises: [].}
+
+import
+  results,
+  eth/common/eth_types_rlp,
+  ../../network/history/[history_content, validation/historical_hashes_accumulator]
+
+export results, historical_hashes_accumulator, history_content
+
+proc buildHeadersWithProof*(
+    blockHeaders: seq[BlockHeader], epochRecord: EpochRecordCached
+): Result[seq[(seq[byte], seq[byte])], string] =
+  var blockHeadersWithProof: seq[(seq[byte], seq[byte])]
+  for header in blockHeaders:
+    if header.isPreMerge():
+      let
+        content = ?buildHeaderWithProof(header, epochRecord)
+        contentKey = ContentKey(
+          contentType: blockHeader,
+          blockHeaderKey: BlockKey(blockHash: header.blockHash()),
+        )
+
+      blockHeadersWithProof.add((encode(contentKey).asSeq(), SSZ.encode(content)))
+
+  ok(blockHeadersWithProof)
+
+func buildAccumulator*(headers: seq[BlockHeader]): Result[FinishedAccumulator, string] =
+  var accumulator: Accumulator
+  for header in headers:
+    updateAccumulator(accumulator, header)
+
+    if header.number == mergeBlockNumber - 1:
+      return ok(finishAccumulator(accumulator))
+
+  err("Not enough headers provided to finish the accumulator")
+
+func buildAccumulatorData*(
+    headers: seq[BlockHeader]
+): Result[(FinishedAccumulator, seq[EpochRecord]), string] =
+  var accumulator: Accumulator
+  var epochRecords: seq[EpochRecord]
+  for header in headers:
+    updateAccumulator(accumulator, header)
+
+    if accumulator.currentEpoch.len() == EPOCH_SIZE:
+      epochRecords.add(accumulator.currentEpoch)
+
+    if header.number == mergeBlockNumber - 1:
+      epochRecords.add(accumulator.currentEpoch)
+
+      return ok((finishAccumulator(accumulator), epochRecords))
+
+  err("Not enough headers provided to finish the accumulator")
+
+func buildProof*(
+    header: BlockHeader, epochRecords: seq[EpochRecord]
+): Result[AccumulatorProof, string] =
+  let epochIndex = getEpochIndex(header)
+  doAssert(epochIndex < uint64(epochRecords.len()))
+  let epochRecord = epochRecords[epochIndex]
+
+  buildProof(header, epochRecord)
+
+func buildHeaderWithProof*(
+    header: BlockHeader, epochRecords: seq[EpochRecord]
+): Result[BlockHeaderWithProof, string] =
+  ## Construct the accumulator proof for a specific header.
+  ## Returns the block header with the proof
+  if header.isPreMerge():
+    let epochIndex = getEpochIndex(header)
+    doAssert(epochIndex < uint64(epochRecords.len()))
+    let epochRecord = epochRecords[epochIndex]
+
+    buildHeaderWithProof(header, epochRecord)
+  else:
+    err("Cannot build accumulator proof for post merge header")
+
+func buildHeadersWithProof*(
+    headers: seq[BlockHeader], epochRecords: seq[EpochRecord]
+): Result[seq[BlockHeaderWithProof], string] =
+  var headersWithProof: seq[BlockHeaderWithProof]
+  for header in headers:
+    headersWithProof.add(?buildHeaderWithProof(header, epochRecords))
+
+  ok(headersWithProof)
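For orientation, a hypothetical caller of the helpers above (the `headers` sequence and the `.expect` messages are illustrative, not from this diff):

    # Sketch: given pre-merge headers for blocks 0 .. mergeBlockNumber-1,
    # build the finished accumulator plus its epoch records, then produce
    # a header-with-proof for a single header.
    let (accumulator, epochRecords) = buildAccumulatorData(headers).expect(
      "enough headers to reach the merge block"
    )
    let headerWithProof = buildHeaderWithProof(headers[0], epochRecords).expect(
      "pre-merge header"
    )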
@@ -1,48 +0,0 @@
-# fluffy
-# Copyright (c) 2023-2024 Status Research & Development GmbH
-# Licensed and distributed under either of
-# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
-# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
-# at your option. This file may not be copied, modified, or distributed except according to those terms.
-
-{.used.}
-
-{.push raises: [].}
-
-import
-  unittest2,
-  beacon_chain/spec/forks,
-  beacon_chain/spec/datatypes/capella,
-  # Test helpers
-  beacon_chain /../ tests/testblockutil,
-  beacon_chain /../ tests/mocking/mock_genesis,
-  beacon_chain /../ tests/consensus_spec/fixtures_utils,
-  ../network/beacon/beacon_chain_historical_summaries
-
-suite "Beacon Chain Historical Summaries":
-  let
-    cfg = genesisTestRuntimeConfig(ConsensusFork.Capella)
-    state = newClone(initGenesisState(cfg = cfg))
-  var cache = StateCache()
-
-  var blocks: seq[capella.SignedBeaconBlock]
-  # Note:
-  # Adding 8192 blocks. First block is genesis block and not one of these.
-  # Then one extra block is needed to get the historical summaries, block
-  # roots and state roots processed.
-  # index i = 0 is second block.
-  # index i = 8190 is 8192th block and last one that is part of the first
-  # historical root
-  for i in 0 ..< SLOTS_PER_HISTORICAL_ROOT:
-    blocks.add(addTestBlock(state[], cache, cfg = cfg).capellaData)
-
-  test "Historical Summaries Proof":
-    withState(state[]):
-      when consensusFork >= ConsensusFork.Capella:
-        let historical_summaries = forkyState.data.historical_summaries
-        let res = buildProof(state[])
-        check res.isOk()
-        let proof = res.get()
-
-        withState(state[]):
-          check verifyProof(historical_summaries, proof, forkyState.root)
@@ -48,7 +48,7 @@ proc stop(testCase: TestCase) {.async.} =
   await testCase.server.closeWait()
   await testCase.localDiscovery.closeWait()
 
-procSuite "Discovery RPC":
+procSuite "Discovery v5 JSON-RPC API":
   let rng = newRng()
 
   asyncTest "Get local node info":
@@ -9,11 +9,9 @@
 
 import
   std/net,
-  eth/[common, keys, rlp],
+  eth/[common, keys],
   eth/p2p/discoveryv5/[enr, node, routing_table],
-  eth/p2p/discoveryv5/protocol as discv5_protocol,
-  ../network/history/[history_content, validation/historical_hashes_accumulator],
-  ../database/content_db
+  eth/p2p/discoveryv5/protocol as discv5_protocol
 
 proc localAddress*(port: int): Address {.raises: [ValueError].} =
   Address(ip: parseIpAddress("127.0.0.1"), port: Port(port))
@@ -51,63 +49,3 @@ proc genByteSeq*(length: int): seq[byte] =
     resultSeq[i] = byte(i)
     inc i
   return resultSeq
-
-func buildAccumulator*(headers: seq[BlockHeader]): Result[FinishedAccumulator, string] =
-  var accumulator: Accumulator
-  for header in headers:
-    updateAccumulator(accumulator, header)
-
-    if header.number == mergeBlockNumber - 1:
-      return ok(finishAccumulator(accumulator))
-
-  err("Not enough headers provided to finish the accumulator")
-
-func buildAccumulatorData*(
-    headers: seq[BlockHeader]
-): Result[(FinishedAccumulator, seq[EpochRecord]), string] =
-  var accumulator: Accumulator
-  var epochRecords: seq[EpochRecord]
-  for header in headers:
-    updateAccumulator(accumulator, header)
-
-    if accumulator.currentEpoch.len() == EPOCH_SIZE:
-      epochRecords.add(accumulator.currentEpoch)
-
-    if header.number == mergeBlockNumber - 1:
-      epochRecords.add(accumulator.currentEpoch)
-
-      return ok((finishAccumulator(accumulator), epochRecords))
-
-  err("Not enough headers provided to finish the accumulator")
-
-func buildProof*(
-    header: BlockHeader, epochRecords: seq[EpochRecord]
-): Result[AccumulatorProof, string] =
-  let epochIndex = getEpochIndex(header)
-  doAssert(epochIndex < uint64(epochRecords.len()))
-  let epochRecord = epochRecords[epochIndex]
-
-  buildProof(header, epochRecord)
-
-func buildHeaderWithProof*(
-    header: BlockHeader, epochRecords: seq[EpochRecord]
-): Result[BlockHeaderWithProof, string] =
-  ## Construct the accumulator proof for a specific header.
-  ## Returns the block header with the proof
-  if header.isPreMerge():
-    let epochIndex = getEpochIndex(header)
-    doAssert(epochIndex < uint64(epochRecords.len()))
-    let epochRecord = epochRecords[epochIndex]
-
-    buildHeaderWithProof(header, epochRecord)
-  else:
-    err("Cannot build accumulator proof for post merge header")
-
-func buildHeadersWithProof*(
-    headers: seq[BlockHeader], epochRecords: seq[EpochRecord]
-): Result[seq[BlockHeaderWithProof], string] =
-  var headersWithProof: seq[BlockHeaderWithProof]
-  for header in headers:
-    headersWithProof.add(?buildHeaderWithProof(header, epochRecords))
-
-  ok(headersWithProof)
@@ -1,32 +0,0 @@
-# Nimbus
-# Copyright (c) 2023-2024 Status Research & Development GmbH
-# Licensed and distributed under either of
-# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
-# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
-# at your option. This file may not be copied, modified, or distributed except according to those terms.
-
-{.push raises: [].}
-
-import
-  results,
-  eth/common/eth_types_rlp,
-  ../network/history/[history_content, validation/historical_hashes_accumulator]
-
-export results, historical_hashes_accumulator, history_content
-
-proc buildHeadersWithProof*(
-    blockHeaders: seq[BlockHeader], epochRecord: EpochRecordCached
-): Result[seq[(seq[byte], seq[byte])], string] =
-  var blockHeadersWithProof: seq[(seq[byte], seq[byte])]
-  for header in blockHeaders:
-    if header.isPreMerge():
-      let
-        content = ?buildHeaderWithProof(header, epochRecord)
-        contentKey = ContentKey(
-          contentType: blockHeader,
-          blockHeaderKey: BlockKey(blockHash: header.blockHash()),
-        )
-
-      blockHeadersWithProof.add((encode(contentKey).asSeq(), SSZ.encode(content)))
-
-  ok(blockHeadersWithProof)
@@ -1,5 +1,5 @@
-# Fluffy
-# Copyright (c) 2022-2024 Status Research & Development GmbH
+# Nimbus
+# Copyright (c) 2024 Status Research & Development GmbH
 # Licensed under either of
 # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
 # * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
@@ -7,9 +7,4 @@
 
 {.warning[UnusedImport]: off.}
 
-import
-  ./test_portal_wire_encoding,
-  ./test_history_content_keys,
-  ./test_history_content,
-  ./test_history_content_validation,
-  ./test_accumulator_root
+import ./test_portal_wire_encoding, ./test_portal_wire_protocol
@@ -13,7 +13,7 @@ import
   stew/byteutils,
   results,
   eth/p2p/discoveryv5/enr,
-  ../../../network/wire/messages
+  ../../network/wire/messages
 
 # According to test vectors:
 # https://github.com/ethereum/portal-network-specs/blob/master/portal-wire-test-vectors.md
@@ -16,9 +16,9 @@ import
   eth/p2p/discoveryv5/routing_table,
   nimcrypto/[hash, sha2],
   eth/p2p/discoveryv5/protocol as discv5_protocol,
-  ../network/wire/[portal_protocol, portal_stream, portal_protocol_config],
-  ../database/content_db,
-  ./test_helpers
+  ../../network/wire/[portal_protocol, portal_stream, portal_protocol_config],
+  ../../database/content_db,
+  ../test_helpers
 
 const protocolId = [byte 0x50, 0x00]