Refactor portal test directory structure (#2650)

parent 38d651c9c8
commit e603952df3

Makefile
@@ -281,17 +281,17 @@ fluffy-test-reproducibility:
 	{ echo -e "\e[91mFailure: the binary changed between builds.\e[39m"; exit 1; }

 # fluffy tests
-all_fluffy_portal_spec_tests: | build deps
+all_history_network_custom_chain_tests: | build deps
 	echo -e $(BUILD_MSG) "build/$@" && \
-		$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/portal_spec_tests/mainnet/$@.nim"
+		$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:mergeBlockNumber:38130 -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/history_network_tests/$@.nim"


 all_fluffy_tests: | build deps
 	echo -e $(BUILD_MSG) "build/$@" && \
-		$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -d:mergeBlockNumber:38130 -o:build/$@ "fluffy/tests/$@.nim"
+		$(ENV_SCRIPT) nim c -r $(NIM_PARAMS) -d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite -o:build/$@ "fluffy/tests/$@.nim"

 # builds and runs the fluffy test suite
-fluffy-test: | all_fluffy_portal_spec_tests all_fluffy_tests
+fluffy-test: | all_fluffy_tests all_history_network_custom_chain_tests

 # builds the fluffy tools, wherever they are
 $(FLUFFY_TOOLS): | build deps rocksdb

@@ -357,7 +357,7 @@ txparse: | build deps

 # usual cleaning
 clean: | clean-common
-	rm -rf build/{nimbus,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_fluffy_portal_spec_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM}
+	rm -rf build/{nimbus,fluffy,libverifproxy,nimbus_verified_proxy,$(TOOLS_CSV),$(FLUFFY_TOOLS_CSV),all_tests,test_kvstore_rocksdb,test_rpc,all_fluffy_tests,all_history_network_custom_chain_tests,test_portal_testnet,utp_test_app,utp_test,*.dSYM}
 	rm -rf tools/t8n/{t8n,t8n_test}
 	rm -rf tools/evmstate/{evmstate,evmstate_test}
 ifneq ($(USE_LIBBACKTRACE), 0)
@@ -17,11 +17,8 @@ import
   ../../database/content_db,
   ../../network_metadata,
   ../wire/[portal_protocol, portal_stream, portal_protocol_config],
-  "."/[
-    history_content,
-    beacon_chain_historical_roots,
-    validation/historical_hashes_accumulator,
-  ],
+  "."/[history_content, validation/historical_hashes_accumulator],
+  ../beacon/beacon_chain_historical_roots,
   ./content/content_deprecated

 logScope:
@@ -20,7 +20,7 @@ import
   ../rpc/eth_rpc_client,
   ../eth_data/[history_data_seeding, history_data_json_store, history_data_ssz_e2s],
   ../network/history/[history_content, validation/historical_hashes_accumulator],
-  ../tests/test_history_util
+  ../tests/history_network_tests/test_history_util

 type
   FutureCallback[A] = proc(): Future[A] {.gcsafe, raises: [].}
@@ -8,13 +8,9 @@
 {.warning[UnusedImport]: off.}

 import
-  ./test_portal_wire_protocol,
-  ./test_accumulator,
-  ./test_history_network,
   ./test_content_db,
   ./test_discovery_rpc,
-  ./test_beacon_chain_historical_roots,
-  ./test_beacon_chain_historical_summaries,
+  ./wire_protocol_tests/all_wire_protocol_tests,
   ./history_network_tests/all_history_network_tests,
   ./beacon_network_tests/all_beacon_network_tests,
   ./state_network_tests/all_state_network_tests
@@ -9,6 +9,8 @@

 import
   ./test_beacon_content,
+  ./test_beacon_historical_roots,
   ./test_beacon_historical_summaries,
+  ./test_beacon_historical_summaries_vectors,
   ./test_beacon_network,
   ./test_beacon_light_client
@@ -19,7 +19,7 @@ import
   ../../eth_data/yaml_utils,
   "."/light_client_test_data

-suite "Beacon Content Encodings - Mainnet":
+suite "Beacon Content Keys and Values - Test Vectors":
   # These test vectors are generated by eth_data_exporter. The content is taken
   # from mainnet and encoded as it would be transmitted on Portal Network,
   # including also the content key.

@@ -170,7 +170,7 @@ suite "Beacon Content Encodings - Mainnet":
     check encoded == contentValueEncoded
     check encode(key).asSeq() == contentKeyEncoded

-suite "Beacon Content Encodings":
+suite "Beacon Content Keys and Values":
   # TODO: These tests are less useful now and should instead be altered to
   # use the consensus test vectors to simply test if encoding / decoding works
   # fine for the different forks.

@@ -277,7 +277,7 @@ suite "Beacon Content Encodings":
       decodeLightClientBootstrapForked(forkDigests, encodedTooEarlyFork).isErr()
       decodeLightClientBootstrapForked(forkDigests, encodedUnknownFork).isErr()

-suite "Beacon ContentKey Encodings ":
+suite "Beacon Content Keys - Invalid Cases":
   test "Invalid prefix - 0 value":
     let encoded = ContentKeyByteList.init(@[byte 0x00])
     let decoded = decode(encoded)
@@ -17,7 +17,7 @@ import
   beacon_chain /../ tests/testblockutil,
   beacon_chain /../ tests/mocking/mock_genesis,
   beacon_chain /../ tests/consensus_spec/fixtures_utils,
-  ../network/history/beacon_chain_historical_roots
+  ../../network/beacon/beacon_chain_historical_roots

 suite "Beacon Chain Historical Roots":
   let
@@ -1,5 +1,5 @@
 # fluffy
-# Copyright (c) 2024 Status Research & Development GmbH
+# Copyright (c) 2023-2024 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).

@@ -7,78 +7,42 @@

 {.used.}

+{.push raises: [].}
+
 import
   unittest2,
-  stew/byteutils,
-  results,
-  beacon_chain/networking/network_metadata,
   beacon_chain/spec/forks,
-  ../../network/beacon/[beacon_chain_historical_summaries, beacon_content],
-  ../../eth_data/yaml_utils
+  beacon_chain/spec/datatypes/capella,
+  # Test helpers
+  beacon_chain /../ tests/testblockutil,
+  beacon_chain /../ tests/mocking/mock_genesis,
+  beacon_chain /../ tests/consensus_spec/fixtures_utils,
+  ../../network/beacon/beacon_chain_historical_summaries

-type YamlHistoricalSummariesWithProof* = object
-  content_key*: string
-  content_value*: string
-  beacon_state_root*: string
-  historical_summaries_root*: string
-  historical_summaries_state_proof*: array[5, string]
-  epoch*: uint64
-
-suite "Beacon HistoricalSummariesWithProof":
-  const testVectorDir =
-    "./vendor/portal-spec-tests/tests/mainnet/beacon_chain/historical_summaries_with_proof/deneb/"
-
+suite "Beacon Chain Historical Summaries":
   let
-    metadata = getMetadataForNetwork("mainnet")
-    genesisState =
-      try:
-        template genesisData(): auto =
-          metadata.genesis.bakedBytes
+    cfg = genesisTestRuntimeConfig(ConsensusFork.Capella)
+    state = newClone(initGenesisState(cfg = cfg))
+  var cache = StateCache()

-        newClone(
-          readSszForkedHashedBeaconState(
-            metadata.cfg, genesisData.toOpenArray(genesisData.low, genesisData.high)
-          )
-        )
-      except CatchableError as err:
-        raiseAssert "Invalid baked-in state: " & err.msg
+  var blocks: seq[capella.SignedBeaconBlock]
+  # Note:
+  # Adding 8192 blocks. First block is genesis block and not one of these.
+  # Then one extra block is needed to get the historical summaries, block
+  # roots and state roots processed.
+  # index i = 0 is second block.
+  # index i = 8190 is 8192th block and last one that is part of the first
+  # historical root
+  for i in 0 ..< SLOTS_PER_HISTORICAL_ROOT:
+    blocks.add(addTestBlock(state[], cache, cfg = cfg).capellaData)

-    # Although the test data is generated from a test state, we need to use the
-    # forkDigests of mainnet as apparently these are used in the generated test vector.
-    genesis_validators_root = getStateField(genesisState[], genesis_validators_root)
-    # genesis_validators_root = Digest.fromHex(
-    #   "0x2170688a9e92595fb353c0a2ad6733431a8066c7ecb48ab3b2aaf9091a1722b1"
-    # )
-    forkDigests = newClone ForkDigests.init(metadata.cfg, genesis_validators_root)
+  test "Historical Summaries Proof":
+    withState(state[]):
+      when consensusFork >= ConsensusFork.Capella:
+        let historical_summaries = forkyState.data.historical_summaries
+        let res = buildProof(state[])
+        check res.isOk()
+        let proof = res.get()

-  test "HistoricalSummaries Encoding/Decoding and Verification":
-    const file = testVectorDir & "historical_summaries_with_proof.yaml"
-    let
-      testCase = YamlHistoricalSummariesWithProof.loadFromYaml(file).valueOr:
-        raiseAssert "Invalid test vector file: " & error
-
-      contentKeyEncoded = testCase.content_key.hexToSeqByte()
-      contentValueEncoded = testCase.content_value.hexToSeqByte()
-
-      # Decode content and content key
-      contentKey = decodeSsz(contentKeyEncoded, ContentKey)
-      contentValue =
-        decodeSsz(forkDigests[], contentValueEncoded, HistoricalSummariesWithProof)
-    check:
-      contentKey.isOk()
-      contentValue.isOk()
-
-    let summariesWithProof = contentValue.value()
-    let root = hash_tree_root(summariesWithProof.historical_summaries)
-
-    check:
-      root.data == testCase.historical_summaries_root.hexToSeqByte()
-      summariesWithProof.epoch == testCase.epoch
-      verifyProof(summariesWithProof, Digest.fromHex(testCase.beacon_state_root))
-
-    # Encode content and content key
-    let consensusFork = consensusForkAtEpoch(metadata.cfg, summariesWithProof.epoch)
-    let forkDigest = atConsensusFork(forkDigests[], consensusFork)
-    check:
-      encodeSsz(summariesWithProof, forkDigest) == contentValueEncoded
-      encode(contentKey.value()).asSeq() == contentKeyEncoded
+        withState(state[]):
+          check verifyProof(historical_summaries, proof, forkyState.root)
@@ -0,0 +1,84 @@
+# fluffy
+# Copyright (c) 2024 Status Research & Development GmbH
+# Licensed and distributed under either of
+# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.used.}
+
+import
+  unittest2,
+  stew/byteutils,
+  results,
+  beacon_chain/networking/network_metadata,
+  beacon_chain/spec/forks,
+  ../../network/beacon/[beacon_chain_historical_summaries, beacon_content],
+  ../../eth_data/yaml_utils
+
+type YamlHistoricalSummariesWithProof* = object
+  content_key*: string
+  content_value*: string
+  beacon_state_root*: string
+  historical_summaries_root*: string
+  historical_summaries_state_proof*: array[5, string]
+  epoch*: uint64
+
+suite "Beacon Chain Historical Summaries With Proof - Test Vectors":
+  const testVectorDir =
+    "./vendor/portal-spec-tests/tests/mainnet/beacon_chain/historical_summaries_with_proof/deneb/"
+
+  let
+    metadata = getMetadataForNetwork("mainnet")
+    genesisState =
+      try:
+        template genesisData(): auto =
+          metadata.genesis.bakedBytes
+
+        newClone(
+          readSszForkedHashedBeaconState(
+            metadata.cfg, genesisData.toOpenArray(genesisData.low, genesisData.high)
+          )
+        )
+      except CatchableError as err:
+        raiseAssert "Invalid baked-in state: " & err.msg
+
+    # Although the test data is generated from a test state, we need to use the
+    # forkDigests of mainnet as apparently these are used in the generated test vector.
+    genesis_validators_root = getStateField(genesisState[], genesis_validators_root)
+    # genesis_validators_root = Digest.fromHex(
+    #   "0x2170688a9e92595fb353c0a2ad6733431a8066c7ecb48ab3b2aaf9091a1722b1"
+    # )
+    forkDigests = newClone ForkDigests.init(metadata.cfg, genesis_validators_root)
+
+  test "HistoricalSummaries Encoding/Decoding and Verification":
+    const file = testVectorDir & "historical_summaries_with_proof.yaml"
+    let
+      testCase = YamlHistoricalSummariesWithProof.loadFromYaml(file).valueOr:
+        raiseAssert "Invalid test vector file: " & error
+
+      contentKeyEncoded = testCase.content_key.hexToSeqByte()
+      contentValueEncoded = testCase.content_value.hexToSeqByte()
+
+      # Decode content and content key
+      contentKey = decodeSsz(contentKeyEncoded, ContentKey)
+      contentValue =
+        decodeSsz(forkDigests[], contentValueEncoded, HistoricalSummariesWithProof)
+    check:
+      contentKey.isOk()
+      contentValue.isOk()
+
+    let summariesWithProof = contentValue.value()
+    let root = hash_tree_root(summariesWithProof.historical_summaries)
+
+    check:
+      root.data == testCase.historical_summaries_root.hexToSeqByte()
+      summariesWithProof.epoch == testCase.epoch
+      verifyProof(summariesWithProof, Digest.fromHex(testCase.beacon_state_root))
+
+    # Encode content and content key
+    let consensusFork = consensusForkAtEpoch(metadata.cfg, summariesWithProof.epoch)
+    let forkDigest = atConsensusFork(forkDigests[], consensusFork)
+    check:
+      encodeSsz(summariesWithProof, forkDigest) == contentValueEncoded
+      encode(contentKey.value()).asSeq() == contentKeyEncoded
@@ -18,7 +18,7 @@ import
   ../../network/beacon/[beacon_init_loader, beacon_light_client],
   "."/[light_client_test_data, beacon_test_helpers]

-procSuite "Portal Beacon Light Client":
+procSuite "Beacon Light Client":
   let rng = newRng()

   proc headerCallback(
@@ -20,7 +20,7 @@ import
     [beacon_network, beacon_init_loader, beacon_chain_historical_summaries],
   "."/[light_client_test_data, beacon_test_helpers]

-procSuite "Beacon Content Network":
+procSuite "Beacon Network":
   let rng = newRng()

   asyncTest "Get bootstrap by trusted block hash":
@@ -0,0 +1,13 @@
+# Nimbus
+# Copyright (c) 2024 Status Research & Development GmbH
+# Licensed under either of
+# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
+# * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.warning[UnusedImport]: off.}
+
+# Note: These tests are separated because they require a custom merge block
+# number defined at compile time. Once runtime chain config gets added these
+# tests can be compiled together with all the other portal tests.
+import ./test_historical_hashes_accumulator, ./test_history_network
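Note: the comment above is why this runner is a separate binary: `mergeBlockNumber` is a compile-time constant, overridden per target via the `-d:mergeBlockNumber:38130` switch seen in the Makefile hunk. A minimal sketch of that pattern using Nim's `intdefine` pragma; the module, type, and default value below are illustrative assumptions, not the actual fluffy definition:

  # Sketch only: a chain parameter fixed at compile time. The const name must
  # match the -d: switch; the mainnet-merge default here is an assumption.
  const mergeBlockNumber* {.intdefine.}: int = 15537394

  func isPreMerge*(blockNumber: uint64): bool =
    # Only pre-merge headers are covered by the historical hashes accumulator.
    blockNumber < uint64(mergeBlockNumber)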
@@ -8,6 +8,10 @@
 {.warning[UnusedImport]: off.}

 import
+  ./test_history_content_keys,
+  ./test_history_content,
+  ./test_history_content_validation,
+  ./test_historical_hashes_accumulator_root,
   ./test_block_proof_historical_roots,
   ./test_block_proof_historical_roots_vectors,
   ./test_block_proof_historical_summaries,
@@ -35,7 +35,7 @@ import
 # TODO: Add more blocks to reach 1+ historical roots, to make sure that indexing
 # is properly tested.

-suite "Beacon Chain Block Proofs - Bellatrix":
+suite "History Block Proofs - Historical Roots":
   let
     cfg = block:
       var res = defaultRuntimeConfig
@@ -19,7 +19,7 @@ import
   ../../network/history/validation/block_proof_historical_roots,
   ../../eth_data/[yaml_utils, yaml_eth_types]

-suite "History Block Proofs - Historical Roots":
+suite "History Block Proofs - Historical Roots - Test Vectors":
   test "BlockProofHistoricalRoots for Execution BlockHeader":
     let
       testsPath =
@@ -38,7 +38,7 @@ import
 # - Adjust tests to test usage of historical_summaries and historical_roots
 # together.

-suite "Beacon Chain Block Proofs - Capella":
+suite "History Block Proofs - Historical Summaries":
   let
     cfg = block:
       var res = defaultRuntimeConfig
@@ -44,7 +44,7 @@ proc readHistoricalSummaries(
     except SerializationError as err:
       err("Failed decoding historical_summaries: " & err.msg)

-suite "History Block Proofs - Historical Summaries":
+suite "History Block Proofs - Historical Summaries - Test Vectors":
   test "BlockProofHistoricalSummaries for Execution BlockHeader":
     let
       testsPath =
@@ -13,12 +13,12 @@ import
   unittest2,
   stint,
   eth/common/eth_types_rlp,
-  ../eth_data/history_data_json_store,
-  ../network/history/[history_content, validation/historical_hashes_accumulator],
-  ./test_helpers
+  ../../eth_data/history_data_json_store,
+  ../../network/history/[history_content, validation/historical_hashes_accumulator],
+  ./test_history_util

-suite "Header Accumulator":
-  test "Header Accumulator Canonical Verification":
+suite "Historical Hashes Accumulator":
+  test "Historical Hashes Accumulator Canonical Verification":
     const
       # Amount of headers to be created and added to the accumulator
       amount = mergeBlockNumber

@@ -77,7 +77,7 @@ suite "Header Accumulator":
     for i in headersToTest:
       check verifyAccumulatorProof(accumulator, headers[i], proof).isErr()

-  test "Header Accumulator - Not Finished":
+  test "Historical Hashes Accumulator - Not Finished":
     # Less headers than needed to finish the accumulator
     const amount = mergeBlockNumber - 1
@@ -14,10 +14,10 @@ import
   stint,
   stew/byteutils,
   eth/common/eth_types_rlp,
-  ../../../eth_data/history_data_json_store,
-  ../../../network/history/[history_content, validation/historical_hashes_accumulator]
+  ../../eth_data/history_data_json_store,
+  ../../network/history/[history_content, validation/historical_hashes_accumulator]

-suite "Header Accumulator Root":
+suite "Historical Hashes Accumulator Root":
   test "Header Accumulator Update":
     const
       hashTreeRoots = [
@@ -13,16 +13,16 @@ import
   unittest2,
   stew/byteutils,
   eth/common/eth_types_rlp,
-  ../../../network_metadata,
-  ../../../eth_data/[history_data_json_store, history_data_ssz_e2s],
-  ../../../network/history/
+  ../../network_metadata,
+  ../../eth_data/[history_data_json_store, history_data_ssz_e2s],
+  ../../network/history/
     [history_content, history_network, validation/historical_hashes_accumulator],
-  ../../test_history_util,
-  ../../../eth_data/yaml_utils
+  ../../eth_data/yaml_utils,
+  ./test_history_util

 from std/os import walkDir, splitFile, PathComponent

-suite "History Content Encodings":
+suite "History Content Values":
   test "HeaderWithProof Building and Encoding":
     const
       headerFile =
@@ -13,12 +13,12 @@ import
   stint,
   ssz_serialization,
   ssz_serialization/[proofs, merkleization],
-  ../../../network/history/history_content
+  ../../network/history/history_content

 # According to test vectors:
 # https://github.com/ethereum/portal-network-specs/blob/master/content-keys-test-vectors.md#history-network-keys

-suite "History ContentKey Encodings":
+suite "History Content Keys":
   test "BlockHeader":
     # Input
     const blockHash = BlockHash.fromHex(
@@ -15,16 +15,16 @@ import
   stew/byteutils,
   results,
   eth/[common/eth_types, rlp],
-  ../../../common/common_types,
-  ../../../eth_data/history_data_json_store,
-  ../../../network/history/history_network
+  ../../common/common_types,
+  ../../eth_data/history_data_json_store,
+  ../../network/history/history_network

 const
   dataFile = "./fluffy/tests/blocks/mainnet_blocks_selected.json"
   # Block that will be validated
   blockHashStr = "0xce8f770a56203e10afe19c7dd7e2deafc356e6cce0a560a30a85add03da56137"

-suite "History Network Content Validation":
+suite "History Content Values Validation":
   let blockDataTable =
     readJsonType(dataFile, BlockDataTable).expect("Valid data file should parse")
@@ -12,11 +12,12 @@ import
   eth/p2p/discoveryv5/routing_table,
   eth/common/eth_types_rlp,
   eth/rlp,
-  ../network/wire/[portal_protocol, portal_stream, portal_protocol_config],
-  ../network/history/
+  ../../network/wire/[portal_protocol, portal_stream, portal_protocol_config],
+  ../../network/history/
     [history_network, history_content, validation/historical_hashes_accumulator],
-  ../database/content_db,
-  ./test_helpers
+  ../../database/content_db,
+  ../test_helpers,
+  ./test_history_util

 type HistoryNode = ref object
   discoveryProtocol*: discv5_protocol.Protocol
@@ -0,0 +1,92 @@
+# Nimbus
+# Copyright (c) 2023-2024 Status Research & Development GmbH
+# Licensed and distributed under either of
+# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.push raises: [].}
+
+import
+  results,
+  eth/common/eth_types_rlp,
+  ../../network/history/[history_content, validation/historical_hashes_accumulator]
+
+export results, historical_hashes_accumulator, history_content
+
+proc buildHeadersWithProof*(
+    blockHeaders: seq[BlockHeader], epochRecord: EpochRecordCached
+): Result[seq[(seq[byte], seq[byte])], string] =
+  var blockHeadersWithProof: seq[(seq[byte], seq[byte])]
+  for header in blockHeaders:
+    if header.isPreMerge():
+      let
+        content = ?buildHeaderWithProof(header, epochRecord)
+        contentKey = ContentKey(
+          contentType: blockHeader,
+          blockHeaderKey: BlockKey(blockHash: header.blockHash()),
+        )
+
+      blockHeadersWithProof.add((encode(contentKey).asSeq(), SSZ.encode(content)))
+
+  ok(blockHeadersWithProof)
+
+func buildAccumulator*(headers: seq[BlockHeader]): Result[FinishedAccumulator, string] =
+  var accumulator: Accumulator
+  for header in headers:
+    updateAccumulator(accumulator, header)
+
+    if header.number == mergeBlockNumber - 1:
+      return ok(finishAccumulator(accumulator))
+
+  err("Not enough headers provided to finish the accumulator")
+
+func buildAccumulatorData*(
+    headers: seq[BlockHeader]
+): Result[(FinishedAccumulator, seq[EpochRecord]), string] =
+  var accumulator: Accumulator
+  var epochRecords: seq[EpochRecord]
+  for header in headers:
+    updateAccumulator(accumulator, header)
+
+    if accumulator.currentEpoch.len() == EPOCH_SIZE:
+      epochRecords.add(accumulator.currentEpoch)
+
+    if header.number == mergeBlockNumber - 1:
+      epochRecords.add(accumulator.currentEpoch)
+
+      return ok((finishAccumulator(accumulator), epochRecords))
+
+  err("Not enough headers provided to finish the accumulator")
+
+func buildProof*(
+    header: BlockHeader, epochRecords: seq[EpochRecord]
+): Result[AccumulatorProof, string] =
+  let epochIndex = getEpochIndex(header)
+  doAssert(epochIndex < uint64(epochRecords.len()))
+  let epochRecord = epochRecords[epochIndex]
+
+  buildProof(header, epochRecord)
+
+func buildHeaderWithProof*(
+    header: BlockHeader, epochRecords: seq[EpochRecord]
+): Result[BlockHeaderWithProof, string] =
+  ## Construct the accumulator proof for a specific header.
+  ## Returns the block header with the proof
+  if header.isPreMerge():
+    let epochIndex = getEpochIndex(header)
+    doAssert(epochIndex < uint64(epochRecords.len()))
+    let epochRecord = epochRecords[epochIndex]
+
+    buildHeaderWithProof(header, epochRecord)
+  else:
+    err("Cannot build accumulator proof for post merge header")
+
+func buildHeadersWithProof*(
+    headers: seq[BlockHeader], epochRecords: seq[EpochRecord]
+): Result[seq[BlockHeaderWithProof], string] =
+  var headersWithProof: seq[BlockHeaderWithProof]
+  for header in headers:
+    headersWithProof.add(?buildHeaderWithProof(header, epochRecords))
+
+  ok(headersWithProof)
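For orientation, a rough sketch of how a history network test might drive the helpers above; `makeTestHeaders` is an assumed fixture standing in for whatever header generator the real tests use (it is not part of this diff):

  import unittest2, results
  import ./test_history_util

  suite "Historical Hashes Accumulator - usage sketch":
    test "build accumulator and prove a pre-merge header":
      # Assumed fixture: sequential pre-merge headers 0 .. mergeBlockNumber-1.
      let headers = makeTestHeaders(0'u64, mergeBlockNumber - 1)

      # buildAccumulatorData finishes the accumulator and keeps the per-epoch
      # records that proof construction needs.
      let (accumulator, epochRecords) =
        buildAccumulatorData(headers).expect("enough headers to finish")

      # A header plus its accumulator proof, as served on the Portal network.
      let headerWithProof =
        buildHeaderWithProof(headers[123], epochRecords).expect("pre-merge header")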
@@ -1,48 +0,0 @@
-# fluffy
-# Copyright (c) 2023-2024 Status Research & Development GmbH
-# Licensed and distributed under either of
-# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
-# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
-# at your option. This file may not be copied, modified, or distributed except according to those terms.
-
-{.used.}
-
-{.push raises: [].}
-
-import
-  unittest2,
-  beacon_chain/spec/forks,
-  beacon_chain/spec/datatypes/capella,
-  # Test helpers
-  beacon_chain /../ tests/testblockutil,
-  beacon_chain /../ tests/mocking/mock_genesis,
-  beacon_chain /../ tests/consensus_spec/fixtures_utils,
-  ../network/beacon/beacon_chain_historical_summaries
-
-suite "Beacon Chain Historical Summaries":
-  let
-    cfg = genesisTestRuntimeConfig(ConsensusFork.Capella)
-    state = newClone(initGenesisState(cfg = cfg))
-  var cache = StateCache()
-
-  var blocks: seq[capella.SignedBeaconBlock]
-  # Note:
-  # Adding 8192 blocks. First block is genesis block and not one of these.
-  # Then one extra block is needed to get the historical summaries, block
-  # roots and state roots processed.
-  # index i = 0 is second block.
-  # index i = 8190 is 8192th block and last one that is part of the first
-  # historical root
-  for i in 0 ..< SLOTS_PER_HISTORICAL_ROOT:
-    blocks.add(addTestBlock(state[], cache, cfg = cfg).capellaData)
-
-  test "Historical Summaries Proof":
-    withState(state[]):
-      when consensusFork >= ConsensusFork.Capella:
-        let historical_summaries = forkyState.data.historical_summaries
-        let res = buildProof(state[])
-        check res.isOk()
-        let proof = res.get()
-
-        withState(state[]):
-          check verifyProof(historical_summaries, proof, forkyState.root)
@@ -48,7 +48,7 @@ proc stop(testCase: TestCase) {.async.} =
   await testCase.server.closeWait()
   await testCase.localDiscovery.closeWait()

-procSuite "Discovery RPC":
+procSuite "Discovery v5 JSON-RPC API":
   let rng = newRng()

   asyncTest "Get local node info":
@@ -9,11 +9,9 @@

 import
   std/net,
-  eth/[common, keys, rlp],
+  eth/[common, keys],
   eth/p2p/discoveryv5/[enr, node, routing_table],
-  eth/p2p/discoveryv5/protocol as discv5_protocol,
-  ../network/history/[history_content, validation/historical_hashes_accumulator],
-  ../database/content_db
+  eth/p2p/discoveryv5/protocol as discv5_protocol

 proc localAddress*(port: int): Address {.raises: [ValueError].} =
   Address(ip: parseIpAddress("127.0.0.1"), port: Port(port))
@@ -51,63 +49,3 @@ proc genByteSeq*(length: int): seq[byte] =
     resultSeq[i] = byte(i)
     inc i
   return resultSeq
-
-func buildAccumulator*(headers: seq[BlockHeader]): Result[FinishedAccumulator, string] =
-  var accumulator: Accumulator
-  for header in headers:
-    updateAccumulator(accumulator, header)
-
-    if header.number == mergeBlockNumber - 1:
-      return ok(finishAccumulator(accumulator))
-
-  err("Not enough headers provided to finish the accumulator")
-
-func buildAccumulatorData*(
-    headers: seq[BlockHeader]
-): Result[(FinishedAccumulator, seq[EpochRecord]), string] =
-  var accumulator: Accumulator
-  var epochRecords: seq[EpochRecord]
-  for header in headers:
-    updateAccumulator(accumulator, header)
-
-    if accumulator.currentEpoch.len() == EPOCH_SIZE:
-      epochRecords.add(accumulator.currentEpoch)
-
-    if header.number == mergeBlockNumber - 1:
-      epochRecords.add(accumulator.currentEpoch)
-
-      return ok((finishAccumulator(accumulator), epochRecords))
-
-  err("Not enough headers provided to finish the accumulator")
-
-func buildProof*(
-    header: BlockHeader, epochRecords: seq[EpochRecord]
-): Result[AccumulatorProof, string] =
-  let epochIndex = getEpochIndex(header)
-  doAssert(epochIndex < uint64(epochRecords.len()))
-  let epochRecord = epochRecords[epochIndex]
-
-  buildProof(header, epochRecord)
-
-func buildHeaderWithProof*(
-    header: BlockHeader, epochRecords: seq[EpochRecord]
-): Result[BlockHeaderWithProof, string] =
-  ## Construct the accumulator proof for a specific header.
-  ## Returns the block header with the proof
-  if header.isPreMerge():
-    let epochIndex = getEpochIndex(header)
-    doAssert(epochIndex < uint64(epochRecords.len()))
-    let epochRecord = epochRecords[epochIndex]
-
-    buildHeaderWithProof(header, epochRecord)
-  else:
-    err("Cannot build accumulator proof for post merge header")
-
-func buildHeadersWithProof*(
-    headers: seq[BlockHeader], epochRecords: seq[EpochRecord]
-): Result[seq[BlockHeaderWithProof], string] =
-  var headersWithProof: seq[BlockHeaderWithProof]
-  for header in headers:
-    headersWithProof.add(?buildHeaderWithProof(header, epochRecords))
-
-  ok(headersWithProof)
@@ -1,32 +0,0 @@
-# Nimbus
-# Copyright (c) 2023-2024 Status Research & Development GmbH
-# Licensed and distributed under either of
-# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
-# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
-# at your option. This file may not be copied, modified, or distributed except according to those terms.
-
-{.push raises: [].}
-
-import
-  results,
-  eth/common/eth_types_rlp,
-  ../network/history/[history_content, validation/historical_hashes_accumulator]
-
-export results, historical_hashes_accumulator, history_content
-
-proc buildHeadersWithProof*(
-    blockHeaders: seq[BlockHeader], epochRecord: EpochRecordCached
-): Result[seq[(seq[byte], seq[byte])], string] =
-  var blockHeadersWithProof: seq[(seq[byte], seq[byte])]
-  for header in blockHeaders:
-    if header.isPreMerge():
-      let
-        content = ?buildHeaderWithProof(header, epochRecord)
-        contentKey = ContentKey(
-          contentType: blockHeader,
-          blockHeaderKey: BlockKey(blockHash: header.blockHash()),
-        )
-
-      blockHeadersWithProof.add((encode(contentKey).asSeq(), SSZ.encode(content)))
-
-  ok(blockHeadersWithProof)
@@ -1,5 +1,5 @@
-# Fluffy
+# Nimbus
-# Copyright (c) 2022-2024 Status Research & Development GmbH
+# Copyright (c) 2024 Status Research & Development GmbH
 # Licensed under either of
 # * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
 # * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)

@@ -7,9 +7,4 @@

 {.warning[UnusedImport]: off.}

-import
-  ./test_portal_wire_encoding,
-  ./test_history_content_keys,
-  ./test_history_content,
-  ./test_history_content_validation,
-  ./test_accumulator_root
+import ./test_portal_wire_encoding, ./test_portal_wire_protocol
@@ -13,7 +13,7 @@ import
   stew/byteutils,
   results,
   eth/p2p/discoveryv5/enr,
-  ../../../network/wire/messages
+  ../../network/wire/messages

 # According to test vectors:
 # https://github.com/ethereum/portal-network-specs/blob/master/portal-wire-test-vectors.md
@@ -16,9 +16,9 @@ import
   eth/p2p/discoveryv5/routing_table,
   nimcrypto/[hash, sha2],
   eth/p2p/discoveryv5/protocol as discv5_protocol,
-  ../network/wire/[portal_protocol, portal_stream, portal_protocol_config],
-  ../database/content_db,
-  ./test_helpers
+  ../../network/wire/[portal_protocol, portal_stream, portal_protocol_config],
+  ../../database/content_db,
+  ../test_helpers

 const protocolId = [byte 0x50, 0x00]