Replace status-im/portal-spec-tests with ethereum fork version (#2097)
- The fluffy test vector repo got forked (well, copied rather) to become the official one under the ethereum GitHub org, so we switch to that repo now and archive ours.
- Our repo also stored the accumulator / historical_roots; that is replaced with a new repo which holds only network configs.
- Several changes were needed because test vectors got updated, and some of them changed to / are in YAML format instead of JSON.
parent 9cd80800df
commit d3a706c229
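For orientation on the YAML switch mentioned above: instead of iterating a JSON table of test vectors, the updated tests load one `content_key` / `content_value` pair per YAML file through the new `test_yaml_utils` helper added further down in this diff. A minimal sketch of that pattern, using only names that appear in this diff; the relative import path and the final `echo` are illustrative assumptions, not part of the change:

```nim
# Minimal sketch of the new YAML-based test vector loading (names taken from this diff).
import results               # valueOr / err, as used by the tests
import stew/byteutils        # hexToSeqByte, as used by the tests
import ./test_yaml_utils     # YamlPortalContent, loadFromYaml (added in this commit); path is illustrative

const dataFile =
  "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.yaml"

# loadFromYaml returns a Result; valueOr exposes the error message on failure.
let content = YamlPortalContent.loadFromYaml(dataFile).valueOr:
  raiseAssert "Invalid data file: " & error

# Each vector carries a single hex-encoded key/value pair.
let
  contentKeyEncoded = content.content_key.hexToSeqByte()
  contentValueEncoded = content.content_value.hexToSeqByte()

echo "content key bytes: ", contentKeyEncoded.len, ", content value bytes: ", contentValueEncoded.len
```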
@@ -212,11 +212,6 @@
  url = https://github.com/status-im/nimbus-security-resources.git
  ignore = untracked
  branch = master
[submodule "vendor/portal-spec-tests"]
  path = vendor/portal-spec-tests
  url = https://github.com/status-im/portal-spec-tests.git
  ignore = untracked
  branch = master
[submodule "vendor/libtommath"]
  path = vendor/libtommath
  url = https://github.com/libtom/libtommath

@@ -232,3 +227,18 @@
  url = https://github.com/arnetheduck/nim-results
  ignore = untracked
  branch = master
[submodule "vendor/NimYAML"]
  path = vendor/NimYAML
  url = https://github.com/status-im/NimYAML.git
  ignore = untracked
  branch = master
[submodule "vendor/portal-spec-tests"]
  path = vendor/portal-spec-tests
  url = https://github.com/ethereum/portal-spec-tests.git
  ignore = untracked
  branch = master
[submodule "vendor/portal-mainnet"]
  path = vendor/portal-mainnet
  url = https://github.com/status-im/portal-mainnet.git
  ignore = untracked
  branch = master

Makefile
@@ -51,7 +51,8 @@ EXCLUDED_NIM_PACKAGES := \
  vendor/nimbus-eth2/vendor/nim-presto \
  vendor/nimbus-eth2/vendor/nim-zxcvbn \
  vendor/nimbus-eth2/vendor/nim-kzg4844 \
  vendor/nimbus-eth2/vendor/nimbus-security-resources
  vendor/nimbus-eth2/vendor/nimbus-security-resources \
  vendor/nimbus-eth2/vendor/NimYAML

# we don't want an error here, so we can handle things later, in the ".DEFAULT" target
-include $(BUILD_SYSTEM_DIR)/makefiles/variables.mk

@@ -1,20 +0,0 @@
# Fluffy bootstrap nodes
# nimbus-fluffy-mainnet-master-01@metal-01.ih-eu-mda1.nimbus.fluffy
enr:-IS4QGUtAA29qeT3cWVr8lmJfySmkceR2wp6oFQtvO_uMe7KWaK_qd1UQvd93MJKXhMnubSsTQPJ6KkbIu0ywjvNdNEBgmlkgnY0gmlwhMIhKO6Jc2VjcDI1NmsxoQJ508pIqRqsjsvmUQfYGvaUFTxfsELPso_62FKDqlxI24N1ZHCCI40
# nimbus-fluffy-mainnet-master-02@metal-01.ih-eu-mda1.nimbus.fluffy
enr:-IS4QNaaoQuHGReAMJKoDd6DbQKMbQ4Mked3Gi3GRatwgRVVPXynPlO_-gJKRF_ZSuJr3wyHfwMHyJDbd6q1xZQVZ2kBgmlkgnY0gmlwhMIhKO6Jc2VjcDI1NmsxoQM2kBHT5s_Uh4gsNiOclQDvLK4kPpoQucge3mtbuLuUGYN1ZHCCI44
# nimbus-fluffy-mainnet-master-01@metal-02.ih-eu-mda1.nimbus.fluffy
enr:-IS4QBdIjs6S1ZkvlahSkuYNq5QW3DbD-UDcrm1l81f2PPjnNjb_NDa4B5x4olHCXtx0d2ZeZBHQyoHyNnuVZ-P1GVkBgmlkgnY0gmlwhMIhKO-Jc2VjcDI1NmsxoQOO3gFuaCAyQKscaiNLC9HfLbVzFdIerESFlOGcEuKWH4N1ZHCCI40
# nimbus-fluffy-mainnet-master-02@metal-02.ih-eu-mda1.nimbus.fluffy
enr:-IS4QM731tV0CvQXLTDcZNvgFyhhpAjYDKU5XLbM7sZ1WEzIRq4zsakgrv3KO3qyOYZ8jFBK-VzENF8o-vnykuQ99iABgmlkgnY0gmlwhMIhKO-Jc2VjcDI1NmsxoQMTq6Cdx3HmL3Q9sitavcPHPbYKyEibKPKvyVyOlNF8J4N1ZHCCI44

# Trin bootstrap nodes
enr:-Jy4QIs2pCyiKna9YWnAF0zgf7bT0GzlAGoF8MEKFJOExmtofBIqzm71zDvmzRiiLkxaEJcs_Amr7XIhLI74k1rtlXICY5Z0IDAuMS4xLWFscGhhLjEtMTEwZjUwgmlkgnY0gmlwhKEjVaWJc2VjcDI1NmsxoQLSC_nhF1iRwsCw0n3J4jRjqoaRxtKgsEe5a-Dz7y0JloN1ZHCCIyg
enr:-Jy4QKSLYMpku9F0Ebk84zhIhwTkmn80UnYvE4Z4sOcLukASIcofrGdXVLAUPVHh8oPCfnEOZm1W1gcAxB9kV2FJywkCY5Z0IDAuMS4xLWFscGhhLjEtMTEwZjUwgmlkgnY0gmlwhJO2oc6Jc2VjcDI1NmsxoQLMSGVlxXL62N3sPtaV-n_TbZFCEM5AR7RDyIwOadbQK4N1ZHCCIyg
enr:-Jy4QH4_H4cW--ejWDl_W7ngXw2m31MM2GT8_1ZgECnfWxMzZTiZKvHDgkmwUS_l2aqHHU54Q7hcFSPz6VGzkUjOqkcCY5Z0IDAuMS4xLWFscGhhLjEtMTEwZjUwgmlkgnY0gmlwhJ31OTWJc2VjcDI1NmsxoQPC0eRkjRajDiETr_DRa5N5VJRm-ttCWDoO1QAMMCg5pIN1ZHCCIyg

# Ultralight bootstrap nodes
enr:-IS4QFV_wTNknw7qiCGAbHf6LxB-xPQCktyrCEZX-b-7PikMOIKkBg-frHRBkfwhI3XaYo_T-HxBYmOOQGNwThkBBHYDgmlkgnY0gmlwhKRc9_OJc2VjcDI1NmsxoQKHPt5CQ0D66ueTtSUqwGjfhscU_LiwS28QvJ0GgJFd-YN1ZHCCE4k
enr:-IS4QDpUz2hQBNt0DECFm8Zy58Hi59PF_7sw780X3qA0vzJEB2IEd5RtVdPUYZUbeg4f0LMradgwpyIhYUeSxz2Tfa8DgmlkgnY0gmlwhKRc9_OJc2VjcDI1NmsxoQJd4NAVKOXfbdxyjSOUJzmA4rjtg43EDeEJu1f8YRhb_4N1ZHCCE4o
enr:-IS4QGG6moBhLW1oXz84NaKEHaRcim64qzFn1hAG80yQyVGNLoKqzJe887kEjthr7rJCNlt6vdVMKMNoUC9OCeNK-EMDgmlkgnY0gmlwhKRc9-KJc2VjcDI1NmsxoQLJhXByb3LmxHQaqgLDtIGUmpANXaBbFw3ybZWzGqb9-IN1ZHCCE4k
enr:-IS4QA5hpJikeDFf1DD1_Le6_ylgrLGpdwn3SRaneGu9hY2HUI7peHep0f28UUMzbC0PvlWjN8zSfnqMG07WVcCyBhADgmlkgnY0gmlwhKRc9-KJc2VjcDI1NmsxoQJMpHmGj1xSP1O-Mffk_jYIHVcg6tY5_CjmWVg1gJEsPIN1ZHCCE4o

@@ -29,15 +29,9 @@ proc loadCompileTimeBootstrapNodes(path: string): seq[string] =
    macros.error "Failed to load bootstrap nodes metadata at '" & path & "': " & err.msg

const
  # TODO: Change this from our local repo to an eth-client repo if/when this
  # gets created for the Portal networks.
  portalNetworksDir = currentSourcePath.parentDir.replace('\\', '/') / "network_data"

  # TODO: Using a repo for test vectors for now, as it is something to test
  # against, but at the same time could also go in a network metadata repo.
  portalTestDir =
  portalConfigDir =
    currentSourcePath.parentDir.parentDir.replace('\\', '/') / "vendor" /
    "portal-spec-tests" / "tests"
    "portal-mainnet" / "config"
  # Note:
  # For now it gets called testnet0 but this Portal network serves Eth1 mainnet
  # data. Giving the actual Portal (test)networks different names might not be

@@ -48,16 +42,12 @@ const
  # TODO: It would be nice to be able to use `loadBootstrapFile` here, but that
  # doesn't work at compile time. The main issue seems to be the usage of
  # rlp.rawData() in the enr code.
  testnet0BootstrapNodes* = loadCompileTimeBootstrapNodes(
    portalNetworksDir / "testnet0" / "bootstrap_nodes.txt"
  )
  testnet0BootstrapNodes* =
    loadCompileTimeBootstrapNodes(portalConfigDir / "bootstrap_nodes.txt")

  finishedAccumulatorSSZ* = slurp(
    portalTestDir / "mainnet" / "history" / "accumulator" / "finished_accumulator.ssz"
  )
  finishedAccumulatorSSZ* = slurp(portalConfigDir / "finished_accumulator.ssz")

  historicalRootsSSZ* =
    slurp(portalTestDir / "mainnet" / "beacon_chain" / "historical_roots.ssz")
  historicalRootsSSZ* = slurp(portalConfigDir / "historical_roots.ssz")

func loadAccumulator*(): FinishedAccumulator =
  try:

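The slurped constants above are raw SSZ blobs taken from the vendored portal-mainnet config and baked in at compile time; a consumer decodes them with the same SSZ API the tests in this diff use. The following is a hypothetical sketch only (the body of `loadAccumulator` is cut off in this view); the module name `ssz_serialization`, the relative import paths, the proc name, and the `SerializationError` handling are assumptions:

```nim
# Hypothetical consumer of the baked-in config data; not part of this commit.
import ssz_serialization               # assumption: provides SSZ.decode as used in the tests below
import ./network_metadata              # assumption: path; provides finishedAccumulatorSSZ
import ./network/history/accumulator   # assumption: path; provides FinishedAccumulator

proc decodeFinishedAccumulator(): FinishedAccumulator =
  # Decode the compile-time SSZ blob into the typed accumulator.
  try:
    SSZ.decode(finishedAccumulatorSSZ, FinishedAccumulator)
  except SerializationError as err:
    raiseAssert "Invalid baked-in accumulator: " & err.msg
```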
@@ -16,7 +16,8 @@ import
  ../../../network_metadata,
  ../../../eth_data/[history_data_json_store, history_data_ssz_e2s],
  ../../../network/history/[history_content, history_network, accumulator],
  ../../test_history_util
  ../../test_history_util,
  ../../test_yaml_utils

suite "History Content Encodings":
  test "HeaderWithProof Building and Encoding":

@@ -79,83 +80,73 @@ suite "History Content Encodings":

  test "HeaderWithProof Encoding/Decoding and Verification":
    const dataFile =
      "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.json"
      "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.yaml"

    let accumulator = loadAccumulator()
    let
      content = YamlPortalContent.loadFromYaml(dataFile).valueOr:
        raiseAssert "Invalid data file: " & error
      accumulator = loadAccumulator()
      contentKeyEncoded = content.content_key.hexToSeqByte()
      contentValueEncoded = content.content_value.hexToSeqByte()

    let res = readJsonType(dataFile, JsonPortalContentTable)
    # Decode content
    let
      contentKey = decodeSsz(contentKeyEncoded, ContentKey)
      contentValue = decodeSsz(contentValueEncoded, BlockHeaderWithProof)

    check:
      contentKey.isOk()
      contentValue.isOk()

    let blockHeaderWithProof = contentValue.get()

    let res = decodeRlp(blockHeaderWithProof.header.asSeq(), BlockHeader)
    check res.isOk()
    let content = res.get()
    let header = res.get()

    for k, v in content:
      # TODO: strange assignment failure when using try/except ValueError
      # for the hexToSeqByte() here.
      let
        contentKeyEncoded = v.content_key.hexToSeqByte()
        contentValueEncoded = v.content_value.hexToSeqByte()
    check accumulator.verifyHeader(header, blockHeaderWithProof.proof).isOk()

      # Decode content
      let
        contentKey = decodeSsz(contentKeyEncoded, ContentKey)
        contentValue = decodeSsz(contentValueEncoded, BlockHeaderWithProof)

      check:
        contentKey.isOk()
        contentValue.isOk()

      let blockHeaderWithProof = contentValue.get()

      let res = decodeRlp(blockHeaderWithProof.header.asSeq(), BlockHeader)
      check res.isOk()
      let header = res.get()

      check accumulator.verifyHeader(header, blockHeaderWithProof.proof).isOk()

      # Encode content
      check:
        SSZ.encode(blockHeaderWithProof) == contentValueEncoded
        encode(contentKey.get()).asSeq() == contentKeyEncoded
    # Encode content
    check:
      SSZ.encode(blockHeaderWithProof) == contentValueEncoded
      encode(contentKey.get()).asSeq() == contentKeyEncoded

  test "PortalBlockBody (Legacy) Encoding/Decoding and Verification":
    const
      dataFile = "./vendor/portal-spec-tests/tests/mainnet/history/bodies/14764013.json"
      headersWithProofFile =
        "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.json"
      dataFile = "./vendor/portal-spec-tests/tests/mainnet/history/bodies/14764013.yaml"
      headersDataFile =
        "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.yaml"

    let res = readJsonType(dataFile, JsonPortalContentTable)
    check res.isOk()
    let content = res.get()
    let
      content = YamlPortalContent.loadFromYaml(dataFile).valueOr:
        raiseAssert "Invalid data file: " & error
      headerContent = YamlPortalContent.loadFromYaml(headersDataFile).valueOr:
        raiseAssert "Invalid data file: " & error

    let headersRes = readJsonType(headersWithProofFile, JsonPortalContentTable)
    check headersRes.isOk()
    let headers = headersRes.get()
      contentKeyEncoded = content.content_key.hexToSeqByte()
      contentValueEncoded = content.content_value.hexToSeqByte()

    for k, v in content:
      let
        contentKeyEncoded = v.content_key.hexToSeqByte()
        contentValueEncoded = v.content_value.hexToSeqByte()
    # Get the header for validation of body
    let
      headerEncoded = headerContent.content_value.hexToSeqByte()
      headerWithProofRes = decodeSsz(headerEncoded, BlockHeaderWithProof)
    check headerWithProofRes.isOk()
    let headerRes = decodeRlp(headerWithProofRes.get().header.asSeq(), BlockHeader)
    check headerRes.isOk()
    let header = headerRes.get()

      # Get the header for validation of body
      let
        headerEncoded = headers[k].content_value.hexToSeqByte()
        headerWithProofRes = decodeSsz(headerEncoded, BlockHeaderWithProof)
      check headerWithProofRes.isOk()
      let headerRes = decodeRlp(headerWithProofRes.get().header.asSeq(), BlockHeader)
      check headerRes.isOk()
      let header = headerRes.get()
    # Decode content key
    let contentKey = decodeSsz(contentKeyEncoded, ContentKey)
    check contentKey.isOk()

      # Decode content key
      let contentKey = decodeSsz(contentKeyEncoded, ContentKey)
      check contentKey.isOk()
    # Decode (SSZ + RLP decode step) and validate block body
    let contentValue = validateBlockBodyBytes(contentValueEncoded, header)
    check contentValue.isOk()

      # Decode (SSZ + RLP decode step) and validate block body
      let contentValue = validateBlockBodyBytes(contentValueEncoded, header)
      check contentValue.isOk()

      # Encode content and content key
      check:
        encode(contentValue.get()) == contentValueEncoded
        encode(contentKey.get()).asSeq() == contentKeyEncoded
    # Encode content and content key
    check:
      encode(contentValue.get()) == contentValueEncoded
      encode(contentKey.get()).asSeq() == contentKeyEncoded

  test "PortalBlockBody (Shanghai) Encoding/Decoding":
    # TODO: We don't have the header (without proof) ready here so cannot do

@@ -188,41 +179,37 @@ suite "History Content Encodings":
  test "Receipts Encoding/Decoding and Verification":
    const
      dataFile =
        "./vendor/portal-spec-tests/tests/mainnet/history/receipts/14764013.json"
      headersWithProofFile =
        "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.json"
        "./vendor/portal-spec-tests/tests/mainnet/history/receipts/14764013.yaml"
      headersDataFile =
        "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.yaml"

    let res = readJsonType(dataFile, JsonPortalContentTable)
    check res.isOk()
    let content = res.get()
    let
      content = YamlPortalContent.loadFromYaml(dataFile).valueOr:
        raiseAssert "Invalid data file: " & error
      headerContent = YamlPortalContent.loadFromYaml(headersDataFile).valueOr:
        raiseAssert "Invalid data file: " & error

    let headersRes = readJsonType(headersWithProofFile, JsonPortalContentTable)
    check headersRes.isOk()
    let headers = headersRes.get()
      contentKeyEncoded = content.content_key.hexToSeqByte()
      contentValueEncoded = content.content_value.hexToSeqByte()

    for k, v in content:
      let
        contentKeyEncoded = v.content_key.hexToSeqByte()
        contentValueEncoded = v.content_value.hexToSeqByte()
    # Get the header for validation of receipts
    let
      headerEncoded = headerContent.content_value.hexToSeqByte()
      headerWithProofRes = decodeSsz(headerEncoded, BlockHeaderWithProof)
    check headerWithProofRes.isOk()
    let headerRes = decodeRlp(headerWithProofRes.get().header.asSeq(), BlockHeader)
    check headerRes.isOk()
    let header = headerRes.get()

      # Get the header for validation of receipts
      let
        headerEncoded = headers[k].content_value.hexToSeqByte()
        headerWithProofRes = decodeSsz(headerEncoded, BlockHeaderWithProof)
      check headerWithProofRes.isOk()
      let headerRes = decodeRlp(headerWithProofRes.get().header.asSeq(), BlockHeader)
      check headerRes.isOk()
      let header = headerRes.get()
    # Decode content key
    let contentKey = decodeSsz(contentKeyEncoded, ContentKey)
    check contentKey.isOk()

      # Decode content key
      let contentKey = decodeSsz(contentKeyEncoded, ContentKey)
      check contentKey.isOk()
    # Decode (SSZ + RLP decode step) and validate receipts
    let contentValue = validateReceiptsBytes(contentValueEncoded, header.receiptRoot)
    check contentValue.isOk()

      # Decode (SSZ + RLP decode step) and validate receipts
      let contentValue = validateReceiptsBytes(contentValueEncoded, header.receiptRoot)
      check contentValue.isOk()

      # Encode content
      check:
        encode(contentValue.get()) == contentValueEncoded
        encode(contentKey.get()).asSeq() == contentKeyEncoded
    # Encode content
    check:
      encode(contentValue.get()) == contentValueEncoded
      encode(contentKey.get()).asSeq() == contentKeyEncoded

@@ -1,51 +0,0 @@
# Fluffy
# Copyright (c) 2023-2024 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

type JsonBlockInfo* = object
  number*: uint64
  block_hash*: string
  state_root*: string

type JsonAccount* = object
  nonce*: string
  balance*: string
  storage_hash*: string
  code_hash*: string

type JsonBlock* = object
  `block`*: JsonBlockInfo
  address*: string
  account*: JsonAccount
  storage_slot*: string
  storage_value*: string
  account_proof*: seq[string]
  storage_proof*: seq[string]
  bytecode*: string

type JsonAccountTrieNode* = object
  content_key*: string
  content_id*: string
  content_value_offer*: string
  content_value_retrieval*: string

type JsonContractStorageTtrieNode* = object
  content_key*: string
  content_id*: string
  content_value_offer*: string
  content_value_retrieval*: string

type JsonContractBytecode* = object
  content_key*: string
  content_id*: string
  content_value_offer*: string
  content_value_retrieval*: string

type JsonGossipKVPair* = object
  content_key*: string
  content_value*: string

type JsonRecursiveGossip* = seq[JsonGossipKVPair]

@@ -6,142 +6,166 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.

import
  std/[sugar, sequtils],
  testutils/unittests,
  stew/[byteutils, io2],
  eth/keys,
  ./helpers,
  std/[os, sugar, sequtils],
  unittest2,
  stew/byteutils,
  ../../network/state/state_content,
  ../../eth_data/history_data_json_store
  ../test_yaml_utils

const testVectorDir = "./vendor/portal-spec-tests/tests/mainnet/state/"
const testVectorDir = "./vendor/portal-spec-tests/tests/mainnet/state/serialization/"

suite "State Content Values":
  test "Encode/decode AccountTrieNodeOffer":
    const file = testVectorDir / "account_trie_node_with_proof.yaml"

    type YamlAccountTrieNodeWithProof = object
      proof: seq[string]
      block_hash: string
      content_value: string

    let
      blockContent = readJsonType(testVectorDir & "block.json", JsonBlock).valueOr:
      testCase = YamlAccountTrieNodeWithProof.loadFromYaml(file).valueOr:
        raiseAssert "Cannot read test vector: " & error
      accountTrieNode = readJsonType(
        testVectorDir & "account_trie_node.json", JsonAccountTrieNode
      ).valueOr:
        raiseAssert "Cannot read test vector: " & error
      blockHash = BlockHash.fromHex(blockContent.`block`.block_hash)
      proof = TrieProof.init(
        blockContent.account_proof.map((hex) => TrieNode.init(hex.hexToSeqByte()))
      )

      blockHash = BlockHash.fromHex(testCase.block_hash)
      proof =
        TrieProof.init(testCase.proof.map((hex) => TrieNode.init(hex.hexToSeqByte())))
      accountTrieNodeOffer = AccountTrieNodeOffer(blockHash: blockHash, proof: proof)

      encoded = SSZ.encode(accountTrieNodeOffer)
      expected = accountTrieNode.content_value_offer.hexToSeqByte()
      expected = testCase.content_value.hexToSeqByte()
      decoded = SSZ.decode(encoded, AccountTrieNodeOffer)

    check encoded == expected
    check decoded == accountTrieNodeOffer
    check:
      encoded == expected
      decoded == accountTrieNodeOffer

  test "Encode/decode AccountTrieNodeRetrieval":
    const file = testVectorDir / "trie_node.yaml"

    type YamlTrieNode = object
      trie_node: string
      content_value: string

    let
      blockContent = readJsonType(testVectorDir & "block.json", JsonBlock).valueOr:
        raiseAssert "Cannot read test vector: " & error
      accountTrieNode = readJsonType(
        testVectorDir & "account_trie_node.json", JsonAccountTrieNode
      ).valueOr:
      testCase = YamlTrieNode.loadFromYaml(file).valueOr:
        raiseAssert "Cannot read test vector: " & error

      node = TrieNode.init(blockContent.account_proof[^1].hexToSeqByte())
      node = TrieNode.init(testCase.trie_node.hexToSeqByte())
      accountTrieNodeRetrieval = AccountTrieNodeRetrieval(node: node)

      encoded = SSZ.encode(accountTrieNodeRetrieval)
      expected = accountTrieNode.content_value_retrieval.hexToSeqByte()
      expected = testCase.content_value.hexToSeqByte()
      decoded = SSZ.decode(encoded, AccountTrieNodeRetrieval)

    check encoded == expected
    check decoded == accountTrieNodeRetrieval
    check:
      encoded == expected
      decoded == accountTrieNodeRetrieval

  test "Encode/decode ContractTrieNodeOffer":
    const file = testVectorDir / "contract_storage_trie_node_with_proof.yaml"

    type YamlContractStorageTrieNodeWithProof = object
      storage_proof: seq[string]
      account_proof: seq[string]
      block_hash: string
      content_value: string

    let
      blockContent = readJsonType(testVectorDir & "block.json", JsonBlock).valueOr:
        raiseAssert "Cannot read test vector: " & error
      contractStorageTrieNode = readJsonType(
        testVectorDir & "contract_storage_trie_node.json", JsonContractStorageTtrieNode
      ).valueOr:
      testCase = YamlContractStorageTrieNodeWithProof.loadFromYaml(file).valueOr:
        raiseAssert "Cannot read test vector: " & error

      blockHash = BlockHash.fromHex(blockContent.`block`.block_hash)
      blockHash = BlockHash.fromHex(testCase.block_hash)
      storageProof = TrieProof.init(
        blockContent.storage_proof.map((hex) => TrieNode.init(hex.hexToSeqByte()))
        testCase.storage_proof.map((hex) => TrieNode.init(hex.hexToSeqByte()))
      )
      accountProof = TrieProof.init(
        blockContent.account_proof.map((hex) => TrieNode.init(hex.hexToSeqByte()))
        testCase.account_proof.map((hex) => TrieNode.init(hex.hexToSeqByte()))
      )
      contractTrieNodeOffer = ContractTrieNodeOffer(
        blockHash: blockHash, storage_proof: storageProof, account_proof: accountProof
      )

      encoded = SSZ.encode(contractTrieNodeOffer)
      expected = contractStorageTrieNode.content_value_offer.hexToSeqByte()
      expected = testCase.content_value.hexToSeqByte()
      decoded = SSZ.decode(encoded, ContractTrieNodeOffer)

    check encoded == expected
    check decoded == contractTrieNodeOffer
    check:
      encoded == expected
      decoded == contractTrieNodeOffer

  test "Encode/decode ContractTrieNodeRetrieval":
    # TODO: This is practically the same as AccountTrieNodeRetrieval test,
    # but we use different objects for it. Might want to adjust this to just
    # 1 basic TrieNode type.
    const file = testVectorDir / "trie_node.yaml"

    type YamlTrieNode = object
      trie_node: string
      content_value: string

    let
      blockContent = readJsonType(testVectorDir & "block.json", JsonBlock).valueOr:
        raiseAssert "Cannot read test vector: " & error
      contractStorageTrieNode = readJsonType(
        testVectorDir & "contract_storage_trie_node.json", JsonContractStorageTtrieNode
      ).valueOr:
      testCase = YamlTrieNode.loadFromYaml(file).valueOr:
        raiseAssert "Cannot read test vector: " & error

      node = TrieNode.init(blockContent.storage_proof[^1].hexToSeqByte())
      node = TrieNode.init(testCase.trie_node.hexToSeqByte())
      contractTrieNodeRetrieval = ContractTrieNodeRetrieval(node: node)

      encoded = SSZ.encode(contractTrieNodeRetrieval)
      expected = contractStorageTrieNode.content_value_retrieval.hexToSeqByte()
      expected = testCase.content_value.hexToSeqByte()
      decoded = SSZ.decode(encoded, ContractTrieNodeRetrieval)

    check encoded == expected
    check decoded == contractTrieNodeRetrieval
    check:
      encoded == expected
      decoded == contractTrieNodeRetrieval

  test "Encode/decode ContractCodeOffer":
    const file = testVectorDir / "contract_bytecode_with_proof.yaml"

    type YamlContractBytecodeWithProof = object
      bytecode: string
      account_proof: seq[string]
      block_hash: string
      content_value: string

    let
      blockContent = readJsonType(testVectorDir & "block.json", JsonBlock).valueOr:
        raiseAssert "Cannot read test vector: " & error
      contractBytecode = readJsonType(
        testVectorDir & "contract_bytecode.json", JsonContractBytecode
      ).valueOr:
      testCase = YamlContractBytecodeWithProof.loadFromYaml(file).valueOr:
        raiseAssert "Cannot read test vector: " & error

      code = Bytecode.init(blockContent.bytecode.hexToSeqByte())
      blockHash = BlockHash.fromHex(blockContent.`block`.block_hash)
      code = Bytecode.init(testCase.bytecode.hexToSeqByte())
      blockHash = BlockHash.fromHex(testCase.block_hash)
      accountProof = TrieProof.init(
        blockContent.account_proof.map((hex) => TrieNode.init(hex.hexToSeqByte()))
        testCase.account_proof.map((hex) => TrieNode.init(hex.hexToSeqByte()))
      )
      contractCodeOffer =
        ContractCodeOffer(code: code, blockHash: blockHash, accountProof: accountProof)

      encoded = SSZ.encode(contractCodeOffer)
      expected = contractBytecode.content_value_offer.hexToSeqByte()
      expected = testCase.content_value.hexToSeqByte()
      decoded = SSZ.decode(encoded, ContractCodeOffer)

    check encoded == expected
    check decoded == contractCodeOffer
    check:
      encoded == expected
      decoded == contractCodeOffer

  test "Encode/decode ContractCodeRetrieval":
    const file = testVectorDir / "contract_bytecode.yaml"

    type YamlContractBytecode = object
      bytecode: string
      content_value: string

    let
      blockContent = readJsonType(testVectorDir & "block.json", JsonBlock).valueOr:
        raiseAssert "Cannot read test vector: " & error
      contractBytecode = readJsonType(
        testVectorDir & "contract_bytecode.json", JsonContractBytecode
      ).valueOr:
      testCase = YamlContractBytecode.loadFromYaml(file).valueOr:
        raiseAssert "Cannot read test vector: " & error

      code = Bytecode.init(blockContent.bytecode.hexToSeqByte())
      code = Bytecode.init(testCase.bytecode.hexToSeqByte())
      contractCodeRetrieval = ContractCodeRetrieval(code: code)

      encoded = SSZ.encode(contractCodeRetrieval)
      expected = contractBytecode.content_value_retrieval.hexToSeqByte()
      expected = testCase.content_value.hexToSeqByte()
      decoded = SSZ.decode(encoded, ContractCodeRetrieval)

    check encoded == expected
    check decoded == contractCodeRetrieval
    check:
      encoded == expected
      decoded == contractCodeRetrieval

@@ -6,29 +6,37 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.

import
  stew/[byteutils, results],
  testutils/unittests,
  std/os,
  chronos,
  testutils/unittests,
  stew/[byteutils, results],
  eth/p2p/discoveryv5/protocol as discv5_protocol,
  eth/p2p/discoveryv5/routing_table,
  ./helpers,
  ../../network/wire/[portal_protocol, portal_stream],
  ../../network/state/[state_content, state_network],
  ../../database/content_db,
  .././test_helpers,
  ../../eth_data/history_data_json_store
  ../test_yaml_utils

const testVectorDir = "./vendor/portal-spec-tests/tests/mainnet/state/"
const testVectorDir = "./vendor/portal-spec-tests/tests/mainnet/state/validation/"

procSuite "State Network Gossip":
  let rng = newRng()

  asyncTest "Test Gossip of Account Trie Node Offer":
    const file = testVectorDir / "recursive_gossip.yaml"

    type
      YamlOffer = object
        content_key: string
        content_value: string

      YamlRecursiveGossip = seq[seq[YamlOffer]]

    let
      recursiveGossipSteps = readJsonType(
        testVectorDir & "recursive_gossip.json", JsonRecursiveGossip
      ).valueOr:
      testCase = YamlRecursiveGossip.loadFromYaml(file).valueOr:
        raiseAssert "Cannot read test vector: " & error

      recursiveGossipSteps = testCase[0]
      numOfClients = recursiveGossipSteps.len() - 1

    var clients: seq[StateNetwork]

@@ -0,0 +1,41 @@
# fluffy
# Copyright (c) 2024 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [].}

import std/streams, yaml, results

export yaml

type YamlPortalContent* = object
  content_key*: string
  content_value*: string

proc loadFromYaml*(T: type, file: string): Result[T, string] =
  let s =
    try:
      openFileStream(file)
    except IOError as e:
      return err(e.msg)
  defer:
    try:
      close(s)
    except Exception as e:
      raiseAssert(e.msg)
  var res: T
  try:
    {.gcsafe.}:
      yaml.load(s, res)
  except YamlConstructionError as e:
    return err(e.msg)
  except YamlParserError as e:
    return err(e.msg)
  except OSError as e:
    return err(e.msg)
  except IOError as e:
    return err(e.msg)
  ok(res)

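Because `loadFromYaml` above is generic over its target type, the state tests in this commit declare small ad-hoc objects matching each vector's fields. A minimal sketch of that usage, reusing the `trie_node.yaml` path and field names shown in the serialization tests; the relative import path and the final `echo` are illustrative assumptions:

```nim
# Hypothetical standalone usage of the generic loader added above.
import results               # valueOr / err
import ./test_yaml_utils     # loadFromYaml; path is illustrative

const file =
  "./vendor/portal-spec-tests/tests/mainnet/state/serialization/trie_node.yaml"

# Ad-hoc type mirroring this particular vector's fields, as the state tests do.
type YamlTrieNode = object
  trie_node: string
  content_value: string

let testCase = YamlTrieNode.loadFromYaml(file).valueOr:
  raiseAssert "Cannot read test vector: " & error

echo "content_value hex length: ", testCase.content_value.len
```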
@@ -1,13 +1,13 @@
#!/usr/bin/env bash

# Copyright (c) 2023 Status Research & Development GmbH. Licensed under
# Copyright (c) 2023-2024 Status Research & Development GmbH. Licensed under
# either of:
# - Apache License, version 2.0
# - MIT license
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

excluded_files="config.yaml"
excluded_files="config.yaml|.gitmodules"
excluded_extensions="json|md|png|txt|toml|gz|key|rlp"

current_year=$(date +"%Y")

@@ -0,0 +1 @@
Subproject commit 189844a72b90ba7ade864f997280809efcb21d0a

@@ -0,0 +1 @@
Subproject commit 0f18d18c1a1ce75d751dee3a440a5a5fcc0ac89e

@@ -1 +1 @@
Subproject commit 1b3ad4a94c4282023428792694f375d30fac84d6
Subproject commit 954f7d0eb2950a2131048404a1a4ce476bb64657