Fluffy state offer validation (#2170)

* Implement offer validation interface.
* Implement high-level offer validation steps.
* Complete state validation tests.
* Update validation to use result type.
* Update state proof verification tests to test offer verification.
* Query history network to get state root by block hash.
* Fix state network test and remove usage of CoreDb.
* Fix state network gossip test and apply PR comment updates.
* Add trieproof state validation tests and fix for short nodes.
parent 8767bbd10a
commit 2891b9aa7d
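In outline, the commit makes offered state content earn its way in: an offer's block hash is resolved to a verified header through the history network, and the offered account trie node, contract trie node, or bytecode is checked against that header's state root before the content is stored or gossiped. A minimal sketch of the resulting flow, using proc names that appear in the diffs below (the surrounding setup and error handling are elided; this is an illustration, not the literal code):

  # Sketch only: decodeKV and validateContent appear in the diffs below;
  # node construction and content delivery are assumed.
  proc handleOfferedContent(
      n: StateNetwork, contentKey: ByteList, contentValue: seq[byte]
  ) {.async.} =
    let (decodedKey, decodedValue) = decodeKV(contentKey, contentValue).valueOr:
      return # undecodable offers are dropped

    # validateContent resolves the offer's block hash to a verified header
    # via the history network and validates the proof against its state root.
    (await n.validateContent(decodedKey, decodedValue)).isOkOr:
      return # offers that fail proof validation are dropped

    # only validated content reaches storage and neighborhood gossip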
@@ -191,20 +191,6 @@ proc run(config: PortalConf) {.raises: [CatchableError].} =
     )
     streamManager = StreamManager.new(d)

-    stateNetwork =
-      if Network.state in config.networks:
-        Opt.some(
-          StateNetwork.new(
-            d,
-            db,
-            streamManager,
-            bootstrapRecords = bootstrapRecords,
-            portalConfig = portalConfig,
-          )
-        )
-      else:
-        Opt.none(StateNetwork)
-
     accumulator =
       # Building an accumulator from header epoch files takes > 2m30s and is
       # thus not really a viable option at start-up.
@@ -234,6 +220,21 @@ proc run(config: PortalConf) {.raises: [CatchableError].} =
       else:
         Opt.none(HistoryNetwork)

+    stateNetwork =
+      if Network.state in config.networks:
+        Opt.some(
+          StateNetwork.new(
+            d,
+            db,
+            streamManager,
+            bootstrapRecords = bootstrapRecords,
+            portalConfig = portalConfig,
+            historyNetwork = historyNetwork,
+          )
+        )
+      else:
+        Opt.none(StateNetwork)
+
     beaconLightClient =
       # TODO: Currently disabled by default as it is not sufficiently polished.
       # Eventually this should be always-on functionality.

@@ -1,22 +0,0 @@
-# Nimbus
-# Copyright (c) 2023-2024 Status Research & Development GmbH
-# Licensed and distributed under either of
-#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
-#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
-# at your option. This file may not be copied, modified, or distributed except according to those terms.
-
-{.push raises: [].}
-
-import stint, eth/[common, trie], ./state_proof_types
-
-proc generateAccountProof*(
-    state: AccountState, address: EthAddress
-): AccountProof {.raises: [RlpError].} =
-  let key = keccakHash(address).data
-  state.getBranch(key).AccountProof
-
-proc generateStorageProof*(
-    state: StorageState, slotKey: UInt256
-): StorageProof {.raises: [RlpError].} =
-  let key = keccakHash(toBytesBE(slotKey)).data
-  state.getBranch(key).StorageProof

@@ -1,30 +0,0 @@
-# Nimbus
-# Copyright (c) 2023-2024 Status Research & Development GmbH
-# Licensed and distributed under either of
-#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
-#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
-# at your option. This file may not be copied, modified, or distributed except according to those terms.
-
-{.push raises: [].}
-
-import eth/[common, trie]
-
-type
-  AccountState* = distinct HexaryTrie
-  StorageState* = distinct HexaryTrie
-
-  MptProof* = seq[seq[byte]]
-  AccountProof* = distinct MptProof
-  StorageProof* = distinct MptProof
-
-proc getBranch*(self: AccountState, key: openArray[byte]): seq[seq[byte]] {.borrow.}
-
-proc rootHash*(self: AccountState): KeccakHash {.borrow.}
-
-proc getBranch*(self: StorageState, key: openArray[byte]): seq[seq[byte]] {.borrow.}
-
-proc rootHash*(self: StorageState): KeccakHash {.borrow.}
-
-proc len*(self: AccountProof): int {.borrow.}
-
-proc len*(self: StorageProof): int {.borrow.}

@@ -1,69 +0,0 @@
-# Nimbus
-# Copyright (c) 2023-2024 Status Research & Development GmbH
-# Licensed and distributed under either of
-#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
-#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
-# at your option. This file may not be copied, modified, or distributed except according to those terms.
-
-{.push raises: [].}
-
-import
-  std/sequtils,
-  stint,
-  eth/[common, rlp, trie/hexary_proof_verification],
-  stew/results,
-  ./state_proof_types
-
-export results
-
-proc verifyAccount*(
-    trustedStateRoot: KeccakHash,
-    address: EthAddress,
-    account: Account,
-    proof: AccountProof,
-): Result[void, string] =
-  if proof.len() == 0:
-    return err("proof is empty")
-
-  let key = toSeq(keccakHash(address).data)
-  let value = rlp.encode(account)
-
-  let proofResult = verifyMptProof(proof.MptProof, trustedStateRoot, key, value)
-
-  case proofResult.kind
-  of ValidProof:
-    ok()
-  of MissingKey:
-    err("missing key")
-  of InvalidProof:
-    err(proofResult.errorMsg)
-
-proc verifyContractStorageSlot*(
-    trustedStorageRoot: KeccakHash,
-    slotKey: UInt256,
-    slotValue: UInt256,
-    proof: StorageProof,
-): Result[void, string] =
-  if proof.len() == 0:
-    return err("proof is empty")
-
-  let key = toSeq(keccakHash(toBytesBE(slotKey)).data)
-  let value = rlp.encode(slotValue)
-
-  let proofResult = verifyMptProof(proof.MptProof, trustedStorageRoot, key, value)
-
-  case proofResult.kind
-  of ValidProof:
-    ok()
-  of MissingKey:
-    err("missing key")
-  of InvalidProof:
-    err(proofResult.errorMsg)
-
-func verifyContractBytecode*(
-    trustedCodeHash: KeccakHash, bytecode: openArray[byte]
-): Result[void, string] =
-  if trustedCodeHash == keccakHash(bytecode):
-    ok()
-  else:
-    err("hash of bytecode doesn't match the expected code hash")

@@ -12,7 +12,7 @@

 import
   nimcrypto/[hash, sha2, keccak],
-  stew/results,
+  results,
   stint,
   eth/common/eth_types,
   ssz_serialization,
@@ -208,16 +208,35 @@ func encode*(content: RetrievalContentValue): seq[byte] =
   of contractCode:
     SSZ.encode(content.contractCode)

-func packNibbles*(nibbles: seq[byte]): Nibbles =
-  doAssert(nibbles.len() <= MAX_UNPACKED_NIBBLES_LEN, "Can't pack more than 64 nibbles")
+func init*(T: type Nibbles, packed: openArray[byte], isEven: bool): T =
+  doAssert(packed.len() <= MAX_PACKED_NIBBLES_LEN)
+
+  var output = newSeqOfCap[byte](packed.len() + 1)
+  if isEven:
+    output.add(0x00)
+  else:
+    doAssert(packed.len() > 0)
+    # set the first nibble to 1 and copy the second nibble from the input
+    output.add((packed[0] and 0x0F) or 0x10)
+
+  let startIdx = if isEven: 0 else: 1
+  for i in startIdx ..< packed.len():
+    output.add(packed[i])
+
+  Nibbles(output)
+
+func packNibbles*(unpacked: openArray[byte]): Nibbles =
+  doAssert(
+    unpacked.len() <= MAX_UNPACKED_NIBBLES_LEN, "Can't pack more than 64 nibbles"
+  )

-  if nibbles.len() == 0:
+  if unpacked.len() == 0:
     return Nibbles(@[byte(0x00)])

-  let isEvenLength = nibbles.len() mod 2 == 0
+  let isEvenLength = unpacked.len() mod 2 == 0

   var
-    output = newSeqOfCap[byte](nibbles.len() div 2 + 1)
+    output = newSeqOfCap[byte](unpacked.len() div 2 + 1)
     highNibble = isEvenLength
     currentByte: byte = 0

@@ -226,7 +245,7 @@ func packNibbles*(nibbles: seq[byte]): Nibbles =
   else:
     currentByte = 0x10

-  for i, nibble in nibbles:
+  for i, nibble in unpacked:
     if highNibble:
       currentByte = nibble shl 4
     else:

@@ -236,12 +255,12 @@ func packNibbles*(nibbles: seq[byte]): Nibbles =

   Nibbles(output)

-func unpackNibbles*(nibbles: Nibbles): seq[byte] =
-  doAssert(nibbles.len() <= MAX_PACKED_NIBBLES_LEN, "Packed nibbles length is too long")
+func unpackNibbles*(packed: Nibbles): seq[byte] =
+  doAssert(packed.len() <= MAX_PACKED_NIBBLES_LEN, "Packed nibbles length is too long")

-  var output = newSeqOfCap[byte](nibbles.len() * 2)
+  var output = newSeqOfCap[byte](packed.len() * 2)

-  for i, pair in nibbles:
+  for i, pair in packed:
     if i == 0 and pair == 0x00:
       continue
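For orientation, the packed encoding implemented above prefixes an even-length nibble sequence with a 0x00 byte and folds the first nibble of an odd-length sequence into a 0x1X prefix byte, which is also what the new Nibbles.init assumes. A small round-trip example (illustrative values, not part of the diff):

  # Illustrative only: even-length paths gain a 0x00 prefix byte, odd-length
  # paths fold their first nibble into a 0x1X prefix byte.
  let evenPath = packNibbles(@[byte 0x0a, 0x0b, 0x0c, 0x0d]) # -> 0x00 0xab 0xcd
  let oddPath = packNibbles(@[byte 0x0a, 0x0b, 0x0c])        # -> 0x1a 0xbc

  doAssert unpackNibbles(evenPath) == @[byte 0x0a, 0x0b, 0x0c, 0x0d]
  doAssert unpackNibbles(oddPath) == @[byte 0x0a, 0x0b, 0x0c]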
@@ -6,17 +6,20 @@
 # at your option. This file may not be copied, modified, or distributed except according to those terms.

 import
-  stew/results,
+  results,
   chronos,
   chronicles,
   eth/common/eth_hash,
   eth/common,
   eth/p2p/discoveryv5/[protocol, enr],
   ../../database/content_db,
+  ../history/history_network,
   ../wire/[portal_protocol, portal_stream, portal_protocol_config],
   ./state_content,
   ./state_validation

+export results
+
 logScope:
   topics = "portal_state"

@@ -27,6 +30,7 @@ type StateNetwork* = ref object
   contentDB*: ContentDB
   contentQueue*: AsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])]
   processContentLoop: Future[void]
+  historyNetwork: Opt[HistoryNetwork]

 func toContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
   ok(toContentId(contentKey))

@@ -85,10 +89,10 @@ proc validateContent*(
 ): bool =
   doAssert(contentKey.contentType == contentValue.contentType)

-  case contentKey.contentType
-  of unused:
-    warn "Received content with unused content type"
-    false
-  of accountTrieNode:
-    validateFetchedAccountTrieNode(
-      contentKey.accountTrieNodeKey, contentValue.accountTrieNode
+  let res =
+    case contentKey.contentType
+    of unused:
+      Result[void, string].err("Received content with unused content type")
+    of accountTrieNode:
+      validateFetchedAccountTrieNode(
+        contentKey.accountTrieNodeKey, contentValue.accountTrieNode

@@ -100,6 +104,11 @@ proc validateContent*(
   of contractCode:
     validateFetchedContractCode(contentKey.contractCodeKey, contentValue.contractCode)

+  res.isOkOr:
+    warn "Validation of fetched content failed: ", error
+
+  res.isOk()
+
 proc getContent*(n: StateNetwork, key: ContentKey): Future[Opt[seq[byte]]] {.async.} =
   let
     keyEncoded = encode(key)

@@ -137,34 +146,54 @@ proc getContent*(n: StateNetwork, key: ContentKey): Future[Opt[seq[byte]]] {.async.} =
   # domain types.
   return Opt.some(contentResult.content)

-proc validateAccountTrieNode(
-    n: StateNetwork, key: ContentKey, contentValue: OfferContentValue
-): bool =
-  true
-
-proc validateContractTrieNode(
-    n: StateNetwork, key: ContentKey, contentValue: OfferContentValue
-): bool =
-  true
-
-proc validateContractCode(
-    n: StateNetwork, key: ContentKey, contentValue: OfferContentValue
-): bool =
-  true
-
-proc validateContent*(
-    n: StateNetwork, contentKey: ContentKey, contentValue: OfferContentValue
-): bool =
-  case contentKey.contentType
-  of unused:
-    warn "Received content with unused content type"
-    false
-  of accountTrieNode:
-    validateAccountTrieNode(n, contentKey, contentValue)
-  of contractTrieNode:
-    validateContractTrieNode(n, contentKey, contentValue)
-  of contractCode:
-    validateContractCode(n, contentKey, contentValue)
+proc getStateRootByBlockHash(
+    n: StateNetwork, hash: BlockHash
+): Future[Opt[KeccakHash]] {.async.} =
+  if n.historyNetwork.isNone():
+    warn "History network is not available. Unable to get state root by block hash"
+    return Opt.none(KeccakHash)
+
+  let header = (await n.historyNetwork.get().getVerifiedBlockHeader(hash)).valueOr:
+    warn "Failed to get block header by hash", hash
+    return Opt.none(KeccakHash)
+
+  Opt.some(header.stateRoot)
+
+proc validateContent*(
+    n: StateNetwork, contentKey: ContentKey, contentValue: OfferContentValue
+): Future[Result[void, string]] {.async.} =
+  doAssert(contentKey.contentType == contentValue.contentType)
+
+  case contentKey.contentType
+  of unused:
+    Result[void, string].err("Received content with unused content type")
+  of accountTrieNode:
+    let stateRoot = (
+      await n.getStateRootByBlockHash(contentValue.accountTrieNode.blockHash)
+    ).valueOr:
+      return Result[void, string].err("Failed to get state root by block hash")
+
+    validateOfferedAccountTrieNode(
+      stateRoot, contentKey.accountTrieNodeKey, contentValue.accountTrieNode
+    )
+  of contractTrieNode:
+    let stateRoot = (
+      await n.getStateRootByBlockHash(contentValue.contractTrieNode.blockHash)
+    ).valueOr:
+      return Result[void, string].err("Failed to get state root by block hash")
+
+    validateOfferedContractTrieNode(
+      stateRoot, contentKey.contractTrieNodeKey, contentValue.contractTrieNode
+    )
+  of contractCode:
+    let stateRoot = (
+      await n.getStateRootByBlockHash(contentValue.contractCode.blockHash)
+    ).valueOr:
+      return Result[void, string].err("Failed to get state root by block hash")
+
+    validateOfferedContractCode(
+      stateRoot, contentKey.contractCodeKey, contentValue.contractCode
+    )

 proc recursiveGossipAccountTrieNode(
     p: PortalProtocol,

@@ -235,6 +264,7 @@ proc new*(
     streamManager: StreamManager,
     bootstrapRecords: openArray[Record] = [],
     portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig,
+    historyNetwork = Opt.none(HistoryNetwork),
 ): T =
   let cq = newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](50)

@@ -253,8 +283,12 @@ proc new*(
   portalProtocol.dbPut =
     createStoreHandler(contentDB, portalConfig.radiusConfig, portalProtocol)

-  return
-    StateNetwork(portalProtocol: portalProtocol, contentDB: contentDB, contentQueue: cq)
+  return StateNetwork(
+    portalProtocol: portalProtocol,
+    contentDB: contentDB,
+    contentQueue: cq,
+    historyNetwork: historyNetwork,
+  )

 proc processContentLoop(n: StateNetwork) {.async.} =
   try:

@@ -266,7 +300,11 @@ proc processContentLoop(n: StateNetwork) {.async.} =
       (decodedKey, decodedValue) = decodeKV(contentKey, contentValue).valueOr:
         error "Unable to decode offered Key/Value"
         continue
-      if validateContent(n, decodedKey, decodedValue):
-        let
-          valueForRetrieval = decodedValue.offerContentToRetrievalContent().encode()
-          contentId = n.portalProtocol.toContentId(contentKey).valueOr:
+
+      (await n.validateContent(decodedKey, decodedValue)).isOkOr:
+        error "Received offered content failed validation", contentKey, error
+        continue
+
+      let
+        valueForRetrieval = decodedValue.offerContentToRetrievalContent().encode()
+        contentId = n.portalProtocol.toContentId(contentKey).valueOr:

@@ -280,8 +318,6 @@ proc processContentLoop(n: StateNetwork) {.async.} =
         n.portalProtocol, maybeSrcNodeId, contentKey, decodedKey, contentValue,
         decodedValue,
       )
-      else:
-        error "Received offered content failed validation", contentKey
   except CancelledError:
     trace "processContentLoop canceled"
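One consequence of the new Opt[HistoryNetwork] field: a StateNetwork constructed without a history network (the new default) cannot resolve state roots, so offered content fails validation by design rather than being accepted unchecked. A hedged sketch of the two wiring options (variable names are placeholders):

  # Placeholder names; mirrors the constructor signature above.
  # Without a history network, getStateRootByBlockHash returns none and
  # every offer is rejected during validation.
  let standalone = StateNetwork.new(d, db, streamManager)

  # With a history network, offers can be proven against verified headers.
  let validating = StateNetwork.new(
    d, db, streamManager, historyNetwork = Opt.some(historyNetwork)
  )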
@@ -5,30 +5,209 @@
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
 # at your option. This file may not be copied, modified, or distributed except according to those terms.

-import eth/common, ./state_content
-
-proc validateFetchedAccountTrieNode*(
-    trustedAccountTrieNodeKey: AccountTrieNodeKey,
-    accountTrieNode: AccountTrieNodeRetrieval,
-): bool =
-  let expectedHash = trustedAccountTrieNodeKey.nodeHash
-  let actualHash = keccakHash(accountTrieNode.node.asSeq())
-
-  expectedHash == actualHash
-
-proc validateFetchedContractTrieNode*(
-    trustedContractTrieNodeKey: ContractTrieNodeKey,
-    contractTrieNode: ContractTrieNodeRetrieval,
-): bool =
-  let expectedHash = trustedContractTrieNodeKey.nodeHash
-  let actualHash = keccakHash(contractTrieNode.node.asSeq())
-
-  expectedHash == actualHash
-
-proc validateFetchedContractCode*(
-    trustedContractCodeKey: ContractCodeKey, contractCode: ContractCodeRetrieval
-): bool =
-  let expectedHash = trustedContractCodeKey.codeHash
-  let actualHash = keccakHash(contractCode.code.asSeq())
-
-  expectedHash == actualHash
+import
+  results, stew/arrayops, eth/[common, trie], ../../common/common_types, ./state_content
+
+export results
+
+# private functions
+
+proc hashEquals(value: TrieNode | Bytecode, expectedHash: KeccakHash): bool {.inline.} =
+  keccakHash(value.asSeq()) == expectedHash
+
+proc isValidNextNode(thisNodeRlp: Rlp, rlpIdx: int, nextNode: TrieNode): bool =
+  let hashOrShortRlp = thisNodeRlp.listElem(rlpIdx)
+  if hashOrShortRlp.isEmpty():
+    return false
+
+  let nextHash =
+    if hashOrShortRlp.isList():
+      # is a short node
+      keccakHash(rlp.encode(hashOrShortRlp))
+    else:
+      let hash = hashOrShortRlp.toBytes()
+      if hash.len() != 32:
+        return false
+      KeccakHash(data: array[32, byte].initCopyFrom(hash))
+
+  nextNode.hashEquals(nextHash)
+
+proc decodePrefix(nodePrefixRlp: Rlp): (byte, bool, Nibbles) =
+  doAssert(not nodePrefixRlp.isEmpty())
+
+  let
+    rlpBytes = nodePrefixRlp.toBytes()
+    firstNibble = (rlpBytes[0] and 0xF0) shr 4
+    isLeaf = firstNibble == 2 or firstNibble == 3
+    isEven = firstNibble == 0 or firstNibble == 2
+    startIdx = if isEven: 1 else: 0
+    nibbles = Nibbles.init(rlpBytes[startIdx .. ^1], isEven)
+
+  (firstNibble.byte, isLeaf, nibbles)
+
+proc validateTrieProof*(
+    expectedRootHash: KeccakHash, path: Nibbles, proof: TrieProof
+): Result[void, string] =
+  if proof.len() == 0:
+    return err("proof is empty")
+
+  if not proof[0].hashEquals(expectedRootHash):
+    return err("hash of proof root node doesn't match the expected root hash")
+
+  let nibbles = path.unpackNibbles()
+  if nibbles.len() == 0:
+    if proof.len() == 1:
+      return ok() # root node case, already validated above
+    else:
+      return err("empty path, only one node expected in proof")
+
+  var nibbleIdx = 0
+  for proofIdx, p in proof:
+    let
+      thisNodeRlp = rlpFromBytes(p.asSeq())
+      remainingNibbles = nibbles.len() - nibbleIdx
+      isLastNode = proofIdx == proof.high
+
+    if remainingNibbles == 0:
+      if isLastNode:
+        break
+      else:
+        return err("empty nibbles but proof has more nodes")
+
+    case thisNodeRlp.listLen()
+    of 2:
+      let nodePrefixRlp = thisNodeRlp.listElem(0)
+      if nodePrefixRlp.isEmpty():
+        return err("node prefix is empty")
+
+      let (prefix, isLeaf, prefixNibbles) = decodePrefix(nodePrefixRlp)
+      if prefix >= 4:
+        return err("invalid prefix in node")
+
+      if not isLastNode or isLeaf:
+        let unpackedPrefix = prefixNibbles.unpackNibbles()
+        if remainingNibbles < unpackedPrefix.len():
+          return err("not enough nibbles to validate node prefix")
+
+        let nibbleEndIdx = nibbleIdx + unpackedPrefix.len()
+        if nibbles[nibbleIdx ..< nibbleEndIdx] != unpackedPrefix:
+          return err("nibbles don't match node prefix")
+        nibbleIdx += unpackedPrefix.len()
+
+      if not isLastNode:
+        if isLeaf:
+          return err("leaf node must be last node in the proof")
+        else: # is extension node
+          if not isValidNextNode(thisNodeRlp, 1, proof[proofIdx + 1]):
+            return
+              err("hash of next node doesn't match the expected extension node hash")
+    of 17:
+      if not isLastNode:
+        let nextNibble = nibbles[nibbleIdx]
+        if nextNibble >= 16:
+          return err("invalid next nibble for branch node")
+
+        if not isValidNextNode(thisNodeRlp, nextNibble.int, proof[proofIdx + 1]):
+          return err("hash of next node doesn't match the expected branch node hash")
+
+        inc nibbleIdx
+    else:
+      return err("invalid rlp node, expected 2 or 17 elements")
+
+  if nibbleIdx < nibbles.len():
+    err("path contains more nibbles than expected for proof")
+  else:
+    ok()
+
+proc rlpDecodeAccountTrieNode(accountNode: TrieNode): Result[Account, string] =
+  let accNodeRlp = rlpFromBytes(accountNode.asSeq())
+  if accNodeRlp.isEmpty() or accNodeRlp.listLen() != 2:
+    return err("invalid account trie node - malformed")
+
+  let accNodePrefixRlp = accNodeRlp.listElem(0)
+  if accNodePrefixRlp.isEmpty():
+    return err("invalid account trie node - empty prefix")
+
+  let (_, isLeaf, _) = decodePrefix(accNodePrefixRlp)
+  if not isLeaf:
+    return err("invalid account trie node - leaf prefix expected")
+
+  decodeRlp(accNodeRlp.listElem(1).toBytes(), Account)
+
+# public functions
+
+proc validateFetchedAccountTrieNode*(
+    trustedAccountTrieNodeKey: AccountTrieNodeKey,
+    accountTrieNode: AccountTrieNodeRetrieval,
+): Result[void, string] =
+  if accountTrieNode.node.hashEquals(trustedAccountTrieNodeKey.nodeHash):
+    ok()
+  else:
+    err("hash of fetched account trie node doesn't match the expected node hash")
+
+proc validateFetchedContractTrieNode*(
+    trustedContractTrieNodeKey: ContractTrieNodeKey,
+    contractTrieNode: ContractTrieNodeRetrieval,
+): Result[void, string] =
+  if contractTrieNode.node.hashEquals(trustedContractTrieNodeKey.nodeHash):
+    ok()
+  else:
+    err("hash of fetched contract trie node doesn't match the expected node hash")
+
+proc validateFetchedContractCode*(
+    trustedContractCodeKey: ContractCodeKey, contractCode: ContractCodeRetrieval
+): Result[void, string] =
+  if contractCode.code.hashEquals(trustedContractCodeKey.codeHash):
+    ok()
+  else:
+    err("hash of fetched bytecode doesn't match the expected code hash")
+
+proc validateOfferedAccountTrieNode*(
+    trustedStateRoot: KeccakHash,
+    accountTrieNodeKey: AccountTrieNodeKey,
+    accountTrieNode: AccountTrieNodeOffer,
+): Result[void, string] =
+  ?validateTrieProof(trustedStateRoot, accountTrieNodeKey.path, accountTrieNode.proof)
+
+  if accountTrieNode.proof[^1].hashEquals(accountTrieNodeKey.nodeHash):
+    ok()
+  else:
+    err("hash of offered account trie node doesn't match the expected node hash")
+
+proc validateOfferedContractTrieNode*(
+    trustedStateRoot: KeccakHash,
+    contractTrieNodeKey: ContractTrieNodeKey,
+    contractTrieNode: ContractTrieNodeOffer,
+): Result[void, string] =
+  let addressHash = keccakHash(contractTrieNodeKey.address).data
+  ?validateTrieProof(
+    trustedStateRoot, Nibbles.init(addressHash, true), contractTrieNode.accountProof
+  )
+
+  let account = ?rlpDecodeAccountTrieNode(contractTrieNode.accountProof[^1])
+
+  ?validateTrieProof(
+    account.storageRoot, contractTrieNodeKey.path, contractTrieNode.storageProof
+  )
+
+  if contractTrieNode.storageProof[^1].hashEquals(contractTrieNodeKey.nodeHash):
+    ok()
+  else:
+    err("hash of offered contract trie node doesn't match the expected node hash")
+
+proc validateOfferedContractCode*(
+    trustedStateRoot: KeccakHash,
+    contractCodeKey: ContractCodeKey,
+    contractCode: ContractCodeOffer,
+): Result[void, string] =
+  let addressHash = keccakHash(contractCodeKey.address).data
+  ?validateTrieProof(
+    trustedStateRoot, Nibbles.init(addressHash, true), contractCode.accountProof
+  )
+
+  let account = ?rlpDecodeAccountTrieNode(contractCode.accountProof[^1])
+
+  if contractCode.code.hashEquals(account.codeHash):
+    ok()
+  else:
+    err("hash of offered bytecode doesn't match the expected code hash")
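decodePrefix above follows Ethereum's hex-prefix encoding: a first nibble of 0 or 1 marks an even- or odd-length extension node, 2 or 3 an even- or odd-length leaf, which is why validateTrieProof rejects any prefix >= 4 and insists a leaf terminates the proof. A self-contained illustration mirroring the same arithmetic (example bytes constructed for this sketch, not taken from the diff):

  # Hex-prefix examples mirroring decodePrefix's arithmetic (illustrative):
  #   0x00 0x12 0x34 -> extension, even path [1,2,3,4]
  #   0x11 0x23      -> extension, odd path [1,2,3]
  #   0x20 0x12 0x34 -> leaf, even path [1,2,3,4]
  #   0x31 0x23      -> leaf, odd path [1,2,3]
  let hpExamples = [
    (@[byte 0x00, 0x12, 0x34], false, true),
    (@[byte 0x11, 0x23], false, false),
    (@[byte 0x20, 0x12, 0x34], true, true),
    (@[byte 0x31, 0x23], true, false),
  ]
  for (encoded, expectLeaf, expectEven) in hpExamples:
    let firstNibble = (encoded[0] and 0xF0) shr 4
    doAssert firstNibble < 4 # anything else is rejected by validateTrieProof
    doAssert (firstNibble == 2 or firstNibble == 3) == expectLeaf
    doAssert (firstNibble == 0 or firstNibble == 2) == expectEven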
@@ -9,10 +9,6 @@

 import
   ./test_portal_wire_protocol,
-  ./state_network_tests/test_state_content_keys,
-  ./state_network_tests/test_state_content_values,
-  ./state_network_tests/test_state_network_gossip,
-  ./test_state_proof_verification,
   ./test_accumulator,
   ./test_history_network,
   ./test_content_db,
@@ -21,4 +17,5 @@ import
   ./test_beacon_chain_block_proof_capella,
   ./test_beacon_chain_historical_roots,
   ./test_beacon_chain_historical_summaries,
-  ./beacon_network_tests/all_beacon_network_tests
+  ./beacon_network_tests/all_beacon_network_tests,
+  ./state_network_tests/all_state_network_tests
@ -0,0 +1,838 @@
|
||||||
|
{
|
||||||
|
"config": {
|
||||||
|
"chainId": 123,
|
||||||
|
"homesteadBlock": 0,
|
||||||
|
"eip150Block": 0,
|
||||||
|
"eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||||
|
"eip155Block": 0,
|
||||||
|
"eip158Block": 0,
|
||||||
|
"byzantiumBlock": 0,
|
||||||
|
"constantinopleBlock": 0,
|
||||||
|
"petersburgBlock": 0,
|
||||||
|
"istanbulBlock": 0,
|
||||||
|
"berlinBlock": 0,
|
||||||
|
"londonBlock": 500,
|
||||||
|
"clique": {
|
||||||
|
"period": 30,
|
||||||
|
"epoch": 30000
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"genesis": {
|
||||||
|
"nonce": "0x0",
|
||||||
|
"timestamp": "0x60b3877f",
|
||||||
|
"extraData": "0x00000000000000000000000000000000000000000000000000000000000000005211cea3870c7ba7c6c44b185e62eecdb864cd8c560228ce57d31efbf64c200b2c200aacec78cf17a7148e784fe95a7a750335f8b9572ee28d72e7650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||||
|
"gasLimit": "0x47b760",
|
||||||
|
"difficulty": "0x1",
|
||||||
|
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||||
|
"coinbase": "0x0000000000000000000000000000000000000000",
|
||||||
|
"alloc": {
|
||||||
|
"0000000000000000000000000000000000000000": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000001": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000002": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000003": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000004": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000005": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000006": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000007": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000008": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000009": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000000a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000000b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000000c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000000d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000000e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000000f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000010": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000011": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000012": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000013": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000014": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000015": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000016": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000017": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000018": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000019": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000001a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000001b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000001c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000001d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000001e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000001f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000020": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000021": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000022": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000023": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000024": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000025": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000026": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000027": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000028": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000029": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000002a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000002b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000002c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000002d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000002e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000002f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000030": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000031": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000032": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000033": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000034": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000035": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000036": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000037": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000038": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000039": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000003a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000003b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000003c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000003d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000003e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000003f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000040": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000041": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000042": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000043": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000044": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000045": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000046": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000047": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000048": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000049": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000004a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000004b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000004c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000004d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000004e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000004f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000050": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000051": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000052": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000053": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000054": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000055": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000056": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000057": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000058": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000059": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000005a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000005b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000005c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000005d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000005e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000005f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000060": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000061": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000062": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000063": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000064": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000065": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000066": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000067": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000068": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000069": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000006a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000006b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000006c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000006d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000006e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000006f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000070": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000071": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000072": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000073": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000074": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000075": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000076": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000077": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000078": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000079": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000007a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000007b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000007c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000007d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000007e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000007f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000080": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000081": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000082": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000083": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000084": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000085": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000086": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000087": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000088": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000089": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000008a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000008b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000008c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000008d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000008e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000008f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000090": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000091": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000092": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000093": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000094": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000095": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000096": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000097": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000098": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"0000000000000000000000000000000000000099": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000009a": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000009b": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000009c": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000009d": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000009e": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"000000000000000000000000000000000000009f": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a0": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a1": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a2": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a3": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a4": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a5": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a6": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a7": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a8": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000a9": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000aa": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000ab": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000ac": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000ad": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000ae": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000af": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b0": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b1": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b2": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b3": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b4": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b5": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b6": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b7": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b8": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000b9": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000ba": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000bb": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000bc": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000bd": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000be": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000bf": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c0": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c1": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c2": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c3": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c4": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c5": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c6": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c7": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c8": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000c9": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000ca": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000cb": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000cc": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000cd": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000ce": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000cf": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d0": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d1": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d2": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d3": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d4": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d5": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d6": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d7": {
|
||||||
|
"balance": "0x1"
|
||||||
|
},
|
||||||
|
"00000000000000000000000000000000000000d8": {
|
||||||
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000d9": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000da": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000db": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000dc": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000dd": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000de": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000df": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e0": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e1": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e2": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e3": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e4": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e5": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e6": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e7": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e8": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000e9": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000ea": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000eb": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000ec": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000ed": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000ee": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000ef": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f0": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f1": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f2": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f3": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f4": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f5": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f6": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f7": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f8": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000f9": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000fa": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000fb": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000fc": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000fd": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000fe": {
        "balance": "0x1"
      },
      "00000000000000000000000000000000000000ff": {
        "balance": "0x1"
      },
      "0e89e2aedb1cfcdb9424d41a1f218f4132738172": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "1041afbcb359d5a8dc58c15b2ff51354ff8a217d": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "236ff1e97419ae93ad80cafbaa21220c5d78fb7d": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "60adc0f89a41af237ce73554ede170d733ec14e0": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "799d329e5f583419167cd722962485926e338f4a": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "7cf5b79bfe291a67ab02b393e456ccc4c266f753": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "8a8eafb1cf62bfbeb1741769dae1a9dd47996192": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "8ba1f109551bd432803012645ac136ddd64dba72": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "b02a2eda1b317fbd16760128836b0ac59b560e9d": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "badc0de9e0794b049b5ea63c3e1e698a3476c172": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "f0300bee898ae272eb347e8369ac0c76df42c93f": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      },
      "fe3b557e8fb62b89f4916b721be55ceb828dbd73": {
        "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
      }
    },
    "number": "0x0",
    "gasUsed": "0x0",
    "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
  }
}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -0,0 +1,17 @@
# Nimbus
# Copyright (c) 2022-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.warning[UnusedImport]: off.}

import
  ./test_state_content_keys,
  ./test_state_content_values,
  ./test_state_network,
  #./test_state_network_gossip,
  ./test_state_validation,
  ./test_state_validation_genesis,
  ./test_state_validation_trieproof
@@ -0,0 +1,77 @@
# Fluffy
# Copyright (c) 2021-2024 Status Research & Development GmbH
# Licensed and distributed under either of
#  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [].}

import
  std/[sugar, sequtils],
  eth/[common, trie, trie/db],
  ../../nimbus/common/chain_config,
  ../../network/state/state_content

proc asNibbles*(key: openArray[byte], isEven = true): Nibbles =
  Nibbles.init(key, isEven)

proc asTrieProof*(branch: openArray[seq[byte]]): TrieProof =
  TrieProof.init(branch.map(node => TrieNode.init(node)))

proc getTrieProof*(
    state: HexaryTrie, key: openArray[byte]
): TrieProof {.raises: [RlpError].} =
  let branch = state.getBranch(key)
  branch.asTrieProof()

proc generateAccountProof*(
    state: HexaryTrie, address: EthAddress
): TrieProof {.raises: [RlpError].} =
  let key = keccakHash(address).data
  state.getTrieProof(key)

proc generateStorageProof*(
    state: HexaryTrie, slotKey: UInt256
): TrieProof {.raises: [RlpError].} =
  let key = keccakHash(toBytesBE(slotKey)).data
  state.getTrieProof(key)

proc getGenesisAlloc*(filePath: string): GenesisAlloc =
  var cn: NetworkParams
  if not loadNetworkParams(filePath, cn):
    quit(1)

  cn.genesis.alloc

proc toState*(
    alloc: GenesisAlloc
): (HexaryTrie, Table[EthAddress, HexaryTrie]) {.raises: [RlpError].} =
  var accountTrie = initHexaryTrie(newMemoryDB())
  var storageStates = initTable[EthAddress, HexaryTrie]()

  for address, genAccount in alloc:
    var storageRoot = EMPTY_ROOT_HASH
    var codeHash = EMPTY_CODE_HASH

    if genAccount.code.len() > 0:
      var storageTrie = initHexaryTrie(newMemoryDB())
      for slotKey, slotValue in genAccount.storage:
        let key = keccakHash(toBytesBE(slotKey)).data
        let value = rlp.encode(slotValue)
        storageTrie.put(key, value)
      storageStates[address] = storageTrie
      storageRoot = storageTrie.rootHash()
      codeHash = keccakHash(genAccount.code)

    let account = Account(
      nonce: genAccount.nonce,
      balance: genAccount.balance,
      storageRoot: storageRoot,
      codeHash: codeHash,
    )
    let key = keccakHash(address).data
    let value = rlp.encode(account)
    accountTrie.put(key, value)

  (accountTrie, storageStates)
@@ -6,51 +6,33 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.

import
-  std/[os, json, sequtils, strutils, sugar],
-  stew/[byteutils, io2],
+  std/os,
  nimcrypto/hash,
  testutils/unittests,
  chronos,
-  eth/trie/hexary_proof_verification,
  eth/keys,
+  eth/trie,
  eth/common/[eth_types, eth_hash],
  eth/p2p/discoveryv5/protocol as discv5_protocol,
  eth/p2p/discoveryv5/routing_table,
-  ../../../nimbus/[config, db/core_db, db/state_db],
-  ../../../nimbus/common/[chain_config, genesis],
  ../../network/wire/[portal_protocol, portal_stream],
  ../../network/state/[state_content, state_network],
  ../../database/content_db,
-  .././test_helpers
+  ../test_helpers,
+  ./state_test_helpers

-const testVectorDir = "./vendor/portal-spec-tests/tests/mainnet/state/"
-
-proc genesisToTrie(filePath: string): CoreDbMptRef =
-  # TODO: Doing our best here with API that exists, to be improved.
-  var cn: NetworkParams
-  if not loadNetworkParams(filePath, cn):
-    quit(1)
-
-  let sdb = newStateDB(newCoreDbRef LegacyDbMemory, false)
-  let map = toForkTransitionTable(cn.config)
-  let fork =
-    map.toHardFork(forkDeterminationInfo(0.toBlockNumber, cn.genesis.timestamp))
-  discard toGenesisHeader(cn.genesis, sdb, fork)
-
-  sdb.getTrie
-
procSuite "State Network":
  let rng = newRng()

  asyncTest "Test Share Full State":
    let
-      trie = genesisToTrie("fluffy" / "tests" / "custom_genesis" / "chainid7.json")
-      node1 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20302))
+      accounts =
+        getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / "chainid7.json")
+      (trie, _) = accounts.toState()
+      node1 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20312))
      sm1 = StreamManager.new(node1)
-      node2 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20303))
+      node2 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20313))
      sm2 = StreamManager.new(node2)

      proto1 =
        StateNetwork.new(node1, ContentDB.new("", uint32.high, inMemory = true), sm1)
      proto2 =
@@ -72,8 +54,14 @@ procSuite "State Network":
        contentType: accountTrieNode, accountTrieNodeKey: accountTrieNodeKey
      )
      contentId = toContentId(contentKey)
+      value = RetrievalContentValue(
+        contentType: accountTrieNode,
+        accountTrieNode: AccountTrieNodeRetrieval(node: TrieNode.init(v)),
+      )

-    discard proto1.contentDB.put(contentId, v, proto1.portalProtocol.localNode.id)
+    discard proto1.contentDB.put(
+      contentId, value.encode(), proto1.portalProtocol.localNode.id
+    )

    for key in keys:
      var nodeHash: NodeHash
@@ -89,13 +77,16 @@ procSuite "State Network":
      # Note: GetContent and thus the lookup here is not really needed, as we
      # only have to request data to one node.
      let foundContent = await proto2.getContent(contentKey)
+      check foundContent.isSome()

-      check:
-        foundContent.isSome()
+      let accTrieNode = decodeSsz(foundContent.get(), AccountTrieNodeRetrieval)
+      check accTrieNode.isOk()

-      let hash = keccakHash(foundContent.get())
+      let hash = keccakHash(accTrieNode.get().node.asSeq())
      check hash.data == key

+    proto1.stop()
+    proto2.stop()
    await node1.closeWait()
    await node2.closeWait()

@@ -103,12 +94,14 @@ procSuite "State Network":
    # TODO: Improve this test so it actually need to go through several
    # findNodes request, to properly test the lookup call.
    let
-      trie = genesisToTrie("fluffy" / "tests" / "custom_genesis" / "chainid7.json")
-      node1 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20302))
+      accounts =
+        getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / "chainid7.json")
+      (trie, _) = accounts.toState()
+      node1 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20312))
      sm1 = StreamManager.new(node1)
-      node2 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20303))
+      node2 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20313))
      sm2 = StreamManager.new(node2)
-      node3 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20304))
+      node3 = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20314))
      sm3 = StreamManager.new(node3)

      proto1 =
@@ -137,11 +130,19 @@ procSuite "State Network":
        contentType: accountTrieNode, accountTrieNodeKey: accountTrieNodeKey
      )
      contentId = toContentId(contentKey)
+      value = RetrievalContentValue(
+        contentType: accountTrieNode,
+        accountTrieNode: AccountTrieNodeRetrieval(node: TrieNode.init(v)),
+      )

-    discard proto2.contentDB.put(contentId, v, proto2.portalProtocol.localNode.id)
+    discard proto2.contentDB.put(
+      contentId, value.encode(), proto2.portalProtocol.localNode.id
+    )
    # Not needed right now as 1 node is enough considering node 1 is connected
    # to both.
-    discard proto3.contentDB.put(contentId, v, proto3.portalProtocol.localNode.id)
+    discard proto3.contentDB.put(
+      contentId, value.encode(), proto3.portalProtocol.localNode.id
+    )

    # Get first key
    var nodeHash: NodeHash
@@ -154,14 +155,16 @@ procSuite "State Network":
      ContentKey(contentType: accountTrieNode, accountTrieNodeKey: accountTrieNodeKey)

    let foundContent = await proto1.getContent(contentKey)
+    check foundContent.isSome()

-    check:
-      foundContent.isSome()
+    let accTrieNode = decodeSsz(foundContent.get(), AccountTrieNodeRetrieval)
+    check accTrieNode.isOk()

-    let hash = keccakHash(foundContent.get())
+    let hash = keccakHash(accTrieNode.get().node.asSeq())
    check hash.data == firstKey

+    proto1.stop()
+    proto2.stop()
    await node1.closeWait()
    await node2.closeWait()
    await node3.closeWait()
@@ -12,6 +12,7 @@ import
  stew/[byteutils, results],
  eth/p2p/discoveryv5/protocol as discv5_protocol,
  ../../network/wire/[portal_protocol, portal_stream],
+  ../../network/history/[history_content, history_network],
  ../../network/state/[state_content, state_network],
  ../../database/content_db,
  .././test_helpers,
@@ -35,7 +36,6 @@ procSuite "State Network Gossip":
    let
      testCase = YamlRecursiveGossip.loadFromYaml(file).valueOr:
        raiseAssert "Cannot read test vector: " & error
-
      recursiveGossipSteps = testCase[0]
      numOfClients = recursiveGossipSteps.len() - 1

@@ -44,9 +44,10 @@ procSuite "State Network Gossip":
    for i in 0 .. numOfClients:
      let
        node = initDiscoveryNode(rng, PrivateKey.random(rng[]), localAddress(20400 + i))
+        db = ContentDB.new("", uint32.high, inMemory = true)
        sm = StreamManager.new(node)
-        proto =
-          StateNetwork.new(node, ContentDB.new("", uint32.high, inMemory = true), sm)
+        hn = HistoryNetwork.new(node, db, sm, FinishedAccumulator())
+        proto = StateNetwork.new(node, db, sm, historyNetwork = Opt.some(hn))
      proto.start()
      clients.add(proto)

@@ -54,10 +55,30 @@ procSuite "State Network Gossip":
      let
        currentNode = clients[i]
        nextNode = clients[i + 1]

      check:
        currentNode.portalProtocol.addNode(nextNode.portalProtocol.localNode) == Added
        (await currentNode.portalProtocol.ping(nextNode.portalProtocol.localNode)).isOk()

+      let
+        blockHeader = BlockHeader(
+          stateRoot: Hash256.fromHex(
+            "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61"
+          )
+        )
+        headerRlp = rlp.encode(blockHeader)
+        blockHeaderWithProof = BlockHeaderWithProof(
+          header: ByteList.init(headerRlp), proof: BlockHeaderProof.init()
+        )
+        value = recursiveGossipSteps[0].content_value.hexToSeqByte()
+        decodedValue = SSZ.decode(value, AccountTrieNodeOffer)
+        contentKey = history_content.ContentKey
+          .init(history_content.ContentType.blockHeader, decodedValue.blockHash)
+          .encode()
+        contentId = history_content.toContentId(contentKey)
+
+      clients[i].contentDB.put(contentId, SSZ.encode(blockHeaderWithProof))
+
    for i in 0 .. numOfClients - 1:
      let
        pair = recursiveGossipSteps[i]
@@ -65,11 +86,11 @@ procSuite "State Network Gossip":
        nextNode = clients[i + 1]

        key = ByteList.init(pair.content_key.hexToSeqByte())
-        decodedKey = key.decode().valueOr:
+        decodedKey = state_content.decode(key).valueOr:
          raiseAssert "Cannot decode key"

        nextKey = ByteList.init(recursiveGossipSteps[1].content_key.hexToSeqByte())
-        decodedNextKey = nextKey.decode().valueOr:
+        decodedNextKey = state_content.decode(nextKey).valueOr:
          raiseAssert "Cannot decode key"

        value = pair.content_value.hexToSeqByte()
@@ -6,9 +6,11 @@
# at your option. This file may not be copied, modified, or distributed except according to those terms.

import
-  std/os,
+  std/[os, strutils],
+  results,
  unittest2,
  stew/byteutils,
+  eth/common,
  ../../network/state/state_content,
  ../../network/state/state_validation,
  ../../eth_data/yaml_utils
@@ -35,7 +37,15 @@ type YamlContractBytecodeKV = object

type YamlContractBytecodeKVs = seq[YamlContractBytecodeKV]

+type YamlRecursiveGossipKV = object
+  content_key: string
+  content_value: string
+
+type YamlRecursiveGossipKVs = seq[seq[YamlRecursiveGossipKV]]
+
suite "State Validation":
+  # Retrieval validation tests
+
  test "Validate valid AccountTrieNodeRetrieval nodes":
    const file = testVectorDir / "account_trie_node.yaml"
@@ -52,6 +62,7 @@ suite "State Validation":
        validateFetchedAccountTrieNode(
          contentKey.accountTrieNodeKey, contentValueRetrieval
        )
+        .isOk()

  test "Validate invalid AccountTrieNodeRetrieval nodes":
    const file = testVectorDir / "account_trie_node.yaml"
@@ -67,10 +78,13 @@ suite "State Validation":

      contentValueRetrieval.node[^1] += 1 # Modify node hash

-      check:
-        not validateFetchedAccountTrieNode(
+      let res = validateFetchedAccountTrieNode(
        contentKey.accountTrieNodeKey, contentValueRetrieval
      )
+      check:
+        res.isErr()
+        res.error() ==
+          "hash of fetched account trie node doesn't match the expected node hash"

  test "Validate valid ContractTrieNodeRetrieval nodes":
    const file = testVectorDir / "contract_storage_trie_node.yaml"
@@ -88,6 +102,7 @@ suite "State Validation":
        validateFetchedContractTrieNode(
          contentKey.contractTrieNodeKey, contentValueRetrieval
        )
+        .isOk()

  test "Validate invalid ContractTrieNodeRetrieval nodes":
    const file = testVectorDir / "contract_storage_trie_node.yaml"
@@ -103,10 +118,13 @@ suite "State Validation":

      contentValueRetrieval.node[^1] += 1 # Modify node hash

-      check:
-        not validateFetchedContractTrieNode(
+      let res = validateFetchedContractTrieNode(
        contentKey.contractTrieNodeKey, contentValueRetrieval
      )
+      check:
+        res.isErr()
+        res.error() ==
+          "hash of fetched contract trie node doesn't match the expected node hash"

  test "Validate valid ContractCodeRetrieval nodes":
    const file = testVectorDir / "contract_bytecode.yaml"
@@ -122,6 +140,7 @@ suite "State Validation":

      check:
        validateFetchedContractCode(contentKey.contractCodeKey, contentValueRetrieval)
+        .isOk()

  test "Validate invalid ContractCodeRetrieval nodes":
    const file = testVectorDir / "contract_bytecode.yaml"
@@ -137,7 +156,467 @@ suite "State Validation":

      contentValueRetrieval.code[^1] += 1 # Modify node hash

+      let res =
+        validateFetchedContractCode(contentKey.contractCodeKey, contentValueRetrieval)
      check:
-        not validateFetchedContractCode(
-          contentKey.contractCodeKey, contentValueRetrieval
-        )
+        res.isErr()
+        res.error() == "hash of fetched bytecode doesn't match the expected code hash"
+
+  # Account offer validation tests
+
+  test "Validate valid AccountTrieNodeOffer nodes":
+    const file = testVectorDir / "account_trie_node.yaml"
+    const stateRoots = [
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544".hexToSeqByte(),
+    ]
+
+    let testCase = YamlTrieNodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), AccountTrieNodeOffer)
+
+        check:
+          validateOfferedAccountTrieNode(
+            stateRoot, contentKey.accountTrieNodeKey, contentValueOffer
+          )
+          .isOk()
+
+      if i == 1:
+        continue # second test case only has root node and no recursive gossip
+
+      let contentKey =
+        decode(testData.recursive_gossip.content_key.hexToSeqByte().ByteList).get()
+      let contentValueOffer = SSZ.decode(
+        testData.recursive_gossip.content_value_offer.hexToSeqByte(),
+        AccountTrieNodeOffer,
+      )
+
+      check:
+        validateOfferedAccountTrieNode(
+          stateRoot, contentKey.accountTrieNodeKey, contentValueOffer
+        )
+        .isOk()
+
+  test "Validate invalid AccountTrieNodeOffer nodes - bad state roots":
+    const file = testVectorDir / "account_trie_node.yaml"
+    const stateRoots = [
+      "0xBAD7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0xBAD7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0xBAD8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544".hexToSeqByte(),
+    ]
+
+    let testCase = YamlTrieNodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+      let contentValueOffer =
+        SSZ.decode(testData.content_value_offer.hexToSeqByte(), AccountTrieNodeOffer)
+
+      let res = validateOfferedAccountTrieNode(
+        stateRoot, contentKey.accountTrieNodeKey, contentValueOffer
+      )
+      check:
+        res.isErr()
+        res.error() == "hash of proof root node doesn't match the expected root hash"
+
+  test "Validate invalid AccountTrieNodeOffer nodes - bad nodes":
+    const file = testVectorDir / "account_trie_node.yaml"
+    const stateRoots = [
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544".hexToSeqByte(),
+    ]
+
+    let testCase = YamlTrieNodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+      var contentValueOffer =
+        SSZ.decode(testData.content_value_offer.hexToSeqByte(), AccountTrieNodeOffer)
+
+      contentValueOffer.proof[0][0] += 1.byte
+
+      let res = validateOfferedAccountTrieNode(
+        stateRoot, contentKey.accountTrieNodeKey, contentValueOffer
+      )
+      check:
+        res.isErr()
+        res.error() == "hash of proof root node doesn't match the expected root hash"
+
+    for i, testData in testCase:
+      if i == 1:
+        continue # second test case only has root node
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+      var contentValueOffer =
+        SSZ.decode(testData.content_value_offer.hexToSeqByte(), AccountTrieNodeOffer)
+
+      contentValueOffer.proof[^2][^2] += 1.byte
+
+      let res = validateOfferedAccountTrieNode(
+        stateRoot, contentKey.accountTrieNodeKey, contentValueOffer
+      )
+      check:
+        res.isErr()
+        "hash of next node doesn't match the expected" in res.error()
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+      var contentValueOffer =
+        SSZ.decode(testData.content_value_offer.hexToSeqByte(), AccountTrieNodeOffer)
+
+      contentValueOffer.proof[^1][^1] += 1.byte
+
+      let res = validateOfferedAccountTrieNode(
+        stateRoot, contentKey.accountTrieNodeKey, contentValueOffer
+      )
+      check:
+        res.isErr()
+
+  # Contract storage offer validation tests
+
+  test "Validate valid ContractTrieNodeOffer nodes":
+    const file = testVectorDir / "contract_storage_trie_node.yaml"
+    const stateRoots = [
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+    ]
+
+    let testCase = YamlTrieNodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractTrieNodeOffer)
+
+        check:
+          validateOfferedContractTrieNode(
+            stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+          )
+          .isOk()
+
+      if i == 1:
+        continue # second test case has no recursive gossip
+
+      let contentKey =
+        decode(testData.recursive_gossip.content_key.hexToSeqByte().ByteList).get()
+      let contentValueOffer = SSZ.decode(
+        testData.recursive_gossip.content_value_offer.hexToSeqByte(),
+        ContractTrieNodeOffer,
+      )
+
+      check:
+        validateOfferedContractTrieNode(
+          stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+        )
+        .isOk()
+
+  test "Validate invalid ContractTrieNodeOffer nodes - bad state roots":
+    const file = testVectorDir / "contract_storage_trie_node.yaml"
+    const stateRoots = [
+      "0xBAD7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0xBAD7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+    ]
+
+    let testCase = YamlTrieNodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+      let contentValueOffer =
+        SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractTrieNodeOffer)
+
+      let res = validateOfferedContractTrieNode(
+        stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+      )
+      check:
+        res.isErr()
+        res.error() == "hash of proof root node doesn't match the expected root hash"
+
+  test "Validate invalid ContractTrieNodeOffer nodes - bad nodes":
+    const file = testVectorDir / "contract_storage_trie_node.yaml"
+    const stateRoots = [
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+    ]
+
+    let testCase = YamlTrieNodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractTrieNodeOffer)
+
+        contentValueOffer.accountProof[0][0] += 1.byte
+
+        let res = validateOfferedContractTrieNode(
+          stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+        )
+        check:
+          res.isErr()
+          res.error() == "hash of proof root node doesn't match the expected root hash"
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractTrieNodeOffer)
+
+        contentValueOffer.storageProof[0][0] += 1.byte
+
+        let res = validateOfferedContractTrieNode(
+          stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+        )
+        check:
+          res.isErr()
+          res.error() == "hash of proof root node doesn't match the expected root hash"
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractTrieNodeOffer)
+
+        contentValueOffer.accountProof[^1][^1] += 1.byte
+
+        check:
+          validateOfferedContractTrieNode(
+            stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+          )
+          .isErr()
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractTrieNodeOffer)
+
+        contentValueOffer.storageProof[^1][^1] += 1.byte
+
+        check:
+          validateOfferedContractTrieNode(
+            stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+          )
+          .isErr()
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractTrieNodeOffer)
+
+        contentValueOffer.accountProof[^2][^2] += 1.byte
+
+        check:
+          validateOfferedContractTrieNode(
+            stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+          )
+          .isErr()
+
+  # Contract bytecode offer validation tests
+
+  test "Validate valid ContractCodeOffer nodes":
+    const file = testVectorDir / "contract_bytecode.yaml"
+    const stateRoots = [
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte()
+    ]
+
+    let testCase = YamlContractBytecodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+      let contentValueOffer =
+        SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractCodeOffer)
+
+      check:
+        validateOfferedContractCode(
+          stateRoot, contentKey.contractCodeKey, contentValueOffer
+        )
+        .isOk()
+
+  test "Validate invalid ContractCodeOffer nodes - bad state root":
+    const file = testVectorDir / "contract_bytecode.yaml"
+    const stateRoots = [
+      "0xBAD7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte()
+    ]
+
+    let testCase = YamlContractBytecodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+      let contentValueOffer =
+        SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractCodeOffer)
+
+      let res = validateOfferedContractCode(
+        stateRoot, contentKey.contractCodeKey, contentValueOffer
+      )
+      check:
+        res.isErr()
+        res.error() == "hash of proof root node doesn't match the expected root hash"
+
+  test "Validate invalid ContractCodeOffer nodes - bad nodes and bytecode":
+    const file = testVectorDir / "contract_bytecode.yaml"
+    const stateRoots = [
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte()
+    ]
+
+    let testCase = YamlContractBytecodeKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractCodeOffer)
+
+        contentValueOffer.accountProof[0][0] += 1.byte
+
+        let res = validateOfferedContractCode(
+          stateRoot, contentKey.contractCodeKey, contentValueOffer
+        )
+        check:
+          res.isErr()
+          res.error() == "hash of proof root node doesn't match the expected root hash"
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractCodeOffer)
+
+        contentValueOffer.code[0] += 1.byte
+
+        let res = validateOfferedContractCode(
+          stateRoot, contentKey.contractCodeKey, contentValueOffer
+        )
+        check:
+          res.isErr()
+          res.error() == "hash of offered bytecode doesn't match the expected code hash"
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractCodeOffer)
+
+        contentValueOffer.accountProof[^1][^1] += 1.byte
+
+        check:
+          validateOfferedContractCode(
+            stateRoot, contentKey.contractCodeKey, contentValueOffer
+          )
+          .isErr()
+
+      block:
+        let contentKey = decode(testData.content_key.hexToSeqByte().ByteList).get()
+        var contentValueOffer =
+          SSZ.decode(testData.content_value_offer.hexToSeqByte(), ContractCodeOffer)
+
+        contentValueOffer.code[^1] += 1.byte
+
+        let res = validateOfferedContractCode(
+          stateRoot, contentKey.contractCodeKey, contentValueOffer
+        )
+        check:
+          res.isErr()
+          res.error() == "hash of offered bytecode doesn't match the expected code hash"
+
+  # Recursive gossip offer validation tests
+
+  test "Validate valid AccountTrieNodeOffer recursive gossip nodes":
+    const file = testVectorDir / "recursive_gossip.yaml"
+    const stateRoots = [
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544".hexToSeqByte(),
+    ]
+
+    let testCase = YamlRecursiveGossipKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      if i == 1:
+        continue
+
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      for kv in testData:
+        let contentKey = decode(kv.content_key.hexToSeqByte().ByteList).get()
+        let contentValueOffer =
+          SSZ.decode(kv.content_value.hexToSeqByte(), AccountTrieNodeOffer)
+
+        check:
+          validateOfferedAccountTrieNode(
+            stateRoot, contentKey.accountTrieNodeKey, contentValueOffer
+          )
+          .isOk()
+
+  test "Validate valid ContractTrieNodeOffer recursive gossip nodes":
+    const file = testVectorDir / "recursive_gossip.yaml"
+    const stateRoots = [
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0x1ad7b80af0c28bc1489513346d2706885be90abb07f23ca28e50482adb392d61".hexToSeqByte(),
+      "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544".hexToSeqByte(),
+    ]
+
+    let testCase = YamlRecursiveGossipKVs.loadFromYaml(file).valueOr:
+      raiseAssert "Cannot read test vector: " & error
+
+    for i, testData in testCase:
+      if i != 1:
+        continue
+
+      var stateRoot: KeccakHash
+      copyMem(addr stateRoot, unsafeAddr stateRoots[i][0], 32)
+
+      for kv in testData:
+        let contentKey = decode(kv.content_key.hexToSeqByte().ByteList).get()
+        let contentValueOffer =
+          SSZ.decode(kv.content_value.hexToSeqByte(), ContractTrieNodeOffer)
+
+        check:
+          validateOfferedContractTrieNode(
+            stateRoot, contentKey.contractTrieNodeKey, contentValueOffer
+          )
+          .isOk()
@@ -0,0 +1,142 @@
# Nimbus
# Copyright (c) 2023-2024 Status Research & Development GmbH
# Licensed and distributed under either of
#  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [].}

import
  std/os,
  unittest2,
  stew/results,
  eth/[common, trie, trie/trie_defs],
  ../../../nimbus/common/chain_config,
  ../../network/state/state_content,
  ../../network/state/state_validation,
  ./state_test_helpers

template checkValidProofsForExistingLeafs(
    genAccounts: GenesisAlloc,
    accountState: HexaryTrie,
    storageStates: Table[EthAddress, HexaryTrie],
) =
  for address, account in genAccounts:
    var acc = newAccount(account.nonce, account.balance)
    acc.codeHash = keccakHash(account.code)

    let
      accountProof = accountState.generateAccountProof(address)
      accountTrieNodeKey = AccountTrieNodeKey(
        path: Nibbles.init(keccakHash(address).data, true),
        nodeHash: keccakHash(accountProof[^1].asSeq()),
      )
      accountTrieOffer = AccountTrieNodeOffer(proof: accountProof)
      proofResult = validateOfferedAccountTrieNode(
        accountState.rootHash(), accountTrieNodeKey, accountTrieOffer
      )
    check proofResult.isOk()

    let
      contractCodeKey = ContractCodeKey(address: address, codeHash: acc.codeHash)
      contractCode =
        ContractCodeOffer(code: Bytecode.init(account.code), accountProof: accountProof)
      codeResult = validateOfferedContractCode(
        accountState.rootHash(), contractCodeKey, contractCode
      )
    check codeResult.isOk()

    if account.code.len() > 0:
      let storageState = storageStates[address]
      acc.storageRoot = storageState.rootHash()

      for slotKey, slotValue in account.storage:
        let
          storageProof = storageState.generateStorageProof(slotKey)
          contractTrieNodeKey = ContractTrieNodeKey(
            address: address,
            path: Nibbles.init(keccakHash(toBytesBE(slotKey)).data, true),
            nodeHash: keccakHash(storageProof[^1].asSeq()),
          )
          contractTrieOffer = ContractTrieNodeOffer(
            storageProof: storageProof, accountProof: accountProof
          )
          proofResult = validateOfferedContractTrieNode(
            accountState.rootHash(), contractTrieNodeKey, contractTrieOffer
          )
        check proofResult.isOk()

template checkInvalidProofsWithBadValue(
    genAccounts: GenesisAlloc,
    accountState: HexaryTrie,
    storageStates: Table[EthAddress, HexaryTrie],
) =
  for address, account in genAccounts:
    var acc = newAccount(account.nonce, account.balance)
    acc.codeHash = keccakHash(account.code)

    var
      accountProof = accountState.generateAccountProof(address)
      accountTrieNodeKey = AccountTrieNodeKey(
        path: Nibbles.init(keccakHash(address).data, true),
        nodeHash: keccakHash(accountProof[^1].asSeq()),
      )
    accountProof[^1][^1] += 1 # bad account leaf value
    let
      accountTrieOffer = AccountTrieNodeOffer(proof: accountProof)
      proofResult = validateOfferedAccountTrieNode(
        accountState.rootHash(), accountTrieNodeKey, accountTrieOffer
      )
    check proofResult.isErr()

    let
      contractCodeKey = ContractCodeKey(address: address, codeHash: acc.codeHash)
      contractCode = ContractCodeOffer(
        code: Bytecode.init(@[1u8, 2, 3]), # bad code value
        accountProof: accountProof,
      )
      codeResult = validateOfferedContractCode(
        accountState.rootHash(), contractCodeKey, contractCode
      )
    check codeResult.isErr()

    if account.code.len() > 0:
      let storageState = storageStates[address]
      acc.storageRoot = storageState.rootHash()

      for slotKey, slotValue in account.storage:
        var
          storageProof = storageState.generateStorageProof(slotKey)
          contractTrieNodeKey = ContractTrieNodeKey(
            address: address,
            path: Nibbles.init(keccakHash(toBytesBE(slotKey)).data, true),
            nodeHash: keccakHash(storageProof[^1].asSeq()),
          )
        storageProof[^1][^1] += 1 # bad storage leaf value
        let
          contractTrieOffer = ContractTrieNodeOffer(
            storageProof: storageProof, accountProof: accountProof
          )
          proofResult = validateOfferedContractTrieNode(
            accountState.rootHash(), contractTrieNodeKey, contractTrieOffer
          )
        check proofResult.isErr()

suite "State Proof Verification Tests":
  let genesisFiles = [
    "berlin2000.json", "calaveras.json", "chainid1.json", "chainid7.json",
    "devnet4.json", "devnet5.json", "holesky.json", "mainshadow1.json", "merge.json",
  ]

  test "Valid proofs for existing leafs":
    for file in genesisFiles:
      let accounts = getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / file)
      let state = accounts.toState()
      checkValidProofsForExistingLeafs(accounts, state[0], state[1])

  test "Invalid proofs with bad value":
    for file in genesisFiles:
      let accounts = getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / file)
      var state = accounts.toState()
      checkInvalidProofsWithBadValue(accounts, state[0], state[1])
@@ -0,0 +1,165 @@
# Fluffy
# Copyright (c) 2024 Status Research & Development GmbH
# Licensed and distributed under either of
#  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.used.}

{.push raises: [].}

import
  std/sequtils,
  stew/byteutils,
  unittest2,
  stint,
  nimcrypto/hash,
  eth/trie/[hexary, db, trie_defs],
  ../../network/state/state_validation,
  ./state_test_helpers

proc getKeyBytes(i: int): seq[byte] =
  let hash = keccakHash(u256(i).toBytesBE())
  return toSeq(hash.data)

suite "MPT trie proof verification":
  test "Validate proof for existing value":
    let numValues = 1000
    var trie = initHexaryTrie(newMemoryDB())

    for i in 1 .. numValues:
      let bytes = getKeyBytes(i)
      trie.put(bytes, bytes)

    let rootHash = trie.rootHash()

    for i in 1 .. numValues:
      let
        kv = getKeyBytes(i)
        proof = trie.getTrieProof(kv)
        res = validateTrieProof(rootHash, kv.asNibbles(), proof)

      check:
        res.isOk()

  test "Validate proof for non-existing value":
    let numValues = 1000
    var trie = initHexaryTrie(newMemoryDB())

    for i in 1 .. numValues:
      let bytes = getKeyBytes(i)
      trie.put(bytes, bytes)

    let
      rootHash = trie.rootHash()
      key = getKeyBytes(numValues + 1)
      proof = trie.getTrieProof(key)
      res = validateTrieProof(rootHash, key.asNibbles(), proof)

    check:
      res.isErr()
      res.error() == "path contains more nibbles than expected for proof"

  test "Validate proof for empty trie":
    var trie = initHexaryTrie(newMemoryDB())

    let
      rootHash = trie.rootHash()
      key = "not-exist".toBytes
      proof = trie.getTrieProof(key)
      res = validateTrieProof(rootHash, key.asNibbles(), proof)

    check:
      res.isErr()
      res.error() == "invalid rlp node, expected 2 or 17 elements"

  test "Validate proof for one element trie":
    var trie = initHexaryTrie(newMemoryDB())

    let key = "k".toBytes
    trie.put(key, "v".toBytes)

    let
      rootHash = trie.rootHash
      proof = trie.getTrieProof(key)
      res = validateTrieProof(rootHash, key.asNibbles(), proof)

    check:
      res.isOk()

  test "Validate proof bytes":
    var trie = initHexaryTrie(newMemoryDB(), isPruning = false)

    trie.put("doe".toBytes, "reindeer".toBytes)
    trie.put("dog".toBytes, "puppy".toBytes)
    trie.put("dogglesworth".toBytes, "cat".toBytes)

    let rootHash = trie.rootHash

    block:
      let
        key = "doe".toBytes
        proof = trie.getTrieProof(key)
        res = validateTrieProof(rootHash, key.asNibbles(), proof)

      check:
        res.isOk()

    block:
      let
        key = "dog".toBytes
        proof = trie.getTrieProof(key)
        res = validateTrieProof(rootHash, key.asNibbles(), proof)

      check:
        res.isOk()

    block:
      let
        key = "dogglesworth".toBytes
        proof = trie.getTrieProof(key)
        res = validateTrieProof(rootHash, key.asNibbles(), proof)

      check:
        res.isOk()

    block:
      let
        key = "dogg".toBytes
        proof = trie.getTrieProof(key)
        res = validateTrieProof(rootHash, key.asNibbles(), proof)

      check:
        res.isErr()
        res.error() == "not enough nibbles to validate node prefix"

    block:
      let
        key = "dogz".toBytes
        proof = trie.getTrieProof(key)
        res = validateTrieProof(rootHash, key.asNibbles(), proof)

      check:
        res.isErr()
        res.error() == "path contains more nibbles than expected for proof"

    block:
      let
        key = "doe".toBytes
        proof = newSeq[seq[byte]]().asTrieProof()
        res = validateTrieProof(rootHash, key.asNibbles(), proof)

      check:
        res.isErr()
        res.error() == "proof is empty"

    block:
      let
        key = "doe".toBytes
        proof = @["aaa".toBytes, "ccc".toBytes].asTrieProof()
        res = validateTrieProof(rootHash, key.asNibbles(), proof)

      check:
        res.isErr()
        res.error() == "hash of proof root node doesn't match the expected root hash"
@ -9,12 +9,10 @@

 import
   std/net,
-  eth/[common, keys, rlp, trie, trie/db],
+  eth/[common, keys, rlp],
   eth/p2p/discoveryv5/[enr, node, routing_table],
   eth/p2p/discoveryv5/protocol as discv5_protocol,
   ../network/history/[accumulator, history_content],
-  ../network/state/experimental/state_proof_types,
-  ../../nimbus/common/chain_config,
   ../database/content_db

 proc localAddress*(port: int): Address {.raises: [ValueError].} =

@ -113,42 +111,3 @@ func buildHeadersWithProof*(
     headersWithProof.add(?buildHeaderWithProof(header, epochAccumulators))

   ok(headersWithProof)
-
-proc getGenesisAlloc*(filePath: string): GenesisAlloc =
-  var cn: NetworkParams
-  if not loadNetworkParams(filePath, cn):
-    quit(1)
-
-  cn.genesis.alloc
-
-proc toState*(
-    alloc: GenesisAlloc
-): (AccountState, Table[EthAddress, StorageState]) {.raises: [RlpError].} =
-  var accountTrie = initHexaryTrie(newMemoryDB())
-  var storageStates = initTable[EthAddress, StorageState]()
-
-  for address, genAccount in alloc:
-    var storageRoot = EMPTY_ROOT_HASH
-    var codeHash = EMPTY_CODE_HASH
-
-    if genAccount.code.len() > 0:
-      var storageTrie = initHexaryTrie(newMemoryDB())
-      for slotKey, slotValue in genAccount.storage:
-        let key = keccakHash(toBytesBE(slotKey)).data
-        let value = rlp.encode(slotValue)
-        storageTrie.put(key, value)
-      storageStates[address] = storageTrie.StorageState
-      storageRoot = storageTrie.rootHash()
-      codeHash = keccakHash(genAccount.code)
-
-    let account = Account(
-      nonce: genAccount.nonce,
-      balance: genAccount.balance,
-      storageRoot: storageRoot,
-      codeHash: codeHash,
-    )
-    let key = keccakHash(address).data
-    let value = rlp.encode(account)
-    accountTrie.put(key, value)
-
-  (accountTrie.AccountState, storageStates)
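The `getGenesisAlloc` and `toState` helpers removed above feed the state proof tests: they flatten a genesis allocation into an account trie plus per-contract storage tries, from which proofs can be generated and verified. A sketch of that end-to-end flow, assuming the experimental state_proof_generation and state_proof_verification modules from this commit and the same imports as the test file below:

# Load a genesis allocation, build the account/storage tries, then
# prove and verify each account against the account trie's root hash.
let
  accounts = getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / "chainid7.json")
  (accountState, storageStates) = accounts.toState()

for address, genAccount in accounts:
  var acc = newAccount(genAccount.nonce, genAccount.balance)
  acc.codeHash = keccakHash(genAccount.code)
  if genAccount.code.len() > 0:
    acc.storageRoot = storageStates[address].rootHash()

  let
    accountProof = accountState.generateAccountProof(address)
    res = verifyAccount(accountState.rootHash(), address, acc, accountProof)
  doAssert res.isOk()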
@ -1,178 +0,0 @@

# Nimbus
# Copyright (c) 2023-2024 Status Research & Development GmbH
# Licensed and distributed under either of
#  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [].}

import
  std/os,
  unittest2,
  stew/results,
  eth/[common, rlp, trie, trie/trie_defs],
  ../../nimbus/common/chain_config,
  ../network/state/experimental/
    [state_proof_types, state_proof_generation, state_proof_verification],
  ./test_helpers

proc checkValidProofsForExistingLeafs(
    genAccounts: GenesisAlloc,
    accountState: AccountState,
    storageStates: Table[EthAddress, StorageState],
) {.raises: [KeyError, RlpError].} =
  for address, account in genAccounts:
    var acc = newAccount(account.nonce, account.balance)
    acc.codeHash = keccakHash(account.code)
    let codeResult = verifyContractBytecode(acc.codeHash, account.code)
    check codeResult.isOk()

    if account.code.len() > 0:
      let storageState = storageStates[address]
      acc.storageRoot = storageState.rootHash()

      for slotKey, slotValue in account.storage:
        let storageProof = storageState.generateStorageProof(slotKey)
        let proofResult =
          verifyContractStorageSlot(acc.storageRoot, slotKey, slotValue, storageProof)
        check proofResult.isOk()

    let accountProof = accountState.generateAccountProof(address)
    let proofResult = verifyAccount(accountState.rootHash(), address, acc, accountProof)
    check proofResult.isOk()

proc checkValidProofsForMissingLeafs(
    genAccounts: GenesisAlloc,
    accountState: var AccountState,
    storageStates: Table[EthAddress, StorageState],
) {.raises: [KeyError, RlpError].} =
  var remainingAccounts = genAccounts.len()

  for address, account in genAccounts:
    if (remainingAccounts == 1):
      break # can't generate proofs from an empty state

    var acc = newAccount(account.nonce, account.balance)
    acc.codeHash = keccakHash(account.code)

    if account.code.len() > 0:
      var storageState = storageStates[address]
      acc.storageRoot = storageState.rootHash()

      var remainingSlots = account.storage.len()
      for slotKey, slotValue in account.storage:
        if (remainingSlots == 1):
          break # can't generate proofs from an empty state

        storageState.HexaryTrie.del(keccakHash(toBytesBE(slotKey)).data)
          # delete the slot from the state
        dec remainingSlots

        let storageProof = storageState.generateStorageProof(slotKey)
        let proofResult =
          verifyContractStorageSlot(acc.storageRoot, slotKey, slotValue, storageProof)
        check proofResult.isErr()

    accountState.HexaryTrie.del(keccakHash(address).data)
      # delete the account from the state
    dec remainingAccounts

    let accountProof = accountState.generateAccountProof(address)
    let proofResult = verifyAccount(accountState.rootHash(), address, acc, accountProof)
    check proofResult.isErr()
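The `remainingAccounts` / `remainingSlots` guards above stop one short of draining the trie: once the last leaf is gone the trie is empty and no branch remains from which to build a proof. For every other leaf, deleting it turns the stale (key, value) pair into a negative case, which is the pattern being tested. A condensed sketch, reusing the names from this file:

# Delete an account leaf, then verify the stale account object against
# the updated root: generateAccountProof now yields a proof over the
# modified trie, so verifyAccount is expected to return an error.
accountState.HexaryTrie.del(keccakHash(address).data)
let
  accountProof = accountState.generateAccountProof(address)
  res = verifyAccount(accountState.rootHash(), address, acc, accountProof)
doAssert res.isErr()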
proc checkInvalidProofsWithBadStateRoot(
    genAccounts: GenesisAlloc,
    accountState: AccountState,
    storageStates: Table[EthAddress, StorageState],
) {.raises: [KeyError, RlpError].} =
  let badHash =
    toDigest("2cb1b80b285d09e0570fdbbb808e1d14e4ac53e36dcd95dbc268deec2915b3e7")

  for address, account in genAccounts:
    var acc = newAccount(account.nonce, account.balance)
    acc.codeHash = keccakHash(account.code)
    let codeResult = verifyContractBytecode(badHash, account.code)
    check codeResult.isErr()

    if account.code.len() > 0:
      var storageState = storageStates[address]
      acc.storageRoot = storageState.rootHash()

      var remainingSlots = account.storage.len()
      for slotKey, slotValue in account.storage:
        let storageProof = storageState.generateStorageProof(slotKey)
        let proofResult =
          verifyContractStorageSlot(badHash, slotKey, slotValue, storageProof)
        check:
          proofResult.isErr()
          proofResult.error() == "missing expected node"

    let accountProof = accountState.generateAccountProof(address)
    let proofResult = verifyAccount(badHash, address, acc, accountProof)
    check:
      proofResult.isErr()
      proofResult.error() == "missing expected node"

proc checkInvalidProofsWithBadValue(
    genAccounts: GenesisAlloc,
    accountState: AccountState,
    storageStates: Table[EthAddress, StorageState],
) {.raises: [KeyError, RlpError].} =
  for address, account in genAccounts:
    var acc = newAccount(account.nonce, account.balance)
    acc.codeHash = keccakHash(account.code)

    let codeResult = verifyContractBytecode(acc.codeHash, @[1u8, 2, 3]) # bad code value
    check codeResult.isErr()

    if account.code.len() > 0:
      var storageState = storageStates[address]
      acc.storageRoot = storageState.rootHash()

      var remainingSlots = account.storage.len()
      for slotKey, slotValue in account.storage:
        let storageProof = storageState.generateStorageProof(slotKey)
        let badSlotValue = slotValue + 1 # bad slot value

        let proofResult = verifyContractStorageSlot(
          acc.storageRoot, slotKey, badSlotValue, storageProof
        )
        check:
          proofResult.isErr()
          proofResult.error() == "proof does not contain expected value"

    let accountProof = accountState.generateAccountProof(address)
    inc acc.balance # bad account balance
    let proofResult = verifyAccount(accountState.rootHash(), address, acc, accountProof)
    check:
      proofResult.isErr()
      proofResult.error() == "proof does not contain expected value"

suite "State Proof Verification Tests":
  let genesisFiles = ["berlin2000.json", "chainid1.json", "chainid7.json", "merge.json"]

  test "Valid proofs for existing leafs":
    for file in genesisFiles:
      let accounts = getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / file)
      let state = accounts.toState()
      checkValidProofsForExistingLeafs(accounts, state[0], state[1])

  test "Valid proofs for missing leafs":
    for file in genesisFiles:
      let accounts = getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / file)
      var state = accounts.toState()
      checkValidProofsForMissingLeafs(accounts, state[0], state[1])

  test "Invalid proofs with bad state root":
    for file in genesisFiles:
      let accounts = getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / file)
      var state = accounts.toState()
      checkInvalidProofsWithBadStateRoot(accounts, state[0], state[1])

  test "Invalid proofs with bad value":
    for file in genesisFiles:
      let accounts = getGenesisAlloc("fluffy" / "tests" / "custom_genesis" / file)
      var state = accounts.toState()
      checkInvalidProofsWithBadValue(accounts, state[0], state[1])