Mirror of https://github.com/status-im/nimbus-eth1.git (synced 2025-01-12 05:14:14 +00:00)

Commit 51cf991439 (parent cfe14f1825)
Bump ssz_serialization and use ByteList[n] + add ContentKeyByteList (#2500)
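In short: the ssz_serialization bump replaces the project-local `ByteList* = List[byte, 2048]` alias with the library's generic `ByteList[n]`, and introduces `ContentKeyByteList* = ByteList[2048]` as a dedicated type for SSZ-encoded Portal content keys. As an illustration (not part of the diff itself), a minimal Nim sketch that only restates declarations and helpers appearing in the hunks below:

  # Sketch only; these lines mirror the post-change code in this commit.
  type
    ContentKeyByteList* = ByteList[2048] # The encoded content key
    ContentKeysList* = List[ContentKeyByteList, contentKeysLimit]

  func encode*(contentKey: ContentKey): ContentKeyByteList =
    ContentKeyByteList.init(SSZ.encode(contentKey))

  func toContentId*(contentKey: ContentKeyByteList): ContentId =
    let idHash = sha256.digest(contentKey.asSeq())
    readUintBE[256](idHash.data)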
@@ -7,21 +7,18 @@

 {.push raises: [].}

-import results, ssz_serialization, eth/rlp, stew/byteutils, nimcrypto/hash
+import results, ssz_serialization, eth/rlp, nimcrypto/hash

 export hash

 type
-  ByteList* = List[byte, 2048]
-  Bytes2* = array[2, byte]
-  Bytes32* = array[32, byte]
+  Bytes2* = ByteVector[2]
+  Bytes32* = ByteVector[32]

   ContentId* = UInt256
+  ContentKeyByteList* = ByteList[2048] # The encoded content key

   BlockHash* = MDigest[32 * 8] # Bytes32

-func `$`*(x: ByteList): string =
-  x.asSeq.toHex()
-
 func decodeRlp*(input: openArray[byte], T: type): Result[T, string] =
   try:
     ok(rlp.decode(input, T))
@@ -447,7 +447,7 @@ proc adjustRadius(

 proc createGetHandler*(db: ContentDB): DbGetHandler =
   return (
-    proc(contentKey: ByteList, contentId: ContentId): Opt[seq[byte]] =
+    proc(contentKey: ContentKeyByteList, contentId: ContentId): Opt[seq[byte]] =
       let content = db.get(contentId).valueOr:
         return Opt.none(seq[byte])

@@ -459,7 +459,7 @@ proc createStoreHandler*(
 ): DbStoreHandler =
   return (
     proc(
-        contentKey: ByteList, contentId: ContentId, content: seq[byte]
+        contentKey: ContentKeyByteList, contentId: ContentId, content: seq[byte]
     ) {.raises: [], gcsafe.} =
       # always re-check that the key is in the node range to make sure only
       # content in range is stored.
@@ -268,7 +268,7 @@ proc historyPropagateHeaders*(

 iterator headersWithProof*(
     f: Era1File, epochRecord: EpochRecordCached
-): (ByteList, seq[byte]) =
+): (ContentKeyByteList, seq[byte]) =
   for blockHeader in f.era1BlockHeaders:
     doAssert blockHeader.isPreMerge()

@@ -285,7 +285,7 @@ iterator headersWithProof*(

     yield (contentKey, contentValue)

-iterator blockContent*(f: Era1File): (ByteList, seq[byte]) =
+iterator blockContent*(f: Era1File): (ContentKeyByteList, seq[byte]) =
   for (header, body, receipts, _) in f.era1BlockTuples:
     let blockHash = header.blockHash()

@@ -288,7 +288,7 @@ proc getLastFinalityUpdate*(db: BeaconDb): Opt[ForkedLightClientFinalityUpdate]

 proc createGetHandler*(db: BeaconDb): DbGetHandler =
   return (
-    proc(contentKey: ByteList, contentId: ContentId): results.Opt[seq[byte]] =
+    proc(contentKey: ContentKeyByteList, contentId: ContentId): results.Opt[seq[byte]] =
       let contentKey = contentKey.decode().valueOr:
         # TODO: as this should not fail, maybe it is better to raiseAssert ?
         return Opt.none(seq[byte])

@@ -347,7 +347,7 @@ proc createGetHandler*(db: BeaconDb): DbGetHandler =
 proc createStoreHandler*(db: BeaconDb): DbStoreHandler =
   return (
     proc(
-        contentKey: ByteList, contentId: ContentId, content: seq[byte]
+        contentKey: ContentKeyByteList, contentId: ContentId, content: seq[byte]
     ) {.raises: [], gcsafe.} =
       let contentKey = decode(contentKey).valueOr:
         # TODO: as this should not fail, maybe it is better to raiseAssert ?
@@ -31,7 +31,7 @@ type BeaconNetwork* = ref object
   forkDigests*: ForkDigests
   processContentLoop: Future[void]

-func toContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
+func toContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
   ok(toContentId(contentKey))

 proc validateHistoricalSummaries(

@@ -223,7 +223,7 @@ proc new*(
   )

 proc validateContent(
-    n: BeaconNetwork, content: seq[byte], contentKey: ByteList
+    n: BeaconNetwork, content: seq[byte], contentKey: ContentKeyByteList
 ): Result[void, string] =
   let key = contentKey.decode().valueOr:
     return err("Error decoding content key")
@@ -58,9 +58,9 @@ type
     of historicalSummaries:
       historicalSummariesKey*: HistoricalSummariesKey

-func encode*(contentKey: ContentKey): ByteList =
+func encode*(contentKey: ContentKey): ContentKeyByteList =
   doAssert(contentKey.contentType != unused)
-  ByteList.init(SSZ.encode(contentKey))
+  ContentKeyByteList.init(SSZ.encode(contentKey))

 proc readSszBytes*(data: openArray[byte], val: var ContentKey) {.raises: [SszError].} =
   mixin readSszValue

@@ -69,13 +69,13 @@ proc readSszBytes*(data: openArray[byte], val: var ContentKey) {.raises: [SszErr

   readSszValue(data, val)

-func decode*(contentKey: ByteList): Opt[ContentKey] =
+func decode*(contentKey: ContentKeyByteList): Opt[ContentKey] =
   try:
     Opt.some(SSZ.decode(contentKey.asSeq(), ContentKey))
   except SerializationError:
     return Opt.none(ContentKey)

-func toContentId*(contentKey: ByteList): ContentId =
+func toContentId*(contentKey: ContentKeyByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
   let idHash = sha2.sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)
@@ -42,16 +42,16 @@ type
     of dummySelector:
       dummyField: uint64

-func encode*(contentKey: ContentKey): ByteList =
-  ByteList.init(SSZ.encode(contentKey))
+func encode*(contentKey: ContentKey): ContentKeyByteList =
+  ContentKeyByteList.init(SSZ.encode(contentKey))

-func decode*(contentKey: ByteList): Option[ContentKey] =
+func decode*(contentKey: ContentKeyByteList): Option[ContentKey] =
   try:
     some(SSZ.decode(contentKey.asSeq(), ContentKey))
   except SerializationError:
     return none[ContentKey]()

-func toContentId*(contentKey: ByteList): ContentId =
+func toContentId*(contentKey: ContentKeyByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
   let idHash = sha2.sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)
@@ -217,7 +217,8 @@ func buildHeaderWithProof*(

   ok(
     BlockHeaderWithProof(
-      header: ByteList.init(rlp.encode(header)), proof: BlockHeaderProof.init(proof)
+      header: ByteList[2048].init(rlp.encode(header)),
+      proof: BlockHeaderProof.init(proof),
     )
   )

@@ -63,16 +63,16 @@ func init*(T: type ContentKey, contentType: ContentType, hash: BlockHash | Diges
     contentType: contentType, epochRecordKey: EpochRecordKey(epochHash: hash)
   )

-func encode*(contentKey: ContentKey): ByteList =
-  ByteList.init(SSZ.encode(contentKey))
+func encode*(contentKey: ContentKey): ContentKeyByteList =
+  ContentKeyByteList.init(SSZ.encode(contentKey))

-func decode*(contentKey: ByteList): Opt[ContentKey] =
+func decode*(contentKey: ContentKeyByteList): Opt[ContentKey] =
   try:
     Opt.some(SSZ.decode(contentKey.asSeq(), ContentKey))
   except SerializationError:
     return Opt.none(ContentKey)

-func toContentId*(contentKey: ByteList): ContentId =
+func toContentId*(contentKey: ContentKeyByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
   let idHash = sha2.sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)
@@ -59,7 +59,7 @@ type

   Block* = (BlockHeader, BlockBody)

-func toContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
+func toContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
   ok(toContentId(contentKey))

 ## Calls to go from SSZ decoded Portal types to RLP fully decoded EL types

@@ -632,7 +632,7 @@ proc getBlock*(
     return ok(maybeBlock)

 proc validateContent(
-    n: HistoryNetwork, content: seq[byte], contentKey: ByteList
+    n: HistoryNetwork, content: seq[byte], contentKey: ContentKeyByteList
 ): Future[bool] {.async: (raises: [CancelledError]).} =
   let key = contentKey.decode().valueOr:
     return false
@@ -89,14 +89,14 @@ proc readSszBytes*(data: openArray[byte], val: var ContentKey) {.raises: [SszErr

   readSszValue(data, val)

-func encode*(contentKey: ContentKey): ByteList =
+func encode*(contentKey: ContentKey): ContentKeyByteList =
   doAssert(contentKey.contentType != unused)
-  ByteList.init(SSZ.encode(contentKey))
+  ContentKeyByteList.init(SSZ.encode(contentKey))

-func decode*(T: type ContentKey, contentKey: ByteList): Result[T, string] =
+func decode*(T: type ContentKey, contentKey: ContentKeyByteList): Result[T, string] =
   decodeSsz(contentKey.asSeq(), T)

-func toContentId*(contentKey: ByteList): ContentId =
+func toContentId*(contentKey: ContentKeyByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
   let idHash = sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)
@@ -94,7 +94,7 @@ func getParent*(offerWithKey: ContractTrieOfferWithKey): ContractTrieOfferWithKe
 proc gossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
     offerBytes: seq[byte],
     key: AccountTrieNodeKey,
     offer: AccountTrieNodeOffer,

@@ -107,7 +107,7 @@ proc gossipOffer*(
 proc gossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
     offerBytes: seq[byte],
     key: ContractTrieNodeKey,
     offer: ContractTrieNodeOffer,

@@ -120,7 +120,7 @@ proc gossipOffer*(
 proc gossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
     offerBytes: seq[byte],
     key: ContractCodeKey,
     offer: ContractCodeOffer,

@@ -135,7 +135,7 @@ proc gossipOffer*(
 proc recursiveGossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
     offerBytes: seq[byte],
     key: AccountTrieNodeKey,
     offer: AccountTrieNodeOffer,

@@ -160,7 +160,7 @@ proc recursiveGossipOffer*(
 proc recursiveGossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
     offerBytes: seq[byte],
     key: ContractTrieNodeKey,
     offer: ContractTrieNodeOffer,
@@ -34,7 +34,7 @@ type StateNetwork* = ref object
   historyNetwork: Opt[HistoryNetwork]
   validateStateIsCanonical: bool

-func toContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
+func toContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
   ok(toContentId(contentKey))

 proc new*(

@@ -147,7 +147,7 @@ proc getStateRootByBlockHash*(
 proc processOffer*(
     n: StateNetwork,
     maybeSrcNodeId: Opt[NodeId],
-    contentKeyBytes: ByteList,
+    contentKeyBytes: ContentKeyByteList,
     contentValueBytes: seq[byte],
     contentKey: AccountTrieNodeKey | ContractTrieNodeKey | ContractCodeKey,
     V: type ContentOfferType,
@@ -26,7 +26,7 @@ const
   perContentKeyOverhead* = 4

 type
-  ContentKeysList* = List[ByteList, contentKeysLimit]
+  ContentKeysList* = List[ContentKeyByteList, contentKeysLimit]
   ContentKeysBitList* = BitList[contentKeysLimit]

   # TODO: should become part of the specific networks, considering it is custom.

@@ -50,31 +50,32 @@ type

   PingMessage* = object
     enrSeq*: uint64
-    customPayload*: ByteList
+    customPayload*: ByteList[2048]

   PongMessage* = object
     enrSeq*: uint64
-    customPayload*: ByteList
+    customPayload*: ByteList[2048]

   FindNodesMessage* = object
     distances*: List[uint16, 256]

   NodesMessage* = object
     total*: uint8
-    enrs*: List[ByteList, 32] # ByteList here is the rlp encoded ENR. This could
+    enrs*: List[ByteList[2048], 32]
+      # ByteList[2048] here is the rlp encoded ENR. This could
       # also be limited to ~300 bytes instead of 2048

   FindContentMessage* = object
-    contentKey*: ByteList
+    contentKey*: ContentKeyByteList

   ContentMessage* = object
     case contentMessageType*: ContentMessageType
     of connectionIdType:
       connectionId*: Bytes2
     of contentType:
-      content*: ByteList
+      content*: ByteList[2048]
     of enrsType:
-      enrs*: List[ByteList, 32]
+      enrs*: List[ByteList[2048], 32]

   OfferMessage* = object
     contentKeys*: ContentKeysList
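For illustration (not part of the diff), the wire types above are exercised exactly this way in the test changes further down: a content key is wrapped in a `ContentKeyByteList` and carried in a `FindContentMessage`. A minimal sketch, assuming the `encodeMessage` and `hexToSeqByte` helpers used in those tests:

  # Sketch only, mirroring the "FindContent Request" test case in this commit.
  let
    contentKey = ContentKeyByteList.init(hexToSeqByte("0x706f7274616c"))
    fc = FindContentMessage(contentKey: contentKey)
    encoded = encodeMessage(fc)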
@@ -145,22 +145,22 @@ const

 type
   ToContentIdHandler* =
-    proc(contentKey: ByteList): results.Opt[ContentId] {.raises: [], gcsafe.}
+    proc(contentKey: ContentKeyByteList): results.Opt[ContentId] {.raises: [], gcsafe.}

   DbGetHandler* = proc(
-    contentKey: ByteList, contentId: ContentId
+    contentKey: ContentKeyByteList, contentId: ContentId
   ): results.Opt[seq[byte]] {.raises: [], gcsafe.}

-  DbStoreHandler* = proc(contentKey: ByteList, contentId: ContentId, content: seq[byte]) {.
-    raises: [], gcsafe
-  .}
+  DbStoreHandler* = proc(
+    contentKey: ContentKeyByteList, contentId: ContentId, content: seq[byte]
+  ) {.raises: [], gcsafe.}

   PortalProtocolId* = array[2, byte]

   RadiusCache* = LRUCache[NodeId, UInt256]

   ContentKV* = object
-    contentKey*: ByteList
+    contentKey*: ContentKeyByteList
     content*: seq[byte]

   OfferRequestType = enum

@@ -239,7 +239,7 @@ type
     utpTransfer*: bool
     trace*: TraceObject

-func init*(T: type ContentKV, contentKey: ByteList, content: seq[byte]): T =
+func init*(T: type ContentKV, contentKey: ContentKeyByteList, content: seq[byte]): T =
   ContentKV(contentKey: contentKey, content: content)

 func init*(

@@ -324,11 +324,11 @@ func inRange*(p: PortalProtocol, contentId: ContentId): bool =

 func truncateEnrs(
     nodes: seq[Node], maxSize: int, enrOverhead: int
-): List[ByteList, 32] =
-  var enrs: List[ByteList, 32]
+): List[ByteList[2048], 32] =
+  var enrs: List[ByteList[2048], 32]
   var totalSize = 0
   for n in nodes:
-    let enr = ByteList.init(n.record.raw)
+    let enr = ByteList[2048].init(n.record.raw)
     if totalSize + enr.len() + enrOverhead <= maxSize:
       let res = enrs.add(enr)
       # With max payload of discv5 and the sizes of ENRs this should not occur.
@@ -354,19 +354,19 @@ func handlePing(p: PortalProtocol, ping: PingMessage, srcId: NodeId): seq[byte]
   let customPayload = CustomPayload(dataRadius: p.dataRadius)
   let p = PongMessage(
     enrSeq: p.localNode.record.seqNum,
-    customPayload: ByteList(SSZ.encode(customPayload)),
+    customPayload: ByteList[2048](SSZ.encode(customPayload)),
   )

   encodeMessage(p)

 proc handleFindNodes(p: PortalProtocol, fn: FindNodesMessage): seq[byte] =
   if fn.distances.len == 0:
-    let enrs = List[ByteList, 32](@[])
+    let enrs = List[ByteList[2048], 32](@[])
     encodeMessage(NodesMessage(total: 1, enrs: enrs))
   elif fn.distances.contains(0):
     # A request for our own record.
-    let enr = ByteList(rlp.encode(p.localNode.record))
-    encodeMessage(NodesMessage(total: 1, enrs: List[ByteList, 32](@[enr])))
+    let enr = ByteList[2048](rlp.encode(p.localNode.record))
+    encodeMessage(NodesMessage(total: 1, enrs: List[ByteList[2048], 32](@[enr])))
   else:
     let distances = fn.distances.asSeq()
     if distances.all(

@@ -393,7 +393,7 @@ proc handleFindNodes(p: PortalProtocol, fn: FindNodesMessage): seq[byte] =
       encodeMessage(NodesMessage(total: 1, enrs: enrs))
     else:
       # invalid request, send empty back
-      let enrs = List[ByteList, 32](@[])
+      let enrs = List[ByteList[2048], 32](@[])
       encodeMessage(NodesMessage(total: 1, enrs: enrs))

 proc handleFindContent(

@@ -422,7 +422,9 @@ proc handleFindContent(
     let content = contentResult.get()
     if content.len <= maxPayloadSize:
       return encodeMessage(
-        ContentMessage(contentMessageType: contentType, content: ByteList(content))
+        ContentMessage(
+          contentMessageType: contentType, content: ByteList[2048](content)
+        )
       )
     else:
       let connectionId = p.stream.addContentRequest(srcId, content)
@@ -648,7 +650,7 @@ proc pingImpl*(
   let customPayload = CustomPayload(dataRadius: p.dataRadius)
   let ping = PingMessage(
     enrSeq: p.localNode.record.seqNum,
-    customPayload: ByteList(SSZ.encode(customPayload)),
+    customPayload: ByteList[2048](SSZ.encode(customPayload)),
   )

   return await reqResponse[PingMessage, PongMessage](p, dst, ping)

@@ -662,7 +664,7 @@ proc findNodesImpl*(
   return await reqResponse[FindNodesMessage, NodesMessage](p, dst, fn)

 proc findContentImpl*(
-    p: PortalProtocol, dst: Node, contentKey: ByteList
+    p: PortalProtocol, dst: Node, contentKey: ContentKeyByteList
 ): Future[PortalResult[ContentMessage]] {.async: (raises: [CancelledError]).} =
   let fc = FindContentMessage(contentKey: contentKey)

@@ -675,7 +677,9 @@ proc offerImpl*(

   return await reqResponse[OfferMessage, AcceptMessage](p, dst, offer)

-proc recordsFromBytes*(rawRecords: List[ByteList, 32]): PortalResult[seq[Record]] =
+proc recordsFromBytes*(
+    rawRecords: List[ByteList[2048], 32]
+): PortalResult[seq[Record]] =
   var records: seq[Record]
   for r in rawRecords.asSeq():
     let record = enr.Record.fromBytes(r.asSeq()).valueOr:

@@ -724,7 +728,7 @@ proc findNodes*(
     return err(nodesMessage.error)

 proc findContent*(
-    p: PortalProtocol, dst: Node, contentKey: ByteList
+    p: PortalProtocol, dst: Node, contentKey: ContentKeyByteList
 ): Future[PortalResult[FoundContent]] {.async: (raises: [CancelledError]).} =
   logScope:
     node = dst
@@ -1090,7 +1094,10 @@ proc lookup*(
   return closestNodes

 proc triggerPoke*(
-    p: PortalProtocol, nodes: seq[Node], contentKey: ByteList, content: seq[byte]
+    p: PortalProtocol,
+    nodes: seq[Node],
+    contentKey: ContentKeyByteList,
+    content: seq[byte],
 ) =
   ## In order to properly test gossip mechanisms (e.g. in Portal Hive),
   ## we need the option to turn off the POKE functionality as it influences

@@ -1118,7 +1125,7 @@ proc triggerPoke*(
 # networks will probably be very similar. Extract lookup function to separate module
 # and make it more generaic
 proc contentLookup*(
-    p: PortalProtocol, target: ByteList, targetId: UInt256
+    p: PortalProtocol, target: ContentKeyByteList, targetId: UInt256
 ): Future[Opt[ContentLookupResult]] {.async: (raises: [CancelledError]).} =
   ## Perform a lookup for the given target, return the closest n nodes to the
   ## target. Maximum value for n is `BUCKET_SIZE`.

@@ -1222,7 +1229,7 @@ proc contentLookup*(
     return Opt.none(ContentLookupResult)

 proc traceContentLookup*(
-    p: PortalProtocol, target: ByteList, targetId: UInt256
+    p: PortalProtocol, target: ContentKeyByteList, targetId: UInt256
 ): Future[TraceContentLookupResult] {.async: (raises: [CancelledError]).} =
   ## Perform a lookup for the given target, return the closest n nodes to the
   ## target. Maximum value for n is `BUCKET_SIZE`.

@@ -1587,7 +1594,10 @@ proc randomGossipDiscardPeers*(
   discard await p.randomGossip(srcNodeId, contentKeys, content)

 proc storeContent*(
-    p: PortalProtocol, contentKey: ByteList, contentId: ContentId, content: seq[byte]
+    p: PortalProtocol,
+    contentKey: ContentKeyByteList,
+    contentId: ContentId,
+    content: seq[byte],
 ) =
   doAssert(p.dbPut != nil)
   p.dbPut(contentKey, contentId, content)
@@ -136,7 +136,7 @@ proc installPortalApiHandlers*(
     let
       node = toNodeWithAddress(enr)
       foundContentResult =
-        await p.findContent(node, ByteList.init(hexToSeqByte(contentKey)))
+        await p.findContent(node, ContentKeyByteList.init(hexToSeqByte(contentKey)))

     if foundContentResult.isErr():
       raise newException(ValueError, $foundContentResult.error)

@@ -163,7 +163,7 @@ proc installPortalApiHandlers*(
       node = toNodeWithAddress(enr)
       key = hexToSeqByte(contentKey)
       content = hexToSeqByte(contentValue)
-      contentKV = ContentKV(contentKey: ByteList.init(key), content: content)
+      contentKV = ContentKV(contentKey: ContentKeyByteList.init(key), content: content)
      res = await p.offer(node, @[contentKV])

     if res.isOk():

@@ -184,7 +184,7 @@ proc installPortalApiHandlers*(
     contentKey: string
   ) -> ContentInfo:
     let
-      key = ByteList.init(hexToSeqByte(contentKey))
+      key = ContentKeyByteList.init(hexToSeqByte(contentKey))
       contentId = p.toContentId(key).valueOr:
         raise (ref errors.InvalidRequest)(code: -32602, msg: "Invalid content key")

@@ -199,7 +199,7 @@ proc installPortalApiHandlers*(
     contentKey: string
   ) -> TraceContentLookupResult:
     let
-      key = ByteList.init(hexToSeqByte(contentKey))
+      key = ContentKeyByteList.init(hexToSeqByte(contentKey))
       contentId = p.toContentId(key).valueOr:
         raise (ref errors.InvalidRequest)(code: -32602, msg: "Invalid content key")

@@ -217,7 +217,7 @@ proc installPortalApiHandlers*(
     contentKey: string, contentValue: string
   ) -> bool:
     let
-      key = ByteList.init(hexToSeqByte(contentKey))
+      key = ContentKeyByteList.init(hexToSeqByte(contentKey))
       contentValueBytes = hexToSeqByte(contentValue)

     let valueToStore =

@@ -252,7 +252,7 @@ proc installPortalApiHandlers*(

   rpcServer.rpc("portal_" & network & "LocalContent") do(contentKey: string) -> string:
     let
-      key = ByteList.init(hexToSeqByte(contentKey))
+      key = ContentKeyByteList.init(hexToSeqByte(contentKey))
       contentId = p.toContentId(key).valueOr:
         raise (ref errors.InvalidRequest)(code: -32602, msg: "Invalid content key")

@@ -267,7 +267,7 @@ proc installPortalApiHandlers*(
     let
       key = hexToSeqByte(contentKey)
       content = hexToSeqByte(contentValue)
-      contentKeys = ContentKeysList(@[ByteList.init(key)])
+      contentKeys = ContentKeysList(@[ContentKeyByteList.init(key)])
       numberOfPeers =
         await p.neighborhoodGossip(Opt.none(NodeId), contentKeys, @[content])

@@ -279,7 +279,7 @@ proc installPortalApiHandlers*(
     let
       key = hexToSeqByte(contentKey)
       content = hexToSeqByte(contentValue)
-      contentKeys = ContentKeysList(@[ByteList.init(key)])
+      contentKeys = ContentKeysList(@[ContentKeyByteList.init(key)])
       numberOfPeers = await p.randomGossip(Opt.none(NodeId), contentKeys, @[content])

     return numberOfPeers
@@ -279,25 +279,25 @@ suite "Beacon Content Encodings":

 suite "Beacon ContentKey Encodings ":
   test "Invalid prefix - 0 value":
-    let encoded = ByteList.init(@[byte 0x00])
+    let encoded = ContentKeyByteList.init(@[byte 0x00])
     let decoded = decode(encoded)

     check decoded.isNone()

   test "Invalid prefix - before valid range":
-    let encoded = ByteList.init(@[byte 0x01])
+    let encoded = ContentKeyByteList.init(@[byte 0x01])
     let decoded = decode(encoded)

     check decoded.isNone()

   test "Invalid prefix - after valid range":
-    let encoded = ByteList.init(@[byte 0x14])
+    let encoded = ContentKeyByteList.init(@[byte 0x14])
     let decoded = decode(encoded)

     check decoded.isNone()

   test "Invalid key - empty input":
-    let encoded = ByteList.init(@[])
+    let encoded = ContentKeyByteList.init(@[])
     let decoded = decode(encoded)

     check decoded.isNone()
@@ -24,7 +24,7 @@ suite "Portal Wire Protocol Message Encodings":
       dataRadius = UInt256.high() - 1 # Full radius - 1
       enrSeq = 1'u64
       # Can be any custom payload, testing with just dataRadius here.
-      customPayload = ByteList(SSZ.encode(CustomPayload(dataRadius: dataRadius)))
+      customPayload = ByteList[2048](SSZ.encode(CustomPayload(dataRadius: dataRadius)))
       p = PingMessage(enrSeq: enrSeq, customPayload: customPayload)

     let encoded = encodeMessage(p)

@@ -44,7 +44,7 @@ suite "Portal Wire Protocol Message Encodings":
       dataRadius = UInt256.high() div 2.stuint(256) # Radius of half the UInt256
       enrSeq = 1'u64
       # Can be any custom payload, testing with just dataRadius here.
-      customPayload = ByteList(SSZ.encode(CustomPayload(dataRadius: dataRadius)))
+      customPayload = ByteList[2048](SSZ.encode(CustomPayload(dataRadius: dataRadius)))
       p = PongMessage(enrSeq: enrSeq, customPayload: customPayload)

     let encoded = encodeMessage(p)

@@ -109,7 +109,9 @@ suite "Portal Wire Protocol Message Encodings":
       e2 = res2.value
       total = 0x1'u8
       n = NodesMessage(
-        total: total, enrs: List[ByteList, 32](@[ByteList(e1.raw), ByteList(e2.raw)])
+        total: total,
+        enrs:
+          List[ByteList[2048], 32](@[ByteList[2048](e1.raw), ByteList[2048](e2.raw)]),
       )

     let encoded = encodeMessage(n)

@@ -124,13 +126,13 @@ suite "Portal Wire Protocol Message Encodings":
       message.kind == nodes
       message.nodes.total == total
       message.nodes.enrs.len() == 2
-      message.nodes.enrs[0] == ByteList(e1.raw)
-      message.nodes.enrs[1] == ByteList(e2.raw)
+      message.nodes.enrs[0] == ByteList[2048](e1.raw)
+      message.nodes.enrs[1] == ByteList[2048](e2.raw)

   test "FindContent Request":
     const contentKeyString = "0x706f7274616c"
     let
-      contentKey = ByteList.init(hexToSeqByte(contentKeyString))
+      contentKey = ContentKeyByteList.init(hexToSeqByte(contentKeyString))
       fc = FindContentMessage(contentKey: contentKey)

     let encoded = encodeMessage(fc)

@@ -165,7 +167,7 @@ suite "Portal Wire Protocol Message Encodings":
   test "Content Response - content payload":
     const contentString = "0x7468652063616b652069732061206c6965"
     let
-      content = ByteList(hexToSeqByte(contentString))
+      content = ByteList[2048](hexToSeqByte(contentString))
       c = ContentMessage(contentMessageType: contentType, content: content)

     let encoded = encodeMessage(c)

@@ -195,7 +197,7 @@ suite "Portal Wire Protocol Message Encodings":
     let
       e1 = res1.value
       e2 = res2.value
-      enrs = List[ByteList, 32](@[ByteList(e1.raw), ByteList(e2.raw)])
+      enrs = List[ByteList[2048], 32](@[ByteList[2048](e1.raw), ByteList[2048](e2.raw)])
       c = ContentMessage(contentMessageType: enrsType, enrs: enrs)

     let encoded = encodeMessage(c)

@@ -210,12 +212,12 @@ suite "Portal Wire Protocol Message Encodings":
       message.kind == MessageKind.content
       message.content.contentMessageType == enrsType
       message.content.enrs.len() == 2
-      message.content.enrs[0] == ByteList(e1.raw)
-      message.content.enrs[1] == ByteList(e2.raw)
+      message.content.enrs[0] == ByteList[2048](e1.raw)
+      message.content.enrs[1] == ByteList[2048](e2.raw)

   test "Content Response - empty enrs":
     let
-      enrs = List[ByteList, 32].init(@[])
+      enrs = List[ByteList[2048], 32].init(@[])
       c = ContentMessage(contentMessageType: enrsType, enrs: enrs)
     let encoded = encodeMessage(c)
     check encoded.toHex == "0502"

@@ -230,7 +232,8 @@ suite "Portal Wire Protocol Message Encodings":

   test "Offer Request":
     let
-      contentKeys = ContentKeysList(List(@[ByteList(@[byte 0x01, 0x02, 0x03])]))
+      contentKeys =
+        ContentKeysList(List(@[ContentKeyByteList(@[byte 0x01, 0x02, 0x03])]))
       o = OfferMessage(contentKeys: contentKeys)

     let encoded = encodeMessage(o)
@@ -177,7 +177,7 @@ proc mockBlockHashToStateRoot*(
     blockHeader = BlockHeader(stateRoot: stateRoot)
     headerRlp = rlp.encode(blockHeader)
     blockHeaderWithProof = BlockHeaderWithProof(
-      header: ByteList.init(headerRlp), proof: BlockHeaderProof.init()
+      header: ByteList[2048].init(headerRlp), proof: BlockHeaderProof.init()
     )
     contentKeyBytes = history_content.ContentKey
       .init(history_content.ContentType.blockHeader, blockHash)
@@ -105,25 +105,25 @@ suite "State Content Keys":
       decoded.value().contractCodeKey.codeHash == codeHash

   test "Invalid prefix - 0 value":
-    let encoded = ByteList.init(@[byte 0x00])
+    let encoded = ContentKeyByteList.init(@[byte 0x00])
     let decoded = ContentKey.decode(encoded)

     check decoded.isErr()

   test "Invalid prefix - before valid range":
-    let encoded = ByteList.init(@[byte 0x01])
+    let encoded = ContentKeyByteList.init(@[byte 0x01])
     let decoded = ContentKey.decode(encoded)

     check decoded.isErr()

   test "Invalid prefix - after valid range":
-    let encoded = ByteList.init(@[byte 0x25])
+    let encoded = ContentKeyByteList.init(@[byte 0x25])
     let decoded = ContentKey.decode(encoded)

     check decoded.isErr()

   test "Invalid key - empty input":
-    let encoded = ByteList.init(@[])
+    let encoded = ContentKeyByteList.init(@[])
     let decoded = ContentKey.decode(encoded)

     check decoded.isErr()
@@ -51,7 +51,7 @@ procSuite "State Endpoints":
     let
       stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
       leafData = testData.recursive_gossip[0]
-      contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
       contentKey = ContentKey.decode(contentKeyBytes).get()
       contentId = toContentId(contentKeyBytes)
       contentValueBytes = leafData.content_value.hexToSeqByte()

@@ -72,7 +72,7 @@ procSuite "State Endpoints":

     # wait for recursive gossip to complete
     for node in testData.recursive_gossip:
-      let keyBytes = node.content_key.hexToSeqByte().ByteList
+      let keyBytes = node.content_key.hexToSeqByte().ContentKeyByteList
       await stateNode2.waitUntilContentAvailable(toContentId(keyBytes))

     let

@@ -158,7 +158,7 @@ procSuite "State Endpoints":
        testData = testCase[0]
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
        leafData = testData.recursive_gossip[0]
-        contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = leafData.content_value.hexToSeqByte()

@@ -183,7 +183,7 @@ procSuite "State Endpoints":
        testData = testCase[1]
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
        leafData = testData.recursive_gossip[0]
-        contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = leafData.content_value.hexToSeqByte()

@@ -204,7 +204,7 @@ procSuite "State Endpoints":

      # wait for recursive gossip to complete
      for node in testData.recursive_gossip:
-        let keyBytes = node.content_key.hexToSeqByte().ByteList
+        let keyBytes = node.content_key.hexToSeqByte().ContentKeyByteList
        await stateNode2.waitUntilContentAvailable(toContentId(keyBytes))

      let

@@ -232,7 +232,7 @@ procSuite "State Endpoints":
        raiseAssert "Cannot read test vector: " & error
        testData = testCase[0]
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_offer.hexToSeqByte()
@@ -25,7 +25,8 @@ suite "State Gossip getParent - Test Vectors":
    for i, testData in testCase:
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

-      let key = ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+      let key =
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let offer =
        AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -40,7 +41,7 @@ suite "State Gossip getParent - Test Vectors":
          key.accountTrieNodeKey.path.unpackNibbles().len()
        parentOffer.proof.len() == offer.proof.len() - 1
        parentKey.toContentKey().encode() ==
-          testData.recursive_gossip.content_key.hexToSeqByte().ByteList
+          testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList
        parentOffer.encode() ==
          testData.recursive_gossip.content_value_offer.hexToSeqByte()
        parentOffer.toRetrievalValue().encode() ==

@@ -55,7 +56,8 @@ suite "State Gossip getParent - Test Vectors":
    for i, testData in testCase:
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

-      let key = ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+      let key =
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let offer =
        ContractTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -70,7 +72,7 @@ suite "State Gossip getParent - Test Vectors":
          key.contractTrieNodeKey.path.unpackNibbles().len()
        parentOffer.storageProof.len() == offer.storageProof.len() - 1
        parentKey.toContentKey().encode() ==
-          testData.recursive_gossip.content_key.hexToSeqByte().ByteList
+          testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList
        parentOffer.encode() ==
          testData.recursive_gossip.content_value_offer.hexToSeqByte()
        parentOffer.toRetrievalValue().encode() ==

@@ -89,7 +91,9 @@ suite "State Gossip getParent - Test Vectors":
      for j in 0 ..< testData.recursive_gossip.high:
        let
          key = ContentKey
-            .decode(testData.recursive_gossip[j].content_key.hexToSeqByte().ByteList)
+            .decode(
+              testData.recursive_gossip[j].content_key.hexToSeqByte().ContentKeyByteList
+            )
            .get()
          offer = AccountTrieNodeOffer
            .decode(testData.recursive_gossip[j].content_value.hexToSeqByte())

@@ -101,7 +105,7 @@ suite "State Gossip getParent - Test Vectors":
            key.accountTrieNodeKey.path.unpackNibbles().len()
          parentOffer.proof.len() == offer.proof.len() - 1
          parentKey.toContentKey().encode() ==
-            testData.recursive_gossip[j + 1].content_key.hexToSeqByte().ByteList
+            testData.recursive_gossip[j + 1].content_key.hexToSeqByte().ContentKeyByteList
          parentOffer.encode() ==
            testData.recursive_gossip[j + 1].content_value.hexToSeqByte()

@@ -118,7 +122,9 @@ suite "State Gossip getParent - Test Vectors":
      for j in 0 ..< testData.recursive_gossip.high:
        let
          key = ContentKey
-            .decode(testData.recursive_gossip[j].content_key.hexToSeqByte().ByteList)
+            .decode(
+              testData.recursive_gossip[j].content_key.hexToSeqByte().ContentKeyByteList
+            )
            .get()
          offer = ContractTrieNodeOffer
            .decode(testData.recursive_gossip[j].content_value.hexToSeqByte())

@@ -130,6 +136,6 @@ suite "State Gossip getParent - Test Vectors":
            key.contractTrieNodeKey.path.unpackNibbles().len()
          parentOffer.storageProof.len() == offer.storageProof.len() - 1
          parentKey.toContentKey().encode() ==
-            testData.recursive_gossip[j + 1].content_key.hexToSeqByte().ByteList
+            testData.recursive_gossip[j + 1].content_key.hexToSeqByte().ContentKeyByteList
          parentOffer.encode() ==
            testData.recursive_gossip[j + 1].content_value.hexToSeqByte()
@@ -47,14 +47,14 @@ procSuite "State Gossip - Gossip Offer":

   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
     contentValue = AccountTrieNodeOffer.decode(contentValueBytes).get()

     parentContentKeyBytes =
-      testData.recursive_gossip.content_key.hexToSeqByte().ByteList
+      testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList
     parentContentKey = ContentKey.decode(parentContentKeyBytes).get()
     parentContentId = toContentId(parentContentKeyBytes)
     parentContentValueBytes =
@@ -120,14 +120,14 @@ procSuite "State Gossip - Gossip Offer":

   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
     contentValue = ContractTrieNodeOffer.decode(contentValueBytes).get()

     parentContentKeyBytes =
-      testData.recursive_gossip.content_key.hexToSeqByte().ByteList
+      testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList
     parentContentKey = ContentKey.decode(parentContentKeyBytes).get()
     parentContentId = toContentId(parentContentKeyBytes)
     parentContentValueBytes =
@@ -191,7 +191,7 @@ procSuite "State Gossip - Gossip Offer":
 for i, testData in testCase:
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
@@ -251,7 +251,7 @@ procSuite "State Gossip - Gossip Offer":
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
     leafData = testData.recursive_gossip[0]
-    contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = leafData.content_value.hexToSeqByte()
@@ -275,13 +275,13 @@ procSuite "State Gossip - Gossip Offer":

   # wait for recursive gossip to complete
   for node in testData.recursive_gossip:
-    let keyBytes = node.content_key.hexToSeqByte().ByteList
+    let keyBytes = node.content_key.hexToSeqByte().ContentKeyByteList
     await stateNode2.waitUntilContentAvailable(toContentId(keyBytes))

   # check that all nodes were received by both state instances
   for kv in testData.recursive_gossip:
     let
-      expectedKeyBytes = kv.content_key.hexToSeqByte().ByteList
+      expectedKeyBytes = kv.content_key.hexToSeqByte().ContentKeyByteList
       expectedKey = ContentKey.decode(expectedKeyBytes).get()
       expectedId = toContentId(expectedKeyBytes)
       expectedValue =
@@ -330,7 +330,7 @@ procSuite "State Gossip - Gossip Offer":
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
     leafData = testData.recursive_gossip[0]
-    contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = leafData.content_value.hexToSeqByte()
@@ -354,13 +354,13 @@ procSuite "State Gossip - Gossip Offer":

   # wait for recursive gossip to complete
   for node in testData.recursive_gossip:
-    let keyBytes = node.content_key.hexToSeqByte().ByteList
+    let keyBytes = node.content_key.hexToSeqByte().ContentKeyByteList
     await stateNode2.waitUntilContentAvailable(toContentId(keyBytes))

   # check that all nodes were received by both state instances
   for kv in testData.recursive_gossip:
     let
-      expectedKeyBytes = kv.content_key.hexToSeqByte().ByteList
+      expectedKeyBytes = kv.content_key.hexToSeqByte().ContentKeyByteList
       expectedKey = ContentKey.decode(expectedKeyBytes).get()
       expectedId = toContentId(expectedKeyBytes)
       expectedValue =
@@ -37,7 +37,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_retrieval.hexToSeqByte()
@@ -68,7 +68,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()

   let res =
@@ -90,7 +90,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_retrieval.hexToSeqByte()
@@ -122,7 +122,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()

   let res = await stateNode1.stateNetwork.getContractTrieNode(
@@ -145,7 +145,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_retrieval.hexToSeqByte()
@@ -176,7 +176,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()

   let res =
@@ -209,7 +209,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_retrieval.hexToSeqByte()
@@ -264,7 +264,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_retrieval.hexToSeqByte()
@@ -319,7 +319,7 @@ procSuite "State Network - Get Content":

 for testData in testCase:
   let
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_retrieval.hexToSeqByte()
@@ -37,7 +37,7 @@ procSuite "State Network - Offer Content":
 for testData in testCase:
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
@@ -105,7 +105,7 @@ procSuite "State Network - Offer Content":
 for testData in testCase:
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
@@ -174,7 +174,7 @@ procSuite "State Network - Offer Content":
 for testData in testCase:
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
@@ -244,7 +244,7 @@ procSuite "State Network - Offer Content":
 for testData in testCase:
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
@@ -293,7 +293,7 @@ procSuite "State Network - Offer Content":
 for testData in testCase:
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
@@ -343,7 +343,7 @@ procSuite "State Network - Offer Content":
 for testData in testCase:
   let
     stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-    contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+    contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
     contentKey = ContentKey.decode(contentKeyBytes).get()
     contentId = toContentId(contentKeyBytes)
     contentValueBytes = testData.content_value_offer.hexToSeqByte()
@@ -26,7 +26,7 @@ suite "State Validation - Test Vectors":

 for testData in testCase:
   let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+    ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
   let contentValueRetrieval = AccountTrieNodeRetrieval
     .decode(testData.content_value_retrieval.hexToSeqByte())
     .get()
@@ -42,7 +42,7 @@ suite "State Validation - Test Vectors":

 for testData in testCase:
   let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+    ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
   var contentValueRetrieval = AccountTrieNodeRetrieval
     .decode(testData.content_value_retrieval.hexToSeqByte())
     .get()
@@ -62,7 +62,7 @@ suite "State Validation - Test Vectors":

 for testData in testCase:
   let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+    ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
   let contentValueRetrieval = ContractTrieNodeRetrieval
     .decode(testData.content_value_retrieval.hexToSeqByte())
     .get()
@@ -78,7 +78,7 @@ suite "State Validation - Test Vectors":

 for testData in testCase:
   let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+    ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
   var contentValueRetrieval = ContractTrieNodeRetrieval
     .decode(testData.content_value_retrieval.hexToSeqByte())
     .get()
@@ -98,7 +98,7 @@ suite "State Validation - Test Vectors":

 for testData in testCase:
   let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+    ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
   let contentValueRetrieval = ContractCodeRetrieval
     .decode(testData.content_value_retrieval.hexToSeqByte())
     .get()
@@ -114,7 +114,7 @@ suite "State Validation - Test Vectors":

 for testData in testCase:
   let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+    ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
   var contentValueRetrieval = ContractCodeRetrieval
     .decode(testData.content_value_retrieval.hexToSeqByte())
     .get()
@@ -138,8 +138,9 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   let contentValueOffer =
     AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -153,7 +154,7 @@ suite "State Validation - Test Vectors":
     continue # second test case only has root node and no recursive gossip

   let contentKey = ContentKey
-    .decode(testData.recursive_gossip.content_key.hexToSeqByte().ByteList)
+    .decode(testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList)
     .get()
   let contentValueOffer = AccountTrieNodeOffer
     .decode(testData.recursive_gossip.content_value_offer.hexToSeqByte())
@@ -180,7 +181,7 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(stateRoots[i].hexToSeqByte())

 let contentKey =
-   ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+   ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
 let contentValueOffer =
   AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -201,7 +202,7 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 let contentKey =
-   ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+   ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
 var contentValueOffer =
   AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -220,7 +221,7 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 let contentKey =
-   ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+   ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
 var contentValueOffer =
   AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -237,7 +238,7 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 let contentKey =
-   ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+   ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
 var contentValueOffer =
   AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -261,8 +262,9 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   let contentValueOffer = ContractTrieNodeOffer
     .decode(testData.content_value_offer.hexToSeqByte())
     .get()
@@ -277,7 +279,7 @@ suite "State Validation - Test Vectors":
     continue # second test case has no recursive gossip

   let contentKey = ContentKey
-    .decode(testData.recursive_gossip.content_key.hexToSeqByte().ByteList)
+    .decode(testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList)
     .get()
   let contentValueOffer = ContractTrieNodeOffer
     .decode(testData.recursive_gossip.content_value_offer.hexToSeqByte())
@@ -303,7 +305,7 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(stateRoots[i].hexToSeqByte())

 let contentKey =
-   ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+   ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
 let contentValueOffer =
   ContractTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -324,8 +326,9 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer = ContractTrieNodeOffer
     .decode(testData.content_value_offer.hexToSeqByte())
     .get()
@@ -340,8 +343,9 @@ suite "State Validation - Test Vectors":
     res.error() == "hash of proof root node doesn't match the expected root hash"

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer = ContractTrieNodeOffer
     .decode(testData.content_value_offer.hexToSeqByte())
     .get()
@@ -356,8 +360,9 @@ suite "State Validation - Test Vectors":
     res.error() == "hash of proof root node doesn't match the expected root hash"

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer = ContractTrieNodeOffer
     .decode(testData.content_value_offer.hexToSeqByte())
     .get()
@@ -371,8 +376,9 @@ suite "State Validation - Test Vectors":
     .isErr()

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer = ContractTrieNodeOffer
     .decode(testData.content_value_offer.hexToSeqByte())
     .get()
@@ -386,8 +392,9 @@ suite "State Validation - Test Vectors":
     .isErr()

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer = ContractTrieNodeOffer
     .decode(testData.content_value_offer.hexToSeqByte())
     .get()
@@ -412,7 +419,7 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 let contentKey =
-   ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+   ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
 let contentValueOffer =
   ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -434,7 +441,7 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(stateRoots[i].hexToSeqByte())

 let contentKey =
-   ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+   ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
 let contentValueOffer =
   ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -455,8 +462,9 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer =
     ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -470,8 +478,9 @@ suite "State Validation - Test Vectors":
     res.error() == "hash of proof root node doesn't match the expected root hash"

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer =
     ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -486,8 +495,9 @@ suite "State Validation - Test Vectors":
     "hash of bytecode doesn't match the code hash in the account proof"

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer =
     ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -500,8 +510,9 @@ suite "State Validation - Test Vectors":
     .isErr()

 block:
-  let contentKey =
-    ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+  let contentKey = ContentKey
+    .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+    .get()
   var contentValueOffer =
     ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -535,7 +546,8 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(stateRoots[i].hexToSeqByte())

 for kv in testData.recursive_gossip:
-  let contentKey = ContentKey.decode(kv.content_key.hexToSeqByte().ByteList).get()
+  let contentKey =
+    ContentKey.decode(kv.content_key.hexToSeqByte().ContentKeyByteList).get()
   let contentValueOffer =
     AccountTrieNodeOffer.decode(kv.content_value.hexToSeqByte()).get()

@@ -558,7 +570,8 @@ suite "State Validation - Test Vectors":
 var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

 for kv in testData.recursive_gossip:
-  let contentKey = ContentKey.decode(kv.content_key.hexToSeqByte().ByteList).get()
+  let contentKey =
+    ContentKey.decode(kv.content_key.hexToSeqByte().ContentKeyByteList).get()
   let contentValueOffer =
     ContractTrieNodeOffer.decode(kv.content_value.hexToSeqByte()).get()

@@ -22,7 +22,7 @@ import

 const protocolId = [byte 0x50, 0x00]

-proc toContentId(contentKey: ByteList): results.Opt[ContentId] =
+proc toContentId(contentKey: ContentKeyByteList): results.Opt[ContentId] =
   # Note: Returning sha256 digest as content id here. This content key to
   # content id derivation is different for the different content networks
   # and their content types.
@@ -74,7 +74,8 @@ procSuite "Portal Wire Protocol Tests":

   let pong = await proto1.ping(proto2.localNode)

-  let customPayload = ByteList(SSZ.encode(CustomPayload(dataRadius: UInt256.high())))
+  let customPayload =
+    ByteList[2048](SSZ.encode(CustomPayload(dataRadius: UInt256.high())))

   check:
     pong.isOk()
@@ -135,7 +136,7 @@ procSuite "Portal Wire Protocol Tests":
   check (await proto1.baseProtocol.ping(proto2.localNode)).isOk()
   check (await proto2.baseProtocol.ping(proto1.localNode)).isOk()

-  let contentKey = ByteList.init(@[1'u8])
+  let contentKey = ContentKeyByteList.init(@[1'u8])

   # content does not exist so this should provide us with the closest nodes
   # to the content, which is the only node in the routing table.
@@ -150,7 +151,7 @@ procSuite "Portal Wire Protocol Tests":

 asyncTest "Offer/Accept":
   let (proto1, proto2) = defaultTestSetup(rng)
-  let contentKeys = ContentKeysList(@[ByteList(@[byte 0x01, 0x02, 0x03])])
+  let contentKeys = ContentKeysList(@[ContentKeyByteList(@[byte 0x01, 0x02, 0x03])])

   let accept = await proto1.offerImpl(proto2.baseProtocol.localNode, contentKeys)

@@ -166,8 +167,9 @@ procSuite "Portal Wire Protocol Tests":
   let (proto1, proto2) = defaultTestSetup(rng)
   var content: seq[ContentKV]
   for i in 0 ..< contentKeysLimit:
-    let contentKV =
-      ContentKV(contentKey: ByteList(@[byte i]), content: repeat(byte i, 5000))
+    let contentKV = ContentKV(
+      contentKey: ContentKeyByteList(@[byte i]), content: repeat(byte i, 5000)
+    )
     content.add(contentKV)

   let res = await proto1.offer(proto2.baseProtocol.localNode, content)
@@ -343,7 +345,7 @@ procSuite "Portal Wire Protocol Tests":
   var distances: seq[UInt256] = @[]

   for i in 0 ..< 40:
-    proto1.storeContent(ByteList.init(@[uint8(i)]), u256(i), item)
+    proto1.storeContent(ByteList[2048].init(@[uint8(i)]), u256(i), item)
     distances.add(u256(i) xor proto1.localNode.id)

   distances.sort(order = SortOrder.Descending)
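The renamed test helper above still derives the test ContentId by hashing the serialized content key. A rough sketch of what such a handler can look like, assuming nimcrypto/sha2 for the digest and stint's readUintBE for the integer conversion (the exact body in the test file may differ):

  import results, stint, nimcrypto/sha2

  proc toContentId(contentKey: ContentKeyByteList): Opt[ContentId] =
    # sha256 over the SSZ byte list, read back as a big-endian UInt256
    let digest = sha256.digest(contentKey.asSeq())
    Opt.some(readUintBE[256](digest.data))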
@@ -133,7 +133,7 @@ proc asPortalBlockData*(
   )

   headerWithProof = BlockHeaderWithProof(
-    header: ByteList(rlp.encode(header)), proof: BlockHeaderProof.init()
+    header: ByteList[2048](rlp.encode(header)), proof: BlockHeaderProof.init()
   )

 var transactions: Transactions
@@ -178,7 +178,7 @@ proc asPortalBlockData*(
   )

   headerWithProof = BlockHeaderWithProof(
-    header: ByteList(rlp.encode(header)), proof: BlockHeaderProof.init()
+    header: ByteList[2048](rlp.encode(header)), proof: BlockHeaderProof.init()
   )

 var transactions: Transactions
@@ -53,7 +53,8 @@ func asPortalBlock(

 let
   headerWithProof = BlockHeaderWithProof(
-    header: ByteList(rlp.encode(ethBlock.header)), proof: BlockHeaderProof.init()
+    header: ByteList[2048](rlp.encode(ethBlock.header)),
+    proof: BlockHeaderProof.init(),
   )
   portalBody = PortalBlockBodyShanghai(
     transactions: transactions, uncles: Uncles(@[byte 0xc0]), withdrawals: withdrawals
|
|||||||
info "Lookup finished", nodes = discovered.len
|
info "Lookup finished", nodes = discovered.len
|
||||||
await sleepAsync(30.seconds)
|
await sleepAsync(30.seconds)
|
||||||
|
|
||||||
proc testContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
|
proc testContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
|
||||||
# Note: Returning a static content id here, as in practice this depends
|
# Note: Returning a static content id here, as in practice this depends
|
||||||
# on the content key to content id derivation, which is different for the
|
# on the content key to content id derivation, which is different for the
|
||||||
# different content networks. And we want these tests to be independent from
|
# different content networks. And we want these tests to be independent from
|
||||||
@ -295,7 +295,7 @@ proc run(config: PortalCliConf) =
|
|||||||
echo nodes.error
|
echo nodes.error
|
||||||
of findContent:
|
of findContent:
|
||||||
# For now just some bogus bytes
|
# For now just some bogus bytes
|
||||||
let contentKey = ByteList.init(@[1'u8])
|
let contentKey = ContentKeyByteList.init(@[1'u8])
|
||||||
|
|
||||||
let foundContent = waitFor portal.findContent(config.findContentTarget, contentKey)
|
let foundContent = waitFor portal.findContent(config.findContentTarget, contentKey)
|
||||||
|
|
||||||
|
vendor/nim-ssz-serialization (vendored submodule)
@@ -1 +1 @@
-Subproject commit b71ebc41c8e5027580be77a9707df1a64e6d9c8b
+Subproject commit 6f831b79df24af00c10e73e717cbe40d7d0e2439