Bump ssz_serialization and use ByteList[n] + add ContentKeyByteList (#2500)
parent: cfe14f1825
commit: 51cf991439
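In short: the ssz_serialization bump drops the locally defined `ByteList* = List[byte, 2048]` alias in favour of the library's parameterized `ByteList[n]`, moves fixed-size byte arrays to `ByteVector[n]`, and introduces a dedicated `ContentKeyByteList` alias for SSZ-encoded content keys. A minimal sketch condensed from the hunks below (the `ContentKey` object stands in for any of the sub-networks' content key types):

    type
      Bytes2* = ByteVector[2]
      Bytes32* = ByteVector[32]
      ContentKeyByteList* = ByteList[2048] # The encoded content key

    # Call sites that used the bare ByteList for content keys now use the alias.
    func encode*(contentKey: ContentKey): ContentKeyByteList =
      ContentKeyByteList.init(SSZ.encode(contentKey))

    func toContentId*(contentKey: ContentKeyByteList): ContentId =
      let idHash = sha256.digest(contentKey.asSeq())
      readUintBE[256](idHash.data)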
@@ -7,21 +7,18 @@
 {.push raises: [].}

-import results, ssz_serialization, eth/rlp, stew/byteutils, nimcrypto/hash
+import results, ssz_serialization, eth/rlp, nimcrypto/hash

 export hash

 type
-  ByteList* = List[byte, 2048]
-  Bytes2* = array[2, byte]
-  Bytes32* = array[32, byte]
+  Bytes2* = ByteVector[2]
+  Bytes32* = ByteVector[32]

   ContentId* = UInt256
+  ContentKeyByteList* = ByteList[2048] # The encoded content key
   BlockHash* = MDigest[32 * 8] # Bytes32

-func `$`*(x: ByteList): string =
-  x.asSeq.toHex()
-
 func decodeRlp*(input: openArray[byte], T: type): Result[T, string] =
   try:
     ok(rlp.decode(input, T))

@@ -447,7 +447,7 @@ proc adjustRadius(

 proc createGetHandler*(db: ContentDB): DbGetHandler =
   return (
-    proc(contentKey: ByteList, contentId: ContentId): Opt[seq[byte]] =
+    proc(contentKey: ContentKeyByteList, contentId: ContentId): Opt[seq[byte]] =
       let content = db.get(contentId).valueOr:
         return Opt.none(seq[byte])

@@ -459,7 +459,7 @@ proc createStoreHandler*(
 ): DbStoreHandler =
   return (
     proc(
-      contentKey: ByteList, contentId: ContentId, content: seq[byte]
+      contentKey: ContentKeyByteList, contentId: ContentId, content: seq[byte]
     ) {.raises: [], gcsafe.} =
       # always re-check that the key is in the node range to make sure only
       # content in range is stored.

@@ -268,7 +268,7 @@ proc historyPropagateHeaders*(

 iterator headersWithProof*(
     f: Era1File, epochRecord: EpochRecordCached
-): (ByteList, seq[byte]) =
+): (ContentKeyByteList, seq[byte]) =
   for blockHeader in f.era1BlockHeaders:
     doAssert blockHeader.isPreMerge()

@@ -285,7 +285,7 @@ iterator headersWithProof*(
     yield (contentKey, contentValue)

-iterator blockContent*(f: Era1File): (ByteList, seq[byte]) =
+iterator blockContent*(f: Era1File): (ContentKeyByteList, seq[byte]) =
   for (header, body, receipts, _) in f.era1BlockTuples:
     let blockHash = header.blockHash()

@@ -288,7 +288,7 @@ proc getLastFinalityUpdate*(db: BeaconDb): Opt[ForkedLightClientFinalityUpdate]

 proc createGetHandler*(db: BeaconDb): DbGetHandler =
   return (
-    proc(contentKey: ByteList, contentId: ContentId): results.Opt[seq[byte]] =
+    proc(contentKey: ContentKeyByteList, contentId: ContentId): results.Opt[seq[byte]] =
      let contentKey = contentKey.decode().valueOr:
        # TODO: as this should not fail, maybe it is better to raiseAssert ?
        return Opt.none(seq[byte])

@@ -347,7 +347,7 @@ proc createGetHandler*(db: BeaconDb): DbGetHandler =
 proc createStoreHandler*(db: BeaconDb): DbStoreHandler =
   return (
     proc(
-      contentKey: ByteList, contentId: ContentId, content: seq[byte]
+      contentKey: ContentKeyByteList, contentId: ContentId, content: seq[byte]
     ) {.raises: [], gcsafe.} =
      let contentKey = decode(contentKey).valueOr:
        # TODO: as this should not fail, maybe it is better to raiseAssert ?

@@ -31,7 +31,7 @@ type BeaconNetwork* = ref object
   forkDigests*: ForkDigests
   processContentLoop: Future[void]

-func toContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
+func toContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
   ok(toContentId(contentKey))

 proc validateHistoricalSummaries(

@@ -223,7 +223,7 @@ proc new*(
   )

 proc validateContent(
-    n: BeaconNetwork, content: seq[byte], contentKey: ByteList
+    n: BeaconNetwork, content: seq[byte], contentKey: ContentKeyByteList
 ): Result[void, string] =
   let key = contentKey.decode().valueOr:
     return err("Error decoding content key")

@@ -58,9 +58,9 @@
     of historicalSummaries:
       historicalSummariesKey*: HistoricalSummariesKey

-func encode*(contentKey: ContentKey): ByteList =
+func encode*(contentKey: ContentKey): ContentKeyByteList =
   doAssert(contentKey.contentType != unused)
-  ByteList.init(SSZ.encode(contentKey))
+  ContentKeyByteList.init(SSZ.encode(contentKey))

 proc readSszBytes*(data: openArray[byte], val: var ContentKey) {.raises: [SszError].} =
   mixin readSszValue

@@ -69,13 +69,13 @@ proc readSszBytes*(data: openArray[byte], val: var ContentKey) {.raises: [SszErr
   readSszValue(data, val)

-func decode*(contentKey: ByteList): Opt[ContentKey] =
+func decode*(contentKey: ContentKeyByteList): Opt[ContentKey] =
   try:
     Opt.some(SSZ.decode(contentKey.asSeq(), ContentKey))
   except SerializationError:
     return Opt.none(ContentKey)

-func toContentId*(contentKey: ByteList): ContentId =
+func toContentId*(contentKey: ContentKeyByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
   let idHash = sha2.sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)

@@ -42,16 +42,16 @@
     of dummySelector:
       dummyField: uint64

-func encode*(contentKey: ContentKey): ByteList =
-  ByteList.init(SSZ.encode(contentKey))
+func encode*(contentKey: ContentKey): ContentKeyByteList =
+  ContentKeyByteList.init(SSZ.encode(contentKey))

-func decode*(contentKey: ByteList): Option[ContentKey] =
+func decode*(contentKey: ContentKeyByteList): Option[ContentKey] =
   try:
     some(SSZ.decode(contentKey.asSeq(), ContentKey))
   except SerializationError:
     return none[ContentKey]()

-func toContentId*(contentKey: ByteList): ContentId =
+func toContentId*(contentKey: ContentKeyByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
   let idHash = sha2.sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)

@@ -217,7 +217,8 @@ func buildHeaderWithProof*(

   ok(
     BlockHeaderWithProof(
-      header: ByteList.init(rlp.encode(header)), proof: BlockHeaderProof.init(proof)
+      header: ByteList[2048].init(rlp.encode(header)),
+      proof: BlockHeaderProof.init(proof),
     )
   )

@@ -63,16 +63,16 @@ func init*(T: type ContentKey, contentType: ContentType, hash: BlockHash | Diges
     contentType: contentType, epochRecordKey: EpochRecordKey(epochHash: hash)
   )

-func encode*(contentKey: ContentKey): ByteList =
-  ByteList.init(SSZ.encode(contentKey))
+func encode*(contentKey: ContentKey): ContentKeyByteList =
+  ContentKeyByteList.init(SSZ.encode(contentKey))

-func decode*(contentKey: ByteList): Opt[ContentKey] =
+func decode*(contentKey: ContentKeyByteList): Opt[ContentKey] =
   try:
     Opt.some(SSZ.decode(contentKey.asSeq(), ContentKey))
   except SerializationError:
     return Opt.none(ContentKey)

-func toContentId*(contentKey: ByteList): ContentId =
+func toContentId*(contentKey: ContentKeyByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
   let idHash = sha2.sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)

@@ -59,7 +59,7 @@

   Block* = (BlockHeader, BlockBody)

-func toContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
+func toContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
   ok(toContentId(contentKey))

 ## Calls to go from SSZ decoded Portal types to RLP fully decoded EL types

@@ -632,7 +632,7 @@ proc getBlock*(
   return ok(maybeBlock)

 proc validateContent(
-    n: HistoryNetwork, content: seq[byte], contentKey: ByteList
+    n: HistoryNetwork, content: seq[byte], contentKey: ContentKeyByteList
 ): Future[bool] {.async: (raises: [CancelledError]).} =
   let key = contentKey.decode().valueOr:
     return false

@@ -89,14 +89,14 @@ proc readSszBytes*(data: openArray[byte], val: var ContentKey) {.raises: [SszErr

   readSszValue(data, val)

-func encode*(contentKey: ContentKey): ByteList =
+func encode*(contentKey: ContentKey): ContentKeyByteList =
   doAssert(contentKey.contentType != unused)
-  ByteList.init(SSZ.encode(contentKey))
+  ContentKeyByteList.init(SSZ.encode(contentKey))

-func decode*(T: type ContentKey, contentKey: ByteList): Result[T, string] =
+func decode*(T: type ContentKey, contentKey: ContentKeyByteList): Result[T, string] =
   decodeSsz(contentKey.asSeq(), T)

-func toContentId*(contentKey: ByteList): ContentId =
+func toContentId*(contentKey: ContentKeyByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
   let idHash = sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)

@@ -94,7 +94,7 @@ func getParent*(offerWithKey: ContractTrieOfferWithKey): ContractTrieOfferWithKe
 proc gossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
     offerBytes: seq[byte],
     key: AccountTrieNodeKey,
     offer: AccountTrieNodeOffer,

@@ -107,7 +107,7 @@ proc gossipOffer*(
 proc gossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
    offerBytes: seq[byte],
    key: ContractTrieNodeKey,
    offer: ContractTrieNodeOffer,

@@ -120,7 +120,7 @@ proc gossipOffer*(
 proc gossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
    offerBytes: seq[byte],
    key: ContractCodeKey,
    offer: ContractCodeOffer,

@@ -135,7 +135,7 @@ proc gossipOffer*(
 proc recursiveGossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
    offerBytes: seq[byte],
    key: AccountTrieNodeKey,
    offer: AccountTrieNodeOffer,

@@ -160,7 +160,7 @@ proc recursiveGossipOffer*(
 proc recursiveGossipOffer*(
     p: PortalProtocol,
     srcNodeId: Opt[NodeId],
-    keyBytes: ByteList,
+    keyBytes: ContentKeyByteList,
    offerBytes: seq[byte],
    key: ContractTrieNodeKey,
    offer: ContractTrieNodeOffer,

@@ -34,7 +34,7 @@ type StateNetwork* = ref object
   historyNetwork: Opt[HistoryNetwork]
   validateStateIsCanonical: bool

-func toContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
+func toContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
   ok(toContentId(contentKey))

 proc new*(

@@ -147,7 +147,7 @@ proc getStateRootByBlockHash*(
 proc processOffer*(
     n: StateNetwork,
     maybeSrcNodeId: Opt[NodeId],
-    contentKeyBytes: ByteList,
+    contentKeyBytes: ContentKeyByteList,
     contentValueBytes: seq[byte],
     contentKey: AccountTrieNodeKey | ContractTrieNodeKey | ContractCodeKey,
     V: type ContentOfferType,

@@ -26,7 +26,7 @@ const
   perContentKeyOverhead* = 4

 type
-  ContentKeysList* = List[ByteList, contentKeysLimit]
+  ContentKeysList* = List[ContentKeyByteList, contentKeysLimit]
   ContentKeysBitList* = BitList[contentKeysLimit]

   # TODO: should become part of the specific networks, considering it is custom.

@@ -50,31 +50,32 @@
   PingMessage* = object
     enrSeq*: uint64
-    customPayload*: ByteList
+    customPayload*: ByteList[2048]

   PongMessage* = object
     enrSeq*: uint64
-    customPayload*: ByteList
+    customPayload*: ByteList[2048]

   FindNodesMessage* = object
     distances*: List[uint16, 256]

   NodesMessage* = object
     total*: uint8
-    enrs*: List[ByteList, 32] # ByteList here is the rlp encoded ENR. This could
+    enrs*: List[ByteList[2048], 32]
+      # ByteList[2048] here is the rlp encoded ENR. This could
       # also be limited to ~300 bytes instead of 2048

   FindContentMessage* = object
-    contentKey*: ByteList
+    contentKey*: ContentKeyByteList

   ContentMessage* = object
     case contentMessageType*: ContentMessageType
     of connectionIdType:
       connectionId*: Bytes2
     of contentType:
-      content*: ByteList
+      content*: ByteList[2048]
     of enrsType:
-      enrs*: List[ByteList, 32]
+      enrs*: List[ByteList[2048], 32]

   OfferMessage* = object
     contentKeys*: ContentKeysList

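For illustration, with the message fields above now carrying the explicitly sized `ByteList[2048]`, building and encoding a ping looks roughly like this (condensed from the handlePing/pingImpl hunks that follow; `CustomPayload` and `encodeMessage` are the existing wire-protocol helpers):

    let customPayload = CustomPayload(dataRadius: p.dataRadius)
    let ping = PingMessage(
      enrSeq: p.localNode.record.seqNum,
      customPayload: ByteList[2048](SSZ.encode(customPayload)),
    )
    let encoded = encodeMessage(ping)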
@@ -145,22 +145,22 @@ const

 type
   ToContentIdHandler* =
-    proc(contentKey: ByteList): results.Opt[ContentId] {.raises: [], gcsafe.}
+    proc(contentKey: ContentKeyByteList): results.Opt[ContentId] {.raises: [], gcsafe.}

   DbGetHandler* = proc(
-    contentKey: ByteList, contentId: ContentId
+    contentKey: ContentKeyByteList, contentId: ContentId
   ): results.Opt[seq[byte]] {.raises: [], gcsafe.}

-  DbStoreHandler* = proc(contentKey: ByteList, contentId: ContentId, content: seq[byte]) {.
-    raises: [], gcsafe
-  .}
+  DbStoreHandler* = proc(
+    contentKey: ContentKeyByteList, contentId: ContentId, content: seq[byte]
+  ) {.raises: [], gcsafe.}

   PortalProtocolId* = array[2, byte]

   RadiusCache* = LRUCache[NodeId, UInt256]

   ContentKV* = object
-    contentKey*: ByteList
+    contentKey*: ContentKeyByteList
     content*: seq[byte]

   OfferRequestType = enum

@@ -239,7 +239,7 @@
     utpTransfer*: bool
     trace*: TraceObject

-func init*(T: type ContentKV, contentKey: ByteList, content: seq[byte]): T =
+func init*(T: type ContentKV, contentKey: ContentKeyByteList, content: seq[byte]): T =
   ContentKV(contentKey: contentKey, content: content)

 func init*(

@@ -324,11 +324,11 @@ func inRange*(p: PortalProtocol, contentId: ContentId): bool =

 func truncateEnrs(
     nodes: seq[Node], maxSize: int, enrOverhead: int
-): List[ByteList, 32] =
-  var enrs: List[ByteList, 32]
+): List[ByteList[2048], 32] =
+  var enrs: List[ByteList[2048], 32]
   var totalSize = 0
   for n in nodes:
-    let enr = ByteList.init(n.record.raw)
+    let enr = ByteList[2048].init(n.record.raw)
     if totalSize + enr.len() + enrOverhead <= maxSize:
       let res = enrs.add(enr)
       # With max payload of discv5 and the sizes of ENRs this should not occur.

@@ -354,19 +354,19 @@ func handlePing(p: PortalProtocol, ping: PingMessage, srcId: NodeId): seq[byte]
   let customPayload = CustomPayload(dataRadius: p.dataRadius)
   let p = PongMessage(
     enrSeq: p.localNode.record.seqNum,
-    customPayload: ByteList(SSZ.encode(customPayload)),
+    customPayload: ByteList[2048](SSZ.encode(customPayload)),
   )

   encodeMessage(p)

 proc handleFindNodes(p: PortalProtocol, fn: FindNodesMessage): seq[byte] =
   if fn.distances.len == 0:
-    let enrs = List[ByteList, 32](@[])
+    let enrs = List[ByteList[2048], 32](@[])
     encodeMessage(NodesMessage(total: 1, enrs: enrs))
   elif fn.distances.contains(0):
     # A request for our own record.
-    let enr = ByteList(rlp.encode(p.localNode.record))
-    encodeMessage(NodesMessage(total: 1, enrs: List[ByteList, 32](@[enr])))
+    let enr = ByteList[2048](rlp.encode(p.localNode.record))
+    encodeMessage(NodesMessage(total: 1, enrs: List[ByteList[2048], 32](@[enr])))
   else:
     let distances = fn.distances.asSeq()
     if distances.all(

@@ -393,7 +393,7 @@ proc handleFindNodes(p: PortalProtocol, fn: FindNodesMessage): seq[byte] =
     encodeMessage(NodesMessage(total: 1, enrs: enrs))
   else:
     # invalid request, send empty back
-    let enrs = List[ByteList, 32](@[])
+    let enrs = List[ByteList[2048], 32](@[])
     encodeMessage(NodesMessage(total: 1, enrs: enrs))

 proc handleFindContent(

@@ -422,7 +422,9 @@ proc handleFindContent(
     let content = contentResult.get()
     if content.len <= maxPayloadSize:
       return encodeMessage(
-        ContentMessage(contentMessageType: contentType, content: ByteList(content))
+        ContentMessage(
+          contentMessageType: contentType, content: ByteList[2048](content)
+        )
       )
     else:
       let connectionId = p.stream.addContentRequest(srcId, content)

@@ -648,7 +650,7 @@ proc pingImpl*(
   let customPayload = CustomPayload(dataRadius: p.dataRadius)
   let ping = PingMessage(
     enrSeq: p.localNode.record.seqNum,
-    customPayload: ByteList(SSZ.encode(customPayload)),
+    customPayload: ByteList[2048](SSZ.encode(customPayload)),
   )

   return await reqResponse[PingMessage, PongMessage](p, dst, ping)

@@ -662,7 +664,7 @@ proc findNodesImpl*(
   return await reqResponse[FindNodesMessage, NodesMessage](p, dst, fn)

 proc findContentImpl*(
-    p: PortalProtocol, dst: Node, contentKey: ByteList
+    p: PortalProtocol, dst: Node, contentKey: ContentKeyByteList
 ): Future[PortalResult[ContentMessage]] {.async: (raises: [CancelledError]).} =
   let fc = FindContentMessage(contentKey: contentKey)

@@ -675,7 +677,9 @@ proc offerImpl*(

   return await reqResponse[OfferMessage, AcceptMessage](p, dst, offer)

-proc recordsFromBytes*(rawRecords: List[ByteList, 32]): PortalResult[seq[Record]] =
+proc recordsFromBytes*(
+    rawRecords: List[ByteList[2048], 32]
+): PortalResult[seq[Record]] =
   var records: seq[Record]
   for r in rawRecords.asSeq():
     let record = enr.Record.fromBytes(r.asSeq()).valueOr:

@@ -724,7 +728,7 @@ proc findNodes*(
   return err(nodesMessage.error)

 proc findContent*(
-    p: PortalProtocol, dst: Node, contentKey: ByteList
+    p: PortalProtocol, dst: Node, contentKey: ContentKeyByteList
 ): Future[PortalResult[FoundContent]] {.async: (raises: [CancelledError]).} =
   logScope:
     node = dst

@@ -1090,7 +1094,10 @@ proc lookup*(
   return closestNodes

 proc triggerPoke*(
-    p: PortalProtocol, nodes: seq[Node], contentKey: ByteList, content: seq[byte]
+    p: PortalProtocol,
+    nodes: seq[Node],
+    contentKey: ContentKeyByteList,
+    content: seq[byte],
 ) =
   ## In order to properly test gossip mechanisms (e.g. in Portal Hive),
   ## we need the option to turn off the POKE functionality as it influences

@@ -1118,7 +1125,7 @@ proc triggerPoke*(
 # networks will probably be very similar. Extract lookup function to separate module
 # and make it more generaic
 proc contentLookup*(
-    p: PortalProtocol, target: ByteList, targetId: UInt256
+    p: PortalProtocol, target: ContentKeyByteList, targetId: UInt256
 ): Future[Opt[ContentLookupResult]] {.async: (raises: [CancelledError]).} =
   ## Perform a lookup for the given target, return the closest n nodes to the
   ## target. Maximum value for n is `BUCKET_SIZE`.

@@ -1222,7 +1229,7 @@ proc contentLookup*(
   return Opt.none(ContentLookupResult)

 proc traceContentLookup*(
-    p: PortalProtocol, target: ByteList, targetId: UInt256
+    p: PortalProtocol, target: ContentKeyByteList, targetId: UInt256
 ): Future[TraceContentLookupResult] {.async: (raises: [CancelledError]).} =
   ## Perform a lookup for the given target, return the closest n nodes to the
   ## target. Maximum value for n is `BUCKET_SIZE`.

@@ -1587,7 +1594,10 @@ proc randomGossipDiscardPeers*(
   discard await p.randomGossip(srcNodeId, contentKeys, content)

 proc storeContent*(
-    p: PortalProtocol, contentKey: ByteList, contentId: ContentId, content: seq[byte]
+    p: PortalProtocol,
+    contentKey: ContentKeyByteList,
+    contentId: ContentId,
+    content: seq[byte],
 ) =
   doAssert(p.dbPut != nil)
   p.dbPut(contentKey, contentId, content)

@@ -136,7 +136,7 @@ proc installPortalApiHandlers*(
     let
       node = toNodeWithAddress(enr)
       foundContentResult =
-        await p.findContent(node, ByteList.init(hexToSeqByte(contentKey)))
+        await p.findContent(node, ContentKeyByteList.init(hexToSeqByte(contentKey)))

     if foundContentResult.isErr():
       raise newException(ValueError, $foundContentResult.error)

@@ -163,7 +163,7 @@
      node = toNodeWithAddress(enr)
      key = hexToSeqByte(contentKey)
      content = hexToSeqByte(contentValue)
-      contentKV = ContentKV(contentKey: ByteList.init(key), content: content)
+      contentKV = ContentKV(contentKey: ContentKeyByteList.init(key), content: content)
      res = await p.offer(node, @[contentKV])

    if res.isOk():

@@ -184,7 +184,7 @@
    contentKey: string
  ) -> ContentInfo:
    let
-      key = ByteList.init(hexToSeqByte(contentKey))
+      key = ContentKeyByteList.init(hexToSeqByte(contentKey))
      contentId = p.toContentId(key).valueOr:
        raise (ref errors.InvalidRequest)(code: -32602, msg: "Invalid content key")

@@ -199,7 +199,7 @@
    contentKey: string
  ) -> TraceContentLookupResult:
    let
-      key = ByteList.init(hexToSeqByte(contentKey))
+      key = ContentKeyByteList.init(hexToSeqByte(contentKey))
      contentId = p.toContentId(key).valueOr:
        raise (ref errors.InvalidRequest)(code: -32602, msg: "Invalid content key")

@@ -217,7 +217,7 @@
    contentKey: string, contentValue: string
  ) -> bool:
    let
-      key = ByteList.init(hexToSeqByte(contentKey))
+      key = ContentKeyByteList.init(hexToSeqByte(contentKey))
      contentValueBytes = hexToSeqByte(contentValue)

    let valueToStore =

@@ -252,7 +252,7 @@

  rpcServer.rpc("portal_" & network & "LocalContent") do(contentKey: string) -> string:
    let
-      key = ByteList.init(hexToSeqByte(contentKey))
+      key = ContentKeyByteList.init(hexToSeqByte(contentKey))
      contentId = p.toContentId(key).valueOr:
        raise (ref errors.InvalidRequest)(code: -32602, msg: "Invalid content key")

@@ -267,7 +267,7 @@
    let
      key = hexToSeqByte(contentKey)
      content = hexToSeqByte(contentValue)
-      contentKeys = ContentKeysList(@[ByteList.init(key)])
+      contentKeys = ContentKeysList(@[ContentKeyByteList.init(key)])
      numberOfPeers =
        await p.neighborhoodGossip(Opt.none(NodeId), contentKeys, @[content])

@@ -279,7 +279,7 @@
    let
      key = hexToSeqByte(contentKey)
      content = hexToSeqByte(contentValue)
-      contentKeys = ContentKeysList(@[ByteList.init(key)])
+      contentKeys = ContentKeysList(@[ContentKeyByteList.init(key)])
      numberOfPeers = await p.randomGossip(Opt.none(NodeId), contentKeys, @[content])

    return numberOfPeers

@@ -279,25 +279,25 @@ suite "Beacon Content Encodings":

 suite "Beacon ContentKey Encodings ":
   test "Invalid prefix - 0 value":
-    let encoded = ByteList.init(@[byte 0x00])
+    let encoded = ContentKeyByteList.init(@[byte 0x00])
     let decoded = decode(encoded)

     check decoded.isNone()

   test "Invalid prefix - before valid range":
-    let encoded = ByteList.init(@[byte 0x01])
+    let encoded = ContentKeyByteList.init(@[byte 0x01])
     let decoded = decode(encoded)

     check decoded.isNone()

   test "Invalid prefix - after valid range":
-    let encoded = ByteList.init(@[byte 0x14])
+    let encoded = ContentKeyByteList.init(@[byte 0x14])
     let decoded = decode(encoded)

     check decoded.isNone()

   test "Invalid key - empty input":
-    let encoded = ByteList.init(@[])
+    let encoded = ContentKeyByteList.init(@[])
     let decoded = decode(encoded)

     check decoded.isNone()

@@ -24,7 +24,7 @@ suite "Portal Wire Protocol Message Encodings":
      dataRadius = UInt256.high() - 1 # Full radius - 1
      enrSeq = 1'u64
      # Can be any custom payload, testing with just dataRadius here.
-      customPayload = ByteList(SSZ.encode(CustomPayload(dataRadius: dataRadius)))
+      customPayload = ByteList[2048](SSZ.encode(CustomPayload(dataRadius: dataRadius)))
      p = PingMessage(enrSeq: enrSeq, customPayload: customPayload)

    let encoded = encodeMessage(p)

@@ -44,7 +44,7 @@
      dataRadius = UInt256.high() div 2.stuint(256) # Radius of half the UInt256
      enrSeq = 1'u64
      # Can be any custom payload, testing with just dataRadius here.
-      customPayload = ByteList(SSZ.encode(CustomPayload(dataRadius: dataRadius)))
+      customPayload = ByteList[2048](SSZ.encode(CustomPayload(dataRadius: dataRadius)))
      p = PongMessage(enrSeq: enrSeq, customPayload: customPayload)

    let encoded = encodeMessage(p)

@@ -109,7 +109,9 @@
      e2 = res2.value
      total = 0x1'u8
      n = NodesMessage(
-        total: total, enrs: List[ByteList, 32](@[ByteList(e1.raw), ByteList(e2.raw)])
+        total: total,
+        enrs:
+          List[ByteList[2048], 32](@[ByteList[2048](e1.raw), ByteList[2048](e2.raw)]),
      )

    let encoded = encodeMessage(n)

@@ -124,13 +126,13 @@
      message.kind == nodes
      message.nodes.total == total
      message.nodes.enrs.len() == 2
-      message.nodes.enrs[0] == ByteList(e1.raw)
-      message.nodes.enrs[1] == ByteList(e2.raw)
+      message.nodes.enrs[0] == ByteList[2048](e1.raw)
+      message.nodes.enrs[1] == ByteList[2048](e2.raw)

  test "FindContent Request":
    const contentKeyString = "0x706f7274616c"
    let
-      contentKey = ByteList.init(hexToSeqByte(contentKeyString))
+      contentKey = ContentKeyByteList.init(hexToSeqByte(contentKeyString))
      fc = FindContentMessage(contentKey: contentKey)

    let encoded = encodeMessage(fc)

@@ -165,7 +167,7 @@
  test "Content Response - content payload":
    const contentString = "0x7468652063616b652069732061206c6965"
    let
-      content = ByteList(hexToSeqByte(contentString))
+      content = ByteList[2048](hexToSeqByte(contentString))
      c = ContentMessage(contentMessageType: contentType, content: content)

    let encoded = encodeMessage(c)

@@ -195,7 +197,7 @@
    let
      e1 = res1.value
      e2 = res2.value
-      enrs = List[ByteList, 32](@[ByteList(e1.raw), ByteList(e2.raw)])
+      enrs = List[ByteList[2048], 32](@[ByteList[2048](e1.raw), ByteList[2048](e2.raw)])
      c = ContentMessage(contentMessageType: enrsType, enrs: enrs)

    let encoded = encodeMessage(c)

@@ -210,12 +212,12 @@
      message.kind == MessageKind.content
      message.content.contentMessageType == enrsType
      message.content.enrs.len() == 2
-      message.content.enrs[0] == ByteList(e1.raw)
-      message.content.enrs[1] == ByteList(e2.raw)
+      message.content.enrs[0] == ByteList[2048](e1.raw)
+      message.content.enrs[1] == ByteList[2048](e2.raw)

  test "Content Response - empty enrs":
    let
-      enrs = List[ByteList, 32].init(@[])
+      enrs = List[ByteList[2048], 32].init(@[])
      c = ContentMessage(contentMessageType: enrsType, enrs: enrs)
    let encoded = encodeMessage(c)
    check encoded.toHex == "0502"

@@ -230,7 +232,8 @@

  test "Offer Request":
    let
-      contentKeys = ContentKeysList(List(@[ByteList(@[byte 0x01, 0x02, 0x03])]))
+      contentKeys =
+        ContentKeysList(List(@[ContentKeyByteList(@[byte 0x01, 0x02, 0x03])]))
      o = OfferMessage(contentKeys: contentKeys)

    let encoded = encodeMessage(o)

@@ -177,7 +177,7 @@ proc mockBlockHashToStateRoot*(
     blockHeader = BlockHeader(stateRoot: stateRoot)
     headerRlp = rlp.encode(blockHeader)
     blockHeaderWithProof = BlockHeaderWithProof(
-      header: ByteList.init(headerRlp), proof: BlockHeaderProof.init()
+      header: ByteList[2048].init(headerRlp), proof: BlockHeaderProof.init()
     )
     contentKeyBytes = history_content.ContentKey
       .init(history_content.ContentType.blockHeader, blockHash)

@@ -105,25 +105,25 @@ suite "State Content Keys":
       decoded.value().contractCodeKey.codeHash == codeHash

   test "Invalid prefix - 0 value":
-    let encoded = ByteList.init(@[byte 0x00])
+    let encoded = ContentKeyByteList.init(@[byte 0x00])
     let decoded = ContentKey.decode(encoded)

     check decoded.isErr()

   test "Invalid prefix - before valid range":
-    let encoded = ByteList.init(@[byte 0x01])
+    let encoded = ContentKeyByteList.init(@[byte 0x01])
     let decoded = ContentKey.decode(encoded)

     check decoded.isErr()

   test "Invalid prefix - after valid range":
-    let encoded = ByteList.init(@[byte 0x25])
+    let encoded = ContentKeyByteList.init(@[byte 0x25])
     let decoded = ContentKey.decode(encoded)

     check decoded.isErr()

   test "Invalid key - empty input":
-    let encoded = ByteList.init(@[])
+    let encoded = ContentKeyByteList.init(@[])
     let decoded = ContentKey.decode(encoded)

     check decoded.isErr()

@@ -51,7 +51,7 @@ procSuite "State Endpoints":
     let
       stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
       leafData = testData.recursive_gossip[0]
-      contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
       contentKey = ContentKey.decode(contentKeyBytes).get()
       contentId = toContentId(contentKeyBytes)
       contentValueBytes = leafData.content_value.hexToSeqByte()

@@ -72,7 +72,7 @@

     # wait for recursive gossip to complete
     for node in testData.recursive_gossip:
-      let keyBytes = node.content_key.hexToSeqByte().ByteList
+      let keyBytes = node.content_key.hexToSeqByte().ContentKeyByteList
       await stateNode2.waitUntilContentAvailable(toContentId(keyBytes))

     let

@@ -158,7 +158,7 @@
      testData = testCase[0]
      stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
      leafData = testData.recursive_gossip[0]
-      contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
      contentKey = ContentKey.decode(contentKeyBytes).get()
      contentId = toContentId(contentKeyBytes)
      contentValueBytes = leafData.content_value.hexToSeqByte()

@@ -183,7 +183,7 @@
      testData = testCase[1]
      stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
      leafData = testData.recursive_gossip[0]
-      contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
      contentKey = ContentKey.decode(contentKeyBytes).get()
      contentId = toContentId(contentKeyBytes)
      contentValueBytes = leafData.content_value.hexToSeqByte()

@@ -204,7 +204,7 @@

    # wait for recursive gossip to complete
    for node in testData.recursive_gossip:
-      let keyBytes = node.content_key.hexToSeqByte().ByteList
+      let keyBytes = node.content_key.hexToSeqByte().ContentKeyByteList
      await stateNode2.waitUntilContentAvailable(toContentId(keyBytes))

    let

@@ -232,7 +232,7 @@
        raiseAssert "Cannot read test vector: " & error
      testData = testCase[0]
      stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-      contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
      contentKey = ContentKey.decode(contentKeyBytes).get()
      contentId = toContentId(contentKeyBytes)
      contentValueBytes = testData.content_value_offer.hexToSeqByte()

@@ -25,7 +25,8 @@ suite "State Gossip getParent - Test Vectors":
     for i, testData in testCase:
       var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

-      let key = ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+      let key =
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
       let offer =
         AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -40,7 +41,7 @@
          key.accountTrieNodeKey.path.unpackNibbles().len()
        parentOffer.proof.len() == offer.proof.len() - 1
        parentKey.toContentKey().encode() ==
-          testData.recursive_gossip.content_key.hexToSeqByte().ByteList
+          testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList
        parentOffer.encode() ==
          testData.recursive_gossip.content_value_offer.hexToSeqByte()
        parentOffer.toRetrievalValue().encode() ==

@@ -55,7 +56,8 @@
    for i, testData in testCase:
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

-      let key = ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+      let key =
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let offer =
        ContractTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -70,7 +72,7 @@
          key.contractTrieNodeKey.path.unpackNibbles().len()
        parentOffer.storageProof.len() == offer.storageProof.len() - 1
        parentKey.toContentKey().encode() ==
-          testData.recursive_gossip.content_key.hexToSeqByte().ByteList
+          testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList
        parentOffer.encode() ==
          testData.recursive_gossip.content_value_offer.hexToSeqByte()
        parentOffer.toRetrievalValue().encode() ==

@@ -89,7 +91,9 @@
      for j in 0 ..< testData.recursive_gossip.high:
        let
          key = ContentKey
-            .decode(testData.recursive_gossip[j].content_key.hexToSeqByte().ByteList)
+            .decode(
+              testData.recursive_gossip[j].content_key.hexToSeqByte().ContentKeyByteList
+            )
            .get()
          offer = AccountTrieNodeOffer
            .decode(testData.recursive_gossip[j].content_value.hexToSeqByte())

@@ -101,7 +105,7 @@
            key.accountTrieNodeKey.path.unpackNibbles().len()
          parentOffer.proof.len() == offer.proof.len() - 1
          parentKey.toContentKey().encode() ==
-            testData.recursive_gossip[j + 1].content_key.hexToSeqByte().ByteList
+            testData.recursive_gossip[j + 1].content_key.hexToSeqByte().ContentKeyByteList
          parentOffer.encode() ==
            testData.recursive_gossip[j + 1].content_value.hexToSeqByte()

@@ -118,7 +122,9 @@
      for j in 0 ..< testData.recursive_gossip.high:
        let
          key = ContentKey
-            .decode(testData.recursive_gossip[j].content_key.hexToSeqByte().ByteList)
+            .decode(
+              testData.recursive_gossip[j].content_key.hexToSeqByte().ContentKeyByteList
+            )
            .get()
          offer = ContractTrieNodeOffer
            .decode(testData.recursive_gossip[j].content_value.hexToSeqByte())

@@ -130,6 +136,6 @@
            key.contractTrieNodeKey.path.unpackNibbles().len()
          parentOffer.storageProof.len() == offer.storageProof.len() - 1
          parentKey.toContentKey().encode() ==
-            testData.recursive_gossip[j + 1].content_key.hexToSeqByte().ByteList
+            testData.recursive_gossip[j + 1].content_key.hexToSeqByte().ContentKeyByteList
          parentOffer.encode() ==
            testData.recursive_gossip[j + 1].content_value.hexToSeqByte()

@@ -47,14 +47,14 @@ procSuite "State Gossip - Gossip Offer":

     let
       stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-      contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
       contentKey = ContentKey.decode(contentKeyBytes).get()
       contentId = toContentId(contentKeyBytes)
       contentValueBytes = testData.content_value_offer.hexToSeqByte()
       contentValue = AccountTrieNodeOffer.decode(contentValueBytes).get()

       parentContentKeyBytes =
-        testData.recursive_gossip.content_key.hexToSeqByte().ByteList
+        testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList
       parentContentKey = ContentKey.decode(parentContentKeyBytes).get()
       parentContentId = toContentId(parentContentKeyBytes)
       parentContentValueBytes =

@@ -120,14 +120,14 @@

    let
      stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-      contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
      contentKey = ContentKey.decode(contentKeyBytes).get()
      contentId = toContentId(contentKeyBytes)
      contentValueBytes = testData.content_value_offer.hexToSeqByte()
      contentValue = ContractTrieNodeOffer.decode(contentValueBytes).get()

      parentContentKeyBytes =
-        testData.recursive_gossip.content_key.hexToSeqByte().ByteList
+        testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList
      parentContentKey = ContentKey.decode(parentContentKeyBytes).get()
      parentContentId = toContentId(parentContentKeyBytes)
      parentContentValueBytes =

@@ -191,7 +191,7 @@
    for i, testData in testCase:
      let
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_offer.hexToSeqByte()

@@ -251,7 +251,7 @@
    let
      stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
      leafData = testData.recursive_gossip[0]
-      contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
      contentKey = ContentKey.decode(contentKeyBytes).get()
      contentId = toContentId(contentKeyBytes)
      contentValueBytes = leafData.content_value.hexToSeqByte()

@@ -275,13 +275,13 @@

    # wait for recursive gossip to complete
    for node in testData.recursive_gossip:
-      let keyBytes = node.content_key.hexToSeqByte().ByteList
+      let keyBytes = node.content_key.hexToSeqByte().ContentKeyByteList
      await stateNode2.waitUntilContentAvailable(toContentId(keyBytes))

    # check that all nodes were received by both state instances
    for kv in testData.recursive_gossip:
      let
-        expectedKeyBytes = kv.content_key.hexToSeqByte().ByteList
+        expectedKeyBytes = kv.content_key.hexToSeqByte().ContentKeyByteList
        expectedKey = ContentKey.decode(expectedKeyBytes).get()
        expectedId = toContentId(expectedKeyBytes)
        expectedValue =

@@ -330,7 +330,7 @@
    let
      stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
      leafData = testData.recursive_gossip[0]
-      contentKeyBytes = leafData.content_key.hexToSeqByte().ByteList
+      contentKeyBytes = leafData.content_key.hexToSeqByte().ContentKeyByteList
      contentKey = ContentKey.decode(contentKeyBytes).get()
      contentId = toContentId(contentKeyBytes)
      contentValueBytes = leafData.content_value.hexToSeqByte()

@@ -354,13 +354,13 @@

    # wait for recursive gossip to complete
    for node in testData.recursive_gossip:
-      let keyBytes = node.content_key.hexToSeqByte().ByteList
+      let keyBytes = node.content_key.hexToSeqByte().ContentKeyByteList
      await stateNode2.waitUntilContentAvailable(toContentId(keyBytes))

    # check that all nodes were received by both state instances
    for kv in testData.recursive_gossip:
      let
-        expectedKeyBytes = kv.content_key.hexToSeqByte().ByteList
+        expectedKeyBytes = kv.content_key.hexToSeqByte().ContentKeyByteList
        expectedKey = ContentKey.decode(expectedKeyBytes).get()
        expectedId = toContentId(expectedKeyBytes)
        expectedValue =

@@ -37,7 +37,7 @@ procSuite "State Network - Get Content":

     for testData in testCase:
       let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
         contentKey = ContentKey.decode(contentKeyBytes).get()
         contentId = toContentId(contentKeyBytes)
         contentValueBytes = testData.content_value_retrieval.hexToSeqByte()

@@ -68,7 +68,7 @@

    for testData in testCase:
      let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()

      let res =

@@ -90,7 +90,7 @@

    for testData in testCase:
      let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_retrieval.hexToSeqByte()

@@ -122,7 +122,7 @@

    for testData in testCase:
      let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()

      let res = await stateNode1.stateNetwork.getContractTrieNode(

@@ -145,7 +145,7 @@

    for testData in testCase:
      let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_retrieval.hexToSeqByte()

@@ -176,7 +176,7 @@

    for testData in testCase:
      let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()

      let res =

@@ -209,7 +209,7 @@

    for testData in testCase:
      let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_retrieval.hexToSeqByte()

@@ -264,7 +264,7 @@

    for testData in testCase:
      let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_retrieval.hexToSeqByte()

@@ -319,7 +319,7 @@

    for testData in testCase:
      let
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_retrieval.hexToSeqByte()

@@ -37,7 +37,7 @@ procSuite "State Network - Offer Content":
     for testData in testCase:
       let
         stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
         contentKey = ContentKey.decode(contentKeyBytes).get()
         contentId = toContentId(contentKeyBytes)
         contentValueBytes = testData.content_value_offer.hexToSeqByte()

@@ -105,7 +105,7 @@
    for testData in testCase:
      let
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_offer.hexToSeqByte()

@@ -174,7 +174,7 @@
    for testData in testCase:
      let
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_offer.hexToSeqByte()

@@ -244,7 +244,7 @@
    for testData in testCase:
      let
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_offer.hexToSeqByte()

@@ -293,7 +293,7 @@
    for testData in testCase:
      let
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_offer.hexToSeqByte()

@@ -343,7 +343,7 @@
    for testData in testCase:
      let
        stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
-        contentKeyBytes = testData.content_key.hexToSeqByte().ByteList
+        contentKeyBytes = testData.content_key.hexToSeqByte().ContentKeyByteList
        contentKey = ContentKey.decode(contentKeyBytes).get()
        contentId = toContentId(contentKeyBytes)
        contentValueBytes = testData.content_value_offer.hexToSeqByte()

@@ -26,7 +26,7 @@ suite "State Validation - Test Vectors":

     for testData in testCase:
       let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
       let contentValueRetrieval = AccountTrieNodeRetrieval
         .decode(testData.content_value_retrieval.hexToSeqByte())
         .get()

@@ -42,7 +42,7 @@

    for testData in testCase:
      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      var contentValueRetrieval = AccountTrieNodeRetrieval
        .decode(testData.content_value_retrieval.hexToSeqByte())
        .get()

@@ -62,7 +62,7 @@

    for testData in testCase:
      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let contentValueRetrieval = ContractTrieNodeRetrieval
        .decode(testData.content_value_retrieval.hexToSeqByte())
        .get()

@@ -78,7 +78,7 @@

    for testData in testCase:
      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      var contentValueRetrieval = ContractTrieNodeRetrieval
        .decode(testData.content_value_retrieval.hexToSeqByte())
        .get()

@@ -98,7 +98,7 @@

    for testData in testCase:
      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let contentValueRetrieval = ContractCodeRetrieval
        .decode(testData.content_value_retrieval.hexToSeqByte())
        .get()

@@ -114,7 +114,7 @@

    for testData in testCase:
      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      var contentValueRetrieval = ContractCodeRetrieval
        .decode(testData.content_value_retrieval.hexToSeqByte())
        .get()

@@ -138,8 +139,9 @@
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

      block:
-        let contentKey =
-          ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentKey = ContentKey
+          .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+          .get()
        let contentValueOffer =
          AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -153,7 +154,7 @@
        continue # second test case only has root node and no recursive gossip

      let contentKey = ContentKey
-        .decode(testData.recursive_gossip.content_key.hexToSeqByte().ByteList)
+        .decode(testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList)
        .get()
      let contentValueOffer = AccountTrieNodeOffer
        .decode(testData.recursive_gossip.content_value_offer.hexToSeqByte())

@@ -180,7 +181,7 @@
      var stateRoot = KeccakHash.fromBytes(stateRoots[i].hexToSeqByte())

      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let contentValueOffer =
        AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -201,7 +202,7 @@
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      var contentValueOffer =
        AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -220,7 +221,7 @@
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      var contentValueOffer =
        AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -237,7 +238,7 @@
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      var contentValueOffer =
        AccountTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -261,8 +262,9 @@
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

      block:
-        let contentKey =
-          ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentKey = ContentKey
+          .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+          .get()
        let contentValueOffer = ContractTrieNodeOffer
          .decode(testData.content_value_offer.hexToSeqByte())
          .get()

@@ -277,7 +279,7 @@
        continue # second test case has no recursive gossip

      let contentKey = ContentKey
-        .decode(testData.recursive_gossip.content_key.hexToSeqByte().ByteList)
+        .decode(testData.recursive_gossip.content_key.hexToSeqByte().ContentKeyByteList)
        .get()
      let contentValueOffer = ContractTrieNodeOffer
        .decode(testData.recursive_gossip.content_value_offer.hexToSeqByte())

@@ -303,7 +305,7 @@
      var stateRoot = KeccakHash.fromBytes(stateRoots[i].hexToSeqByte())

      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let contentValueOffer =
        ContractTrieNodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -324,8 +326,9 @@
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

      block:
-        let contentKey =
-          ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentKey = ContentKey
+          .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+          .get()
        var contentValueOffer = ContractTrieNodeOffer
          .decode(testData.content_value_offer.hexToSeqByte())
          .get()

@@ -340,8 +343,9 @@
          res.error() == "hash of proof root node doesn't match the expected root hash"

      block:
-        let contentKey =
-          ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentKey = ContentKey
+          .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+          .get()
        var contentValueOffer = ContractTrieNodeOffer
          .decode(testData.content_value_offer.hexToSeqByte())
          .get()

@@ -356,8 +360,9 @@
          res.error() == "hash of proof root node doesn't match the expected root hash"

      block:
-        let contentKey =
-          ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentKey = ContentKey
+          .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+          .get()
        var contentValueOffer = ContractTrieNodeOffer
          .decode(testData.content_value_offer.hexToSeqByte())
          .get()

@@ -371,8 +376,9 @@
        .isErr()

      block:
-        let contentKey =
-          ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentKey = ContentKey
+          .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+          .get()
        var contentValueOffer = ContractTrieNodeOffer
          .decode(testData.content_value_offer.hexToSeqByte())
          .get()

@@ -386,8 +392,9 @@
        .isErr()

      block:
-        let contentKey =
-          ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentKey = ContentKey
+          .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+          .get()
        var contentValueOffer = ContractTrieNodeOffer
          .decode(testData.content_value_offer.hexToSeqByte())
          .get()

@@ -412,7 +419,7 @@
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let contentValueOffer =
        ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -434,7 +441,7 @@
      var stateRoot = KeccakHash.fromBytes(stateRoots[i].hexToSeqByte())

      let contentKey =
-        ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        ContentKey.decode(testData.content_key.hexToSeqByte().ContentKeyByteList).get()
      let contentValueOffer =
        ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -455,8 +462,9 @@
      var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())

      block:
-        let contentKey =
-          ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
+        let contentKey = ContentKey
+          .decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
+          .get()
        var contentValueOffer =
          ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()

@@ -470,8 +478,9 @@
|
|||
res.error() == "hash of proof root node doesn't match the expected root hash"
|
||||
|
||||
block:
|
||||
let contentKey =
|
||||
ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
|
||||
let contentKey = ContentKey
|
||||
.decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
|
||||
.get()
|
||||
var contentValueOffer =
|
||||
ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()
|
||||
|
||||
|
@ -486,8 +495,9 @@ suite "State Validation - Test Vectors":
|
|||
"hash of bytecode doesn't match the code hash in the account proof"
|
||||
|
||||
block:
|
||||
let contentKey =
|
||||
ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
|
||||
let contentKey = ContentKey
|
||||
.decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
|
||||
.get()
|
||||
var contentValueOffer =
|
||||
ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()
|
||||
|
||||
|
@ -500,8 +510,9 @@ suite "State Validation - Test Vectors":
|
|||
.isErr()
|
||||
|
||||
block:
|
||||
let contentKey =
|
||||
ContentKey.decode(testData.content_key.hexToSeqByte().ByteList).get()
|
||||
let contentKey = ContentKey
|
||||
.decode(testData.content_key.hexToSeqByte().ContentKeyByteList)
|
||||
.get()
|
||||
var contentValueOffer =
|
||||
ContractCodeOffer.decode(testData.content_value_offer.hexToSeqByte()).get()
|
||||
|
||||
|
@ -535,7 +546,8 @@ suite "State Validation - Test Vectors":
|
|||
var stateRoot = KeccakHash.fromBytes(stateRoots[i].hexToSeqByte())
|
||||
|
||||
for kv in testData.recursive_gossip:
|
||||
let contentKey = ContentKey.decode(kv.content_key.hexToSeqByte().ByteList).get()
|
||||
let contentKey =
|
||||
ContentKey.decode(kv.content_key.hexToSeqByte().ContentKeyByteList).get()
|
||||
let contentValueOffer =
|
||||
AccountTrieNodeOffer.decode(kv.content_value.hexToSeqByte()).get()
|
||||
|
||||
|
@ -558,7 +570,8 @@ suite "State Validation - Test Vectors":
|
|||
var stateRoot = KeccakHash.fromBytes(testData.state_root.hexToSeqByte())
|
||||
|
||||
for kv in testData.recursive_gossip:
|
||||
let contentKey = ContentKey.decode(kv.content_key.hexToSeqByte().ByteList).get()
|
||||
let contentKey =
|
||||
ContentKey.decode(kv.content_key.hexToSeqByte().ContentKeyByteList).get()
|
||||
let contentValueOffer =
|
||||
ContractTrieNodeOffer.decode(kv.content_value.hexToSeqByte()).get()
|
||||
|
||||
|
|
|
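The test-vector hunks above are mechanical renames: the encoded key is now typed as ContentKeyByteList (the ByteList[2048] alias introduced by this commit) rather than the bare ByteList. A minimal sketch of the decode pattern, assuming only stew/byteutils and ssz_serialization; the hex bytes here are placeholders, not a real vector:

import stew/byteutils, ssz_serialization

type ContentKeyByteList = ByteList[2048] # alias as declared in the common types module

let rawKey = hexToSeqByte("0x00aabbcc") # hypothetical test-vector hex
let keyBytes = ContentKeyByteList.init(rawKey)
# ContentKey.decode(keyBytes) then SSZ-decodes the key exactly as before;
# only the byte-list type name at the call site changes.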
@ -22,7 +22,7 @@ import

const protocolId = [byte 0x50, 0x00]

proc toContentId(contentKey: ByteList): results.Opt[ContentId] =
proc toContentId(contentKey: ContentKeyByteList): results.Opt[ContentId] =
# Note: Returning sha256 digest as content id here. This content key to
# content id derivation is different for the different content networks
# and their content types.

@ -74,7 +74,8 @@ procSuite "Portal Wire Protocol Tests":

let pong = await proto1.ping(proto2.localNode)

let customPayload = ByteList(SSZ.encode(CustomPayload(dataRadius: UInt256.high())))
let customPayload =
ByteList[2048](SSZ.encode(CustomPayload(dataRadius: UInt256.high())))

check:
pong.isOk()

@ -135,7 +136,7 @@ procSuite "Portal Wire Protocol Tests":
check (await proto1.baseProtocol.ping(proto2.localNode)).isOk()
check (await proto2.baseProtocol.ping(proto1.localNode)).isOk()

let contentKey = ByteList.init(@[1'u8])
let contentKey = ContentKeyByteList.init(@[1'u8])

# content does not exist so this should provide us with the closest nodes
# to the content, which is the only node in the routing table.

@ -150,7 +151,7 @@ procSuite "Portal Wire Protocol Tests":

asyncTest "Offer/Accept":
let (proto1, proto2) = defaultTestSetup(rng)
let contentKeys = ContentKeysList(@[ByteList(@[byte 0x01, 0x02, 0x03])])
let contentKeys = ContentKeysList(@[ContentKeyByteList(@[byte 0x01, 0x02, 0x03])])

let accept = await proto1.offerImpl(proto2.baseProtocol.localNode, contentKeys)

@ -166,8 +167,9 @@ procSuite "Portal Wire Protocol Tests":
let (proto1, proto2) = defaultTestSetup(rng)
var content: seq[ContentKV]
for i in 0 ..< contentKeysLimit:
let contentKV =
ContentKV(contentKey: ByteList(@[byte i]), content: repeat(byte i, 5000))
let contentKV = ContentKV(
contentKey: ContentKeyByteList(@[byte i]), content: repeat(byte i, 5000)
)
content.add(contentKV)

let res = await proto1.offer(proto2.baseProtocol.localNode, content)

@ -343,7 +345,7 @@ procSuite "Portal Wire Protocol Tests":
var distances: seq[UInt256] = @[]

for i in 0 ..< 40:
proto1.storeContent(ByteList.init(@[uint8(i)]), u256(i), item)
proto1.storeContent(ByteList[2048].init(@[uint8(i)]), u256(i), item)
distances.add(u256(i) xor proto1.localNode.id)

distances.sort(order = SortOrder.Descending)
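In the wire-protocol tests the same rename shows up wherever content keys are built by hand. A sketch of constructing an offer key list, under the assumption that ContentKeysList is List[ContentKeyByteList, 64] (matching the contentKeysLimit used above); the key bytes are placeholders:

import ssz_serialization

type
  ContentKeyByteList = ByteList[2048]
  ContentKeysList = List[ContentKeyByteList, 64] # assumed limit of 64 keys per offer

# List types are distinct seqs, so plain conversions work for short test keys.
let contentKeys = ContentKeysList(
  @[
    ContentKeyByteList(@[byte 0x01, 0x02, 0x03]),
    ContentKeyByteList(@[byte 0x04, 0x05]),
  ]
)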
@ -133,7 +133,7 @@ proc asPortalBlockData*(
)

headerWithProof = BlockHeaderWithProof(
header: ByteList(rlp.encode(header)), proof: BlockHeaderProof.init()
header: ByteList[2048](rlp.encode(header)), proof: BlockHeaderProof.init()
)

var transactions: Transactions

@ -178,7 +178,7 @@ proc asPortalBlockData*(
)

headerWithProof = BlockHeaderWithProof(
header: ByteList(rlp.encode(header)), proof: BlockHeaderProof.init()
header: ByteList[2048](rlp.encode(header)), proof: BlockHeaderProof.init()
)

var transactions: Transactions
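The history hunks replace the bare ByteList conversion around the RLP-encoded header with an explicit ByteList[2048]. Since ByteList[n] is a distinct seq[byte], this remains a plain type conversion; a sketch assuming nim-eth's BlockHeader and its rlp writers:

import eth/common, eth/rlp, ssz_serialization

let header = BlockHeader() # placeholder header; normally taken from the block being converted
let encoded: seq[byte] = rlp.encode(header)
let headerField = ByteList[2048](encoded) # value assigned to the `header:` field above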
@ -53,7 +53,8 @@ func asPortalBlock(

let
headerWithProof = BlockHeaderWithProof(
header: ByteList(rlp.encode(ethBlock.header)), proof: BlockHeaderProof.init()
header: ByteList[2048](rlp.encode(ethBlock.header)),
proof: BlockHeaderProof.init(),
)
portalBody = PortalBlockBodyShanghai(
transactions: transactions, uncles: Uncles(@[byte 0xc0]), withdrawals: withdrawals
@ -207,7 +207,7 @@ proc discover(d: discv5_protocol.Protocol) {.async.} =
info "Lookup finished", nodes = discovered.len
await sleepAsync(30.seconds)

proc testContentIdHandler(contentKey: ByteList): results.Opt[ContentId] =
proc testContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
# Note: Returning a static content id here, as in practice this depends
# on the content key to content id derivation, which is different for the
# different content networks. And we want these tests to be independent from

@ -295,7 +295,7 @@ proc run(config: PortalCliConf) =
echo nodes.error
of findContent:
# For now just some bogus bytes
let contentKey = ByteList.init(@[1'u8])
let contentKey = ContentKeyByteList.init(@[1'u8])

let foundContent = waitFor portal.findContent(config.findContentTarget, contentKey)
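The handlers above only change their parameter type to ContentKeyByteList. A sketch of such a handler, using the sha256-of-key derivation the earlier test comment describes; the proc name and exact derivation are illustrative, since each content network derives its ids differently:

import results, ssz_serialization, nimcrypto/[hash, sha2], stint

type
  ContentKeyByteList = ByteList[2048]
  ContentId = UInt256

proc toContentId(contentKey: ContentKeyByteList): Opt[ContentId] =
  # sha256 of the encoded key, read as a big-endian 256-bit id
  let idHash = sha256.digest(contentKey.asSeq())
  Opt.some(readUintBE[256](idHash.data))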
@ -1 +1 @@
Subproject commit b71ebc41c8e5027580be77a9707df1a64e6d9c8b
Subproject commit 6f831b79df24af00c10e73e717cbe40d7d0e2439