diff --git a/fluffy/common/common_types.nim b/fluffy/common/common_types.nim
index 17281fe7f..bc8cd77d6 100644
--- a/fluffy/common/common_types.nim
+++ b/fluffy/common/common_types.nim
@@ -34,11 +34,11 @@ func decodeRlp*(input: openArray[byte], T: type): Result[T, string] =
 func decodeSsz*(input: openArray[byte], T: type): Result[T, string] =
   try:
     ok(SSZ.decode(input, T))
-  except SszError as e:
+  except SerializationError as e:
     err(e.msg)
 
 func decodeSszOrRaise*(input: openArray[byte], T: type): T =
   try:
     SSZ.decode(input, T)
-  except SszError as e:
+  except SerializationError as e:
     raiseAssert(e.msg)
\ No newline at end of file
diff --git a/fluffy/content_db.nim b/fluffy/content_db.nim
index c4c706fe7..38f6b33c7 100644
--- a/fluffy/content_db.nim
+++ b/fluffy/content_db.nim
@@ -155,7 +155,7 @@ proc getSszDecoded(kv: KvStoreRef, key: openArray[byte], T: type auto): Opt[T] =
   if res.isSome():
     try:
       Opt.some(SSZ.decode(res.get(), T))
-    except SszError:
+    except SerializationError:
       raiseAssert("Stored data should always be serialized correctly")
   else:
     Opt.none(T)
diff --git a/fluffy/eth_data/history_data_ssz_e2s.nim b/fluffy/eth_data/history_data_ssz_e2s.nim
index 55cab98dd..a6d043068 100644
--- a/fluffy/eth_data/history_data_ssz_e2s.nim
+++ b/fluffy/eth_data/history_data_ssz_e2s.nim
@@ -26,7 +26,7 @@ proc readAccumulator*(file: string): Result[FinishedAccumulator, string] =
 
   try:
     ok(SSZ.decode(encodedAccumulator, FinishedAccumulator))
-  except SszError as e:
+  except SerializationError as e:
     err("Failed decoding accumulator: " & e.msg)
 
 proc readEpochAccumulator*(file: string): Result[EpochAccumulator, string] =
@@ -34,7 +34,7 @@ proc readEpochAccumulator*(file: string): Result[EpochAccumulator, string] =
 
   try:
     ok(SSZ.decode(encodedAccumulator, EpochAccumulator))
-  except SszError as e:
+  except SerializationError as e:
     err("Decoding epoch accumulator failed: " & e.msg)
 
 proc readEpochAccumulatorCached*(file: string): Result[EpochAccumulatorCached, string] =
@@ -42,7 +42,7 @@ proc readEpochAccumulatorCached*(file: string): Result[EpochAccumulatorCached, s
 
   try:
     ok(SSZ.decode(encodedAccumulator, EpochAccumulatorCached))
-  except SszError as e:
+  except SerializationError as e:
     err("Decoding epoch accumulator failed: " & e.msg)
 
 # Reading data in e2s format
diff --git a/fluffy/network/beacon_light_client/beacon_light_client_content.nim b/fluffy/network/beacon_light_client/beacon_light_client_content.nim
index 6e06741c2..b1c74aec4 100644
--- a/fluffy/network/beacon_light_client/beacon_light_client_content.nim
+++ b/fluffy/network/beacon_light_client/beacon_light_client_content.nim
@@ -85,7 +85,7 @@ func encode*(contentKey: ContentKey): ByteList =
 func decode*(contentKey: ByteList): Opt[ContentKey] =
   try:
     Opt.some(SSZ.decode(contentKey.asSeq(), ContentKey))
-  except SszError:
+  except SerializationError:
     return Opt.none(ContentKey)
 
 func toContentId*(contentKey: ByteList): ContentId =
diff --git a/fluffy/network/beacon_light_client/beacon_light_client_network.nim b/fluffy/network/beacon_light_client/beacon_light_client_network.nim
index 17969515d..f9401052a 100644
--- a/fluffy/network/beacon_light_client/beacon_light_client_network.nim
+++ b/fluffy/network/beacon_light_client/beacon_light_client_network.nim
@@ -289,4 +289,4 @@ proc stop*(n: LightClientNetwork) =
   n.portalProtocol.stop()
 
   if not n.processContentLoop.isNil:
-    n.processContentLoop.cancel()
+    n.processContentLoop.cancelSoon()
diff --git a/fluffy/network/header/header_content.nim b/fluffy/network/header/header_content.nim
index 324b04af5..9cb63cb37 100644
--- a/fluffy/network/header/header_content.nim
+++ b/fluffy/network/header/header_content.nim
@@ -25,10 +25,10 @@
 type
   ContentType* = enum
     newBlockHeader = 0x00
-    # TODO: remove or fix this temporary 
+    # TODO: remove or fix this temporary
     # dummySelector per latest spec.
     # This is temporary workaround
-    # to fool SSZ.isUnion 
+    # to fool SSZ.isUnion
     dummySelector = 0x01
 
   NewBlockHeaderKey* = object
@@ -48,7 +48,7 @@ func encode*(contentKey: ContentKey): ByteList =
 func decode*(contentKey: ByteList): Option[ContentKey] =
   try:
     some(SSZ.decode(contentKey.asSeq(), ContentKey))
-  except SszError:
+  except SerializationError:
     return none[ContentKey]()
 
 func toContentId*(contentKey: ByteList): ContentId =
diff --git a/fluffy/network/history/history_content.nim b/fluffy/network/history/history_content.nim
index 05a176a1e..462e1f806 100644
--- a/fluffy/network/history/history_content.nim
+++ b/fluffy/network/history/history_content.nim
@@ -77,7 +77,7 @@ func encode*(contentKey: ContentKey): ByteList =
 func decode*(contentKey: ByteList): Opt[ContentKey] =
   try:
     Opt.some(SSZ.decode(contentKey.asSeq(), ContentKey))
-  except SszError:
+  except SerializationError:
     return Opt.none(ContentKey)
 
 func toContentId*(contentKey: ByteList): ContentId =
diff --git a/fluffy/network/history/history_network.nim b/fluffy/network/history/history_network.nim
index 9bd4df09b..ddc6bac99 100644
--- a/fluffy/network/history/history_network.nim
+++ b/fluffy/network/history/history_network.nim
@@ -335,7 +335,7 @@ proc get(db: ContentDB, T: type BlockHeader, contentId: ContentId): Opt[T] =
     let headerWithProof =
       try:
         SSZ.decode(contentFromDB.get(), BlockHeaderWithProof)
-      except SszError as e:
+      except SerializationError as e:
         raiseAssert(e.msg)
 
     let res = decodeRlp(headerWithProof.header.asSeq(), T)
@@ -801,4 +801,4 @@ proc stop*(n: HistoryNetwork) =
   n.portalProtocol.stop()
 
   if not n.processContentLoop.isNil:
-    n.processContentLoop.cancel()
+    n.processContentLoop.cancelSoon()
diff --git a/fluffy/network/state/state_content.nim b/fluffy/network/state/state_content.nim
index b604aed4c..a7a7bb6c0 100644
--- a/fluffy/network/state/state_content.nim
+++ b/fluffy/network/state/state_content.nim
@@ -72,7 +72,7 @@ func encode*(contentKey: ContentKey): ByteList =
 func decode*(contentKey: ByteList): Opt[ContentKey] =
   try:
     Opt.some(SSZ.decode(contentKey.asSeq(), ContentKey))
-  except SszError:
+  except SerializationError:
     return Opt.none(ContentKey)
 
 template computeContentId*(digestCtxType: type, body: untyped): ContentId =
diff --git a/fluffy/network/state/state_network.nim b/fluffy/network/state/state_network.nim
index 51a512f47..ff96b211b 100644
--- a/fluffy/network/state/state_network.nim
+++ b/fluffy/network/state/state_network.nim
@@ -107,4 +107,4 @@ proc stop*(n: StateNetwork) =
   n.portalProtocol.stop()
 
   if not n.processContentLoop.isNil:
-    n.processContentLoop.cancel()
+    n.processContentLoop.cancelSoon()
diff --git a/fluffy/network/wire/messages.nim b/fluffy/network/wire/messages.nim
index 09179f96c..702737dab 100644
--- a/fluffy/network/wire/messages.nim
+++ b/fluffy/network/wire/messages.nim
@@ -147,7 +147,7 @@ func decodeMessage*(body: openArray[byte]): Result[Message, string] =
     if body.len < 1: # TODO: This check should probably move a layer down
       return err("No message data, peer might not support this talk protocol")
     ok(SSZ.decode(body, Message))
-  except SszError as e:
+  except SerializationError as e:
     err("Invalid message encoding: " & e.msg)
 
 template innerMessage[T: SomeMessage](
diff --git a/fluffy/network/wire/portal_protocol.nim b/fluffy/network/wire/portal_protocol.nim
index 4f02a38e7..0b82a7691 100644
--- a/fluffy/network/wire/portal_protocol.nim
+++ b/fluffy/network/wire/portal_protocol.nim
@@ -268,7 +268,7 @@ func handlePing(
   # pings from different nodes to clear the LRU.
   let customPayloadDecoded =
     try: SSZ.decode(ping.customPayload.asSeq(), CustomPayload)
-    except MalformedSszError, SszSizeMismatchError:
+    except SerializationError:
       # invalid custom payload, send empty back
       return @[]
   p.radiusCache.put(srcId, customPayloadDecoded.dataRadius)
@@ -1052,7 +1052,7 @@ proc contentLookup*(p: PortalProtocol, target: ByteList, targetId: UInt256):
     of Content:
       # cancel any pending queries as the content has been found
      for f in pendingQueries:
-        f.cancel()
+        f.cancelSoon()
       portal_lookup_content_requests.observe(requestAmount)
       return Opt.some(ContentLookupResult.init(
         content.content, content.utpTransfer, nodesWithoutContent))
@@ -1357,12 +1357,12 @@ proc start*(p: PortalProtocol) =
 
 proc stop*(p: PortalProtocol) =
   if not p.revalidateLoop.isNil:
-    p.revalidateLoop.cancel()
+    p.revalidateLoop.cancelSoon()
   if not p.refreshLoop.isNil:
-    p.refreshLoop.cancel()
+    p.refreshLoop.cancelSoon()
 
   for worker in p.offerWorkers:
-    worker.cancel()
+    worker.cancelSoon()
   p.offerWorkers = @[]
 
 proc resolve*(p: PortalProtocol, id: NodeId): Future[Opt[Node]] {.async.} =
diff --git a/fluffy/tests/test_history_network.nim b/fluffy/tests/test_history_network.nim
index 7e65235fd..75eb89053 100644
--- a/fluffy/tests/test_history_network.nim
+++ b/fluffy/tests/test_history_network.nim
@@ -234,6 +234,17 @@ procSuite "History Content Network":
 
       check offerResult.isOk()
 
+      # Make sure the content got processed out of content queue
+      while not historyNode2.historyNetwork.contentQueue.empty():
+        await sleepAsync(1.milliseconds)
+
+      # Note: It seems something changed in chronos, causing different behavior.
+      # It seems that validateContent called through processContentLoop used to
+      # run immediately in the case of a "non async shortpath". This is no
+      # longer the case, so the content is not yet validated and thus not yet
+      # stored at this step. Add an await here so that the store can happen.
+      await sleepAsync(100.milliseconds)
+
       for i, contentKV in contentKVs:
         let id = toContentId(contentKV.contentKey)
         if i < len(contentKVs) - 1:
@@ -298,6 +309,12 @@ procSuite "History Content Network":
 
       check offerResult.isOk()
 
+      # Make sure the content got processed out of content queue
+      while not historyNode2.historyNetwork.contentQueue.empty():
+        await sleepAsync(1.milliseconds)
+
+      await sleepAsync(100.milliseconds)
+
       for contentKV in contentKVs:
         let id = toContentId(contentKV.contentKey)
         check historyNode2.containsId(id) == true
diff --git a/fluffy/tools/beacon_lc_bridge/beacon_lc_bridge.nim b/fluffy/tools/beacon_lc_bridge/beacon_lc_bridge.nim
index 18913a8e3..e5f20ede6 100644
--- a/fluffy/tools/beacon_lc_bridge/beacon_lc_bridge.nim
+++ b/fluffy/tools/beacon_lc_bridge/beacon_lc_bridge.nim
@@ -92,14 +92,14 @@ import
 from stew/objects import checkedEnumAssign
 from stew/byteutils import readHexChar
 
-from web3/ethtypes import BlockHash
+from web3/ethtypes as web3types import BlockHash
 
 from beacon_chain/gossip_processing/block_processor import newExecutionPayload
 from beacon_chain/gossip_processing/eth2_processor import toValidationResult
 
 type Hash256 = etypes.Hash256
 
-template asEthHash(hash: ethtypes.BlockHash): Hash256 =
+template asEthHash(hash: web3types.BlockHash): Hash256 =
   Hash256(data: distinctBase(hash))
 
 # TODO: Ugh why isn't gasLimit and gasUsed a uint64 in nim-eth / nimbus-eth1 :(
@@ -451,8 +451,8 @@ proc run(config: BeaconBridgeConf) {.raises: [CatchableError].} =
 
       withBlck(signedBlock):
         when consensusFork >= ConsensusFork.Bellatrix:
-          if blck.message.is_execution_block:
-            template payload(): auto = blck.message.body.execution_payload
+          if forkyBlck.message.is_execution_block:
+            template payload(): auto = forkyBlck.message.body.execution_payload
 
             # TODO: Get rid of the asEngineExecutionPayload step?
             let executionPayload = payload.asEngineExecutionPayload()
diff --git a/nimbus/db/core_db/backend/legacy_db.nim b/nimbus/db/core_db/backend/legacy_db.nim
index d1fc78055..219b37958 100644
--- a/nimbus/db/core_db/backend/legacy_db.nim
+++ b/nimbus/db/core_db/backend/legacy_db.nim
@@ -209,7 +209,7 @@ proc mptMethods(mpt: HexaryChildDbRef; db: LegacyDbRef): CoreDbMptFns =
     fetchFn: proc(k: openArray[byte]): CoreDbRc[Blob] =
       db.mapRlpException("legacy/mpt/get()"):
         return ok(mpt.trie.get(k))
-      discard,
+      ,
 
     deleteFn: proc(k: openArray[byte]): CoreDbRc[void] =
       db.mapRlpException("legacy/mpt/del()"):
@@ -224,7 +224,7 @@ proc mptMethods(mpt: HexaryChildDbRef; db: LegacyDbRef): CoreDbMptFns =
     containsFn: proc(k: openArray[byte]): CoreDbRc[bool] =
       db.mapRlpException("legacy/mpt/put()"):
         return ok(mpt.trie.contains(k))
-      discard,
+      ,
 
     rootVidFn: proc(): CoreDbVidRef =
       db.bless(LegacyCoreDbVid(vHash: mpt.trie.rootHash)),
@@ -236,13 +236,13 @@ proc mptMethods(mpt: HexaryChildDbRef; db: LegacyDbRef): CoreDbMptFns =
       reraiseRlpException("legacy/mpt/pairs()"):
         for k,v in mpt.trie.pairs():
           yield (k,v)
-      discard,
+      ,
 
     replicateIt: iterator: (Blob,Blob) {.gcsafe, raises: [LegacyApiRlpError].} =
       reraiseRlpException("legacy/mpt/replicate()"):
         for k,v in mpt.trie.replicate():
           yield (k,v)
-      discard)
+      )
 
 proc accMethods(mpt: HexaryChildDbRef; db: LegacyDbRef): CoreDbAccFns =
   ## Hexary trie database handlers
@@ -254,7 +254,7 @@ proc accMethods(mpt: HexaryChildDbRef; db: LegacyDbRef): CoreDbAccFns =
       const info = "legacy/mpt/getAccount()"
       db.mapRlpException info:
         return ok mpt.trie.get(k.keccakHash.data).toCoreDbAccount(db)
-      return err(db.bless LegacyCoreDbError(error: MptNotFound, ctx: info)),
+      ,
 
     deleteFn: proc(k: EthAddress): CoreDbRc[void] =
       db.mapRlpException("legacy/mpt/del()"):
@@ -269,7 +269,7 @@ proc accMethods(mpt: HexaryChildDbRef; db: LegacyDbRef): CoreDbAccFns =
     containsFn: proc(k: EthAddress): CoreDbRc[bool] =
       db.mapRlpException("legacy/mpt/put()"):
         return ok(mpt.trie.contains k.keccakHash.data)
-      discard,
+      ,
 
     rootVidFn: proc(): CoreDbVidRef =
       db.bless(LegacyCoreDbVid(vHash: mpt.trie.rootHash)),
@@ -329,7 +329,7 @@ proc baseMethods(
     destroyFn: proc(ignore: bool) =
       if not closeDb.isNil:
        closeDb()
-      discard,
+      ,
 
     vidHashFn: proc(vid: CoreDbVidRef): Result[Hash256,void] =
       ok(vid.lvHash),
diff --git a/nimbus_verified_proxy/nimbus_verified_proxy.nim b/nimbus_verified_proxy/nimbus_verified_proxy.nim
index 44437ae1d..cecdfb188 100644
--- a/nimbus_verified_proxy/nimbus_verified_proxy.nim
+++ b/nimbus_verified_proxy/nimbus_verified_proxy.nim
@@ -109,8 +109,8 @@ proc run(config: VerifiedProxyConf) {.raises: [CatchableError].} =
         wallSlot = getBeaconTime().slotOrZero
       withBlck(signedBlock):
         when consensusFork >= ConsensusFork.Bellatrix:
-          if blck.message.is_execution_block:
-            template payload(): auto = blck.message.body.execution_payload
+          if forkyBlck.message.is_execution_block:
+            template payload(): auto = forkyBlck.message.body.execution_payload
             blockCache.add(asExecutionData(payload.asEngineExecutionPayload()))
         else: discard
       return
diff --git a/vendor/nim-chronos b/vendor/nim-chronos
index 00614476c..253bc3cfc 160000
--- a/vendor/nim-chronos
+++ b/vendor/nim-chronos
@@ -1 +1 @@
-Subproject commit 00614476c68f0553432b4bb505e24d6ad5586ae4
+Subproject commit 253bc3cfc079de35f9b96b9934ce702605400a51
diff --git a/vendor/nim-json-serialization b/vendor/nim-json-serialization
index f64d55f7f..85b7ea093 160000
--- a/vendor/nim-json-serialization
+++ b/vendor/nim-json-serialization
@@ -1 +1 @@
-Subproject commit f64d55f7ff480ed13eca9c97c15450a2c4106078
+Subproject commit 85b7ea093cb85ee4f433a617b97571bd709d30df
diff --git a/vendor/nim-serialization b/vendor/nim-serialization
index bc46b4c1c..4bdbc29e5 160000
--- a/vendor/nim-serialization
+++ b/vendor/nim-serialization
@@ -1 +1 @@
-Subproject commit bc46b4c1c1730cc25bf5fb5f3d64bd708a6ad89e
+Subproject commit 4bdbc29e54fe54049950e352bb969aab97173b35
diff --git a/vendor/nim-ssz-serialization b/vendor/nim-ssz-serialization
index 3f8946ab2..edf07d4f7 160000
--- a/vendor/nim-ssz-serialization
+++ b/vendor/nim-ssz-serialization
@@ -1 +1 @@
-Subproject commit 3f8946ab2de304a9130aa80874bc2e4025d62303
+Subproject commit edf07d4f7e0cb27afd207aa183c23cf448082d1b
diff --git a/vendor/nimbus-eth2 b/vendor/nimbus-eth2
index 5c88e74c0..35bf03a3f 160000
--- a/vendor/nimbus-eth2
+++ b/vendor/nimbus-eth2
@@ -1 +1 @@
-Subproject commit 5c88e74c08bf2c15d230b8b05d959809065a2894
+Subproject commit 35bf03a3fbb6a55c52911479760c9bbb69e7c2cc