diff --git a/nimbus/db/aristo/TODO.md b/nimbus/db/aristo/TODO.md index a35630bf9..fab7e70b0 100644 --- a/nimbus/db/aristo/TODO.md +++ b/nimbus/db/aristo/TODO.md @@ -16,3 +16,7 @@ function mentioned above. * `aristo_nearby` also qualifies for a re-write, now + +* A storage tree vid should be allowed to become stale (or locally deactivated) + if the vertex vanishes, so that it can be re-used when the tree is re-filled. + Currently the vid is removed and then re-allocated. diff --git a/nimbus/db/aristo/aristo_api.nim b/nimbus/db/aristo/aristo_api.nim index 135051b0d..b487604fa 100644 --- a/nimbus/db/aristo/aristo_api.nim +++ b/nimbus/db/aristo/aristo_api.nim @@ -18,8 +18,8 @@ import results, ./aristo_desc/desc_backend, ./aristo_init/memory_db, - "."/[aristo_delete, aristo_desc, aristo_fetch, - aristo_init, aristo_merge, aristo_path, aristo_profile, aristo_tx] + "."/[aristo_delete, aristo_desc, aristo_fetch, aristo_init, aristo_merge, + aristo_part, aristo_path, aristo_profile, aristo_tx] export AristoDbProfListRef @@ -327,6 +327,74 @@ type ## `(accPath,stoPath)` where `accPath` is the account key (into the MPT) ## and `stoPath` is the slot path of the corresponding storage area. + AristoApiPartAccountTwig* = + proc(db: AristoDbRef; + accPath: Hash256; + ): Result[seq[Blob], AristoError] + {.noRaise.} + ## This function returns a chain of rlp-encoded nodes along the argument + ## account path `accPath`, starting at the root of the accounts MPT. + + AristoApiPartGenericTwig* = + proc(db: AristoDbRef; + root: VertexID; + path: openArray[byte]; + ): Result[seq[Blob], AristoError] + {.noRaise.} + ## Variant of `partAccountTwig()`. + ## + ## Note: This function provides functionality comparable to the + ## `getBranch()` function from `hexary.nim` + + AristoApiPartStorageTwig* = + proc(db: AristoDbRef; + accPath: Hash256; + stoPath: Hash256; + ): Result[seq[Blob], AristoError] + {.noRaise.} + ## Variant of `partAccountTwig()`. + + AristoApiPartUntwigGeneric* = + proc(chain: openArray[Blob]; + root: Hash256; + path: openArray[byte]; + ): Result[Blob,AristoError] + {.noRaise.} + ## Follow and verify the argument `chain` up until the last entry, + ## which must be a leaf node. Extract the payload and pass it back + ## as the return value. + + AristoApiPartUntwigGenericOk* = + proc(chain: openArray[Blob]; + root: Hash256; + path: openArray[byte]; + payload: openArray[byte]; + ): Result[void,AristoError] + {.noRaise.} + ## Variant of `partUntwigGeneric()`. The function verifies the argument + ## `chain` of rlp-encoded nodes against the `path` and `payload` + ## arguments. + ## + ## Note: This function provides functionality comparable to the + ## `isValidBranch()` function from `hexary.nim`. + + AristoApiPartUntwigPath* = + proc(chain: openArray[Blob]; + root: Hash256; + path: Hash256; + ): Result[Blob,AristoError] + {.noRaise.} + ## Variant of `partUntwigGeneric()`. + + AristoApiPartUntwigPathOk* = + proc(chain: openArray[Blob]; + root: Hash256; + path: Hash256; + payload: openArray[byte]; + ): Result[void,AristoError] + {.noRaise.} + ## Variant of `partUntwigGenericOk()`. 
+ AristoApiPathAsBlobFn* = proc(tag: PathID; ): Blob @@ -444,6 +512,14 @@ type mergeGenericData*: AristoApiMergeGenericDataFn mergeStorageData*: AristoApiMergeStorageDataFn + partAccountTwig*: AristoApiPartAccountTwig + partGenericTwig*: AristoApiPartGenericTwig + partStorageTwig*: AristoApiPartStorageTwig + partUntwigGeneric*: AristoApiPartUntwigGeneric + partUntwigGenericOk*: AristoApiPartUntwigGenericOk + partUntwigPath*: AristoApiPartUntwigPath + partUntwigPathOk*: AristoApiPartUntwigPathOk + pathAsBlob*: AristoApiPathAsBlobFn persist*: AristoApiPersistFn reCentre*: AristoApiReCentreFn @@ -491,6 +567,14 @@ type AristoApiProfMergeGenericDataFn = "mergeGenericData" AristoApiProfMergeStorageDataFn = "mergeStorageData" + AristoApiProfPartAccountTwigFn = "partAccountTwig" + AristoApiProfPartGenericTwigFn = "partGenericTwig" + AristoApiProfPartStorageTwigFn = "partStorageTwig" + AristoApiProfPartUntwigGenericFn = "partUntwigGeneric" + AristoApiProfPartUntwigGenericOkFn = "partUntwigGenericOk" + AristoApiProfPartUntwigPathFn = "partUntwigPath" + AristoApiProfPartUntwigPathOkFn = "partUntwigPathOk" + AristoApiProfPathAsBlobFn = "pathAsBlob" AristoApiProfPersistFn = "persist" AristoApiProfReCentreFn = "reCentre" @@ -555,6 +639,14 @@ when AutoValidateApiHooks: doAssert not api.mergeGenericData.isNil doAssert not api.mergeStorageData.isNil + doAssert not api.partAccountTwig.isNil + doAssert not api.partGenericTwig.isNil + doAssert not api.partStorageTwig.isNil + doAssert not api.partUntwigGeneric.isNil + doAssert not api.partUntwigGenericOk.isNil + doAssert not api.partUntwigPath.isNil + doAssert not api.partUntwigPathOk.isNil + doAssert not api.pathAsBlob.isNil doAssert not api.persist.isNil doAssert not api.reCentre.isNil @@ -623,6 +715,14 @@ func init*(api: var AristoApiObj) = api.mergeGenericData = mergeGenericData api.mergeStorageData = mergeStorageData + api.partAccountTwig = partAccountTwig + api.partGenericTwig = partGenericTwig + api.partStorageTwig = partStorageTwig + api.partUntwigGeneric = partUntwigGeneric + api.partUntwigGenericOk = partUntwigGenericOk + api.partUntwigPath = partUntwigPath + api.partUntwigPathOk = partUntwigPathOk + api.pathAsBlob = pathAsBlob api.persist = persist api.reCentre = reCentre @@ -673,6 +773,14 @@ func dup*(api: AristoApiRef): AristoApiRef = mergeGenericData: api.mergeGenericData, mergeStorageData: api.mergeStorageData, + partAccountTwig: api.partAccountTwig, + partGenericTwig: api.partGenericTwig, + partStorageTwig: api.partStorageTwig, + partUntwigGeneric: api.partUntwigGeneric, + partUntwigGenericOk: api.partUntwigGenericOk, + partUntwigPath: api.partUntwigPath, + partUntwigPathOk: api.partUntwigPathOk, + pathAsBlob: api.pathAsBlob, persist: api.persist, reCentre: api.reCentre, @@ -845,6 +953,41 @@ func init*( AristoApiProfMergeStorageDataFn.profileRunner: result = api.mergeStorageData(a, b, c, d) + profApi.partAccountTwig = + proc(a: AristoDbRef; b: Hash256): auto = + AristoApiProfPartAccountTwigFn.profileRunner: + result = api.partAccountTwig(a, b) + + profApi.partGenericTwig = + proc(a: AristoDbRef; b: VertexID; c: openArray[byte]): auto = + AristoApiProfPartGenericTwigFn.profileRunner: + result = api.partGenericTwig(a, b, c) + + profApi.partStorageTwig = + proc(a: AristoDbRef; b: Hash256; c: Hash256): auto = + AristoApiProfPartStorageTwigFn.profileRunner: + result = api.partStorageTwig(a, b, c) + + profApi.partUntwigGeneric = + proc(a: openArray[Blob]; b: Hash256; c: openArray[byte]): auto = + AristoApiProfPartUntwigGenericFn.profileRunner: + 
result = api.partUntwigGeneric(a, b, c) + + profApi.partUntwigGenericOk = + proc(a: openArray[Blob]; b: Hash256; c, d: openArray[byte]): auto = + AristoApiProfPartUntwigGenericOkFn.profileRunner: + result = api.partUntwigGenericOk(a, b, c, d) + + profApi.partUntwigPath = + proc(a: openArray[Blob]; b, c: Hash256): auto = + AristoApiProfPartUntwigPathFn.profileRunner: + result = api.partUntwigPath(a, b, c) + + profApi.partUntwigPathOk = + proc(a: openArray[Blob]; b, c: Hash256; d: openArray[byte]): auto = + AristoApiProfPartUntwigPathOkFn.profileRunner: + result = api.partUntwigPathOk(a, b, c, d) + profApi.pathAsBlob = proc(a: PathID): auto = AristoApiProfPathAsBlobFn.profileRunner: diff --git a/nimbus/db/aristo/aristo_check.nim b/nimbus/db/aristo/aristo_check.nim index 59727d5e1..f9e15bc58 100644 --- a/nimbus/db/aristo/aristo_check.nim +++ b/nimbus/db/aristo/aristo_check.nim @@ -20,7 +20,7 @@ import results, ./aristo_walk/persistent, "."/[aristo_desc, aristo_get, aristo_init], - ./aristo_check/[check_be, check_top] + ./aristo_check/[check_be, check_top, check_twig] # ------------------------------------------------------------------------------ # Public functions # ------------------------------------------------------------------------------ @@ -78,7 +78,7 @@ proc checkBE*( proc check*( - db: AristoDbRef; # Database, top layer + db: AristoDbRef; # Database relax = false; # Check existing hashes only cache = true; # Also verify against top layer cache proofMode = false; # Has proof nodes @@ -88,6 +88,41 @@ proc check*( ? db.checkBE() ok() +proc check*( + db: AristoDbRef; # Database + root: VertexID; # Start node + path: openArray[byte]; # Data path + ): Result[void,AristoError] = + ## Check generic path `path` against portal proof generation and + ## verification. + ## + ## Note that this check might have side effects in that it might compile + ## the hash keys on the `root` sub-tree. + db.checkTwig(root, path) + +proc check*( + db: AristoDbRef; # Database + accPath: Hash256; # Account key + ): Result[void,AristoError] = + ## Check accounts tree path `accPath` against portal proof generation and + ## verification. + ## + ## Note that this check might have side effects in that it might compile + ## the hash keys on the accounts sub-tree. + db.checkTwig(VertexID(1), accPath.data) + +proc check*( + db: AristoDbRef; # Database + accPath: Hash256; # Account key + stoPath: Hash256; # Storage key + ): Result[void,AristoError] = + ## Check storage tree path `stoPath` of the account `accPath` against portal + ## proof generation and verification. + ## + ## Note that this check might have side effects in that it might compile + ## the hash keys on the particular storage sub-tree. + db.checkTwig(accPath, stoPath) + # ------------------------------------------------------------------------------ # End # ------------------------------------------------------------------------------ diff --git a/nimbus/db/aristo/aristo_check/check_twig.nim b/nimbus/db/aristo/aristo_check/check_twig.nim new file mode 100644 index 000000000..88e6ece42 --- /dev/null +++ b/nimbus/db/aristo/aristo_check/check_twig.nim @@ -0,0 +1,50 @@ +# nimbus-eth1 +# Copyright (c) 2023-2024 Status Research & Development GmbH +# Licensed under either of +# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or +# http://www.apache.org/licenses/LICENSE-2.0) +# * MIT license ([LICENSE-MIT](LICENSE-MIT) or +# http://opensource.org/licenses/MIT) +# at your option. This file may not be copied, modified, or distributed +# except according to those terms. 
+ +{.push raises: [].} + +import + eth/common, + results, + ".."/[aristo_compute, aristo_desc, aristo_fetch, aristo_part] + +# ------------------------------------------------------------------------------ +# Public functions +# ------------------------------------------------------------------------------ + +proc checkTwig*( + db: AristoDbRef; # Database + root: VertexID; # Start node + path: openArray[byte]; # Data path + ): Result[void,AristoError] = + let + proof = ? db.partGenericTwig(root, path) + key = ? db.computeKey (root,root) + pyl = ? proof.partUntwigGeneric(key.to(Hash256), path) + + ok() + +proc checkTwig*( + db: AristoDbRef; # Database + accPath: Hash256; # Account key + stoPath: Hash256; # Storage key + ): Result[void,AristoError] = + let + proof = ? db.partStorageTwig(accPath, stoPath) + vid = ? db.fetchStorageID accPath + key = ? db.computeKey (VertexID(1),vid) + pyl = ? proof.partUntwigPath(key.to(Hash256), stoPath) + + ok() + +# ------------------------------------------------------------------------------ +# End +# ------------------------------------------------------------------------------ + diff --git a/nimbus/db/aristo/aristo_debug.nim b/nimbus/db/aristo/aristo_debug.nim index 7924c6256..bb0d0b5fe 100644 --- a/nimbus/db/aristo/aristo_debug.nim +++ b/nimbus/db/aristo/aristo_debug.nim @@ -488,7 +488,7 @@ proc ppLayer( # Public functions # ------------------------------------------------------------------------------ -proc pp*(w: Hash256; codeHashOk = false): string = +proc pp*(w: Hash256; codeHashOk: bool): string = if codeHashOk: w.ppCodeHash elif w == EMPTY_ROOT_HASH: diff --git a/nimbus/db/aristo/aristo_part.nim b/nimbus/db/aristo/aristo_part.nim index f2cd637fe..771194170 100644 --- a/nimbus/db/aristo/aristo_part.nim +++ b/nimbus/db/aristo/aristo_part.nim @@ -94,7 +94,7 @@ proc partAccountTwig*( db: AristoDbRef; accPath: Hash256; ): Result[seq[Blob], AristoError] = - ## Variant of `partGetBranch()`. + ## Variant of `partGenericTwig()`. db.partGenericTwig(VertexID(1), NibblesBuf.fromBytes accPath.data) proc partStorageTwig*( @@ -102,13 +102,13 @@ proc partStorageTwig*( accPath: Hash256; stoPath: Hash256; ): Result[seq[Blob], AristoError] = - ## Variant of `partGetBranch()`. + ## Variant of `partGenericTwig()`. let vid = ? db.fetchStorageID accPath db.partGenericTwig(vid, NibblesBuf.fromBytes stoPath.data) # ---------- -proc partUntwig*( +proc partUntwigGeneric*( chain: openArray[Blob]; root: Hash256; path: openArray[byte]; @@ -120,39 +120,40 @@ proc partUntwig*( except RlpError as e: return err(PartTrkRlpError) -proc partUntwig*( +proc partUntwigPath*( chain: openArray[Blob]; root: Hash256; path: Hash256; ): Result[Blob,AristoError] = - ## Veriant of `partUntwig()`. - chain.partUntwig(root, path.data) + ## Variant of `partUntwigGeneric()`. + chain.partUntwigGeneric(root, path.data) -proc partUntwigOk*( +proc partUntwigGenericOk*( chain: openArray[Blob]; root: Hash256; path: openArray[byte]; payload: openArray[byte]; ): Result[void,AristoError] = - ## Verify the chain of rlp-encoded nodes and return the payload. + ## Verify the argument `chain` of rlp-encoded nodes against the `path` + ## and `payload` arguments. ## ## Note: This function provides a functionality comparable to the - ## `isValidBranch()` function from `hexary.nim` + ## `isValidBranch()` function from `hexary.nim`. ## - if payload == ? chain.partUntwig(root, path): + if payload == ? 
chain.partUntwigGeneric(root, path): + ok() + else: + err(PartTrkPayloadMismatch) + -proc partUntwigOk*( +proc partUntwigPathOk*( chain: openArray[Blob]; root: Hash256; path: Hash256; payload: openArray[byte]; ): Result[void,AristoError] = - ## Veriant of `partUntwigOk()`. - chain.partUntwigOk(root, path.data, payload) + ## Variant of `partUntwigGenericOk()`. + chain.partUntwigGenericOk(root, path.data, payload) # ---------------- diff --git a/nimbus/db/aristo/aristo_utils.nim b/nimbus/db/aristo/aristo_utils.nim index 094a78244..1dc7b5162 100644 --- a/nimbus/db/aristo/aristo_utils.nim +++ b/nimbus/db/aristo/aristo_utils.nim @@ -61,7 +61,7 @@ proc toNode*( if vtx.lData.pType == AccountData: let vid = vtx.lData.stoID if vid.isValid: - let key = db.getKey (root, vid) + let key = db.getKey (vid, vid) if not key.isValid: return err(@[vid]) node.key[0] = key diff --git a/nimbus/db/core_db/base.nim b/nimbus/db/core_db/base.nim index 7f9dd6d4f..8074a4f72 100644 --- a/nimbus/db/core_db/base.nim +++ b/nimbus/db/core_db/base.nim @@ -55,7 +55,8 @@ when CoreDbEnableCaptJournal: else: import ../aristo/[ - aristo_delete, aristo_desc, aristo_fetch, aristo_merge, aristo_tx], + aristo_delete, aristo_desc, aristo_fetch, aristo_merge, aristo_part, + aristo_tx], ../kvt/[kvt_desc, kvt_utils, kvt_tx] # ------------------------------------------------------------------------------ @@ -140,7 +141,7 @@ proc forget*(ctx: CoreDbCtxRef) = ctx.ifTrackNewApi: debug logTxt, api, elapsed # ------------------------------------------------------------------------------ -# Public main descriptor methods +# Public base descriptor methods # ------------------------------------------------------------------------------ proc finish*(db: CoreDbRef; eradicate = false) = @@ -212,6 +213,104 @@ proc stateBlockNumber*(db: CoreDbRef): BlockNumber = 0u64 db.ifTrackNewApi: debug logTxt, api, elapsed, result +proc verify*( + db: CoreDbRef | CoreDbMptRef | CoreDbAccRef; + proof: openArray[Blob]; + root: Hash256; + path: openArray[byte]; + ): CoreDbRc[Blob] = + ## This function is the counterpart of any of the `proof()` functions. Given + ## the argument chain of rlp-encoded nodes `proof`, this function verifies + ## that the chain represents a partial MPT starting with a root node state + ## `root` following the path `path` leading to a leaf node encapsulating a + ## payload which is passed back as the return value. + ## + ## Note: The `mpt` argument is used for administrative purposes (e.g. logging) + ## only. The functionality is provided by the `Aristo` database + ## function `aristo_part.partUntwigGeneric()` with the same prototype + ## arguments except the `db`. + ## + template mpt: untyped = + when db is CoreDbRef: + CoreDbAccRef(db.defCtx) + else: + db + mpt.setTrackNewApi BaseVerifyFn + result = block: + let rc = mpt.call(partUntwigGeneric, proof, root, path) + if rc.isOk: + ok(rc.value) + else: + err(rc.error.toError($api, ProofVerify)) + mpt.ifTrackNewApi: debug logTxt, api, elapsed, result + +proc verifyOk*( + db: CoreDbRef | CoreDbMptRef | CoreDbAccRef; + proof: openArray[Blob]; + root: Hash256; + path: openArray[byte]; + payload: openArray[byte]; + ): CoreDbRc[void] = + ## Variant of `verify()` which directly checks the argument `payload` + ## against the value that `verify()` would return. 
+ ## + template mpt: untyped = + when db is CoreDbRef: + CoreDbAccRef(db.defCtx) + else: + db + mpt.setTrackNewApi BaseVerifyOkFn + result = block: + let rc = mpt.call(partUntwigGenericOk, proof, root, path, payload) + if rc.isOk: + ok() + else: + err(rc.error.toError($api, ProofVerify)) + mpt.ifTrackNewApi: debug logTxt, api, elapsed, result + +proc verify*( + db: CoreDbRef | CoreDbMptRef | CoreDbAccRef; + proof: openArray[Blob]; + root: Hash256; + path: Hash256; + ): CoreDbRc[Blob] = + ## Variant of `verify()`. + template mpt: untyped = + when db is CoreDbRef: + CoreDbAccRef(db.defCtx) + else: + db + mpt.setTrackNewApi BaseVerifyFn + result = block: + let rc = mpt.call(partUntwigPath, proof, root, path) + if rc.isOk: + ok(rc.value) + else: + err(rc.error.toError($api, ProofVerify)) + mpt.ifTrackNewApi: debug logTxt, api, elapsed, result + +proc verifyOk*( + db: CoreDbRef | CoreDbMptRef | CoreDbAccRef; + proof: openArray[Blob]; + root: Hash256; + path: Hash256; + payload: openArray[byte]; + ): CoreDbRc[void] = + ## Variant of `verifyOk()`. + template mpt: untyped = + when db is CoreDbRef: + CoreDbAccRef(db.defCtx) + else: + db + mpt.setTrackNewApi BaseVerifyOkFn + result = block: + let rc = mpt.call(partUntwigPathOk, proof, root, path, payload) + if rc.isOk: + ok() + else: + err(rc.error.toError($api, ProofVerify)) + mpt.ifTrackNewApi: debug logTxt, api, elapsed, result + # ------------------------------------------------------------------------------ # Public key-value table methods # ------------------------------------------------------------------------------ @@ -323,6 +422,22 @@ proc getGeneric*( # ----------- generic MPT --------------- +proc proof*( + mpt: CoreDbMptRef; + key: openArray[byte]; + ): CoreDbRc[seq[Blob]] = + ## On the generic MPT, collect the nodes along the `key` interpreted as + ## path. Return these path nodes as a chain of rlp-encoded blobs. + ## + mpt.setTrackNewApi MptProofFn + result = block: + let rc = mpt.call(partGenericTwig, mpt.mpt, CoreDbVidGeneric, key) + if rc.isOk: + ok(rc.value) + else: + err(rc.error.toError($api, ProofCreate)) + mpt.ifTrackNewApi: debug logTxt, api, elapsed, result + proc fetch*(mpt: CoreDbMptRef; key: openArray[byte]): CoreDbRc[Blob] = ## Fetch data from the argument `mpt`. The function always returns a ## non-empty `Blob` or an error code. @@ -422,6 +537,22 @@ proc getAccounts*(ctx: CoreDbCtxRef): CoreDbAccRef = # ----------- accounts --------------- +proc proof*( + acc: CoreDbAccRef; + accPath: Hash256; + ): CoreDbRc[seq[Blob]] = + ## On the accounts MPT, collect the nodes along the `accPath` interpreted as + ## path. Return these path nodes as a chain of rlp-encoded blobs. + ## + acc.setTrackNewApi AccProofFn + result = block: + let rc = acc.call(partAccountTwig, acc.mpt, accPath) + if rc.isOk: + ok(rc.value) + else: + err(rc.error.toError($api, ProofCreate)) + acc.ifTrackNewApi: debug logTxt, api, elapsed, result + proc fetch*( acc: CoreDbAccRef; accPath: Hash256; @@ -529,6 +660,24 @@ proc state*(acc: CoreDbAccRef; updateOk = false): CoreDbRc[Hash256] = # ------------ storage --------------- +proc slotProof*( + acc: CoreDbAccRef; + accPath: Hash256; + stoPath: Hash256; + ): CoreDbRc[seq[Blob]] = + ## On the storage MPT related to the argument account `accPath`, collect the + ## nodes along the `stoPath` interpreted as path. Return these path nodes as + ## a chain of rlp-encoded blobs. 
+ ## + acc.setTrackNewApi AccSlotProofFn + result = block: + let rc = acc.call(partStorageTwig, acc.mpt, accPath, stoPath) + if rc.isOk: + ok(rc.value) + else: + err(rc.error.toError($api, ProofCreate)) + acc.ifTrackNewApi: debug logTxt, api, elapsed, result + proc slotFetch*( acc: CoreDbAccRef; accPath: Hash256; diff --git a/nimbus/db/core_db/base/api_tracking.nim b/nimbus/db/core_db/base/api_tracking.nim index 039f6803b..05c9e7f7a 100644 --- a/nimbus/db/core_db/base/api_tracking.nim +++ b/nimbus/db/core_db/base/api_tracking.nim @@ -11,7 +11,7 @@ {.push raises: [].} import - std/[strutils, times, typetraits], + std/[sequtils, strutils, times, typetraits], eth/common, results, stew/byteutils, @@ -36,6 +36,7 @@ type AccForgetFn = "acc/forget" AccHasPathFn = "acc/hasPath" AccMergeFn = "acc/merge" + AccProofFn = "acc/proof" AccRecastFn = "recast" AccStateFn = "acc/state" @@ -43,6 +44,7 @@ type AccSlotDeleteFn = "slotDelete" AccSlotHasPathFn = "slotHasPath" AccSlotMergeFn = "slotMerge" + AccSlotProofFn = "slotProof" AccSlotStateFn = "slotState" AccSlotStateEmptyFn = "slotStateEmpty" AccSlotStateEmptyOrVoidFn = "slotStateEmptyOrVoid" @@ -54,6 +56,8 @@ type BaseNewTxFn = "newTransaction" BasePersistentFn = "persistent" BaseStateBlockNumberFn = "stateBlockNumber" + BaseVerifyFn = "verify" + BaseVerifyOkFn = "verifyOk" CptKvtLogFn = "kvtLog" CptLevelFn = "level" @@ -80,6 +84,7 @@ type MptForgetFn = "mpt/forget" MptHasPathFn = "mpt/hasPath" MptMergeFn = "mpt/merge" + MptProofFn = "mpt/proof" MptPairsIt = "mpt/pairs" MptReplicateIt = "mpt/replicate" MptStateFn = "mpt/state" @@ -122,6 +127,10 @@ func toStr(rc: CoreDbRc[Blob]): string = if rc.isOk: "ok(Blob[" & $rc.value.len & "])" else: "err(" & rc.error.toStr & ")" +func toStr(rc: CoreDbRc[seq[Blob]]): string = + if rc.isOk: "ok([" & rc.value.mapIt("[#" & $it.len & "]").join(",") & "])" + else: "err(" & rc.error.toStr & ")" + func toStr(rc: CoreDbRc[Hash256]): string = if rc.isOk: "ok(" & rc.value.toStr & ")" else: "err(" & rc.error.toStr & ")" diff --git a/nimbus/db/core_db/base/base_desc.nim b/nimbus/db/core_db/base/base_desc.nim index be660d398..8ad2c0bb5 100644 --- a/nimbus/db/core_db/base/base_desc.nim +++ b/nimbus/db/core_db/base/base_desc.nim @@ -54,6 +54,8 @@ type HashNotAvailable KvtNotFound MptNotFound + ProofCreate + ProofVerify RlpException StoNotFound TxPending diff --git a/nimbus/db/core_db/base/base_helpers.nim b/nimbus/db/core_db/base/base_helpers.nim index 1a41b4ef7..1b7a0aef4 100644 --- a/nimbus/db/core_db/base/base_helpers.nim +++ b/nimbus/db/core_db/base/base_helpers.nim @@ -51,10 +51,10 @@ proc bless*(ctx: CoreDbCtxRef; dsc: CoreDbMptRef | CoreDbTxRef): auto = # ------------------------------------------------------------------------------ template kvt*(dsc: CoreDbKvtRef): KvtDbRef = - dsc.distinctBase.kvt + CoreDbCtxRef(dsc).kvt template ctx*(kvt: CoreDbKvtRef): CoreDbCtxRef = - kvt.distinctBase + CoreDbCtxRef(kvt) # --------------- @@ -65,7 +65,7 @@ template call*(api: KvtApiRef; fn: untyped; args: varArgs[untyped]): untyped = fn(args) template call*(kvt: CoreDbKvtRef; fn: untyped; args: varArgs[untyped]): untyped = - kvt.distinctBase.parent.kvtApi.call(fn, args) + CoreDbCtxRef(kvt).parent.kvtApi.call(fn, args) # --------------- @@ -81,13 +81,13 @@ func toError*(e: KvtError; s: string; error = Unspecified): CoreDbError = # ------------------------------------------------------------------------------ template mpt*(dsc: CoreDbAccRef | CoreDbMptRef): AristoDbRef = - dsc.distinctBase.mpt + CoreDbCtxRef(dsc).mpt template mpt*(tx: 
CoreDbTxRef): AristoDbRef = tx.ctx.mpt template ctx*(acc: CoreDbAccRef): CoreDbCtxRef = - acc.distinctBase + CoreDbCtxRef(acc) # --------------- @@ -102,7 +102,7 @@ template call*( fn: untyped; args: varArgs[untyped]; ): untyped = - acc.distinctBase.parent.ariApi.call(fn, args) + CoreDbCtxRef(acc).parent.ariApi.call(fn, args) # --------------- diff --git a/nimbus/db/kvt/kvt_api.nim b/nimbus/db/kvt/kvt_api.nim index cf05eaf5a..f1cad133e 100644 --- a/nimbus/db/kvt/kvt_api.nim +++ b/nimbus/db/kvt/kvt_api.nim @@ -365,8 +365,9 @@ func init*( data.list[KvtApiProfBeLenKvpFn.ord].masked = true beDup.putKvpFn = - proc(a: PutHdlRef; b: openArray[(Blob,Blob)]) = - be.putKvpFn(a,b) + proc(a: PutHdlRef; b, c: openArray[byte]) = + KvtApiProfBePutKvpFn.profileRunner: + be.putKvpFn(a, b, c) data.list[KvtApiProfBePutKvpFn.ord].masked = true beDup.putEndFn = diff --git a/tests/test_aristo/test_portal_proof.nim b/tests/test_aristo/test_portal_proof.nim index 91c156e1b..72f362b43 100644 --- a/tests/test_aristo/test_portal_proof.nim +++ b/tests/test_aristo/test_portal_proof.nim @@ -183,13 +183,13 @@ proc testCreatePortalProof(node: JsonNode, testStatusIMPL: var TestStatus) = # Verify proof let root = pq.db.getKey((rVid,rVid)).to(Hash256) block: - let rc = proof.chain.partUntwig(root, path) + let rc = proof.chain.partUntwigPath(root, path) check rc.isOk if rc.isOk: check rc.value == pyl # Just for completeness (same a above combined into a single function) - check proof.chain.partUntwigOk(root, path, pyl).isOk + check proof.chain.partUntwigPathOk(root, path, pyl).isOk # Extension nodes are rare, so there is one created, inserted and the # previous test repeated. @@ -212,12 +212,12 @@ proc testCreatePortalProof(node: JsonNode, testStatusIMPL: var TestStatus) = let root = pq.db.getKey((rVid,rVid)).to(Hash256) block: - let rc = chain.partUntwig(root, path) + let rc = chain.partUntwigPath(root, path) check rc.isOk if rc.isOk: check rc.value == pyl - check chain.partUntwigOk(root, path, pyl).isOk + check chain.partUntwigPathOk(root, path, pyl).isOk # ------------------------------------------------------------------------------ # Test
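
The pairing of the new proof creation and verification entry points is easiest to see in a short usage sketch. The following Nim fragment is illustrative only and not part of the change set: it assumes an already opened accounts descriptor `acc` (a `CoreDbAccRef`) and a hashed account key `accPath`, with the relevant core_db module and `eth/common`/`results` imported, and it exercises only the `proof()`, `state()`, `verify()` and `verifyOk()` calls introduced above.

    proc demoAccountProof(acc: CoreDbAccRef; accPath: Hash256) =
      # Hypothetical helper, not part of this PR.
      # Collect the chain of rlp-encoded nodes along the account path.
      let chain = acc.proof(accPath).valueOr:
        return
      # The proof is checked against the current accounts state root.
      let root = acc.state(updateOk = true).valueOr:
        return
      # verify() walks the chain down to the leaf and returns its payload ...
      let payload = acc.verify(chain, root, accPath).valueOr:
        return
      # ... while verifyOk() re-checks the same chain against that payload.
      doAssert acc.verifyOk(chain, root, accPath, payload).isOk

An equivalent pairing exists for storage slots via `slotProof()` together with the `Hash256` path variants of `verify()` and `verifyOk()`.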