Provide portal proof functionality with coredb (#2550)

* Provide portal proof functions in `aristo_api`

why:
  So it can be fully supported by `CoreDb`

* Fix prototype in `kvt_api`

* Fix node constructor for account leafs with storage trees

* Provide simple path check based on portal proof functionality

* Provide portal proof functionality in `CoreDb`

* Update TODO list
This commit is contained in:
Jordan Hrycaj 2024-08-07 11:30:55 +00:00 committed by GitHub
parent 3cef119b78
commit 488bdbc267
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 428 additions and 34 deletions

View File

@ -16,3 +16,7 @@
function mentioned above. function mentioned above.
* `aristo_nearby` also qualifies for a re-write, now * `aristo_nearby` also qualifies for a re-write, now
* A storage tree vid should be allowed to become stale (or locally deactivated)
if the vertex vanishes to be re-used when the tree is re-filled. Currently
the vid is removed and then re-allocated.

View File

@ -18,8 +18,8 @@ import
results, results,
./aristo_desc/desc_backend, ./aristo_desc/desc_backend,
./aristo_init/memory_db, ./aristo_init/memory_db,
"."/[aristo_delete, aristo_desc, aristo_fetch, "."/[aristo_delete, aristo_desc, aristo_fetch, aristo_init, aristo_merge,
aristo_init, aristo_merge, aristo_path, aristo_profile, aristo_tx] aristo_part, aristo_path, aristo_profile, aristo_tx]
export export
AristoDbProfListRef AristoDbProfListRef
@ -327,6 +327,74 @@ type
## `(accPath,stoPath)` where `accPath` is the account key (into the MPT) ## `(accPath,stoPath)` where `accPath` is the account key (into the MPT)
## and `stoPath` is the slot path of the corresponding storage area. ## and `stoPath` is the slot path of the corresponding storage area.
AristoApiPartAccountTwig* =
proc(db: AristoDbRef;
accPath: Hash256;
): Result[seq[Blob], AristoError]
{.noRaise.}
## This function returns a chain of rlp-encoded nodes along the argument
## path `(root,path)`.
AristoApiPartGenericTwig* =
proc(db: AristoDbRef;
root: VertexID;
path: openArray[byte];
): Result[seq[Blob], AristoError]
{.noRaise.}
## Variant of `partAccountTwig()`.
##
## Note: This function provides a functionality comparable to the
## `getBranch()` function from `hexary.nim`
AristoApiPartStorageTwig* =
proc(db: AristoDbRef;
accPath: Hash256;
stoPath: Hash256;
): Result[seq[Blob], AristoError]
{.noRaise.}
## Variant of `partAccountTwig()`.
AristoApiPartUntwigGeneric* =
proc(chain: openArray[Blob];
root: Hash256;
path: openArray[byte];
): Result[Blob,AristoError]
{.noRaise.}
## Follow and verify the argument `chain` up until the last entry
## which must be a leaf node. Extract the payload and pass it on
## as return code.
AristoApiPartUntwigGenericOk* =
proc(chain: openArray[Blob];
root: Hash256;
path: openArray[byte];
payload: openArray[byte];
): Result[void,AristoError]
{.noRaise.}
## Variant of `partUntwigGeneric()`. The function verifies the argument
## `chain` of rlp-encoded nodes against the `path` and `payload`
## arguments.
##
## Note: This function provides a functionality comparable to the
## `isValidBranch()` function from `hexary.nim`.
AristoApiPartUntwigPath* =
proc(chain: openArray[Blob];
root: Hash256;
path: Hash256;
): Result[Blob,AristoError]
{.noRaise.}
## Variant of `partUntwigGeneric()`.
AristoApiPartUntwigPathOk* =
proc(chain: openArray[Blob];
root: Hash256;
path: Hash256;
payload: openArray[byte];
): Result[void,AristoError]
{.noRaise.}
## Variant of `partUntwigGenericOk()`.
AristoApiPathAsBlobFn* = AristoApiPathAsBlobFn* =
proc(tag: PathID; proc(tag: PathID;
): Blob ): Blob
@ -444,6 +512,14 @@ type
mergeGenericData*: AristoApiMergeGenericDataFn mergeGenericData*: AristoApiMergeGenericDataFn
mergeStorageData*: AristoApiMergeStorageDataFn mergeStorageData*: AristoApiMergeStorageDataFn
partAccountTwig*: AristoApiPartAccountTwig
partGenericTwig*: AristoApiPartGenericTwig
partStorageTwig*: AristoApiPartStorageTwig
partUntwigGeneric*: AristoApiPartUntwigGeneric
partUntwigGenericOk*: AristoApiPartUntwigGenericOk
partUntwigPath*: AristoApiPartUntwigPath
partUntwigPathOk*: AristoApiPartUntwigPathOk
pathAsBlob*: AristoApiPathAsBlobFn pathAsBlob*: AristoApiPathAsBlobFn
persist*: AristoApiPersistFn persist*: AristoApiPersistFn
reCentre*: AristoApiReCentreFn reCentre*: AristoApiReCentreFn
@ -491,6 +567,14 @@ type
AristoApiProfMergeGenericDataFn = "mergeGenericData" AristoApiProfMergeGenericDataFn = "mergeGenericData"
AristoApiProfMergeStorageDataFn = "mergeStorageData" AristoApiProfMergeStorageDataFn = "mergeStorageData"
AristoApiProfPartAccountTwigFn = "partAccountTwig"
AristoApiProfPartGenericTwigFn = "partGenericTwig"
AristoApiProfPartStorageTwigFn = "partStorageTwig"
AristoApiProfPartUntwigGenericFn = "partUntwigGeneric"
AristoApiProfPartUntwigGenericOkFn = "partUntwigGenericOk"
AristoApiProfPartUntwigPathFn = "partUntwigPath"
AristoApiProfPartUntwigPathOkFn = "partUntwigPathOk"
AristoApiProfPathAsBlobFn = "pathAsBlob" AristoApiProfPathAsBlobFn = "pathAsBlob"
AristoApiProfPersistFn = "persist" AristoApiProfPersistFn = "persist"
AristoApiProfReCentreFn = "reCentre" AristoApiProfReCentreFn = "reCentre"
@ -555,6 +639,14 @@ when AutoValidateApiHooks:
doAssert not api.mergeGenericData.isNil doAssert not api.mergeGenericData.isNil
doAssert not api.mergeStorageData.isNil doAssert not api.mergeStorageData.isNil
doAssert not api.partAccountTwig.isNil
doAssert not api.partGenericTwig.isNil
doAssert not api.partStorageTwig.isNil
doAssert not api.partUntwigGeneric.isNil
doAssert not api.partUntwigGenericOk.isNil
doAssert not api.partUntwigPath.isNil
doAssert not api.partUntwigPathOk.isNil
doAssert not api.pathAsBlob.isNil doAssert not api.pathAsBlob.isNil
doAssert not api.persist.isNil doAssert not api.persist.isNil
doAssert not api.reCentre.isNil doAssert not api.reCentre.isNil
@ -623,6 +715,14 @@ func init*(api: var AristoApiObj) =
api.mergeGenericData = mergeGenericData api.mergeGenericData = mergeGenericData
api.mergeStorageData = mergeStorageData api.mergeStorageData = mergeStorageData
api.partAccountTwig = partAccountTwig
api.partGenericTwig = partGenericTwig
api.partStorageTwig = partStorageTwig
api.partUntwigGeneric = partUntwigGeneric
api.partUntwigGenericOk = partUntwigGenericOk
api.partUntwigPath = partUntwigPath
api.partUntwigPathOk = partUntwigPathOk
api.pathAsBlob = pathAsBlob api.pathAsBlob = pathAsBlob
api.persist = persist api.persist = persist
api.reCentre = reCentre api.reCentre = reCentre
@ -673,6 +773,14 @@ func dup*(api: AristoApiRef): AristoApiRef =
mergeGenericData: api.mergeGenericData, mergeGenericData: api.mergeGenericData,
mergeStorageData: api.mergeStorageData, mergeStorageData: api.mergeStorageData,
partAccountTwig: api.partAccountTwig,
partGenericTwig: api.partGenericTwig,
partStorageTwig: api.partStorageTwig,
partUntwigGeneric: api.partUntwigGeneric,
partUntwigGenericOk: api.partUntwigGenericOk,
partUntwigPath: api.partUntwigPath,
partUntwigPathOk: api.partUntwigPathOk,
pathAsBlob: api.pathAsBlob, pathAsBlob: api.pathAsBlob,
persist: api.persist, persist: api.persist,
reCentre: api.reCentre, reCentre: api.reCentre,
@ -845,6 +953,41 @@ func init*(
AristoApiProfMergeStorageDataFn.profileRunner: AristoApiProfMergeStorageDataFn.profileRunner:
result = api.mergeStorageData(a, b, c, d) result = api.mergeStorageData(a, b, c, d)
profApi.partAccountTwig =
proc(a: AristoDbRef; b: Hash256): auto =
AristoApiProfPartAccountTwigFn.profileRunner:
result = api.partAccountTwig(a, b)
profApi.partGenericTwig =
proc(a: AristoDbRef; b: VertexID; c: openArray[byte]): auto =
AristoApiProfPartGenericTwigFn.profileRunner:
result = api.partGenericTwig(a, b, c)
profApi.partStorageTwig =
proc(a: AristoDbRef; b: Hash256; c: Hash256): auto =
AristoApiProfPartStorageTwigFn.profileRunner:
result = api.partStorageTwig(a, b, c)
profApi.partUntwigGeneric =
proc(a: openArray[Blob]; b: Hash256; c: openArray[byte]): auto =
AristoApiProfPartUntwigGenericFn.profileRunner:
result = api.partUntwigGeneric(a, b, c)
profApi.partUntwigGenericOk =
proc(a: openArray[Blob]; b: Hash256; c, d: openArray[byte]): auto =
AristoApiProfPartUntwigGenericOkFn.profileRunner:
result = api.partUntwigGenericOk(a, b, c, d)
profApi.partUntwigPath =
proc(a: openArray[Blob]; b, c: Hash256): auto =
AristoApiProfPartUntwigPathFn.profileRunner:
result = api.partUntwigPath(a, b, c)
profApi.partUntwigPathOk =
proc(a: openArray[Blob]; b, c: Hash256; d: openArray[byte]): auto =
AristoApiProfPartUntwigPathOkFn.profileRunner:
result = api.partUntwigPathOk(a, b, c, d)
profApi.pathAsBlob = profApi.pathAsBlob =
proc(a: PathID): auto = proc(a: PathID): auto =
AristoApiProfPathAsBlobFn.profileRunner: AristoApiProfPathAsBlobFn.profileRunner:

View File

@ -20,7 +20,7 @@ import
results, results,
./aristo_walk/persistent, ./aristo_walk/persistent,
"."/[aristo_desc, aristo_get, aristo_init], "."/[aristo_desc, aristo_get, aristo_init],
./aristo_check/[check_be, check_top] ./aristo_check/[check_be, check_top, check_twig]
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Public functions # Public functions
@ -78,7 +78,7 @@ proc checkBE*(
proc check*( proc check*(
db: AristoDbRef; # Database, top layer db: AristoDbRef; # Database
relax = false; # Check existing hashes only relax = false; # Check existing hashes only
cache = true; # Also verify against top layer cache cache = true; # Also verify against top layer cache
proofMode = false; # Has proof nodes proofMode = false; # Has proof nodes
@ -88,6 +88,41 @@ proc check*(
? db.checkBE() ? db.checkBE()
ok() ok()
proc check*(
    db: AristoDbRef;                   # Database
    root: VertexID;                    # Start node
    path: openArray[byte];             # Data path
      ): Result[void,AristoError] =
  ## Check generic path `path` against portal proof generation and
  ## verification, i.e. create a proof for the path and verify it again.
  ##
  ## Note that this check might have side effects in that it might compile
  ## the hash keys on the `root` sub-tree.
  db.checkTwig(root, path)
proc check*(
    db: AristoDbRef;                   # Database
    accPath: Hash256;                  # Account key
      ): Result[void,AristoError] =
  ## Check accounts tree path `accPath` against portal proof generation and
  ## verification. The accounts tree is rooted at `VertexID(1)`.
  ##
  ## Note that this check might have side effects in that it might compile
  ## the hash keys on the accounts sub-tree.
  db.checkTwig(VertexID(1), accPath.data)
proc check*(
    db: AristoDbRef;                   # Database
    accPath: Hash256;                  # Account key
    stoPath: Hash256;                  # Storage key
      ): Result[void,AristoError] =
  ## Check the slot path `stoPath` on the storage tree of account `accPath`
  ## against portal proof generation and verification.
  ##
  ## Note that this check might have side effects in that it might compile
  ## the hash keys on the particular storage sub-tree.
  db.checkTwig(accPath, stoPath)
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# End # End
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------

View File

@ -0,0 +1,50 @@
# nimbus-eth1
# Copyright (c) 2023-2024 Status Research & Development GmbH
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
# http://www.apache.org/licenses/LICENSE-2.0)
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
# http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed
# except according to those terms.
{.push raises: [].}
import
eth/common,
results,
".."/[aristo_compute, aristo_desc, aristo_fetch, aristo_part]
# ------------------------------------------------------------------------------
# Public functions
# ------------------------------------------------------------------------------
proc checkTwig*(
    db: AristoDbRef;                   # Database
    root: VertexID;                    # Start node
    path: openArray[byte];             # Data path
      ): Result[void,AristoError] =
  ## Round-trip check for portal proofs on a generic sub-tree: create the
  ## proof chain for `(root,path)` via `partGenericTwig()` and verify it
  ## against the Merkle key of the `root` vertex via `partUntwigGeneric()`.
  ##
  ## Note: `computeKey()` may compile missing hash keys on the `root`
  ## sub-tree as a side effect.
  let
    proof = ? db.partGenericTwig(root, path)
    key = ? db.computeKey (root,root)
    # Extracted payload is discarded -- only successful verification matters
    pyl = ? proof.partUntwigGeneric(key.to(Hash256), path)
  ok()
proc checkTwig*(
    db: AristoDbRef;                   # Database
    accPath: Hash256;                  # Account key
    stoPath: Hash256;                  # Storage key
      ): Result[void,AristoError] =
  ## Round-trip check for portal proofs on a storage sub-tree: create the
  ## proof chain for the slot `stoPath` of account `accPath` and verify it
  ## against the Merkle key of the account's storage root vertex.
  ##
  ## Note: `computeKey()` may compile missing hash keys as a side effect.
  let
    proof = ? db.partStorageTwig(accPath, stoPath)
    vid = ? db.fetchStorageID accPath
    key = ? db.computeKey (VertexID(1),vid)
    # Extracted payload is discarded -- only successful verification matters
    pyl = ? proof.partUntwigPath(key.to(Hash256), stoPath)
  ok()
# ------------------------------------------------------------------------------
# End
# ------------------------------------------------------------------------------

View File

@ -488,7 +488,7 @@ proc ppLayer(
# Public functions # Public functions
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
proc pp*(w: Hash256; codeHashOk = false): string = proc pp*(w: Hash256; codeHashOk: bool): string =
if codeHashOk: if codeHashOk:
w.ppCodeHash w.ppCodeHash
elif w == EMPTY_ROOT_HASH: elif w == EMPTY_ROOT_HASH:

View File

@ -94,7 +94,7 @@ proc partAccountTwig*(
db: AristoDbRef; db: AristoDbRef;
accPath: Hash256; accPath: Hash256;
): Result[seq[Blob], AristoError] = ): Result[seq[Blob], AristoError] =
## Variant of `partGetBranch()`. ## Variant of `partGenericTwig()`.
db.partGenericTwig(VertexID(1), NibblesBuf.fromBytes accPath.data) db.partGenericTwig(VertexID(1), NibblesBuf.fromBytes accPath.data)
proc partStorageTwig*( proc partStorageTwig*(
@ -102,13 +102,13 @@ proc partStorageTwig*(
accPath: Hash256; accPath: Hash256;
stoPath: Hash256; stoPath: Hash256;
): Result[seq[Blob], AristoError] = ): Result[seq[Blob], AristoError] =
## Variant of `partGetBranch()`. ## Variant of `partGenericTwig()`.
let vid = ? db.fetchStorageID accPath let vid = ? db.fetchStorageID accPath
db.partGenericTwig(vid, NibblesBuf.fromBytes stoPath.data) db.partGenericTwig(vid, NibblesBuf.fromBytes stoPath.data)
# ---------- # ----------
proc partUntwig*( proc partUntwigGeneric*(
chain: openArray[Blob]; chain: openArray[Blob];
root: Hash256; root: Hash256;
path: openArray[byte]; path: openArray[byte];
@ -120,39 +120,40 @@ proc partUntwig*(
except RlpError as e: except RlpError as e:
return err(PartTrkRlpError) return err(PartTrkRlpError)
proc partUntwig*( proc partUntwigPath*(
chain: openArray[Blob]; chain: openArray[Blob];
root: Hash256; root: Hash256;
path: Hash256; path: Hash256;
): Result[Blob,AristoError] = ): Result[Blob,AristoError] =
## Veriant of `partUntwig()`. ## Variant of `partUntwigGeneric()`.
chain.partUntwig(root, path.data) chain.partUntwigGeneric(root, path.data)
proc partUntwigOk*( proc partUntwigGenericOk*(
chain: openArray[Blob]; chain: openArray[Blob];
root: Hash256; root: Hash256;
path: openArray[byte]; path: openArray[byte];
payload: openArray[byte]; payload: openArray[byte];
): Result[void,AristoError] = ): Result[void,AristoError] =
## Verify the chain of rlp-encoded nodes and return the payload. ## Verify the argument `chain` of rlp-encoded nodes against the `path`
## and `payload` arguments.
## ##
## Note: This function provides a functionality comparable to the ## Note: This function provides a functionality comparable to the
## `isValidBranch()` function from `hexary.nim` ## `isValidBranch()` function from `hexary.nim`.
## ##
if payload == ? chain.partUntwig(root, path): if payload == ? chain.partUntwigGeneric(root, path):
ok() ok()
else: else:
err(PartTrkPayloadMismatch) err(PartTrkPayloadMismatch)
proc partUntwigOk*( proc partUntwigPathOk*(
chain: openArray[Blob]; chain: openArray[Blob];
root: Hash256; root: Hash256;
path: Hash256; path: Hash256;
payload: openArray[byte]; payload: openArray[byte];
): Result[void,AristoError] = ): Result[void,AristoError] =
## Veriant of `partUntwigOk()`. ## Variant of `partUntwigGenericOk()`.
chain.partUntwigOk(root, path.data, payload) chain.partUntwigGenericOk(root, path.data, payload)
# ---------------- # ----------------

View File

@ -61,7 +61,7 @@ proc toNode*(
if vtx.lData.pType == AccountData: if vtx.lData.pType == AccountData:
let vid = vtx.lData.stoID let vid = vtx.lData.stoID
if vid.isValid: if vid.isValid:
let key = db.getKey (root, vid) let key = db.getKey (vid, vid)
if not key.isValid: if not key.isValid:
return err(@[vid]) return err(@[vid])
node.key[0] = key node.key[0] = key

View File

@ -55,7 +55,8 @@ when CoreDbEnableCaptJournal:
else: else:
import import
../aristo/[ ../aristo/[
aristo_delete, aristo_desc, aristo_fetch, aristo_merge, aristo_tx], aristo_delete, aristo_desc, aristo_fetch, aristo_merge, aristo_part,
aristo_tx],
../kvt/[kvt_desc, kvt_utils, kvt_tx] ../kvt/[kvt_desc, kvt_utils, kvt_tx]
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
@ -140,7 +141,7 @@ proc forget*(ctx: CoreDbCtxRef) =
ctx.ifTrackNewApi: debug logTxt, api, elapsed ctx.ifTrackNewApi: debug logTxt, api, elapsed
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Public main descriptor methods # Public base descriptor methods
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
proc finish*(db: CoreDbRef; eradicate = false) = proc finish*(db: CoreDbRef; eradicate = false) =
@ -212,6 +213,104 @@ proc stateBlockNumber*(db: CoreDbRef): BlockNumber =
0u64 0u64
db.ifTrackNewApi: debug logTxt, api, elapsed, result db.ifTrackNewApi: debug logTxt, api, elapsed, result
proc verify*(
    db: CoreDbRef | CoreDbMptRef | CoreDbAccRef;
    proof: openArray[Blob];
    root: Hash256;
    path: openArray[byte];
      ): CoreDbRc[Blob] =
  ## This function is the counterpart of any of the `proof()` functions. Given
  ## the argument chain of rlp-encoded nodes `proof`, this function verifies
  ## that the chain represents a partial MPT starting with a root node state
  ## `root` following the path `path` leading to a leaf node encapsulating a
  ## payload which is passed back as return code.
  ##
  ## Note: The `mpt` argument is used for administrative purposes (e.g.
  ##       logging) only. The functionality is provided by the `Aristo`
  ##       database function `aristo_part.partUntwigGeneric()` with the
  ##       same prototype arguments except the `db`.
  ##
  template mpt: untyped =
    when db is CoreDbRef:
      # On the base descriptor, fall back to the default context
      CoreDbAccRef(db.defCtx)
    else:
      db
  mpt.setTrackNewApi BaseVerifyFn
  result = block:
    let rc = mpt.call(partUntwigGeneric, proof, root, path)
    if rc.isOk:
      ok(rc.value)
    else:
      # Wrap the Aristo error with the `ProofVerify` context for `CoreDb`
      err(rc.error.toError($api, ProofVerify))
  mpt.ifTrackNewApi: debug logTxt, api, elapsed, result
proc verifyOk*(
    db: CoreDbRef | CoreDbMptRef | CoreDbAccRef;
    proof: openArray[Blob];
    root: Hash256;
    path: openArray[byte];
    payload: openArray[byte];
      ): CoreDbRc[void] =
  ## Variant of `verify()` which directly checks the argument `payload`
  ## against what would be the return code in `verify()`.
  ##
  ## The work is delegated to `aristo_part.partUntwigGenericOk()`; the
  ## `mpt` argument is used for administrative purposes (e.g. logging) only.
  ##
  template mpt: untyped =
    when db is CoreDbRef:
      CoreDbAccRef(db.defCtx)
    else:
      db
  mpt.setTrackNewApi BaseVerifyOkFn
  result = block:
    let rc = mpt.call(partUntwigGenericOk, proof, root, path, payload)
    if rc.isOk:
      ok()
    else:
      err(rc.error.toError($api, ProofVerify))
  mpt.ifTrackNewApi: debug logTxt, api, elapsed, result
proc verify*(
    db: CoreDbRef | CoreDbMptRef | CoreDbAccRef;
    proof: openArray[Blob];
    root: Hash256;
    path: Hash256;
      ): CoreDbRc[Blob] =
  ## Variant of `verify()` for a fixed-width `Hash256` path, backed by
  ## `aristo_part.partUntwigPath()`.
  template mpt: untyped =
    when db is CoreDbRef:
      CoreDbAccRef(db.defCtx)
    else:
      db
  mpt.setTrackNewApi BaseVerifyFn
  result = block:
    let rc = mpt.call(partUntwigPath, proof, root, path)
    if rc.isOk:
      ok(rc.value)
    else:
      err(rc.error.toError($api, ProofVerify))
  mpt.ifTrackNewApi: debug logTxt, api, elapsed, result
proc verifyOk*(
    db: CoreDbRef | CoreDbMptRef | CoreDbAccRef;
    proof: openArray[Blob];
    root: Hash256;
    path: Hash256;
    payload: openArray[byte];
      ): CoreDbRc[void] =
  ## Variant of `verifyOk()` for a fixed-width `Hash256` path, backed by
  ## `aristo_part.partUntwigPathOk()`.
  template mpt: untyped =
    when db is CoreDbRef:
      CoreDbAccRef(db.defCtx)
    else:
      db
  mpt.setTrackNewApi BaseVerifyOkFn
  result = block:
    let rc = mpt.call(partUntwigPathOk, proof, root, path, payload)
    if rc.isOk:
      ok()
    else:
      err(rc.error.toError($api, ProofVerify))
  mpt.ifTrackNewApi: debug logTxt, api, elapsed, result
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Public key-value table methods # Public key-value table methods
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
@ -323,6 +422,22 @@ proc getGeneric*(
# ----------- generic MPT --------------- # ----------- generic MPT ---------------
proc proof*(
    mpt: CoreDbMptRef;
    key: openArray[byte];
      ): CoreDbRc[seq[Blob]] =
  ## On the generic MPT, collect the nodes along the `key` interpreted as
  ## path. Return these path nodes as a chain of rlp-encoded blobs.
  ##
  ## The resulting chain can be checked with the `verify()` functions.
  ##
  mpt.setTrackNewApi MptProofFn
  result = block:
    # `CoreDbVidGeneric` presumably selects the generic sub-trie root -- TODO confirm
    let rc = mpt.call(partGenericTwig, mpt.mpt, CoreDbVidGeneric, key)
    if rc.isOk:
      ok(rc.value)
    else:
      err(rc.error.toError($api, ProofCreate))
  mpt.ifTrackNewApi: debug logTxt, api, elapsed, result
proc fetch*(mpt: CoreDbMptRef; key: openArray[byte]): CoreDbRc[Blob] = proc fetch*(mpt: CoreDbMptRef; key: openArray[byte]): CoreDbRc[Blob] =
## Fetch data from the argument `mpt`. The function always returns a ## Fetch data from the argument `mpt`. The function always returns a
## non-empty `Blob` or an error code. ## non-empty `Blob` or an error code.
@ -422,6 +537,22 @@ proc getAccounts*(ctx: CoreDbCtxRef): CoreDbAccRef =
# ----------- accounts --------------- # ----------- accounts ---------------
proc proof*(
    acc: CoreDbAccRef;
    accPath: Hash256;
      ): CoreDbRc[seq[Blob]] =
  ## On the accounts MPT, collect the nodes along the `accPath` interpreted as
  ## path. Return these path nodes as a chain of rlp-encoded blobs.
  ##
  ## The resulting chain can be checked with the `verify()` functions.
  ##
  acc.setTrackNewApi AccProofFn
  result = block:
    let rc = acc.call(partAccountTwig, acc.mpt, accPath)
    if rc.isOk:
      ok(rc.value)
    else:
      # Wrap the Aristo error with the `ProofCreate` context for `CoreDb`
      err(rc.error.toError($api, ProofCreate))
  acc.ifTrackNewApi: debug logTxt, api, elapsed, result
proc fetch*( proc fetch*(
acc: CoreDbAccRef; acc: CoreDbAccRef;
accPath: Hash256; accPath: Hash256;
@ -529,6 +660,24 @@ proc state*(acc: CoreDbAccRef; updateOk = false): CoreDbRc[Hash256] =
# ------------ storage --------------- # ------------ storage ---------------
proc slotProof*(
    acc: CoreDbAccRef;
    accPath: Hash256;
    stoPath: Hash256;
      ): CoreDbRc[seq[Blob]] =
  ## On the storage MPT related to the argument account `accPath`, collect the
  ## nodes along the `stoPath` interpreted as path. Return these path nodes as
  ## a chain of rlp-encoded blobs.
  ##
  ## The resulting chain can be checked with the `verify()` functions.
  ##
  acc.setTrackNewApi AccSlotProofFn
  result = block:
    let rc = acc.call(partStorageTwig, acc.mpt, accPath, stoPath)
    if rc.isOk:
      ok(rc.value)
    else:
      err(rc.error.toError($api, ProofCreate))
  acc.ifTrackNewApi: debug logTxt, api, elapsed, result
proc slotFetch*( proc slotFetch*(
acc: CoreDbAccRef; acc: CoreDbAccRef;
accPath: Hash256; accPath: Hash256;

View File

@ -11,7 +11,7 @@
{.push raises: [].} {.push raises: [].}
import import
std/[strutils, times, typetraits], std/[sequtils, strutils, times, typetraits],
eth/common, eth/common,
results, results,
stew/byteutils, stew/byteutils,
@ -36,6 +36,7 @@ type
AccForgetFn = "acc/forget" AccForgetFn = "acc/forget"
AccHasPathFn = "acc/hasPath" AccHasPathFn = "acc/hasPath"
AccMergeFn = "acc/merge" AccMergeFn = "acc/merge"
AccProofFn = "acc/proof"
AccRecastFn = "recast" AccRecastFn = "recast"
AccStateFn = "acc/state" AccStateFn = "acc/state"
@ -43,6 +44,7 @@ type
AccSlotDeleteFn = "slotDelete" AccSlotDeleteFn = "slotDelete"
AccSlotHasPathFn = "slotHasPath" AccSlotHasPathFn = "slotHasPath"
AccSlotMergeFn = "slotMerge" AccSlotMergeFn = "slotMerge"
AccSlotProofFn = "slotProof"
AccSlotStateFn = "slotState" AccSlotStateFn = "slotState"
AccSlotStateEmptyFn = "slotStateEmpty" AccSlotStateEmptyFn = "slotStateEmpty"
AccSlotStateEmptyOrVoidFn = "slotStateEmptyOrVoid" AccSlotStateEmptyOrVoidFn = "slotStateEmptyOrVoid"
@ -54,6 +56,8 @@ type
BaseNewTxFn = "newTransaction" BaseNewTxFn = "newTransaction"
BasePersistentFn = "persistent" BasePersistentFn = "persistent"
BaseStateBlockNumberFn = "stateBlockNumber" BaseStateBlockNumberFn = "stateBlockNumber"
BaseVerifyFn = "verify"
BaseVerifyOkFn = "verifyOk"
CptKvtLogFn = "kvtLog" CptKvtLogFn = "kvtLog"
CptLevelFn = "level" CptLevelFn = "level"
@ -80,6 +84,7 @@ type
MptForgetFn = "mpt/forget" MptForgetFn = "mpt/forget"
MptHasPathFn = "mpt/hasPath" MptHasPathFn = "mpt/hasPath"
MptMergeFn = "mpt/merge" MptMergeFn = "mpt/merge"
MptProofFn = "mpt/proof"
MptPairsIt = "mpt/pairs" MptPairsIt = "mpt/pairs"
MptReplicateIt = "mpt/replicate" MptReplicateIt = "mpt/replicate"
MptStateFn = "mpt/state" MptStateFn = "mpt/state"
@ -122,6 +127,10 @@ func toStr(rc: CoreDbRc[Blob]): string =
if rc.isOk: "ok(Blob[" & $rc.value.len & "])" if rc.isOk: "ok(Blob[" & $rc.value.len & "])"
else: "err(" & rc.error.toStr & ")" else: "err(" & rc.error.toStr & ")"
func toStr(rc: CoreDbRc[seq[Blob]]): string =
  ## Render a proof-chain result for logging, e.g. `ok([[#12],[#34]])`
  ## listing each blob's length, or `err(..)` on failure.
  if rc.isErr:
    return "err(" & rc.error.toStr & ")"
  var sizes: seq[string]
  for blob in rc.value:
    sizes.add "[#" & $blob.len & "]"
  "ok([" & sizes.join(",") & "])"
func toStr(rc: CoreDbRc[Hash256]): string = func toStr(rc: CoreDbRc[Hash256]): string =
if rc.isOk: "ok(" & rc.value.toStr & ")" else: "err(" & rc.error.toStr & ")" if rc.isOk: "ok(" & rc.value.toStr & ")" else: "err(" & rc.error.toStr & ")"

View File

@ -54,6 +54,8 @@ type
HashNotAvailable HashNotAvailable
KvtNotFound KvtNotFound
MptNotFound MptNotFound
ProofCreate
ProofVerify
RlpException RlpException
StoNotFound StoNotFound
TxPending TxPending

View File

@ -51,10 +51,10 @@ proc bless*(ctx: CoreDbCtxRef; dsc: CoreDbMptRef | CoreDbTxRef): auto =
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
template kvt*(dsc: CoreDbKvtRef): KvtDbRef = template kvt*(dsc: CoreDbKvtRef): KvtDbRef =
dsc.distinctBase.kvt CoreDbCtxRef(dsc).kvt
template ctx*(kvt: CoreDbKvtRef): CoreDbCtxRef = template ctx*(kvt: CoreDbKvtRef): CoreDbCtxRef =
kvt.distinctBase CoreDbCtxRef(kvt)
# --------------- # ---------------
@ -65,7 +65,7 @@ template call*(api: KvtApiRef; fn: untyped; args: varArgs[untyped]): untyped =
fn(args) fn(args)
template call*(kvt: CoreDbKvtRef; fn: untyped; args: varArgs[untyped]): untyped = template call*(kvt: CoreDbKvtRef; fn: untyped; args: varArgs[untyped]): untyped =
kvt.distinctBase.parent.kvtApi.call(fn, args) CoreDbCtxRef(kvt).parent.kvtApi.call(fn, args)
# --------------- # ---------------
@ -81,13 +81,13 @@ func toError*(e: KvtError; s: string; error = Unspecified): CoreDbError =
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
template mpt*(dsc: CoreDbAccRef | CoreDbMptRef): AristoDbRef = template mpt*(dsc: CoreDbAccRef | CoreDbMptRef): AristoDbRef =
dsc.distinctBase.mpt CoreDbCtxRef(dsc).mpt
template mpt*(tx: CoreDbTxRef): AristoDbRef = template mpt*(tx: CoreDbTxRef): AristoDbRef =
tx.ctx.mpt tx.ctx.mpt
template ctx*(acc: CoreDbAccRef): CoreDbCtxRef = template ctx*(acc: CoreDbAccRef): CoreDbCtxRef =
acc.distinctBase CoreDbCtxRef(acc)
# --------------- # ---------------
@ -102,7 +102,7 @@ template call*(
fn: untyped; fn: untyped;
args: varArgs[untyped]; args: varArgs[untyped];
): untyped = ): untyped =
acc.distinctBase.parent.ariApi.call(fn, args) CoreDbCtxRef(acc).parent.ariApi.call(fn, args)
# --------------- # ---------------

View File

@ -365,8 +365,9 @@ func init*(
data.list[KvtApiProfBeLenKvpFn.ord].masked = true data.list[KvtApiProfBeLenKvpFn.ord].masked = true
beDup.putKvpFn = beDup.putKvpFn =
proc(a: PutHdlRef; b: openArray[(Blob,Blob)]) = proc(a: PutHdlRef; b, c: openArray[byte]) =
be.putKvpFn(a,b) KvtApiProfBePutKvpFn.profileRunner:
be.putKvpFn(a, b, c)
data.list[KvtApiProfBePutKvpFn.ord].masked = true data.list[KvtApiProfBePutKvpFn.ord].masked = true
beDup.putEndFn = beDup.putEndFn =

View File

@ -183,13 +183,13 @@ proc testCreatePortalProof(node: JsonNode, testStatusIMPL: var TestStatus) =
# Verify proof # Verify proof
let root = pq.db.getKey((rVid,rVid)).to(Hash256) let root = pq.db.getKey((rVid,rVid)).to(Hash256)
block: block:
let rc = proof.chain.partUntwig(root, path) let rc = proof.chain.partUntwigPath(root, path)
check rc.isOk check rc.isOk
if rc.isOk: if rc.isOk:
check rc.value == pyl check rc.value == pyl
# Just for completeness (same a above combined into a single function) # Just for completeness (same a above combined into a single function)
check proof.chain.partUntwigOk(root, path, pyl).isOk check proof.chain.partUntwigPathOk(root, path, pyl).isOk
# Extension nodes are rare, so there is one created, inserted and the # Extension nodes are rare, so there is one created, inserted and the
# previous test repeated. # previous test repeated.
@ -212,12 +212,12 @@ proc testCreatePortalProof(node: JsonNode, testStatusIMPL: var TestStatus) =
let root = pq.db.getKey((rVid,rVid)).to(Hash256) let root = pq.db.getKey((rVid,rVid)).to(Hash256)
block: block:
let rc = chain.partUntwig(root, path) let rc = chain.partUntwigPath(root, path)
check rc.isOk check rc.isOk
if rc.isOk: if rc.isOk:
check rc.value == pyl check rc.value == pyl
check chain.partUntwigOk(root, path, pyl).isOk check chain.partUntwigPathOk(root, path, pyl).isOk
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Test # Test