nimbus-eth1/nimbus/db/aristo/aristo_merge/merge_payload_helper.nim

# nimbus-eth1
# Copyright (c) 2023-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed
# except according to those terms.

{.push raises: [].}

import
  std/typetraits,
  eth/common,
  results,
  ".."/[aristo_desc, aristo_get, aristo_hike, aristo_layers, aristo_vid]

# ------------------------------------------------------------------------------
# Private getters & setters
# ------------------------------------------------------------------------------

proc xPfx(vtx: VertexRef): NibblesBuf =
  case vtx.vType:
  of Leaf:
    return vtx.lPfx
  of Extension:
    return vtx.ePfx
  of Branch:
    doAssert vtx.vType != Branch # Ooops

# ------------------------------------------------------------------------------
# Private helpers
# ------------------------------------------------------------------------------

proc clearMerkleKeys(
    db: AristoDbRef;                   # Database, top layer
    hike: Hike;                        # Implied vertex IDs to clear hashes for
    vid: VertexID;                     # Additional vertex IDs to clear
      ) =
  for w in hike.legs:
    db.layersResKey((hike.root, w.wp.vid))
  db.layersResKey((hike.root, vid))

# -----------

proc insertBranch(
    db: AristoDbRef;                   # Database, top layer
    hike: Hike;                        # Current state
    linkID: VertexID;                  # Vertex ID to insert
    linkVtx: VertexRef;                # Vertex to insert
    payload: PayloadRef;               # Leaf data payload
      ): Result[Hike,AristoError] =
  ##
  ## Insert `Extension->Branch` vertex chain or just a `Branch` vertex
  ##
  ##   ... --(linkID)--> <linkVtx>
  ##
  ##   <-- immutable --> <---- mutable ----> ..
  ##
  ## will become either
  ##
  ##   --(linkID)-->
  ##        <extVtx>             --(local1)-->
  ##          <forkVtx>[linkInx] --(local2)--> <linkVtx*>
  ##                   [leafInx] --(local3)--> <leafVtx>
  ##
  ## or in case that there is no common prefix
  ##
  ##   --(linkID)-->
  ##          <forkVtx>[linkInx] --(local2)--> <linkVtx*>
  ##                   [leafInx] --(local3)--> <leafVtx>
  ##
  ## *) vertex was slightly modified or removed if obsolete `Extension`
  ##
  let n = linkVtx.xPfx.sharedPrefixLen hike.tail

  # Verify minimum requirements
  if hike.tail.len == n:
    # Should have been tackled by `hikeUp()`, already
    return err(MergeLeafGarbledHike)
  if linkVtx.xPfx.len == n:
    return err(MergeBranchLinkVtxPfxTooShort)

  # Provide and install `forkVtx`
  let
    forkVtx = VertexRef(vType: Branch)
    linkInx = linkVtx.xPfx[n]
    leafInx = hike.tail[n]
  var
    leafLeg = Leg(nibble: -1)

  # Install `forkVtx`
  block:
    # Clear Merkle hashes (aka hash keys) unless proof mode.
    db.clearMerkleKeys(hike, linkID)

    if linkVtx.vType == Leaf:
      # Double check path prefix
      if 64 < hike.legsTo(NibblesBuf).len + linkVtx.lPfx.len:
        return err(MergeBranchLinkLeafGarbled)

      let
        local = db.vidFetch(pristine = true)
        linkDup = linkVtx.dup
      db.layersUpdateVtx((hike.root, local), linkDup)
      linkDup.lPfx = linkDup.lPfx.slice(1+n)
      forkVtx.bVid[linkInx] = local

    elif linkVtx.ePfx.len == n + 1:
      # This extension `linkVtx` becomes obsolete
      forkVtx.bVid[linkInx] = linkVtx.eVid

    else:
      let
        local = db.vidFetch
        linkDup = linkVtx.dup
      db.layersUpdateVtx((hike.root, local), linkDup)
      linkDup.ePfx = linkDup.ePfx.slice(1+n)
      forkVtx.bVid[linkInx] = local

  block:
    let local = db.vidFetch(pristine = true)
    forkVtx.bVid[leafInx] = local
    leafLeg.wp.vid = local
    leafLeg.wp.vtx = VertexRef(
      vType: Leaf,
      lPfx:  hike.tail.slice(1+n),
      lData: payload)
    db.layersUpdateVtx((hike.root, local), leafLeg.wp.vtx)

  # Update branch leg, ready to append more legs
  var okHike = Hike(root: hike.root, legs: hike.legs)

  # Update in-between glue linking `branch --[..]--> forkVtx`
  if 0 < n:
    let extVtx = VertexRef(
      vType: Extension,
      ePfx:  hike.tail.slice(0,n),
      eVid:  db.vidFetch)

    db.layersUpdateVtx((hike.root, linkID), extVtx)

    okHike.legs.add Leg(
      nibble: -1,
      wp:     VidVtxPair(
        vid: linkID,
        vtx: extVtx))

    db.layersUpdateVtx((hike.root, extVtx.eVid), forkVtx)
    okHike.legs.add Leg(
      nibble: leafInx.int8,
      wp:     VidVtxPair(
        vid: extVtx.eVid,
        vtx: forkVtx))

  else:
    db.layersUpdateVtx((hike.root, linkID), forkVtx)
    okHike.legs.add Leg(
      nibble: leafInx.int8,
      wp:     VidVtxPair(
        vid: linkID,
        vtx: forkVtx))

  okHike.legs.add leafLeg
  ok okHike
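
# Worked example for the fork arithmetic above (hypothetical nibble values,
# for illustration only):
#
#   linkVtx.xPfx = [3,4,5,6]    hike.tail = [3,4,7]    =>  n = 2
#
#   linkInx = 5 and leafInx = 7, so `forkVtx` ends up with
#     bVid[5] --> <linkVtx*>  carrying prefix [6]  (original prefix less [3,4,5])
#     bVid[7] --> <leafVtx>   carrying an empty prefix
#
#   and, since 0 < n, an `Extension` with `ePfx` = [3,4] is stored under
#   `linkID`, pointing at `forkVtx`.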

proc concatBranchAndLeaf(
    db: AristoDbRef;                   # Database, top layer
    hike: Hike;                        # Path top has a `Branch` vertex
    brVid: VertexID;                   # Branch vertex ID from `Hike` top
    brVtx: VertexRef;                  # Branch vertex, linked to from `Hike`
    payload: PayloadRef;               # Leaf data payload
      ): Result[Hike,AristoError] =
  ## Append the branch vertex passed as argument `(brVid,brVtx)` and then
  ## a `Leaf` vertex derived from the argument `payload`.
  ##
  if hike.tail.len == 0:
    return err(MergeBranchGarbledTail)

  let nibble = hike.tail[0].int8
  if brVtx.bVid[nibble].isValid:
    return err(MergeRootBranchLinkBusy)

  # Clear Merkle hashes (aka hash keys) unless proof mode.
  db.clearMerkleKeys(hike, brVid)

  # Append branch vertex
  var okHike = Hike(root: hike.root, legs: hike.legs)
  okHike.legs.add Leg(wp: VidVtxPair(vtx: brVtx, vid: brVid), nibble: nibble)

  # Append leaf vertex
  let
    brDup = brVtx.dup
    vid = db.vidFetch(pristine = true)
    vtx = VertexRef(
      vType: Leaf,
      lPfx:  hike.tail.slice(1),
      lData: payload)
  brDup.bVid[nibble] = vid
  db.layersUpdateVtx((hike.root, brVid), brDup)
  db.layersUpdateVtx((hike.root, vid), vtx)
  okHike.legs.add Leg(wp: VidVtxPair(vtx: vtx, vid: vid), nibble: -1)

  ok okHike
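
# Layout sketch for `concatBranchAndLeaf()` (illustrative):
#
#   ... --(brVid)--> <brVtx>[nibble] --> nil
#
# becomes
#
#   ... --(brVid)--> <brDup>[nibble] --(vid)--> <Leaf>
#
# where the new leaf carries the remaining path `hike.tail.slice(1)` and the
# argument `payload`.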

# ------------------------------------------------------------------------------
# Private functions: add Patricia Trie leaf vertex
# ------------------------------------------------------------------------------

proc mergePayloadTopIsBranchAddLeaf(
    db: AristoDbRef;                   # Database, top layer
    hike: Hike;                        # Path top has a `Branch` vertex
    payload: PayloadRef;               # Leaf data payload
      ): Result[Hike,AristoError] =
  ## Append a `Leaf` vertex derived from the argument `payload` after the top
  ## leg of the `hike` argument which is assumed to refer to a `Branch`
  ## vertex. If successful, the function returns the updated `hike` trail.
  if hike.tail.len == 0:
    return err(MergeBranchGarbledTail)

  let nibble = hike.legs[^1].nibble
  if nibble < 0:
    return err(MergeBranchGarbledNibble)

  let
    parent = hike.legs[^1].wp.vid
    branch = hike.legs[^1].wp.vtx
    linkID = branch.bVid[nibble]
    linkVtx = db.getVtx (hike.root, linkID)

  if not linkVtx.isValid:
    #
    #  .. <branch>[nibble] --(linkID)--> nil
    #
    #  <-------- immutable ------------> <---- mutable ----> ..
    #
    # Not much else that can be done here
    raiseAssert "Dangling edge:" &
      " pfx=" & $hike.legsTo(hike.legs.len-1,NibblesBuf) &
      " branch=" & $parent &
      " nibble=" & $nibble &
      " edge=" & $linkID &
      " tail=" & $hike.tail

  if linkVtx.vType == Branch:
    # Slot link to a branch vertex should be handled by `hikeUp()`
    #
    #  .. <branch>[nibble] --(linkID)--> <linkVtx>[]
    #
    #  <-------- immutable ------------> <---- mutable ----> ..
    #
    return db.concatBranchAndLeaf(hike, linkID, linkVtx, payload)

  db.insertBranch(hike, linkID, linkVtx, payload)

proc mergePayloadTopIsExtAddLeaf(
    db: AristoDbRef;                   # Database, top layer
    hike: Hike;                        # Path top has an `Extension` vertex
    payload: PayloadRef;               # Leaf data payload
      ): Result[Hike,AristoError] =
  ## Append a `Leaf` vertex derived from the argument `payload` after the top
  ## leg of the `hike` argument which is assumed to refer to an `Extension`
  ## vertex. If successful, the function returns the updated `hike` trail.
  let
    extVtx = hike.legs[^1].wp.vtx
    extVid = hike.legs[^1].wp.vid
    brVid = extVtx.eVid
    brVtx = db.getVtx (hike.root, brVid)

  var okHike = Hike(root: hike.root, legs: hike.legs)

  if not brVtx.isValid:
    # Blind vertex, promote to leaf vertex.
    #
    #  --(extVid)--> <extVtx> --(brVid)--> nil
    #
    #  <-------- immutable -------------->
    #
    let vtx = VertexRef(
      vType: Leaf,
      lPfx:  extVtx.ePfx & hike.tail,
      lData: payload)
    db.layersUpdateVtx((hike.root, extVid), vtx)
    okHike.legs[^1].wp.vtx = vtx

  elif brVtx.vType != Branch:
    return err(MergeBranchRootExpected)

  else:
    let
      nibble = hike.tail[0].int8
      linkID = brVtx.bVid[nibble]
    #
    # Required
    #
    #  --(extVid)--> <extVtx> --(brVid)--> <brVtx>[nibble] --(linkID)--> nil
    #
    #  <-------- immutable --------------> <-------- mutable ----------> ..
    #
    if linkID.isValid:
      return err(MergeRootBranchLinkBusy)

    # Clear Merkle hashes (aka hash keys) unless proof mode
    db.clearMerkleKeys(hike, brVid)

    let
      brDup = brVtx.dup
      vid = db.vidFetch(pristine = true)
      vtx = VertexRef(
        vType: Leaf,
        lPfx:  hike.tail.slice(1),
        lData: payload)
    brDup.bVid[nibble] = vid
    db.layersUpdateVtx((hike.root, brVid), brDup)
    db.layersUpdateVtx((hike.root, vid), vtx)
    okHike.legs.add Leg(wp: VidVtxPair(vtx: brDup, vid: brVid), nibble: nibble)
    okHike.legs.add Leg(wp: VidVtxPair(vtx: vtx, vid: vid), nibble: -1)

  ok okHike

proc mergePayloadTopIsEmptyAddLeaf(
    db: AristoDbRef;                   # Database, top layer
    hike: Hike;                        # No path legs
    rootVtx: VertexRef;                # Root vertex
    payload: PayloadRef;               # Leaf data payload
      ): Result[Hike,AristoError] =
  ## Append a `Leaf` vertex derived from the argument `payload` after the
  ## argument vertex `rootVtx`, returning both appended to the otherwise
  ## empty argument `hike`.
  if rootVtx.vType == Branch:
    let nibble = hike.tail[0].int8
    if rootVtx.bVid[nibble].isValid:
      return err(MergeRootBranchLinkBusy)

    # Clear Merkle hashes (aka hash keys) unless proof mode
    db.clearMerkleKeys(hike, hike.root)

    let
      rootDup = rootVtx.dup
      leafVid = db.vidFetch(pristine = true)
      leafVtx = VertexRef(
        vType: Leaf,
        lPfx:  hike.tail.slice(1),
        lData: payload)
    rootDup.bVid[nibble] = leafVid
    db.layersUpdateVtx((hike.root, hike.root), rootDup)
    db.layersUpdateVtx((hike.root, leafVid), leafVtx)
    return ok Hike(
      root: hike.root,
      legs: @[Leg(wp: VidVtxPair(vtx: rootDup, vid: hike.root), nibble: nibble),
              Leg(wp: VidVtxPair(vtx: leafVtx, vid: leafVid), nibble: -1)])

  db.insertBranch(hike, hike.root, rootVtx, payload)
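
# Sketch for the `Branch` root case above (illustrative): with an empty
# `hike`, the root vertex itself is extended,
#
#   <rootVtx>[nibble] --> nil
#
# becomes
#
#   <rootDup>[nibble] --(leafVid)--> <Leaf>  with prefix `hike.tail.slice(1)`
#
# If the addressed slot is already occupied, `MergeRootBranchLinkBusy` is
# returned; a non-`Branch` root is passed on to `insertBranch()`.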

proc mergePayloadUpdate(
    db: AristoDbRef;                   # Database, top layer
    hike: Hike;                        # Path to payload
    payload: PayloadRef;               # Payload value to add
      ): Result[Hike,AristoError] =
  ## Update leaf vertex if payloads differ
  let leafLeg = hike.legs[^1]

  # Update payloads if they differ
  if leafLeg.wp.vtx.lData != payload:
    let vid = leafLeg.wp.vid

    # Update accounts storage root which is handled implicitly
    if hike.root == VertexID(1):
      payload.stoID = leafLeg.wp.vtx.lData.stoID

    # Update vertex and hike
    let vtx = VertexRef(
      vType: Leaf,
      lPfx:  leafLeg.wp.vtx.lPfx,
      lData: payload)
    var hike = hike
    hike.legs[^1].wp.vtx = vtx

    # Modify top level cache
    db.layersUpdateVtx((hike.root, vid), vtx)
    db.clearMerkleKeys(hike, vid)
    ok hike

  elif db.layersGetVtx((hike.root, leafLeg.wp.vid)).isErr:
    err(MergeLeafPathOnBackendAlready)

  else:
    err(MergeLeafPathCachedAlready)
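
# Note on the `VertexID(1)` case above: for the accounts trie the storage
# tree ID is administered by Aristo itself, so it is copied over from the
# old leaf into the new payload before the vertex is updated. Illustrative
# example (field layout assumed):
#
#   old leaf data:  (account: <nonce 1, balance x>, stoID: VertexID(5))
#   new payload:    (account: <nonce 2, balance y>, stoID: default)
#
#   => stored as    (account: <nonce 2, balance y>, stoID: VertexID(5))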

# ------------------------------------------------------------------------------
# Public functions
# ------------------------------------------------------------------------------

proc mergePayloadImpl*(
    db: AristoDbRef;                   # Database, top layer
    root: VertexID;                    # MPT state root
    path: openArray[byte];             # Leaf item to add to the database
    payload: PayloadRef;               # Payload value
      ): Result[void,AristoError] =
  ## Merge the argument `(root,path)` key-value-pair into the top level vertex
  ## table of the database `db`. The `path` argument is used to address the
  ## leaf vertex with the payload. It is stored or updated on the database
  ## accordingly.
  ##
  let
    nibblesPath = NibblesBuf.fromBytes(path)
    hike = nibblesPath.hikeUp(root, db).to(Hike)

  var okHike: Hike
  if 0 < hike.legs.len:
    case hike.legs[^1].wp.vtx.vType:
    of Branch:
      okHike = ? db.mergePayloadTopIsBranchAddLeaf(hike, payload)
    of Leaf:
      if 0 < hike.tail.len:          # `Leaf` vertex problem?
        return err(MergeLeafGarbledHike)
      okHike = ? db.mergePayloadUpdate(hike, payload)
    of Extension:
      okHike = ? db.mergePayloadTopIsExtAddLeaf(hike, payload)

  else:
    # Empty hike
    let rootVtx = db.getVtx (hike.root, hike.root)
    if rootVtx.isValid:
      okHike = ? db.mergePayloadTopIsEmptyAddLeaf(hike, rootVtx, payload)
    else:
      # Bootstrap for existing root ID
      let wp = VidVtxPair(
        vid: hike.root,
        vtx: VertexRef(
          vType: Leaf,
          lPfx:  nibblesPath,
          lData: payload))
      db.layersUpdateVtx((hike.root, wp.vid), wp.vtx)
      okHike = Hike(root: wp.vid, legs: @[Leg(wp: wp, nibble: -1)])

  # Double check the result (may be removed in future)
  if okHike.to(NibblesBuf) != nibblesPath:
    return err(MergeAssemblyFailed) # Ooops

  ok()
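
# Typical invocation sketch (illustrative only; how `db`, `key` and `payload`
# are obtained depends on the caller and is assumed here):
#
#   let rc = db.mergePayloadImpl(
#     VertexID(1),           # MPT state root
#     key.data,              # 32 byte leaf path, e.g. a hashed account key
#     payload)               # `PayloadRef` with the leaf data
#   if rc.isErr:
#     echo rc.error          # e.g. `MergeLeafPathCachedAlready`
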
# ------------------------------------------------------------------------------
# End
# ------------------------------------------------------------------------------