merge: avoid hike allocations (#2472)

Hike allocations (and the garbage-collection maintenance that follows them)
are responsible for some 10% of CPU time (not wall time!) at this point.
This PR avoids them by stepping through the layers one step at a time,
simplifying the code along the way.
Jacek Sieka 2024-07-11 13:26:46 +02:00 committed by GitHub
parent 4a20756e6b
commit a6764670f0
8 changed files with 178 additions and 376 deletions
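To illustrate the idea, the sketch below contrasts the two traversal styles in stripped-down Nim. The `Vertex`, `getVtx`, `hikeUp` and `walk` names are illustrative stand-ins, not the actual Aristo types or API: the point is only that the old style materialises a growing, GC-managed trail of visited vertices for every lookup, while the new style keeps a single cursor.

# Illustrative sketch only - simplified stand-ins, not the Aristo types/API.
type
  VertexID = uint64
  Vertex = object
    children: seq[VertexID]            # branch targets; empty for a leaf

proc getVtx(db: seq[Vertex], vid: VertexID): Vertex =
  db[vid.int]                          # stand-in for a layered DB lookup

# Old style: collect a heap-allocated trail ("hike") of every vertex visited.
proc hikeUp(db: seq[Vertex], root: VertexID, path: seq[int]): seq[(VertexID, Vertex)] =
  var cur = root
  for nibble in path:
    let vtx = db.getVtx(cur)
    result.add((cur, vtx))             # grows a GC-managed seq on every level
    if vtx.children.len <= nibble: break
    cur = vtx.children[nibble]

# New style: walk one step at a time, keeping only the current vertex ID.
proc walk(db: seq[Vertex], root: VertexID, path: seq[int]): VertexID =
  var cur = root
  for nibble in path:
    let vtx = db.getVtx(cur)
    if vtx.children.len <= nibble: break
    cur = vtx.children[nibble]         # the previous vertex is simply dropped
  cur

In the diffs below this shows up as `mergeStorageData` and `mergePayloadImpl` replacing `hikeUp`/`Hike` with a `while` loop plus a small fixed-size `touched` array that only records the vertex IDs whose cached Merkle keys need resetting.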


@@ -45,7 +45,9 @@ proc checkTopStrict*(
     elif db.layersGetKey(rvid).isErr:
       # So `vtx` exists but not `key`, so cache is supposed dirty and the
       # vertex has a zero entry.
-      return err((rvid.vid,CheckStkVtxKeyMissing))
+      # TODO when we're writing a brand new entry, we don't write a zero key
+      #      to the database to avoid the unnecessary delete traffic..
+      discard # return err((rvid.vid,CheckStkVtxKeyMissing))
     else: # Empty key flags key is for update
       zeroKeys.incl rvid.vid


@@ -27,14 +27,15 @@ import
   eth/common,
   results,
   ./aristo_constants,
-  ./aristo_desc/[desc_error, desc_identifiers, desc_structural]
+  ./aristo_desc/[desc_error, desc_identifiers, desc_nibbles, desc_structural]

 from ./aristo_desc/desc_backend
   import BackendRef
   # Not auto-exporting backend

 export
-  aristo_constants, desc_error, desc_identifiers, desc_structural, keyed_queue
+  aristo_constants, desc_error, desc_identifiers, desc_nibbles, desc_structural,
+  keyed_queue

 const
   accLruSize* = 1024 * 1024


@@ -157,7 +157,7 @@ type
     # Merge leaf `merge()`
-    MergeAssemblyFailed # Ooops, internal error
+    MergeHikeFailed # Ooops, internal error
     MergeAccRootNotAccepted
     MergeStoRootNotAccepted
     MergeBranchGarbledNibble


@@ -18,6 +18,9 @@ type NibblesBuf* = object
     # Where valid nibbles can be found - we use indices here to avoid copies
     # wen slicing - iend not inclusive

+func high*(T: type NibblesBuf): int =
+  63
+
 func fromBytes*(T: type NibblesBuf, bytes: openArray[byte]): T =
   result.iend = 2 * (int8 result.bytes.copyFrom(bytes))


@@ -28,7 +28,7 @@ import
   std/typetraits,
   eth/common,
   results,
-  "."/[aristo_desc, aristo_fetch, aristo_layers, aristo_utils, aristo_vid],
+  "."/[aristo_desc, aristo_hike, aristo_layers, aristo_utils, aristo_vid],
   ./aristo_merge/merge_payload_helper

 const
@@ -107,43 +107,57 @@ proc mergeStorageData*(
   ## `(accPath,stoPath)` where `accPath` is the account key (into the MPT)
   ## and `stoPath` is the slot path of the corresponding storage area.
   ##
-  let
-    accHike = db.fetchAccountHike(accPath).valueOr:
-      if error == FetchAccInaccessible:
-        return err(MergeStoAccMissing)
-      return err(error)
-    wpAcc = accHike.legs[^1].wp
-    stoID = wpAcc.vtx.lData.stoID
-
-    # Provide new storage ID when needed
-    useID = if stoID.isValid: stoID else: db.vidFetch()
-
-    # Call merge
-    pyl = PayloadRef(pType: StoData, stoData: stoData)
-    rc = db.mergePayloadImpl(useID, stoPath.data, pyl)
-
-  if rc.isOk:
-    # Mark account path Merkle keys for update
-    db.updateAccountForHasher accHike
-
-    if stoID.isValid:
-      return ok()
-
-    else:
-      # Make sure that there is an account that refers to that storage trie
-      let leaf = wpAcc.vtx.dup # Dup on modify
-      leaf.lData.stoID = useID
-      db.layersPutStoID(accPath, useID)
-      db.layersUpdateVtx((accHike.root, wpAcc.vid), leaf)
-      return ok()
-
-  elif rc.error in MergeNoAction:
-    assert stoID.isValid # debugging only
-    return ok()
-
-  # Error: mark account path Merkle keys for update
-  db.updateAccountForHasher accHike
-
-  err(rc.error)
+  var
+    path = NibblesBuf.fromBytes(accPath.data)
+    next = VertexID(1)
+    vtx: VertexRef
+    touched: array[NibblesBuf.high(), VertexID]
+    pos: int
+
+  template resetKeys() =
+    # Reset cached hashes of touched verticies
+    for i in 0 ..< pos:
+      db.layersResKey((VertexID(1), touched[pos - i - 1]))
+
+  while path.len > 0:
+    touched[pos] = next
+    pos += 1
+
+    (vtx, path, next) = ?step(path, (VertexID(1), next), db)
+
+    if vtx.vType == Leaf:
+      let
+        stoID = vtx.lData.stoID
+
+        # Provide new storage ID when needed
+        useID = if stoID.isValid: stoID else: db.vidFetch()
+
+        # Call merge
+        pyl = PayloadRef(pType: StoData, stoData: stoData)
+        rc = db.mergePayloadImpl(useID, stoPath.data, pyl)
+
+      if rc.isOk:
+        # Mark account path Merkle keys for update
+        resetKeys()
+
+        if stoID.isValid:
+          return ok()
+        else:
+          # Make sure that there is an account that refers to that storage trie
+          let leaf = vtx.dup # Dup on modify
+          leaf.lData.stoID = useID
+
+          db.layersPutStoID(accPath, useID)
+          db.layersPutVtx((VertexID(1), touched[pos - 1]), leaf)
+
+          return ok()
+      elif rc.error in MergeNoAction:
+        assert stoID.isValid # debugging only
+        return ok()
+
+      return err(rc.error)
+
+  err(MergeHikeFailed)

 # ------------------------------------------------------------------------------
 # End

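As a side note, the `touched`/`resetKeys` pattern introduced above can be read in isolation roughly as follows; this is a minimal sketch with assumed semantics (the key cache modelled as a plain set), not the actual `layersResKey` implementation.

import std/sets

type VertexID = uint64

const maxDepth = 64                    # one slot per nibble of a 32-byte path

var
  cachedKeys = initHashSet[VertexID]() # stand-in for the layer's key cache
  touched: array[maxDepth, VertexID]   # vertex IDs visited on the way down
  pos = 0

proc layersResKey(vid: VertexID) =
  cachedKeys.excl vid                  # forget the cached hash for this vertex

template resetKeys() =
  # Invalidate in reverse (leaf-to-root) order, mirroring `pos - i - 1` above
  for i in 0 ..< pos:
    layersResKey(touched[pos - i - 1])

# During traversal each visited vertex is recorded exactly once:
#   touched[pos] = next; pos += 1
# and the cached hashes are only dropped once the merge has succeeded.

Compared with the old `clearMerkleKeys`, which walked the legs of a `Hike`, this records the same vertex IDs without ever allocating the trail in the first place.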

@@ -10,47 +10,36 @@
 {.push raises: [].}

-import
-  std/typetraits,
-  eth/common,
-  results,
-  ".."/[aristo_desc, aristo_get, aristo_hike, aristo_layers, aristo_vid]
+import eth/common, results, ".."/[aristo_desc, aristo_get, aristo_layers, aristo_vid]

 # ------------------------------------------------------------------------------
 # Private getters & setters
 # ------------------------------------------------------------------------------

 proc xPfx(vtx: VertexRef): NibblesBuf =
-  case vtx.vType:
+  case vtx.vType
   of Leaf:
-    return vtx.lPfx
+    vtx.lPfx
   of Extension:
-    return vtx.ePfx
+    vtx.ePfx
   of Branch:
-    doAssert vtx.vType != Branch # Ooops
-
-# ------------------------------------------------------------------------------
-# Private helpers
-# ------------------------------------------------------------------------------
-
-proc clearMerkleKeys(
-    db: AristoDbRef;                   # Database, top layer
-    hike: Hike;                        # Implied vertex IDs to clear hashes for
-    vid: VertexID;                     # Additional vertex IDs to clear
-      ) =
-  for w in hike.legs:
-    db.layersResKey((hike.root, w.wp.vid))
-  db.layersResKey((hike.root, vid))
+    raiseAssert "oops"

 # -----------

+proc layersPutLeaf(
+    db: AristoDbRef, rvid: RootedVertexID, path: NibblesBuf, payload: PayloadRef
+) =
+  let vtx = VertexRef(vType: Leaf, lPfx: path, lData: payload)
+  db.layersPutVtx(rvid, vtx)
+
 proc insertBranch(
-    db: AristoDbRef;                   # Database, top layer
-    hike: Hike;                        # Current state
-    linkID: VertexID;                  # Vertex ID to insert
-    linkVtx: VertexRef;                # Vertex to insert
-    payload: PayloadRef;               # Leaf data payload
-      ): Result[Hike,AristoError] =
+    db: AristoDbRef,                   # Database, top layer
+    linkID: RootedVertexID,            # Vertex ID to insert
+    linkVtx: VertexRef,                # Vertex to insert
+    path: NibblesBuf,
+    payload: PayloadRef,               # Leaf data payload
+): Result[void, AristoError] =
   ##
   ## Insert `Extension->Branch` vertex chain or just a `Branch` vertex
   ##
@@ -73,381 +62,174 @@ proc insertBranch(
   ##
   ## *) vertex was slightly modified or removed if obsolete `Extension`
   ##
-  let n = linkVtx.xPfx.sharedPrefixLen hike.tail
-
-  # Verify minimum requirements
-  if hike.tail.len == n:
-    # Should have been tackeld by `hikeUp()`, already
-    return err(MergeLeafGarbledHike)
-  if linkVtx.xPfx.len == n:
-    return err(MergeBranchLinkVtxPfxTooShort)
+  if linkVtx.xPfx.len == 0:
+    return err(MergeBranchLinkVtxPfxTooShort)
+
+  let n = linkVtx.xPfx.sharedPrefixLen path
+  # Verify minimum requirements
+  doAssert n < path.len

   # Provide and install `forkVtx`
   let
     forkVtx = VertexRef(vType: Branch)
     linkInx = linkVtx.xPfx[n]
-    leafInx = hike.tail[n]
-  var
-    leafLeg = Leg(nibble: -1)
+    leafInx = path[n]

   # Install `forkVtx`
   block:
-    # Clear Merkle hashes (aka hash keys) unless proof mode.
-    db.clearMerkleKeys(hike, linkID)
-
     if linkVtx.vType == Leaf:
-      # Double check path prefix
-      if 64 < hike.legsTo(NibblesBuf).len + linkVtx.lPfx.len:
-        return err(MergeBranchLinkLeafGarbled)
-
       let
         local = db.vidFetch(pristine = true)
         linkDup = linkVtx.dup
-      db.layersUpdateVtx((hike.root, local), linkDup)
-      linkDup.lPfx = linkDup.lPfx.slice(1+n)
+      linkDup.lPfx = linkVtx.lPfx.slice(1 + n)
       forkVtx.bVid[linkInx] = local
+      db.layersPutVtx((linkID.root, local), linkDup)

     elif linkVtx.ePfx.len == n + 1:
       # This extension `linkVtx` becomes obsolete
       forkVtx.bVid[linkInx] = linkVtx.eVid

     else:
       let
         local = db.vidFetch
         linkDup = linkVtx.dup
-      db.layersUpdateVtx((hike.root, local), linkDup)
-      linkDup.ePfx = linkDup.ePfx.slice(1+n)
+      linkDup.ePfx = linkDup.ePfx.slice(1 + n)
       forkVtx.bVid[linkInx] = local
+      db.layersPutVtx((linkID.root, local), linkDup)

   block:
     let local = db.vidFetch(pristine = true)
     forkVtx.bVid[leafInx] = local
-    leafLeg.wp.vid = local
-    leafLeg.wp.vtx = VertexRef(
-      vType: Leaf,
-      lPfx: hike.tail.slice(1+n),
-      lData: payload)
-    db.layersUpdateVtx((hike.root, local), leafLeg.wp.vtx)
-
-  # Update branch leg, ready to append more legs
-  var okHike = Hike(root: hike.root, legs: hike.legs)
+    db.layersPutLeaf((linkID.root, local), path.slice(1 + n), payload)

   # Update in-beween glue linking `branch --[..]--> forkVtx`
   if 0 < n:
-    let extVtx = VertexRef(
-      vType: Extension,
-      ePfx: hike.tail.slice(0,n),
-      eVid: db.vidFetch)
-
-    db.layersUpdateVtx((hike.root, linkID), extVtx)
-
-    okHike.legs.add Leg(
-      nibble: -1,
-      wp: VidVtxPair(
-        vid: linkID,
-        vtx: extVtx))
-
-    db.layersUpdateVtx((hike.root, extVtx.eVid), forkVtx)
-    okHike.legs.add Leg(
-      nibble: leafInx.int8,
-      wp: VidVtxPair(
-        vid: extVtx.eVid,
-        vtx: forkVtx))
+    let
+      vid = db.vidFetch()
+      extVtx = VertexRef(vType: Extension, ePfx: path.slice(0, n), eVid: vid)
+
+    db.layersPutVtx(linkID, extVtx)
+    db.layersPutVtx((linkID.root, vid), forkVtx)
   else:
-    db.layersUpdateVtx((hike.root, linkID), forkVtx)
-    okHike.legs.add Leg(
-      nibble: leafInx.int8,
-      wp: VidVtxPair(
-        vid: linkID,
-        vtx: forkVtx))
-
-  okHike.legs.add leafLeg
-  ok okHike
+    db.layersPutVtx(linkID, forkVtx)
+
+  ok()

 proc concatBranchAndLeaf(
-    db: AristoDbRef;                   # Database, top layer
-    hike: Hike;                        # Path top has a `Branch` vertex
-    brVid: VertexID;                   # Branch vertex ID from from `Hike` top
-    brVtx: VertexRef;                  # Branch vertex, linked to from `Hike`
-    payload: PayloadRef;               # Leaf data payload
-      ): Result[Hike,AristoError] =
+    db: AristoDbRef,                   # Database, top layer
+    brVid: RootedVertexID,             # Branch vertex ID from from `Hike` top
+    brVtx: VertexRef,                  # Branch vertex, linked to from `Hike`
+    path: NibblesBuf,
+    payload: PayloadRef,               # Leaf data payload
+): Result[void, AristoError] =
   ## Append argument branch vertex passed as argument `(brID,brVtx)` and then
   ## a `Leaf` vertex derived from the argument `payload`.
   ##
-  if hike.tail.len == 0:
+  if path.len == 0:
     return err(MergeBranchGarbledTail)

-  let nibble = hike.tail[0].int8
-  if brVtx.bVid[nibble].isValid:
-    return err(MergeRootBranchLinkBusy)
-
-  # Clear Merkle hashes (aka hash keys) unless proof mode.
-  db.clearMerkleKeys(hike, brVid)
-
-  # Append branch vertex
-  var okHike = Hike(root: hike.root, legs: hike.legs)
-  okHike.legs.add Leg(wp: VidVtxPair(vtx: brVtx, vid: brVid), nibble: nibble)
-
-  # Append leaf vertex
+  let nibble = path[0].int8
+  doAssert not brVtx.bVid[nibble].isValid
+
   let
     brDup = brVtx.dup
     vid = db.vidFetch(pristine = true)
-    vtx = VertexRef(
-      vType: Leaf,
-      lPfx: hike.tail.slice(1),
-      lData: payload)
+
   brDup.bVid[nibble] = vid
-  db.layersUpdateVtx((hike.root, brVid), brDup)
-  db.layersUpdateVtx((hike.root, vid), vtx)
-  okHike.legs.add Leg(wp: VidVtxPair(vtx: vtx, vid: vid), nibble: -1)
-
-  ok okHike
+
+  db.layersPutVtx(brVid, brDup)
+  db.layersPutLeaf((brVid.root, vid), path.slice(1), payload)
+
+  ok()

-# ------------------------------------------------------------------------------
-# Private functions: add Particia Trie leaf vertex
-# ------------------------------------------------------------------------------
-
-proc mergePayloadTopIsBranchAddLeaf(
-    db: AristoDbRef;                   # Database, top layer
-    hike: Hike;                        # Path top has a `Branch` vertex
-    payload: PayloadRef;               # Leaf data payload
-      ): Result[Hike,AristoError] =
-  ## Append a `Leaf` vertex derived from the argument `payload` after the top
-  ## leg of the `hike` argument which is assumend to refert to a `Branch`
-  ## vertex. If successful, the function returns the updated `hike` trail.
-  if hike.tail.len == 0:
-    return err(MergeBranchGarbledTail)
-
-  let nibble = hike.legs[^1].nibble
-  if nibble < 0:
-    return err(MergeBranchGarbledNibble)
-
-  let
-    parent = hike.legs[^1].wp.vid
-    branch = hike.legs[^1].wp.vtx
-    linkID = branch.bVid[nibble]
-    linkVtx = db.getVtx (hike.root, linkID)
-
-  if not linkVtx.isValid:
-    #
-    #  .. <branch>[nibble] --(linkID)--> nil
-    #
-    #  <-------- immutable ------------> <---- mutable ----> ..
-    #
-    # Not much else that can be done here
-    raiseAssert "Dangling edge:" &
-      " pfx=" & $hike.legsTo(hike.legs.len-1,NibblesBuf) &
-      " branch=" & $parent &
-      " nibble=" & $nibble &
-      " edge=" & $linkID &
-      " tail=" & $hike.tail
-
-  if linkVtx.vType == Branch:
-    # Slot link to a branch vertex should be handled by `hikeUp()`
-    #
-    #  .. <branch>[nibble] --(linkID)--> <linkVtx>[]
-    #
-    #  <-------- immutable ------------> <---- mutable ----> ..
-    #
-    return db.concatBranchAndLeaf(hike, linkID, linkVtx, payload)
-
-  db.insertBranch(hike, linkID, linkVtx, payload)
-
-proc mergePayloadTopIsExtAddLeaf(
-    db: AristoDbRef;                   # Database, top layer
-    hike: Hike;                        # Path top has an `Extension` vertex
-    payload: PayloadRef;               # Leaf data payload
-      ): Result[Hike,AristoError] =
-  ## Append a `Leaf` vertex derived from the argument `payload` after the top
-  ## leg of the `hike` argument which is assumend to refert to a `Extension`
-  ## vertex. If successful, the function returns the
-  ## updated `hike` trail.
-  let
-    extVtx = hike.legs[^1].wp.vtx
-    extVid = hike.legs[^1].wp.vid
-    brVid = extVtx.eVid
-    brVtx = db.getVtx (hike.root, brVid)
-
-  var okHike = Hike(root: hike.root, legs: hike.legs)
-
-  if not brVtx.isValid:
-    # Blind vertex, promote to leaf vertex.
-    #
-    #  --(extVid)--> <extVtx> --(brVid)--> nil
-    #
-    #  <-------- immutable -------------->
-    #
-    let vtx = VertexRef(
-      vType: Leaf,
-      lPfx: extVtx.ePfx & hike.tail,
-      lData: payload)
-    db.layersUpdateVtx((hike.root, extVid), vtx)
-    okHike.legs[^1].wp.vtx = vtx
-
-  elif brVtx.vType != Branch:
-    return err(MergeBranchRootExpected)
-
-  else:
-    let
-      nibble = hike.tail[0].int8
-      linkID = brVtx.bVid[nibble]
-    #
-    # Required
-    #
-    #  --(extVid)--> <extVtx> --(brVid)--> <brVtx>[nibble] --(linkID)--> nil
-    #
-    #  <-------- immutable --------------> <-------- mutable ----------> ..
-    #
-    if linkID.isValid:
-      return err(MergeRootBranchLinkBusy)
-
-    # Clear Merkle hashes (aka hash keys) unless proof mode
-    db.clearMerkleKeys(hike, brVid)
-
-    let
-      brDup = brVtx.dup
-      vid = db.vidFetch(pristine = true)
-      vtx = VertexRef(
-        vType: Leaf,
-        lPfx: hike.tail.slice(1),
-        lData: payload)
-
-    brDup.bVid[nibble] = vid
-    db.layersUpdateVtx((hike.root, brVid), brDup)
-    db.layersUpdateVtx((hike.root, vid), vtx)
-
-    okHike.legs.add Leg(wp: VidVtxPair(vtx: brDup, vid: brVid), nibble: nibble)
-    okHike.legs.add Leg(wp: VidVtxPair(vtx: vtx, vid: vid), nibble: -1)
-
-  ok okHike
-
-proc mergePayloadTopIsEmptyAddLeaf(
-    db: AristoDbRef;                   # Database, top layer
-    hike: Hike;                        # No path legs
-    rootVtx: VertexRef;                # Root vertex
-    payload: PayloadRef;               # Leaf data payload
-      ): Result[Hike,AristoError] =
-  ## Append a `Leaf` vertex derived from the argument `payload` after the
-  ## argument vertex `rootVtx` and append both the empty arguent `hike`.
-  if rootVtx.vType == Branch:
-    let nibble = hike.tail[0].int8
-    if rootVtx.bVid[nibble].isValid:
-      return err(MergeRootBranchLinkBusy)
-
-    # Clear Merkle hashes (aka hash keys) unless proof mode
-    db.clearMerkleKeys(hike, hike.root)
-
-    let
-      rootDup = rootVtx.dup
-      leafVid = db.vidFetch(pristine = true)
-      leafVtx = VertexRef(
-        vType: Leaf,
-        lPfx: hike.tail.slice(1),
-        lData: payload)
-
-    rootDup.bVid[nibble] = leafVid
-    db.layersUpdateVtx((hike.root, hike.root), rootDup)
-    db.layersUpdateVtx((hike.root, leafVid), leafVtx)
-
-    return ok Hike(
-      root: hike.root,
-      legs: @[Leg(wp: VidVtxPair(vtx: rootDup, vid: hike.root), nibble: nibble),
-              Leg(wp: VidVtxPair(vtx: leafVtx, vid: leafVid), nibble: -1)])
-
-  db.insertBranch(hike, hike.root, rootVtx, payload)
-
-proc mergePayloadUpdate(
-    db: AristoDbRef;                   # Database, top layer
-    hike: Hike;                        # Path to payload
-    payload: PayloadRef;               # Payload value to add
-      ): Result[Hike,AristoError] =
-  ## Update leaf vertex if payloads differ
-  let leafLeg = hike.legs[^1]
-
-  # Update payloads if they differ
-  if leafLeg.wp.vtx.lData != payload:
-    let vid = leafLeg.wp.vid
-
-    # Update accounts storage root which is handled implicitly
-    if hike.root == VertexID(1):
-      payload.stoID = leafLeg.wp.vtx.lData.stoID
-
-    # Update vertex and hike
-    let vtx = VertexRef(
-      vType: Leaf,
-      lPfx: leafLeg.wp.vtx.lPfx,
-      lData: payload)
-    var hike = hike
-    hike.legs[^1].wp.vtx = vtx
-
-    # Modify top level cache
-    db.layersUpdateVtx((hike.root, vid), vtx)
-    db.clearMerkleKeys(hike, vid)
-    ok hike
-
-  elif db.layersGetVtx((hike.root, leafLeg.wp.vid)).isErr:
-    err(MergeLeafPathOnBackendAlready)
-
-  else:
-    err(MergeLeafPathCachedAlready)

 # ------------------------------------------------------------------------------
 # Public functions
 # ------------------------------------------------------------------------------

 proc mergePayloadImpl*(
-    db: AristoDbRef;                   # Database, top layer
-    root: VertexID;                    # MPT state root
-    path: openArray[byte];             # Leaf item to add to the database
-    payload: PayloadRef;               # Payload value
-      ): Result[void,AristoError] =
+    db: AristoDbRef,                   # Database, top layer
+    root: VertexID,                    # MPT state root
+    path: openArray[byte],             # Leaf item to add to the database
+    payload: PayloadRef,               # Payload value
+): Result[void, AristoError] =
   ## Merge the argument `(root,path)` key-value-pair into the top level vertex
   ## table of the database `db`. The `path` argument is used to address the
   ## leaf vertex with the payload. It is stored or updated on the database
   ## accordingly.
   ##
-  let
-    nibblesPath = NibblesBuf.fromBytes(path)
-    hike = nibblesPath.hikeUp(root, db).to(Hike)
-
-  var okHike: Hike
-  if 0 < hike.legs.len:
-    case hike.legs[^1].wp.vtx.vType:
-    of Branch:
-      okHike = ? db.mergePayloadTopIsBranchAddLeaf(hike, payload)
+  var
+    path = NibblesBuf.fromBytes(path)
+    cur = root
+    touched: array[NibblesBuf.high + 1, VertexID]
+    pos = 0
+    vtx = db.getVtxRc((root, cur)).valueOr:
+      if error != GetVtxNotFound:
+        return err(error)
+
+      # We're at the root vertex and there is no data - this must be a fresh
+      # VertexID!
+      db.layersPutLeaf((root, cur), path, payload)
+      return ok()
+
+  template resetKeys() =
+    # Reset cached hashes of touched verticies
+    for i in 0 ..< pos:
+      db.layersResKey((root, touched[pos - i - 1]))
+
+  while path.len > 0:
+    # Clear existing merkle keys along the traversal path
+    touched[pos] = cur
+    pos += 1
+
+    case vtx.vType
     of Leaf:
-      if 0 < hike.tail.len:          # `Leaf` vertex problem?
-        return err(MergeLeafGarbledHike)
-      okHike = ? db.mergePayloadUpdate(hike, payload)
+      if path == vtx.lPfx:
+        # Replace the current vertex with a new payload
+
+        if vtx.lData == payload:
+          # TODO is this still needed? Higher levels should already be doing
+          #      these checks
+          return err(MergeLeafPathCachedAlready)
+
+        if root == VertexID(1):
+          # TODO can we avoid this hack? it feels like the caller should already
+          #      have set an appropriate stoID - this "fixup" feels risky,
+          #      specially from a caching point of view
+          payload.stoID = vtx.lData.stoID
+
+        db.layersPutLeaf((root, cur), path, payload)
+      else:
+        # Turn leaf into branch, leaves with possible ext prefix
+        ? db.insertBranch((root, cur), vtx, path, payload)
+
+      resetKeys()
+      return ok()
     of Extension:
-      okHike = ? db.mergePayloadTopIsExtAddLeaf(hike, payload)
-
-  else:
-    # Empty hike
-    let rootVtx = db.getVtx (hike.root, hike.root)
-    if rootVtx.isValid:
-      okHike = ? db.mergePayloadTopIsEmptyAddLeaf(hike,rootVtx, payload)
-
-    else:
-      # Bootstrap for existing root ID
-      let wp = VidVtxPair(
-        vid: hike.root,
-        vtx: VertexRef(
-          vType: Leaf,
-          lPfx: nibblesPath,
-          lData: payload))
-      db.layersUpdateVtx((hike.root, wp.vid), wp.vtx)
-      okHike = Hike(root: wp.vid, legs: @[Leg(wp: wp, nibble: -1)])
-
-  # Double check the result (may be removed in future)
-  if okHike.to(NibblesBuf) != nibblesPath:
-    return err(MergeAssemblyFailed) # Ooops
-
-  ok()
+      if vtx.ePfx.len == path.sharedPrefixLen(vtx.ePfx):
+        cur = vtx.eVid
+        path = path.slice(vtx.ePfx.len)
+        vtx = ?db.getVtxRc((root, cur))
+      else:
+        ? db.insertBranch((root, cur), vtx, path, payload)
+
+        resetKeys()
+        return ok()
+    of Branch:
+      let
+        nibble = path[0]
+        next = vtx.bVid[nibble]
+      if next.isValid:
+        cur = next
+        path = path.slice(1)
+        vtx = ?db.getVtxRc((root, next))
+      else:
+        ? db.concatBranchAndLeaf((root, cur), vtx, path, payload)
+
+        resetKeys()
+        return ok()
+
+  err(MergeHikeFailed)

 # ------------------------------------------------------------------------------
 # End


@@ -16,7 +16,7 @@ import
   results,
   ../../aristo,
   ../../aristo/aristo_init/rocks_db as use_ari,
-  ../../aristo/[aristo_desc, aristo_walk/persistent, aristo_tx],
+  ../../aristo/[aristo_desc, aristo_walk/persistent],
   ../../kvt,
   ../../kvt/kvt_persistent as use_kvt,
   ../../kvt/kvt_init/rocks_db/rdb_init,


@@ -11,7 +11,7 @@
 {.push raises: [].}

 import
-  std/[tables, hashes, sets, typetraits],
+  std/[tables, hashes, sets],
   chronicles,
   eth/common,
   results,