Allocation-free nibbles buffer (#2406)
This buffer eliminates a large part of allocations during MPT traversal, reducing overall memory usage and GC pressure. Ideally, we would use it throughout in the API instead of `openArray[byte]` since the built-in length limit appropriately exposes the natural 64-nibble depth constraint that `openArray` fails to capture.
This commit is contained in:
parent
768307d91d
commit
6b68ff92d3
|
@ -9,7 +9,7 @@
|
|||
|
||||
import
|
||||
std/strutils,
|
||||
eth/[common],
|
||||
eth/common,
|
||||
stew/byteutils,
|
||||
stint,
|
||||
chronos,
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
import
|
||||
std/[tables],
|
||||
../tx_info,
|
||||
eth/[common],
|
||||
eth/common,
|
||||
stew/[sorted_set],
|
||||
results
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
import
|
||||
../tx_info,
|
||||
../tx_item,
|
||||
eth/[common],
|
||||
eth/common,
|
||||
stew/[keyed_queue, keyed_queue/kq_debug, sorted_set],
|
||||
results,
|
||||
../../eip4844
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
import
|
||||
../tx_info,
|
||||
../tx_item,
|
||||
eth/[common],
|
||||
eth/common,
|
||||
stew/[keyed_queue, keyed_queue/kq_debug, sorted_set],
|
||||
results
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
|
||||
import
|
||||
std/times,
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
./aristo_desc/desc_backend,
|
||||
./aristo_init/memory_db,
|
||||
|
@ -247,7 +247,7 @@ type
|
|||
## data record indexed by `path` exists on the database.
|
||||
|
||||
AristoApiHikeUpFn* =
|
||||
proc(path: NibblesSeq;
|
||||
proc(path: NibblesBuf;
|
||||
root: VertexID;
|
||||
db: AristoDbRef;
|
||||
): Result[Hike,(VertexID,AristoError,Hike)]
|
||||
|
@ -762,7 +762,7 @@ func init*(
|
|||
result = api.hasPathStorage(a, b, c)
|
||||
|
||||
profApi.hikeUp =
|
||||
proc(a: NibblesSeq; b: VertexID; c: AristoDbRef): auto =
|
||||
proc(a: NibblesBuf; b: VertexID; c: AristoDbRef): auto =
|
||||
AristoApiProfHikeUpFn.profileRunner:
|
||||
result = api.hikeUp(a, b, c)
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
import
|
||||
std/bitops,
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
stew/endians2,
|
||||
./aristo_desc
|
||||
|
@ -112,7 +112,7 @@ proc blobifyTo*(vtx: VertexRef; data: var Blob): Result[void,AristoError] =
|
|||
data &= [0x08u8]
|
||||
of Extension:
|
||||
let
|
||||
pSegm = vtx.ePfx.hexPrefixEncode(isleaf = false)
|
||||
pSegm = vtx.ePfx.toHexPrefix(isleaf = false)
|
||||
psLen = pSegm.len.byte
|
||||
if psLen == 0 or 33 < psLen:
|
||||
return err(BlobifyExtPathOverflow)
|
||||
|
@ -123,7 +123,7 @@ proc blobifyTo*(vtx: VertexRef; data: var Blob): Result[void,AristoError] =
|
|||
data &= [0x80u8 or psLen]
|
||||
of Leaf:
|
||||
let
|
||||
pSegm = vtx.lPfx.hexPrefixEncode(isleaf = true)
|
||||
pSegm = vtx.lPfx.toHexPrefix(isleaf = true)
|
||||
psLen = pSegm.len.byte
|
||||
if psLen == 0 or 33 < psLen:
|
||||
return err(BlobifyLeafPathOverflow)
|
||||
|
@ -280,7 +280,8 @@ proc deblobifyTo*(
|
|||
return err(DeblobExtTooShort)
|
||||
if 8 + sLen != rLen: # => slen is at least 1
|
||||
return err(DeblobExtSizeGarbled)
|
||||
let (isLeaf, pathSegment) = hexPrefixDecode record.toOpenArray(8, rLen - 1)
|
||||
let (isLeaf, pathSegment) =
|
||||
NibblesBuf.fromHexPrefix record.toOpenArray(8, rLen - 1)
|
||||
if isLeaf:
|
||||
return err(DeblobExtGotLeafPrefix)
|
||||
vtx = VertexRef(
|
||||
|
@ -295,7 +296,8 @@ proc deblobifyTo*(
|
|||
pLen = rLen - sLen # payload length
|
||||
if rLen < sLen:
|
||||
return err(DeblobLeafSizeGarbled)
|
||||
let (isLeaf, pathSegment) = hexPrefixDecode record.toOpenArray(pLen, rLen-1)
|
||||
let (isLeaf, pathSegment) =
|
||||
NibblesBuf.fromHexPrefix record.toOpenArray(pLen, rLen-1)
|
||||
if not isLeaf:
|
||||
return err(DeblobLeafGotExtPrefix)
|
||||
var pyl: PayloadRef
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
import
|
||||
std/[sets, tables],
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
stew/interval_set,
|
||||
../../aristo,
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
import
|
||||
std/[sequtils, sets, typetraits],
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
".."/[aristo_desc, aristo_get, aristo_layers, aristo_serialise, aristo_utils]
|
||||
|
||||
|
|
|
@ -12,16 +12,13 @@
|
|||
|
||||
import
|
||||
std/sets,
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
./aristo_desc/desc_identifiers
|
||||
|
||||
const
|
||||
EmptyBlob* = seq[byte].default
|
||||
## Useful shortcut (borrowed from `sync/snap/constants.nim`)
|
||||
|
||||
EmptyNibbleSeq* = EmptyBlob.initNibbleRange
|
||||
## Useful shortcut (borrowed from `sync/snap/constants.nim`)
|
||||
|
||||
EmptyVidSeq* = seq[VertexID].default
|
||||
## Useful shortcut
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
import
|
||||
std/[algorithm, sequtils, sets, strutils, tables],
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
stew/[byteutils, interval_set],
|
||||
./aristo_desc/desc_backend,
|
||||
|
@ -192,11 +192,11 @@ proc ppKey(key: HashKey; db: AristoDbRef; pfx = true): string =
|
|||
result &= @(key.data).toHex.squeeze(hex=true,ignLen=true) & tag
|
||||
|
||||
proc ppLeafTie(lty: LeafTie, db: AristoDbRef): string =
|
||||
let pfx = lty.path.to(NibblesSeq)
|
||||
let pfx = lty.path.to(NibblesBuf)
|
||||
"@" & lty.root.ppVid(pfx=false) & ":" &
|
||||
($pfx).squeeze(hex=true,ignLen=(pfx.len==64))
|
||||
|
||||
proc ppPathPfx(pfx: NibblesSeq): string =
|
||||
proc ppPathPfx(pfx: NibblesBuf): string =
|
||||
let s = $pfx
|
||||
if s.len < 20: s else: s[0 .. 5] & ".." & s[s.len-8 .. ^1] & ":" & $s.len
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
|
||||
import
|
||||
std/[sets, typetraits],
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
"."/[aristo_desc, aristo_get, aristo_hike, aristo_layers,
|
||||
aristo_utils, aristo_vid]
|
||||
|
@ -96,7 +96,7 @@ proc collapseBranch(
|
|||
vid: br.vid,
|
||||
vtx: VertexRef(
|
||||
vType: Extension,
|
||||
ePfx: @[nibble].initNibbleRange.slice(1),
|
||||
ePfx: NibblesBuf.nibble(nibble),
|
||||
eVid: br.vtx.bVid[nibble]))
|
||||
|
||||
if 2 < hike.legs.len: # (1) or (2)
|
||||
|
@ -145,7 +145,7 @@ proc collapseExt(
|
|||
vid: br.vid,
|
||||
vtx: VertexRef(
|
||||
vType: Extension,
|
||||
ePfx: @[nibble].initNibbleRange.slice(1) & vtx.ePfx,
|
||||
ePfx: NibblesBuf.nibble(nibble) & vtx.ePfx,
|
||||
eVid: vtx.eVid))
|
||||
db.disposeOfVtx(hike.root, br.vtx.bVid[nibble]) # `vtx` is obsolete now
|
||||
|
||||
|
@ -198,7 +198,7 @@ proc collapseLeaf(
|
|||
vid: br.vtx.bVid[nibble],
|
||||
vtx: VertexRef(
|
||||
vType: Leaf,
|
||||
lPfx: @[nibble].initNibbleRange.slice(1) & vtx.lPfx,
|
||||
lPfx: NibblesBuf.nibble(nibble) & vtx.lPfx,
|
||||
lData: vtx.lData))
|
||||
db.layersResKey(hike.root, lf.vid) # `vtx` was modified
|
||||
|
||||
|
@ -354,7 +354,7 @@ proc deleteAccountPayload*(
|
|||
## leaf entry refers to a storage tree, this one will be deleted as well.
|
||||
##
|
||||
let
|
||||
hike = path.initNibbleRange.hikeUp(VertexID(1), db).valueOr:
|
||||
hike = NibblesBuf.fromBytes(path).hikeUp(VertexID(1), db).valueOr:
|
||||
if error[1] in HikeAcceptableStopsNotFound:
|
||||
return err(DelPathNotFound)
|
||||
return err(error[1])
|
||||
|
@ -391,7 +391,7 @@ proc deleteGenericData*(
|
|||
elif LEAST_FREE_VID <= root.distinctBase:
|
||||
return err(DelStoRootNotAccepted)
|
||||
|
||||
let hike = path.initNibbleRange.hikeUp(root, db).valueOr:
|
||||
let hike = NibblesBuf.fromBytes(path).hikeUp(root, db).valueOr:
|
||||
if error[1] in HikeAcceptableStopsNotFound:
|
||||
return err(DelPathNotFound)
|
||||
return err(error[1])
|
||||
|
@ -438,7 +438,7 @@ proc deleteStorageData*(
|
|||
if not stoID.isValid:
|
||||
return err(DelStoRootMissing)
|
||||
|
||||
let stoHike = path.initNibbleRange.hikeUp(stoID, db).valueOr:
|
||||
let stoHike = NibblesBuf.fromBytes(path).hikeUp(stoID, db).valueOr:
|
||||
if error[1] in HikeAcceptableStopsNotFound:
|
||||
return err(DelPathNotFound)
|
||||
return err(error[1])
|
||||
|
|
|
@ -16,11 +16,15 @@
|
|||
|
||||
import
|
||||
std/[algorithm, sequtils, sets, strutils, hashes],
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
stew/byteutils,
|
||||
chronicles,
|
||||
results,
|
||||
stint
|
||||
stint,
|
||||
./desc_nibbles
|
||||
|
||||
export
|
||||
desc_nibbles
|
||||
|
||||
type
|
||||
VertexID* = distinct uint64
|
||||
|
@ -267,9 +271,9 @@ func cmp*(a, b: LeafTie): int =
|
|||
# Public helpers: Reversible conversions between `PathID`, `HashKey`, etc.
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
func to*(pid: PathID; T: type NibblesSeq): T =
|
||||
func to*(pid: PathID; T: type NibblesBuf): T =
|
||||
## Representation of a `PathID` as `NibblesBuf` (preserving full information)
|
||||
let nibbles = pid.pfx.toBytesBE.toSeq.initNibbleRange()
|
||||
let nibbles = NibblesBuf.fromBytes(pid.pfx.toBytesBE)
|
||||
if pid.length < 64:
|
||||
nibbles.slice(0, pid.length.int)
|
||||
else:
|
||||
|
|
|
@ -0,0 +1,141 @@
|
|||
# nimbus-eth1
|
||||
# Copyright (c) 2023-2024 Status Research & Development GmbH
|
||||
# Licensed under either of
|
||||
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
# http://www.apache.org/licenses/LICENSE-2.0)
|
||||
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
# http://opensource.org/licenses/MIT)
|
||||
# at your option. This file may not be copied, modified, or distributed
|
||||
# except according to those terms.
|
||||
|
||||
import stew/arrayops
|
||||
|
||||
type NibblesBuf* = object
|
||||
## Allocation-free type for storing up to 64 4-bit nibbles, as seen in the
|
||||
## Ethereum MPT
|
||||
bytes: array[32, byte]
|
||||
ibegin, iend: int8
|
||||
# Where valid nibbles can be found - we use indices here to avoid copies
|
||||
# when slicing - iend not inclusive
|
||||
|
||||
func fromBytes*(T: type NibblesBuf, bytes: openArray[byte]): T =
  ## Interpret `bytes` as a nibble sequence, copying at most 32 bytes
  ## (64 nibbles); any extra input is ignored.
  let copied = result.bytes.copyFrom(bytes)
  result.iend = int8(copied) * 2
|
||||
|
||||
func nibble*(T: type NibblesBuf, nibble: byte): T =
  ## A one-nibble buffer holding the low 4 bits of `nibble`.
  result.iend = 1
  result.bytes[0] = nibble shl 4
|
||||
|
||||
func `[]`*(r: NibblesBuf, i: int): byte =
  ## Return nibble `i` (0-based from `ibegin`); the high half of each byte
  ## holds the earlier nibble.
  ##
  ## Was a template, which re-evaluates its arguments on every mention — a
  ## hazard for side-effecting index expressions. A `func` evaluates each
  ## argument exactly once and participates in normal overload resolution.
  let pos = r.ibegin + i
  if (pos and 1) != 0:
    r.bytes[pos shr 1] and 0xf
  else:
    r.bytes[pos shr 1] shr 4
|
||||
|
||||
func `[]=`*(r: var NibblesBuf, i: int, v: byte) =
  ## Overwrite nibble `i` with the low 4 bits of `v`, leaving the other
  ## nibble of the affected byte untouched.
  ##
  ## Was a template assigning through a NON-var parameter — it compiled only
  ## because templates substitute the l-value textually. A `func` with a
  ## `var` parameter states the mutation in the signature and is checked by
  ## the compiler; all existing call sites pass l-values, so they still work.
  let pos = r.ibegin + i
  if (pos and 1) != 0:
    r.bytes[pos shr 1] = (v and 0x0f) or (r.bytes[pos shr 1] and 0xf0)
  else:
    r.bytes[pos shr 1] = (v shl 4) or (r.bytes[pos shr 1] and 0x0f)
|
||||
|
||||
func len*(r: NibblesBuf): int =
  ## Number of valid nibbles currently stored in the buffer.
  int(r.iend) - int(r.ibegin)
|
||||
|
||||
func `==`*(lhs, rhs: NibblesBuf): bool =
  ## Nibble-wise equality; buffers of different lengths are never equal.
  if lhs.len != rhs.len:
    return false
  for i in 0 ..< lhs.len:
    if lhs[i] != rhs[i]:
      return false
  true
|
||||
|
||||
func `$`*(r: NibblesBuf): string =
  ## Lower-case hex representation, one character per nibble.
  const hexChars = "0123456789abcdef"
  result = newStringOfCap(64)
  for i in 0 ..< r.len:
    result.add hexChars[r[i]]
|
||||
|
||||
func slice*(r: NibblesBuf, ibegin: int, iend = -1): NibblesBuf =
  ## Zero-copy sub-buffer of `r` starting at nibble `ibegin`. A negative
  ## `iend` counts back from the end (`-1` = include the last nibble); a
  ## non-negative `iend` is an exclusive end index. Only the index window
  ## moves — the backing bytes are shared by copy of the fixed array.
  result.bytes = r.bytes
  result.ibegin = r.ibegin + ibegin.int8
  let newEnd =
    if iend >= 0:
      min(64, r.ibegin + iend)
    else:
      min(64, r.iend + iend + 1)
  doAssert ibegin >= 0 and newEnd <= result.bytes.len * 2
  result.iend = newEnd.int8
|
||||
|
||||
func initHexPrefix(nibbleCount: int, isLeaf: bool,
                   oddnessFlag: var bool): seq[byte] =
  ## Allocate the hex-prefix output and write the flag byte (leaf bit and
  ## odd-length bit in the high nibble of byte 0).
  oddnessFlag = (nibbleCount and 1) != 0
  result = newSeq[byte]((nibbleCount div 2) + 1)
  result[0] = byte((int(isLeaf) * 2 + int(oddnessFlag)) shl 4)

func appendNibbles(dst: var seq[byte], r: NibblesBuf, writeHead: var int,
                   oddnessFlag: var bool) =
  ## Append the nibbles of `r` to `dst`, carrying the current byte position
  ## and odd/even phase across successive calls.
  for i in 0 ..< r.len:
    let nextNibble = r[i]
    if oddnessFlag:
      dst[writeHead] = dst[writeHead] or nextNibble
    else:
      inc writeHead
      dst[writeHead] = nextNibble shl 4
    oddnessFlag = not oddnessFlag

func toHexPrefix*(r: NibblesBuf, isLeaf = false): seq[byte] =
  ## Hex-prefix ("compact") encoding of `r` as used in the Ethereum MPT
  ## wire format.
  ##
  ## Replaces the previous `{.dirty.}` template pair, which injected
  ## `result`, `isLeaf`, `oddnessFlag` and `writeHead` into caller scope —
  ## unhygienic and fragile. The templates were module-private, so swapping
  ## them for typed helpers changes no public interface.
  var oddnessFlag: bool
  result = initHexPrefix(r.len, isLeaf, oddnessFlag)
  var writeHead = 0
  result.appendNibbles(r, writeHead, oddnessFlag)

func toHexPrefix*(r1, r2: NibblesBuf, isLeaf = false): seq[byte] =
  ## Hex-prefix encoding of the logical concatenation `r1 & r2`, written
  ## without materialising the intermediate buffer.
  var oddnessFlag: bool
  result = initHexPrefix(r1.len + r2.len, isLeaf, oddnessFlag)
  var writeHead = 0
  result.appendNibbles(r1, writeHead, oddnessFlag)
  result.appendNibbles(r2, writeHead, oddnessFlag)
|
||||
|
||||
func sharedPrefixLen*(lhs, rhs: NibblesBuf): int =
  ## Length of the longest common nibble prefix of `lhs` and `rhs`.
  let limit = min(lhs.len, rhs.len)
  while result < limit and lhs[result] == rhs[result]:
    inc result
|
||||
|
||||
func startsWith*(lhs, rhs: NibblesBuf): bool =
  ## True if `rhs` is a (possibly empty) prefix of `lhs`.
  rhs.len <= lhs.len and sharedPrefixLen(lhs, rhs) == rhs.len
|
||||
|
||||
func fromHexPrefix*(
    T: type NibblesBuf, r: openArray[byte]
): tuple[isLeaf: bool, nibbles: NibblesBuf] =
  ## Decode a hex-prefix ("compact") encoded path as used in the Ethereum
  ## MPT: bit 0x20 of the first byte is the leaf flag, bit 0x10 marks an
  ## odd nibble count (with the first nibble in the low half of byte 0).
  ## Over-long (malformed) input is truncated at the 64-nibble capacity.
  ##
  ## Fix: the previous loop checked `i >= 64` only once per input byte, so
  ## with the odd-length flag set and >= 33 input bytes (malformed but
  ## reachable via deblobify of stored data) it wrote nibble index 64 →
  ## `bytes[32]`, out of the 32-byte array. Bounds are now checked before
  ## every nibble write; well-formed input decodes identically.
  if r.len > 0:
    result.isLeaf = (r[0] and 0x20) != 0
    let hasOddLen = (r[0] and 0x10) != 0

    var i = 0'i8
    if hasOddLen:
      result.nibbles[0] = r[0] and 0x0f
      i += 1

    for j in 1 ..< r.len:
      if i >= 64:
        break
      result.nibbles[i] = r[j] shr 4
      i += 1
      if i >= 64:
        break # odd-length overflow guard — see note above
      result.nibbles[i] = r[j] and 0x0f
      i += 1

    result.nibbles.iend = i
  else:
    result.isLeaf = false
|
||||
|
||||
func `&`*(a, b: NibblesBuf): NibblesBuf =
  ## Concatenate two nibble buffers, truncating at the 64-nibble capacity.
  ##
  ## Fix: the previous version clamped `iend` with `min(64, a.len + b.len)`
  ## but did NOT clamp the writes — when `a.len + b.len > 64` the second
  ## loop wrote nibble indices >= 64, past the 32-byte backing array. The
  ## writes now honour the same limit; shorter inputs behave identically.
  let total = min(64, a.len + b.len)
  for i in 0 ..< min(a.len, total):
    result[i] = a[i]
  for i in a.len ..< total:
    result[i] = b[i - a.len]
  result.iend = int8(total)
|
||||
|
||||
template getBytes*(a: NibblesBuf): array[32, byte] =
  ## Raw backing bytes — all 32, regardless of how many nibbles are valid.
  a.bytes
|
|
@ -16,7 +16,7 @@
|
|||
|
||||
import
|
||||
std/[hashes, sets, tables],
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
"."/[desc_error, desc_identifiers]
|
||||
|
||||
type
|
||||
|
@ -57,10 +57,10 @@ type
|
|||
## Vertex for building a hexary Patricia or Merkle Patricia Trie
|
||||
case vType*: VertexType
|
||||
of Leaf:
|
||||
lPfx*: NibblesSeq ## Portion of path segment
|
||||
lPfx*: NibblesBuf ## Portion of path segment
|
||||
lData*: PayloadRef ## Reference to data payload
|
||||
of Extension:
|
||||
ePfx*: NibblesSeq ## Portion of path segment
|
||||
ePfx*: NibblesBuf ## Portion of path segment
|
||||
eVid*: VertexID ## Edge to vertex with ID `eVid`
|
||||
of Branch:
|
||||
bVid*: array[16,VertexID] ## Edge list with vertex IDs
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
|
||||
import
|
||||
std/typetraits,
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
"."/[aristo_desc, aristo_get, aristo_hike, aristo_utils]
|
||||
|
||||
|
@ -44,7 +44,7 @@ proc retrievePayload(
|
|||
if path.len == 0:
|
||||
return err(FetchPathInvalid)
|
||||
|
||||
let hike = path.initNibbleRange.hikeUp(root, db).valueOr:
|
||||
let hike = NibblesBuf.fromBytes(path).hikeUp(root, db).valueOr:
|
||||
if error[1] in HikeAcceptableStopsNotFound:
|
||||
return err(FetchPathNotFound)
|
||||
return err(error[1])
|
||||
|
@ -74,7 +74,7 @@ proc hasPayload(
|
|||
if path.len == 0:
|
||||
return err(FetchPathInvalid)
|
||||
|
||||
let hike = path.initNibbleRange.hikeUp(VertexID(1), db).valueOr:
|
||||
let hike = NibblesBuf.fromBytes(path).hikeUp(VertexID(1), db).valueOr:
|
||||
if error[1] in HikeAcceptableStopsNotFound:
|
||||
return ok(false)
|
||||
return err(error[1])
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
{.push raises: [].}
|
||||
|
||||
import
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
"."/[aristo_desc, aristo_get]
|
||||
|
||||
|
@ -25,7 +25,7 @@ type
|
|||
## Trie traversal path
|
||||
root*: VertexID ## Handy for some fringe cases
|
||||
legs*: seq[Leg] ## Chain of vertices and IDs
|
||||
tail*: NibblesSeq ## Portion of non completed path
|
||||
tail*: NibblesBuf ## Portion of non completed path
|
||||
|
||||
const
|
||||
HikeAcceptableStopsNotFound* = {
|
||||
|
@ -43,13 +43,13 @@ const
|
|||
# Private functions
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
func getNibblesImpl(hike: Hike; start = 0; maxLen = high(int)): NibblesSeq =
|
||||
func getNibblesImpl(hike: Hike; start = 0; maxLen = high(int)): NibblesBuf =
|
||||
## May be needed for partial rebuild, as well
|
||||
for n in start ..< min(hike.legs.len, maxLen):
|
||||
let leg = hike.legs[n]
|
||||
case leg.wp.vtx.vType:
|
||||
of Branch:
|
||||
result = result & @[leg.nibble.byte].initNibbleRange.slice(1)
|
||||
result = result & NibblesBuf.nibble(leg.nibble.byte)
|
||||
of Extension:
|
||||
result = result & leg.wp.vtx.ePfx
|
||||
of Leaf:
|
||||
|
@ -63,22 +63,22 @@ func to*(rc: Result[Hike,(VertexID,AristoError,Hike)]; T: type Hike): T =
|
|||
## Extract `Hike` from either ok ot error part of argument `rc`.
|
||||
if rc.isOk: rc.value else: rc.error[2]
|
||||
|
||||
func to*(hike: Hike; T: type NibblesSeq): T =
|
||||
func to*(hike: Hike; T: type NibblesBuf): T =
|
||||
## Convert back
|
||||
hike.getNibblesImpl() & hike.tail
|
||||
|
||||
func legsTo*(hike: Hike; T: type NibblesSeq): T =
|
||||
func legsTo*(hike: Hike; T: type NibblesBuf): T =
|
||||
## Convert back
|
||||
hike.getNibblesImpl()
|
||||
|
||||
func legsTo*(hike: Hike; numLegs: int; T: type NibblesSeq): T =
|
||||
func legsTo*(hike: Hike; numLegs: int; T: type NibblesBuf): T =
|
||||
## variant of `legsTo()`
|
||||
hike.getNibblesImpl(0, numLegs)
|
||||
|
||||
# --------
|
||||
|
||||
proc hikeUp*(
|
||||
path: NibblesSeq; # Partial path
|
||||
path: NibblesBuf; # Partial path
|
||||
root: VertexID; # Start vertex
|
||||
db: AristoDbRef; # Database
|
||||
): Result[Hike,(VertexID,AristoError,Hike)] =
|
||||
|
@ -114,7 +114,7 @@ proc hikeUp*(
|
|||
if hike.tail.len == hike.tail.sharedPrefixLen(leg.wp.vtx.lPfx):
|
||||
# Bingo, got full path
|
||||
hike.legs.add leg
|
||||
hike.tail = EmptyNibbleSeq
|
||||
hike.tail = NibblesBuf()
|
||||
# This is the only loop exit
|
||||
break
|
||||
|
||||
|
@ -142,7 +142,7 @@ proc hikeUp*(
|
|||
# There must be some more data (aka `tail`) after an `Extension` vertex.
|
||||
if hike.tail.len == 0:
|
||||
hike.legs.add leg
|
||||
hike.tail = EmptyNibbleSeq
|
||||
hike.tail = NibblesBuf()
|
||||
return err((vid,HikeExtTailEmpty,hike)) # Well, somehow odd
|
||||
|
||||
if leg.wp.vtx.ePfx.len != hike.tail.sharedPrefixLen(leg.wp.vtx.ePfx):
|
||||
|
@ -163,7 +163,7 @@ proc hikeUp*(
|
|||
db: AristoDbRef;
|
||||
): Result[Hike,(VertexID,AristoError,Hike)] =
|
||||
## Variant of `hike()`
|
||||
lty.path.to(NibblesSeq).hikeUp(lty.root, db)
|
||||
lty.path.to(NibblesBuf).hikeUp(lty.root, db)
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# End
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
import
|
||||
std/[sequtils, sets, typetraits],
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
".."/[aristo_desc, aristo_get, aristo_hike, aristo_layers, aristo_vid]
|
||||
|
||||
|
@ -20,7 +20,7 @@ import
|
|||
# Private getters & setters
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
proc xPfx(vtx: VertexRef): NibblesSeq =
|
||||
proc xPfx(vtx: VertexRef): NibblesBuf =
|
||||
case vtx.vType:
|
||||
of Leaf:
|
||||
return vtx.lPfx
|
||||
|
@ -106,7 +106,7 @@ proc insertBranch(
|
|||
|
||||
if linkVtx.vType == Leaf:
|
||||
# Double check path prefix
|
||||
if 64 < hike.legsTo(NibblesSeq).len + linkVtx.lPfx.len:
|
||||
if 64 < hike.legsTo(NibblesBuf).len + linkVtx.lPfx.len:
|
||||
return err(MergeBranchLinkLeafGarbled)
|
||||
|
||||
let
|
||||
|
@ -266,7 +266,7 @@ proc mergePayloadTopIsBranchAddLeaf(
|
|||
if db.pPrf.len == 0:
|
||||
# Not much else that can be done here
|
||||
raiseAssert "Dangling edge:" &
|
||||
" pfx=" & $hike.legsTo(hike.legs.len-1,NibblesSeq) &
|
||||
" pfx=" & $hike.legsTo(hike.legs.len-1,NibblesBuf) &
|
||||
" branch=" & $parent &
|
||||
" nibble=" & $nibble &
|
||||
" edge=" & $linkID &
|
||||
|
@ -479,7 +479,7 @@ proc mergePayloadImpl*(
|
|||
## leaf record.
|
||||
##
|
||||
let
|
||||
nibblesPath = path.initNibbleRange
|
||||
nibblesPath = NibblesBuf.fromBytes(path)
|
||||
hike = nibblesPath.hikeUp(root, db).to(Hike)
|
||||
|
||||
var okHike: Hike
|
||||
|
@ -512,7 +512,7 @@ proc mergePayloadImpl*(
|
|||
okHike = Hike(root: wp.vid, legs: @[Leg(wp: wp, nibble: -1)])
|
||||
|
||||
# Double check the result (may be removed in future)
|
||||
if okHike.to(NibblesSeq) != nibblesPath:
|
||||
if okHike.to(NibblesBuf) != nibblesPath:
|
||||
return err(MergeAssemblyFailed) # Ooops
|
||||
|
||||
ok()
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
|
||||
import
|
||||
std/tables,
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
"."/[aristo_desc, aristo_get, aristo_hike, aristo_path]
|
||||
|
||||
|
@ -30,7 +30,7 @@ import
|
|||
# Private helpers
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
proc `<=`(a, b: NibblesSeq): bool =
|
||||
proc `<=`(a, b: NibblesBuf): bool =
|
||||
## Compare nibbles, different lengths are padded to the right with zeros
|
||||
let abMin = min(a.len, b.len)
|
||||
for n in 0 ..< abMin:
|
||||
|
@ -47,7 +47,7 @@ proc `<=`(a, b: NibblesSeq): bool =
|
|||
return false
|
||||
true
|
||||
|
||||
proc `<`(a, b: NibblesSeq): bool =
|
||||
proc `<`(a, b: NibblesBuf): bool =
|
||||
not (b <= a)
|
||||
|
||||
# ------------------
|
||||
|
@ -75,7 +75,7 @@ proc branchNibbleMax*(vtx: VertexRef; maxInx: int8): int8 =
|
|||
proc toLeafTiePayload(hike: Hike): (LeafTie,PayloadRef) =
|
||||
## Shortcut for iterators. This function will gloriously crash unless the
|
||||
## `hike` argument is complete.
|
||||
(LeafTie(root: hike.root, path: hike.to(NibblesSeq).pathToTag.value),
|
||||
(LeafTie(root: hike.root, path: hike.to(NibblesBuf).pathToTag.value),
|
||||
hike.legs[^1].wp.vtx.lData)
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -139,7 +139,7 @@ proc zeroAdjust(
|
|||
## Adjust empty argument path to the first vertex entry to the right. This
|
||||
## applies if the argument `hike` is before the first entry in the database.
|
||||
## The result is a hike which is aligned with the first entry.
|
||||
proc accept(p: Hike; pfx: NibblesSeq): bool =
|
||||
proc accept(p: Hike; pfx: NibblesBuf): bool =
|
||||
when doLeast:
|
||||
p.tail <= pfx
|
||||
else:
|
||||
|
@ -151,7 +151,7 @@ proc zeroAdjust(
|
|||
else:
|
||||
w.branchNibbleMax n
|
||||
|
||||
proc toHike(pfx: NibblesSeq, root: VertexID, db: AristoDbRef): Hike =
|
||||
proc toHike(pfx: NibblesBuf, root: VertexID, db: AristoDbRef): Hike =
|
||||
when doLeast:
|
||||
pfx.pathPfxPad(0).hikeUp(root, db).to(Hike)
|
||||
else:
|
||||
|
@ -163,7 +163,7 @@ proc zeroAdjust(
|
|||
let root = db.getVtx hike.root
|
||||
if root.isValid:
|
||||
block fail:
|
||||
var pfx: NibblesSeq
|
||||
var pfx: NibblesBuf
|
||||
case root.vType:
|
||||
of Branch:
|
||||
# Find first non-dangling link and assign it
|
||||
|
@ -179,7 +179,7 @@ proc zeroAdjust(
|
|||
if n < 0:
|
||||
# Before or after the database range
|
||||
return err((hike.root,NearbyBeyondRange))
|
||||
pfx = @[n.byte].initNibbleRange.slice(1)
|
||||
pfx = NibblesBuf.nibble(n.byte)
|
||||
|
||||
of Extension:
|
||||
let ePfx = root.ePfx
|
||||
|
@ -210,7 +210,7 @@ proc finalise(
|
|||
moveRight: static[bool]; # Direction of next vertex
|
||||
): Result[Hike,(VertexID,AristoError)] =
|
||||
## Handle some pathological cases after main processing failed
|
||||
proc beyond(p: Hike; pfx: NibblesSeq): bool =
|
||||
proc beyond(p: Hike; pfx: NibblesBuf): bool =
|
||||
when moveRight:
|
||||
pfx < p.tail
|
||||
else:
|
||||
|
@ -239,14 +239,14 @@ proc finalise(
|
|||
if not vtx.isValid:
|
||||
return err((vid,NearbyDanglingLink))
|
||||
|
||||
var pfx: NibblesSeq
|
||||
var pfx: NibblesBuf
|
||||
case vtx.vType:
|
||||
of Leaf:
|
||||
pfx = vtx.lPfx
|
||||
of Extension:
|
||||
pfx = vtx.ePfx
|
||||
of Branch:
|
||||
pfx = @[vtx.branchBorderNibble.byte].initNibbleRange.slice(1)
|
||||
pfx = NibblesBuf.nibble(vtx.branchBorderNibble.byte)
|
||||
if hike.beyond pfx:
|
||||
return err((vid,NearbyBeyondRange))
|
||||
|
||||
|
@ -274,7 +274,7 @@ proc nearbyNext(
|
|||
else:
|
||||
0 < nibble
|
||||
|
||||
proc accept(p: Hike; pfx: NibblesSeq): bool =
|
||||
proc accept(p: Hike; pfx: NibblesBuf): bool =
|
||||
when moveRight:
|
||||
p.tail <= pfx
|
||||
else:
|
||||
|
@ -356,7 +356,7 @@ proc nearbyNext(
|
|||
uHike.tail = uHikeTail
|
||||
else:
|
||||
# Pop current `Branch` vertex on top and append nibble to `tail`
|
||||
uHike.tail = @[top.nibble.byte].initNibbleRange.slice(1) & uHike.tail
|
||||
uHike.tail = NibblesBuf.nibble(top.nibble.byte) & uHike.tail
|
||||
uHike.legs.setLen(uHike.legs.len - 1)
|
||||
# End while
|
||||
|
||||
|
@ -375,7 +375,7 @@ proc nearbyNextLeafTie(
|
|||
if 0 < hike.legs.len:
|
||||
if hike.legs[^1].wp.vtx.vType != Leaf:
|
||||
return err((hike.legs[^1].wp.vid,NearbyLeafExpected))
|
||||
let rc = hike.legsTo(NibblesSeq).pathToTag
|
||||
let rc = hike.legsTo(NibblesBuf).pathToTag
|
||||
if rc.isOk:
|
||||
return ok rc.value
|
||||
return err((VertexID(0),rc.error))
|
||||
|
@ -434,14 +434,14 @@ iterator rightPairs*(
|
|||
if 0 < tail.len:
|
||||
let topNibble = tail[tail.len - 1]
|
||||
if topNibble < 15:
|
||||
let newNibble = @[topNibble+1].initNibbleRange.slice(1)
|
||||
let newNibble = NibblesBuf.nibble(topNibble+1)
|
||||
hike.tail = tail.slice(0, tail.len - 1) & newNibble
|
||||
hike.legs.setLen(hike.legs.len - 1)
|
||||
break reuseHike
|
||||
if 1 < tail.len:
|
||||
let nxtNibble = tail[tail.len - 2]
|
||||
if nxtNibble < 15:
|
||||
let dblNibble = @[((nxtNibble+1) shl 4) + 0].initNibbleRange
|
||||
let dblNibble = NibblesBuf.fromBytes([((nxtNibble+1) shl 4) + 0])
|
||||
hike.tail = tail.slice(0, tail.len - 2) & dblNibble
|
||||
hike.legs.setLen(hike.legs.len - 1)
|
||||
break reuseHike
|
||||
|
@ -497,14 +497,14 @@ iterator leftPairs*(
|
|||
if 0 < tail.len:
|
||||
let topNibble = tail[tail.len - 1]
|
||||
if 0 < topNibble:
|
||||
let newNibble = @[topNibble - 1].initNibbleRange.slice(1)
|
||||
let newNibble = NibblesBuf.nibble(topNibble - 1)
|
||||
hike.tail = tail.slice(0, tail.len - 1) & newNibble
|
||||
hike.legs.setLen(hike.legs.len - 1)
|
||||
break reuseHike
|
||||
if 1 < tail.len:
|
||||
let nxtNibble = tail[tail.len - 2]
|
||||
if 0 < nxtNibble:
|
||||
let dblNibble = @[((nxtNibble-1) shl 4) + 15].initNibbleRange
|
||||
let dblNibble = NibblesBuf.fromBytes([((nxtNibble-1) shl 4) + 15])
|
||||
hike.tail = tail.slice(0, tail.len - 2) & dblNibble
|
||||
hike.legs.setLen(hike.legs.len - 1)
|
||||
break reuseHike
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
import
|
||||
std/sequtils,
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
./aristo_desc
|
||||
|
||||
|
@ -30,7 +30,7 @@ import
|
|||
#
|
||||
# where the `ignored` part is typically expected to be a zero nibble.
|
||||
|
||||
func pathPfxPad*(pfx: NibblesSeq; dblNibble: static[byte]): NibblesSeq
|
||||
func pathPfxPad*(pfx: NibblesBuf; dblNibble: static[byte]): NibblesBuf
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Public functions
|
||||
|
@ -51,17 +51,7 @@ func pathAsBlob*(tag: PathID): Blob =
|
|||
else:
|
||||
return key[0 .. (tag.length - 1) div 2]
|
||||
|
||||
func pathAsHEP*(tag: PathID; isLeaf = false): Blob =
|
||||
## Convert the `tag` argument to a hex encoded partial path as used in `eth`
|
||||
## or `snap` protocol where full paths of nibble length 64 are encoded as 32
|
||||
## byte `Blob` and non-leaf partial paths are *compact encoded* (i.e. per
|
||||
## the Ethereum wire protocol.)
|
||||
if 64 <= tag.length:
|
||||
@(tag.pfx.toBytesBE)
|
||||
else:
|
||||
tag.to(NibblesSeq).hexPrefixEncode(isLeaf=true)
|
||||
|
||||
func pathToTag*(partPath: NibblesSeq): Result[PathID,AristoError] =
|
||||
func pathToTag*(partPath: NibblesBuf): Result[PathID,AristoError] =
|
||||
## Convert the argument `partPath` to a `PathID` type value.
|
||||
if partPath.len == 0:
|
||||
return ok VOID_PATH_ID
|
||||
|
@ -83,9 +73,9 @@ func pathToTag*(partPath: openArray[byte]): Result[PathID,AristoError] =
|
|||
|
||||
# --------------------
|
||||
|
||||
func pathPfxPad*(pfx: NibblesSeq; dblNibble: static[byte]): NibblesSeq =
|
||||
func pathPfxPad*(pfx: NibblesBuf; dblNibble: static[byte]): NibblesBuf =
|
||||
## Extend (or cut) the argument nibbles sequence `pfx` for generating a
|
||||
## `NibblesSeq` with exactly 64 nibbles, the equivalent of a path key.
|
||||
## `NibblesBuf` with exactly 64 nibbles, the equivalent of a path key.
|
||||
##
|
||||
## This function must be handled with some care regarding a meaningful value
|
||||
## for the `dblNibble` argument. Currently, only static values `0` and `255`
|
||||
|
@ -95,11 +85,11 @@ func pathPfxPad*(pfx: NibblesSeq; dblNibble: static[byte]): NibblesSeq =
|
|||
|
||||
let padLen = 64 - pfx.len
|
||||
if 0 <= padLen:
|
||||
result = pfx & dblNibble.repeat(padLen div 2).mapIt(it.byte).initNibbleRange
|
||||
result = pfx & NibblesBuf.fromBytes(dblNibble.repeat(padLen div 2).mapIt(it.byte))
|
||||
if (padLen and 1) == 1:
|
||||
result = result & @[dblNibble.byte].initNibbleRange.slice(1)
|
||||
result = result & NibblesBuf.nibble(dblNibble.byte)
|
||||
else:
|
||||
let nope = seq[byte].default.initNibbleRange
|
||||
let nope = NibblesBuf()
|
||||
result = pfx.slice(0,64) & nope # nope forces re-alignment
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
import
|
||||
std/sequtils,
|
||||
eth/[common, rlp, trie/nibbles],
|
||||
eth/[common, rlp],
|
||||
results,
|
||||
"."/[aristo_constants, aristo_desc, aristo_get]
|
||||
|
||||
|
@ -98,7 +98,7 @@ proc read*(rlp: var Rlp; T: type NodeRef): T {.gcsafe, raises: [RlpError].} =
|
|||
of 2:
|
||||
if blobs[0].len == 0:
|
||||
return aristoError(RlpNonEmptyBlobExpected)
|
||||
let (isLeaf, pathSegment) = hexPrefixDecode blobs[0]
|
||||
let (isLeaf, pathSegment) = NibblesBuf.fromHexPrefix blobs[0]
|
||||
if isLeaf:
|
||||
return NodeRef(
|
||||
vType: Leaf,
|
||||
|
@ -147,7 +147,7 @@ proc append*(writer: var RlpWriter; node: NodeRef) =
|
|||
|
||||
of Extension:
|
||||
writer.startList(2)
|
||||
writer.append node.ePfx.hexPrefixEncode(isleaf = false)
|
||||
writer.append node.ePfx.toHexPrefix(isleaf = false)
|
||||
writer.addHashKey node.key[0]
|
||||
|
||||
of Leaf:
|
||||
|
@ -155,7 +155,7 @@ proc append*(writer: var RlpWriter; node: NodeRef) =
|
|||
ok(node.key[0]) # always succeeds
|
||||
|
||||
writer.startList(2)
|
||||
writer.append node.lPfx.hexPrefixEncode(isleaf = true)
|
||||
writer.append node.lPfx.toHexPrefix(isleaf = true)
|
||||
writer.append node.lData.serialise(getKey0).value
|
||||
|
||||
# ---------------------
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
|
||||
import
|
||||
std/[sequtils, sets, typetraits],
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
results,
|
||||
"."/[aristo_constants, aristo_desc, aristo_get, aristo_hike, aristo_layers]
|
||||
|
||||
|
@ -184,7 +184,7 @@ proc retrieveStoAccHike*(
|
|||
## vertex and the vertex ID.
|
||||
##
|
||||
# Expand vertex path to account leaf
|
||||
let hike = (@accPath).initNibbleRange.hikeUp(VertexID(1), db).valueOr:
|
||||
let hike = accPath.to(NibblesBuf).hikeUp(VertexID(1), db).valueOr:
|
||||
return err(UtilsAccInaccessible)
|
||||
|
||||
# Extract the account payload from the leaf
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
import
|
||||
std/[strutils, typetraits],
|
||||
chronicles,
|
||||
eth/[common, trie/nibbles],
|
||||
eth/common,
|
||||
stew/byteutils,
|
||||
../../../aristo,
|
||||
../../../aristo/aristo_desc,
|
||||
|
@ -487,7 +487,7 @@ proc ctxMethods(cCtx: AristoCoreDbCtxRef): CoreDbCtxFns =
|
|||
let error = (col.stoRoot,MptRootUnacceptable)
|
||||
return err(error.toError(base, info, RootUnacceptable))
|
||||
# Verify path if there is a particular storage root VID
|
||||
let rc = api.hikeUp(newMpt.accPath.to(NibblesSeq), AccountsVID, mpt)
|
||||
let rc = api.hikeUp(newMpt.accPath.to(NibblesBuf), AccountsVID, mpt)
|
||||
if rc.isErr:
|
||||
return err(rc.error[1].toError(base, info, AccNotFound))
|
||||
else:
|
||||
|
|
|
@ -1,106 +0,0 @@
|
|||
# Nimbus
|
||||
# Copyright (c) 2023-2024 Status Research & Development GmbH
|
||||
# Licensed under either of
|
||||
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
# http://www.apache.org/licenses/LICENSE-2.0)
|
||||
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
# http://opensource.org/licenses/MIT)
|
||||
# at your option. This file may not be copied, modified, or distributed except
|
||||
# according to those terms.
|
||||
|
||||
# This implementation of getBranch on the CoreDbMptRef type is a temporary solution
|
||||
# which can be removed once we get an equivient proc defined on the CoreDbMptRef type
|
||||
# in the db layer.
|
||||
|
||||
{.push raises: [].}
|
||||
|
||||
import
|
||||
eth/[rlp, trie/nibbles],
|
||||
results,
|
||||
"."/[core_db]
|
||||
|
||||
type
|
||||
TrieNodeKey = object
|
||||
hash: KeccakHash
|
||||
usedBytes: uint8
|
||||
|
||||
template len(key: TrieNodeKey): int =
|
||||
key.usedBytes.int
|
||||
|
||||
template asDbKey(k: TrieNodeKey): untyped =
|
||||
doAssert k.usedBytes == 32
|
||||
k.hash.data
|
||||
|
||||
template extensionNodeKey(r: Rlp): auto =
|
||||
hexPrefixDecode r.listElem(0).toBytes
|
||||
|
||||
proc getLocalBytes(x: TrieNodeKey): seq[byte] =
|
||||
## This proc should be used on nodes using the optimization
|
||||
## of short values within the key.
|
||||
doAssert x.usedBytes < 32
|
||||
x.hash.data[0..<x.usedBytes]
|
||||
|
||||
proc dbGet(db: CoreDbRef, data: openArray[byte]): seq[byte]
|
||||
{.gcsafe, raises: [].} =
|
||||
db.newKvt().get(data).valueOr: EmptyBlob
|
||||
|
||||
template keyToLocalBytes(db: CoreDbRef, k: TrieNodeKey): seq[byte] =
|
||||
if k.len < 32: k.getLocalBytes
|
||||
else: dbGet(db, k.asDbKey)
|
||||
|
||||
proc expectHash(r: Rlp): seq[byte] {.raises: [RlpError].} =
|
||||
result = r.toBytes
|
||||
if result.len != 32:
|
||||
raise newException(RlpTypeMismatch,
|
||||
"RLP expected to be a Keccak hash value, but has an incorrect length")
|
||||
|
||||
template getNode(db: CoreDbRef, elem: Rlp): untyped =
|
||||
if elem.isList: @(elem.rawData)
|
||||
else: dbGet(db, elem.expectHash)
|
||||
|
||||
proc getBranchAux(
|
||||
db: CoreDbRef, node: openArray[byte],
|
||||
fullPath: NibblesSeq,
|
||||
pathIndex: int,
|
||||
output: var seq[seq[byte]]) {.raises: [RlpError].} =
|
||||
var nodeRlp = rlpFromBytes node
|
||||
if not nodeRlp.hasData or nodeRlp.isEmpty: return
|
||||
|
||||
let path = fullPath.slice(pathIndex)
|
||||
case nodeRlp.listLen
|
||||
of 2:
|
||||
let (isLeaf, k) = nodeRlp.extensionNodeKey
|
||||
let sharedNibbles = sharedPrefixLen(path, k)
|
||||
if sharedNibbles == k.len:
|
||||
let value = nodeRlp.listElem(1)
|
||||
if not isLeaf:
|
||||
let nextLookup = getNode(db, value)
|
||||
output.add nextLookup
|
||||
getBranchAux(db, nextLookup, fullPath, pathIndex + sharedNibbles, output)
|
||||
of 17:
|
||||
if path.len != 0:
|
||||
var branch = nodeRlp.listElem(path[0].int)
|
||||
if not branch.isEmpty:
|
||||
let nextLookup = getNode(db, branch)
|
||||
output.add nextLookup
|
||||
getBranchAux(db, nextLookup, fullPath, pathIndex + 1, output)
|
||||
else:
|
||||
raise newException(RlpError, "node has an unexpected number of children")
|
||||
|
||||
proc getBranch*(
|
||||
# self: CoreDxPhkRef;
|
||||
# Note that PHK type has been removed. The difference PHK an MPT was that
|
||||
# the keys of the PHK were pre-hased (as in the legacy `SecureHexaryTrie`
|
||||
# object.) Can this code have worked at all? Looking at the `keyHash`
|
||||
# below it would mean the `key` was double hashed? -- j
|
||||
self: CoreDbMptRef;
|
||||
key: openArray[byte]): seq[seq[byte]] {.raises: [RlpError].} =
|
||||
let
|
||||
keyHash = keccakHash(key).data
|
||||
rootHash = self.getColumn.state.valueOr:
|
||||
raiseAssert "vmExecCommit(): state() failed " & $$error
|
||||
result = @[]
|
||||
var node = keyToLocalBytes(self.parent(), TrieNodeKey(
|
||||
hash: rootHash,usedBytes: rootHash.data.len().uint8))
|
||||
result.add node
|
||||
getBranchAux(self.parent(), node, initNibbleRange(keyHash), 0, result)
|
Loading…
Reference in New Issue