mirror of
https://github.com/status-im/nimbus-eth1.git
synced 2025-01-15 23:04:34 +00:00
f034af422a
Each branch node may have up to 16 sub-items - currently, these are given VertexIDs when they are first needed leading to a mostly-random order of VertexIDs for each subitem. Here, we pre-allocate all 16 vertex ids such that when a branch subitem is filled, it already has a VertexID waiting for it. This brings several important benefits: * subitems are sorted and "close" in their id sequencing - this means that when rocksdb stores them, they are likely to end up in the same data block thus improving read efficiency * because the ids are consecutive, we can store just the starting id and a bitmap representing which subitems are in use - this reduces disk space usage for branches allowing more of them to fit into a single disk read, further improving disk read and caching performance - disk usage at block 18M is down from 84 to 78gb! * the in-memory footprint of VertexRef reduced allowing more instances to fit into caches and less memory to be used overall. Because of the increased locality of reference, it turns out that we no longer need to iterate over the entire database to efficiently generate the hash key database because the normal computation is now faster - this significantly benefits "live" chain processing as well where each dirtied key must be accompanied by a read of all branch subitems next to it - most of the performance benefit in this branch comes from this locality-of-reference improvement. On a sample resync, there's already ~20% improvement with later blocks seeing increasing benefit (because the trie is deeper in later blocks leading to more benefit from branch read perf improvements) ``` blocks: 18729664, baseline: 190h43m49s, contender: 153h59m0s Time (total): -36h44m48s, -19.27% ``` Note: clients need to be resynced as the PR changes the on-disk format R.I.P. little bloom filter - your life in the repo was short but valuable
205 lines
6.0 KiB
Nim
205 lines
6.0 KiB
Nim
# nimbus-eth1
|
|
# Copyright (c) 2023-2024 Status Research & Development GmbH
|
|
# Licensed under either of
|
|
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
|
# http://www.apache.org/licenses/LICENSE-2.0)
|
|
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
|
# http://opensource.org/licenses/MIT)
|
|
# at your option. This file may not be copied, modified, or distributed
|
|
# except according to those terms.
|
|
|
|
{.push raises: [].}
|
|
|
|
import
|
|
eth/common,
|
|
results,
|
|
stew/arraybuf,
|
|
"."/[aristo_desc, aristo_get]
|
|
|
|
const
  HikeAcceptableStopsNotFound* = {
    HikeBranchTailEmpty,
    HikeBranchMissingEdge,
    HikeLeafUnexpected,
    HikeNoLegs}
    ## When trying to find a leaf vertex in the Patricia tree, there are
    ## several conditions where the search stops which do not constitute a
    ## problem with the trie (aka system error.)
|
|
|
|
# ------------------------------------------------------------------------------
|
|
# Private functions
|
|
# ------------------------------------------------------------------------------
|
|
|
|
func getNibblesImpl(hike: Hike; start = 0; maxLen = high(int)): NibblesBuf =
  ## Reassemble the nibble path covered by the hike's legs, starting at leg
  ## `start` and stopping before leg index `maxLen` (or the last leg,
  ## whichever comes first.) May be needed for partial rebuild, as well.
  let stop = min(hike.legs.len, maxLen)
  for inx in start ..< stop:
    let vtx = hike.legs[inx].wp.vtx
    case vtx.vType:
    of Branch:
      # A branch contributes its prefix plus the nibble that selected the
      # sub-vertex followed by this leg.
      result = result & vtx.pfx & NibblesBuf.nibble(hike.legs[inx].nibble.byte)
    of Leaf:
      # A leaf contributes its remaining path fragment only.
      result = result & vtx.pfx
|
|
|
|
# ------------------------------------------------------------------------------
|
|
# Public functions
|
|
# ------------------------------------------------------------------------------
|
|
|
|
func to*(rc: Result[Hike,(VertexID,AristoError,Hike)]; T: type Hike): T =
  ## Extract the `Hike` from either the ok or the error part of `rc`.
  if rc.isErr:
    rc.error[2]
  else:
    rc.value
|
|
|
|
func to*(hike: Hike; T: type NibblesBuf): T =
  ## Convert the hike back into a full nibble path: the part covered by the
  ## legs followed by the unresolved `tail`.
  result = hike.getNibblesImpl()
  result = result & hike.tail
|
|
|
|
func legsTo*(hike: Hike; T: type NibblesBuf): T =
  ## Convert only the resolved legs of the hike back into a nibble path,
  ## ignoring the unresolved `tail`.
  getNibblesImpl(hike)
|
|
|
|
func legsTo*(hike: Hike; numLegs: int; T: type NibblesBuf): T =
  ## Variant of `legsTo()` restricted to the path covered by the first
  ## `numLegs` legs.
  getNibblesImpl(hike, 0, numLegs)
|
|
|
|
# --------
|
|
|
|
proc step*(
    path: NibblesBuf, rvid: RootedVertexID, db: AristoDbRef
): Result[(VertexRef, NibblesBuf, VertexID), AristoError] =
  ## Take a single step along `path`: resolve the vertex `rvid` from `db` and
  ## consume the part of `path` that it covers.
  ##
  ## On success the returned triple holds
  ## * the resolved vertex,
  ## * the remaining (unconsumed) tail of `path`, and
  ## * the id of the next vertex to visit - `VertexID(0)` when the resolved
  ##   vertex is a `Leaf`, i.e. the walk is complete.
  # Fetch next vertex
  let (vtx, _) = db.getVtxRc(rvid).valueOr:
    if error != GetVtxNotFound:
      # Any error other than a plain lookup miss is passed through unchanged.
      return err(error)

    if rvid.root == rvid.vid:
      # The root vertex itself is missing, i.e. the hike never gets going.
      return err(HikeNoLegs)
    # The vertex ID `vid` was a follow up from a parent vertex, but there is
    # no child vertex on the database. So `vid` is a dangling link which is
    # allowed only if there is a partial trie (e.g. with `snap` sync.)
    return err(HikeDanglingEdge)

  case vtx.vType:
  of Leaf:
    # This must be the last vertex, so there cannot be any `tail` left.
    if path.len != path.sharedPrefixLen(vtx.pfx):
      return err(HikeLeafUnexpected)

    ok (vtx, NibblesBuf(), VertexID(0))

  of Branch:
    # There must be some more data (aka `tail`) after a `Branch` vertex.
    if path.len <= vtx.pfx.len:
      return err(HikeBranchTailEmpty)

    let
      # The nibble right after the branch prefix selects the sub-vertex.
      nibble = path[vtx.pfx.len]
      nextVid = vtx.bVid(nibble)

    if not nextVid.isValid:
      # No sub-vertex registered for this nibble slot.
      return err(HikeBranchMissingEdge)

    # Consume the branch prefix plus the selector nibble.
    ok (vtx, path.slice(vtx.pfx.len + 1), nextVid)
|
|
|
|
|
|
iterator stepUp*(
    path: NibblesBuf;                            # Partial path
    root: VertexID;                              # Start vertex
    db: AristoDbRef;                             # Database
): Result[VertexRef, AristoError] =
  ## For the argument `path`, iterate over the longest possible path in the
  ## argument database `db`, yielding each resolved vertex on the way down.
  ## On a failing step, a single error result is yielded and iteration stops.
  var
    path = path                                  # Remaining tail, shrinks per step
    next = root                                  # Vertex id to resolve next
    vtx: VertexRef
  block iter:
    while true:
      (vtx, path, next) = step(path, (root, next), db).valueOr:
        yield Result[VertexRef, AristoError].err(error)
        break iter

      yield Result[VertexRef, AristoError].ok(vtx)

      if path.len == 0:
        # Path fully consumed - walk is complete.
        break
|
|
|
|
proc hikeUp*(
    path: NibblesBuf;                            # Partial path
    root: VertexID;                              # Start vertex
    db: AristoDbRef;                             # Database
    leaf: Opt[VertexRef];
    hike: var Hike;
): Result[void,(VertexID,AristoError)] =
  ## For the argument `path`, find and return the longest possible path in the
  ## argument database `db` - this may result in a partial match in which case
  ## hike.tail will be non-empty.
  ##
  ## If a leaf is given, it gets used for the "last" leg of the hike.
  ##
  ## On error, the returned pair holds the vertex id at which the walk failed
  ## together with the error code.
  hike.root = root
  hike.tail = path
  hike.legs.setLen(0)

  if not root.isValid:
    return err((VertexID(0),HikeRootMissing))
  if path.len == 0:
    return err((VertexID(0),HikeEmptyPath))

  var vid = root
  while true:
    # NOTE(review): this compares the *full* input `path` (not the shrinking
    # `hike.tail`) against the leaf path fragment, so the shortcut appears to
    # only trigger when the leaf fragment covers the whole path - confirm
    # this is intended.
    if leaf.isSome() and leaf[].isValid and path == leaf[].pfx:
      hike.legs.add Leg(wp: VidVtxPair(vid: vid, vtx: leaf[]), nibble: -1)
      reset(hike.tail)
      break

    # Resolve the next vertex and consume the covered part of the tail. The
    # `path` declared here shadows the input parameter for the rest of this
    # loop iteration.
    let (vtx, path, next) = step(hike.tail, (root, vid), db).valueOr:
      return err((vid,error))

    let wp = VidVtxPair(vid:vid, vtx:vtx)

    case vtx.vType
    of Leaf:
      # `step()` returns an empty tail for a leaf, so the hike is complete.
      hike.legs.add Leg(wp: wp, nibble: -1)
      hike.tail = path

      break

    of Branch:
      # Record which nibble was taken to descend from this branch.
      hike.legs.add Leg(wp: wp, nibble: int8 hike.tail[vtx.pfx.len])

      hike.tail = path
      vid = next

  ok()
|
|
|
|
proc hikeUp*(
    lty: LeafTie;
    db: AristoDbRef;
    leaf: Opt[VertexRef];
    hike: var Hike
): Result[void,(VertexID,AristoError)] =
  ## Variant of `hikeUp()` taking a `LeafTie` argument.
  hikeUp(lty.path.to(NibblesBuf), lty.root, db, leaf, hike)
|
|
|
|
proc hikeUp*(
    path: openArray[byte];
    root: VertexID;
    db: AristoDbRef;
    leaf: Opt[VertexRef];
    hike: var Hike
): Result[void,(VertexID,AristoError)] =
  ## Variant of `hikeUp()` taking a raw byte path.
  let nibbles = NibblesBuf.fromBytes(path)
  nibbles.hikeUp(root, db, leaf, hike)
|
|
|
|
proc hikeUp*(
    path: Hash32;
    root: VertexID;
    db: AristoDbRef;
    leaf: Opt[VertexRef];
    hike: var Hike
): Result[void,(VertexID,AristoError)] =
  ## Variant of `hikeUp()` taking a `Hash32` path key.
  hikeUp(NibblesBuf.fromBytes(path.data), root, db, leaf, hike)
|
|
|
|
# ------------------------------------------------------------------------------
|
|
# End
|
|
# ------------------------------------------------------------------------------
|