# nimbus-eth1
# Copyright (c) 2023-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed
# except according to those terms.

{.push raises: [].}

import
  eth/common,
  results,
  stew/[arrayops, endians2],
  ./aristo_desc

# Allocation-free version of short big-endian encoding that skips the leading
# zeroes
type
  SbeBuf*[I] = object
    buf*: array[sizeof(I), byte]
    len*: byte

  RVidBuf* = object
    buf*: array[sizeof(SbeBuf[VertexID]) * 2, byte]
    len*: byte

func significantBytesBE(val: openArray[byte]): byte =
  for i in 0 ..< val.len:
    if val[i] != 0:
      return byte(val.len - i)
  return 1

func blobify*(v: VertexID|uint64): SbeBuf[typeof(v)] =
  let b = v.uint64.toBytesBE()
  SbeBuf[typeof(v)](buf: b, len: significantBytesBE(b))

func blobify*(v: StUint): SbeBuf[typeof(v)] =
  let b = v.toBytesBE()
  SbeBuf[typeof(v)](buf: b, len: significantBytesBE(b))

template data*(v: SbeBuf): openArray[byte] =
  let vv = v
  vv.buf.toOpenArray(vv.buf.len - int(vv.len), vv.buf.high)


func blobify*(rvid: RootedVertexID): RVidBuf =
  # Length-prefixed root encoding creates a unique and common prefix for all
  # vertices sharing the same root
  # TODO evaluate an encoding that colocates short roots (like VertexID(1)) with
  #      the length
  let root = rvid.root.blobify()
  result.buf[0] = root.len
  assign(result.buf.toOpenArray(1, root.len), root.data())

  if rvid.root == rvid.vid:
    result.len = root.len + 1
  else:
    # We can derive the length of the `vid` from the total length
    let vid = rvid.vid.blobify()
    assign(result.buf.toOpenArray(root.len + 1, root.len + vid.len), vid.data())
    result.len = root.len + 1 + vid.len

proc deblobify*[T: uint64|VertexID](data: openArray[byte], _: type T): Result[T,AristoError] =
  if data.len < 1 or data.len > 8:
    return err(Deblob64LenUnsupported)

  var tmp: array[8, byte]
  discard tmp.toOpenArray(8 - data.len, 7).copyFrom(data)

  ok T(uint64.fromBytesBE(tmp))
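
when isMainModule:
  block:
    # Illustrative sketch only (not from the original module): the short
    # big-endian encoding above keeps just the significant bytes, so 0x01ff
    # serialises to two bytes and round-trips through `deblobify`.
    let sbe = 0x01ffu64.blobify()
    doAssert @(sbe.data()) == @[byte 0x01, 0xff]
    doAssert sbe.data().deblobify(uint64).expect("valid encoding") == 0x01ffu64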

proc deblobify*(data: openArray[byte], _: type UInt256): Result[UInt256,AristoError] =
  if data.len < 1 or data.len > 32:
    return err(Deblob256LenUnsupported)

  ok UInt256.fromBytesBE(data)

func deblobify*(data: openArray[byte], T: type RootedVertexID): Result[T, AristoError] =
  if data.len < 2:
    return err(DeblobRVidLenUnsupported)

  let rlen = int(data[0]) # length prefix of the root id, checked after the size guard
  if data.len < rlen + 1:
    return err(DeblobRVidLenUnsupported)

  let
    root = ?deblobify(data.toOpenArray(1, rlen), VertexID)
    vid = if data.len > rlen + 1:
      ?deblobify(data.toOpenArray(rlen + 1, data.high()), VertexID)
    else:
      root
  ok (root, vid)

template data*(v: RVidBuf): openArray[byte] =
  let vv = v
  vv.buf.toOpenArray(0, vv.len - 1)
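
when isMainModule:
  block:
    # Illustrative sketch only (not from the original module): a rooted vertex
    # id round-trips through the length-prefixed encoding; (VertexID 2,
    # VertexID 5) takes one length byte plus one byte per id.
    let rvid: RootedVertexID = (VertexID(2), VertexID(5))
    let enc = rvid.blobify()
    doAssert @(enc.data()) == @[byte 1, 2, 5]
    doAssert enc.data().deblobify(RootedVertexID).expect("valid encoding") == rvid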

# ------------------------------------------------------------------------------
# Private helper
# ------------------------------------------------------------------------------

proc load64(data: openArray[byte]; start: var int, len: int): Result[uint64,AristoError] =
  if data.len < start + len:
    return err(Deblob64LenUnsupported)

  let val = ?deblobify(data.toOpenArray(start, start + len - 1), uint64)
  start += len
  ok val

proc load256(data: openArray[byte]; start: var int, len: int): Result[UInt256,AristoError] =
  if data.len < start + len:
    return err(Deblob256LenUnsupported)
  let val = ?deblobify(data.toOpenArray(start, start + len - 1), UInt256)
  start += len
  ok val

# ------------------------------------------------------------------------------
# Public functions
# ------------------------------------------------------------------------------

proc blobifyTo*(pyl: PayloadRef, data: var Blob) =
  if pyl.isNil:
    return
  case pyl.pType
  of RawData:
    data &= pyl.rawBlob
    data &= [0x10.byte]

  of AccountData:
    # `lens` holds `len-1` since `mask` filters out the zero-length case (which
    # allows saving 1 bit per length)
    var lens: uint16
    var mask: byte
    if 0 < pyl.account.nonce:
      mask = mask or 0x01
      let tmp = pyl.account.nonce.blobify()
      lens += tmp.len - 1 # 3 bits
      data &= tmp.data()

    if 0 < pyl.account.balance:
      mask = mask or 0x02
      let tmp = pyl.account.balance.blobify()
      lens += uint16(tmp.len - 1) shl 3 # 5 bits
      data &= tmp.data()

    if VertexID(0) < pyl.stoID:
      mask = mask or 0x04
      let tmp = pyl.stoID.blobify()
      lens += uint16(tmp.len - 1) shl 8 # 3 bits
      data &= tmp.data()

    if pyl.account.codeHash != EMPTY_CODE_HASH:
      mask = mask or 0x08
      data &= pyl.account.codeHash.data

    data &= lens.toBytesBE()
    data &= [mask]
  of StoData:
    data &= pyl.stoData.blobify().data
    data &= [0x20.byte]
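
# Worked example (added for illustration; values derived from the code above):
# an account payload with nonce 1, zero balance, no storage id and the empty
# code hash sets only mask bit 0x01 and stores the nonce in a single byte,
# giving the record
#
#   [0x01,        -- short big-endian nonce
#    0x00, 0x00,  -- `lens` (all stored lengths are len-1 = 0)
#    0x01]        -- `mask`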

proc blobifyTo*(vtx: VertexRef; data: var Blob): Result[void,AristoError] =
  ## This function serialises the vertex argument to a database record.
  ## Contrary to RLP based serialisation, these records aim to align on
  ## fixed byte boundaries.
  ## ::
  ##   Branch:
  ##     [VertexID, ...] -- list of up to 16 child vertex lookup keys
  ##     uint64          -- lengths of each child vertex, each taking 4 bits
  ##     0x08            -- marker(8)
  ##
  ##   Extension:
  ##     VertexID        -- child vertex lookup key
  ##     Blob            -- hex encoded partial path (at least one byte)
  ##     0x80 + xx       -- marker(2) + pathSegmentLen(6)
  ##
  ##   Leaf:
  ##     Blob            -- opaque leaf data payload (might be zero length)
  ##     Blob            -- hex encoded partial path (at least one byte)
  ##     0xc0 + yy       -- marker(2) + partialPathLen(6)
  ##
  ## For a branch record, the 4-bit fields of the `lens` bitmap give the
  ## encoded byte length of each child vertex reference, i.e. child `n`
  ## occupies
  ## ::
  ##   (lens shr (n * 4)) and 15
  ##
  ## bytes, and the references are stored back to back in nibble order.
  ##
  if not vtx.isValid:
    return err(BlobifyNilVertex)
  case vtx.vType:
  of Branch:
    var
      lens = 0u64
      pos = data.len
    for n in 0..15:
      if vtx.bVid[n].isValid:
        let tmp = vtx.bVid[n].blobify()
        lens += uint64(tmp.len) shl (n * 4)
        data &= tmp.data()
    if data.len == pos:
      return err(BlobifyBranchMissingRefs)
    data &= lens.toBytesBE
    data &= [0x08u8]
  of Extension:
    let
      pSegm = vtx.ePfx.toHexPrefix(isleaf = false)
      psLen = pSegm.len.byte
    if psLen == 0 or 33 < psLen:
      return err(BlobifyExtPathOverflow)
    if not vtx.eVid.isValid:
      return err(BlobifyExtMissingRefs)
    data &= vtx.eVid.blobify().data()
    data &= pSegm
    data &= [0x80u8 or psLen]
  of Leaf:
    let
      pSegm = vtx.lPfx.toHexPrefix(isleaf = true)
      psLen = pSegm.len.byte
    if psLen == 0 or 33 < psLen:
      return err(BlobifyLeafPathOverflow)
    vtx.lData.blobifyTo(data)
    data &= pSegm
    data &= [0xC0u8 or psLen]

  ok()
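
# Worked example (added for illustration; values derived from the code above,
# assuming the standard Ethereum hex-prefix encoding for an odd-length
# extension path): an `Extension` vertex pointing at VertexID(7) with the
# single-nibble path `0x3` serialises to
#
#   [0x07,  -- short big-endian child VertexID
#    0x13,  -- hex-prefix encoded path segment
#    0x81]  -- 0x80 marker + path segment length 1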

proc blobify*(vtx: VertexRef): Result[Blob, AristoError] =
  ## Variant of `blobify()`
  var data: Blob
  ? vtx.blobifyTo data
  ok(move(data))

proc blobifyTo*(lSst: SavedState; data: var Blob): Result[void,AristoError] =
  ## Serialise a last saved state record
  data.add lSst.key.data
  data.add lSst.serial.toBytesBE
  data.add @[0x7fu8]
  ok()

proc blobify*(lSst: SavedState): Result[Blob,AristoError] =
  ## Variant of `blobify()`
  var data: Blob
  ? lSst.blobifyTo data
  ok(move(data))

# -------------
proc deblobify(
    data: openArray[byte];
    T: type PayloadRef;
      ): Result[PayloadRef,AristoError] =
  if data.len == 0:
    return ok PayloadRef(pType: RawData)

  let mask = data[^1]
  if (mask and 0x10) > 0: # unstructured payload
    return ok PayloadRef(pType: RawData, rawBlob: data[0 .. ^2])

  if (mask and 0x20) > 0: # Slot storage data
    return ok PayloadRef(
      pType: StoData,
      stoData: ?deblobify(data.toOpenArray(0, data.len - 2), UInt256))

  var
    pAcc = PayloadRef(pType: AccountData)
    start = 0
    lens = uint16.fromBytesBE(data.toOpenArray(data.len - 3, data.len - 2))

  if (mask and 0x01) > 0:
    let len = lens and 0b111
    pAcc.account.nonce = ? load64(data, start, int(len + 1))

  if (mask and 0x02) > 0:
    let len = (lens shr 3) and 0b11111
    pAcc.account.balance = ? load256(data, start, int(len + 1))

  if (mask and 0x04) > 0:
    let len = (lens shr 8) and 0b111
    pAcc.stoID = VertexID(? load64(data, start, int(len + 1)))

  if (mask and 0x08) > 0:
    if data.len() < start + 32:
      return err(DeblobCodeLenUnsupported)
    discard pAcc.account.codeHash.data.copyFrom(data.toOpenArray(start, start + 31))
  else:
    pAcc.account.codeHash = EMPTY_CODE_HASH

  ok(pAcc)
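
when isMainModule:
  block:
    # Illustrative sketch only (not from the original module): a slot-storage
    # payload round-trips through the 0x20-marker encoding above.
    var tmp: array[32, byte]
    tmp[31] = 42
    let pyl = PayloadRef(pType: StoData, stoData: UInt256.fromBytesBE(tmp))
    var buf: Blob
    pyl.blobifyTo(buf)
    doAssert buf == @[byte 42, 0x20]
    let rt = buf.deblobify(PayloadRef).expect("valid payload record")
    doAssert rt.pType == StoData and rt.stoData == pyl.stoData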

proc deblobify*(
    record: openArray[byte];
    T: type VertexRef;
      ): Result[T,AristoError] =
  ## De-serialise a data record encoded with `blobify()`.
  if record.len < 3: # minimum `Leaf` record
    return err(DeblobVtxTooShort)

  ok case record[^1] shr 6:
  of 0: # `Branch` vertex
    if record[^1] != 0x08u8:
      return err(DeblobUnknown)
    if record.len < 11: # at least two edges
      return err(DeblobBranchTooShort)
    let
      aInx = record.len - 9
      aIny = record.len - 2
    var
      offs = 0
      lens = uint64.fromBytesBE record.toOpenArray(aInx, aIny) # bitmap
      vtxList: array[16,VertexID]
      n = 0
    while lens != 0:
      let len = lens and 0b1111
      if len > 0:
        vtxList[n] = VertexID(? load64(record, offs, int(len)))
      inc n
      lens = lens shr 4

    # End `while`
    VertexRef(
      vType: Branch,
      bVid: vtxList)

  of 2: # `Extension` vertex
    let
      sLen = record[^1].int and 0x3f # length of path segment
      rLen = record.len - 1          # `vertexID` + path segm
      pLen = rLen - sLen             # payload length
    if rLen < sLen or pLen < 1:
      return err(DeblobLeafSizeGarbled)
    let (isLeaf, pathSegment) =
      NibblesBuf.fromHexPrefix record.toOpenArray(pLen, rLen - 1)
    if isLeaf:
      return err(DeblobExtGotLeafPrefix)

    var offs = 0
    VertexRef(
      vType: Extension,
      eVid: VertexID(?load64(record, offs, pLen)),
      ePfx: pathSegment)

  of 3: # `Leaf` vertex
    let
      sLen = record[^1].int and 0x3f # length of path segment
      rLen = record.len - 1          # payload + path segment
      pLen = rLen - sLen             # payload length
    if rLen < sLen or pLen < 1:
      return err(DeblobLeafSizeGarbled)
    let (isLeaf, pathSegment) =
      NibblesBuf.fromHexPrefix record.toOpenArray(pLen, rLen - 1)
    if not isLeaf:
      return err(DeblobLeafGotExtPrefix)
    let pyl = ? record.toOpenArray(0, pLen - 1).deblobify(PayloadRef)
    VertexRef(
      vType: Leaf,
      lPfx: pathSegment,
      lData: pyl)

  else:
    return err(DeblobUnknown)
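
when isMainModule:
  block:
    # Illustrative sketch only (not from the original module): a branch vertex
    # with children in slots 0 and 1 packs the two short vertex ids, the 4-bit
    # length bitmap and the 0x08 marker, and decodes back to the same children.
    var kids: array[16, VertexID]
    kids[0] = VertexID(4)
    kids[1] = VertexID(5)
    let
      vtx = VertexRef(vType: Branch, bVid: kids)
      enc = vtx.blobify().expect("serialisable branch")
    doAssert enc == @[byte 4, 5, 0, 0, 0, 0, 0, 0, 0, 0x11, 0x08]
    let rt = enc.deblobify(VertexRef).expect("valid branch record")
    doAssert rt.vType == Branch and rt.bVid == kids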

proc deblobify*(
    data: openArray[byte];
    T: type SavedState;
      ): Result[SavedState,AristoError] =
  ## De-serialise the last saved state data record previously encoded with
  ## `blobify()`.
  if data.len != 41:
    return err(DeblobWrongSize)
  if data[^1] != 0x7f:
    return err(DeblobWrongType)

  ok(SavedState(
    key: Hash256(data: array[32, byte].initCopyFrom(data.toOpenArray(0, 31))),
    serial: uint64.fromBytesBE data.toOpenArray(32, 39)))
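
when isMainModule:
  block:
    # Illustrative sketch only (not from the original module): the saved-state
    # record is the 32 key bytes, the 8-byte big-endian serial and the 0x7f
    # marker, 41 bytes in total.
    let
      lSst = SavedState(key: Hash256(), serial: 1)
      enc = lSst.blobify().expect("serialisable state")
    doAssert enc.len == 41 and enc[^1] == 0x7f
    let rt = enc.deblobify(SavedState).expect("valid state record")
    doAssert rt.key == lSst.key and rt.serial == lSst.serial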

# ------------------------------------------------------------------------------
# End
# ------------------------------------------------------------------------------