nimbus-eth1/stateless/tree_from_witness.nim

# Nimbus
# Copyright (c) 2020-2023 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.
import
  typetraits,
  faststreams/inputs, eth/[common, rlp], stint,
  eth/trie/trie_defs,
  ./witness_types, stew/byteutils, ../nimbus/[constants, db/core_db]

type
  DB = CoreDbRef

  NodeKey = object
    usedBytes: int
    data: array[32, byte]

  AccountAndSlots* = object
    address*: EthAddress
    codeLen*: int
    slots*: seq[StorageSlot]

  TreeBuilder = object
    when defined(useInputStream):
      input: InputStream
    else:
      input: seq[byte]
      pos: int
    db: DB
    root: KeccakHash
    flags: WitnessFlags
    keys*: seq[AccountAndSlots]

# this TreeBuilder supports short node parsing,
# but a block witness should not contain short nodes
# for the account trie. Short RLP nodes only appear in
# storage tries with depth >= 9

# the InputStream is still unstable
# when using large datasets for testing
# or running longer

when defined(useInputStream):
  proc initTreeBuilder*(input: InputStream, db: DB, flags: WitnessFlags): TreeBuilder =
    result.input = input
    result.db = db
    result.root = emptyRlpHash
    result.flags = flags

  proc initTreeBuilder*(input: openArray[byte], db: DB, flags: WitnessFlags): TreeBuilder =
    result.input = memoryInput(input)
    result.db = db
    result.root = emptyRlpHash
    result.flags = flags

else:
  proc initTreeBuilder*(input: openArray[byte], db: DB, flags: WitnessFlags): TreeBuilder =
    result.input = @input
    result.db = db
    result.root = emptyRlpHash
    result.flags = flags

func rootHash*(t: TreeBuilder): KeccakHash {.inline.} =
  t.root

func getDB*(t: TreeBuilder): DB {.inline.} =
  t.db

when defined(useInputStream):
  template readByte(t: var TreeBuilder): byte =
    t.input.read

  template len(t: TreeBuilder): int =
    t.input.len

  template read(t: var TreeBuilder, len: int): auto =
    t.input.read(len)

  template readable(t: var TreeBuilder): bool =
    t.input.readable

  template readable(t: var TreeBuilder, len: int): bool =
    t.input.readable(len)

else:
  template readByte(t: var TreeBuilder): byte =
    let pos = t.pos
    inc t.pos
    t.input[pos]

  template len(t: TreeBuilder): int =
    t.input.len

  template readable(t: var TreeBuilder): bool =
    t.pos < t.input.len

  template readable(t: var TreeBuilder, length: int): bool =
    t.pos + length <= t.input.len

  template read(t: var TreeBuilder, len: int): auto =
    let pos = t.pos
    inc(t.pos, len)
    toOpenArray(t.input, pos, pos+len-1)
proc safeReadByte(t: var TreeBuilder): byte =
  if t.readable:
    result = t.readByte()
  else:
    raise newException(ParsingError, "Cannot read byte from input stream")
when defined(debugHash):
  proc safeReadU32(t: var TreeBuilder): uint32 =
    if t.readable(4):
      result = fromBytesBE(uint32, t.read(4))
    else:
      raise newException(ParsingError, "Cannot read U32 from input stream")
template safeReadEnum(t: var TreeBuilder, T: type): untyped =
  let typ = t.safeReadByte.int
  if typ < low(T).int or typ > high(T).int:
    raise newException(ParsingError, "Wrong " & T.name & " value " & $typ)
  T(typ)

template safeReadBytes(t: var TreeBuilder, length: int, body: untyped) =
  if t.readable(length):
    body
  else:
    raise newException(ParsingError, "Failed to read " & $length & " bytes")

proc readUVarint32(t: var TreeBuilder): uint32 =
  # LEB128 varint encoding
  var shift = 0
  while true:
    let b = t.safeReadByte()
    result = result or ((b and 0x7F).uint32 shl shift)
    if (0x80 and b) == 0:
      break
    inc(shift, 7)
    if shift > 28:
      raise newException(ParsingError, "Failed to parse uvarint32")

proc readUVarint256(t: var TreeBuilder): UInt256 =
  # LEB128 varint encoding
  var shift = 0
  while true:
    let b = t.safeReadByte()
    result = result or ((b and 0x7F).u256 shl shift)
    if (0x80 and b) == 0:
      break
    inc(shift, 7)
    if shift > 252:
      raise newException(ParsingError, "Failed to parse uvarint256")
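
# For reference, LEB128 packs 7 payload bits per byte, least-significant group
# first; a set high bit means another byte follows. E.g. the value 624485
# decodes from [0xE5, 0x8E, 0x26] as 0x65 or (0x0E shl 7) or (0x26 shl 14).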

proc toKeccak(r: var NodeKey, x: openArray[byte]) {.inline.} =
  r.data[0..31] = x[0..31]
  r.usedBytes = 32

proc toKeccak(r: var NodeKey, z: byte, x: openArray[byte]) {.inline.} =
  r.data[0] = z
  r.data[1..31] = x[0..30]
  r.usedBytes = 32
proc append(r: var RlpWriter, n: NodeKey) =
  if n.usedBytes < 32:
    r.append rlpFromBytes(n.data.toOpenArray(0, n.usedBytes-1))
  else:
    r.append n.data.toOpenArray(0, n.usedBytes-1)
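
# Per the MPT rules, a node whose RLP encoding is shorter than 32 bytes is
# embedded in its parent verbatim; otherwise the parent references it by the
# keccak256 of the encoding and the full node is persisted under that hash.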
proc toNodeKey(t: var TreeBuilder, z: openArray[byte]): NodeKey =
  if z.len < 32:
    result.usedBytes = z.len
    result.data[0..z.len-1] = z[0..z.len-1]
  else:
    result.data = keccakHash(z).data
    result.usedBytes = 32
    t.db.kvt.put(result.data, z)

proc toNodeKey(z: openArray[byte]): NodeKey =
  if z.len >= 32:
    raise newException(ParsingError, "Failed to convert short rlp to NodeKey")
  result.usedBytes = z.len
  result.data[0..z.len-1] = z[0..z.len-1]
proc forceSmallNodeKeyToHash(t: var TreeBuilder, r: NodeKey): NodeKey =
  let hash = keccakHash(r.data.toOpenArray(0, r.usedBytes-1))
  t.db.kvt.put(hash.data, r.data.toOpenArray(0, r.usedBytes-1))
  result.data = hash.data
  result.usedBytes = 32

proc writeCode(t: var TreeBuilder, code: openArray[byte]): Hash256 =
  result = keccakHash(code)
  put(t.db.kvt, result.data, code)
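
# the node parsers are mutually recursive, hence the forward declarations below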
proc branchNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey {.gcsafe.}
proc extensionNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey {.gcsafe.}
proc accountNode(t: var TreeBuilder, depth: int): NodeKey {.gcsafe.}
proc accountStorageLeafNode(t: var TreeBuilder, depth: int): NodeKey {.gcsafe.}
proc hashNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey {.gcsafe.}
proc treeNode(t: var TreeBuilder, depth: int = 0, storageMode = false): NodeKey {.gcsafe.}
proc buildTree*(t: var TreeBuilder): KeccakHash
  {.raises: [ParsingError, Exception].} =
  let version = t.safeReadByte().int
  if version != BlockWitnessVersion.int:
    raise newException(ParsingError, "Wrong block witness version")

  # one or more trees
  # we only parse one tree here
  let metadataType = t.safeReadByte().int
  if metadataType != MetadataNothing.int:
    raise newException(ParsingError, "This tree builder supports no metadata")

  var res = treeNode(t)
  if res.usedBytes != 32:
    raise newException(ParsingError, "buildTree should produce a hash")

  result.data = res.data

# once the block witness spec specifies how to split the big tree into
# chunks, modify this buildForest into a chunked witness tree builder
proc buildForest*(
    t: var TreeBuilder): seq[KeccakHash]
    {.raises: [ParsingError, Exception].} =
  let version = t.safeReadByte().int
  if version != BlockWitnessVersion.int:
    raise newException(ParsingError, "Wrong block witness version")

  while t.readable:
    let metadataType = t.safeReadByte().int
    if metadataType != MetadataNothing.int:
      raise newException(ParsingError, "This tree builder supports no metadata")

    var res = treeNode(t)
    if res.usedBytes != 32:
      raise newException(ParsingError, "buildTree should produce a hash")

    result.add KeccakHash(data: res.data)
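
# A minimal usage sketch (assuming a serialized witness `witnessBytes` and a
# CoreDbRef `db`; those two names are illustrative, the procs are the ones above):
#
#   var tb = initTreeBuilder(witnessBytes, db, {wfEIP170})
#   let stateRoot = tb.buildTree()
#   doAssert stateRoot == expectedStateRoot
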
proc treeNode(t: var TreeBuilder, depth: int, storageMode = false): NodeKey =
  if depth > 64:
    raise newException(ParsingError, "invalid trie structure")

  let nodeType = safeReadEnum(t, TrieNodeType)
  case nodeType
  of BranchNodeType: result = t.branchNode(depth, storageMode)
  of ExtensionNodeType: result = t.extensionNode(depth, storageMode)
  of AccountNodeType:
    if storageMode:
      # parse account storage leaf node
      result = t.accountStorageLeafNode(depth)
    else:
      result = t.accountNode(depth)
  of HashNodeType: result = t.hashNode(depth, storageMode)

  if depth == 0 and result.usedBytes < 32:
    result = t.forceSmallNodeKeyToHash(result)

proc branchNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
  if depth >= 64:
    raise newException(ParsingError, "invalid trie structure")

  let mask = constructBranchMask(t.safeReadByte, t.safeReadByte)
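  # the two mask bytes just read form a 16-bit bitmap: bit i set means child i
  # is present in the witness; bit 16 (the value slot) must never be set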

  when defined(debugDepth):
    let readDepth = t.safeReadByte().int
    doAssert(readDepth == depth, "branchNode " & $readDepth & " vs. " & $depth)

  when defined(debugHash):
    var hash: NodeKey
    toKeccak(hash, t.read(32))

  var r = initRlpList(17)

  for i in 0 ..< 16:
    if mask.branchMaskBitIsSet(i):
      r.append t.treeNode(depth+1, storageMode)
    else:
      r.append ""

  if branchMaskBitIsSet(mask, 16):
    raise newException(ParsingError, "The 17th elem of branch node should be empty")

  # 17th elem should always be empty
  r.append ""

  result = t.toNodeKey(r.finish)

  when defined(debugHash):
    if result != hash:
      debugEcho "DEPTH: ", depth
      debugEcho "result: ", result.data.toHex, " vs. ", hash.data.toHex
func hexPrefixExtension(r: var RlpWriter, x: openArray[byte], nibblesLen: int) =
  # extension hexPrefix
  doAssert(nibblesLen >= 1 and nibblesLen <= 64)
  var bytes: array[33, byte]
  let last = nibblesLen div 2
  if (nibblesLen mod 2) == 0: # even
    bytes[0] = 0.byte
    var i = 1
    for y in x:
      bytes[i] = y
      inc i
  else: # odd
    bytes[0] = 0b0001_0000.byte or (x[0] shr 4)
    for i in 1..last:
      bytes[i] = (x[i-1] shl 4) or (x[i] shr 4)

  r.append toOpenArray(bytes, 0, last)

func hexPrefixLeaf(r: var RlpWriter, x: openArray[byte], depth: int) =
  # leaf hexPrefix
  doAssert(depth >= 0 and depth <= 64)
  let nibblesLen = 64 - depth
  var bytes: array[33, byte]
  var start = depth div 2
  if (nibblesLen mod 2) == 0: # even
    bytes[0] = 0b0010_0000.byte
  else: # odd
    bytes[0] = 0b0011_0000.byte or (x[start] and 0x0F)
    inc start
  var i = 1
  for z in start..31:
    bytes[i] = x[z]
    inc i

  r.append toOpenArray(bytes, 0, nibblesLen div 2)
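
# Hex-prefix recap: the flag nibble encodes oddness (bit 0) and leaf-ness
# (bit 1). E.g. an odd 3-nibble leaf path [a, b, c] packs into the two bytes
# 0x3a, 0xbc, while an even 2-nibble extension path [a, b] packs into 0x00, 0xab.
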
proc extensionNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
  if depth >= 63:
    raise newException(ParsingError, "invalid trie structure")

  let nibblesLen = t.safeReadByte().int
  if nibblesLen > 64 or nibblesLen < 1:
    raise newException(ParsingError, "nibblesLen should be between 1..64")

  var r = initRlpList(2)
  let pathLen = nibblesLen div 2 + nibblesLen mod 2
  safeReadBytes(t, pathLen):
    r.hexPrefixExtension(t.read(pathLen), nibblesLen)

  when defined(debugDepth):
    let readDepth = t.safeReadByte().int
    doAssert(readDepth == depth, "extensionNode " & $readDepth & " vs. " & $depth)

  when defined(debugHash):
    var hash: NodeKey
    toKeccak(hash, t.read(32))

  if nibblesLen + depth > 64 or nibblesLen + depth < 1:
    raise newException(ParsingError, "depth should be between 1..64")

  let nodeType = safeReadEnum(t, TrieNodeType)
  case nodeType
  of BranchNodeType: r.append t.branchNode(depth + nibblesLen, storageMode)
  of HashNodeType: r.append t.hashNode(depth, storageMode)
  else: raise newException(ParsingError, "wrong type during parsing child of extension node")

  result = t.toNodeKey(r.finish)

  when defined(debugHash):
    if result != hash:
      debugEcho "DEPTH: ", depth
    doAssert(result == hash, "EXT HASH DIFF " & result.data.toHex & " vs. " & hash.data.toHex)

func toAddress(x: openArray[byte]): EthAddress =
  result[0..19] = x[0..19]
proc readAddress(t: var TreeBuilder): Hash256 =
  safeReadBytes(t, 20):
    let address = toAddress(t.read(20))
    result = keccakHash(address)
    t.keys.add AccountAndSlots(address: address)

proc readCodeLen(t: var TreeBuilder): int =
  let codeLen = t.readUVarint32()
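  # EIP-170 caps deployed code at 24576 (0x6000) bytes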
  if wfEIP170 in t.flags and codeLen > EIP170_MAX_CODE_SIZE:
    raise newException(ContractCodeError, "code len exceeds EIP170 code size limit: " & $codeLen)

  t.keys[^1].codeLen = codeLen.int
  result = codeLen.int

proc readHashNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
  let nodeType = safeReadEnum(t, TrieNodeType)
  if nodeType != HashNodeType:
    raise newException(ParsingError, "hash node expected but got " & $nodeType)
  result = t.hashNode(depth, storageMode)

proc readByteCode(t: var TreeBuilder, acc: var Account, depth: int) =
  let bytecodeType = safeReadEnum(t, BytecodeType)
  case bytecodeType
  of CodeTouched:
    let codeLen = t.readCodeLen()
    safeReadBytes(t, codeLen):
      acc.codeHash = t.writeCode(t.read(codeLen))
  of CodeUntouched:
    # readCodeLen already saves the codeLen
    # along with the recovered address,
    # so we can discard it here
    discard t.readCodeLen()

    let codeHash = t.readHashNode(depth, false)
    doAssert(codeHash.usedBytes == 32)
    acc.codeHash.data = codeHash.data
proc accountNode(t: var TreeBuilder, depth: int): NodeKey =
  if depth >= 65:
    raise newException(ParsingError, "invalid trie structure")

  when defined(debugHash):
    let len = t.safeReadU32().int
    let node = @(t.read(len))
    let nodeKey = t.toNodeKey(node)

  when defined(debugDepth):
    let readDepth = t.safeReadByte().int
    doAssert(readDepth == depth, "accountNode " & $readDepth & " vs. " & $depth)

  let accountType = safeReadEnum(t, AccountType)
  let addressHash = t.readAddress()

  var r = initRlpList(2)
  r.hexPrefixLeaf(addressHash.data, depth)

  var acc = Account(
    balance: t.readUVarint256(),
    nonce: t.readUVarint256().truncate(AccountNonce)
  )
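  # note: the witness carries balance before nonce, while the account RLP
  # built below uses the canonical order nonce, balance, storageRoot, codeHash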

  case accountType
  of SimpleAccountType:
    acc.codeHash = blankStringHash
    acc.storageRoot = emptyRlpHash
  of ExtendedAccountType:
    t.readByteCode(acc, depth)

    # switch to account storage parsing mode
    # and reset the depth
    let storageRoot = t.treeNode(0, storageMode = true)
    doAssert(storageRoot.usedBytes == 32)
    acc.storageRoot.data = storageRoot.data

  r.append rlp.encode(acc)
  let nodeRes = r.finish
  result = t.toNodeKey(nodeRes)

  when defined(debugHash):
    if result != nodeKey:
      debugEcho "Address: ", t.keys[^1].address.toHex
      debugEcho "addressHash: ", addressHash.data.toHex
      debugEcho "depth: ", depth
      debugEcho "result.usedBytes: ", result.usedBytes
      debugEcho "nodeKey.usedBytes: ", nodeKey.usedBytes

      var rlpa = rlpFromBytes(node)
      var rlpb = rlpFromBytes(nodeRes)
      debugEcho "Expected: ", inspect(rlpa)
      debugEcho "Actual: ", inspect(rlpb)

      var a = rlpa.listElem(1).toBytes.decode(Account)
      var b = rlpb.listElem(1).toBytes.decode(Account)
      debugEcho "Expected: ", a
      debugEcho "Actual: ", b

    doAssert(result == nodeKey, "account node parsing error")

func toStorageSlot(x: openArray[byte]): StorageSlot =
  result[0..31] = x[0..31]
proc readStorageSlot(t: var TreeBuilder): Hash256 =
  safeReadBytes(t, 32):
    let slot = toStorageSlot(t.read(32))
    result = keccakHash(slot)
    t.keys[^1].slots.add slot
proc accountStorageLeafNode(t: var TreeBuilder, depth: int): NodeKey =
  if depth >= 65:
    raise newException(ParsingError, "invalid trie structure")

  when defined(debugHash):
    let len = t.safeReadU32().int
    let node = @(t.read(len))
    let nodeKey = t.toNodeKey(node)

  when defined(debugDepth):
    let readDepth = t.safeReadByte().int
    doAssert(readDepth == depth, "accountStorageLeafNode " & $readDepth & " vs. " & $depth)

  var r = initRlpList(2)
  let slotHash = t.readStorageSlot()
  r.hexPrefixLeaf(slotHash.data, depth)
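  # the slot value arrives as a full 32-byte big-endian word; rlp.encode on the
  # UInt256 below trims leading zeros, as the canonical trie encoding expects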
  safeReadBytes(t, 32):
    let val = UInt256.fromBytesBE(t.read(32))
    r.append rlp.encode(val)

  result = t.toNodeKey(r.finish)

  when defined(debugHash):
    doAssert(result == nodeKey, "account storage leaf node parsing error")
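
# In storage mode at depth >= 9 a child reference may be a short (< 32 byte)
# RLP node rather than a 32-byte hash. The first byte disambiguates: a
# ShortRlpPrefix byte introduces a length-prefixed short node, and a length of
# zero means the data is actually a hash whose first byte collided with the marker.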
proc hashNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
  if storageMode and depth >= 9:
    let z = t.safeReadByte()
    if z == ShortRlpPrefix:
      let rlpLen = t.safeReadByte().int
      if rlpLen == 0:
        safeReadBytes(t, 31):
          result.toKeccak(0, t.read(31))
      else:
        safeReadBytes(t, rlpLen):
          result = toNodeKey(t.read(rlpLen))
    else:
      safeReadBytes(t, 31):
        result.toKeccak(z, t.read(31))
  else:
    safeReadBytes(t, 32):
      result.toKeccak(t.read(32))