2020-04-21 11:04:18 +00:00
|
|
|
import
|
2020-04-29 09:59:38 +00:00
|
|
|
typetraits,
|
2020-04-22 04:21:39 +00:00
|
|
|
faststreams/input_stream, eth/[common, rlp], stint, stew/endians2,
|
|
|
|
eth/trie/[db, trie_defs], nimcrypto/[keccak, hash],
|
2020-04-28 07:27:55 +00:00
|
|
|
./witness_types, stew/byteutils, ../nimbus/constants
|
2020-04-20 11:23:22 +00:00
|
|
|
|
|
|
|
type
  # Alias for the trie database into which rebuilt nodes are persisted.
  DB = TrieDatabaseRef

  NodeKey = object
    # Number of meaningful bytes in `data`: 32 for a full hash key,
    # fewer for a short (embedded) RLP node.
    usedBytes: int
    data: array[32, byte]

  # An account address recovered from the witness, together with the
  # code length and the storage slot keys seen for that account.
  AccountAndSlots* = object
    address*: EthAddress
    codeLen*: int
    slots*: seq[StorageSlot]

  TreeBuilder = object
    when defined(useInputStream):
      input: InputStream
    else:
      # raw witness bytes plus a manual read cursor
      input: seq[byte]
      pos: int
    db: DB
    root: KeccakHash
    flags: WitnessFlags
    keys: seq[AccountAndSlots]
|
|
|
|
|
|
|
|
# this TreeBuilder supports short-node parsing,
# but a block witness should not contain short nodes
|
2020-04-22 04:21:39 +00:00
|
|
|
|
2020-04-28 04:59:58 +00:00
|
|
|
# the InputStream is still unstable
# when using a large dataset for testing
# or running for longer
|
|
|
|
|
|
|
|
when defined(useInputStream):
  proc initTreeBuilder*(input: InputStream, db: DB, flags: WitnessFlags): TreeBuilder =
    ## Construct a TreeBuilder that reads the witness from a stream.
    result.input = input
    result.db = db
    result.root = emptyRlpHash
    result.flags = flags

  proc initTreeBuilder*(input: openArray[byte], db: DB, flags: WitnessFlags): TreeBuilder =
    ## Construct a TreeBuilder over an in-memory witness buffer,
    ## wrapped in a memory input stream.
    result.input = memoryInput(input)
    result.db = db
    result.root = emptyRlpHash
    result.flags = flags
else:
  proc initTreeBuilder*(input: openArray[byte], db: DB, flags: WitnessFlags): TreeBuilder =
    ## Construct a TreeBuilder over a copy of an in-memory witness buffer.
    result.input = @input
    result.db = db
    result.root = emptyRlpHash
    result.flags = flags
|
2020-04-24 09:04:43 +00:00
|
|
|
|
2020-04-22 04:21:39 +00:00
|
|
|
func rootHash*(t: TreeBuilder): KeccakHash {.inline.} =
  ## The root hash produced by the last buildTree run.
  t.root
|
|
|
|
|
2020-05-03 01:11:04 +00:00
|
|
|
func getDB*(t: TreeBuilder): DB {.inline.} =
  ## The trie database this builder writes rebuilt nodes into.
  t.db
|
|
|
|
|
2020-04-28 04:59:58 +00:00
|
|
|
when defined(useInputStream):
  # Stream-backed primitive readers: delegate directly to faststreams.
  template readByte(t: var TreeBuilder): byte =
    t.input.read

  template len(t: TreeBuilder): int =
    t.input.len

  template read(t: var TreeBuilder, len: int): auto =
    t.input.read(len)

  template readable(t: var TreeBuilder): bool =
    t.input.readable

  template readable(t: var TreeBuilder, len: int): bool =
    t.input.readable(len)

else:
  # seq-backed primitive readers: manual cursor `pos` over `input`.
  template readByte(t: var TreeBuilder): byte =
    # capture position first so the template expression yields the byte
    # at the pre-increment cursor
    let pos = t.pos
    inc t.pos
    t.input[pos]

  template len(t: TreeBuilder): int =
    t.input.len

  template readable(t: var TreeBuilder): bool =
    t.pos < t.input.len

  template readable(t: var TreeBuilder, length: int): bool =
    t.pos + length <= t.input.len

  template read(t: var TreeBuilder, len: int): auto =
    # returns a view over the next `len` bytes and advances the cursor
    let pos = t.pos
    inc(t.pos, len)
    toOpenArray(t.input, pos, pos+len-1)
|
2020-04-23 02:56:35 +00:00
|
|
|
|
2020-04-29 09:22:39 +00:00
|
|
|
proc safeReadByte(t: var TreeBuilder): byte =
  ## Read a single byte from the input, raising IOError on exhaustion.
  if not t.readable:
    raise newException(IOError, "Cannot read byte from input stream")
  result = t.readByte()
|
2020-04-20 11:23:22 +00:00
|
|
|
|
2020-04-29 09:22:39 +00:00
|
|
|
proc safeReadU32(t: var TreeBuilder): uint32 =
  ## Read a big-endian uint32, raising IOError when fewer than
  ## four bytes remain in the input.
  if not t.readable(4):
    raise newException(IOError, "Cannot read U32 from input stream")
  result = fromBytesBE(uint32, t.read(4))
|
2020-04-20 11:23:22 +00:00
|
|
|
|
2020-04-29 09:59:38 +00:00
|
|
|
template safeReadEnum(t: var TreeBuilder, T: type): untyped =
  # Read one byte and convert it to enum `T`; raises ParsingError when the
  # byte falls outside the enum's ordinal range. Expands to an expression
  # whose value is the converted enum.
  let typ = t.safeReadByte.int
  if typ < low(T).int or typ > high(T).int:
    raise newException(ParsingError, "Wrong " & T.name & " value " & $typ)
  T(typ)
|
|
|
|
|
|
|
|
template safeReadBytes(t: var TreeBuilder, length: int, body: untyped) =
  # Guard template: verify that `length` bytes are available before
  # running `body`; raise ParsingError otherwise.
  if not t.readable(length):
    raise newException(ParsingError, "Failed when try to read " & $length & " bytes")
  body
|
|
|
|
|
2020-04-22 11:04:19 +00:00
|
|
|
proc toKeccak(r: var NodeKey, x: openArray[byte]) {.inline.} =
  ## Copy a 32-byte hash into `r`, marking all 32 bytes as used.
  r.data[0..31] = x[0..31]
  r.usedBytes = 32
|
2020-04-20 11:23:22 +00:00
|
|
|
|
2020-04-22 11:04:19 +00:00
|
|
|
proc append(r: var RlpWriter, n: NodeKey) =
  ## RLP-append a node key: a short node (< 32 bytes) is inlined as
  ## already-encoded RLP; a full 32-byte key is appended as a byte string.
  if n.usedBytes < 32:
    r.append rlpFromBytes(n.data.toOpenArray(0, n.usedBytes-1))
  else:
    r.append n.data.toOpenArray(0, n.usedBytes-1)
|
|
|
|
|
2020-04-29 09:22:39 +00:00
|
|
|
proc toNodeKey(t: var TreeBuilder, z: openArray[byte]): NodeKey =
  ## Turn a serialized trie node into its key. Short nodes (< 32 bytes)
  ## are embedded verbatim; longer nodes are keccak-hashed and the
  ## serialized bytes persisted to the database under that hash.
  if z.len < 32:
    result.usedBytes = z.len
    result.data[0..z.len-1] = z[0..z.len-1]
  else:
    result.data = keccak(z).data
    result.usedBytes = 32
    t.db.put(result.data, z)
|
2020-04-20 11:23:22 +00:00
|
|
|
|
2020-04-29 09:59:38 +00:00
|
|
|
proc forceSmallNodeKeyToHash(t: var TreeBuilder, r: NodeKey): NodeKey =
  ## Promote a short (embedded) node key to a full 32-byte hash key:
  ## hash the node's bytes, persist them under that hash, and return
  ## the hash as a NodeKey.
  let h = keccak(r.data.toOpenArray(0, r.usedBytes-1))
  t.db.put(h.data, r.data.toOpenArray(0, r.usedBytes-1))
  result = NodeKey(data: h.data, usedBytes: 32)
|
|
|
|
|
2020-04-28 07:27:55 +00:00
|
|
|
proc writeCode(t: var TreeBuilder, code: openArray[byte]): Hash256 =
  ## Persist contract bytecode into the database keyed by its keccak
  ## hash, returning that hash.
  result = keccak(code)
  t.db.put(result.data, code)
|
|
|
|
|
2020-04-28 14:06:37 +00:00
|
|
|
# Forward declarations: the per-node-type parsers are mutually recursive
# through treeNode.
proc branchNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey
proc extensionNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey
proc accountNode(t: var TreeBuilder, depth: int): NodeKey
proc accountStorageLeafNode(t: var TreeBuilder, depth: int): NodeKey
proc hashNode(t: var TreeBuilder): NodeKey
proc treeNode(t: var TreeBuilder, depth: int = 0, storageMode = false): NodeKey
|
2020-04-20 11:23:22 +00:00
|
|
|
|
2020-05-03 01:47:14 +00:00
|
|
|
proc buildTree*(t: var TreeBuilder): KeccakHash
  {.raises: [ContractCodeError, Defect, IOError, ParsingError, Exception].} =
  ## Parse a single witness tree from the input, persisting every rebuilt
  ## node to the database, and return the tree's 32-byte root hash.
  ## Raises ParsingError on version/metadata mismatch or malformed input.
  let version = t.safeReadByte().int
  if version != BlockWitnessVersion.int:
    raise newException(ParsingError, "Wrong block witness version")

  # one or more trees

  # we only parse one tree here
  let metadataType = t.safeReadByte().int
  if metadataType != MetadataNothing.int:
    raise newException(ParsingError, "This tree builder support no metadata")

  var res = treeNode(t)
  if res.usedBytes != 32:
    raise newException(ParsingError, "Buildtree should produce hash")

  result.data = res.data
|
|
|
|
|
2020-05-06 04:45:21 +00:00
|
|
|
# after the block witness spec mention how to split the big tree into
# chunks, modify this buildForest into chunked witness tree builder
proc buildForest*(t: var TreeBuilder): seq[KeccakHash]
  {.raises: [ContractCodeError, Defect, IOError, ParsingError, Exception].} =
  ## Parse every witness tree in the input (until the input is exhausted)
  ## and return the root hash of each tree, in order.
  let version = t.safeReadByte().int
  if version != BlockWitnessVersion.int:
    raise newException(ParsingError, "Wrong block witness version")

  while t.readable:
    let metadataType = t.safeReadByte().int
    if metadataType != MetadataNothing.int:
      raise newException(ParsingError, "This tree builder support no metadata")

    var res = treeNode(t)
    if res.usedBytes != 32:
      raise newException(ParsingError, "Buildtree should produce hash")

    result.add KeccakHash(data: res.data)
|
|
|
|
|
2020-05-05 13:07:38 +00:00
|
|
|
proc treeNode(t: var TreeBuilder, depth: int, storageMode = false): NodeKey =
  ## Parse one trie node at `depth` and dispatch on its type tag.
  ## In `storageMode` an AccountNodeType tag denotes a storage leaf
  ## instead of an account leaf.
  assert(depth < 64)
  let nodeType = safeReadEnum(t, TrieNodeType)
  case nodeType
  of BranchNodeType: result = t.branchNode(depth, storageMode)
  of ExtensionNodeType: result = t.extensionNode(depth, storageMode)
  of AccountNodeType:
    if storageMode:
      # parse account storage leaf node
      result = t.accountStorageLeafNode(depth)
    else:
      result = t.accountNode(depth)
  of HashNodeType: result = t.hashNode()

  # a root node must always be referenced by a full hash
  if depth == 0 and result.usedBytes < 32:
    result = t.forceSmallNodeKeyToHash(result)
|
2020-04-29 09:22:39 +00:00
|
|
|
|
2020-04-28 14:06:37 +00:00
|
|
|
proc branchNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
  ## Parse a 17-element branch node. A 2-byte mask selects which of the
  ## 16 child slots are present; absent slots are RLP-encoded as empty.
  assert(depth < 64)
  let mask = constructBranchMask(t.safeReadByte, t.safeReadByte)

  when defined(debugDepth):
    let readDepth = t.safeReadByte().int
    doAssert(readDepth == depth, "branchNode " & $readDepth & " vs. " & $depth)

  when defined(debugHash):
    var hash: NodeKey
    toKeccak(hash, t.read(32))

  var r = initRlpList(17)

  for i in 0 ..< 16:
    if mask.branchMaskBitIsSet(i):
      # recurse into the present child
      r.append t.treeNode(depth+1, storageMode)
    else:
      r.append ""

  if branchMaskBitIsSet(mask, 16):
    raise newException(ParsingError, "The 17th elem of branch node should empty")

  # 17th elem should always empty
  r.append ""

  result = t.toNodeKey(r.finish)

  when defined(debugHash):
    if result != hash:
      debugEcho "DEPTH: ", depth
      debugEcho "result: ", result.data.toHex, " vs. ", hash.data.toHex
|
2020-04-22 04:21:39 +00:00
|
|
|
|
2020-04-28 07:27:55 +00:00
|
|
|
func hexPrefix(r: var RlpWriter, x: openArray[byte], nibblesLen: int, isLeaf: static[bool] = false) =
  ## Append the hex-prefix (HP) encoding of a nibble path to the RLP
  ## writer. `x` holds the packed nibbles, `nibblesLen` how many of them
  ## are meaningful; `isLeaf` selects the leaf vs. extension flag bits.
  doAssert(nibblesLen >= 1 and nibblesLen <= 64)
  var bytes: array[33, byte]  # 1 prefix byte + up to 32 path bytes
  if (nibblesLen mod 2) == 0: # even
    # even path: the prefix byte carries only the leaf/extension flag
    when isLeaf:
      bytes[0] = 0b0010_0000.byte
    else:
      bytes[0] = 0.byte
    var i = 1
    for y in x:
      bytes[i] = y
      inc i
  else: # odd
    # odd path: the first path nibble shares the prefix byte
    when isLeaf:
      bytes[0] = 0b0011_0000.byte or (x[0] shr 4)
    else:
      bytes[0] = 0b0001_0000.byte or (x[0] shr 4)
    var last = nibblesLen div 2
    for i in 1..last:
      # shift the remaining nibbles left by one nibble position
      bytes[i] = (x[i-1] shl 4) or (x[i] shr 4)

  r.append toOpenArray(bytes, 0, nibblesLen div 2)
|
2020-04-22 04:21:39 +00:00
|
|
|
|
2020-04-28 14:06:37 +00:00
|
|
|
proc extensionNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
  ## Parse an extension node: a hex-prefixed nibble path followed by a
  ## single child, which must be a branch node or a hash node.
  assert(depth < 63)
  let nibblesLen = t.safeReadByte().int
  assert(nibblesLen < 65)
  var r = initRlpList(2)
  # packed path length in bytes, rounded up for an odd nibble count
  let pathLen = nibblesLen div 2 + nibblesLen mod 2
  safeReadBytes(t, pathLen):
    r.hexPrefix(t.read(pathLen), nibblesLen)

  when defined(debugDepth):
    let readDepth = t.safeReadByte().int
    doAssert(readDepth == depth, "extensionNode " & $readDepth & " vs. " & $depth)

  when defined(debugHash):
    var hash: NodeKey
    toKeccak(hash, t.read(32))

  assert(depth + nibblesLen < 65)
  let nodeType = safeReadEnum(t, TrieNodeType)
  case nodeType
  of BranchNodeType: r.append t.branchNode(depth + nibblesLen, storageMode)
  of HashNodeType: r.append t.hashNode()
  else: raise newException(ParsingError, "wrong type during parsing child of extension node")

  result = t.toNodeKey(r.finish)

  when defined(debugHash):
    if result != hash:
      debugEcho "DEPTH: ", depth
    doAssert(result == hash, "EXT HASH DIFF " & result.data.toHex & " vs. " & hash.data.toHex)
|
2020-04-20 11:23:22 +00:00
|
|
|
|
2020-05-05 06:50:31 +00:00
|
|
|
func toAddress(x: openArray[byte]): EthAddress =
  ## Copy the first 20 bytes of `x` into an EthAddress.
  ## Fix: the original body read `result[0..19] = result[0..19]` — a
  ## self-assignment that discarded the input and always returned a
  ## zeroed address.
  result[0..19] = x[0..19]
|
|
|
|
|
|
|
|
proc readAddress(t: var TreeBuilder) =
  ## Read a 20-byte address from the witness and start a new
  ## AccountAndSlots entry for it in `t.keys`.
  safeReadBytes(t, 20):
    t.keys.add AccountAndSlots(address: toAddress(t.read(20)))
|
|
|
|
|
2020-05-05 13:07:38 +00:00
|
|
|
proc readCodeLen(t: var TreeBuilder): int =
  ## Read a 32-bit code length, enforce the EIP-170 size limit when the
  ## wfEIP170 flag is set, and record the length on the current account.
  let rawLen = t.safeReadU32()
  if wfEIP170 in t.flags and rawLen > EIP170_CODE_SIZE_LIMIT:
    raise newException(ContractCodeError, "code len exceed EIP170 code size limit: " & $rawLen)
  result = rawLen.int
  t.keys[^1].codeLen = result
|
|
|
|
|
|
|
|
proc readHashNode(t: var TreeBuilder): NodeKey =
  ## Read the next node, requiring it to be a hash node;
  ## raises ParsingError for any other node type.
  let nodeType = safeReadEnum(t, TrieNodeType)
  if nodeType != HashNodeType:
    raise newException(ParsingError, "hash node expected but got " & $nodeType)
  result = t.hashNode()
|
|
|
|
|
2020-04-22 11:04:19 +00:00
|
|
|
proc accountNode(t: var TreeBuilder, depth: int): NodeKey =
  ## Parse an account leaf: remaining path, address, balance and nonce,
  ## then code and storage depending on the account type. The rebuilt
  ## leaf is a 2-element RLP list [hexPrefixPath, rlp(Account)].
  assert(depth < 65)

  when defined(debugHash):
    # debug builds carry the expected serialized node for comparison
    let len = t.safeReadU32().int
    let node = @(t.read(len))
    let nodeKey = t.toNodeKey(node)

  when defined(debugDepth):
    let readDepth = t.safeReadByte().int
    doAssert(readDepth == depth, "accountNode " & $readDepth & " vs. " & $depth)

  let accountType = safeReadEnum(t, AccountType)
  # a leaf consumes all nibbles not consumed by its ancestors
  let nibblesLen = 64 - depth
  var r = initRlpList(2)
  let pathLen = nibblesLen div 2 + nibblesLen mod 2
  safeReadBytes(t, pathLen):
    r.hexPrefix(t.read(pathLen), nibblesLen, true)

  t.readAddress()

  safeReadBytes(t, 64):
    var acc = Account(
      balance: UInt256.fromBytesBE(t.read(32), false),
      # TODO: why nonce must be 32 bytes, isn't 64 bit uint enough?
      nonce: UInt256.fromBytesBE(t.read(32), false).truncate(AccountNonce)
    )

    case accountType
    of SimpleAccountType:
      # no code, no storage
      acc.codeHash = blankStringHash
      acc.storageRoot = emptyRlpHash
    of ExtendedAccountType:
      # inline bytecode follows, then the storage sub-trie
      let codeLen = t.readCodeLen()
      safeReadBytes(t, codeLen):
        acc.codeHash = t.writeCode(t.read(codeLen))

      # switch to account storage parsing mode
      # and reset the depth
      let storageRoot = t.treeNode(0, storageMode = true)
      doAssert(storageRoot.usedBytes == 32)
      acc.storageRoot.data = storageRoot.data
    of CodeUntouched:
      # code was not executed: only its hash is in the witness
      let codeHash = t.readHashNode()
      doAssert(codeHash.usedBytes == 32)
      acc.codeHash.data = codeHash.data

      # readCodeLen already save the codeLen
      # along with recovered address
      # we could discard it here
      discard t.readCodeLen()

      let storageRoot = t.treeNode(0, storageMode = true)
      doAssert(storageRoot.usedBytes == 32)
      acc.storageRoot.data = storageRoot.data

    r.append rlp.encode(acc)

  let nodeRes = r.finish
  result = t.toNodeKey(nodeRes)

  when defined(debugHash):
    if result != nodeKey:
      debugEcho "result.usedBytes: ", result.usedBytes
      debugEcho "nodeKey.usedBytes: ", nodeKey.usedBytes
      var rlpa = rlpFromBytes(node)
      var rlpb = rlpFromBytes(nodeRes)
      debugEcho "Expected: ", inspect(rlpa)
      debugEcho "Actual: ", inspect(rlpb)
      var a = rlpa.listElem(1).toBytes.decode(Account)
      var b = rlpb.listElem(1).toBytes.decode(Account)
      debugEcho "Expected: ", a
      debugEcho "Actual: ", b

    doAssert(result == nodeKey, "account node parsing error")
|
2020-04-20 11:23:22 +00:00
|
|
|
|
2020-05-05 06:50:31 +00:00
|
|
|
func toStorageSlot(x: openArray[byte]): StorageSlot =
  ## Copy the first 32 bytes of `x` into a StorageSlot.
  ## Fix: the original body read `result[0..31] = result[0..31]` — a
  ## self-assignment that discarded the input and always returned a
  ## zeroed slot key.
  result[0..31] = x[0..31]
|
|
|
|
|
|
|
|
proc readStorageSlot(t: var TreeBuilder) =
  ## Read a 32-byte storage slot key and append it to the slot list
  ## of the most recently recorded account.
  safeReadBytes(t, 32):
    t.keys[^1].slots.add toStorageSlot(t.read(32))
|
|
|
|
|
2020-04-22 11:04:19 +00:00
|
|
|
proc accountStorageLeafNode(t: var TreeBuilder, depth: int): NodeKey =
  ## Parse a storage leaf: remaining path, the 32-byte slot key,
  ## and the 32-byte slot value, rebuilt as [hexPrefixPath, rlp(value)].
  assert(depth < 65)

  when defined(debugHash):
    # debug builds carry the expected serialized node for comparison
    let len = t.safeReadU32().int
    let node = @(t.read(len))
    let nodeKey = t.toNodeKey(node)

  when defined(debugDepth):
    let readDepth = t.safeReadByte().int
    doAssert(readDepth == depth, "accountNode " & $readDepth & " vs. " & $depth)

  # a leaf consumes all nibbles not consumed by its ancestors
  let nibblesLen = 64 - depth
  var r = initRlpList(2)
  let pathLen = nibblesLen div 2 + nibblesLen mod 2
  safeReadBytes(t, pathLen):
    r.hexPrefix(t.read(pathLen), nibblesLen, true)

  t.readStorageSlot()

  safeReadBytes(t, 32):
    let val = UInt256.fromBytesBE(t.read(32))
    r.append rlp.encode(val)
    result = t.toNodeKey(r.finish)

  when defined(debugHash):
    doAssert(result == nodeKey, "account storage leaf node parsing error")
|
2020-05-05 06:50:31 +00:00
|
|
|
|
2020-04-22 11:04:19 +00:00
|
|
|
proc hashNode(t: var TreeBuilder): NodeKey =
  ## Read a raw 32-byte node hash from the witness into a NodeKey.
  safeReadBytes(t, 32):
    result.toKeccak(t.read(32))
|