witness builder major transformation, support multiproof
parent c3548d63a5, commit e9191fefa1
@@ -1,20 +1,19 @@
 import
-  eth/common, eth/trie/[db, nibbles], algorithm, stew/byteutils,
+  eth/common, eth/trie/[db, nibbles], algorithm,
   ./witness_types

 type
-  KeyHash = array[32, byte]
-  StorageSlot = array[32, byte]
+  KeyHash* = array[32, byte]

-  KeyData = object
-    visited: bool
-    hash: KeyHash
-    case storageMode: bool
+  KeyData* = object
+    visited*: bool
+    hash*: KeyHash
+    case storageMode*: bool
     of true:
-      storageSlot: StorageSlot
+      storageSlot*: StorageSlot
     of false:
-      storageKeys: MultikeysRef
-      address: EthAddress
+      storageKeys*: MultikeysRef
+      address*: EthAddress

   Multikeys* = object
     keys: seq[KeyData]
@@ -22,7 +21,7 @@ type
   MultikeysRef* = ref Multikeys

   Group* = object
-    first, last: int16
+    first*, last*: int16

   BranchGroup* = object
     mask*: uint
@@ -57,9 +56,6 @@ func compareNibbles(x: openArray[byte], start: int, n: NibblesSeq): bool =
     inc i
   result = true

-func `$`(x: KeyHash): string =
-  toHex(x)
-
 proc newMultiKeys*(keys: openArray[AccountKey]): MultikeysRef =
   result = new Multikeys
   result.keys = newSeq[KeyData](keys.len)
@@ -79,7 +75,8 @@ proc newMultiKeys*(keys: openArray[StorageSlot]): MultikeysRef =
   result.keys.sort(cmpHash)

 func initGroup*(m: MultikeysRef): Group =
-  result = Group(first: 0'i16, last: (m.keys.len - 1).int16)
+  type T = type result.last
+  result = Group(first: 0'i16, last: (m.keys.len - 1).T)

 func groups*(m: MultikeysRef, parentGroup: Group, depth: int): BranchGroup =
   # similar to a branch node, the product of this func
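A Group is a contiguous [first, last] window into the sorted key list, and a BranchGroup is what `groups` produces for one trie level: a mask of which nibbles occur at that depth plus one sub-group per set bit. The sketch below only illustrates that partitioning idea; the types and the `splitByNibble`/`nibbleAt` helpers are hypothetical stand-ins, not this module's API.

# Illustrative sketch only: shows how a sorted run of key hashes splits into
# per-nibble sub-ranges at a given depth, which is the information that
# Group and BranchGroup carry for the witness builder.
type
  KeyHash = array[32, byte]
  KeyRange = tuple[first, last: int]

func nibbleAt(h: KeyHash, depth: int): int =
  # nibble 0 is the high nibble of byte 0, nibble 1 its low nibble, and so on
  let b = h[depth div 2].int
  if depth mod 2 == 0: b shr 4 else: b and 0x0F

func splitByNibble(keys: seq[KeyHash], parent: KeyRange, depth: int): (uint, array[16, KeyRange]) =
  # because the keys are sorted, every nibble value occupies one contiguous run
  var mask = 0'u
  var subGroups: array[16, KeyRange]
  var i = parent.first
  while i <= parent.last:
    let n = nibbleAt(keys[i], depth)
    var j = i
    while j < parent.last and nibbleAt(keys[j + 1], depth) == n:
      inc j
    mask = mask or (1'u shl n)  # this branch slot is populated by some key
    subGroups[n] = (i, j)
    i = j + 1
  (mask, subGroups)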
@@ -130,34 +127,18 @@ iterator groups*(m: MultikeysRef, depth: int, n: NibblesSeq, parentGroup: Group)
       haveGroup = false
     yield (matchResult, groupResult)

-when isMainModule:
-  let keys = [
-    (hexToByteArray[20]("abcdef0a0b0c0d0e0f1234567890aabbccddeeff"), MultikeysRef(nil)),
-    (hexToByteArray[20]("abc0000000000000000000000000000000000000"), MultikeysRef(nil)),
-    (hexToByteArray[20]("cde9769bbcbdef9880932852388bdceabcdeadea"), MultikeysRef(nil)),
-    (hexToByteArray[20]("bad03eaeaea69072375281381267397182bcdbef"), MultikeysRef(nil)),
-    (hexToByteArray[20]("abcdefbbbbbbdddeefffaaccee19826736134298"), MultikeysRef(nil)),
-    (hexToByteArray[20]("ba88888888dddddbbbbfffeeeccaa78128301389"), MultikeysRef(nil)),
-    (hexToByteArray[20]("ba9084097472374372327238bbbcdffecadfecf3"), MultikeysRef(nil))
-  ]
-
-  proc main() =
-    var m = newMultikeys(keys)
-
-    for x in m.keys:
-      echo x.hash
-
-    var parentGroup = m.initGroup()
-    var depth = 3
-    var bg = m.groups(parentGroup, depth)
-
-    for i in 0..<16:
-      if branchMaskBitIsSet(bg.mask, i):
-        echo bg.groups[i]
-
-    var p = Group(first: 0, last: 2)
-    var n = hexToByteArray[1]("1F")
-    for j in groups(m, 3, initNibbleRange(n), p):
-      debugEcho j
-
-  main()
+func keyData*(m: MultikeysRef, g: Group): KeyData =
+  doAssert(g.first == g.last)
+  result = m.keys[g.first]
+
+iterator keyDatas*(m: MultikeysRef, g: Group): KeyData =
+  for i in g.first..g.last:
+    yield m.keys[i]
+
+iterator addresses*(m :MultikeysRef): EthAddress =
+  for x in m.keys:
+    yield x.address
+
+iterator storageKeys*(m :MultikeysRef): MultikeysRef =
+  for x in m.keys:
+    yield x.storageKeys
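With the fields exported and the keyData/keyDatas/addresses/storageKeys accessors in place, callers hand the witness builder one MultikeysRef covering every touched account, each entry optionally carrying its own storage MultikeysRef. A hedged usage sketch follows; it assumes AccountKey is the (address, storageKeys) tuple the tests in this commit rely on, and uses zeroed addresses and a zeroed slot purely as placeholders.

# Hedged usage sketch, not part of the commit.
import eth/common, ./witness_types, ./multi_keys

var addr1, addr2: EthAddress
var slot: StorageSlot

# Storage keys of one account live in their own, separately sorted MultikeysRef.
let storage = newMultiKeys([slot])

var accountKeys: seq[AccountKey]
accountKeys.add((addr1, MultikeysRef(nil)))  # account with no touched storage
accountKeys.add((addr2, storage))            # account plus its storage keys
let mkeys = newMultiKeys(accountKeys)

# The exported iterators replace direct access to the private keys field.
for address in mkeys.addresses:
  echo address
for sk in mkeys.storageKeys:
  echo "has storage keys: ", not sk.isNil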
@@ -4,47 +4,35 @@ import
   stew/byteutils, faststreams/input_stream,
   ../tests/[test_helpers, test_config],
   ../nimbus/db/accounts_cache, ./witness_types,
-  ../stateless/[witness_from_tree, tree_from_witness]
+  ../stateless/[witness_from_tree, tree_from_witness],
+  ./multi_keys

 type
   Tester = object
-    address: seq[EthAddress]
+    keys: MultikeysRef
     memDB: TrieDatabaseRef

-proc isValidBranch(branch: openArray[seq[byte]], rootHash: KeccakHash, key, value: openArray[byte]): bool =
-  # branch must not be empty
-  doAssert(branch.len != 0)
-
-  var db = newMemoryDB()
-  for node in branch:
-    doAssert(node.len != 0)
-    let nodeHash = hexary.keccak(node)
-    db.put(nodeHash.data, node)
-
-  var trie = initHexaryTrie(db, rootHash)
-  result = trie.get(key) == value
-
 proc testGetBranch(tester: Tester, rootHash: KeccakHash, testStatusIMPL: var TestStatus) =
   var trie = initSecureHexaryTrie(tester.memdb, rootHash)
   let flags = {wfEIP170}

   try:
-    for address in tester.address:
+    var wb = initWitnessBuilder(tester.memdb, rootHash, flags)
+    var witness = wb.buildWitness(tester.keys)
+
+    var db = newMemoryDB()
+    when defined(useInputStream):
+      var input = memoryInput(witness)
+      var tb = initTreeBuilder(input, db, flags)
+    else:
+      var tb = initTreeBuilder(witness, db, flags)
+
+    var root = tb.buildTree()
+    check root.data == rootHash.data
+
+    let newTrie = initSecureHexaryTrie(tb.getDB(), root)
+    for address in tester.keys.addresses:
       let account = rlp.decode(trie.get(address), Account)
-      var wb = initWitnessBuilder(tester.memdb, rootHash, flags)
-      var witness = wb.buildWitness(address)
-
-      var db = newMemoryDB()
-      when defined(useInputStream):
-        var input = memoryInput(witness)
-        var tb = initTreeBuilder(input, db, flags)
-      else:
-        var tb = initTreeBuilder(witness, db, flags)
-
-      var root = tb.buildTree()
-      check root.data == rootHash.data
-
-      let newTrie = initSecureHexaryTrie(tb.getDB(), root)
       let recordFound = newTrie.get(address)
       if recordFound.len > 0:
         let acc = rlp.decode(recordFound, Account)
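The reworked test above builds a single witness for the whole key set and then reconstructs the trie from it, instead of producing one witness per address. Condensed, the roundtrip looks roughly like the sketch below; memDB, rootHash and keys (a MultikeysRef) are assumed to exist from the JSON fixture setup, and doAssert stands in for unittest's check.

# Sketch of the multiproof roundtrip exercised by testGetBranch; memDB,
# rootHash and keys are assumed to be set up elsewhere in the test module.
let flags = {wfEIP170}

var wb = initWitnessBuilder(memDB, rootHash, flags)
let witness = wb.buildWitness(keys)           # one witness covering every key

var db = newMemoryDB()
var tb = initTreeBuilder(witness, db, flags)  # parse the witness back into a trie
let root = tb.buildTree()
doAssert root.data == rootHash.data           # must reproduce the original state root

# every touched account must be retrievable from the rebuilt trie
let rebuilt = initSecureHexaryTrie(tb.getDB(), root)
for address in keys.addresses:
  doAssert rebuilt.get(address).len > 0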
@@ -59,11 +47,20 @@ func parseHash256(n: JsonNode, name: string): Hash256 =
   hexToByteArray(n[name].getStr(), result.data)

 proc setupStateDB(tester: var Tester, wantedState: JsonNode, stateDB: var AccountsCache): Hash256 =
+  var keys = newSeqOfCap[AccountKey](wantedState.len)
+
   for ac, accountData in wantedState:
     let account = ethAddressFromHex(ac)
-    tester.address.add(account)
-    for slot, value in accountData{"storage"}:
-      stateDB.setStorage(account, fromHex(UInt256, slot), fromHex(UInt256, value.getStr))
+    let slotVals = accountData{"storage"}
+    var storageKeys = newSeqOfCap[StorageSlot](slotVals.len)
+
+    for slotStr, value in slotVals:
+      let slot = fromHex(UInt256, slotStr)
+      storageKeys.add(slot.toBytesBE)
+      stateDB.setStorage(account, slot, fromHex(UInt256, value.getStr))
+
+    var sKeys = if storageKeys.len != 0: newMultiKeys(storageKeys) else: MultikeysRef(nil)
+    keys.add((account, sKeys))

     let nonce = accountData{"nonce"}.getHexadecimalInt.AccountNonce
     let code = accountData{"code"}.getStr.safeHexToSeqByte
@@ -73,6 +70,7 @@ proc setupStateDB(tester: var Tester, wantedState: JsonNode, stateDB: var Accoun
     stateDB.setCode(account, code)
     stateDB.setBalance(account, balance)

+  tester.keys = newMultiKeys(keys)
   stateDB.persist()
   result = stateDB.rootHash
@@ -1,18 +1,27 @@
 import
-  randutils, random,
+  randutils, random, unittest,
   eth/[common, rlp], eth/trie/[hexary, db, trie_defs],
   faststreams/input_stream, nimcrypto/sysrand,
   ../stateless/[witness_from_tree, tree_from_witness],
-  ../nimbus/db/storage_types, ./witness_types
+  ../nimbus/db/storage_types, ./witness_types, ./multi_keys

 type
   DB = TrieDatabaseRef

+  StorageKeys = tuple[hash: Hash256, keys: MultikeysRef]
+
+  AccountDef = object
+    storageKeys: MultiKeysRef
+    account: Account
+
 proc randU256(): UInt256 =
   var bytes: array[32, byte]
   discard randomBytes(bytes[0].addr, sizeof(result))
   result = UInt256.fromBytesBE(bytes)

+proc randStorageSlot(): StorageSlot =
+  discard randomBytes(result[0].addr, sizeof(result))
+
 proc randNonce(): AccountNonce =
   discard randomBytes(result.addr, sizeof(result))
@@ -25,66 +34,80 @@ proc randCode(db: DB): Hash256 =
     result = hexary.keccak(code)
     db.put(contractHashKey(result).toOpenArray, code)

-proc randStorage(db: DB): Hash256 =
+proc randStorage(db: DB): StorageKeys =
   if rand(0..1) == 0:
-    result = emptyRlpHash
+    result = (emptyRlpHash, MultikeysRef(nil))
   else:
     var trie = initSecureHexaryTrie(db)
-    let numPairs = rand(1..5)
+    let numPairs = rand(1..10)
+    var keys = newSeq[StorageSlot](numPairs)

     for i in 0..<numPairs:
-      # we bypass u256 key to slot conversion
-      # discard randomBytes(key.addr, sizeof(key))
-      trie.put(i.u256.toByteArrayBE, rlp.encode(randU256()))
-    result = trie.rootHash
+      keys[i] = randStorageSlot()
+      trie.put(keys[i], rlp.encode(randU256()))

-proc randAccount(db: DB): Account =
-  result.nonce = randNonce()
-  result.balance = randU256()
-  result.codeHash = randCode(db)
-  result.storageRoot = randStorage(db)
+    if rand(0..1) == 0:
+      result = (trie.rootHash, MultikeysRef(nil))
+    else:
+      var m = newMultikeys(keys)
+      result = (trie.rootHash, m)
+
+proc randAccount(db: DB): AccountDef =
+  result.account.nonce = randNonce()
+  result.account.balance = randU256()
+  result.account.codeHash = randCode(db)
+  (result.account.storageRoot, result.storageKeys) = randStorage(db)

 proc randAddress(): EthAddress =
   discard randomBytes(result.addr, sizeof(result))

-proc runTest(numPairs: int) =
+proc runTest(numPairs: int, testStatusIMPL: var TestStatus) =
   var memDB = newMemoryDB()
   var trie = initSecureHexaryTrie(memDB)
-  var addrs = newSeq[EthAddress](numPairs)
+  var addrs = newSeq[AccountKey](numPairs)
   var accs = newSeq[Account](numPairs)

   for i in 0..<numPairs:
-    addrs[i] = randAddress()
-    accs[i] = randAccount(memDB)
-    trie.put(addrs[i], rlp.encode(accs[i]))
+    let acc = randAccount(memDB)
+    addrs[i] = (randAddress(), acc.storageKeys)
+    accs[i] = acc.account
+    trie.put(addrs[i].address, rlp.encode(accs[i]))

+  var mkeys = newMultiKeys(addrs)
   let rootHash = trie.rootHash

-  for i in 0..<numPairs:
-    var wb = initWitnessBuilder(memDB, rootHash, {wfEIP170})
-    var witness = wb.buildWitness(addrs[i])
-    var db = newMemoryDB()
-    when defined(useInputStream):
-      var input = memoryInput(witness)
-      var tb = initTreeBuilder(input, db, {wfEIP170})
-    else:
-      var tb = initTreeBuilder(witness, db, {wfEIP170})
-    let root = tb.buildTree()
-    doAssert root.data == rootHash.data
+  var wb = initWitnessBuilder(memDB, rootHash, {wfEIP170})
+  var witness = wb.buildWitness(mkeys)
+  var db = newMemoryDB()
+  when defined(useInputStream):
+    var input = memoryInput(witness)
+    var tb = initTreeBuilder(input, db, {wfEIP170})
+  else:
+    var tb = initTreeBuilder(witness, db, {wfEIP170})
+  let root = tb.buildTree()
+  check root.data == rootHash.data

-    let newTrie = initSecureHexaryTrie(tb.getDB(), root)
-    let recordFound = newTrie.get(addrs[i])
-    if recordFound.len > 0:
-      let acc = rlp.decode(recordFound, Account)
-      doAssert acc == accs[i]
-    else:
-      doAssert(false, "BUG IN TREE BUILDER")
+  let newTrie = initSecureHexaryTrie(tb.getDB(), root)
+  for i in 0..<numPairs:
+    let recordFound = newTrie.get(addrs[i].address)
+    if recordFound.len > 0:
+      let acc = rlp.decode(recordFound, Account)
+      check acc == accs[i]
+    else:
+      debugEcho "BUG IN TREE BUILDER"
+      check false

 proc main() =
-  randomize()
-
-  for i in 0..<30:
-    runTest(rand(1..30))
-  echo "OK"
+  suite "random keys block witness roundtrip test":
+    randomize()
+
+    test "random multiple keys":
+      for i in 0..<100:
+        runTest(rand(1..30), testStatusIMPL)
+
+    test "there is no short node":
+      let acc = newAccount()
+      let rlpBytes = rlp.encode(acc)
+      check rlpBytes.len > 32

 main()
@@ -11,6 +11,10 @@ type
     usedBytes: int
     data: array[32, byte]

+  AccountAndSlots* = object
+    address*: EthAddress
+    slots*: seq[StorageSlot]
+
   TreeBuilder = object
     when defined(useInputStream):
       input: InputStream
@@ -20,6 +24,10 @@ type
     db: DB
     root: KeccakHash
     flags: WitnessFlags
+    keys: seq[AccountAndSlots]
+
+# this TreeBuilder support short node parsing
+# but a block witness should not contains short node

 # the InputStream still unstable
 # when using large dataset for testing
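The new keys field lets the TreeBuilder record, for every account node it parses, the account address plus any storage slots seen under it; the readAddress/readStorageSlot helpers added further down do the appending. A standalone illustrative sketch of that bookkeeping follows, with local copies of the types and hypothetical recordAddress/recordSlot helpers rather than the module's private state.

# Illustrative sketch only: mirrors how TreeBuilder accumulates parsed keys.
type
  EthAddress = array[20, byte]
  StorageSlot = array[32, byte]
  AccountAndSlots = object
    address: EthAddress
    slots: seq[StorageSlot]

proc recordAddress(keys: var seq[AccountAndSlots], address: EthAddress) =
  # called once per parsed account node: start a new entry
  keys.add AccountAndSlots(address: address)

proc recordSlot(keys: var seq[AccountAndSlots], slot: StorageSlot) =
  # called per parsed storage leaf: attach the slot to the most recent account
  doAssert keys.len > 0
  keys[^1].slots.add slot

var parsed: seq[AccountAndSlots]
var a: EthAddress
var s: StorageSlot
parsed.recordAddress(a)
parsed.recordSlot(s)
doAssert parsed[0].slots.len == 1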
@@ -101,7 +109,7 @@ proc safeReadU32(t: var TreeBuilder): uint32 =
 template safeReadEnum(t: var TreeBuilder, T: type): untyped =
   let typ = t.safeReadByte.int
   if typ < low(T).int or typ > high(T).int:
-    raise newException(ParsingError, "Wrong " & T.name & " value")
+    raise newException(ParsingError, "Wrong " & T.name & " value " & $typ)
   T(typ)

 template safeReadBytes(t: var TreeBuilder, length: int, body: untyped) =
@@ -208,7 +216,8 @@ proc branchNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
     doAssert(readDepth == depth, "branchNode " & $readDepth & " vs. " & $depth)

   when defined(debugHash):
-    let hash = toKeccak(t.read(32))
+    var hash: NodeKey
+    toKeccak(hash, t.read(32))

   var r = initRlpList(17)

@@ -270,7 +279,8 @@ proc extensionNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
     doAssert(readDepth == depth, "extensionNode " & $readDepth & " vs. " & $depth)

   when defined(debugHash):
-    let hash = toKeccak(t.read(32))
+    var hash: NodeKey
+    toKeccak(hash, t.read(32))

   assert(depth + nibblesLen < 65)
   let nodeType = safeReadEnum(t, TrieNodeType)
@@ -286,6 +296,13 @@ proc extensionNode(t: var TreeBuilder, depth: int, storageMode: bool): NodeKey =
       debugEcho "DEPTH: ", depth
     doAssert(result == hash, "EXT HASH DIFF " & result.data.toHex & " vs. " & hash.data.toHex)

+func toAddress(x: openArray[byte]): EthAddress =
+  result[0..19] = result[0..19]
+
+proc readAddress(t: var TreeBuilder) =
+  safeReadBytes(t, 20):
+    t.keys.add AccountAndSlots(address: toAddress(t.read(20)))
+
 proc accountNode(t: var TreeBuilder, depth: int): NodeKey =
   assert(depth < 65)

@@ -306,8 +323,7 @@ proc accountNode(t: var TreeBuilder, depth: int): NodeKey =
   safeReadBytes(t, pathLen):
     r.hexPrefix(t.read(pathLen), nibblesLen, true)

-  # TODO: parse address
-  # let address = toAddress(t.read(20))
+  t.readAddress()

   safeReadBytes(t, 64):
     var acc = Account(
@@ -353,23 +369,41 @@ proc accountNode(t: var TreeBuilder, depth: int): NodeKey =

     doAssert(result == nodeKey, "account node parsing error")

+func toStorageSlot(x: openArray[byte]): StorageSlot =
+  result[0..31] = result[0..31]
+
+proc readStorageSlot(t: var TreeBuilder) =
+  safeReadBytes(t, 32):
+    t.keys[^1].slots.add toStorageSlot(t.read(32))
+
 proc accountStorageLeafNode(t: var TreeBuilder, depth: int): NodeKey =
   assert(depth < 65)

+  when defined(debugHash):
+    let len = t.safeReadU32().int
+    let node = @(t.read(len))
+    let nodeKey = t.toNodeKey(node)
+
+  when defined(debugDepth):
+    let readDepth = t.safeReadByte().int
+    doAssert(readDepth == depth, "accountNode " & $readDepth & " vs. " & $depth)
+
   let nibblesLen = 64 - depth
   var r = initRlpList(2)
   let pathLen = nibblesLen div 2 + nibblesLen mod 2
   safeReadBytes(t, pathLen):
     r.hexPrefix(t.read(pathLen), nibblesLen, true)

-  # TODO: parse key
-  # let key = @(t.read(32))
-  # UInt256 -> BytesBE -> keccak
+  t.readStorageSlot()

   safeReadBytes(t, 32):
     let val = UInt256.fromBytesBE(t.read(32))
     r.append rlp.encode(val)
   result = t.toNodeKey(r.finish)

+  when defined(debugHash):
+    doAssert(result == nodeKey, "account storage no parsing error")
+
 proc hashNode(t: var TreeBuilder): NodeKey =
   safeReadBytes(t, 32):
     result.toKeccak(t.read(32))
@@ -4,7 +4,7 @@ import
   eth/trie/[trie_defs, nibbles, db],
   faststreams/output_stream,
   ./witness_types, ../nimbus/constants,
-  ../nimbus/db/storage_types
+  ../nimbus/db/storage_types, ./multi_keys

 type
   DB = TrieDatabaseRef
@@ -15,6 +15,13 @@ type
     output: OutputStream
     flags: WitnessFlags

+  StackElem = object
+    node: seq[byte]
+    parentGroup: Group
+    keys: MultikeysRef
+    depth: int
+    storageMode: bool
+
 proc initWitnessBuilder*(db: DB, rootHash: KeccakHash, flags: WitnessFlags = {}): WitnessBuilder =
   result.db = db
   result.root = rootHash
@@ -32,7 +39,7 @@ proc expectHash(r: Rlp): seq[byte] =

 template getNode(elem: untyped): untyped =
   if elem.isList: @(elem.rawData)
-  else: get(wb.db, elem.expectHash)
+  else: @(get(wb.db, elem.expectHash))

 proc rlpListToBitmask(r: var Rlp): uint =
   var i = 0
@@ -98,9 +105,11 @@ proc writeHashNode(wb: var WitnessBuilder, node: openArray[byte]) =
   wb.output.append(HashNodeType.byte)
   wb.output.append(node)

-proc getBranchRecurseAux(wb: var WitnessBuilder, node: openArray[byte], path: NibblesSeq, depth: int, storageMode: bool)
+proc getBranchRecurseAux(wb: var WitnessBuilder, z: var StackElem) {.raises: [ContractCodeError, IOError, Defect, CatchableError, Exception].}

-proc writeAccountNode(wb: var WitnessBuilder, acc: Account, nibbles: NibblesSeq, node: openArray[byte], depth: int) =
+proc writeAccountNode(wb: var WitnessBuilder, storageKeys: MultikeysRef, address: EthAddress,
+  acc: Account, nibbles: NibblesSeq, node: openArray[byte], depth: int) {.raises: [ContractCodeError, IOError, Defect, CatchableError, Exception].} =
   # write type
   wb.output.append(AccountNodeType.byte)
@@ -117,10 +126,7 @@ proc writeAccountNode(wb: var WitnessBuilder, acc: Account, nibbles: NibblesSeq,

   wb.output.append(accountType.byte)
   wb.writeNibbles(nibbles, false)
-  # TODO: where the address come from?
-  # single proof is easy, but multiproof will be harder
-  # concat the path and then look into LUT?
-  # wb.output.append(acc.address)
+  wb.output.append(address)
   wb.output.append(acc.balance.toBytesBE)
   wb.output.append(acc.nonce.u256.toBytesBE)
@@ -134,107 +140,97 @@ proc writeAccountNode(wb: var WitnessBuilder, acc: Account, nibbles: NibblesSeq,
   else:
     wb.writeU32(0'u32)

-  if acc.storageRoot != emptyRlpHash:
-    # switch to account mode
-    var node = wb.db.get(acc.storageRoot.data)
-    var key = keccak(0.u256.toByteArrayBE)
-    getBranchRecurseAux(wb, node, initNibbleRange(key.data), 0, true)
+  if storageKeys.isNil:
+    # we have storage but not touched by EVM
+    wb.writeHashNode(acc.storageRoot.data)
+  elif acc.storageRoot != emptyRlpHash:
+    var zz = StackElem(
+      node: wb.db.get(acc.storageRoot.data),
+      parentGroup: storageKeys.initGroup(),
+      keys: storageKeys,
+      depth: 0,          # reset depth
+      storageMode: true  # switch to storage mode
+    )
+    getBranchRecurseAux(wb, zz)
   else:
     wb.writeHashNode(emptyRlpHash.data)

 #0x00 pathnibbles:<Nibbles(64-d)> address:<Address> balance:<Bytes32> nonce:<Bytes32>
 #0x01 pathnibbles:<Nibbles(64-d)> address:<Address> balance:<Bytes32> nonce:<Bytes32> bytecode:<Bytecode> storage:<Tree_Node(0,1)>

-proc writeAccountStorageLeafNode(wb: var WitnessBuilder, val: UInt256, nibbles: NibblesSeq, node: openArray[byte], depth: int) =
+proc writeAccountStorageLeafNode(wb: var WitnessBuilder, key: openArray[byte], val: UInt256, nibbles: NibblesSeq, node: openArray[byte], depth: int) =
   wb.output.append(StorageLeafNodeType.byte)

+  when defined(debugHash):
+    wb.writeU32(node.len.uint32)
+    wb.output.append(node)
+
+  when defined(debugDepth):
+    wb.output.append(depth.byte)
+
   doAssert(nibbles.len == 64 - depth)
   wb.writeNibbles(nibbles, false)

-  # TODO: write key
-  # wb.output.append(key.toByteArrayBE)
-  wb.output.append(val.toByteArrayBE)
+  wb.output.append(key)
+  wb.output.append(val.toBytesBE)

 #<Storage_Leaf_Node(d<65)> := pathnibbles:<Nibbles(64-d))> key:<Bytes32> val:<Bytes32>

-proc writeShortNode(wb: var WitnessBuilder, node: openArray[byte], depth: int, storageMode: bool) =
-  var nodeRlp = rlpFromBytes node
-  if not nodeRlp.hasData or nodeRlp.isEmpty: return
-  case nodeRlp.listLen
-  of 2:
-    let (isLeaf, k) = nodeRlp.extensionNodeKey
-    if isLeaf:
-      if storageMode:
-        let val = nodeRlp.listElem(1).toBytes.decode(UInt256)
-        writeAccountStorageLeafNode(wb, val, k, node, depth)
-      else:
-        let acc = nodeRlp.listElem(1).toBytes.decode(Account)
-        writeAccountNode(wb, acc, k, node, depth)
-    else:
-      # why this short extension node have no
-      # child and still valid when we reconstruct
-      # the trie on the other side?
-      # a bug in hexary trie algo?
-      # or a bug in nim hexary trie implementation?
-      writeExtensionNode(wb, k, depth, node)
-  of 17:
-    let branchMask = rlpListToBitmask(nodeRlp)
-    writeBranchNode(wb, branchMask, depth, node)
-
-    for i in 0..<16:
-      if branchMask.branchMaskBitIsSet(i):
-        var branch = nodeRlp.listElem(i)
-        let nextLookup = branch.getNode
-        writeShortNode(wb, nextLookup, depth + 1, storageMode)
-
-    # contrary to yellow paper spec,
-    # the 17th elem never exist in reality.
-    # block witness spec also omit it.
-    # probably a flaw in hexary trie design
-    # 17th elem should always empty
-    doAssert branchMask.branchMaskBitIsSet(16) == false
-  else:
-    raise newException(CorruptedTrieDatabase,
-      "HexaryTrie short node with an unexpected number of children")
-
-proc getBranchRecurseAux(wb: var WitnessBuilder, node: openArray[byte], path: NibblesSeq, depth: int, storageMode: bool) =
-  var nodeRlp = rlpFromBytes node
-  if not nodeRlp.hasData or nodeRlp.isEmpty: return
+proc getBranchRecurseAux(wb: var WitnessBuilder, z: var StackElem) =
+  if z.node.len == 0: return
+  var nodeRlp = rlpFromBytes z.node

   case nodeRlp.listLen
   of 2:
     let (isLeaf, k) = nodeRlp.extensionNodeKey
-    let sharedNibbles = sharedPrefixLen(path, k)
-    if sharedNibbles == k.len:
-      let value = nodeRlp.listElem(1)
-      if not isLeaf:
-        # ExtensionNodeType
-        writeExtensionNode(wb, k, depth, node)
-        let nextLookup = value.getNode
-        getBranchRecurseAux(wb, nextLookup, path.slice(sharedNibbles), depth + sharedNibbles, storageMode)
-      else:
-        # AccountNodeType
-        if storageMode:
-          writeAccountStorageLeafNode(wb, value.toBytes.decode(UInt256), k, node, depth)
-        else:
-          writeAccountNode(wb, value.toBytes.decode(Account), k, node, depth)
-    else:
-      # this is a potential branch for multiproof
-      writeHashNode(wb, keccak(node).data)
+    var match = false
+    for mg in groups(z.keys, z.depth, k, z.parentGroup):
+      if mg.match:
+        doAssert(match == false) # should be only one match
+        match = true
+        let value = nodeRlp.listElem(1)
+        if not isLeaf:
+          # ExtensionNodeType
+          writeExtensionNode(wb, k, z.depth, z.node)
+          var zz = StackElem(
+            node: value.getNode,
+            parentGroup: mg.group,
+            keys: z.keys,
+            depth: z.depth + k.len,
+            storageMode: z.storageMode
+          )
+          getBranchRecurseAux(wb, zz)
+        else:
+          let kd = keyData(z.keys, mg.group)
+          if z.storageMode:
+            doAssert(kd.storageMode)
+            writeAccountStorageLeafNode(wb, kd.storageSlot, value.toBytes.decode(UInt256), k, z.node, z.depth)
+          else:
+            doAssert(not kd.storageMode)
+            writeAccountNode(wb, kd.storageKeys, kd.address, value.toBytes.decode(Account), k, z.node, z.depth)
+    if not match:
+      writeHashNode(wb, keccak(z.node).data)
   of 17:
     let branchMask = rlpListToBitmask(nodeRlp)
-    writeBranchNode(wb, branchMask, depth, node)
+    writeBranchNode(wb, branchMask, z.depth, z.node)
+    let path = groups(z.keys, z.parentGroup, z.depth)

-    let notLeaf = path.len != 0
+    let notLeaf = z.depth != 63 # path.len == 0
     for i in 0..<16:
       if branchMask.branchMaskBitIsSet(i):
         var branch = nodeRlp.listElem(i)
-        if notLeaf and i == path[0].int:
-          let nextLookup = branch.getNode
-          getBranchRecurseAux(wb, nextLookup, path.slice(1), depth + 1, storageMode)
+        if notLeaf and branchMaskBitIsSet(path.mask, i):
+          var zz = StackElem(
+            node: branch.getNode,
+            parentGroup: path.groups[i],
+            keys: z.keys,
+            depth: z.depth + 1,
+            storageMode: z.storageMode
+          )
+          getBranchRecurseAux(wb, zz)
         else:
           if branch.isList:
-            let nextLookup = branch.getNode
-            writeShortNode(wb, nextLookup, depth + 1, storageMode)
+            doAssert(false, "Short node should not exist in block witness")
           else:
             # this is a potential branch for multiproof
             writeHashNode(wb, branch.expectHash)
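The hunk above is the core of the multiproof change: at a branch node the builder no longer follows one key path, it asks groups() which children are touched by at least one requested key, recurses into exactly those, and collapses every untouched child into a hash node. A schematic sketch of that decision follows; the stub procs and mask parameters are placeholders for the real writers, StackElem and MultikeysRef machinery.

# Schematic sketch only: the real logic lives in getBranchRecurseAux.
func branchBitIsSet(mask: uint, i: int): bool =
  ((mask shr i) and 1'u) != 0

proc recurseInto(child: int) = echo "descend into child ", child
proc emitHashNode(child: int) = echo "hash node for child ", child

proc descendBranch(nodeMask, keyMask: uint) =
  # nodeMask: which children the trie node actually has
  # keyMask:  which children at least one requested key passes through
  for i in 0..<16:
    if not branchBitIsSet(nodeMask, i):
      continue                 # the branch simply has no child here
    if branchBitIsSet(keyMask, i):
      recurseInto(i)           # some key needs this subtree: keep expanding it
    else:
      emitHashNode(i)          # untouched subtree: emit only its hash

descendBranch(nodeMask = 0b1011'u, keyMask = 0b0010'u)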
@@ -245,8 +241,8 @@ proc getBranchRecurseAux(wb: var WitnessBuilder, node: openArray[byte], path: Ni
     raise newException(CorruptedTrieDatabase,
       "HexaryTrie node with an unexpected number of children")

-proc buildWitness*(wb: var WitnessBuilder; address: EthAddress, withVersion: bool = true): seq[byte]
-  {.raises: [ContractCodeError, IOError, WitnessError, Defect].} =
+proc buildWitness*(wb: var WitnessBuilder, keys: MultikeysRef, withVersion: bool = true): seq[byte]
+  {.raises: [ContractCodeError, IOError, Defect, CatchableError, Exception].} =

   # witness version
   wb.output.append(BlockWitnessVersion.byte)
@@ -255,13 +251,15 @@ proc buildWitness*(wb: var WitnessBuilder; address: EthAddress, withVersion: boo

   # we only output one tree
   wb.output.append(MetadataNothing.byte)
-  let key = keccak(address)

-  try:
-    var node = wb.db.get(wb.root.data)
-    getBranchRecurseAux(wb, node, initNibbleRange(key.data), 0, false)
-  except CorruptedTrieDatabase, RlpTypeMismatch, CatchableError, Exception:
-    raise newException(WitnessError, getCurrentExceptionMsg())
+  var z = StackElem(
+    node: @(wb.db.get(wb.root.data)),
+    parentGroup: keys.initGroup(),
+    keys: keys,
+    depth: 0,
+    storageMode: false
+  )
+  getBranchRecurseAux(wb, z)

   # result
   result = wb.output.getOutput(seq[byte])
@@ -23,7 +23,8 @@ type

   ContractCodeError* = object of ValueError
   ParsingError* = object of ValueError
-  WitnessError* = object of ValueError
+
+  StorageSlot* = array[32, byte]

 const
   StorageLeafNodeType* = AccountNodeType
@@ -40,4 +41,4 @@ func branchMaskBitIsSet*(x: uint, i: int): bool {.inline.} =
 func constructBranchMask*(b1, b2: byte): uint {.inline.} =
   result = uint(b1) shl 8 or uint(b2)
   if countOnes(result) < 2 or ((result and (not 0x1FFFF'u)) != 0):
-    raise newException(ParsingError, "Invalid branch mask pattern")
+    raise newException(ParsingError, "Invalid branch mask pattern " & $result)
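constructBranchMask rebuilds the branch mask from the two bytes stored in the witness and rejects anything a real branch node could not produce: fewer than two set bits, or bits outside the 17-bit range. A small worked example of the same encoding, assuming only what the function itself shows (toMask is a local stand-in for it):

import bitops

# bit i of the mask set <=> the branch node has a child at nibble i;
# b1 supplies the high eight bits and b2 the low eight, as in constructBranchMask.
func toMask(b1, b2: byte): uint =
  uint(b1) shl 8 or uint(b2)

let mask = toMask(0x00, 0b0000_0101)    # children at nibbles 0 and 2
doAssert mask.testBit(0) and mask.testBit(2)
doAssert countSetBits(mask) >= 2        # fewer than two set bits is rejected
doAssert (mask and not 0x1FFFF'u) == 0  # nothing outside the 17-bit range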