Core db and aristo updates for destructor and tx logic (#1894)
* Disable `TransactionID` related functions from `state_db.nim`
why:
Functions `getCommittedStorage()` and `updateOriginalRoot()` from
the `state_db` module are nowhere used. The emulation of a legacy
`TransactionID` type functionality is administratively expensive to
provide by `Aristo` (the legacy DB version is only partially
implemented, anyway).
As there is no other place where `TransactionID`s are used, they will
not be provided by the `Aristo` variant of the `CoreDb`. For the
legacy DB API, nothing will change.
* Fix copyright headers in source code
* Get rid of compiler warning
* Update Aristo code, remove unused `merge()` variant, export `hashify()`
why:
Adapt to upcoming `CoreDb` wrapper
* Remove synced tx feature from `Aristo`
why:
+ This feature allowed to synchronise transaction methods like begin,
commit, and rollback for a group of descriptors.
+ The feature is over engineered and not needed for `CoreDb`, neither
is it complete (some convergence features missing.)
* Add debugging helpers to `Kvt`
also:
Update database iterator, add count variable yield argument similar
to `Aristo`.
* Provide optional destructors for `CoreDb` API
why:
For the upcoming Aristo wrapper, this allows to control when certain
smart destruction and update can take place. The auto destructor works
fine in general when the storage/cache strategy is known and acceptable
when creating descriptors.
* Add update option for `CoreDb` API function `hash()`
why:
The hash function is typically used to get the state root of the MPT.
Due to lazy hashing, this might be not available on the `Aristo` DB.
So the `update` function asks for re-hashing the current state changes
if needed.
* Update API tracking log mode: `info` => `debug`
* Use shared `Kvt` descriptor in new Ledger API
why:
No need to create a new descriptor all the time
2023-11-16 19:35:03 +00:00
|
|
|
# Copyright (c) 2022-2023 Status Research & Development GmbH
|
2022-06-29 17:44:08 +02:00
|
|
|
# Licensed under either of
|
|
|
|
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
|
|
|
|
# * MIT license ([LICENSE-MIT](LICENSE-MIT))
|
|
|
|
# at your option.
|
|
|
|
# This file may not be copied, modified, or distributed except according to
|
|
|
|
# those terms.
|
|
|
|
|
|
|
|
|
|
|
|
import
|
|
|
|
std/[options, strutils, typetraits],
|
|
|
|
unittest2,
|
2023-01-31 13:38:08 +01:00
|
|
|
eth/[common/eth_types],
|
2022-06-29 17:44:08 +02:00
|
|
|
nimcrypto/hash,
|
|
|
|
stew/byteutils,
|
|
|
|
../nimbus/rpc/filters,
|
2023-12-08 16:35:50 +07:00
|
|
|
../nimbus/beacon/web3_eth_conv,
|
2022-06-29 17:44:08 +02:00
|
|
|
./test_block_fixture
|
|
|
|
|
|
|
|
# Logs derived once from the shared mainnet block 4514995 fixture; every
# test in this module filters this same log set.
let allLogs = deriveLogs(blockHeader4514995, blockBody4514995.transactions, receipts4514995)
|
|
|
|
|
2023-12-08 16:35:50 +07:00
|
|
|
func w3Hash(x: string): Web3Hash =
  ## Convert a 0x-prefixed hex string into a 32-byte `Web3Hash`.
  let raw = hexToByteArray[32](x)
  Web3Hash(raw)
|
|
|
|
|
2022-06-29 17:44:08 +02:00
|
|
|
proc filtersMain*() =
  ## Test suite for `deriveLogs`/`filterLogs`/`headerBloomFilter` from
  ## `nimbus/rpc/filters`, exercised against the fixed mainnet block
  ## 4514995 fixture.
  # All magic numbers and addresses in following tests are confirmed with geth eth_getLogs,
  # responses
  suite "Log filters":
    # specific tests comparing results with geth
    test "Proper log number and indexes":
      check:
        len(allLogs) == 54

      # Log indexes must be assigned sequentially over the whole block.
      for i, log in allLogs:
        check:
          log.logIndex.unsafeGet() == w3Qty(i.uint64)

    test "Filter with empty parameters should return all logs":
      # No address and no topic constraints: the filter is a no-op.
      let addrs = newSeq[EthAddress]()
      let filtered = filterLogs(allLogs, addrs, @[])
      check:
        len(filtered) == len(allLogs)

    test "Filter and BloomFilter for one address with one valid log":
      let address = hexToByteArray[20]("0x0e0989b1f9b8a38983c2ba8053269ca62ec9b195")
      let filteredLogs = filterLogs(allLogs, @[address], @[])

      check:
        # The header bloom must report the address as possibly present.
        headerBloomFilter(blockHeader4514995, @[address], @[])
        len(filteredLogs) == 1
        filteredLogs[0].address == w3Addr address

    test "Filter and BloomFilter for one address with multiple valid logs":
      let address = hexToByteArray[20]("0x878d7ed5c194349f37b18688964e8db1eb0fcca1")
      let filteredLogs = filterLogs(allLogs, @[address], @[])

      check:
        headerBloomFilter(blockHeader4514995, @[address], @[])
        len(filteredLogs) == 2

      # Every returned log must belong to the queried address.
      for log in filteredLogs:
        check:
          log.address == w3Addr address

    test "Filter and BloomFilter for multiple address with multiple valid logs":
      let address = hexToByteArray[20]("0x878d7ed5c194349f37b18688964e8db1eb0fcca1")
      let address1 = hexToByteArray[20]("0x0e0989b1f9b8a38983c2ba8053269ca62ec9b195")
      let filteredLogs = filterLogs(allLogs, @[address, address1], @[])

      check:
        headerBloomFilter(blockHeader4514995, @[address, address1], @[])
        # 2 logs for the first address + 1 for the second.
        len(filteredLogs) == 3

    test "Filter topics, too many filters":
      # Five topic slots exceed the four topics a log can carry, so
      # nothing can match.
      let filteredLogs =
        filterLogs(
          allLogs,
          @[],
          @[
            none[seq[Web3Hash]](),
            none[seq[Web3Hash]](),
            none[seq[Web3Hash]](),
            none[seq[Web3Hash]](),
            none[seq[Web3Hash]]()
          ]
        )

      check:
        len(filteredLogs) == 0

    test "Filter topics, specific topic at first position":
      # ERC-20 Transfer event signature hash.
      let topic = w3Hash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef")

      let filteredLogs =
        filterLogs(
          allLogs,
          @[],
          @[some(@[topic])]
        )

      check:
        len(filteredLogs) == 15

      for log in filteredLogs:
        check:
          log.topics[0] == topic

    test "Filter topics, specific topic at first position and second position":
      let topic = w3Hash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef")
      let topic1 = w3Hash("0x000000000000000000000000919040a01a0adcef25ed6ecbc6ab2a86ca6d77df")

      let filteredLogs =
        filterLogs(
          allLogs,
          @[],
          @[some(@[topic]), some(@[topic1])]
        )

      check:
        len(filteredLogs) == 1

      for log in filteredLogs:
        check:
          log.topics[0] == topic
          log.topics[1] == topic1

    test "Filter topics, specific topic at first position and third position":
      let topic = w3Hash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef")
      let topic1 = w3Hash("0x000000000000000000000000fdc183d01a793613736cd40a5a578f49add1772b")

      # The `none` slot in the middle is a wildcard for topic position 1.
      let filteredLogs =
        filterLogs(
          allLogs,
          @[],
          @[some(@[topic]), none[seq[Web3Hash]](), some(@[topic1])]
        )

      check:
        len(filteredLogs) == 1

      for log in filteredLogs:
        check:
          log.topics[0] == topic
          log.topics[2] == topic1

    test "Filter topics, or query at first position":
      let topic = w3Hash("0x4a504a94899432a9846e1aa406dceb1bcfd538bb839071d49d1e5e23f5be30ef")
      let topic1 = w3Hash("0x526441bb6c1aba3c9a4a6ca1d6545da9c2333c8c48343ef398eb858d72b79236")

      # Several topics in one slot are OR-ed together.
      let filteredLogs =
        filterLogs(
          allLogs,
          @[],
          @[
            some(@[topic, topic1])
          ]
        )

      check:
        len(filteredLogs) == 2

      for log in filteredLogs:
        check:
          log.topics[0] == topic or log.topics[0] == topic1

    test "Filter topics, or query at first position and or query at second position":
      let topic = w3Hash("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef")
      let topic1 = w3Hash("0xa64da754fccf55aa65a1f0128a648633fade3884b236e879ee9f64c78df5d5d7")

      let topic2 = w3Hash("0x000000000000000000000000e16c02eac87920033ac72fc55ee1df3151c75786")
      let topic3 = w3Hash("0x000000000000000000000000b626a5facc4de1c813f5293ec3be31979f1d1c78")

      # Slots are AND-ed; topics within a slot are OR-ed.
      let filteredLogs =
        filterLogs(
          allLogs,
          @[],
          @[
            some(@[topic, topic1]),
            some(@[topic2, topic3])
          ]
        )

      check:
        len(filteredLogs) == 2

      for log in filteredLogs:
        check:
          log.topics[0] == topic or log.topics[0] == topic1
          log.topics[1] == topic2 or log.topics[1] == topic3

    # general property-based tests
    test "Specific address query should provide results only with given address":
      for log in allLogs:
        let filtered = filterLogs(allLogs, @[log.address], @[])

        check:
          len(filtered) > 0

        for filteredLog in filtered:
          check:
            filteredLog.address == log.address
|
|
|
|
|
|
|
|
# Allow running this test module directly as a stand-alone executable.
when isMainModule:
  filtersMain()
|