# Nimbus
# Copyright (c) 2022-2024 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

import
  std/options,
  eth/common/eth_types_rlp,
  web3/eth_api_types,
  eth/bloom as bFilter,
  stint,
  ./rpc_types

export rpc_types

{.push raises: [].}

proc deriveLogs*(header: Header, transactions: seq[Transaction], receipts: seq[Receipt]): seq[FilterLog] =
  ## Derive log fields. Does not deal with pending logs, only with logs that
  ## have a full data set.
  doAssert(len(transactions) == len(receipts))

  var resLogs: seq[FilterLog] = @[]
  var logIndex = 0'u64

  for i, receipt in receipts:
    for log in receipt.logs:
      let filterLog = FilterLog(
        # TODO investigate how to handle this field
        # - in nimbus, info about log removal would need to be kept at the
        #   synchronization level, to keep track of potential re-orgs
        # - in fluffy there is no concept of re-org
        removed: false,
        logIndex: Opt.some(Quantity(logIndex)),
        transactionIndex: Opt.some(Quantity(i)),
        transactionHash: Opt.some(transactions[i].rlpHash),
        blockHash: Opt.some(header.blockHash),
        blockNumber: Opt.some(Quantity(header.number)),
        address: log.address,
        data: log.data,
        # TODO topics should probably be kept as Hash32 in receipts
        topics: log.topics
      )

      inc logIndex
      resLogs.add(filterLog)

  return resLogs
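
# Hedged usage sketch: `blockHeader`, `blockTxs` and `blockReceipts` are
# hypothetical values fetched from the chain database; the call itself is the
# API above. Log indices are assigned in block order, starting at 0:
#
#   let logs = deriveLogs(blockHeader, blockTxs, blockReceipts)
#   if logs.len > 0:
#     doAssert logs[0].logIndex == Opt.some(Quantity(0))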

func participateInFilter(x: AddressOrList): bool =
  ## Returns true when the address filter actually narrows the result set:
  ## a null filter or an empty address list matches everything.
  if x.kind == slkNull:
    return false
  if x.kind == slkList:
    if x.list.len == 0:
      return false
  true
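
# In other words: slkNull and an empty slkList are both treated as "no address
# filtering requested", while slkSingle and a non-empty slkList restrict the
# result set.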

proc bloomFilter*(
    bloom: Bloom,
    addresses: AddressOrList,
    topics: seq[TopicOrList]): bool =

  let bloomFilter = bFilter.BloomFilter(value: bloom.to(StUint[2048]))

  if addresses.participateInFilter():
    var addrIncluded: bool = false
    if addresses.kind == slkSingle:
      addrIncluded = bloomFilter.contains(addresses.single.data)
    elif addresses.kind == slkList:
      for address in addresses.list:
        if bloomFilter.contains(address.data):
          addrIncluded = true
          break
    if not addrIncluded:
      return false

  for sub in topics:
    if sub.kind == slkNull:
      # catch-all wildcard
      continue

    var topicIncluded = false
    if sub.kind == slkSingle:
      if bloomFilter.contains(sub.single.data):
        topicIncluded = true
    else:
      topicIncluded = sub.list.len == 0
      for topic in sub.list:
        # This is not obvious, but passing the topic as MDigest256 fails,
        # because it bypasses the internal keccak256 hashing. To get the
        # desired semantics, we pass the digest's bare bytes so that they
        # are properly keccak256-hashed.
        if bloomFilter.contains(topic.data):
          topicIncluded = true
          break

    if not topicIncluded:
      return false

  return true
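
# Note that a bloom filter is probabilistic: a true result only means the
# block *may* contain a matching log (false positives are possible, false
# negatives are not), so callers must still re-check candidates against the
# actual logs, e.g. with filterLogs below.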

proc headerBloomFilter*(
    header: Header,
    addresses: AddressOrList,
    topics: seq[TopicOrList]): bool =
  return bloomFilter(header.logsBloom, addresses, topics)
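
# Hedged sketch of the intended two-step flow (`header`, `txs` and `receipts`
# are hypothetical inputs): the cheap bloom check prunes blocks first, and
# only surviving blocks pay for log derivation and exact matching:
#
#   if headerBloomFilter(header, addresses, topics):
#     let candidates = deriveLogs(header, txs, receipts)
#     result.add filterLogs(candidates, addresses, topics)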

proc matchTopics(log: FilterLog, topics: seq[TopicOrList]): bool =
  for i, sub in topics:
    if sub.kind == slkNull:
      # null sub-topic, i.e. it matches any value; move on to the next one
      continue

    var match = false
    if sub.kind == slkSingle:
      match = log.topics[i] == sub.single
    else:
      # Treat an empty list as a wildcard, although the caller should rather
      # use the null kind to indicate that. If Nim had a NonEmptySeq type,
      # this would be a use case for it.
      match = sub.list.len == 0
      for topic in sub.list:
        if log.topics[i] == topic:
          match = true
          break

    if not match:
      return false

  return true
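
# Topic filters match positionally, per the eth_getLogs convention: sub-filter
# i is checked against log.topics[i], a null sub-filter matches anything in
# that position, and a list sub-filter matches if any of its entries equals
# the topic at that position. filterLogs below guarantees that the filter is
# never longer than log.topics, so the indexing here is safe.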

proc filterLogs*(
    logs: openArray[FilterLog],
    addresses: AddressOrList,
    topics: seq[TopicOrList]): seq[FilterLog] =
  ## Exact (non-probabilistic) filtering of already-derived logs by address
  ## and positional topic filters.
  var filteredLogs: seq[FilterLog] = newSeq[FilterLog]()

  for log in logs:
    if addresses.kind == slkSingle and (addresses.single != log.address):
      continue

    if addresses.kind == slkList and
        addresses.list.len > 0 and
        (not addresses.list.contains(log.address)):
      continue

    if len(topics) > len(log.topics):
      continue

    if not matchTopics(log, topics):
      continue

    filteredLogs.add(log)

  return filteredLogs
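
# Hedged end-to-end sketch (names hypothetical): collecting matching logs over
# a block range for an eth_getLogs-style query, using the bloom check to skip
# blocks that cannot match:
#
#   var acc: seq[FilterLog]
#   for header in headersInRange:
#     if headerBloomFilter(header, addresses, topics):
#       let (txs, receipts) = loadBlockBody(header)   # hypothetical helper
#       acc.add filterLogs(deriveLogs(header, txs, receipts), addresses, topics)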