StyleCheck fixes for Fluffy and lcproxy (#1220)
Also add the compiler options to the nim.cfg of Fluffy and lcproxy (only visible with direct nimble usage).
This commit is contained in:
parent 952ac647a6
commit 87f28458a5
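Background for the renames below: Nim resolves identifiers case-insensitively after the first character and ignores underscores, so mixed spellings compile to the same symbol; `--styleCheck:usages` (together with `--styleCheck:hint` or `:error`) reports usages that do not match the declared spelling. A minimal illustration, not taken from the commit:

let netKeyInHex = "ab12"
echo netkeyInHex  # same symbol, but --styleCheck:usages flags the spelling mismatch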
@@ -66,7 +66,7 @@ proc getPersistentNetKey*(
     let netKeyInHex = readResult.get()
     if netKeyInHex.len() == 64:
-      let netKey = PrivateKey.fromHex(netkeyInHex)
+      let netKey = PrivateKey.fromHex(netKeyInHex)
       if netKey.isOk():
         info "Network key was successfully read"
         netKey.get()
@@ -1,2 +0,0 @@
--d:"chronicles_runtime_filtering=on"
--d:"chronicles_disable_thread_id"
@@ -46,7 +46,7 @@ func decode*(contentKey: ByteList): Option[ContentKey] =

 func toContentId*(contentKey: ByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
-  let idHash = sha2.sha_256.digest(contentKey.asSeq())
+  let idHash = sha2.sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)

 func toContentId*(contentKey: ContentKey): ContentId =
@@ -75,7 +75,7 @@ func decode*(contentKey: ByteList): Option[ContentKey] =

 func toContentId*(contentKey: ByteList): ContentId =
   # TODO: Should we try to parse the content key here for invalid ones?
-  let idHash = sha2.sha_256.digest(contentKey.asSeq())
+  let idHash = sha2.sha256.digest(contentKey.asSeq())
   readUintBE[256](idHash.data)

 func toContentId*(contentKey: ContentKey): ContentId =
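The two hunks above only rename `sha_256` to `sha256`; the derivation itself is unchanged: the content id is the SHA-256 digest of the serialized content key, read as a big-endian 256-bit integer. A self-contained sketch of that derivation, assuming nimcrypto and stint as these modules already import (the helper name is illustrative):

import nimcrypto/[hash, sha2], stint

func demoToContentId(contentKeyBytes: openArray[byte]): UInt256 =
  # hash the encoded content key and read the digest big-endian
  let idHash = sha2.sha256.digest(contentKeyBytes)
  readUintBE[256](idHash.data)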
@@ -218,7 +218,7 @@ proc validateReceiptsBytes*(

 ## ContentDB helper calls for specific history network types

-proc get(db: ContentDB, T: type BlockHeader, contentId: ContentID): Option[T] =
+proc get(db: ContentDB, T: type BlockHeader, contentId: ContentId): Option[T] =
   let contentFromDB = db.get(contentId)
   if contentFromDB.isSome():
     let res = decodeRlp(contentFromDB.get(), T)
@@ -229,7 +229,7 @@ proc get(db: ContentDB, T: type BlockHeader, contentId: ContentID): Option[T] =
   else:
     none(T)

-proc get(db: ContentDB, T: type BlockBody, contentId: ContentID): Option[T] =
+proc get(db: ContentDB, T: type BlockBody, contentId: ContentId): Option[T] =
   let contentFromDB = db.getSszDecoded(contentId, BlockBodySSZ)
   if contentFromDB.isSome():
     let res = T.fromPortalBlockBody(contentFromDB.get())
@@ -240,7 +240,7 @@ proc get(db: ContentDB, T: type BlockBody, contentId: ContentID): Option[T] =
   else:
     none(T)

-proc get(db: ContentDB, T: type seq[Receipt], contentId: ContentID): Option[T] =
+proc get(db: ContentDB, T: type seq[Receipt], contentId: ContentId): Option[T] =
   let contentFromDB = db.getSszDecoded(contentId, ReceiptsSSZ)
   if contentFromDB.isSome():
     let res = T.fromReceipts(contentFromDB.get())
@@ -252,7 +252,7 @@ proc get(db: ContentDB, T: type seq[Receipt], contentId: ContentID): Option[T] =
     none(T)

 proc get(
-    db: ContentDB, T: type EpochAccumulator, contentId: ContentID): Option[T] =
+    db: ContentDB, T: type EpochAccumulator, contentId: ContentId): Option[T] =
   db.getSszDecoded(contentId, T)

 proc getAccumulator(db: ContentDB): Option[Accumulator] =
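The getters above all follow one pattern: a `get` overload per stored type, selected by a typedesc parameter and returning an Option so callers handle locally missing content uniformly. A reduced, self-contained sketch of that pattern (illustrative types, not the ContentDB API):

import std/[options, tables]

type DemoDb = Table[uint64, int]

proc get(db: DemoDb, T: type int, contentId: uint64): Option[T] =
  # return the stored value if present, otherwise an empty Option
  if contentId in db: some(db[contentId])
  else: none(T)

when isMainModule:
  var db: DemoDb
  db[1'u64] = 42
  doAssert db.get(int, 1'u64) == some(42)
  doAssert db.get(int, 2'u64).isNone()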
@@ -503,7 +503,7 @@ proc getEpochAccumulator(
     return none(EpochAccumulator)

 proc getBlock*(
-    n: HistoryNetwork, chainId: uint16, bn: Uint256):
+    n: HistoryNetwork, chainId: uint16, bn: UInt256):
     Future[Result[Option[Block], string]] {.async.} =

   # TODO for now checking accumulator only in db, we could also ask our
@@ -1,5 +1,5 @@
 # Nimbus
-# Copyright (c) 2021 Status Research & Development GmbH
+# Copyright (c) 2021-2022 Status Research & Development GmbH
 # Licensed and distributed under either of
 #  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 #  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -9,8 +9,9 @@ import
   eth/p2p/discoveryv5/routing_table,
   stint

-const MID* = u256(2).pow(u256(255))
-const MAX* = high(Uint256)
+const
+  MID* = u256(2).pow(u256(255))
+  MAX* = high(UInt256)

 # Custom distance function described in: https://notes.ethereum.org/h58LZcqqRRuarxx4etOnGQ#Storage-Layout
 # The implementation looks different than in spec, due to the fact that in practice
@@ -34,7 +35,7 @@ func stateDistance*(node_id: UInt256, content_id: UInt256): UInt256 =
       content_id - node_id

   if rawDiff > MID:
     # If rawDiff is larger than mid this means that distance between node_id and
     # content_id is smaller when going from max side.
     MAX - rawDiff + UInt256.one
   else:
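These hunks only restyle the constants; the metric itself is unchanged: ids live on a 2^256 ring and the distance is the shorter way around it. A minimal, self-contained restatement of that idea, assuming nim-stint (names are prefixed with `Demo`/`demo` to avoid clashing with the module's own):

import stint

const
  DemoMid = u256(2).pow(u256(255))
  DemoMax = high(UInt256)

func demoStateDistance(nodeId, contentId: UInt256): UInt256 =
  let rawDiff =
    if nodeId > contentId: nodeId - contentId
    else: contentId - nodeId
  if rawDiff > DemoMid:
    # going the other way around the ring is shorter
    DemoMax - rawDiff + UInt256.one
  else:
    rawDiff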
@@ -39,9 +39,9 @@ type
   MessageKind* = enum
     ping = 0x00
     pong = 0x01
-    findnodes = 0x02
+    findNodes = 0x02
     nodes = 0x03
-    findcontent = 0x04
+    findContent = 0x04
     content = 0x05
     offer = 0x06
     accept = 0x07
@@ -92,12 +92,12 @@ type
       ping*: PingMessage
     of pong:
       pong*: PongMessage
-    of findnodes:
-      findnodes*: FindNodesMessage
+    of findNodes:
+      findNodes*: FindNodesMessage
     of nodes:
       nodes*: NodesMessage
-    of findcontent:
-      findcontent*: FindContentMessage
+    of findContent:
+      findContent*: FindContentMessage
     of content:
       content*: ContentMessage
     of offer:
@@ -114,9 +114,9 @@ type
 template messageKind*(T: typedesc[SomeMessage]): MessageKind =
   when T is PingMessage: ping
   elif T is PongMessage: pong
-  elif T is FindNodesMessage: findnodes
+  elif T is FindNodesMessage: findNodes
   elif T is NodesMessage: nodes
-  elif T is FindContentMessage: findcontent
+  elif T is FindContentMessage: findContent
   elif T is ContentMessage: content
   elif T is OfferMessage: offer
   elif T is AcceptMessage: accept
@@ -136,9 +136,9 @@ func encodeMessage*[T: SomeMessage](m: T): seq[byte] =
   # or we just use SSZ.encode(Message) directly
   when T is PingMessage: SSZ.encode(Message(kind: ping, ping: m))
   elif T is PongMessage: SSZ.encode(Message(kind: pong, pong: m))
-  elif T is FindNodesMessage: SSZ.encode(Message(kind: findnodes, findnodes: m))
+  elif T is FindNodesMessage: SSZ.encode(Message(kind: findNodes, findNodes: m))
   elif T is NodesMessage: SSZ.encode(Message(kind: nodes, nodes: m))
-  elif T is FindContentMessage: SSZ.encode(Message(kind: findcontent, findcontent: m))
+  elif T is FindContentMessage: SSZ.encode(Message(kind: findContent, findContent: m))
   elif T is ContentMessage: SSZ.encode(Message(kind: content, content: m))
   elif T is OfferMessage: SSZ.encode(Message(kind: offer, offer: m))
   elif T is AcceptMessage: SSZ.encode(Message(kind: accept, accept: m))
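The renames above touch a recurring pattern: a case object stands in for the wire-format union, and a typedesc template maps each message type to its discriminator, which is what lets `encodeMessage` build the union generically. A reduced, illustrative sketch of that pattern (these are stand-in names, not the fluffy definitions):

type
  DemoKind = enum
    demoPing = 0x00
    demoPong = 0x01

  DemoPing = object
    enrSeq: uint64

  DemoPong = object
    enrSeq: uint64

  DemoMessage = object
    case kind: DemoKind
    of demoPing:
      ping: DemoPing
    of demoPong:
      pong: DemoPong

template demoKind(T: typedesc): DemoKind =
  # map a message type to its discriminator at compile time
  when T is DemoPing: demoPing
  elif T is DemoPong: demoPong

when isMainModule:
  doAssert demoKind(DemoPong) == demoPong
  let msg = DemoMessage(kind: demoPing, ping: DemoPing(enrSeq: 1))
  doAssert msg.kind == demoPing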
@@ -231,7 +231,7 @@ func neighbours*(p: PortalProtocol, id: NodeId, seenOnly = false): seq[Node] =
 proc inRange(
     p: PortalProtocol,
     nodeId: NodeId,
-    nodeRadius: Uint256,
+    nodeRadius: UInt256,
     contentId: ContentId): bool =
   let distance = p.routingTable.distance(nodeId, contentId)
   distance <= nodeRadius
@@ -313,7 +313,7 @@ proc handleFindContent(
     maxPayloadSize = maxDiscv5PacketSize - talkRespOverhead - contentOverhead
     enrOverhead = 4 # per added ENR, 4 bytes offset overhead

-  let (contentIdOpt, contentOpt) = p.dbGet(p.contentDb, fc.contentKey)
+  let (contentIdOpt, contentOpt) = p.dbGet(p.contentDB, fc.contentKey)
   if contentOpt.isSome():
     let content = contentOpt.get()
     if content.len <= maxPayloadSize:
@@ -406,10 +406,10 @@ proc messageHandler(protocol: TalkProtocol, request: seq[byte],
     case message.kind
     of MessageKind.ping:
       p.handlePing(message.ping, srcId)
-    of MessageKind.findnodes:
+    of MessageKind.findNodes:
       p.handleFindNodes(message.findNodes)
-    of MessageKind.findcontent:
-      p.handleFindContent(message.findcontent, srcId)
+    of MessageKind.findContent:
+      p.handleFindContent(message.findContent, srcId)
     of MessageKind.offer:
       p.handleOffer(message.offer, srcId)
     else:
@@ -489,7 +489,7 @@ proc reqResponse[Request: SomeMessage, Response: SomeMessage](
       labelValues = [$p.protocolId, $messageKind(Request)])

   let talkresp =
-    await talkreq(p.baseProtocol, dst, @(p.protocolId), encodeMessage(request))
+    await talkReq(p.baseProtocol, dst, @(p.protocolId), encodeMessage(request))

   # Note: Failure of `decodeMessage` might also simply mean that the peer is
   # not supporting the specific talk protocol, as according to specification
@@ -560,7 +560,7 @@ proc ping*(p: PortalProtocol, dst: Node):
     Future[PortalResult[PongMessage]] {.async.} =
   let pongResponse = await p.pingImpl(dst)

-  if pongResponse.isOK():
+  if pongResponse.isOk():
     let pong = pongResponse.get()
     # TODO: This should become custom per Portal Network
     let customPayloadDecoded =
@@ -1282,7 +1282,7 @@ proc populateTable(p: PortalProtocol) {.async.} =
 proc revalidateNode*(p: PortalProtocol, n: Node) {.async.} =
   let pong = await p.ping(n)

-  if pong.isOK():
+  if pong.isOk():
     let res = pong.get()
     if res.enrSeq > n.record.seqNum:
       # Request new ENR
fluffy/nim.cfg (new file, 9 lines)
@@ -0,0 +1,9 @@
+-d:"chronicles_runtime_filtering=on"
+-d:"chronicles_disable_thread_id"
+
+-d:chronosStrictException
+--styleCheck:usages
+--styleCheck:hint
+--hint[XDeclaredButNotUsed]:off
+--hint[ConvFromXtoItselfNotNeeded]:off
+--hint[Processing]:off
@@ -113,7 +113,7 @@ proc installDiscoveryApiHandlers*(rpcServer: RpcServer|RpcProxy,
   rpcServer.rpc("discv5_talkReq") do(enr: Record, protocol, payload: string) -> string:
     let
       node = toNodeWithAddress(enr)
-      talkresp = await d.talkreq(
+      talkresp = await d.talkReq(
         node, hexToSeqByte(protocol), hexToSeqByte(payload))
     if talkresp.isErr():
       raise newException(ValueError, $talkresp.error)
@@ -83,7 +83,7 @@ func init*(
     totalDifficulty: encodeQuantity(UInt256.low()),
     gasLimit: encodeQuantity(header.gasLimit.uint64),
     gasUsed: encodeQuantity(header.gasUsed.uint64),
-    timestamp: encodeQuantity(header.timeStamp.toUnix.uint64)
+    timestamp: encodeQuantity(header.timestamp.toUnix.uint64)
   )

   let size = sizeof(BlockHeader) - sizeof(Blob) + header.extraData.len
@@ -265,11 +265,11 @@ proc installEthApiHandlers*(

   rpcServerWithProxy.rpc("eth_getLogs") do(
       filterOptions: FilterOptions) -> seq[FilterLog]:
-    if filterOptions.blockhash.isNone():
+    if filterOptions.blockHash.isNone():
       # Currently only queries by blockhash are supported.
       # To support range queries the Indicies network is required.
       raise newException(ValueError,
-        "Unsupported query: Only `blockhash` queries are currently supported")
+        "Unsupported query: Only `blockHash` queries are currently supported")
     else:
       let hash = filterOptions.blockHash.unsafeGet()
@@ -16,10 +16,10 @@ import

 proc generateNRandomU256(rng: var HmacDrbgContext, n: int): seq[UInt256] =
   var i = 0
-  var res = newSeq[Uint256]()
+  var res = newSeq[UInt256]()
   while i < n:
     let bytes = rng.generateBytes(32)
-    let num = Uint256.fromBytesBE(bytes)
+    let num = UInt256.fromBytesBE(bytes)
     res.add(num)
     inc i
   return res
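A hedged usage sketch for the helper above: `newRng` (from nim-eth, as used elsewhere in this test module) returns a `ref HmacDrbgContext`, hence the dereference when passing it to the `var` parameter:

let rng = newRng()
let randomIds = generateNRandomU256(rng[], 3)
doAssert randomIds.len == 3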
@@ -38,10 +38,10 @@ proc setupTest(rng: ref HmacDrbgContext): Future[TestCase] {.async.} =
   await client.connect(localSrvAddress, Port(localSrvPort), false)
   return TestCase(localDiscovery: localDiscoveryNode, server: rpcHttpServerWithProxy, client: client)

-proc stop(t: TestCase) {.async.} =
-  await t.server.stop()
-  await t.server.closeWait()
-  await t.localDiscovery.closeWait()
+proc stop(testCase: TestCase) {.async.} =
+  await testCase.server.stop()
+  await testCase.server.closeWait()
+  await testCase.localDiscovery.closeWait()

 procSuite "Discovery Rpc":
   let rng = newRng()
@@ -46,6 +46,6 @@ suite "Header Gossip ContentKey Encodings":
       contentKeyDecoded.contentType == contentKey.contentType
       contentKeyDecoded.newBlockHeaderKey == contentKey.newBlockHeaderKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE
@@ -44,7 +44,7 @@ suite "History ContentKey Encodings":
       contentKeyDecoded.contentType == contentKey.contentType
       contentKeyDecoded.blockHeaderKey == contentKey.blockHeaderKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -77,7 +77,7 @@ suite "History ContentKey Encodings":
       contentKeyDecoded.contentType == contentKey.contentType
       contentKeyDecoded.blockBodyKey == contentKey.blockBodyKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -110,7 +110,7 @@ suite "History ContentKey Encodings":
       contentKeyDecoded.contentType == contentKey.contentType
       contentKeyDecoded.receiptsKey == contentKey.receiptsKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -143,7 +143,7 @@ suite "History ContentKey Encodings":
       contentKeyDecoded.contentType == contentKey.contentType
       contentKeyDecoded.epochAccumulatorKey == contentKey.epochAccumulatorKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -173,7 +173,7 @@ suite "History ContentKey Encodings":
       contentKeyDecoded.masterAccumulatorKey.accumulaterKeyType ==
        contentKey.masterAccumulatorKey.accumulaterKeyType

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -210,6 +210,6 @@ suite "History ContentKey Encodings":
       contentKeyDecoded.masterAccumulatorKey.masterHashKey ==
        contentKey.masterAccumulatorKey.masterHashKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE
@@ -44,7 +44,7 @@ proc stop(hn: HistoryNode) {.async.} =
   await hn.discoveryProtocol.closeWait()

 proc containsId(hn: HistoryNode, contentId: ContentId): bool =
-  return hn.historyNetwork.contentDb.get(contentId).isSome()
+  return hn.historyNetwork.contentDB.get(contentId).isSome()

 proc createEmptyHeaders(fromNum: int, toNum: int): seq[BlockHeader] =
   var headers: seq[BlockHeader]
@@ -54,7 +54,7 @@ suite "State ContentKey Encodings":
       contentKeyDecoded.contentType == contentKey.contentType
       contentKeyDecoded.accountTrieNodeKey == contentKey.accountTrieNodeKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -92,7 +92,7 @@ suite "State ContentKey Encodings":
       contentKeyDecoded.contractStorageTrieNodeKey ==
        contentKey.contractStorageTrieNodeKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -124,7 +124,7 @@ suite "State ContentKey Encodings":
       contentKeyDecoded.contentType == contentKey.contentType
       contentKeyDecoded.accountTrieProofKey == contentKey.accountTrieProofKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -160,7 +160,7 @@ suite "State ContentKey Encodings":
       contentKeyDecoded.contractStorageTrieProofKey ==
        contentKey.contractStorageTrieProofKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE

@@ -196,6 +196,6 @@ suite "State ContentKey Encodings":
       contentKeyDecoded.contentType == contentKey.contentType
       contentKeyDecoded.contractBytecodeKey == contentKey.contractBytecodeKey

-      toContentId(contentKey) == parse(contentId, Stuint[256], 10)
+      toContentId(contentKey) == parse(contentId, StUint[256], 10)
       # In stint this does BE hex string
       toContentId(contentKey).toHex() == contentIdHexBE
@@ -27,7 +27,7 @@ suite "State network custom distance function":
       # Additional test cases to check some basic properties
       stateDistance(UInt256.zero, MID + MID) == UInt256.zero
       stateDistance(UInt256.zero, UInt256.one) == stateDistance(UInt256.zero, high(UInt256))

   test "Calculate logarithimic distance":
     check:
       stateLogDistance(u256(0), u256(0)) == 0
@@ -38,7 +38,7 @@ suite "State network custom distance function":
       stateLogDistance(u256(8), u256(0)) == 3
       stateLogDistance(UInt256.zero, MID) == 255
       stateLogDistance(UInt256.zero, MID + UInt256.one) == 254

   test "Calculate id at log distance":
     let logDistances = @[
       0'u16, 1, 2, 3, 4, 5, 6, 7, 8
@@ -49,8 +49,8 @@ suite "State network custom distance function":
     # starting log distances
     let logCalculated = logDistances.map(
       proc (x: uint16): uint16 =
-        let nodeAtDist = stateIdAtDistance(Uint256.zero, x)
-        return stateLogDistance(Uint256.zero, nodeAtDist)
+        let nodeAtDist = stateIdAtDistance(UInt256.zero, x)
+        return stateLogDistance(UInt256.zero, nodeAtDist)
     )

     check:
@@ -23,7 +23,7 @@ proc genesisToTrie(filePath: string): HexaryTrie =
     quit(1)

   var chainDB = newBaseChainDB(
-    newMemoryDb(),
+    newMemoryDB(),
     pruneTrie = false,
     CustomNet,
     cn
@@ -214,10 +214,10 @@ proc createAndOpenFile(dataDir: string, fileName: string): OutputStreamHandle =
   # Creates directory and file, if file already exists
   # program is aborted with info to user, to avoid losing data
   let fileName: string =
-    if not filename.endsWith(".json"):
-      filename & ".json"
+    if not fileName.endsWith(".json"):
+      fileName & ".json"
     else:
-      filename
+      fileName

   let filePath = dataDir / fileName

@@ -285,7 +285,7 @@ proc writeBlocksToJson(config: ExporterConf, client: RpcClient) =
     quit 1

 proc writeBlocksToDb(config: ExporterConf, client: RpcClient) =
-  let db = SeedDb.new(distinctBase(config.dataDir), config.filename)
+  let db = SeedDb.new(distinctBase(config.dataDir), config.fileName)

   defer:
     db.close()
@@ -1,2 +0,0 @@
--d:"chronicles_runtime_filtering=on"
--d:"chronicles_disable_thread_id"
@@ -32,8 +32,8 @@ type
   PortalCmd* = enum
     noCommand
     ping
-    findnodes
-    findcontent
+    findNodes
+    findContent

   PortalCliConf* = object
     logLevel* {.
@@ -124,7 +124,7 @@ type
       argument
       desc: "ENR URI of the node to a send ping message"
       name: "node" .}: Node
-    of findnodes:
+    of findNodes:
      distance* {.
        defaultValue: 255
        desc: "Distance parameter for the findNodes message"
@@ -135,7 +135,7 @@ type
       argument
       desc: "ENR URI of the node to send a findNodes message"
       name: "node" .}: Node
-    of findcontent:
+    of findContent:
      findContentTarget* {.
        argument
        desc: "ENR URI of the node to send a findContent message"
@@ -255,7 +255,7 @@ proc run(config: PortalCliConf) =
       echo pong.get()
     else:
       echo pong.error
-  of findnodes:
+  of findNodes:
     let distances = @[config.distance]
     let nodes = waitFor portal.findNodes(config.findNodesTarget, distances)

@@ -264,7 +264,7 @@ proc run(config: PortalCliConf) =
        echo $node.record & " - " & shortLog(node)
     else:
       echo nodes.error
-  of findcontent:
+  of findContent:
     proc random(T: type UInt256, rng: var HmacDrbgContext): T =
       rng.generate(T)

@@ -282,7 +282,7 @@ proc run(config: PortalCliConf) =
   of noCommand:
     d.start()
     portal.start()
-    waitfor(discover(d))
+    waitFor(discover(d))

 when isMainModule:
   let config = PortalCliConf.load()
lc_proxy/nim.cfg (new file, 9 lines)
@@ -0,0 +1,9 @@
+-d:"chronicles_runtime_filtering=on"
+-d:"chronicles_disable_thread_id"
+
+-d:chronosStrictException
+--styleCheck:usages
+--styleCheck:hint
+--hint[XDeclaredButNotUsed]:off
+--hint[ConvFromXtoItselfNotNeeded]:off
+--hint[Processing]:off
@@ -28,7 +28,7 @@ template encodeQuantity(value: UInt256): HexQuantityStr =

 template encodeHexData(value: UInt256): HexDataStr =
-  hexDataStr("0x" & toBytesBe(value).toHex)
+  hexDataStr("0x" & toBytesBE(value).toHex)

 template encodeQuantity(value: Quantity): HexQuantityStr =
   hexQuantityStr(encodeQuantity(value.uint64))
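For reference, the corrected call chain yields a fixed-width, big-endian hex string; a small sketch of that shape, assuming stint plus stew/byteutils supply `toBytesBE` and `toHex` as in this module's imports:

import std/strutils
import stint, stew/byteutils

let v = u256(255)
let hexData = "0x" & toBytesBE(v).toHex()
doAssert hexData.len == 66        # "0x" plus 32 bytes as 64 hex chars
doAssert hexData.endsWith("ff")   # 255 sits in the least significant byte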
@@ -128,7 +128,7 @@ type
     toBlock*: Option[string] # (optional, default: "latest") integer block number, or "latest" for the last mined block or "pending", "earliest" for not yet mined transactions.
     address*: seq[EthAddress] # (optional) contract address or a list of addresses from which logs should originate.
     topics*: seq[Option[seq[Hash256]]] # (optional) list of DATA topics. Topics are order-dependent. Each topic can also be a list of DATA with "or" options.
-    blockhash*: Option[Hash256] # (optional) hash of the block. If its present, fromBlock and toBlock, should be none. Introduced in EIP234
+    blockHash*: Option[Hash256] # (optional) hash of the block. If its present, fromBlock and toBlock, should be none. Introduced in EIP234

 proc fromJson*(n: JsonNode, argName: string, result: var FilterOptions) =
   proc getOptionString(argName: string): Option[string] =
@@ -223,7 +223,7 @@ proc fromJson*(n: JsonNode, argName: string, result: var FilterOptions) =
       return filterArr

   proc getBlockHash(): Option[Hash256] =
-    let s = getOptionString("blockhash")
+    let s = getOptionString("blockHash")
     if s.isNone():
       return none[Hash256]()
     else:
@@ -233,7 +233,7 @@ proc fromJson*(n: JsonNode, argName: string, result: var FilterOptions) =
         hexToByteArray(strHash, hash.data)
         return some(hash)
       else:
-        let msg = "Invalid 'blockhash'. Expected 32byte hex string"
+        let msg = "Invalid 'blockHash'. Expected 32byte hex string"
         raise newException(ValueError, msg)

   n.kind.expect(JObject, argName)
@@ -252,4 +252,4 @@ proc fromJson*(n: JsonNode, argName: string, result: var FilterOptions) =
   result.toBlock = toBlock
   result.address = getAddress()
   result.topics = getTopics()
-  result.blockhash = blockHash
+  result.blockHash = blockHash