Mirror of https://github.com/status-im/nimbus-eth2.git, synced 2025-01-10 14:26:26 +00:00

rm unused code (#5538)
This commit is contained in:
parent 173582ad70
commit 556d5e7114

@@ -553,8 +553,6 @@ func init*(
dag.putShufflingRef(tmp)
tmp

attester_dependent_root = withState(state):
forkyState.attester_dependent_root
total_active_balance = withState(state):
get_total_active_balance(forkyState.data, cache)
epochRef = EpochRef(

@@ -2421,7 +2419,6 @@ proc updateHead*(

if not(isNil(dag.onHeadChanged)):
let
currentEpoch = epoch(newHead.slot)
depRoot = withState(dag.headState): forkyState.proposer_dependent_root
prevDepRoot = withState(dag.headState):
forkyState.attester_dependent_root

@@ -2733,7 +2730,6 @@ proc rebuildIndex*(dag: ChainDAGRef) =
if state_root.isZero:
# If we can find an era file with this state, use it as an alternative
# starting point - ignore failures for now
var bytes: seq[byte]
if dag.era.getState(
historicalRoots, historicalSummaries, slot, state[]).isOk():
state_root = getStateRoot(state[])

@@ -723,11 +723,7 @@ proc initLightClientDataCache*(dag: ChainDAGRef) =
blocks.add bid

# Process blocks (reuses `dag.headState`, but restores it to the current head)
var
tmpState = assignClone(dag.headState)
tmpCache, cache: StateCache
oldCheckpoint: Checkpoint
cpIndex = 0
var cache: StateCache
for i in countdown(blocks.high, blocks.low):
bid = blocks[i]
if not dag.updateExistingState(

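The hunk above appears to collapse a block of temporaries (tmpState, tmpCache, oldCheckpoint, cpIndex) that were no longer referenced into the single `var cache: StateCache` declaration that remains. As a minimal, illustrative sketch (not taken from this commit), the Nim compiler flags such leftovers with a hint, which is what makes this kind of cleanup easy to spot:

proc demo() =
  var leftoverTemp = 0   # Hint: 'leftoverTemp' is declared but not used [XDeclaredButNotUsed]
  var cacheHits = 0      # still referenced below, so no hint
  inc cacheHits
  echo cacheHits

demo()
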
@@ -71,9 +71,6 @@ func unknownRoleMsg(role: string): string =
template raiseError(reader: var TomlReader, msg: string) =
raiseTomlErr(reader.lex, msg)

template raiseError(reader: var JsonReader, msg: string) =
raiseTomlErr(reader.lex, msg)

proc readValue*(reader: var TomlReader, value: var EngineApiRoles)
{.raises: [SerializationError, IOError].} =
let roles = reader.readValue seq[string]

@@ -96,7 +96,6 @@ const
type
Eth1BlockNumber* = uint64
Eth1BlockTimestamp* = uint64
Eth1BlockHeader = engine_api.BlockHeader

Eth1Block* = ref object
hash*: Eth2Digest

@@ -393,9 +392,6 @@ template trackedRequestWithTimeout[T](connection: ELConnection,
template cfg(m: ELManager): auto =
m.eth1Chain.cfg

template db(m: ELManager): BeaconChainDB =
m.eth1Chain.db

func hasJwtSecret*(m: ELManager): bool =
for c in m.elConnections:
if c.engineUrl.jwtSecret.isSome:

@@ -409,12 +405,6 @@ func isSynced*(m: ELManager): bool =
template eth1ChainBlocks*(m: ELManager): Deque[Eth1Block] =
m.eth1Chain.blocks

template finalizedDepositsMerkleizer(m: ELManager): auto =
m.eth1Chain.finalizedDepositsMerkleizer

template headMerkleizer(m: ELManager): auto =
m.eth1Chain.headMerkleizer

template toGaugeValue(x: Quantity): int64 =
toGaugeValue(distinctBase x)

@@ -885,15 +875,6 @@ template EngineApiResponseType*(T: type capella.ExecutionPayloadForSigning): typ
template EngineApiResponseType*(T: type deneb.ExecutionPayloadForSigning): type =
engine_api.GetPayloadV3Response

template payload(response: engine_api.ExecutionPayloadV1): engine_api.ExecutionPayloadV1 =
response

template payload(response: engine_api.GetPayloadV2Response): engine_api.ExecutionPayloadV1OrV2 =
response.executionPayload

template payload(response: engine_api.GetPayloadV3Response): engine_api.ExecutionPayloadV3 =
response.executionPayload

template toEngineWithdrawals*(withdrawals: seq[capella.Withdrawal]): seq[WithdrawalV1] =
mapIt(withdrawals, toEngineWithdrawal(it))

@@ -1810,10 +1791,6 @@ func new*(T: type ELConnection,
engineUrl: engineUrl,
depositContractSyncStatus: DepositContractSyncStatus.unknown)

template getOrDefault[T, E](r: Result[T, E]): T =
type TT = T
get(r, default(TT))

proc init*(T: type Eth1Chain,
cfg: RuntimeConfig,
db: BeaconChainDB,

@@ -2017,12 +1994,6 @@ proc syncBlockRange(m: ELManager,
blockNumber = lastBlock.number,
depositsProcessed = lastBlock.depositCount

func init(T: type FullBlockId, blk: Eth1BlockHeader|BlockObject): T =
FullBlockId(number: Eth1BlockNumber blk.number, hash: blk.hash)

func isNewLastBlock(m: ELManager, blk: Eth1BlockHeader|BlockObject): bool =
m.latestEth1Block.isNone or blk.number.uint64 > m.latestEth1BlockNumber

func hasConnection*(m: ELManager): bool =
m.elConnections.len > 0

@@ -2121,7 +2092,6 @@ proc syncEth1Chain(m: ELManager, connection: ELConnection) {.async.} =

debug "Starting Eth1 syncing", `from` = shortLog(m.eth1Chain.blocks[^1])

var didPollOnce = false
while true:
debug "syncEth1Chain tick"

@@ -2182,7 +2152,7 @@ proc startChainSyncingLoop(m: ELManager) {.async.} =
continue

await syncEth1Chain(m, syncedConnectionFut.read)
except CatchableError as err:
except CatchableError:
await sleepAsync(10.seconds)

# A more detailed error is already logged by trackEngineApiRequest

@@ -2238,17 +2208,17 @@ proc testWeb3Provider*(web3Url: Uri,
stdout.write "\n"
res

let
chainId = request "Chain ID":
web3.provider.eth_chainId()
discard request "Chain ID":
web3.provider.eth_chainId()

discard request "Sync status":
web3.provider.eth_syncing()

let
latestBlock = request "Latest block":
web3.provider.eth_getBlockByNumber(blockId("latest"), false)

syncStatus = request "Sync status":
web3.provider.eth_syncing()

ns = web3.contractSender(DepositContract, depositContractAddress)

depositRoot = request "Deposit root":
ns.get_deposit_root.call(blockNumber = latestBlock.number.uint64)
discard request "Deposit root":
ns.get_deposit_root.call(blockNumber = latestBlock.number.uint64)

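In the hunk above, results that were bound to names but never read afterwards (chainId, syncStatus, depositRoot) are now discarded explicitly. As an illustrative sketch (the names below are made up, not from the repo), Nim's `discard` statement documents that only the side effect of an expression matters:

proc queryChainId(): int =
  # Pretend RPC call whose printed side effect is what we care about.
  echo "Chain ID: 1"
  1

# Before: binding the result created an unused variable.
#   let chainId = queryChainId()
# After: the value is intentionally ignored.
discard queryChainId()
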
@@ -316,7 +316,7 @@ proc getBlock*(
readSszBytes(tmp, result.get(), updateRoot = root.isNone)
if root.isSome():
result.get().root = root.get()
except CatchableError as exc:
except CatchableError:
result.err()

proc getStateSZ*(

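This hunk, like the similar ones elsewhere in this commit, drops the `as exc`/`as err` binding from `except` branches whose handler never inspects the exception value. A minimal illustrative sketch (not from the repo):

proc riskyCall() =
  raise newException(ValueError, "boom")

try:
  riskyCall()
except CatchableError:   # previously `except CatchableError as exc:`; `exc` was never read
  echo "recovered without inspecting the exception"
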
@@ -1887,9 +1887,6 @@ proc new(T: type Eth2Node,

node

template publicKey(node: Eth2Node): keys.PublicKey =
node.discovery.privKey.toPublicKey

proc startListening*(node: Eth2Node) {.async.} =
if node.discoveryEnabled:
try:

@@ -2173,14 +2170,6 @@ proc peerTrimmerHeartbeat(node: Eth2Node) {.async.} =
func asEthKey*(key: PrivateKey): keys.PrivateKey =
keys.PrivateKey(key.skkey)

proc initAddress(T: type MultiAddress, str: string): T =
let address = MultiAddress.init(str)
if IPFS.match(address) and matchPartial(multiaddress.TCP, address):
result = address
else:
raise newException(MultiAddressError,
"Invalid bootstrap node multi-address")

template tcpEndPoint(address, port): auto =
MultiAddress.init(address, tcpProtocol, port)

@@ -474,7 +474,7 @@ when const_preset in ["mainnet", "gnosis"]:
toOpenArray(metadata.genesis.bakedBytes, 0, sizeof(BeaconStateHeader) - 1),
BeaconStateHeader)
Opt.some header.genesis_validators_root
except SerializationError as err:
except SerializationError:
raiseAssert "Invalid baken-in genesis state"
else:
Opt.none Eth2Digest

@@ -1102,20 +1102,6 @@ proc initialize_beacon_state_from_eth1*(
# TODO https://github.com/nim-lang/Nim/issues/19094
# state

proc initialize_hashed_beacon_state_from_eth1(
cfg: RuntimeConfig,
eth1_block_hash: Eth2Digest,
eth1_timestamp: uint64,
deposits: openArray[DepositData],
execution_payload_header: ForkyExecutionPayloadHeader,
flags: UpdateFlags = {}): auto =
# TODO https://github.com/nim-lang/Nim/issues/19094
result = initHashedBeaconState(
initialize_beacon_state_from_eth1(
cfg, eth1_block_hash, eth1_timestamp, deposits,
execution_payload_header, flags))
result.root = hash_tree_root(result.data)

# https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.1/specs/altair/fork.md#upgrading-the-state
func translate_participation(
state: var altair.BeaconState,

@@ -646,11 +646,11 @@ template forky(
template withEpochInfo*(x: ForkedEpochInfo, body: untyped): untyped =
case x.kind
of EpochInfoFork.Phase0:
const infoFork {.inject.} = EpochInfoFork.Phase0
const infoFork {.inject, used.} = EpochInfoFork.Phase0
template info: untyped {.inject.} = x.phase0Data
body
of EpochInfoFork.Altair:
const infoFork {.inject.} = EpochInfoFork.Altair
const infoFork {.inject, used.} = EpochInfoFork.Altair
template info: untyped {.inject.} = x.altairData
body

@@ -797,11 +797,11 @@ template withBlck*(
case x.kind
of ConsensusFork.Phase0:
const consensusFork {.inject, used.} = ConsensusFork.Phase0
template forkyBlck: untyped {.inject.} = x.phase0Data
template forkyBlck: untyped {.inject, used.} = x.phase0Data
body
of ConsensusFork.Altair:
const consensusFork {.inject, used.} = ConsensusFork.Altair
template forkyBlck: untyped {.inject.} = x.altairData
template forkyBlck: untyped {.inject, used.} = x.altairData
body
of ConsensusFork.Bellatrix:
const consensusFork {.inject, used.} = ConsensusFork.Bellatrix

@@ -809,11 +809,11 @@ template withBlck*(
body
of ConsensusFork.Capella:
const consensusFork {.inject, used.} = ConsensusFork.Capella
template forkyBlck: untyped {.inject.} = x.capellaData
template forkyBlck: untyped {.inject, used.} = x.capellaData
body
of ConsensusFork.Deneb:
const consensusFork {.inject, used.} = ConsensusFork.Deneb
template forkyBlck: untyped {.inject.} = x.denebData
template forkyBlck: untyped {.inject, used.} = x.denebData
body

func proposer_index*(x: ForkedBeaconBlock): uint64 =

@@ -899,8 +899,8 @@ template withStateAndBlck*(
body
of ConsensusFork.Phase0:
const consensusFork {.inject.} = ConsensusFork.Phase0
template forkyState: untyped {.inject.} = s.phase0Data
template forkyBlck: untyped {.inject.} = b.phase0Data
template forkyState: untyped {.inject, used.} = s.phase0Data
template forkyBlck: untyped {.inject, used.} = b.phase0Data
body

func toBeaconBlockHeader*(

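The hunks above add the `{.used.}` pragma to symbols that these templates inject into the caller's scope. Because the caller-supplied `body` does not always reference every injected symbol, the pragma suppresses the per-instantiation "declared but not used" hints. A minimal, self-contained sketch of the pattern (not from the repo):

template withAnswer(body: untyped) =
  # `inject` makes `answer` visible to `body`; `used` silences the hint
  # in instantiations whose body never mentions it.
  const answer {.inject, used.} = 42
  body

withAnswer:
  echo "this body ignores `answer`, yet compiles without hints"

withAnswer:
  echo "the answer is ", answer
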
@@ -1021,10 +1021,7 @@ proc createLocalValidatorFiles*(
encodedStorage: string
): Result[void, KeystoreGenerationError] {.raises: [].} =

var
success = false # becomes true when everything is created successfully
cleanupSecretsDir = true # becomes false if secretsDir already existed
cleanupValidatorsDir = true # becomes false if validatorsDir already existed
var success = false # becomes true when everything is created successfully

# secretsDir:
let secretsDirExisted: bool = dirExists(secretsDir)

@@ -1068,10 +1065,7 @@ proc createLockedLocalValidatorFiles(
encodedStorage: string
): Result[FileLockHandle, KeystoreGenerationError] {.raises: [].} =

var
success = false # becomes true when everything is created successfully
cleanupSecretsDir = true # becomes false if secretsDir already existed
cleanupValidatorsDir = true # becomes false if validatorsDir already existed
var success = false # becomes true when everything is created successfully

# secretsDir:
let secretsDirExisted: bool = dirExists(secretsDir)

@@ -10,19 +10,6 @@ import

from stew/io2 import IoErrorCode

const
LogTraceName = "Beacon-Chain LogTrace Tool"
LogTraceMajor: int = 0
LogTraceMinor: int = 0
LogTracePatch: int = 4
LogTraceVersion = $LogTraceMajor & "." & $LogTraceMinor & "." &
$LogTracePatch
LogTraceCopyright = "Copyright(C) 2021-2023" &
" Status Research & Development GmbH"
LogTraceHeader = LogTraceName & ", Version " & LogTraceVersion &
" [" & hostOS & ": " & hostCPU & "]\r\n" &
LogTraceCopyright & "\r\n"

type
StartUpCommand* {.pure.} = enum
pubsub, asl, asr, aggasr, scmsr, csr, lat, traceAll, localSimChecks

@@ -314,9 +301,6 @@ proc print(r: FileReport) =
template fatal(issuesGroup: IssuesGroup, msg: string) =
issuesGroup.fatalIssues.add msg

template warning(issuesGroup: IssuesGroup, msg: string) =
issuesGroup.warnings.add msg

proc new(T: type IssuesGroup, name: string): T =
T(name: name)

@@ -466,56 +450,6 @@ proc readLogFileForASRMessages(file: string, srnode: var SRANode,
finally:
stream.close()

proc readLogFileForSCMSendMessages(file: string,
ignoreErrors = true,
dumpErrors = false): seq[SlotMessage] =
var res = newSeq[SlotMessage]()
var stream = newFileStream(file)
var line: string
var counter = 0
try:
while not(stream.atEnd()):
line = stream.readLine()
inc(counter)
var m: LogMessage
try:
m = Json.decode(line, LogMessage, allowUnknownFields = true)
except SerializationError as exc:
if dumpErrors:
error "Serialization error while reading file, ignoring", file = file,
line_number = counter, errorMsg = exc.formatMsg(line)
else:
error "Serialization error while reading file, ignoring", file = file,
line_number = counter
if not(ignoreErrors):
raise exc
else:
continue

if m.msg == "Sync committee message sent":
let scmm = Json.decode(line, SCMSentMessage,
allowUnknownFields = true)
let m = SlotMessage(kind: SMessageType.SCMSent,
scmsmsg: scmm)
res.add(m)
elif m.msg == "Slot start":
let sm = Json.decode(line, SlotStartMessage,
allowUnknownFields = true)
let m = SlotMessage(kind: SMessageType.SlotStart,
ssmsg: sm)
res.add(m)

if counter mod 10_000 == 0:
info "Processing file", file = extractFilename(file),
lines_processed = counter,
lines_filtered = len(res)
result = res

except CatchableError as exc:
warn "Error reading data from file", file = file, errorMsg = exc.msg
finally:
stream.close()

proc readLogFileForSCMSRMessages(file: string, srnode: var SRSCNode,
ignoreErrors = true, dumpErrors = false) =
var stream = newFileStream(file)

@@ -1191,6 +1125,19 @@ proc run*(conf: LogTraceConf) =
quit ord(issuesDetected)

when isMainModule:
const
LogTraceName = "Beacon-Chain LogTrace Tool"
LogTraceMajor: int = 0
LogTraceMinor: int = 0
LogTracePatch: int = 4
LogTraceVersion = $LogTraceMajor & "." & $LogTraceMinor & "." &
$LogTracePatch
LogTraceCopyright = "Copyright(C) 2021-2023" &
" Status Research & Development GmbH"
LogTraceHeader = LogTraceName & ", Version " & LogTraceVersion &
" [" & hostOS & ": " & hostCPU & "]\r\n" &
LogTraceCopyright & "\r\n"

echo LogTraceHeader
var conf = LogTraceConf.load(version = LogTraceVersion)
run(conf)

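Together with the earlier logtrace hunk that deletes these constants from module scope, this hunk moves them under `when isMainModule:`, so they are only compiled when the file is built as the CLI entry point rather than imported elsewhere. A minimal illustrative sketch of the pattern (names are made up):

proc run(toolName: string) =
  echo "running ", toolName

when isMainModule:
  # Only compiled when this file is the main program.
  const ToolName = "demo-tool"
  run(ToolName)
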