rm unused code in {ncli,research,tests}/ (#5809)

tersec 2024-01-21 06:55:03 +00:00 committed by GitHub
parent d669eef97b
commit 7fd8beb418
15 changed files with 69 additions and 120 deletions

View File

@@ -64,48 +64,6 @@ func init*(T: type ValidatorDbAggregator, outputDir: string,
     participationEpochsCount: newSeqOfCap[uint](initialCapacity),
     inclusionDelaysCount: newSeqOfCap[uint](initialCapacity))
 
-var shouldShutDown = false
-
-proc determineStartAndEndEpochs(config: AggregatorConf):
-    tuple[startEpoch, endEpoch: Epoch] =
-  if config.startEpoch.isNone or config.endEpoch.isNone:
-    (result.startEpoch, result.endEpoch) = getUnaggregatedFilesEpochRange(
-      config.inputDir.string)
-  if config.startEpoch.isSome:
-    result.startEpoch = config.startEpoch.get.Epoch
-  if config.endEpoch.isSome:
-    result.endEpoch = config.endEpoch.get.Epoch
-  if result.startEpoch > result.endEpoch:
-    fatal "Start epoch cannot be bigger than the end epoch.",
-      startEpoch = result.startEpoch, endEpoch = result.endEpoch
-    quit QuitFailure
-
-proc checkIntegrity(startEpoch, endEpoch: Epoch, dir: string) =
-  for epoch in startEpoch .. endEpoch:
-    let filePath = getFilePathForEpoch(epoch, dir)
-    if not filePath.fileExists:
-      fatal "File for epoch does not exist.", epoch = epoch, filePath = filePath
-      quit QuitFailure
-
-func parseRow(csvRow: CsvRow): RewardsAndPenalties =
-  result = RewardsAndPenalties(
-    source_outcome: parseBiggestInt(csvRow[0]),
-    max_source_reward: parseBiggestUInt(csvRow[1]),
-    target_outcome: parseBiggestInt(csvRow[2]),
-    max_target_reward: parseBiggestUInt(csvRow[3]),
-    head_outcome: parseBiggestInt(csvRow[4]),
-    max_head_reward: parseBiggestUInt(csvRow[5]),
-    inclusion_delay_outcome: parseBiggestInt(csvRow[6]),
-    max_inclusion_delay_reward: parseBiggestUInt(csvRow[7]),
-    sync_committee_outcome: parseBiggestInt(csvRow[8]),
-    max_sync_committee_reward: parseBiggestUInt(csvRow[9]),
-    proposer_outcome: parseBiggestInt(csvRow[10]),
-    inactivity_penalty: parseBiggestUInt(csvRow[11]),
-    slashing_outcome: parseBiggestInt(csvRow[12]),
-    deposits: parseBiggestUInt(csvRow[13]))
-  if csvRow[14].len > 0:
-    result.inclusion_delay = some(parseBiggestUInt(csvRow[14]))
-
 func `+=`(lhs: var RewardsAndPenalties, rhs: RewardsAndPenalties) =
   lhs.source_outcome += rhs.source_outcome
   lhs.max_source_reward += rhs.max_source_reward
@@ -205,6 +163,48 @@ when isMainModule:
   when defined(posix):
     import system/ansi_c
 
+  var shouldShutDown = false
+
+  proc determineStartAndEndEpochs(config: AggregatorConf):
+      tuple[startEpoch, endEpoch: Epoch] =
+    if config.startEpoch.isNone or config.endEpoch.isNone:
+      (result.startEpoch, result.endEpoch) = getUnaggregatedFilesEpochRange(
+        config.inputDir.string)
+    if config.startEpoch.isSome:
+      result.startEpoch = config.startEpoch.get.Epoch
+    if config.endEpoch.isSome:
+      result.endEpoch = config.endEpoch.get.Epoch
+    if result.startEpoch > result.endEpoch:
+      fatal "Start epoch cannot be bigger than the end epoch.",
+        startEpoch = result.startEpoch, endEpoch = result.endEpoch
+      quit QuitFailure
+
+  proc checkIntegrity(startEpoch, endEpoch: Epoch, dir: string) =
+    for epoch in startEpoch .. endEpoch:
+      let filePath = getFilePathForEpoch(epoch, dir)
+      if not filePath.fileExists:
+        fatal "File for epoch does not exist.", epoch = epoch, filePath = filePath
+        quit QuitFailure
+
+  func parseRow(csvRow: CsvRow): RewardsAndPenalties =
+    result = RewardsAndPenalties(
+      source_outcome: parseBiggestInt(csvRow[0]),
+      max_source_reward: parseBiggestUInt(csvRow[1]),
+      target_outcome: parseBiggestInt(csvRow[2]),
+      max_target_reward: parseBiggestUInt(csvRow[3]),
+      head_outcome: parseBiggestInt(csvRow[4]),
+      max_head_reward: parseBiggestUInt(csvRow[5]),
+      inclusion_delay_outcome: parseBiggestInt(csvRow[6]),
+      max_inclusion_delay_reward: parseBiggestUInt(csvRow[7]),
+      sync_committee_outcome: parseBiggestInt(csvRow[8]),
+      max_sync_committee_reward: parseBiggestUInt(csvRow[9]),
+      proposer_outcome: parseBiggestInt(csvRow[10]),
+      inactivity_penalty: parseBiggestUInt(csvRow[11]),
+      slashing_outcome: parseBiggestInt(csvRow[12]),
+      deposits: parseBiggestUInt(csvRow[13]))
+    if csvRow[14].len > 0:
+      result.inclusion_delay = some(parseBiggestUInt(csvRow[14]))
+
   proc aggregateEpochs(startEpoch, endEpoch: Epoch, resolution: uint,
                        inputDir, outputDir: string) =
     if startEpoch > endEpoch:

View File

@@ -49,9 +49,6 @@ type Timers = enum
   tSyncCommittees = "Produce sync committee actions"
   tReplay = "Replay all produced blocks"
 
-template seconds(x: uint64): timer.Duration =
-  timer.seconds(int(x))
-
 # TODO The rest of nimbus-eth2 uses only the forked version of these, and in
 # general it's better for the validator_duties caller to use the forkedstate
 # version, so isolate these here pending refactoring of block_sim to prefer,

View File

@@ -132,11 +132,6 @@ proc block_for_next_slot(
     addTestBlock(
       forked, cache, attestations = attestations, cfg = cfg)
 
-let full_sync_committee_bits = block:
-  var res: BitArray[SYNC_COMMITTEE_SIZE]
-  res.bytes.fill(byte.high)
-  res
-
 # https://github.com/ethereum/consensus-specs/blob/v1.4.0-beta.5/specs/altair/light-client/sync-protocol.md#initialize_light_client_store
 func initialize_light_client_store(
     state: auto, storeDataFork: static LightClientDataFork): auto =

View File

@@ -125,7 +125,7 @@ proc runTest(suiteName, path: string) =
   test "Light client - Sync - " & path.relativePath(SszTestsDir):
     # Reduce stack size by making this a `proc`
     proc loadTestMeta(): (RuntimeConfig, TestMeta) =
-      let (cfg, unknowns) = readRuntimeConfig(path/"config.yaml")
+      let (cfg, _) = readRuntimeConfig(path/"config.yaml")
 
       when false:
         # TODO evaluate whether this is useful and if so, fix it
@@ -265,5 +265,4 @@ suite "EF - Light client - Sync" & preset():
     if kind != pcDir or not dirExists(basePath):
       continue
     for kind, path in walkDir(basePath, relative = true, checkDir = true):
-      let combinedPath = basePath/path
       runTest(suiteName, basePath/path)

View File

@@ -60,8 +60,6 @@ suite "BlockSlot and helpers":
       s4 = BlockRef(bid: BlockId(slot: Slot(4)), parent: s2)
       se1 = BlockRef(bid:
         BlockId(slot: Epoch(1).start_slot()), parent: s2)
-      se2 = BlockRef(bid:
-        BlockId(slot: Epoch(2).start_slot()), parent: se1)
 
     check:
       s0.atSlot(Slot(0)).blck == s0

View File

@@ -40,13 +40,11 @@ suite "Block processor" & preset():
       validatorMonitor = newClone(ValidatorMonitor.init())
       dag = init(ChainDAGRef, defaultRuntimeConfig, db, validatorMonitor, {})
       taskpool = Taskpool.new()
-      verifier = BatchVerifier.init(rng, taskpool)
       quarantine = newClone(Quarantine.init())
       blobQuarantine = newClone(BlobQuarantine())
       attestationPool = newClone(AttestationPool.init(dag, quarantine))
       elManager = new ELManager # TODO: initialise this properly
       actionTracker: ActionTracker
-      keymanagerHost: ref KeymanagerHost
       consensusManager = ConsensusManager.new(
         dag, attestationPool, quarantine, elManager, actionTracker,
         newClone(DynamicFeeRecipientsStore.init()), "",

View File

@@ -213,9 +213,8 @@ suite "Block pool processing" & preset():
     assign(state[], dag.epochRefState)
 
-    let
-      bnext = addTestBlock(state[], cache).phase0Data
-      bnextAdd = dag.addHeadBlock(verifier, bnext, nilPhase0Callback)
+    let bnext = addTestBlock(state[], cache).phase0Data
+    discard dag.addHeadBlock(verifier, bnext, nilPhase0Callback)
 
     check:
       # Getting an EpochRef should not result in states being stored
@@ -952,9 +951,7 @@ suite "Backfill":
       dag2.backfill == blocks[^2].phase0Data.message.toBeaconBlockSummary()
 
   test "Init without genesis / block":
-    let
-      tailBlock = blocks[^1]
-      genBlock = get_initial_beacon_block(genState[])
+    let genBlock = get_initial_beacon_block(genState[])
 
     ChainDAGRef.preInit(db, tailState[])

View File

@@ -22,7 +22,6 @@ const ROOT = "342cecb5a18945fbbda7c62ede3016f3"
 template databaseRoot: string = getTempDir().joinPath(ROOT)
 template key1: array[1, byte] = [byte(kOldDepositContractSnapshot)]
-template key2: array[1, byte] = [byte(kDepositTreeSnapshot)]
 
 type
   DepositSnapshotUpgradeProc = proc(old: OldDepositContractSnapshot): DepositTreeSnapshot
@@ -157,7 +156,6 @@ suite "DepositTreeSnapshot":
     inspectDCS(snapshot, 11052984)
 
   test "depositCount":
-    let now = getTime()
     var rand = initRand(12345678)
     for i in 1..1000:
       let n = rand.next()

View File

@@ -328,7 +328,7 @@ suite "Gossip validation - Altair":
     let
       (subcommitteeIdx, indexInSubcommittee) =
         dag.getFirstAggregator(signatureSlot)
-      (validator, expectedCount, msg) = dag.getSyncCommitteeMessage(
+      (validator, _, msg) = dag.getSyncCommitteeMessage(
         slot, subcommitteeIdx, indexInSubcommittee,
         signatureSlot = Opt.some(signatureSlot))
       msgVerdict = waitFor dag.validateSyncCommitteeMessage(

View File

@@ -46,7 +46,7 @@ suite "Spec helpers":
   proc process(anchor: object, index: GeneralizedIndex) =
     var i = index
-    anchor.enumInstanceSerializedFields(fieldNameVar, fieldVar):
+    anchor.enumInstanceSerializedFields(_, fieldVar):
       let depth = log2trunc(i)
       var proof = newSeq[Eth2Digest](depth)
       state.build_proof(i, proof).get

View File

@@ -336,7 +336,6 @@ const
   secretBytes = hexToSeqByte "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"
   salt = hexToSeqByte "d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"
   iv = hexToSeqByte "264daa3f303d7259501c93d997d84fe6"
-  secretNetBytes = hexToSeqByte "08021220fe442379443d6e2d7d75d3a58f96fbb35f0a9c7217796825fc9040e3b89c5736"
 
 proc listLocalValidators(validatorsDir,
                          secretsDir: string): seq[ValidatorPubKey] {.
@@ -746,7 +745,7 @@ proc runTests(keymanager: KeymanagerToTest) {.async.} =
         responseJson["message"].getStr() == InvalidAuthorizationError
 
       expect RestError:
-        let keystores = await client.listKeys("Invalid Token")
+        discard await client.listKeys("Invalid Token")
 
   suite "ImportKeystores requests" & testFlavour:
     asyncTest "ImportKeystores/ListKeystores/DeleteKeystores" & testFlavour:
@@ -936,7 +935,7 @@ proc runTests(keymanager: KeymanagerToTest) {.async.} =
         responseJson["message"].getStr() == InvalidAuthorizationError
 
       expect RestError:
-        let keystores = await client.listKeys("Invalid Token")
+        discard await client.listKeys("Invalid Token")
 
   suite "Fee recipient management" & testFlavour:
     asyncTest "Missing Authorization header" & testFlavour:

View File

@@ -60,39 +60,6 @@ const
     "version": 4
   }"""
 
-  scryptVector2 = """
-  {
-    "crypto": {
-      "kdf": {
-        "function": "scrypt",
-        "params": {
-          "dklen": 32,
-          "n": 262144,
-          "p": 1,
-          "r": 8,
-          "salt": "d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"
-        },
-        "message": ""
-      },
-      "checksum": {
-        "function": "sha256",
-        "params": {},
-        "message": "d2217fe5f3e9a1e34581ef8a78f7c9928e436d36dacc5e846690a5581e8ea484"
-      },
-      "cipher": {
-        "function": "aes-128-ctr",
-        "params": {
-          "iv": "264daa3f303d7259501c93d997d84fe6"
-        },
-        "message": "06ae90d55fe0a6e9c5c3bc5b170827b2e5cce3929ed3f116c2811e6366dfe20f"
-      }
-    },
-    "pubkey": "9612d7a727c9d0a22e185a1c768478dfe919cada9266988cb32359c11f2b7b27f4ae4040902382ae2910c15e2b420d07",
-    "path": "m/12381/60/3141592653/589793238",
-    "uuid": "1d85ae20-35c5-4611-98e8-aa14a633906f",
-    "version": 4
-  }"""
-
   pbkdf2Vector = """
   {
     "crypto": {

View File

@@ -561,8 +561,9 @@ suite "createValidatorFiles()":
       # Creating `secrets` dir with `UserRead` permissions before
       # calling `createValidatorFiles` which should result in problem
       # with creating a secret file inside the dir:
+      discard createPath(testSecretsDir, 0o400)
+
       let
-        secretsDirNoPermissions = createPath(testSecretsDir, 0o400)
         res = createLocalValidatorFiles(testSecretsDir, testValidatorsDir,
                                         keystoreDir,
                                         secretFile, password,
@@ -582,8 +583,9 @@
       # Creating `validators` dir with `UserRead` permissions before
       # calling `createValidatorFiles` which should result in problems
       # creating `keystoreDir` inside the dir.
+      discard createPath(testValidatorsDir, 0o400)
+
       let
-        validatorsDirNoPermissions = createPath(testValidatorsDir, 0o400)
         res = createLocalValidatorFiles(testSecretsDir, testValidatorsDir,
                                         keystoreDir,
                                         secretFile, password,
@@ -603,9 +605,10 @@
       # Creating `keystore` dir with `UserRead` permissions before
      # calling `createValidatorFiles` which should result in problems
       # creating keystore file inside this dir:
+      discard createPath(testValidatorsDir, 0o700)
+      discard createPath(keystoreDir, 0o400)
+
       let
-        validatorsDir = createPath(testValidatorsDir, 0o700)
-        keystoreDirNoPermissions = createPath(keystoreDir, 0o400)
         res = createLocalValidatorFiles(testSecretsDir, testValidatorsDir,
                                         keystoreDir,
                                         secretFile, password,
@@ -622,8 +625,7 @@
   test "`createValidatorFiles` with already existing dirs and any error":
     # Generate deposits so we have files and dirs already existing
     # before testing `createValidatorFiles` failure
-    let
-      deposits = generateDeposits(
+    discard generateDeposits(
       cfg,
       rng[],
       seed,
@@ -631,13 +633,15 @@
       testValidatorsDir,
       testSecretsDir)
 
     let
       validatorsCountBefore = directoryItemsCount(testValidatorsDir)
       secretsCountBefore = directoryItemsCount(testSecretsDir)
 
     # Creating `keystore` dir with `UserRead` permissions before calling
     # `createValidatorFiles` which will result in error
-      keystoreDirNoPermissions = createPath(keystoreDir, 0o400)
+    discard createPath(keystoreDir, 0o400)
+
+    let
       res = createLocalValidatorFiles(testSecretsDir, testValidatorsDir,
                                       keystoreDir,
                                       secretFile, password,

View File

@@ -176,7 +176,6 @@ suite "Message signatures":
   test "Sync committee message signatures":
     let
       slot = default(Slot)
-      epoch = slot.epoch
       block_root = default(Eth2Digest)
 
     check:

View File

@@ -765,8 +765,6 @@ suite "Validator Client test suite":
         score == vector[5]
 
   test "getUniqueVotes() test vectors":
-    var data = CommitteeValidatorsBits.init(16)
-
     for vector in AttestationBitsVectors:
       let
         a1 = Attestation.init(vector[0][0][0], vector[0][0][1], vector[0][0][2])