reduce `nim-eth` dependencies just for RNG (#5099)
We have several modules that import `nim-eth` for the sole purpose of using its `keys.newRng` function. That function is by now a thin wrapper around `nim-bearssl`'s `HmacDrbgContext.new()`, so the import no longer serves a purpose. Replace `keys.newRng` with the direct call to reduce `nim-eth` imports.
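For illustration, a minimal sketch of the substitution applied throughout this commit, assuming `nim-bearssl`'s `bearssl/rand` module is in scope (the `generateBytes` call mirrors the `rng[].generateBytes(32)` usage in the keystore tests and merely shows how bytes are drawn from the DRBG):

    import bearssl/rand

    # previously: `import eth/keys` followed by `let rng = keys.newRng()`
    let rng = HmacDrbgContext.new()    # seeded on construction; no system entropy (urandom) needed afterwards
    let msg = rng[].generateBytes(32)  # e.g. draw 32 random bytes from the DRBG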
parent 3dd256b686
commit 2722778ce5
@@ -202,7 +202,7 @@ proc main() {.async.} =
 except Exception as exc: # TODO fix confutils
 raiseAssert exc.msg
 
-let rng = keys.newRng()
+let rng = HmacDrbgContext.new()
 
 if conf.cmd == StartUpCommand.generateSimulationDeposits:
 let
@@ -8,7 +8,7 @@ import
 std/os,
 chronicles,
 stew/results, snappy, taskpools,
-../ncli/e2store, eth/keys,
+../ncli/e2store,
 ./spec/datatypes/[altair, bellatrix, phase0],
 ./spec/[beaconstate, forks, signatures_batch],
 ./consensus_object_pools/block_dag # TODO move to somewhere else to avoid circular deps
@@ -167,9 +167,9 @@ proc verify*(f: EraFile, cfg: RuntimeConfig): Result[Eth2Digest, string] =
 startSlot = f.stateIdx.startSlot
 era = startSlot.era
 
-var
+rng = HmacDrbgContext.new()
 taskpool = Taskpool.new()
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+var verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 
 var tmp: seq[byte]
 ? f.getStateSSZ(startSlot, tmp)
@@ -9,7 +9,6 @@
 
 import
 chronicles,
-eth/keys,
 ./gossip_processing/light_client_processor,
 ./networking/[eth2_network, topic_params],
 ./spec/datatypes/altair,
@@ -10,10 +10,10 @@
 import
 std/[os, strutils],
 chronicles, stew/shims/net, stew/results,
-eth/keys, eth/p2p/discoveryv5/[enr, protocol, node],
+eth/p2p/discoveryv5/[enr, protocol, node],
 ".."/[conf, conf_light_client]
 
-export protocol, keys
+export protocol
 
 type
 Eth2DiscoveryProtocol* = protocol.Protocol
@@ -13,7 +13,6 @@ import
 metrics, metrics/chronos_httpserver,
 stew/[byteutils, io2],
 eth/p2p/discoveryv5/[enr, random2],
-eth/keys,
 ./consensus_object_pools/blob_quarantine,
 ./consensus_object_pools/vanity_logs/vanity_logs,
 ./networking/topic_params,
@@ -2008,7 +2007,7 @@ proc doSlashingInterchange(conf: BeaconNodeConf) {.raises: [Defect, CatchableErr
 proc handleStartUpCmd(config: var BeaconNodeConf) {.raises: [Defect, CatchableError].} =
 # Single RNG instance for the application - will be seeded on construction
 # and avoid using system resources (such as urandom) after that
-let rng = keys.newRng()
+let rng = HmacDrbgContext.new()
 
 case config.cmd
 of BNStartUpCmd.noCommand: doRunBeaconNode(config, rng)
@@ -8,7 +8,7 @@
 import
 std/os,
 chronicles, chronos, stew/io2,
-eth/db/kvstore_sqlite3, eth/keys,
+eth/db/kvstore_sqlite3,
 ./el/el_manager,
 ./gossip_processing/optimistic_processor,
 ./networking/[topic_params, network_metadata],
@@ -79,7 +79,7 @@ programMain:
 
 genesisBlockRoot = get_initial_beacon_block(genesisState[]).root
 
-rng = keys.newRng()
+rng = HmacDrbgContext.new()
 netKeys = getRandomNetKeys(rng[])
 network = createEth2Node(
 rng, config, netKeys, cfg,
@@ -6,7 +6,6 @@
 # at your option. This file may not be copied, modified, or distributed except according to those terms.
 import
 stew/io2, presto, metrics, metrics/chronos_httpserver,
-libp2p/crypto/crypto,
 ./rpc/rest_key_management_api,
 ./validator_client/[
 common, fallback_service, duties_service, fork_service, block_service,
@@ -512,7 +511,7 @@ programMain:
 
 # Single RNG instance for the application - will be seeded on construction
 # and avoid using system resources (such as urandom) after that
-rng = crypto.newRng()
+rng = HmacDrbgContext.new()
 
 setupLogging(config.logLevel, config.logStdout, config.logFile)
 waitFor runValidatorClient(config, rng)
@@ -11,7 +11,7 @@ import
 std/[os, strutils, terminal, wordwrap, unicode],
 chronicles, chronos, json_serialization, zxcvbn,
 bearssl/rand,
-serialization, blscurve, eth/common/eth_types, eth/keys, confutils,
+serialization, blscurve, eth/common/eth_types, confutils,
 nimbus_security_resources,
 ".."/spec/[eth2_merkleization, keystore, crypto],
 ".."/spec/datatypes/base,
@@ -21,7 +21,7 @@ import
 chronicles, chronicles/timings,
 json_serialization/std/[options, sets, net],
 eth/db/kvstore,
-eth/keys, eth/p2p/discoveryv5/[protocol, enr],
+eth/p2p/discoveryv5/[protocol, enr],
 web3/ethtypes,
 
 # Local modules
@@ -1,6 +1,13 @@
+# beacon_chain
+# Copyright (c) 2022-2023 Status Research & Development GmbH
+# Licensed and distributed under either of
+# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
 import
 std/os,
-confutils, eth/keys,
+confutils,
 ../beacon_chain/validators/keystore_management,
 ../beacon_chain/spec/[keystore, crypto],
 ../beacon_chain/conf
@@ -58,9 +65,8 @@ proc main =
 error "The specified treshold must be lower or equal to the number of signers"
 quit 1
 
-var
-rng = keys.newRng()
-rngCtx = rng[]
+let rng = HmacDrbgContext.new()
+template rngCtx: untyped = rng[]
 
 let
 validatorsDir = conf.validatorsDir
@@ -523,7 +523,7 @@ proc main() {.async.} =
 except Exception as exc: # TODO fix confutils
 raiseAssert exc.msg
 
-let rng = keys.newRng()
+let rng = HmacDrbgContext.new()
 
 if conf.cmd == StartUpCommand.generateDeposits:
 let
@@ -589,7 +589,7 @@ proc main() {.async.} =
 
 case conf.cmd
 of StartUpCommand.createTestnet:
-let rng = keys.newRng()
+let rng = HmacDrbgContext.new()
 doCreateTestnet(conf, rng[])
 
 of StartUpCommand.deployDepositContract:
@@ -16,7 +16,7 @@
 
 import
 confutils, chronicles, eth/db/kvstore_sqlite3,
-chronos/timer, eth/keys, taskpools,
+chronos/timer, taskpools,
 ../tests/testblockutil,
 ../beacon_chain/spec/[forks, state_transition],
 ../beacon_chain/spec/datatypes/[phase0, altair, bellatrix, deneb],
@@ -313,19 +313,20 @@ cli do(slots = SLOTS_PER_EPOCH * 6,
 ChainDAGRef.preInit(db, genesisState[])
 db.putDepositTreeSnapshot(depositTreeSnapshot)
 
+let rng = HmacDrbgContext.new()
 var
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = ChainDAGRef.init(cfg, db, validatorMonitor, {})
 eth1Chain = Eth1Chain.init(cfg, db, 0, default Eth2Digest)
 merkleizer = DepositsMerkleizer.init(depositTreeSnapshot.depositContractState)
 taskpool = Taskpool.new()
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 quarantine = newClone(Quarantine.init())
 attPool = AttestationPool.init(dag, quarantine)
 batchCrypto = BatchCrypto.new(
-keys.newRng(), eager = func(): bool = true,
+rng, eager = func(): bool = true,
 genesis_validators_root = dag.genesis_validators_root, taskpool)
-syncCommitteePool = newClone SyncCommitteeMsgPool.init(keys.newRng(), cfg)
+syncCommitteePool = newClone SyncCommitteeMsgPool.init(rng, cfg)
 timers: array[Timers, RunningStat]
 attesters: RunningStat
 r = initRand(1)
@@ -10,7 +10,7 @@
 import
 # Status libraries
 stew/results, chronicles,
-eth/keys, taskpools,
+taskpools,
 # Internals
 ../../beacon_chain/spec/[helpers, forks, state_transition_block],
 ../../beacon_chain/spec/datatypes/[
@@ -348,9 +348,10 @@ proc doRunTest(path: string, fork: ConsensusFork) =
 of ConsensusFork.Phase0:
 initialLoad(path, db, phase0.BeaconState, phase0.BeaconBlock)
 
-var
+let
+rng = HmacDrbgContext.new()
 taskpool = Taskpool.new()
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+var verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 
 let steps = loadOps(path, fork)
 var time = stores.fkChoice.checkpoints.time
@@ -1,13 +1,19 @@
+# beacon_chain
+# Copyright (c) 2021-2023 Status Research & Development GmbH
+# Licensed and distributed under either of
+# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
 {.used.}
 
 import
 unittest2,
-eth/keys,
 ../beacon_chain/validators/action_tracker
 
 suite "subnet tracker":
 setup:
-let rng = keys.newRng()
+let rng = HmacDrbgContext.new()
 
 test "should register stability subnets on attester duties":
 var tracker = ActionTracker.init(rng, false)
@@ -13,7 +13,7 @@ import
 unittest2,
 chronicles, chronos,
 stew/byteutils,
-eth/keys, taskpools,
+taskpools,
 # Internal
 ../beacon_chain/gossip_processing/[gossip_validation],
 ../beacon_chain/fork_choice/[fork_choice_types, fork_choice],
@@ -58,13 +58,14 @@ suite "Attestation pool processing" & preset():
 
 setup:
 # Genesis state that results in 6 members per committee
+let rng = HmacDrbgContext.new()
 var
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = init(
 ChainDAGRef, defaultRuntimeConfig, makeTestDB(SLOTS_PER_EPOCH * 6),
 validatorMonitor, {})
 taskpool = Taskpool.new()
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 quarantine = newClone(Quarantine.init())
 pool = newClone(AttestationPool.init(dag, quarantine))
 state = newClone(dag.headState)
@@ -11,7 +11,7 @@ import
 chronos,
 std/sequtils,
 unittest2,
-eth/keys, taskpools,
+taskpools,
 ../beacon_chain/[conf, beacon_clock],
 ../beacon_chain/spec/[beaconstate, forks, helpers, state_transition],
 ../beacon_chain/spec/datatypes/deneb,
@@ -34,12 +34,13 @@ proc pruneAtFinalization(dag: ChainDAGRef) =
 
 suite "Block processor" & preset():
 setup:
+let rng = HmacDrbgContext.new()
 var
 db = makeTestDB(SLOTS_PER_EPOCH)
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = init(ChainDAGRef, defaultRuntimeConfig, db, validatorMonitor, {})
 taskpool = Taskpool.new()
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 quarantine = newClone(Quarantine.init())
 blobQuarantine = newClone(BlobQuarantine())
 attestationPool = newClone(AttestationPool.init(dag, quarantine))
@@ -56,7 +57,7 @@ suite "Block processor" & preset():
 b2 = addTestBlock(state[], cache).phase0Data
 getTimeFn = proc(): BeaconTime = b2.message.slot.start_beacon_time()
 processor = BlockProcessor.new(
-false, "", "", keys.newRng(), taskpool, consensusManager,
+false, "", "", rng, taskpool, consensusManager,
 validatorMonitor, blobQuarantine, getTimeFn)
 
 asyncTest "Reverse order block add & get" & preset():
@@ -10,7 +10,7 @@
 import
 std/[random, sequtils],
 unittest2,
-eth/keys, taskpools,
+taskpools,
 ../beacon_chain/el/merkle_minimal,
 ../beacon_chain/spec/datatypes/base,
 ../beacon_chain/spec/[beaconstate, forks, helpers, signatures, state_transition],
@@ -39,11 +39,12 @@ type
 
 suite "Block pool processing" & preset():
 setup:
+let rng = HmacDrbgContext.new()
 var
 db = makeTestDB(SLOTS_PER_EPOCH)
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = init(ChainDAGRef, defaultRuntimeConfig, db, validatorMonitor, {})
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: Taskpool.new())
 quarantine = Quarantine.init()
 state = newClone(dag.headState)
 cache = StateCache()
@@ -282,6 +283,8 @@ when declared(GC_fullCollect): # i386 test machines seem to run low..
 
 suite "Block pool altair processing" & preset():
 setup:
+let rng = HmacDrbgContext.new()
+
 var
 cfg = defaultRuntimeConfig
 cfg.ALTAIR_FORK_EPOCH = Epoch(1)
@@ -290,7 +293,7 @@ suite "Block pool altair processing" & preset():
 db = makeTestDB(SLOTS_PER_EPOCH)
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = init(ChainDAGRef, cfg, db, validatorMonitor, {})
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: Taskpool.new())
 quarantine = Quarantine.init()
 state = newClone(dag.headState)
 cache = StateCache()
@@ -361,11 +364,12 @@ suite "Block pool altair processing" & preset():
 
 suite "chain DAG finalization tests" & preset():
 setup:
+let rng = HmacDrbgContext.new()
 var
 db = makeTestDB(SLOTS_PER_EPOCH)
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = init(ChainDAGRef, defaultRuntimeConfig, db, validatorMonitor, {})
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: Taskpool.new())
 quarantine = Quarantine.init()
 cache = StateCache()
 info = ForkedEpochInfo()
@@ -628,13 +632,14 @@ suite "chain DAG finalization tests" & preset():
 suite "Old database versions" & preset():
 setup:
 let
+rng = HmacDrbgContext.new()
 genState = newClone(initialize_hashed_beacon_state_from_eth1(
 defaultRuntimeConfig, ZERO_HASH, 0,
 makeInitialDeposits(SLOTS_PER_EPOCH.uint64, flags = {skipBlsValidation}),
 {skipBlsValidation}))
 genBlock = get_initial_beacon_block(genState[])
 var
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: Taskpool.new())
 quarantine = Quarantine.init()
 
 test "pre-1.1.0":
@@ -669,6 +674,8 @@ suite "Old database versions" & preset():
 
 suite "Diverging hardforks":
 setup:
+let rng = HmacDrbgContext.new()
+
 var
 phase0RuntimeConfig = defaultRuntimeConfig
 altairRuntimeConfig = defaultRuntimeConfig
@@ -680,7 +687,7 @@ suite "Diverging hardforks":
 db = makeTestDB(SLOTS_PER_EPOCH)
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = init(ChainDAGRef, phase0RuntimeConfig, db, validatorMonitor, {})
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: Taskpool.new())
 quarantine = newClone(Quarantine.init())
 cache = StateCache()
 info = ForkedEpochInfo()
@@ -917,9 +924,12 @@ suite "Backfill":
 dag.addBackfillBlock(
 genBlock.phase0Data.asSigned) == AddBackRes.err VerifierError.Duplicate
 
+let
+rng = HmacDrbgContext.new()
+taskpool = Taskpool.new()
 var
 cache: StateCache
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 quarantine = newClone(Quarantine.init())
 
 let
@@ -1050,6 +1060,8 @@ suite "Starting states":
 
 suite "Latest valid hash" & preset():
 setup:
+let rng = HmacDrbgContext.new()
+
 var runtimeConfig = defaultRuntimeConfig
 runtimeConfig.ALTAIR_FORK_EPOCH = 1.Epoch
 runtimeConfig.BELLATRIX_FORK_EPOCH = 2.Epoch
@@ -1058,7 +1070,7 @@ suite "Latest valid hash" & preset():
 db = makeTestDB(SLOTS_PER_EPOCH)
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = init(ChainDAGRef, runtimeConfig, db, validatorMonitor, {})
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: Taskpool.new())
 quarantine = newClone(Quarantine.init())
 cache = StateCache()
 info = ForkedEpochInfo()
@@ -1114,6 +1126,7 @@ suite "Latest valid hash" & preset():
 suite "Pruning":
 setup:
 let
+rng = HmacDrbgContext.new()
 cfg = block:
 var res = defaultRuntimeConfig
 res.MIN_VALIDATOR_WITHDRAWABILITY_DELAY = 4
@@ -1126,7 +1139,7 @@ suite "Pruning":
 tmpState = assignClone(dag.headState)
 
 var
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: Taskpool.new())
 quarantine = Quarantine.init()
 cache = StateCache()
 blocks = @[dag.head]
@@ -1186,10 +1199,11 @@ suite "Shufflings":
 flags = {}, cfg = cfg),
 validatorMonitor, {})
 quarantine = newClone(Quarantine.init())
+rng = HmacDrbgContext.new()
 taskpool = Taskpool.new()
 
 var
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 graffiti: GraffitiBytes
 proc addBlocks(blocks: uint64, attested: bool, cache: var StateCache) =
 inc distinctBase(graffiti)[0] # Avoid duplicate blocks across branches
@@ -1,5 +1,5 @@
 # beacon_chain
-# Copyright (c) 2021-2022 Status Research & Development GmbH
+# Copyright (c) 2021-2023 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -36,7 +36,7 @@ const noSyncnetsPreference = SyncnetBits()
 
 procSuite "Eth2 specific discovery tests":
 let
-rng = keys.newRng()
+rng = HmacDrbgContext.new()
 enrForkId = ENRForkID(
 fork_digest: ForkDigest([byte 0, 1, 2, 3]),
 next_fork_version: Version([byte 0, 0, 0, 0]),
@@ -13,7 +13,7 @@ import
 # Status lib
 unittest2,
 chronos,
-eth/keys, taskpools,
+taskpools,
 # Internal
 ../beacon_chain/[beacon_clock],
 ../beacon_chain/gossip_processing/[gossip_validation, batch_validation],
@@ -36,20 +36,21 @@ proc pruneAtFinalization(dag: ChainDAGRef, attPool: AttestationPool) =
 suite "Gossip validation " & preset():
 setup:
 # Genesis state that results in 3 members per committee
+let rng = HmacDrbgContext.new()
 var
 validatorMonitor = newClone(ValidatorMonitor.init())
 dag = init(
 ChainDAGRef, defaultRuntimeConfig, makeTestDB(SLOTS_PER_EPOCH * 3),
 validatorMonitor, {})
 taskpool = Taskpool.new()
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 quarantine = newClone(Quarantine.init())
 pool = newClone(AttestationPool.init(dag, quarantine))
 state = newClone(dag.headState)
 cache = StateCache()
 info = ForkedEpochInfo()
 batchCrypto = BatchCrypto.new(
-keys.newRng(), eager = proc(): bool = false,
+rng, eager = proc(): bool = false,
 genesis_validators_root = dag.genesis_validators_root, taskpool)
 # Slot 0 is a finalized slot - won't be making attestations for it..
 check:
@@ -187,8 +188,9 @@ suite "Gossip validation - Extra": # Not based on preset config
 cfg
 taskpool = Taskpool.new()
 quarantine = newClone(Quarantine.init())
+rng = HmacDrbgContext.new()
 var
-verifier = BatchVerifier(rng: keys.newRng(), taskpool: Taskpool.new())
+verifier = BatchVerifier(rng: rng, taskpool: Taskpool.new())
 dag = block:
 let
 validatorMonitor = newClone(ValidatorMonitor.init())
@@ -220,7 +222,7 @@ suite "Gossip validation - Extra": # Not based on preset config
 dag
 
 let batchCrypto = BatchCrypto.new(
-keys.newRng(), eager = proc(): bool = false,
+rng, eager = proc(): bool = false,
 genesis_validators_root = dag.genesis_validators_root, taskpool)
 
 var
@@ -243,8 +245,7 @@
 slot, state[].latest_block_root)
 msg = resMsg.get()
 
-syncCommitteePool = newClone(
-SyncCommitteeMsgPool.init(keys.newRng(), cfg))
+syncCommitteePool = newClone(SyncCommitteeMsgPool.init(rng, cfg))
 res = waitFor validateSyncCommitteeMessage(
 dag, quarantine, batchCrypto, syncCommitteePool,
 msg, subcommitteeIdx, slot.start_beacon_time(), true)
@@ -1,5 +1,5 @@
 # beacon_chain
-# Copyright (c) 2022 Status Research & Development GmbH
+# Copyright (c) 2022-2023 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -9,7 +9,7 @@
 
 import
 std/[typetraits, sequtils],
-unittest2, eth/keys, stew/byteutils,
+unittest2, stew/byteutils,
 ../beacon_chain/spec/[crypto, keystore],
 ./testutil
 
@@ -24,7 +24,7 @@ suite "Key spliting":
 password = string.fromBytes hexToSeqByte("7465737470617373776f7264f09f9491")
 salt = hexToSeqByte "d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"
 iv = hexToSeqByte "264daa3f303d7259501c93d997d84fe6"
-rng = keys.newRng()
+rng = HmacDrbgContext.new()
 msg = rng[].generateBytes(32)
 
 test "single share":
@@ -10,7 +10,7 @@
 import
 std/[typetraits, os, options, json, sequtils, uri, algorithm],
 testutils/unittests, chronicles, stint, json_serialization, confutils,
-chronos, eth/keys, blscurve, libp2p/crypto/crypto as lcrypto,
+chronos, blscurve, libp2p/crypto/crypto as lcrypto,
 stew/[byteutils, io2], stew/shims/net,
 
 ../beacon_chain/spec/[crypto, keystore, eth2_merkleization],
@@ -121,7 +121,7 @@ func contains*(keylist: openArray[KeystoreInfo], key: string): bool =
 
 proc prepareNetwork =
 let
-rng = keys.newRng()
+rng = HmacDrbgContext.new()
 mnemonic = generateMnemonic(rng[])
 seed = getSeed(mnemonic, KeystorePass.init "")
 cfg = defaultRuntimeConfig
@@ -271,7 +271,7 @@ proc addPreTestRemoteKeystores(validatorsDir: string) =
 quit 1
 
 proc startBeaconNode(basePort: int) {.raises: [Defect, CatchableError].} =
-let rng = keys.newRng()
+let rng = HmacDrbgContext.new()
 
 copyHalfValidators(nodeDataDir, true)
 addPreTestRemoteKeystores(nodeValidatorsDir)
@@ -309,7 +309,7 @@ proc startBeaconNode(basePort: int) {.raises: [Defect, CatchableError].} =
 # os.removeDir dataDir
 
 proc startValidatorClient(basePort: int) {.async, thread.} =
-let rng = keys.newRng()
+let rng = HmacDrbgContext.new()
 
 copyHalfValidators(vcDataDir, false)
 addPreTestRemoteKeystores(vcValidatorsDir)
@@ -390,7 +390,7 @@ func `==`(a: seq[ValidatorPubKey],
 proc runTests(keymanager: KeymanagerToTest) {.async.} =
 let
 client = RestClientRef.new(initTAddress("127.0.0.1", keymanager.port))
-rng = keys.newRng()
+rng = HmacDrbgContext.new()
 privateKey = ValidatorPrivKey.fromRaw(secretBytes).get
 
 allValidators = listLocalValidators(
@@ -1,5 +1,5 @@
 # beacon_chain
-# Copyright (c) 2018-2022 Status Research & Development GmbH
+# Copyright (c) 2018-2023 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -10,7 +10,7 @@
 import
 std/[json, typetraits],
 unittest2,
-stew/byteutils, blscurve, eth/keys, json_serialization,
+stew/byteutils, blscurve, json_serialization,
 libp2p/crypto/crypto as lcrypto,
 ../beacon_chain/spec/[crypto, keystore],
 ./testutil
@@ -268,7 +268,7 @@ const
 iv = hexToSeqByte "264daa3f303d7259501c93d997d84fe6"
 
 let
-rng = keys.newRng()
+rng = HmacDrbgContext.new()
 
 suite "KeyStorage testing suite":
 setup:
@@ -472,4 +472,3 @@ suite "eth2.0-deposits-cli compatibility":
 
 v3SK.toHex == "1445cec3861d7cbf80e409d79aeee131622dcb0c815ff97ceab2515e14c41a1a"
 v3WK.toHex == "1ccd5dce4c842bd3f65bbd59a382662e689fcf01ddc39aaaf2dcc7d073f11a93"
-
@@ -1,5 +1,5 @@
 # beacon_chain
-# Copyright (c) 2021-2022 Status Research & Development GmbH
+# Copyright (c) 2021-2023 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -10,7 +10,7 @@
 import
 std/[os, options, json, typetraits, uri, algorithm],
 unittest2, chronos, chronicles, stint, json_serialization,
-blscurve, eth/keys,
+blscurve,
 libp2p/crypto/crypto as lcrypto,
 stew/[io2, byteutils],
 ../beacon_chain/filepath,
@@ -44,7 +44,7 @@ proc contentEquals(filePath, expectedContent: string): bool =
 expectedContent == readAll(file)
 
 let
-rng = keys.newRng()
+rng = HmacDrbgContext.new()
 mnemonic = generateMnemonic(rng[])
 seed = getSeed(mnemonic, KeystorePass.init "")
 cfg = defaultRuntimeConfig
@@ -9,7 +9,7 @@
 
 import
 # Status libraries
-eth/keys, taskpools,
+taskpools,
 # Beacon chain internals
 ../beacon_chain/consensus_object_pools/
 [block_clearance, block_quarantine, blockchain_dag],
@@ -94,8 +94,9 @@ suite "Light client" & preset():
 serve: true,
 importMode: LightClientDataImportMode.OnlyNew))
 quarantine = newClone(Quarantine.init())
+rng = HmacDrbgContext.new()
 taskpool = Taskpool.new()
-var verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+var verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 
 test "Pre-Altair":
 # Genesis
@@ -9,7 +9,7 @@
 
 import
 # Status libraries
-chronos, eth/keys,
+chronos,
 # Beacon chain internals
 ../beacon_chain/consensus_object_pools/
 [block_clearance, block_quarantine, blockchain_dag],
@@ -42,8 +42,9 @@ suite "Light client processor" & preset():
 serve: true,
 importMode: LightClientDataImportMode.OnlyNew))
 quarantine = newClone(Quarantine.init())
+rng = HmacDrbgContext.new()
 taskpool = Taskpool.new()
-var verifier = BatchVerifier(rng: keys.newRng(), taskpool: taskpool)
+var verifier = BatchVerifier(rng: rng, taskpool: taskpool)
 
 var cache: StateCache
 proc addBlocks(blocks: uint64, syncCommitteeRatio: float) =
@@ -9,7 +9,6 @@
 
 import
 unittest2,
-eth/keys,
 ../beacon_chain/spec/[beaconstate, helpers, signatures],
 ../beacon_chain/consensus_object_pools/sync_committee_msg_pool,
 ./testblockutil
@@ -23,12 +22,14 @@ func aggregate(sigs: openArray[CookedSig]): CookedSig =
 
 suite "Sync committee pool":
 setup:
-let cfg = block:
-var res = defaultRuntimeConfig
-res.ALTAIR_FORK_EPOCH = 0.Epoch
-res.BELLATRIX_FORK_EPOCH = 20.Epoch
-res
-var pool = SyncCommitteeMsgPool.init(keys.newRng(), cfg)
+let
+rng = HmacDrbgContext.new()
+cfg = block:
+var res = defaultRuntimeConfig
+res.ALTAIR_FORK_EPOCH = 0.Epoch
+res.BELLATRIX_FORK_EPOCH = 20.Epoch
+res
+var pool = SyncCommitteeMsgPool.init(rng, cfg)
 
 test "An empty pool is safe to use":
 let headBid =
@@ -7,7 +7,6 @@
 
 import
 chronicles,
-eth/keys,
 stew/endians2,
 ../beacon_chain/consensus_object_pools/sync_committee_msg_pool,
 ../beacon_chain/spec/datatypes/bellatrix,
@@ -423,7 +422,8 @@ proc makeSyncAggregate(
 getStateField(state, slot)
 latest_block_id =
 withState(state): forkyState.latest_block_id
-syncCommitteePool = newClone(SyncCommitteeMsgPool.init(keys.newRng(), cfg))
+rng = HmacDrbgContext.new()
+syncCommitteePool = newClone(SyncCommitteeMsgPool.init(rng, cfg))
 
 type
 Aggregator = object