var => let in specs/ and tests/ (#3425)
Parent: 9790c4958b
Commit: 84588b34da
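The change itself is mechanical: bindings that are never reassigned move from `var` to `let`. As a reminder of what that buys (the sketch below is illustrative and not taken from the diff), `let` introduces a single-assignment binding, so the compiler rejects accidental reassignment, while `var` stays mutable:

    # Illustrative only - not code from this commit.
    proc demo() =
      let answer = 42   # single assignment; reassigning `answer` is a compile-time error
      var total = 0     # mutable accumulator, so `var` is still the right choice
      total += answer
      echo total

    demo()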
@@ -689,7 +689,7 @@ proc process_attestation*(
 # The genericSeqAssign generated by the compiler to copy the attestation
 # data sadly is a processing hotspot - the business with the addDefault
 # pointer is here simply to work around the poor codegen
-var pa = attestations.addDefault()
+let pa = attestations.addDefault()
 if pa.isNil:
 return err("process_attestation: too many pending attestations")
 assign(pa[].aggregation_bits, attestation.aggregation_bits)
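Worth noting for hunks like the one above: a `let` binding to a pointer (or ref) only fixes the binding itself, so `pa` can become `let` even though the memory it points to is still written through `pa[]`. A minimal sketch, with illustrative names that are not from the codebase:

    # Illustrative names only - `storage` and `p` are not from the codebase.
    proc demo() =
      var storage = [0, 0, 0]
      let p = addr storage[1]   # the binding `p` cannot be reassigned...
      p[] = 7                   # ...but the memory it points to can still be written
      doAssert storage == [0, 7, 0]

    demo()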
@@ -807,7 +807,7 @@ func upgrade_to_altair*(cfg: RuntimeConfig, pre: phase0.BeaconState):
 doAssert inactivity_scores.data.setLen(pre.validators.len)
 inactivity_scores.resetCache()

-var post = (ref altair.BeaconState)(
+let post = (ref altair.BeaconState)(
 genesis_time: pre.genesis_time,
 genesis_validators_root: pre.genesis_validators_root,
 slot: pre.slot,
@@ -66,7 +66,7 @@ func eth2digest*(v: openArray[byte]): Eth2Digest {.noinit.} =
 else:
 # We use the init-update-finish interface to avoid
 # the expensive burning/clearing memory (20~30% perf)
-var ctx: Eth2DigestCtx
+let ctx: Eth2DigestCtx
 ctx.init()
 ctx.update(v)
 ctx.finish()

@@ -93,7 +93,7 @@ template withEth2Hash*(body: untyped): Eth2Digest =
 res
 else:
 block:
-var h {.inject, noinit.}: Eth2DigestCtx
+let h {.inject, noinit.}: Eth2DigestCtx
 init(h)
 body
 finish(h)
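The two hashing hunks above lean on the init/update/finish interface mentioned in the comments. A rough sketch of that pattern using nimcrypto's sha256 context, assuming its usual `init`/`update`/`finish` procs (the codebase's Eth2DigestCtx may be defined differently):

    # Assumes nimcrypto's sha256 context; names here are not from the codebase.
    import nimcrypto

    proc hashOf(data: openArray[byte]): MDigest[256] =
      var ctx: sha256     # init/update/finish avoids the extra memory clearing
      ctx.init()          # done by the one-shot digest helpers
      ctx.update(data)
      ctx.finish()

    echo hashOf([byte 1, 2, 3])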
@@ -418,7 +418,7 @@ proc readRuntimeConfig*(
 if line.len == 0 or line[0] == '#': continue
 # remove any trailing comments
 let line = line.split("#")[0]
-var lineParts = line.split(":")
+let lineParts = line.split(":")
 if lineParts.len != 2:
 fail "Invalid syntax: A preset file should include only assignments in the form 'ConstName: Value'"

@@ -157,7 +157,7 @@ func maybeUpgradeStateToAltair(
 # once by checking for existing fork.
 if getStateField(state, slot).epoch == cfg.ALTAIR_FORK_EPOCH and
 state.kind == BeaconStateFork.Phase0:
-var newState = upgrade_to_altair(cfg, state.phase0Data.data)
+let newState = upgrade_to_altair(cfg, state.phase0Data.data)
 state = (ref ForkedHashedBeaconState)(
 kind: BeaconStateFork.Altair,
 altairData: altair.HashedBeaconState(

@@ -169,7 +169,7 @@ func maybeUpgradeStateToBellatrix(
 # once by checking for existing fork.
 if getStateField(state, slot).epoch == cfg.BELLATRIX_FORK_EPOCH and
 state.kind == BeaconStateFork.Altair:
-var newState = upgrade_to_bellatrix(cfg, state.altairData.data)
+let newState = upgrade_to_bellatrix(cfg, state.altairData.data)
 state = (ref ForkedHashedBeaconState)(
 kind: BeaconStateFork.Bellatrix,
 bellatrixData: bellatrix.HashedBeaconState(
@@ -37,7 +37,7 @@ proc runTest(identifier: string) =

 let
 proof = block:
-var s = openFileStream(testDir/"proof.yaml")
+let s = openFileStream(testDir/"proof.yaml")
 defer: close(s)
 var res: TestProof
 yaml.load(s, res)
@@ -52,7 +52,7 @@ proc runTest[T, U](
 "[Invalid] "

 test prefix & baseDescription & testSuiteName & " - " & identifier:
-var preState = newClone(
+let preState = newClone(
 parseTest(testDir/"pre.ssz_snappy", SSZ, altair.BeaconState))
 let done = applyProc(
 preState[], parseTest(testDir/(applyFile & ".ssz_snappy"), SSZ, T))

@@ -71,7 +71,7 @@ proc checkSSZ(T: type, dir: string, expectedHash: SSZHashTreeRoot) =
 # TODO check the value (requires YAML loader)

 proc loadExpectedHashTreeRoot(dir: string): SSZHashTreeRoot =
-var s = openFileStream(dir/"roots.yaml")
+let s = openFileStream(dir/"roots.yaml")
 yaml.load(s, result)
 s.close()

@@ -31,7 +31,7 @@ template runSuite(
 test testName & " - " & unitTestName & preset():
 # BeaconState objects are stored on the heap to avoid stack overflow
 type T = altair.BeaconState
-var preState {.inject.} = newClone(parseTest(testDir/"pre.ssz_snappy", SSZ, T))
+let preState {.inject.} = newClone(parseTest(testDir/"pre.ssz_snappy", SSZ, T))
 let postState = newClone(parseTest(testDir/"post.ssz_snappy", SSZ, T))
 var cache {.inject, used.} = StateCache()
 template state: untyped {.inject, used.} = preState[]
@@ -94,7 +94,7 @@ suite "EF - Altair - Unittests - Sync protocol" & preset():

 # https://github.com/ethereum/consensus-specs/blob/v1.1.9/tests/core/pyspec/eth2spec/test/altair/unittests/test_sync_protocol.py#L36-L90
 test "test_process_light_client_update_not_timeout":
-var forked = assignClone(genesisState[])
+let forked = assignClone(genesisState[])
 template state: untyped {.inject.} = forked[].altairData.data
 var store = initialize_light_client_store(state)

@@ -146,7 +146,7 @@ suite "EF - Altair - Unittests - Sync protocol" & preset():

 # https://github.com/ethereum/consensus-specs/blob/v1.1.9/tests/core/pyspec/eth2spec/test/altair/unittests/test_sync_protocol.py#L93-L154
 test "process_light_client_update_timeout":
-var forked = assignClone(genesisState[])
+let forked = assignClone(genesisState[])
 template state: untyped {.inject.} = forked[].altairData.data
 var store = initialize_light_client_store(state)

@@ -207,7 +207,7 @@ suite "EF - Altair - Unittests - Sync protocol" & preset():

 # https://github.com/ethereum/consensus-specs/blob/v1.1.9/tests/core/pyspec/eth2spec/test/altair/unittests/test_sync_protocol.py#L157-L224
 test "process_light_client_update_finality_updated":
-var forked = assignClone(genesisState[])
+let forked = assignClone(genesisState[])
 template state: untyped {.inject.} = forked[].altairData.data
 var store = initialize_light_client_store(state)

@@ -36,7 +36,7 @@ proc runTest(testName, testDir, unitTestName: string) =
 let testPath = testDir / unitTestName

 var transitionInfo: TransitionInfo
-var s = openFileStream(testPath/"meta.yaml")
+let s = openFileStream(testPath/"meta.yaml")
 defer: close(s)
 yaml.load(s, transitionInfo)

@@ -31,7 +31,7 @@ proc runTest(identifier: string) =
 postState = newClone(
 parseTest(testDir/"post.ssz_snappy", SSZ, bellatrix.BeaconState))

-var cfg = defaultRuntimeConfig
+let cfg = defaultRuntimeConfig

 let upgradedState = upgrade_to_bellatrix(cfg, preState[])
 check: upgradedState[].hash_tree_root() == postState[].hash_tree_root()
@@ -53,7 +53,7 @@ proc runTest[T, U](
 "[Invalid] "

 test prefix & baseDescription & testSuiteName & " - " & identifier:
-var preState = newClone(
+let preState = newClone(
 parseTest(testDir/"pre.ssz_snappy", SSZ, bellatrix.BeaconState))
 let done = applyProc(
 preState[], parseTest(testDir/(applyFile & ".ssz_snappy"), SSZ, T))

@@ -71,7 +71,7 @@ proc checkSSZ(T: type, dir: string, expectedHash: SSZHashTreeRoot) =
 # TODO check the value (requires YAML loader)

 proc loadExpectedHashTreeRoot(dir: string): SSZHashTreeRoot =
-var s = openFileStream(dir/"roots.yaml")
+let s = openFileStream(dir/"roots.yaml")
 yaml.load(s, result)
 s.close()

@@ -30,7 +30,7 @@ template runSuite(
 test testName & " - " & unitTestName & preset():
 # BeaconState objects are stored on the heap to avoid stack overflow
 type T = bellatrix.BeaconState
-var preState {.inject.} = newClone(parseTest(testDir/"pre.ssz_snappy", SSZ, T))
+let preState {.inject.} = newClone(parseTest(testDir/"pre.ssz_snappy", SSZ, T))
 let postState = newClone(parseTest(testDir/"post.ssz_snappy", SSZ, T))
 var cache {.inject, used.} = StateCache()
 template state: untyped {.inject, used.} = preState[]
@@ -36,7 +36,7 @@ proc runTest(testName, testDir, unitTestName: string) =
 let testPath = testDir / unitTestName

 var transitionInfo: TransitionInfo
-var s = openFileStream(testPath/"meta.yaml")
+let s = openFileStream(testPath/"meta.yaml")
 defer: close(s)
 yaml.load(s, transitionInfo)

@@ -67,7 +67,7 @@ template readFileBytes*(path: string): seq[byte] =
 cast[seq[byte]](readFile(path))

 proc sszDecodeEntireInput*(input: openArray[byte], Decoded: type): Decoded =
-var stream = unsafeMemoryInput(input)
+let stream = unsafeMemoryInput(input)
 var reader = init(SszReader, stream)
 reader.readValue(result)

@@ -50,7 +50,7 @@ proc runTest[T, U](
 "[Invalid] "

 test prefix & baseDescription & testSuiteName & " - " & identifier:
-var preState = newClone(
+let preState = newClone(
 parseTest(testDir/"pre.ssz_snappy", SSZ, phase0.BeaconState))
 let done = applyProc(
 preState[], parseTest(testDir/(applyFile & ".ssz_snappy"), SSZ, T))

@@ -71,7 +71,7 @@ proc checkSSZ(T: type, dir: string, expectedHash: SSZHashTreeRoot) =
 # TODO check the value (requires YAML loader)

 proc loadExpectedHashTreeRoot(dir: string): SSZHashTreeRoot =
-var s = openFileStream(dir/"roots.yaml")
+let s = openFileStream(dir/"roots.yaml")
 yaml.load(s, result)
 s.close()

@@ -30,7 +30,7 @@ template runSuite(suiteDir, testName: string, transitionProc: untyped): untyped
 test testName & " - " & unitTestName & preset():
 # BeaconState objects are stored on the heap to avoid stack overflow
 type T = phase0.BeaconState
-var preState {.inject.} = newClone(parseTest(testDir/"pre.ssz_snappy", SSZ, T))
+let preState {.inject.} = newClone(parseTest(testDir/"pre.ssz_snappy", SSZ, T))
 let postState = newClone(parseTest(testDir/"post.ssz_snappy", SSZ, T))
 var cache {.inject, used.} = StateCache()
 var info {.inject.}: EpochInfo
@@ -266,7 +266,7 @@ proc runTest(path: string, fork: BeaconBlockFork) =
 defer:
 db.close()

-var stores = case fork
+let stores = case fork
 of BeaconBlockFork.Phase0:
 initialLoad(
 path, db,
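In the hunk above, `stores` can become `let` because Nim's `case` is an expression whose result initializes the binding directly, with no mutation afterwards. A small, self-contained sketch with made-up names:

    # Made-up enum and proc names, purely to show the pattern.
    type Fork = enum forkPhase0, forkAltair

    proc forkName(fork: Fork): string =
      let name = case fork
        of forkPhase0: "phase0"
        of forkAltair: "altair"
      name

    doAssert forkName(forkAltair) == "altair"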
@@ -84,8 +84,8 @@ type
 proc checkBasic(T: typedesc,
 dir: string,
 expectedHash: SSZHashTreeRoot) =
-var fileContents = snappy.decode(readFileBytes(dir/"serialized.ssz_snappy"), MaxObjectSize)
-var deserialized = newClone(sszDecodeEntireInput(fileContents, T))
+let fileContents = snappy.decode(readFileBytes(dir/"serialized.ssz_snappy"), MaxObjectSize)
+let deserialized = newClone(sszDecodeEntireInput(fileContents, T))

 let expectedHash = expectedHash.root
 let actualHash = "0x" & toLowerASCII($hash_tree_root(deserialized[]))
@@ -106,10 +106,10 @@ macro testVector(typeIdent: string, size: int): untyped =
 let types = ["bool", "uint8", "uint16", "uint32", "uint64", "uint128", "uint256"]
 let sizes = [1, 2, 3, 4, 5, 8, 16, 31, 512, 513]

-var dispatcher = nnkIfStmt.newTree()
+let dispatcher = nnkIfStmt.newTree()
 for t in types:
 # if typeIdent == t // elif typeIdent == t
-var sizeDispatch = nnkIfStmt.newTree()
+let sizeDispatch = nnkIfStmt.newTree()
 for s in sizes:
 # if size == s // elif size == s
 let T = nnkBracketExpr.newTree(

@@ -119,7 +119,7 @@ macro testVector(typeIdent: string, size: int): untyped =
 of "uint256": ident("UInt256")
 else: ident(t)
 )
-var testStmt = quote do:
+let testStmt = quote do:
 checkBasic(`T`, dir, expectedHash)
 sizeDispatch.add nnkElifBranch.newTree(
 newCall(ident"==", size, newLit(s)),
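The macro hunks above can use `let` for `dispatcher`, `sizeDispatch` and `testStmt` because `NimNode` is a ref type: children are added through the reference, so the binding itself is never reassigned. A hedged sketch of the same idea (illustrative macro, not part of the test suite):

    # Illustrative macro - not code from the repository.
    import std/macros

    macro intPair(): untyped =
      # NimNode is a ref type, so a `let`-bound node can still grow in place.
      let tree = nnkBracket.newTree()
      tree.add newLit(1)
      tree.add newLit(2)
      result = tree

    const pair = intPair()
    doAssert pair == [1, 2]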
@@ -196,7 +196,7 @@ proc sszCheck(baseDir, sszType, sszSubType: string) =
 # Hash tree root
 var expectedHash: SSZHashTreeRoot
 if fileExists(dir/"meta.yaml"):
-var s = openFileStream(dir/"meta.yaml")
+let s = openFileStream(dir/"meta.yaml")
 defer: close(s)
 yaml.load(s, expectedHash)

@@ -8,7 +8,7 @@
 # import ../interpreter # included to be able to use "suite"

 func setup_finality_01(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operation]] =
-var balances = @[Gwei(1), Gwei(1)]
+let balances = @[Gwei(1), Gwei(1)]
 let GenesisRoot = fakeHash(0)

 # Initialize the fork choice context

@@ -8,7 +8,7 @@
 # import ../interpreter # included to be able to use "suite"

 func setup_finality_02(): tuple[fork_choice: ForkChoiceBackend, ops: seq[Operation]] =
-var balances = @[Gwei(1), Gwei(1)]
+let balances = @[Gwei(1), Gwei(1)]
 let GenesisRoot = fakeHash(0)

 # Initialize the fork choice context
@@ -32,7 +32,7 @@ suite "[Unit - Spec - Block processing] Deposits " & preset():
 template valid_deposit(deposit_amount: uint64, name: string): untyped =
 test "Deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
 $(MAX_EFFECTIVE_BALANCE div 10'u64^9) & " ETH)":
-var state = assignClone(genesisState[])
+let state = assignClone(genesisState[])

 # Test configuration
 # ----------------------------------------

@@ -73,7 +73,7 @@ suite "[Unit - Spec - Block processing] Deposits " & preset():
 valid_deposit(MAX_EFFECTIVE_BALANCE + 1, "over")

 test "Validator top-up":
-var state = assignClone(genesisState[])
+let state = assignClone(genesisState[])

 # Test configuration
 # ----------------------------------------
@@ -113,12 +113,12 @@ suite "[Unit - Spec - Block processing] Deposits " & preset():
 template invalid_signature(deposit_amount: uint64, name: string): untyped =
 test "Invalid deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
 $(MAX_EFFECTIVE_BALANCE div 10'u64^9) & " ETH)":
-var state = assignClone(genesisState[])
+let state = assignClone(genesisState[])

 # Test configuration
 # ----------------------------------------
 let validator_index = state.data.validators.len
-var deposit = mockUpdateStateForNewDeposit(
+let deposit = mockUpdateStateForNewDeposit(
 state.data,
 uint64 validator_index,
 deposit_amount,
@@ -1,5 +1,5 @@
 # beacon_chain
-# Copyright (c) 2018-2021 Status Research & Development GmbH
+# Copyright (c) 2018-2022 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -222,7 +222,7 @@ proc payload =
 doAssert getStateField(genesisState[], validators).lenu64 == NumValidators

 setup:
-var state = assignClone(genesisState[])
+let state = assignClone(genesisState[])

 test " Rule I - 234 finalization with enough support":
 finalizeOn234(state[], Epoch 5, sufficient_support = true)
@@ -253,7 +253,7 @@ suite "Attestation pool processing" & preset():

 for j in 0..<bc0.len():
 root.data[8..<16] = toBytesBE(j.uint64)
-var att = makeAttestation(state[].data, root, bc0[j], cache)
+let att = makeAttestation(state[].data, root, bc0[j], cache)
 pool[].addAttestation(
 att, @[bc0[j]], att.loadSig, att.data.slot.start_beacon_time)
 inc attestations
@@ -92,7 +92,7 @@ suite "Beacon chain DB" & preset():
 db.getPhase0Block(Eth2Digest()).isNone

 test "sanity check phase 0 blocks" & preset():
-var db = BeaconChainDB.new("", inMemory = true)
+let db = BeaconChainDB.new("", inMemory = true)

 let
 signedBlock = withDigest((phase0.TrustedBeaconBlock)())

@@ -131,7 +131,7 @@ suite "Beacon chain DB" & preset():
 db.close()

 test "sanity check Altair blocks" & preset():
-var db = BeaconChainDB.new("", inMemory = true)
+let db = BeaconChainDB.new("", inMemory = true)

 let
 signedBlock = withDigest((altair.TrustedBeaconBlock)())

@@ -170,7 +170,7 @@ suite "Beacon chain DB" & preset():
 db.close()

 test "sanity check Bellatrix blocks" & preset():
-var db = BeaconChainDB.new("", inMemory = true)
+let db = BeaconChainDB.new("", inMemory = true)

 let
 signedBlock = withDigest((bellatrix.TrustedBeaconBlock)())
@@ -209,7 +209,7 @@ suite "Beacon chain DB" & preset():
 db.close()

 test "sanity check phase 0 states" & preset():
-var db = makeTestDB(SLOTS_PER_EPOCH)
+let db = makeTestDB(SLOTS_PER_EPOCH)

 for state in testStatesPhase0:
 let root = state[].phase0Data.root

@@ -227,7 +227,7 @@ suite "Beacon chain DB" & preset():
 db.close()

 test "sanity check Altair states" & preset():
-var db = makeTestDB(SLOTS_PER_EPOCH)
+let db = makeTestDB(SLOTS_PER_EPOCH)

 for state in testStatesAltair:
 let root = state[].altairData.root

@@ -245,7 +245,7 @@ suite "Beacon chain DB" & preset():
 db.close()

 test "sanity check Bellatrix states" & preset():
-var db = makeTestDB(SLOTS_PER_EPOCH)
+let db = makeTestDB(SLOTS_PER_EPOCH)

 for state in testStatesBellatrix:
 let root = state[].bellatrixData.root
@@ -263,7 +263,7 @@ suite "Beacon chain DB" & preset():
 db.close()

 test "sanity check phase 0 states, reusing buffers" & preset():
-var db = makeTestDB(SLOTS_PER_EPOCH)
+let db = makeTestDB(SLOTS_PER_EPOCH)
 let stateBuffer = (phase0.BeaconStateRef)()

 for state in testStatesPhase0:

@@ -283,7 +283,7 @@ suite "Beacon chain DB" & preset():
 db.close()

 test "sanity check Altair states, reusing buffers" & preset():
-var db = makeTestDB(SLOTS_PER_EPOCH)
+let db = makeTestDB(SLOTS_PER_EPOCH)
 let stateBuffer = (altair.BeaconStateRef)()

 for state in testStatesAltair:

@@ -303,7 +303,7 @@ suite "Beacon chain DB" & preset():
 db.close()

 test "sanity check Bellatrix states, reusing buffers" & preset():
-var db = makeTestDB(SLOTS_PER_EPOCH)
+let db = makeTestDB(SLOTS_PER_EPOCH)
 let stateBuffer = (bellatrix.BeaconStateRef)()

 for state in testStatesBellatrix:
@@ -1,5 +1,5 @@
 # beacon_chain
-# Copyright (c) 2018-2021 Status Research & Development GmbH
+# Copyright (c) 2018-2022 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -224,7 +224,7 @@ suite "Block pool processing" & preset():
 bs1_3 = b1Add[].atSlot(3.Slot)
 bs2_3 = b2Add[].atSlot(3.Slot)

-var tmpState = assignClone(dag.headState)
+let tmpState = assignClone(dag.headState)

 # move to specific block
 var cache = StateCache()
@@ -461,7 +461,7 @@ suite "chain DAG finalization tests" & preset():
 getStateRoot(dag2.headState.data) == getStateRoot(dag.headState.data)

 test "orphaned epoch block" & preset():
-var prestate = (ref ForkedHashedBeaconState)(kind: BeaconStateFork.Phase0)
+let prestate = (ref ForkedHashedBeaconState)(kind: BeaconStateFork.Phase0)
 for i in 0 ..< SLOTS_PER_EPOCH:
 if i == SLOTS_PER_EPOCH - 1:
 assign(prestate[], dag.headState.data)

@@ -512,7 +512,7 @@ suite "chain DAG finalization tests" & preset():
 defaultRuntimeConfig, dag.headState.data, Slot(SLOTS_PER_EPOCH * 6 + 2),
 cache, info, {}).isOk()

-var blck = makeTestBlock(
+let blck = makeTestBlock(
 dag.headState.data, cache,
 attestations = makeFullAttestations(
 dag.headState.data, dag.head.root, getStateField(dag.headState.data, slot),
@@ -628,7 +628,7 @@ suite "Diverging hardforks":

 let validatorMonitorAltair = newClone(ValidatorMonitor.init())

-var dagAltair = init(
+let dagAltair = init(
 ChainDAGRef, altairRuntimeConfig, db, validatorMonitorAltair, {})
 discard AttestationPool.init(dagAltair, quarantine)

@@ -660,7 +660,7 @@ suite "Diverging hardforks":

 let validatorMonitor = newClone(ValidatorMonitor.init())

-var dagAltair = init(
+let dagAltair = init(
 ChainDAGRef, altairRuntimeConfig, db, validatorMonitor, {})
 discard AttestationPool.init(dagAltair, quarantine)

@@ -243,7 +243,7 @@ suite "Gossip validation - Extra": # Not based on preset config

 let
 contribution = block:
-var contribution = (ref SignedContributionAndProof)()
+let contribution = (ref SignedContributionAndProof)()
 check: syncCommitteeMsgPool[].produceContribution(
 slot, state[].root, subcommitteeIdx,
 contribution.message.contribution)
@@ -1,5 +1,5 @@
 # beacon_chain
-# Copyright (c) 2018-2021 Status Research & Development GmbH
+# Copyright (c) 2018-2022 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -288,7 +288,7 @@ suite "KeyStorage testing suite":
 checkVariant "75ea" # checksum
 checkVariant "b722" # cipher

-var badKdf = parseJson(pbkdf2Vector)
+let badKdf = parseJson(pbkdf2Vector)
 badKdf{"crypto", "kdf", "function"} = %"invalid"

 check decryptKeystore(JsonString $badKdf,

@@ -296,7 +296,7 @@ suite "KeyStorage testing suite":

 suite "eth2.0-deposits-cli compatibility":
 test "restoring mnemonic without password":
-var mnemonic = Mnemonic "camera dad smile sail injury warfare grid kiwi report minute fold slot before stem firm wet vague shove version medal one alley vibrant mushroom"
+let mnemonic = Mnemonic "camera dad smile sail injury warfare grid kiwi report minute fold slot before stem firm wet vague shove version medal one alley vibrant mushroom"
 let seed = getSeed(mnemonic, KeystorePass.init "")
 check byteutils.toHex(distinctBase seed) == "60043d6e1efe0eea2ef1c8e7d4bb2d79cb27d3403e992b6058998c27c373cfb6fe047b11405360bb224803726fd6b0ee9e3335ae7d9032e6cb49baf08697cf2a"

@@ -324,7 +324,7 @@ suite "eth2.0-deposits-cli compatibility":
 v3WK.toHex == "56b158b3b170e9c339b94b895afc28964a0b6d7a0809a39b558ca8b6688487cd"

 test "restoring mnemonic with password":
-var mnemonic = Mnemonic "swear umbrella lesson couch void gentle rocket valley distance match floor rocket flag solve muscle common modify target city youth pottery predict flip ghost"
+let mnemonic = Mnemonic "swear umbrella lesson couch void gentle rocket valley distance match floor rocket flag solve muscle common modify target city youth pottery predict flip ghost"
 let seed = getSeed(mnemonic, KeystorePass.init "abracadabra!@#$%^7890")
 check byteutils.toHex(distinctBase seed) == "f129c3ac003a07e54974d8dbeb08d20c2343fc516e0e3704570c500a4b6ed98bad2e6fec6a3b9a88076c17feaa0d01163855578cb08bae53860d0ae2558cf03e"

@@ -1,5 +1,5 @@
 # beacon_chain
-# Copyright (c) 2018-2021 Status Research & Development GmbH
+# Copyright (c) 2018-2022 Status Research & Development GmbH
 # Licensed and distributed under either of
 # * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 # * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -35,7 +35,7 @@ suite "state diff tests" & preset():
 getStateField(testStates[j][], slot)
 if getStateField(testStates[i][], slot) + SLOTS_PER_EPOCH != getStateField(testStates[j][], slot):
 continue
-var tmpStateApplyBase = assignClone(testStates[i].altairData.data)
+let tmpStateApplyBase = assignClone(testStates[i].altairData.data)
 let diff = diffStates(
 testStates[i].altairData.data, testStates[j].altairData.data)
 # Immutable parts of validators stored separately, so aren't part of
@@ -149,7 +149,7 @@ proc makeTestBlock*(
 # It's a bit awkward - in order to produce a block for N+1, we need to
 # calculate what the state will look like after that block has been applied,
 # because the block includes the state root.
-var tmpState = assignClone(state)
+let tmpState = assignClone(state)
 addTestBlock(
 tmpState[], cache, eth1_data,
 attestations, deposits, sync_aggregate, graffiti, cfg = cfg)