remove rest of withState.state usage (#4120)

* remove rest of `withState.state` usage

* remove scaffolding
tersec 2022-09-16 15:35:00 +02:00 committed by GitHub
parent 43188a0990
commit 0410aec9d8
5 changed files with 32 additions and 31 deletions
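
The change is mechanical throughout the diff below: inside `withState` blocks, references to the deprecated injected `state` alias become `forkyState`, so the fork-specific state no longer shadows the surrounding `state` variable. A minimal caller-side sketch of the pattern, assuming nimbus-eth2's `withState` and `ForkedHashedBeaconState` are in scope (the module path, the `slotOf` helper, and its body are illustrative only, not part of this commit):

import beacon_chain/spec/forks  # assumed import path; adjust to your setup

proc slotOf(state: ForkedHashedBeaconState): Slot =
  ## Hypothetical helper, for illustration only.
  withState(state):
    # Before this commit, the fork-specific state was also injected as
    # `state`, shadowing the proc parameter:
    #   result = state.data.slot
    # After it, only the unambiguous alias remains:
    result = forkyState.data.slot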


@@ -674,7 +674,7 @@ proc getAttestationsForBlock*(pool: var AttestationPool,
                               state: ForkedHashedBeaconState,
                               cache: var StateCache): seq[Attestation] =
   withState(state):
-    pool.getAttestationsForBlock(state, cache)
+    pool.getAttestationsForBlock(forkyState, cache)

 func bestValidation(aggregates: openArray[Validation]): (int, int) =
   # Look for best validation based on number of votes in the aggregate


@@ -313,17 +313,14 @@ template withState*(x: ForkedHashedBeaconState, body: untyped): untyped =
   of BeaconStateFork.Bellatrix:
     const stateFork {.inject, used.} = BeaconStateFork.Bellatrix
     template forkyState: untyped {.inject, used.} = x.bellatrixData
-    template state: untyped {.inject, used.} = x.bellatrixData
     body
   of BeaconStateFork.Altair:
     const stateFork {.inject, used.} = BeaconStateFork.Altair
     template forkyState: untyped {.inject, used.} = x.altairData
-    template state: untyped {.inject, used.} = x.altairData
     body
   of BeaconStateFork.Phase0:
     const stateFork {.inject, used.} = BeaconStateFork.Phase0
     template forkyState: untyped {.inject, used.} = x.phase0Data
-    template state: untyped {.inject, used.} = x.phase0Data
     body

 template withEpochInfo*(x: ForkedEpochInfo, body: untyped): untyped =
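
For readers less familiar with the pattern above: `withState` switches on the state's fork and uses `{.inject.}` templates to expose the matching fork-specific state to the caller-supplied `body`. A small self-contained sketch of that dispatch-and-inject technique, using simplified stand-in types rather than the real `ForkedHashedBeaconState` (all names below are illustrative):

# Sketch of the injection pattern used by withState, with a simplified
# two-fork object variant standing in for ForkedHashedBeaconState.
type
  Fork = enum
    forkA, forkB

  StateA = object
    slot: int

  StateB = object
    slot: int
    extra: string

  ForkedState = object
    case kind: Fork
    of forkA: aData: StateA
    of forkB: bData: StateB

template withForkyState(x: ForkedState, body: untyped): untyped =
  # As in the real withState after this commit, only `forkyState` is
  # injected; no extra `state` alias shadows outer names.
  case x.kind
  of forkA:
    template forkyState: untyped {.inject, used.} = x.aData
    body
  of forkB:
    template forkyState: untyped {.inject, used.} = x.bData
    body

when isMainModule:
  let state = ForkedState(kind: forkB, bData: StateB(slot: 42, extra: "demo"))
  withForkyState(state):
    echo forkyState.slot  # callers refer to forkyState, never a `state` alias

Because only `forkyState` is injected, the `body` can still mention an outer variable named `state` without ambiguity, which is exactly what the remaining hunks in this commit rely on.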


@@ -309,20 +309,20 @@ proc cmdBench(conf: DbConf, cfg: RuntimeConfig) =
       case stateFork
       of BeaconStateFork.Phase0:
         doAssert dbBenchmark.getState(
-          state.root, loadedState[0][].data, noRollback)
+          forkyState.root, loadedState[0][].data, noRollback)
       of BeaconStateFork.Altair:
         doAssert dbBenchmark.getState(
-          state.root, loadedState[1][].data, noRollback)
+          forkyState.root, loadedState[1][].data, noRollback)
       of BeaconStateFork.Bellatrix:
         doAssert dbBenchmark.getState(
-          state.root, loadedState[2][].data, noRollback)
+          forkyState.root, loadedState[2][].data, noRollback)

-      if state.data.slot.epoch mod 16 == 0:
+      if forkyState.data.slot.epoch mod 16 == 0:
         let loadedRoot = case stateFork
           of BeaconStateFork.Phase0: hash_tree_root(loadedState[0][].data)
           of BeaconStateFork.Altair: hash_tree_root(loadedState[1][].data)
           of BeaconStateFork.Bellatrix: hash_tree_root(loadedState[2][].data)
-        doAssert hash_tree_root(state.data) == loadedRoot
+        doAssert hash_tree_root(forkyState.data) == loadedRoot

   processBlocks(blocks[0])
   processBlocks(blocks[1])
@@ -366,7 +366,7 @@ proc cmdPutState(conf: DbConf, cfg: RuntimeConfig) =
     let state = newClone(readSszForkedHashedBeaconState(
       cfg, readAllBytes(file).tryGet()))
     withState(state[]):
-      db.putState(state)
+      db.putState(forkyState)

 proc cmdDumpBlock(conf: DbConf) =
   let db = BeaconChainDB.new(conf.databaseDir.string, readOnly = true)
@@ -432,7 +432,7 @@ proc cmdRewindState(conf: DbConf, cfg: RuntimeConfig) =
       tmpState[], dag.atSlot(bid, Slot(conf.slot)).expect("block found")) do:
     echo "Writing state..."
     withState(state):
-      dump("./", state)
+      dump("./", forkyState)
   do: raiseAssert "withUpdatedState failed"

 func atCanonicalSlot(dag: ChainDAGRef, bid: BlockId, slot: Slot): Opt[BlockSlotId] =
@@ -494,7 +494,8 @@ proc cmdExportEra(conf: DbConf, cfg: RuntimeConfig) =
     let
       eraRoot = withState(dag.headState):
         eraRoot(
-          state.data.genesis_validators_root, state.data.historical_roots.asSeq,
+          forkyState.data.genesis_validators_root,
+          forkyState.data.historical_roots.asSeq,
           era).expect("have era root since we checked slot")
       name = eraFileName(cfg, era, eraRoot)
@@ -517,7 +518,7 @@ proc cmdExportEra(conf: DbConf, cfg: RuntimeConfig) =
       withTimer(timers[tState]):
         dag.withUpdatedState(tmpState[], eraBid) do:
           withState(state):
-            group.finish(e2, state.data).get()
+            group.finish(e2, forkyState.data).get()
         do: raiseAssert "withUpdatedState failed"

     era += 1
@@ -759,7 +760,7 @@ proc createInsertValidatorProc(db: SqStoreRef): auto =
 proc collectBalances(balances: var seq[uint64], forkedState: ForkedHashedBeaconState) =
   withState(forkedState):
-    balances = seq[uint64](state.data.balances.data)
+    balances = seq[uint64](forkyState.data.balances.data)

 proc calculateDelta(info: RewardsAndPenalties): int64 =
   info.source_outcome +
@@ -823,7 +824,7 @@ proc insertValidators(db: SqStoreRef, state: ForkedHashedBeaconState,
     db.inTransaction("DB"):
       for i in startIndex ..< endIndex:
         insertValidator.exec(
-          (i, state.data.validators[i].pubkey.toRaw)).expect("DB")
+          (i, forkyState.data.validators[i].pubkey.toRaw)).expect("DB")

 proc cmdValidatorDb(conf: DbConf, cfg: RuntimeConfig) =
   # Create a database with performance information for every epoch
@@ -925,15 +926,16 @@ proc cmdValidatorDb(conf: DbConf, cfg: RuntimeConfig) =
     withState(tmpState[]):
       withEpochInfo(forkedInfo):
-        doAssert state.data.balances.len == info.validators.len
-        doAssert state.data.balances.len == previousEpochBalances.len
-        doAssert state.data.balances.len == rewardsAndPenalties.len
+        doAssert forkyState.data.balances.len == info.validators.len
+        doAssert forkyState.data.balances.len == previousEpochBalances.len
+        doAssert forkyState.data.balances.len == rewardsAndPenalties.len

         for index, validator in info.validators:
           template rp: untyped = rewardsAndPenalties[index]

-          checkBalance(index, validator, state.data.balances.item(index).int64,
-            previousEpochBalances[index].int64, rp)
+          checkBalance(
+            index, validator, forkyState.data.balances.item(index).int64,
+            previousEpochBalances[index].int64, rp)

           when infoFork == EpochInfoFork.Phase0:
             rp.inclusion_delay = block:
@@ -970,7 +972,7 @@ proc cmdValidatorDb(conf: DbConf, cfg: RuntimeConfig) =
     if nextSlot.is_epoch:
       withState(tmpState[]):
-        var stateData = newClone(state.data)
+        var stateData = newClone(forkyState.data)
         rewardsAndPenalties.collectEpochRewardsAndPenalties(
           stateData[], cache, cfg, flags)


@@ -66,7 +66,7 @@ cli do(validatorsDir: string, secretsDir: string,
       warn "Unkownn validator", pubkey

   var
-    blockRoot = withState(state[]): state.latest_block_root
+    blockRoot = withState(state[]): forkyState.latest_block_root
     cache: StateCache
     info: ForkedEpochInfo
     aggregates: seq[Attestation]
@@ -78,7 +78,8 @@ cli do(validatorsDir: string, secretsDir: string,
   block:
     let
       active = withState(state[]):
-        get_active_validator_indices_len(state.data, state.data.slot.epoch)
+        get_active_validator_indices_len(
+          forkyState.data, forkyState.data.slot.epoch)

     notice "Let's play",
       validators = validators.len(),
@@ -108,7 +109,7 @@ cli do(validatorsDir: string, secretsDir: string,
     if slot.epoch != (slot - 1).epoch:
       let
         active = withState(state[]):
-          get_active_validator_indices_len(state.data, slot.epoch)
+          get_active_validator_indices_len(forkyState.data, slot.epoch)
         balance = block:
           var b: uint64
           for k, _ in validators:
@@ -133,7 +134,7 @@ cli do(validatorsDir: string, secretsDir: string,
         avgBalance

     if slot.epoch mod 32 == 0:
-      withState(state[]): dump(".", state)
+      withState(state[]): dump(".", forkyState)

     let
       fork = getStateField(state[], fork)
@@ -198,14 +199,15 @@ cli do(validatorsDir: string, secretsDir: string,
     withState(state[]):
       let committees_per_slot = get_committee_count_per_slot(
-        state.data, slot.epoch, cache)
+        forkyState.data, slot.epoch, cache)

       for committee_index in get_committee_indices(committees_per_slot):
-        let committee = get_beacon_committee(state.data, slot, committee_index, cache)
+        let committee = get_beacon_committee(
+          forkyState.data, slot, committee_index, cache)

         var
           attestation = Attestation(
             data: makeAttestationData(
-              state.data, slot, committee_index, blockRoot),
+              forkyState.data, slot, committee_index, blockRoot),
             aggregation_bits: CommitteeValidatorsBits.init(committee.len))
           agg: AggregateSignature
@@ -236,9 +238,9 @@ cli do(validatorsDir: string, secretsDir: string,
         nextSlot = slot + 1
         pubkeys =
           if slot.sync_committee_period == nextSlot.sync_committee_period:
-            state.data.current_sync_committee.pubkeys
+            forkyState.data.current_sync_committee.pubkeys
           else:
-            state.data.next_sync_committee.pubkeys
+            forkyState.data.next_sync_committee.pubkeys

       var
         agg: AggregateSignature


@@ -324,7 +324,7 @@ proc makeSyncAggregate(
     slot =
       getStateField(state, slot)
     latest_block_root =
-      withState(state): state.latest_block_root
+      withState(state): forkyState.latest_block_root
     syncCommitteePool = newClone(SyncCommitteeMsgPool.init(keys.newRng()))

   type