Pass the test suite with a BeaconState ref type

Zahary Karadjov 2020-04-23 02:35:55 +03:00 committed by zah
parent 740b76d152
commit fdcbfdff05
32 changed files with 284 additions and 297 deletions

View File

@ -1,7 +1,7 @@
{.push raises: [Defect].}
import
options, typetraits, stew/endians2,
options, typetraits, stew/[results, endians2],
serialization, chronicles,
spec/[datatypes, digest, crypto],
kvstore, ssz
@ -68,11 +68,11 @@ proc init*(T: type BeaconChainDB, backend: KVStoreRef): BeaconChainDB =
proc put(db: BeaconChainDB, key: openArray[byte], v: auto) =
db.backend.put(key, SSZ.encode(v)).expect("working database")
proc get(db: BeaconChainDB, key: openArray[byte], T: typedesc): Option[T] =
var res: Option[T]
proc get(db: BeaconChainDB, key: openArray[byte], T: typedesc): Opt[T] =
var res: Opt[T]
proc decode(data: openArray[byte]) =
try:
res = some(SSZ.decode(data, T))
res.ok SSZ.decode(data, T)
except SerializationError as e:
# If the data can't be deserialized, it could be because it's from a
# version of the software that uses a different SSZ encoding
@ -119,20 +119,21 @@ proc putHeadBlock*(db: BeaconChainDB, key: Eth2Digest) =
proc putTailBlock*(db: BeaconChainDB, key: Eth2Digest) =
db.backend.put(subkey(kTailBlock), key.data).expect("working database")
proc getBlock*(db: BeaconChainDB, key: Eth2Digest): Option[SignedBeaconBlock] =
proc getBlock*(db: BeaconChainDB, key: Eth2Digest): Opt[SignedBeaconBlock] =
db.get(subkey(SignedBeaconBlock, key), SignedBeaconBlock)
proc getState*(db: BeaconChainDB, key: Eth2Digest): Option[BeaconState] =
proc getState*(db: BeaconChainDB, key: Eth2Digest): Opt[BeaconState] =
db.get(subkey(BeaconState, key), BeaconState)
proc getStateRoot*(db: BeaconChainDB, root: Eth2Digest, slot: Slot):
Option[Eth2Digest] =
proc getStateRoot*(db: BeaconChainDB,
root: Eth2Digest,
slot: Slot): Opt[Eth2Digest] =
db.get(subkey(root, slot), Eth2Digest)
proc getHeadBlock*(db: BeaconChainDB): Option[Eth2Digest] =
proc getHeadBlock*(db: BeaconChainDB): Opt[Eth2Digest] =
db.get(subkey(kHeadBlock), Eth2Digest)
proc getTailBlock*(db: BeaconChainDB): Option[Eth2Digest] =
proc getTailBlock*(db: BeaconChainDB): Opt[Eth2Digest] =
db.get(subkey(kTailBlock), Eth2Digest)
proc containsBlock*(
@ -151,7 +152,7 @@ iterator getAncestors*(db: BeaconChainDB, root: Eth2Digest):
## The search will go on until the ancestor cannot be found.
var root = root
while (let blck = db.getBlock(root); blck.isSome()):
while (let blck = db.getBlock(root); blck.isOk()):
yield (root, blck.get())
root = blck.get().message.parent_root
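
The database getters switch from Option[T] to stew's Opt[T] (a Result with a void error), so callers now use isOk/ok/err/get, as the getAncestors change above shows. A minimal self-contained sketch of the pattern, using only the stew/results calls already present in this diff; findEven is a hypothetical example proc:

import stew/results

proc findEven(xs: openArray[int]): Opt[int] =
  for x in xs:
    if x mod 2 == 0:
      result.ok x      # same style as `res.ok SSZ.decode(data, T)` above
      return
  result.err()         # same style as `stateRoot.err()` later in this commit

let v = findEven([1, 3, 4])
doAssert v.isOk() and v.get() == 4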

View File

@ -86,7 +86,7 @@ proc saveValidatorKey(keyName, key: string, conf: BeaconNodeConf) =
writeFile(outputFile, key)
info "Imported validator key", file = outputFile
proc getStateFromSnapshot(conf: BeaconNodeConf, state: var BeaconState): bool =
proc getStateFromSnapshot(conf: BeaconNodeConf): NilableBeaconState =
var
genesisPath = conf.dataDir/genesisFile
snapshotContents: TaintedString
@ -122,7 +122,7 @@ proc getStateFromSnapshot(conf: BeaconNodeConf, state: var BeaconState): bool =
quit 1
try:
state = SSZ.decode(snapshotContents, BeaconState)
result = SSZ.decode(snapshotContents, BeaconState)
except SerializationError:
error "Failed to import genesis file", path = genesisPath
quit 1
@ -138,8 +138,6 @@ proc getStateFromSnapshot(conf: BeaconNodeConf, state: var BeaconState): bool =
err = err.msg, genesisFile = conf.dataDir/genesisFile
quit 1
result = true
proc enrForkIdFromState(state: BeaconState): ENRForkID =
let
forkVer = state.fork.current_version
@ -161,10 +159,10 @@ proc init*(T: type BeaconNode, conf: BeaconNodeConf): Future[BeaconNode] {.async
if not BlockPool.isInitialized(db):
# Fresh start - need to load a genesis state from somewhere
var genesisState = new BeaconState
var genesisState = conf.getStateFromSnapshot()
# Try file from command line first
if not conf.getStateFromSnapshot(genesisState[]):
if genesisState.isNil:
# Didn't work, try creating a genesis state using main chain monitor
# TODO Could move this to a separate "GenesisMonitor" process or task
# that would do only this - see
@ -178,23 +176,27 @@ proc init*(T: type BeaconNode, conf: BeaconNodeConf): Future[BeaconNode] {.async
error "No initial state, need genesis state or deposit contract address"
quit 1
genesisState[] = await mainchainMonitor.getGenesis()
genesisState = await mainchainMonitor.getGenesis()
if genesisState[].slot != GENESIS_SLOT:
# TODO how to get a block from a non-genesis state?
error "Starting from non-genesis state not supported",
stateSlot = genesisState[].slot,
stateRoot = hash_tree_root(genesisState[])
quit 1
# This is needed to prove the not nil property from here on
if genesisState == nil:
doAssert false
else:
if genesisState.slot != GENESIS_SLOT:
# TODO how to get a block from a non-genesis state?
error "Starting from non-genesis state not supported",
stateSlot = genesisState.slot,
stateRoot = hash_tree_root(genesisState)
quit 1
let tailBlock = get_initial_beacon_block(genesisState[])
let tailBlock = get_initial_beacon_block(genesisState)
try:
BlockPool.preInit(db, genesisState[], tailBlock)
doAssert BlockPool.isInitialized(db), "preInit should have initialized db"
except CatchableError as e:
error "Failed to initialize database", err = e.msg
quit 1
try:
BlockPool.preInit(db, genesisState, tailBlock)
doAssert BlockPool.isInitialized(db), "preInit should have initialized db"
except CatchableError as e:
error "Failed to initialize database", err = e.msg
quit 1
# TODO check that genesis given on command line (if any) matches database
let
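
Because getStateFromSnapshot now returns a NilableBeaconState while the rest of the code expects the not-nil BeaconState, the branch above first rules out nil explicitly (the `if genesisState == nil: doAssert false` step) so the compiler's nil tracking accepts the later uses. A self-contained sketch of the same pattern, with hypothetical Widget/NilableWidget types standing in for the real ones:

{.experimental: "notnil".}

type
  WidgetObj = object
    value: int
  Widget = ref WidgetObj not nil
  NilableWidget = ref WidgetObj

proc consume(w: Widget) = discard w.value

proc demo(maybe: NilableWidget) =
  if maybe == nil:
    doAssert false    # never reached in practice; proves the not-nil property
  else:
    consume(maybe)    # in this branch `maybe` is known to be non-nil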
@ -1057,7 +1059,7 @@ proc installBeaconApiHandlers(rpcServer: RpcServer, node: BeaconNode) =
requireOneOf(slot, root)
if slot.isSome:
let blk = node.blockPool.head.blck.atSlot(slot.get)
var tmpState: StateData
var tmpState = emptyStateData()
node.blockPool.withState(tmpState, blk):
return jsonResult(state)
else:

View File

@ -225,6 +225,20 @@ type
root*: Eth2Digest
historySlots*: uint64
func emptyStateData*: StateData =
StateData(
data: HashedBeaconState(
# Please note that this initialization is needed in order
# to allocate memory for the BeaconState:
data: BeaconState(),
root: default(Eth2Digest)
),
blck: default(BlockRef))
func clone*(other: StateData): StateData =
StateData(data: clone(other.data),
blck: other.blck)
proc shortLog*(v: AttachedValidator): string = shortLog(v.pubKey)
chronicles.formatIt BlockSlot:

View File

@ -9,7 +9,7 @@
import
bitops, chronicles, options, tables,
ssz, beacon_chain_db, state_transition, extras, kvstore,
stew/results, ssz, beacon_chain_db, state_transition, extras, kvstore,
beacon_node_types, metrics,
spec/[crypto, datatypes, digest, helpers, validator]
@ -204,21 +204,20 @@ proc init*(T: type BlockPool, db: BeaconChainDB): BlockPool =
"state data missing for tail block, database corrupt?"
latestStateRoot = some((tailBlock.message.state_root, tailRef))
# TODO can't do straight init because in mainnet config, there are too
# many live beaconstates on the stack...
var tmpState = new Option[BeaconState]
# We're only saving epoch boundary states in the database right now, so when
# we're loading the head block, the corresponding state does not necessarily
# exist in the database - we'll load this latest state we know about and use
# that as finalization point.
tmpState[] = db.getState(latestStateRoot.get().stateRoot)
let stateOpt = db.getState(latestStateRoot.get().stateRoot)
doAssert stateOpt.isSome, "failed to obtain latest state. database corrupt?"
let tmpState = stateOpt.get
let
finalizedSlot =
tmpState[].get().finalized_checkpoint.epoch.compute_start_slot_at_epoch()
tmpState.finalized_checkpoint.epoch.compute_start_slot_at_epoch()
finalizedHead = headRef.findAncestorBySlot(finalizedSlot)
justifiedSlot =
tmpState[].get().current_justified_checkpoint.epoch.compute_start_slot_at_epoch()
tmpState.current_justified_checkpoint.epoch.compute_start_slot_at_epoch()
justifiedHead = headRef.findAncestorBySlot(justifiedSlot)
head = Head(blck: headRef, justified: justifiedHead)
justifiedBlock = db.getBlock(justifiedHead.blck.root).get()
@ -231,6 +230,18 @@ proc init*(T: type BlockPool, db: BeaconChainDB): BlockPool =
head = head.blck, finalizedHead, tail = tailRef,
totalBlocks = blocks.len
let headState = StateData(
data: HashedBeaconState(
data: tmpState, root: latestStateRoot.get().stateRoot),
blck: latestStateRoot.get().blckRef)
let justifiedState = db.getState(justifiedStateRoot)
doAssert justifiedState.isSome,
"failed to obtain latest justified state. database corrupt?"
# For the initialization of `tmpState` below.
# Please note that it's initialized a few lines below
{.push warning[UnsafeDefault]: off.}
let res = BlockPool(
pending: initTable[Eth2Digest, SignedBeaconBlock](),
missing: initTable[Eth2Digest, MissingBlock](),
@ -249,21 +260,17 @@ proc init*(T: type BlockPool, db: BeaconChainDB): BlockPool =
finalizedHead: finalizedHead,
db: db,
heads: @[head],
headState: headState,
justifiedState: StateData(
data: HashedBeaconState(data: justifiedState.get, root: justifiedStateRoot),
blck: justifiedHead.blck),
tmpState: default(StateData)
)
{.pop.}
res.headState = StateData(
data: HashedBeaconState(
data: tmpState[].get(), root: latestStateRoot.get().stateRoot),
blck: latestStateRoot.get().blckRef)
res.updateStateData(res.headState, BlockSlot(blck: head.blck, slot: head.blck.slot))
res.tmpState = res.headState
tmpState[] = db.getState(justifiedStateRoot)
res.justifiedState = StateData(
data: HashedBeaconState(data: tmpState[].get(), root: justifiedStateRoot),
blck: justifiedHead.blck)
res.updateStateData(res.headState, BlockSlot(blck: head.blck,
slot: head.blck.slot))
res.tmpState = clone(res.headState)
res
proc addResolvedBlock(
@ -568,7 +575,7 @@ proc skipAndUpdateState(
skipAndUpdateState(state, signedBlock.message.slot - 1, afterUpdate)
let ok = state_transition(state, signedBlock, flags)
let ok = state_transition(state, signedBlock, flags)
afterUpdate(state)
@ -653,7 +660,7 @@ proc rewindState(pool: BlockPool, state: var StateData, bs: BlockSlot):
# writing and deleting state+root mappings in a single transaction, it's
# likely to happen and we guard against it here.
if stateRoot.isSome() and not pool.db.containsState(stateRoot.get()):
stateRoot = none(type(stateRoot.get()))
stateRoot.err()
while stateRoot.isNone():
let parBs = curBs.parent()
@ -718,7 +725,7 @@ proc rewindState(pool: BlockPool, state: var StateData, bs: BlockSlot):
ancestors = ancestors.len,
cat = "replay_state"
state.data.data = ancestorState.get()
state.data.data[] = ancestorState.get()[]
state.data.root = stateRoot.get()
state.blck = ancestor.refs

View File

@ -41,7 +41,7 @@ type
depositContractAddress: Address
dataProviderFactory*: DataProviderFactory
genesisState: ref BeaconState
genesisState: NilableBeaconState
genesisStateFut: Future[void]
eth1Chain: Eth1Chain
@ -87,6 +87,10 @@ type
const
reorgDepthLimit = 1000
# TODO Nim's analysis on the lock level of the methods in this
# module seems broken. Investigate and file this as an issue.
{.push warning[LockLevel]: off.}
# https://github.com/ethereum/eth2.0-specs/blob/v0.11.1/specs/phase0/validator.md#get_eth1_data
func compute_time_at_slot(state: BeaconState, slot: Slot): uint64 =
return state.genesis_time + slot * SECONDS_PER_SLOT
@ -346,8 +350,7 @@ proc checkForGenesisEvent(m: MainchainMonitor) =
# https://github.com/ethereum/eth2.0-pm/tree/6e41fcf383ebeb5125938850d8e9b4e9888389b4/interop/mocked_start#create-genesis-state
s.genesis_time = startTime
m.genesisState.new()
m.genesisState[] = s
m.genesisState = clone(s)
if not m.genesisStateFut.isNil:
m.genesisStateFut.complete()
m.genesisStateFut = nil
@ -436,8 +439,11 @@ proc getGenesis*(m: MainchainMonitor): Future[BeaconState] {.async.} =
await m.genesisStateFut
m.genesisStateFut = nil
doAssert(not m.genesisState.isNil)
return m.genesisState[]
if m.genesisState == nil:
doAssert(false)
return BeaconState()
else:
return m.genesisState
method getBlockByHash*(p: Web3DataProviderRef, hash: BlockHash): Future[BlockObject] =
discard
@ -585,7 +591,11 @@ proc stop*(m: MainchainMonitor) =
proc getLatestEth1BlockHash*(url: string): Future[Eth2Digest] {.async.} =
let web3 = await newWeb3(url)
defer: await web3.close()
let blk = await web3.provider.eth_getBlockByNumber("latest", false)
return Eth2Digest(data: array[32, byte](blk.hash))
try:
let blk = await web3.provider.eth_getBlockByNumber("latest", false)
return Eth2Digest(data: array[32, byte](blk.hash))
finally:
await web3.close()
{.pop.}

View File

@ -20,6 +20,8 @@
# TODO report compiler crash when this is uncommented
# {.push raises: [Defect].}
{.experimental: "notnil".}
import
macros, hashes, json, strutils, tables,
stew/[byteutils, bitseqs], chronicles,
@ -242,7 +244,7 @@ type
voluntary_exits*: List[SignedVoluntaryExit, MAX_VOLUNTARY_EXITS]
# https://github.com/ethereum/eth2.0-specs/blob/v0.11.1/specs/phase0/beacon-chain.md#beaconstate
BeaconState* = object
BeaconStateObj* = object
# Versioning
genesis_time*: uint64
genesis_validators_root*: Eth2Digest
@ -294,6 +296,9 @@ type
current_justified_checkpoint*: Checkpoint
finalized_checkpoint*: Checkpoint
BeaconState* = ref BeaconStateObj not nil
NilableBeaconState* = ref BeaconStateObj
# https://github.com/ethereum/eth2.0-specs/blob/v0.11.1/specs/phase0/beacon-chain.md#validator
Validator* = object
pubkey*: ValidatorPubKey
@ -565,6 +570,15 @@ template readValue*(reader: var JsonReader, value: var BitList) =
template writeValue*(writer: var JsonWriter, value: BitList) =
writeValue(writer, BitSeq value)
func clone*[T](x: ref T): ref T not nil =
new result
result[] = x[]
func clone*(other: HashedBeaconState): HashedBeaconState =
HashedBeaconState(
data: clone(other.data),
root: other.root)
template init*(T: type BitList, len: int): auto = T init(BitSeq, len)
template len*(x: BitList): auto = len(BitSeq(x))
template bytes*(x: BitList): auto = bytes(BitSeq(x))
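
BeaconState becomes an alias for a not-nil ref, with NilableBeaconState as the escape hatch, and clone replaces plain assignment wherever a real copy is needed. A self-contained sketch of the pattern with a hypothetical Counter type; the clone helper has the same shape as the one defined above:

{.experimental: "notnil".}

type
  CounterObj = object
    count: int
  Counter = ref CounterObj not nil   # analogous to BeaconState

func clone[T](x: ref T): ref T not nil =
  new result
  result[] = x[]

let a = Counter(count: 1)
let b = clone(a)       # copies the pointee; a and b no longer alias
b.count = 2
doAssert a.count == 1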

View File

@ -170,7 +170,7 @@ func get_domain*(
state: BeaconState, domain_type: DomainType, epoch: Epoch): Domain =
## Return the signature domain (fork version concatenated with domain type)
## of a message.
get_domain(state.fork, domain_type, epoch, state. genesis_validators_root)
get_domain(state.fork, domain_type, epoch, state.genesis_validators_root)
# https://github.com/ethereum/eth2.0-specs/blob/v0.11.1/specs/phase0/beacon-chain.md#compute_signing_root
func compute_signing_root*(ssz_object: auto, domain: Domain): Eth2Digest =

View File

@ -485,7 +485,7 @@ proc makeBeaconBlock*(
deposits: deposits)
)
var tmpState = state
var tmpState = clone(state)
let ok = process_block(tmpState, blck, {skipBlsValidation}, cache)
if not ok:

View File

@ -148,7 +148,7 @@ template writeFixedSized(s: OutputStream, x: auto) =
template supports*(_: type SSZ, T: type): bool =
mixin toSszType
anonConst compiles(fixedPortionSize toSszType(default(T)))
anonConst compiles(fixedPortionSize toSszType(declval T))
func init*(T: type SszWriter, stream: OutputStream): T {.raises: [Defect].} =
result.stream = stream
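
The representation-type helpers switch from toSszType(default(T)) to toSszType(declval T). declval is not defined in this diff (it presumably comes from stew/objects, which is newly imported further down); its job is to provide a typed expression without constructing a value of T, which matters now that BeaconState is a not-nil ref whose default would be nil. A hypothetical declval-style helper, for illustration only:

template declvalSketch(T: type): untyped =
  # Only meaningful inside type/typeof contexts, so nothing is evaluated
  # and no value of T is ever constructed.
  default(ptr T)[]

type IntRepr = typeof(declvalSketch(int))
static: doAssert IntRepr is int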

View File

@ -84,9 +84,8 @@ func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
result = T readSszValue(input, seq[ElemType])
elif result is ptr|ref:
if input.len > 0:
new result
result[] = readSszValue(input, type(result[]))
new result
result[] = readSszValue(input, type(result[]))
elif result is Option:
if input.len > 0:
@ -161,8 +160,8 @@ func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
const boundingOffsets = T.getFieldBoundingOffsets(fieldName)
trs "BOUNDING OFFSET FOR FIELD ", fieldName, " = ", boundingOffsets
type FieldType = type field
type SszType = type toSszType(default(FieldType))
type FieldType = type maybeDeref(field)
type SszType = type toSszType(declval FieldType)
when isFixedSize(SszType):
const
@ -185,13 +184,21 @@ func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
# TODO The extra type escaping here is a work-around for a Nim issue:
when type(FieldType) is type(SszType):
trs "READING NATIVE ", fieldName, ": ", name(SszType)
field = readSszValue(input.toOpenArray(startOffset, endOffset - 1), SszType)
maybeDeref(field) = readSszValue(
input.toOpenArray(startOffset, endOffset - 1),
SszType)
trs "READING COMPLETE ", fieldName
elif useListType and FieldType is List:
field = readSszValue(input.toOpenArray(startOffset, endOffset - 1), FieldType)
maybeDeref(field) = readSszValue(
input.toOpenArray(startOffset, endOffset - 1),
FieldType)
else:
trs "READING FOREIGN ", fieldName, ": ", name(SszType)
field = fromSszBytes(FieldType, input.toOpenArray(startOffset, endOffset - 1))
maybeDeref(field) = fromSszBytes(
FieldType,
input.toOpenArray(startOffset, endOffset - 1))
else:
unsupported T

View File

@ -3,7 +3,7 @@
import
strutils, parseutils,
faststreams/output_stream, json_serialization/writer,
stew/objects, faststreams/output_stream, json_serialization/writer,
../spec/datatypes,
types, bytes_reader, navigator
@ -79,7 +79,7 @@ proc typeInfo*(T: type): TypeInfo =
func genTypeInfo(T: type): TypeInfo =
mixin toSszType, enumAllSerializedFields
type SszType = type(toSszType default(T))
type SszType = type toSszType(declval T)
result = when type(SszType) isnot T:
TypeInfo(kind: LeafValue)
elif T is object:

View File

@ -40,7 +40,7 @@ func navigateToField*[T](n: SszNavigator[T],
fieldName: static string,
FieldType: type): SszNavigator[FieldType] {.raisesssz.} =
mixin toSszType
type SszFieldType = type toSszType(default FieldType)
type SszFieldType = type toSszType(declval FieldType)
const boundingOffsets = getFieldBoundingOffsets(T, fieldName)
checkBounds(n.m, boundingOffsets[1])
@ -101,7 +101,7 @@ func indexVarSizeList(m: MemRange, idx: int): MemRange {.raisesssz.} =
template indexList(n, idx, T: untyped): untyped =
type R = T
mixin toSszType
type ElemType = type toSszType(default R)
type ElemType = type toSszType(declval R)
when isFixedSize(ElemType):
const elemSize = fixedPortionSize(ElemType)
let elemPos = idx * elemSize
@ -119,11 +119,16 @@ template `[]`*[R, T](n: SszNavigator[array[R, T]], idx: int): SszNavigator[T] =
func `[]`*[T](n: SszNavigator[T]): T {.raisesssz.} =
mixin toSszType, fromSszBytes
type SszRepr = type(toSszType default(T))
when type(SszRepr) is type(T):
readSszValue(toOpenArray(n.m), T)
when T is ref:
type ObjectType = type(result[])
new result
result[] = SszNavigator[ObjectType](n)[]
else:
fromSszBytes(T, toOpenArray(n.m))
type SszRepr = type toSszType(declval T)
when type(SszRepr) is type(T):
readSszValue(toOpenArray(n.m), T)
else:
fromSszBytes(T, toOpenArray(n.m))
converter derefNavigator*[T](n: SszNavigator[T]): T {.raisesssz.} =
n[]

View File

@ -84,13 +84,19 @@ template ElemType*[T](A: type[openarray[T]]): untyped =
template ElemType*(T: type[seq|string|List]): untyped =
type(default(T)[0])
template maybeDeref*(x: auto): auto =
when type(x) is ref|ptr:
x[]
else:
x
func isFixedSize*(T0: type): bool {.compileTime.} =
mixin toSszType, enumAllSerializedFields
when T0 is openarray|Option|ref|ptr:
return false
else:
type T = type toSszType(default T0)
type T = type toSszType(declval T0)
when T is BasicType:
return true
@ -104,7 +110,7 @@ func isFixedSize*(T0: type): bool {.compileTime.} =
func fixedPortionSize*(T0: type): int {.compileTime.} =
mixin enumAllSerializedFields, toSszType
type T = type toSszType(default T0)
type T = type toSszType(declval T0)
when T is BasicType: sizeof(T)
elif T is array:
@ -123,7 +129,7 @@ func fixedPortionSize*(T0: type): int {.compileTime.} =
func sszSchemaType*(T0: type): SszType {.compileTime.} =
mixin toSszType, enumAllSerializedFields
type T = type toSszType(default T0)
type T = type toSszType(declval T0)
when T is bool:
SszType(kind: sszBool)
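
With BeaconState behind a ref, the SSZ reader has to assign decoded fields through the ref rather than to the ref binding itself; the maybeDeref template above makes both cases look the same at the call site. A self-contained sketch using hypothetical Box/BoxObj types, with the template body mirroring the one defined above:

type
  BoxObj = object
    value: int
  Box = ref BoxObj

template maybeDerefSketch(x: auto): auto =
  when type(x) is ref|ptr:
    x[]
  else:
    x

var plain = BoxObj(value: 1)
var boxed = Box(value: 1)

# Both writes land in the object, whether or not it sits behind a ref.
maybeDerefSketch(plain) = BoxObj(value: 2)
maybeDerefSketch(boxed) = BoxObj(value: 2)
doAssert plain.value == 2 and boxed.value == 2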

View File

@ -170,7 +170,7 @@ proc state_transition*(
## TODO, of cacheState/processEpoch/processSlot/processBloc, only the last
## might fail, so should this bother capturing here, or?
var old_state = state
var old_state = clone(state)
# These should never fail.
process_slots(state, signedBlock.message.slot)
@ -194,7 +194,7 @@ proc state_transition*(
return true
# Block processing failed, roll back changes
state = old_state
state[] = old_state[]
false
# Hashed-state transition functions
@ -253,7 +253,7 @@ proc process_slots*(state: var HashedBeaconState, slot: Slot) =
proc state_transition*(
state: var HashedBeaconState, signedBlock: SignedBeaconBlock, flags: UpdateFlags): bool =
# Save for rollback
var old_state = state
var old_state = clone(state)
process_slots(state, signedBlock.message.slot)
@ -275,5 +275,6 @@ proc state_transition*(
return true
# Block processing failed, roll back changes
state = old_state
state.data[] = old_state.data[]
state.root = old_state.root
false
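
With the state now held behind a ref, `old_state = state` would only copy the reference, so the saved snapshot would be mutated together with the live state; the rollback therefore clones up front and restores by copying the pointee back, as the hunks above show. A self-contained sketch of that aliasing pitfall with a toy state type:

type ToyState = ref object
  slot: uint64

func clone(s: ToyState): ToyState =
  ToyState(slot: s.slot)

var state = ToyState(slot: 10)
let old_state = clone(state)   # `let old_state = state` would merely alias
state.slot = 11                # speculative block processing mutates in place
state[] = old_state[]          # roll back by restoring the saved contents
doAssert state.slot == 10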

View File

@ -139,10 +139,8 @@ proc parseSSZ(path: string, T: typedesc): T =
proc runFullTransition*(dir, preState, blocksPrefix: string, blocksQty: int, skipBLS: bool) =
let prePath = dir / preState & ".ssz"
var state: ref BeaconState
new state
echo "Running: ", prePath
state[] = parseSSZ(prePath, BeaconState)
var state = parseSSZ(prePath, BeaconState)
for i in 0 ..< blocksQty:
let blockPath = dir / blocksPrefix & $i & ".ssz"
@ -151,18 +149,16 @@ proc runFullTransition*(dir, preState, blocksPrefix: string, blocksQty: int, ski
let signedBlock = parseSSZ(blockPath, SignedBeaconBlock)
let flags = if skipBLS: {skipBlsValidation}
else: {}
let success = state_transition(state[], signedBlock.message, flags)
let success = state_transition(state, signedBlock.message, flags)
echo "State transition status: ", if success: "SUCCESS ✓" else: "FAILURE ⚠️"
proc runProcessSlots*(dir, preState: string, numSlots: uint64) =
let prePath = dir / preState & ".ssz"
var state: ref BeaconState
new state
echo "Running: ", prePath
state[] = parseSSZ(prePath, BeaconState)
var state = parseSSZ(prePath, BeaconState)
process_slots(state[], state.slot + numSlots)
process_slots(state, state.slot + numSlots)
template processEpochScenarioImpl(
dir, preState: string,
@ -170,19 +166,17 @@ template processEpochScenarioImpl(
needCache: static bool): untyped =
let prePath = dir/preState & ".ssz"
var state: ref BeaconState
new state
echo "Running: ", prePath
state[] = parseSSZ(prePath, BeaconState)
var state = parseSSZ(prePath, BeaconState)
when needCache:
var cache = get_empty_per_epoch_cache()
# Epoch transitions can't fail (TODO is this true?)
when needCache:
transitionFn(state[], cache)
transitionFn(state, cache)
else:
transitionFn(state[])
transitionFn(state)
echo astToStr(transitionFn) & " status: ", "Done" # if success: "SUCCESS ✓" else: "FAILURE ⚠️"
@ -197,10 +191,8 @@ template processBlockScenarioImpl(
needFlags, needCache: static bool): untyped =
let prePath = dir/preState & ".ssz"
var state: ref BeaconState
new state
echo "Running: ", prePath
state[] = parseSSZ(prePath, BeaconState)
var state = parseSSZ(prePath, BeaconState)
var consObj: ref `ConsensusObject`
new consObj
@ -215,13 +207,13 @@ template processBlockScenarioImpl(
consObj[] = parseSSZ(consObjPath, ConsensusObject)
when needFlags and needCache:
let success = transitionFn(state[], consObj[], flags, cache)
let success = transitionFn(state, consObj[], flags, cache)
elif needFlags:
let success = transitionFn(state[], consObj[], flags)
let success = transitionFn(state, consObj[], flags)
elif needCache:
let success = transitionFn(state[], consObj[], flags, cache)
let success = transitionFn(state, consObj[], flags, cache)
else:
let success = transitionFn(state[], consObj[])
let success = transitionFn(state, consObj[])
echo astToStr(transitionFn) & " status: ", if success: "SUCCESS ✓" else: "FAILURE ⚠️"

View File

@ -37,28 +37,20 @@ proc runTest(identifier: string) =
prefix = "[Invalid] "
timedTest prefix & identifier:
var stateRef, postRef: ref BeaconState
var attestationRef: ref Attestation
new attestationRef
new stateRef
var cache = get_empty_per_epoch_cache()
attestationRef[] = parseTest(testDir/"attestation.ssz", SSZ, Attestation)
stateRef[] = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
let attestation = parseTest(testDir/"attestation.ssz", SSZ, Attestation)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
if existsFile(testDir/"post.ssz"):
new postRef
postRef[] = parseTest(testDir/"post.ssz", SSZ, BeaconState)
if postRef.isNil:
let done = process_attestation(stateRef[], attestationRef[], {}, cache)
doAssert done == false, "We didn't expect this invalid attestation to be processed."
else:
let done = process_attestation(stateRef[], attestationRef[], {}, cache)
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
let done = process_attestation(preState, attestation, {}, cache)
doAssert done, "Valid attestation not processed"
check: stateRef.hash_tree_root() == postRef.hash_tree_root()
reportDiff(stateRef, postRef)
check: preState.hash_tree_root() == postState.hash_tree_root()
reportDiff(preState, postState)
else:
let done = process_attestation(preState, attestation, {}, cache)
doAssert done == false, "We didn't expect this invalid attestation to be processed."
`testImpl _ operations_attestations _ identifier`()

View File

@ -37,30 +37,22 @@ proc runTest(identifier: string) =
prefix = "[Invalid] "
timedTest prefix & identifier:
var stateRef, postRef: ref BeaconState
var attesterSlashingRef: ref AttesterSlashing
new attesterSlashingRef
new stateRef
var cache = get_empty_per_epoch_cache()
attesterSlashingRef[] = parseTest(testDir/"attester_slashing.ssz", SSZ, AttesterSlashing)
stateRef[] = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
let attesterSlashing = parseTest(testDir/"attester_slashing.ssz", SSZ, AttesterSlashing)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
if existsFile(testDir/"post.ssz"):
new postRef
postRef[] = parseTest(testDir/"post.ssz", SSZ, BeaconState)
if postRef.isNil:
let done = process_attester_slashing(stateRef[], attesterSlashingRef[],
{}, cache)
doAssert done == false, "We didn't expect this invalid attester slashing to be processed."
else:
let done = process_attester_slashing(stateRef[], attesterSlashingRef[],
{}, cache)
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
let done = process_attester_slashing(preState, attesterSlashing,
{}, cache)
doAssert done, "Valid attestater slashing not processed"
check: stateRef.hash_tree_root() == postRef.hash_tree_root()
reportDiff(stateRef, postRef)
check: preState.hash_tree_root() == postState.hash_tree_root()
reportDiff(preState, postState)
else:
let done = process_attester_slashing(preState, attesterSlashing,
{}, cache)
doAssert done == false, "We didn't expect this invalid attester slashing to be processed."
`testImpl _ operations_attester_slashing _ identifier`()

View File

@ -37,28 +37,20 @@ proc runTest(identifier: string) =
prefix = "[Invalid] "
timedTest prefix & identifier:
var stateRef, postRef: ref BeaconState
var blck: ref BeaconBlock
new blck
new stateRef
var cache = get_empty_per_epoch_cache()
blck[] = parseTest(testDir/"block.ssz", SSZ, BeaconBlock)
stateRef[] = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
let blck = parseTest(testDir/"block.ssz", SSZ, BeaconBlock)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
if existsFile(testDir/"post.ssz"):
new postRef
postRef[] = parseTest(testDir/"post.ssz", SSZ, BeaconState)
if postRef.isNil:
let done = process_block_header(stateRef[], blck[], {}, cache)
doAssert done == false, "We didn't expect this invalid block header to be processed."
else:
let done = process_block_header(stateRef[], blck[], {}, cache)
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
let done = process_block_header(preState, blck, {}, cache)
doAssert done, "Valid block header not processed"
check: stateRef.hash_tree_root() == postRef.hash_tree_root()
reportDiff(stateRef, postRef)
check: preState.hash_tree_root() == postState.hash_tree_root()
reportDiff(preState, postState)
else:
let done = process_block_header(preState, blck, {}, cache)
doAssert done == false, "We didn't expect this invalid block header to be processed."
`testImpl _ blockheader _ identifier`()

View File

@ -40,23 +40,15 @@ proc runTest(identifier: string) =
prefix = "[Invalid] "
timedTest prefix & " " & identifier:
var stateRef, postRef: ref BeaconState
var depositRef: ref Deposit
new depositRef
new stateRef
depositRef[] = parseTest(testDir/"deposit.ssz", SSZ, Deposit)
stateRef[] = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
let deposit = parseTest(testDir/"deposit.ssz", SSZ, Deposit)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
if existsFile(testDir/"post.ssz"):
new postRef
postRef[] = parseTest(testDir/"post.ssz", SSZ, BeaconState)
if postRef.isNil:
check not process_deposit(stateRef[], depositRef[], flags)
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
discard process_deposit(preState, deposit, flags)
reportDiff(preState, postState)
else:
discard process_deposit(stateRef[], depositRef[], flags)
reportDiff(stateRef, postRef)
check not process_deposit(preState, deposit, flags)
`testImpl _ operations_deposits _ identifier`()

View File

@ -37,28 +37,20 @@ proc runTest(identifier: string) =
prefix = "[Invalid] "
timedTest prefix & astToStr(identifier):
var stateRef, postRef: ref BeaconState
var proposerSlashing: ref ProposerSlashing
new proposerSlashing
new stateRef
proposerSlashing[] = parseTest(testDir/"proposer_slashing.ssz", SSZ, ProposerSlashing)
stateRef[] = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
if existsFile(testDir/"post.ssz"):
new postRef
postRef[] = parseTest(testDir/"post.ssz", SSZ, BeaconState)
let proposerSlashing = parseTest(testDir/"proposer_slashing.ssz", SSZ, ProposerSlashing)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
var cache = get_empty_per_epoch_cache()
if postRef.isNil:
let done = process_proposer_slashing(stateRef[], proposerSlashing[], {}, cache)
doAssert done == false, "We didn't expect this invalid proposer slashing to be processed."
else:
let done = process_proposer_slashing(stateRef[], proposerSlashing[], {}, cache)
if existsFile(testDir/"post.ssz"):
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
let done = process_proposer_slashing(preState, proposerSlashing, {}, cache)
doAssert done, "Valid proposer slashing not processed"
check: stateRef.hash_tree_root() == postRef.hash_tree_root()
reportDiff(stateRef, postRef)
check: preState.hash_tree_root() == postState.hash_tree_root()
reportDiff(preState, postState)
else:
let done = process_proposer_slashing(preState, proposerSlashing, {}, cache)
doAssert done == false, "We didn't expect this invalid proposer slashing to be processed."
`testImpl_proposer_slashing _ identifier`()

View File

@ -37,26 +37,18 @@ proc runTest(identifier: string) =
prefix = "[Invalid] "
timedTest prefix & identifier:
var stateRef, postRef: ref BeaconState
var voluntaryExit: ref SignedVoluntaryExit
new voluntaryExit
new stateRef
voluntaryExit[] = parseTest(testDir/"voluntary_exit.ssz", SSZ, SignedVoluntaryExit)
stateRef[] = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
let voluntaryExit = parseTest(testDir/"voluntary_exit.ssz", SSZ, SignedVoluntaryExit)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
if existsFile(testDir/"post.ssz"):
new postRef
postRef[] = parseTest(testDir/"post.ssz", SSZ, BeaconState)
if postRef.isNil:
let done = process_voluntary_exit(stateRef[], voluntaryExit[], {})
doAssert done == false, "We didn't expect this invalid voluntary exit to be processed."
else:
let done = process_voluntary_exit(stateRef[], voluntaryExit[], {})
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
let done = process_voluntary_exit(preState, voluntaryExit, {})
doAssert done, "Valid voluntary exit not processed"
check: stateRef.hash_tree_root() == postRef.hash_tree_root()
reportDiff(stateRef, postRef)
check: preState.hash_tree_root() == postState.hash_tree_root()
reportDiff(preState, postState)
else:
let done = process_voluntary_exit(preState, voluntaryExit, {})
doAssert done == false, "We didn't expect this invalid voluntary exit to be processed."
`testImpl _ voluntary_exit _ identifier`()

View File

@ -34,32 +34,28 @@ proc runTest(identifier: string) =
"[Invalid] "
timedTest prefix & identifier:
var stateRef, postRef: ref BeaconState
new stateRef
stateRef[] = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
if existsFile(testDir/"post.ssz"):
new postRef
postRef[] = parseTest(testDir/"post.ssz", SSZ, BeaconState)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
var hasPostState = existsFile(testDir/"post.ssz")
# In test cases with more than 10 blocks the first 10 aren't 0-prefixed,
# so purely lexicographic sorting wouldn't sort properly.
for i in 0 ..< toSeq(walkPattern(testDir/"blocks_*.ssz")).len:
let blck = parseTest(testDir/"blocks_" & $i & ".ssz", SSZ, SignedBeaconBlock)
if postRef.isNil:
let success = state_transition(stateRef[], blck, flags = {})
doAssert not success, "We didn't expect this invalid block to be processed"
else:
if hasPostState:
# TODO: The EF is using invalid BLS keys so we can't verify them
let success = state_transition(stateRef[], blck, flags = {skipBlsValidation})
let success = state_transition(preState, blck, flags = {skipBlsValidation})
doAssert success, "Failure when applying block " & $i
else:
let success = state_transition(preState, blck, flags = {})
doAssert not success, "We didn't expect this invalid block to be processed"
# check: stateRef.hash_tree_root() == postRef.hash_tree_root()
if not postRef.isNil:
# check: preState.hash_tree_root() == postState.hash_tree_root()
if hasPostState:
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
when false:
reportDiff(stateRef, postRef)
doAssert stateRef.hash_tree_root() == postRef.hash_tree_root()
reportDiff(preState, postState)
doAssert preState.hash_tree_root() == postState.hash_tree_root()
`testImpl _ blck _ identifier`()

View File

@ -31,16 +31,13 @@ proc runTest(identifier: string) =
proc `testImpl _ slots _ identifier`() =
timedTest "Slots - " & identifier:
var stateRef, postRef: ref BeaconState
new stateRef
new postRef
stateRef[] = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
postRef[] = parseTest(testDir/"post.ssz", SSZ, BeaconState)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
process_slots(stateRef[], stateRef.slot + num_slots)
process_slots(preState, preState.slot + num_slots)
# check: stateRef.hash_tree_root() == postRef.hash_tree_root()
reportDiff(stateRef, postRef)
# check: preState.hash_tree_root() == postState.hash_tree_root()
reportDiff(preState, postState)
`testImpl _ slots _ identifier`()

View File

@ -82,11 +82,7 @@ proc checkBasic(T: typedesc,
var fileContents = readFile(dir/"serialized.ssz")
var stream = memoryInput(fileContents)
var reader = init(SszReader, stream)
# We are using heap allocation to avoid stack overflow
# issues caused by large objects such as `BeaconState`:
var deserialized = new T
reader.readValue(deserialized[])
var deserialized = reader.readValue(T)
if stream.readable:
raise newException(UnconsumedInput, "Remaining bytes in the input")

View File

@ -38,16 +38,16 @@ template runSuite(suiteDir, testName: string, transitionProc: untyped{ident}, us
let unitTestName = testDir.rsplit(DirSep, 1)[1]
timedTest testName & " - " & unitTestName & preset():
let stateRef = parseTest(testDir/"pre.ssz", SSZ, ref BeaconState)
let postRef = parseTest(testDir/"post.ssz", SSZ, ref BeaconState)
var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconState)
let postState = parseTest(testDir/"post.ssz", SSZ, BeaconState)
when useCache:
var cache = get_empty_per_epoch_cache()
transitionProc(stateRef[], cache)
transitionProc(preState, cache)
else:
transitionProc(stateRef[])
transitionProc(preState)
reportDiff(stateRef, postRef)
reportDiff(preState, postState)
`suiteImpl _ transitionProc`()

View File

@ -33,8 +33,7 @@ suiteReport "[Unit - Spec - Block processing] Attestations " & preset():
# The attestation to process must be named "attestation" in the calling context
timedTest name:
var state{.inject.}: BeaconState
deepCopy(state, genesisState)
var state {.inject.} = clone(genesisState)
# Attestation setup body
# ----------------------------------------

View File

@ -33,8 +33,7 @@ suiteReport "[Unit - Spec - Block processing] Deposits " & preset():
# TODO: BLS signature
timedTest "Deposit " & name & " MAX_EFFECTIVE_BALANCE balance (" &
$(MAX_EFFECTIVE_BALANCE div 10'u64^9) & " ETH)":
var state: BeaconState
deepCopy(state, genesisState)
var state = clone(genesisState)
# Test configuration
# ----------------------------------------
@ -75,9 +74,7 @@ suiteReport "[Unit - Spec - Block processing] Deposits " & preset():
valid_deposit(MAX_EFFECTIVE_BALANCE + 1, "over")
timedTest "Validator top-up":
var state: BeaconState
deepCopy(state, genesisState)
var state = clone(genesisState)
# Test configuration
# ----------------------------------------

View File

@ -212,46 +212,39 @@ proc finalizeOn12(state: var BeaconState, epoch: Epoch, sufficient_support: bool
doAssert state.current_justified_checkpoint == c2 # still old current
doAssert state.finalized_checkpoint == old_finalized # no new finalized checkpoint
suiteReport "[Unit - Spec - Epoch processing] Justification and Finalization " & preset():
echo " Finalization rules are detailed at https://github.com/protolambda/eth2-docs#justification-and-finalization"
proc payload =
suiteReport "[Unit - Spec - Epoch processing] Justification and Finalization " & preset():
echo " Finalization rules are detailed at https://github.com/protolambda/eth2-docs#justification-and-finalization"
const NumValidators = uint64(8) * SLOTS_PER_EPOCH
let genesisState = initGenesisState(NumValidators)
doAssert genesisState.validators.len == int NumValidators
const NumValidators = uint64(8) * SLOTS_PER_EPOCH
let genesisState = initGenesisState(NumValidators)
doAssert genesisState.validators.len == int NumValidators
var state: BeaconState
template resetState: untyped =
deepCopy(state, genesisState)
setup:
var state = clone(genesisState)
timedTest " Rule I - 234 finalization with enough support":
resetState()
finalizeOn234(state, Epoch 5, sufficient_support = true)
timedTest " Rule I - 234 finalization with enough support":
finalizeOn234(state, Epoch 5, sufficient_support = true)
timedTest " Rule I - 234 finalization without support":
resetState()
finalizeOn234(state, Epoch 5, sufficient_support = false)
timedTest " Rule I - 234 finalization without support":
finalizeOn234(state, Epoch 5, sufficient_support = false)
timedTest " Rule II - 23 finalization with enough support":
resetState()
finalizeOn23(state, Epoch 4, sufficient_support = true)
timedTest " Rule II - 23 finalization with enough support":
finalizeOn23(state, Epoch 4, sufficient_support = true)
timedTest " Rule II - 23 finalization without support":
resetState()
finalizeOn23(state, Epoch 4, sufficient_support = false)
timedTest " Rule II - 23 finalization without support":
finalizeOn23(state, Epoch 4, sufficient_support = false)
timedTest " Rule III - 123 finalization with enough support":
finalizeOn123(state, Epoch 6, sufficient_support = true)
timedTest " Rule III - 123 finalization with enough support":
resetState()
finalizeOn123(state, Epoch 6, sufficient_support = true)
timedTest " Rule III - 123 finalization without support":
finalizeOn123(state, Epoch 6, sufficient_support = false)
timedTest " Rule III - 123 finalization without support":
resetState()
finalizeOn123(state, Epoch 6, sufficient_support = false)
timedTest " Rule IV - 12 finalization with enough support":
finalizeOn12(state, Epoch 3, sufficient_support = true)
timedTest " Rule IV - 12 finalization with enough support":
resetState()
finalizeOn12(state, Epoch 3, sufficient_support = true)
timedTest " Rule IV - 12 finalization without support":
finalizeOn12(state, Epoch 3, sufficient_support = false)
timedTest " Rule IV - 12 finalization without support":
resetState()
finalizeOn12(state, Epoch 3, sufficient_support = false)
payload()
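
The suite drops the manual resetState template in favour of a setup: block, so each timedTest starts from a fresh clone of genesisState. A minimal sketch of the per-test setup behaviour using the stock unittest names (suiteReport and timedTest are assumed here to behave like unittest's suite and test):

import unittest

suite "setup example":
  setup:
    var counter = 0      # runs before every test, like `var state = clone(genesisState)`

  test "first":
    inc counter
    check counter == 1

  test "second":
    check counter == 0   # setup ran again; the previous test's mutation is gone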

View File

@ -55,7 +55,7 @@ suiteReport "Beacon chain DB" & preset():
check:
db.containsState(root)
db.getState(root).get() == state
db.getState(root).get[] == state[]
timedTest "find ancestors" & preset():
var
@ -106,4 +106,4 @@ suiteReport "Beacon chain DB" & preset():
check:
db.containsState(root)
db.getState(root).get() == state
db.getState(root).get[] == state[]

View File

@ -242,7 +242,7 @@ when const_preset == "minimal": # Too much stack space used on mainnet
bs1_3 = b1Add.atSlot(3.Slot)
bs2_3 = b2Add.atSlot(3.Slot)
var tmpState = pool.headState
var tmpState = clone(pool.headState)
# move to specific block
pool.updateStateData(tmpState, bs1)

View File

@ -26,8 +26,7 @@ suiteReport "Block processing" & preset():
genesisRoot = hash_tree_root(genesisBlock.message)
timedTest "Passes from genesis state, no block" & preset():
var
state = genesisState
var state = clone(genesisState)
process_slots(state, state.slot + 1)
check:
@ -35,7 +34,7 @@ suiteReport "Block processing" & preset():
timedTest "Passes from genesis state, empty block" & preset():
var
state = genesisState
state = clone(genesisState)
previous_block_root = hash_tree_root(genesisBlock.message)
new_block = makeTestBlock(state, previous_block_root)
@ -47,8 +46,7 @@ suiteReport "Block processing" & preset():
state.slot == genesisState.slot + 1
timedTest "Passes through epoch update, no block" & preset():
var
state = genesisState
var state = clone(genesisState)
process_slots(state, Slot(SLOTS_PER_EPOCH))
@ -57,7 +55,7 @@ suiteReport "Block processing" & preset():
timedTest "Passes through epoch update, empty block" & preset():
var
state = genesisState
state = clone(genesisState)
previous_block_root = genesisRoot
for i in 1..SLOTS_PER_EPOCH.int:
@ -75,7 +73,7 @@ suiteReport "Block processing" & preset():
timedTest "Attestation gets processed at epoch" & preset():
var
state = genesisState
state = clone(genesisState)
previous_block_root = genesisRoot
cache = get_empty_per_epoch_cache()

View File

@ -148,7 +148,7 @@ proc makeTestBlock*(
# It's a bit awkward - in order to produce a block for N+1, we need to
# calculate what the state will look like after that block has been applied,
# because the block includes the state root.
var tmpState = state
var tmpState = clone(state)
addTestBlock(
tmpState, parent_root, eth1_data, attestations, deposits, graffiti, flags)