ssz: move ref support outside (#943)

* ssz: move ref support outside

Instead of allocating refs inside SSZ, move that to a separate helper
(sketched after the list below):

* makes `ref` allocations explicit
* less magic inside SSZ
* `ref` in Nim generally means reference, whereas SSZ was loading it as a
value - if a type actually used references, a roundtrip would produce
copies instead of references to a single value, which is unexpected
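
For illustration, the decoding pattern changes roughly as follows (this
mirrors the getStateFromSnapshot hunk below; `newClone` is the helper that
allocates a `ref` and copies the decoded value into it):

  # before: SSZ allocated and filled in the ref type itself
  #   let state = SSZ.decode(snapshotContents, BeaconStateRef)
  # after: SSZ decodes a plain value; the caller makes the allocation explicit
  let state = newClone(SSZ.decode(snapshotContents, BeaconState))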

TODO: the EF tests would benefit from some refactoring, since they all do
practically the same thing (see the sketch below).
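
A rough sketch only of what such a shared helper could look like
(`withPrePostStates` is a hypothetical name, not something added here;
`parseTest`, `newClone` and `reportDiff` are the existing helpers visible
in the test diffs below):

  # hypothetical template wrapping the pre/post-state boilerplate that each
  # operations test currently repeats; the per-operation assertions would
  # stay in the individual tests
  template withPrePostStates(testDir: string, body: untyped): untyped =
    var preState {.inject.} =
      newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
    let hasPostState {.inject.} = existsFile(testDir/"post.ssz")
    body
    if hasPostState:
      let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
      check: preState[].hash_tree_root() == postState[].hash_tree_root()
      reportDiff(preState, postState)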

Co-authored-by: Zahary Karadjov <zahary@gmail.com>
Jacek Sieka 2020-04-29 22:12:07 +02:00 committed by GitHub
parent 7b840440bc
commit c74ba5c0c6
26 changed files with 114 additions and 154 deletions

View File

@@ -122,8 +122,8 @@ proc getStateFromSnapshot(conf: BeaconNodeConf): NilableBeaconStateRef =
       error "Failed to read genesis file", err = err.msg
       quit 1

-  try:
-    result = SSZ.decode(snapshotContents, BeaconStateRef)
+  result = try:
+    newClone(SSZ.decode(snapshotContents, BeaconState))
   except SerializationError:
     error "Failed to import genesis file", path = genesisPath
     quit 1
@@ -187,7 +187,7 @@ proc init*(T: type BeaconNode, conf: BeaconNodeConf): Future[BeaconNode] {.async
     # TODO how to get a block from a non-genesis state?
     error "Starting from non-genesis state not supported",
       stateSlot = genesisState.slot,
-      stateRoot = hash_tree_root(genesisState)
+      stateRoot = hash_tree_root(genesisState[])
     quit 1

   let tailBlock = get_initial_beacon_block(genesisState[])
@@ -1397,7 +1397,7 @@ programMain:
       echo "Wrote ", outGenesis

       let outSszGenesis = outGenesis.changeFileExt "ssz"
-      SSZ.saveFile(outSszGenesis, initialState)
+      SSZ.saveFile(outSszGenesis, initialState[])
       echo "Wrote ", outSszGenesis

       let bootstrapFile = config.outputBootstrapFile.string

View File

@@ -405,36 +405,6 @@ type
       Table[Epoch, seq[ValidatorIndex]]
     committee_count_cache*: Table[Epoch, uint64]

-template foreachSpecType*(op: untyped) =
-  ## These are all spec types that will appear in network messages
-  ## and persistent consensus data. This helper template is useful
-  ## for populating RTTI tables that concern them.
-
-  op AggregateAndProof
-  op Attestation
-  op AttestationData
-  op AttesterSlashing
-  op BeaconBlock
-  op BeaconBlockBody
-  op BeaconBlockHeader
-  op BeaconState
-  op Deposit
-  op DepositData
-  op Eth1Block
-  op Eth1Data
-  op Fork
-  op ForkData
-  op HistoricalBatch
-  op IndexedAttestation
-  op PendingAttestation
-  op ProposerSlashing
-  op SignedAggregateAndProof
-  op SignedBeaconBlock
-  op SignedBeaconBlockHeader
-  op SignedVoluntaryExit
-  op SigningRoot
-  op Validator
-  op VoluntaryExit
-
 macro fieldMaxLen*(x: typed): untyped =
   # TODO This macro is a temporary solution for the lack of a
   # more proper way to specify the max length of the List[T; N]

View File

@@ -35,13 +35,12 @@ type
   Eth2Digest* = MDigest[32 * 8]  ## `hash32` from spec
   Eth2Hash* = sha256  ## Context for hash function

-chronicles.formatIt Eth2Digest:
-  mixin toHex
-  it.data[0..3].toHex()
-
 func shortLog*(x: Eth2Digest): string =
   x.data[0..3].toHex()

+chronicles.formatIt Eth2Digest:
+  shortLog(it)
+
 # TODO: expose an in-place digest function
 #       when hashing in loop or into a buffer
 #       See: https://github.com/cheatfate/nimcrypto/blob/b90ba3abd/nimcrypto/sha2.nim#L570

View File

@@ -492,7 +492,7 @@ proc makeBeaconBlock*(
     warn "Unable to apply new block to state", blck = shortLog(blck)
     return

-  blck.state_root = hash_tree_root(tmpState)
+  blck.state_root = hash_tree_root(tmpState[])

   some(blck)

View File

@@ -35,7 +35,7 @@
 {.push raises: [Defect].}

 import
-  math, options, sequtils, tables,
+  math, sequtils, tables,
   stew/[bitseqs, bitops2], chronicles, json_serialization/std/sets,
   metrics, ../ssz,
   beaconstate, crypto, datatypes, digest, helpers, validator,

View File

@@ -109,8 +109,6 @@ template toSszType*(x: auto): auto =
   elif x is Eth2Digest: x.data
   elif x is BlsCurveType: toRaw(x)
   elif x is BitSeq|BitList: ByteList(x)
-  elif x is ref|ptr: toSszType x[]
-  elif x is Option: toSszType x.get
   elif x is TypeWithMaxLen: toSszType valueOf(x)
   elif useListType and x is List: seq[x.T](x)
   else: x
@@ -217,10 +215,6 @@ proc writeVarSizeType(w: var SszWriter, value: auto) {.raises: [Defect, IOError]
     var cursor = w.stream.delayFixedSizeWrite offset
     for elem in value:
       cursor.writeFixedSized uint32(offset)
-      when elem is Option:
-        if not isSome(elem): continue
-      elif elem is ptr|ref:
-        if isNil(elem): continue
       let initPos = w.stream.pos
       w.writeVarSizeType toSszType(elem)
       offset += w.stream.pos - initPos
@@ -509,7 +503,9 @@ func bitlistHashTreeRoot(merkleizer: SszChunksMerkleizer, x: BitSeq): Eth2Digest
   mixInLength contentsHash, x.len

 func hashTreeRootImpl[T](x: T): Eth2Digest =
-  when T is uint64:
+  when T is SignedBeaconBlock:
+    unsupported T # Blocks are identified by htr(BeaconBlock) so we avoid these
+  elif T is uint64:
     trs "UINT64; LITTLE-ENDIAN IDENTITY MAPPING"
     result.data[0..<8] = x.toBytesLE()
   elif (when T is array: ElemType(T) is byte and
@@ -557,8 +553,6 @@ func maxChunksCount(T: type, maxLen: static int64): int64 {.compileTime.} =
 func hash_tree_root*(x: auto): Eth2Digest {.raises: [Defect].} =
   trs "STARTING HASH TREE ROOT FOR TYPE ", name(type(x))
   mixin toSszType
-  when x is SignedBeaconBlock:
-    doassert false
   when x is TypeWithMaxLen:
     const maxLen = x.maxLen
     type T = type valueOf(x)

View File

@@ -91,14 +91,6 @@ func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
     type ElemType = type result[0]
     result = T readSszValue(input, seq[ElemType])

-  elif result is ptr|ref:
-    new result
-    result[] = readSszValue(input, type(result[]))
-
-  elif result is Option:
-    if input.len > 0:
-      result = some readSszValue(input, result.T)
-
   elif result is string|seq|openarray|array:
     type ElemType = type result[0]
     when ElemType is byte|char:
@@ -168,7 +160,7 @@ func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
       const boundingOffsets = T.getFieldBoundingOffsets(fieldName)
       trs "BOUNDING OFFSET FOR FIELD ", fieldName, " = ", boundingOffsets

-      type FieldType = type maybeDeref(field)
+      type FieldType = type field
       type SszType = type toSszType(declval FieldType)

       when isFixedSize(SszType):
@@ -192,19 +184,19 @@ func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
         # TODO The extra type escaping here is a work-around for a Nim issue:
         when type(FieldType) is type(SszType):
           trs "READING NATIVE ", fieldName, ": ", name(SszType)
-          maybeDeref(field) = readSszValue(
+          field = readSszValue(
             input.toOpenArray(startOffset, endOffset - 1),
             SszType)
           trs "READING COMPLETE ", fieldName
         elif useListType and FieldType is List:
-          maybeDeref(field) = readSszValue(
+          field = readSszValue(
             input.toOpenArray(startOffset, endOffset - 1),
             FieldType)
         else:
           trs "READING FOREIGN ", fieldName, ": ", name(SszType)
-          maybeDeref(field) = fromSszBytes(
+          field = fromSszBytes(
             FieldType,
             input.toOpenArray(startOffset, endOffset - 1))

View File

@@ -119,16 +119,11 @@ template `[]`*[R, T](n: SszNavigator[array[R, T]], idx: int): SszNavigator[T] =
 func `[]`*[T](n: SszNavigator[T]): T {.raisesssz.} =
   mixin toSszType, fromSszBytes
-  when T is ref:
-    type ObjectType = type(result[])
-    new result
-    result[] = SszNavigator[ObjectType](n)[]
-  else:
-    type SszRepr = type toSszType(declval T)
-    when type(SszRepr) is type(T):
-      readSszValue(toOpenArray(n.m), T)
-    else:
-      fromSszBytes(T, toOpenArray(n.m))
+  type SszRepr = type toSszType(declval T)
+  when type(SszRepr) is type(T):
+    readSszValue(toOpenArray(n.m), T)
+  else:
+    fromSszBytes(T, toOpenArray(n.m))

 converter derefNavigator*[T](n: SszNavigator[T]): T {.raisesssz.} =
   n[]

View File

@@ -84,16 +84,10 @@ template ElemType*[T](A: type[openarray[T]]): untyped =
 template ElemType*(T: type[seq|string|List]): untyped =
   type(default(T)[0])

-template maybeDeref*(x: auto): auto =
-  when type(x) is ref|ptr:
-    x[]
-  else:
-    x
-
 func isFixedSize*(T0: type): bool {.compileTime.} =
   mixin toSszType, enumAllSerializedFields

-  when T0 is openarray|Option|ref|ptr:
+  when T0 is openarray:
     return false
   else:
     type T = type toSszType(declval T0)
@@ -117,7 +111,7 @@ func fixedPortionSize*(T0: type): int {.compileTime.} =
     type E = ElemType(T)
     when isFixedSize(E): len(T) * fixedPortionSize(E)
     else: len(T) * offsetSize
-  elif T is seq|string|openarray|ref|ptr|Option: offsetSize
+  elif T is seq|string|openarray: offsetSize
   elif T is object|tuple:
     enumAllSerializedFields(T):
       when isFixedSize(FieldType):

View File

@@ -140,7 +140,7 @@ proc runFullTransition*(dir, preState, blocksPrefix: string, blocksQty: int, ski
   let prePath = dir / preState & ".ssz"

   echo "Running: ", prePath
-  var state = parseSSZ(prePath, BeaconStateRef)
+  var state = newClone(parseSSZ(prePath, BeaconState))

   for i in 0 ..< blocksQty:
     let blockPath = dir / blocksPrefix & $i & ".ssz"
@@ -157,7 +157,7 @@ proc runProcessSlots*(dir, preState: string, numSlots: uint64) =
   let prePath = dir / preState & ".ssz"

   echo "Running: ", prePath
-  var state = parseSSZ(prePath, BeaconStateRef)
+  var state = newClone(parseSSZ(prePath, BeaconState))

   process_slots(state[], state.slot + numSlots)
@@ -168,7 +168,7 @@ template processEpochScenarioImpl(
     let prePath = dir/preState & ".ssz"

     echo "Running: ", prePath
-    var state = parseSSZ(prePath, BeaconStateRef)
+    var state = newClone(parseSSZ(prePath, BeaconState))

     when needCache:
       var cache = get_empty_per_epoch_cache()
@@ -193,7 +193,7 @@ template processBlockScenarioImpl(
     let prePath = dir/preState & ".ssz"

     echo "Running: ", prePath
-    var state = parseSSZ(prePath, BeaconStateRef)
+    var state = newClone(parseSSZ(prePath, BeaconState))

     when needCache:
       var cache = get_empty_per_epoch_cache()

View File

@@ -1,6 +1,6 @@
 import
   confutils, os, strutils, chronicles, json_serialization,
-  nimcrypto/utils,
+  stew/byteutils,
   ../beacon_chain/spec/[crypto, datatypes, digest],
   ../beacon_chain/ssz
@@ -8,16 +8,16 @@ import
 cli do(kind: string, file: string):
   template printit(t: untyped) {.dirty.} =
-    let v =
+    let v = newClone(
       if cmpIgnoreCase(ext, ".ssz") == 0:
         SSZ.loadFile(file, t)
       elif cmpIgnoreCase(ext, ".json") == 0:
         JSON.loadFile(file, t)
       else:
         echo "Unknown file type: ", ext
         quit 1
+    )

-    echo hash_tree_root(v).data.toHex(true)
+    echo hash_tree_root(v[]).data.toHex()

   let ext = splitFile(file).ext
@@ -30,6 +30,6 @@ cli do(kind: string, file: string):
   of "deposit": printit(Deposit)
   of "deposit_data": printit(DepositData)
   of "eth1_data": printit(Eth1Data)
-  of "state": printit(BeaconStateRef)
+  of "state": printit(BeaconState)
   of "proposer_slashing": printit(ProposerSlashing)
   of "voluntary_exit": printit(VoluntaryExit)

View File

@@ -5,18 +5,17 @@ import
 # TODO turn into arguments
 cli do(kind: string, file: string):
   template printit(t: untyped) {.dirty.} =
-    let v =
+    let v = newClone(
       if cmpIgnoreCase(ext, ".ssz") == 0:
         SSZ.loadFile(file, t)
       elif cmpIgnoreCase(ext, ".json") == 0:
         JSON.loadFile(file, t)
       else:
         echo "Unknown file type: ", ext
         quit 1
+    )

-    echo JSON.encode(v, pretty = true)
+    echo JSON.encode(v[], pretty = true)

   let ext = splitFile(file).ext
@@ -29,7 +28,7 @@ cli do(kind: string, file: string):
   of "deposit": printit(Deposit)
   of "deposit_data": printit(DepositData)
   of "eth1_data": printit(Eth1Data)
-  of "state": printit(BeaconStateRef)
+  of "state": printit(BeaconState)
   of "proposer_slashing": printit(ProposerSlashing)
   of "voluntary_exit": printit(VoluntaryExit)
   else: echo "Unknown kind"

View File

@@ -5,12 +5,15 @@ import
 cli do(pre: string, blck: string, post: string, verifyStateRoot = false):
   let
-    stateX = SSZ.loadFile(pre, BeaconStateRef)
+    stateY = (ref HashedBeaconState)(
+      data: SSZ.loadFile(pre, BeaconState),
+    )
     blckX = SSZ.loadFile(blck, SignedBeaconBlock)
     flags = if verifyStateRoot: {skipStateRootValidation} else: {}

-  var stateY = HashedBeaconState(data: stateX[], root: hash_tree_root(stateX[]))
-  if not state_transition(stateY, blckX, flags, noRollback):
+  stateY.root = hash_tree_root(stateY.data)
+
+  if not state_transition(stateY[], blckX, flags, noRollback):
     error "State transition failed"
   else:
     SSZ.saveFile(post, stateY.data)

View File

@@ -10,23 +10,23 @@ import
 type
   AttestationInput = object
-    state: BeaconStateRef
+    state: BeaconState
     attestation: Attestation
   AttesterSlashingInput = object
-    state: BeaconStateRef
+    state: BeaconState
     attesterSlashing: AttesterSlashing
   BlockInput = object
-    state: BeaconStateRef
+    state: BeaconState
     beaconBlock: SignedBeaconBlock
   BlockHeaderInput = BlockInput
   DepositInput = object
-    state: BeaconStateRef
+    state: BeaconState
     deposit: Deposit
   ProposerSlashingInput = object
-    state: BeaconStateRef
+    state: BeaconState
     proposerSlashing: ProposerSlashing
   VoluntaryExitInput = object
-    state: BeaconStateRef
+    state: BeaconState
     exit: SignedVoluntaryExit

 # This and AssertionError are raised to indicate programming bugs
 # A wrapper to allow exception tracking to identify unexpected exceptions
@@ -61,7 +61,7 @@ template decodeAndProcess(typ, process: untyped): bool =
   var
     cache {.used, inject.} = get_empty_per_epoch_cache()
-    data {.inject.} =
+    data {.inject.} = newClone(
       try:
         SSZ.decode(input, typ)
       except MalformedSszError as e:
@@ -72,6 +72,7 @@ template decodeAndProcess(typ, process: untyped): bool =
         raise newException(
           FuzzCrashError,
           "SSZ size mismatch, likely bug in preprocessing.", e)
+    )

   let processOk =
     try:
       process
@@ -89,44 +90,44 @@ template decodeAndProcess(typ, process: untyped): bool =
       raise newException(FuzzCrashError, "Unexpected Exception in state transition", e)

   if processOk:
-    copyState(data.state[], output, output_size)
+    copyState(data.state, output, output_size)
   else:
     false

 proc nfuzz_attestation(input: openArray[byte], output: ptr byte,
     output_size: ptr uint, disable_bls: bool): bool {.exportc, raises: [FuzzCrashError, Defect].} =
   decodeAndProcess(AttestationInput):
-    process_attestation(data.state[], data.attestation, flags, cache)
+    process_attestation(data.state, data.attestation, flags, cache)

 proc nfuzz_attester_slashing(input: openArray[byte], output: ptr byte,
     output_size: ptr uint, disable_bls: bool): bool {.exportc, raises: [FuzzCrashError, Defect].} =
   decodeAndProcess(AttesterSlashingInput):
-    process_attester_slashing(data.state[], data.attesterSlashing, flags, cache)
+    process_attester_slashing(data.state, data.attesterSlashing, flags, cache)

 proc nfuzz_block(input: openArray[byte], output: ptr byte,
     output_size: ptr uint, disable_bls: bool): bool {.exportc, raises: [FuzzCrashError, Defect].} =
   decodeAndProcess(BlockInput):
-    state_transition(data.state[], data.beaconBlock, flags, noRollback)
+    state_transition(data.state, data.beaconBlock, flags, noRollback)

 proc nfuzz_block_header(input: openArray[byte], output: ptr byte,
     output_size: ptr uint, disable_bls: bool): bool {.exportc, raises: [FuzzCrashError, Defect].} =
   decodeAndProcess(BlockHeaderInput):
-    process_block_header(data.state[], data.beaconBlock.message, flags, cache)
+    process_block_header(data.state, data.beaconBlock.message, flags, cache)

 proc nfuzz_deposit(input: openArray[byte], output: ptr byte,
     output_size: ptr uint, disable_bls: bool): bool {.exportc, raises: [FuzzCrashError, Defect].} =
   decodeAndProcess(DepositInput):
-    process_deposit(data.state[], data.deposit, flags)
+    process_deposit(data.state, data.deposit, flags)

 proc nfuzz_proposer_slashing(input: openArray[byte], output: ptr byte,
     output_size: ptr uint, disable_bls: bool): bool {.exportc, raises: [FuzzCrashError, Defect].} =
   decodeAndProcess(ProposerSlashingInput):
-    process_proposer_slashing(data.state[], data.proposerSlashing, flags, cache)
+    process_proposer_slashing(data.state, data.proposerSlashing, flags, cache)

 proc nfuzz_voluntary_exit(input: openArray[byte], output: ptr byte,
     output_size: ptr uint, disable_bls: bool): bool {.exportc, raises: [FuzzCrashError, Defect].} =
   decodeAndProcess(VoluntaryExitInput):
-    process_voluntary_exit(data.state[], data.exit, flags)
+    process_voluntary_exit(data.state, data.exit, flags)

 # Note: Could also accept raw input pointer and access list_size + seed here.
 # However, list_size needs to be known also outside this proc to allocate output.

View File

@@ -40,13 +40,13 @@ proc runTest(identifier: string) =
       var cache = get_empty_per_epoch_cache()
       let attestation = parseTest(testDir/"attestation.ssz", SSZ, Attestation)
-      var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
+      var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))

       if existsFile(testDir/"post.ssz"):
-        let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+        let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
         let done = process_attestation(preState[], attestation, {}, cache)
         doAssert done, "Valid attestation not processed"
-        check: preState.hash_tree_root() == postState.hash_tree_root()
+        check: preState[].hash_tree_root() == postState[].hash_tree_root()
         reportDiff(preState, postState)
       else:
         let done = process_attestation(preState[], attestation, {}, cache)

View File

@@ -40,14 +40,14 @@ proc runTest(identifier: string) =
       var cache = get_empty_per_epoch_cache()
       let attesterSlashing = parseTest(testDir/"attester_slashing.ssz", SSZ, AttesterSlashing)
-      var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
+      var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))

       if existsFile(testDir/"post.ssz"):
-        let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+        let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
         let done = process_attester_slashing(preState[], attesterSlashing,
           {}, cache)
         doAssert done, "Valid attestater slashing not processed"
-        check: preState.hash_tree_root() == postState.hash_tree_root()
+        check: preState[].hash_tree_root() == postState[].hash_tree_root()
         reportDiff(preState, postState)
       else:
         let done = process_attester_slashing(preState[], attesterSlashing,

View File

@@ -40,13 +40,13 @@ proc runTest(identifier: string) =
       var cache = get_empty_per_epoch_cache()
       let blck = parseTest(testDir/"block.ssz", SSZ, BeaconBlock)
-      var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
+      var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))

       if existsFile(testDir/"post.ssz"):
-        let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+        let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
         let done = process_block_header(preState[], blck, {}, cache)
         doAssert done, "Valid block header not processed"
-        check: preState.hash_tree_root() == postState.hash_tree_root()
+        check: preState[].hash_tree_root() == postState[].hash_tree_root()
         reportDiff(preState, postState)
       else:
         let done = process_block_header(preState[], blck, {}, cache)

View File

@@ -41,10 +41,10 @@ proc runTest(identifier: string) =
     timedTest prefix & " " & identifier:
       let deposit = parseTest(testDir/"deposit.ssz", SSZ, Deposit)
-      var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
+      var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))

       if existsFile(testDir/"post.ssz"):
-        let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+        let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
         discard process_deposit(preState[], deposit, flags)
         reportDiff(preState, postState)
       else:

View File

@@ -38,15 +38,15 @@ proc runTest(identifier: string) =
     timedTest prefix & astToStr(identifier):
       let proposerSlashing = parseTest(testDir/"proposer_slashing.ssz", SSZ, ProposerSlashing)
-      var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
+      var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
       var cache = get_empty_per_epoch_cache()

       if existsFile(testDir/"post.ssz"):
-        let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+        let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
         let done = process_proposer_slashing(preState[], proposerSlashing, {}, cache)
         doAssert done, "Valid proposer slashing not processed"
-        check: preState.hash_tree_root() == postState.hash_tree_root()
+        check: preState[].hash_tree_root() == postState[].hash_tree_root()
         reportDiff(preState, postState)
       else:
         let done = process_proposer_slashing(preState[], proposerSlashing, {}, cache)

View File

@@ -38,13 +38,13 @@ proc runTest(identifier: string) =
     timedTest prefix & identifier:
       let voluntaryExit = parseTest(testDir/"voluntary_exit.ssz", SSZ, SignedVoluntaryExit)
-      var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
+      var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))

       if existsFile(testDir/"post.ssz"):
-        let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+        let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
         let done = process_voluntary_exit(preState[], voluntaryExit, {})
         doAssert done, "Valid voluntary exit not processed"
-        check: preState.hash_tree_root() == postState.hash_tree_root()
+        check: preState[].hash_tree_root() == postState[].hash_tree_root()
         reportDiff(preState, postState)
       else:
         let done = process_voluntary_exit(preState[], voluntaryExit, {})

View File

@@ -34,7 +34,7 @@ proc runTest(identifier: string) =
         "[Invalid] "

     timedTest prefix & identifier:
-      var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
+      var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
       var hasPostState = existsFile(testDir/"post.ssz")

       # In test cases with more than 10 blocks the first 10 aren't 0-prefixed,
@@ -54,10 +54,10 @@ proc runTest(identifier: string) =
       # check: preState.hash_tree_root() == postState.hash_tree_root()

       if hasPostState:
-        let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+        let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))
         when false:
           reportDiff(preState, postState)
-        doAssert preState.hash_tree_root() == postState.hash_tree_root()
+        doAssert preState[].hash_tree_root() == postState[].hash_tree_root()

   `testImpl _ blck _ identifier`()

View File

@@ -31,12 +31,12 @@ proc runTest(identifier: string) =
   proc `testImpl _ slots _ identifier`() =
     timedTest "Slots - " & identifier:
-      var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
-      let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+      var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
+      let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))

       process_slots(preState[], preState.slot + num_slots)

-      # check: preState.hash_tree_root() == postState.hash_tree_root()
+      check: preState[].hash_tree_root() == postState[].hash_tree_root()
       reportDiff(preState, postState)

   `testImpl _ slots _ identifier`()

View File

@@ -41,11 +41,24 @@ setDefaultValue(SSZHashTreeRoot, signing_root, "")
 # Note this only tracks HashTreeRoot
 # Checking the values against the yaml file is TODO (require more flexible Yaml parser)

+proc checkSSZ(T: type SignedBeaconBlock, dir: string, expectedHash: SSZHashTreeRoot) =
+  # Deserialize into a ref object to not fill Nim stack
+  var deserialized = newClone(SSZ.loadFile(dir/"serialized.ssz", T))
+
+  # SignedBeaconBlocks usually not hashed because they're identified by
+  # htr(BeaconBlock), so do it manually
+  check: expectedHash.root == "0x" & toLowerASCII($hash_tree_root(
+    [hash_tree_root(deserialized.message),
+     hash_tree_root(deserialized.signature)]))
+
+  # TODO check the value
+
 proc checkSSZ(T: typedesc, dir: string, expectedHash: SSZHashTreeRoot) =
   # Deserialize into a ref object to not fill Nim stack
-  var deserialized = SSZ.loadFile(dir/"serialized.ssz", ref T)
-  check: expectedHash.root == "0x" & toLowerASCII($deserialized.hashTreeRoot())
+  var deserialized = newClone(SSZ.loadFile(dir/"serialized.ssz", T))
+  check: expectedHash.root == "0x" & toLowerASCII($hash_tree_root(deserialized[]))

   # TODO check the value

View File

@@ -38,8 +38,8 @@ template runSuite(suiteDir, testName: string, transitionProc: untyped{ident}, us
       let unitTestName = testDir.rsplit(DirSep, 1)[1]
       timedTest testName & " - " & unitTestName & preset():
-        var preState = parseTest(testDir/"pre.ssz", SSZ, BeaconStateRef)
-        let postState = parseTest(testDir/"post.ssz", SSZ, BeaconStateRef)
+        var preState = newClone(parseTest(testDir/"pre.ssz", SSZ, BeaconState))
+        let postState = newClone(parseTest(testDir/"post.ssz", SSZ, BeaconState))

         when useCache:
           var cache = get_empty_per_epoch_cache()

View File

@@ -52,7 +52,7 @@ suiteReport "Beacon chain DB" & preset():
     let
       state = BeaconStateRef()
-      root = hash_tree_root(state)
+      root = hash_tree_root(state[])

     db.putState(state[])
@@ -103,7 +103,7 @@ suiteReport "Beacon chain DB" & preset():
     let
       state = initialize_beacon_state_from_eth1(
         eth1BlockHash, 0, makeInitialDeposits(SLOTS_PER_EPOCH), {skipBlsValidation})
-      root = hash_tree_root(state)
+      root = hash_tree_root(state[])

     db.putState(state[])

View File

@@ -165,4 +165,4 @@ suiteReport "Interop":
       else:
         "unimplemented"
     check:
-      hash_tree_root(initialState).data.toHex() == expected
+      hash_tree_root(initialState[]).data.toHex() == expected