diff --git a/beacon_chain/beacon_node.nim b/beacon_chain/beacon_node.nim
index 8c9b5ab7f..f856e74c8 100644
--- a/beacon_chain/beacon_node.nim
+++ b/beacon_chain/beacon_node.nim
@@ -1277,13 +1277,17 @@ when hasPrompt:

     when compiles(defaultChroniclesStream.output.writer):
       defaultChroniclesStream.output.writer =
-        proc (logLevel: LogLevel, msg: LogOutputStr) {.gcsafe.} =
-          # p.hidePrompt
-          erase statusBar
-          # p.writeLine msg
-          stdout.write msg
-          render statusBar
-          # p.showPrompt
+        proc (logLevel: LogLevel, msg: LogOutputStr) {.gcsafe, raises: [Defect].} =
+          try:
+            # p.hidePrompt
+            erase statusBar
+            # p.writeLine msg
+            stdout.write msg
+            render statusBar
+            # p.showPrompt
+          except Exception as e: # render raises Exception
+            if e is Defect: raise (ref Defect)(e)
+            discard # Status bar not critical

     proc statusBarUpdatesPollingLoop() {.async.} =
       while true:
@@ -1302,8 +1306,11 @@ when isMainModule:

   when compiles(defaultChroniclesStream.output.writer):
     defaultChroniclesStream.output.writer =
-      proc (logLevel: LogLevel, msg: LogOutputStr) {.gcsafe.} =
-        stdout.write(msg)
+      proc (logLevel: LogLevel, msg: LogOutputStr) {.gcsafe, raises: [Defect].} =
+        try:
+          stdout.write(msg)
+        except IOError:
+          discard # nothing to do..

   randomize()

diff --git a/beacon_chain/beacon_node_types.nim b/beacon_chain/beacon_node_types.nim
index 2b4d3f285..9b94f6e14 100644
--- a/beacon_chain/beacon_node_types.nim
+++ b/beacon_chain/beacon_node_types.nim
@@ -1,3 +1,5 @@
+{.push raises: [Defect].}
+
 import
   deques, tables, options,
   stew/[endians2, byteutils], chronicles,
diff --git a/beacon_chain/conf.nim b/beacon_chain/conf.nim
index 5ef9a3ed1..16c689659 100644
--- a/beacon_chain/conf.nim
+++ b/beacon_chain/conf.nim
@@ -1,3 +1,5 @@
+{.push raises: [Defect].}
+
 import
   os, options, strformat, strutils,
   chronicles, confutils, json_serialization,
@@ -278,7 +280,10 @@ proc defaultDataDir*(conf: BeaconNodeConf): string =

 proc validatorFileBaseName*(validatorIdx: int): string =
   # there can apparently be tops 4M validators so we use 7 digits..
-  fmt"v{validatorIdx:07}"
+  try:
+    fmt"v{validatorIdx:07}"
+  except ValueError as e:
+    raiseAssert e.msg

 func dumpDir*(conf: BeaconNodeConf): string =
   conf.dataDir / "dump"
@@ -292,10 +297,10 @@ func databaseDir*(conf: BeaconNodeConf): string =
 func defaultListenAddress*(conf: BeaconNodeConf): IpAddress =
   # TODO: How should we select between IPv4 and IPv6
   # Maybe there should be a config option for this.
-  parseIpAddress("0.0.0.0")
+  return static: parseIpAddress("0.0.0.0")

 func defaultAdminListenAddress*(conf: BeaconNodeConf): IpAddress =
-  parseIpAddress("127.0.0.1")
+  return static: parseIpAddress("127.0.0.1")

 iterator validatorKeys*(conf: BeaconNodeConf): ValidatorPrivKey =
   for validatorKeyFile in conf.validators:
@@ -305,13 +310,17 @@ iterator validatorKeys*(conf: BeaconNodeConf): ValidatorPrivKey =
       warn "Failed to load validator private key",
         file = validatorKeyFile.string, err = err.msg
-  for kind, file in walkDir(conf.localValidatorsDir):
-    if kind in {pcFile, pcLinkToFile} and
-       cmpIgnoreCase(".privkey", splitFile(file).ext) == 0:
-      try:
-        yield ValidatorPrivKey.init(readFile(file).string)
-      except CatchableError as err:
-        warn "Failed to load a validator private key", file, err = err.msg
+  try:
+    for kind, file in walkDir(conf.localValidatorsDir):
+      if kind in {pcFile, pcLinkToFile} and
+         cmpIgnoreCase(".privkey", splitFile(file).ext) == 0:
+        try:
+          yield ValidatorPrivKey.init(readFile(file).string)
+        except CatchableError as err:
+          warn "Failed to load a validator private key", file, err = err.msg
+  except OSError as err:
+    warn "Cannot load validator keys",
+      dir = conf.localValidatorsDir, err = err.msg

 template writeValue*(writer: var JsonWriter,
                      value: TypedInputFile|InputFile|InputDir|OutPath|OutDir|OutFile) =
diff --git a/beacon_chain/extras.nim b/beacon_chain/extras.nim
index 30489f8a5..1b6367ab7 100644
--- a/beacon_chain/extras.nim
+++ b/beacon_chain/extras.nim
@@ -5,6 +5,8 @@
 #   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
 # at your option. This file may not be copied, modified, or distributed except according to those terms.

+{.push raises: [Defect].}
+
 # Temporary dumping ground for extra types and helpers that could make it into
 # the spec potentially
 #
diff --git a/beacon_chain/interop.nim b/beacon_chain/interop.nim
index 35eea80f3..6aed238d9 100644
--- a/beacon_chain/interop.nim
+++ b/beacon_chain/interop.nim
@@ -1,3 +1,5 @@
+{.push raises: [Defect].}
+
 import
   stew/endians2, stint,
   ./extras, ./ssz,
@@ -14,7 +16,7 @@ func get_eth1data_stub*(deposit_count: uint64, current_epoch: Epoch): Eth1Data =
     block_hash: hash_tree_root(hash_tree_root(voting_period).data),
   )

-func makeInteropPrivKey*(i: int): ValidatorPrivKey =
+func makeInteropPrivKey*(i: int): BlsResult[ValidatorPrivKey] =
   var bytes: array[32, byte]
   bytes[0..7] = uint64(i).toBytesLE()

@@ -26,7 +28,7 @@ func makeInteropPrivKey*(i: int): ValidatorPrivKey =
     privkeyBytes = eth2hash(bytes)
     key = (UInt256.fromBytesLE(privkeyBytes.data) mod curveOrder).toBytesBE()

-  ValidatorPrivKey.fromRaw(key).tryGet()
+  ValidatorPrivKey.fromRaw(key)

 const eth1BlockHash* = block:
   var x: Eth2Digest
diff --git a/beacon_chain/merkle_minimal.nim b/beacon_chain/merkle_minimal.nim
index 9ff7e938b..b4fa6eb3f 100644
--- a/beacon_chain/merkle_minimal.nim
+++ b/beacon_chain/merkle_minimal.nim
@@ -10,6 +10,8 @@
 # Merkle tree helpers
 # ---------------------------------------------------------------

+{.push raises: [Defect].}
+
 import
   sequtils, strutils, macros, bitops,
   # Specs
diff --git a/beacon_chain/spec/beaconstate.nim b/beacon_chain/spec/beaconstate.nim
index 29b8040af..972470303 100644
--- a/beacon_chain/spec/beaconstate.nim
+++ b/beacon_chain/spec/beaconstate.nim
@@ -5,6 +5,8 @@
 #   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
 # at your option. This file may not be copied, modified, or distributed except according to those terms.

+{.push raises: [Defect].}
+
 import
   tables, algorithm, math, sequtils, options,
   json_serialization/std/sets, chronicles, stew/bitseqs,
diff --git a/beacon_chain/spec/datatypes.nim b/beacon_chain/spec/datatypes.nim
index 08107ca89..832f74fe9 100644
--- a/beacon_chain/spec/datatypes.nim
+++ b/beacon_chain/spec/datatypes.nim
@@ -17,6 +17,9 @@
 #   `ref` - this can be achieved by wrapping them in higher-level
 #   types / composition

+# TODO report compiler crash when this is uncommented
+# {.push raises: [Defect].}
+
 import
   macros, hashes, json, strutils, tables,
   stew/[byteutils, bitseqs], chronicles,
diff --git a/beacon_chain/spec/helpers.nim b/beacon_chain/spec/helpers.nim
index 0864331f2..c1d6a4e03 100644
--- a/beacon_chain/spec/helpers.nim
+++ b/beacon_chain/spec/helpers.nim
@@ -7,6 +7,8 @@

 # Uncategorized helper functions from the spec

+{.push raises: [Defect].}
+
 import
   # Standard lib
   math,
diff --git a/beacon_chain/spec/network.nim b/beacon_chain/spec/network.nim
index dbfbb87ad..2f62d6edb 100644
--- a/beacon_chain/spec/network.nim
+++ b/beacon_chain/spec/network.nim
@@ -5,6 +5,8 @@
 #   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
 # at your option. This file may not be copied, modified, or distributed except according to those terms.

+{.push raises: [Defect].}
+
 import
   strformat,
   stew/byteutils,
@@ -27,22 +29,39 @@ const
   defaultEth2RpcPort* = 9090

 func getBeaconBlocksTopic*(forkDigest: ForkDigest): string =
-  &"/eth2/{toHex forkDigest}/{topicBeaconBlocksSuffix}"
+  try:
+    &"/eth2/{toHex forkDigest}/{topicBeaconBlocksSuffix}"
+  except ValueError as e:
+    raiseAssert e.msg

 func getVoluntaryExitsTopic*(forkDigest: ForkDigest): string =
-  &"/eth2/{toHex forkDigest}/{topicVoluntaryExitsSuffix}"
+  try:
+    &"/eth2/{toHex forkDigest}/{topicVoluntaryExitsSuffix}"
+  except ValueError as e:
+    raiseAssert e.msg

 func getProposerSlashingsTopic*(forkDigest: ForkDigest): string =
-  &"/eth2/{toHex forkDigest}/{topicProposerSlashingsSuffix}"
+  try:
+    &"/eth2/{toHex forkDigest}/{topicProposerSlashingsSuffix}"
+  except ValueError as e:
+    raiseAssert e.msg

 func getAttesterSlashingsTopic*(forkDigest: ForkDigest): string =
-  &"/eth2/{toHex forkDigest}/{topicAttesterSlashingsSuffix}"
+  try:
+    &"/eth2/{toHex forkDigest}/{topicAttesterSlashingsSuffix}"
+  except ValueError as e:
+    raiseAssert e.msg

 func getAggregateAndProofsTopic*(forkDigest: ForkDigest): string =
-  &"/eth2/{toHex forkDigest}/{topicAggregateAndProofsSuffix}"
+  try:
+    &"/eth2/{toHex forkDigest}/{topicAggregateAndProofsSuffix}"
+  except ValueError as e:
+    raiseAssert e.msg

 func getAttestationTopic*(forkDigest: ForkDigest, committeeIndex: uint64): string =
   # https://github.com/ethereum/eth2.0-specs/blob/v0.11.0/specs/phase0/validator.md#broadcast-attestation
-  let topicIndex = committeeIndex mod ATTESTATION_SUBNET_COUNT
-  &"/eth2/{toHex forkDigest}/committee_index{topicIndex}{topicAttestationsSuffix}"
-
+  try:
+    let topicIndex = committeeIndex mod ATTESTATION_SUBNET_COUNT
+    &"/eth2/{toHex forkDigest}/committee_index{topicIndex}{topicAttestationsSuffix}"
+  except ValueError as e:
+    raiseAssert e.msg
diff --git a/beacon_chain/spec/state_transition_block.nim b/beacon_chain/spec/state_transition_block.nim
index f9396d945..60838ffd8 100644
--- a/beacon_chain/spec/state_transition_block.nim
+++ b/beacon_chain/spec/state_transition_block.nim
@@ -32,6 +32,8 @@
 # improvements to be made - other than that, keep things similar to spec for
 # now.

+{.push raises: [Defect].}
+
 import
   algorithm, collections/sets, chronicles, options, sequtils, sets,
   ../extras, ../ssz, metrics,
@@ -400,16 +402,19 @@ proc process_block*(
   # https://github.com/ethereum/eth2.0-metrics/blob/master/metrics.md#additional-metrics
   # doesn't seem to specify at what point in block processing this metric is to be read,
   # and this avoids the early-return issue (could also use defer, etc).
-  beacon_pending_deposits.set(
-    state.eth1_data.deposit_count.int64 - state.eth1_deposit_index.int64)
-  beacon_processed_deposits_total.set(state.eth1_deposit_index.int64)
+  try:
+    beacon_pending_deposits.set(
+      state.eth1_data.deposit_count.int64 - state.eth1_deposit_index.int64)
+    beacon_processed_deposits_total.set(state.eth1_deposit_index.int64)

-  # Adds nontrivial additional computation, but only does so when metrics
-  # enabled.
-  beacon_current_live_validators.set(toHashSet(
-    mapIt(state.current_epoch_attestations, it.proposerIndex)).len.int64)
-  beacon_previous_live_validators.set(toHashSet(
-    mapIt(state.previous_epoch_attestations, it.proposerIndex)).len.int64)
+    # Adds nontrivial additional computation, but only does so when metrics
+    # enabled.
+    beacon_current_live_validators.set(toHashSet(
+      mapIt(state.current_epoch_attestations, it.proposerIndex)).len.int64)
+    beacon_previous_live_validators.set(toHashSet(
+      mapIt(state.previous_epoch_attestations, it.proposerIndex)).len.int64)
+  except Exception as e: # TODO https://github.com/status-im/nim-metrics/pull/22
+    trace "Couldn't update metrics", msg = e.msg

   if not process_block_header(state, blck, flags, stateCache):
     notice "Block header not valid", slot = shortLog(state.slot)
diff --git a/beacon_chain/spec/state_transition_epoch.nim b/beacon_chain/spec/state_transition_epoch.nim
index 7f8426277..d1490f6ae 100644
--- a/beacon_chain/spec/state_transition_epoch.nim
+++ b/beacon_chain/spec/state_transition_epoch.nim
@@ -32,6 +32,8 @@
 # improvements to be made - other than that, keep things similar to spec for
 # now.

+{.push raises: [Defect].}
+
 import
   math, options, sequtils, tables,
   stew/[bitseqs, bitops2], chronicles, json_serialization/std/sets,
@@ -449,13 +451,16 @@ proc process_epoch*(state: var BeaconState) {.nbench.}=
   process_final_updates(state)

   # Once per epoch metrics
-  beacon_finalized_epoch.set(state.finalized_checkpoint.epoch.int64)
-  beacon_finalized_root.set(state.finalized_checkpoint.root.toGaugeValue)
-  beacon_current_justified_epoch.set(
-    state.current_justified_checkpoint.epoch.int64)
-  beacon_current_justified_root.set(
-    state.current_justified_checkpoint.root.toGaugeValue)
-  beacon_previous_justified_epoch.set(
-    state.previous_justified_checkpoint.epoch.int64)
-  beacon_previous_justified_root.set(
-    state.previous_justified_checkpoint.root.toGaugeValue)
+  try:
+    beacon_finalized_epoch.set(state.finalized_checkpoint.epoch.int64)
+    beacon_finalized_root.set(state.finalized_checkpoint.root.toGaugeValue)
+    beacon_current_justified_epoch.set(
+      state.current_justified_checkpoint.epoch.int64)
+    beacon_current_justified_root.set(
+      state.current_justified_checkpoint.root.toGaugeValue)
+    beacon_previous_justified_epoch.set(
+      state.previous_justified_checkpoint.epoch.int64)
+    beacon_previous_justified_root.set(
+      state.previous_justified_checkpoint.root.toGaugeValue)
+  except Exception as e: # TODO https://github.com/status-im/nim-metrics/pull/22
+    trace "Couldn't update metrics", msg = e.msg
diff --git a/beacon_chain/spec/state_transition_helpers.nim b/beacon_chain/spec/state_transition_helpers.nim
index f6eb6c668..3e6aee73b 100644
--- a/beacon_chain/spec/state_transition_helpers.nim
+++ b/beacon_chain/spec/state_transition_helpers.nim
@@ -5,6 +5,8 @@
 #   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
 # at your option. This file may not be copied, modified, or distributed except according to those terms.

+{.push raises: [Defect].}
+
 import
   # Standard library
   sets,
diff --git a/beacon_chain/spec/validator.nim b/beacon_chain/spec/validator.nim
index c786e11ef..dc9416198 100644
--- a/beacon_chain/spec/validator.nim
+++ b/beacon_chain/spec/validator.nim
@@ -6,6 +6,8 @@

 # Helpers and functions pertaining to managing the validator set

+{.push raises: [Defect].}
+
 import
   options, nimcrypto, sequtils, math, tables,
   ./datatypes, ./digest, ./helpers
@@ -91,25 +93,27 @@ func compute_committee(indices: seq[ValidatorIndex], seed: Eth2Digest,
     index: uint64, count: uint64, stateCache: var StateCache): seq[ValidatorIndex] =
   ## Return the committee corresponding to ``indices``, ``seed``, ``index``,
   ## and committee ``count``.
+  try:
+    let
+      start = (len(indices).uint64 * index) div count
+      endIdx = (len(indices).uint64 * (index + 1)) div count
+      key = (indices.len, seed)

-  let
-    start = (len(indices).uint64 * index) div count
-    endIdx = (len(indices).uint64 * (index + 1)) div count
-    key = (indices.len, seed)
+    if key notin stateCache.beacon_committee_cache:
+      stateCache.beacon_committee_cache[key] =
+        get_shuffled_seq(seed, len(indices).uint64)

-  if key notin stateCache.beacon_committee_cache:
-    stateCache.beacon_committee_cache[key] =
-      get_shuffled_seq(seed, len(indices).uint64)
+    # These assertions from compute_shuffled_index(...)
+    let index_count = indices.len().uint64
+    doAssert endIdx <= index_count
+    doAssert index_count <= 2'u64^40

-  # These assertions from compute_shuffled_index(...)
-  let index_count = indices.len().uint64
-  doAssert endIdx <= index_count
-  doAssert index_count <= 2'u64^40
-
-  # In spec, this calls get_shuffled_index() every time, but that's wasteful
-  mapIt(
-    start.int .. (endIdx.int-1),
-    indices[stateCache.beacon_committee_cache[key][it]])
+    # In spec, this calls get_shuffled_index() every time, but that's wasteful
+    mapIt(
+      start.int .. (endIdx.int-1),
+      indices[stateCache.beacon_committee_cache[key][it]])
+  except KeyError:
+    raiseAssert("Cached entries are added before use")

 # https://github.com/ethereum/eth2.0-specs/blob/v0.10.1/specs/phase0/beacon-chain.md#get_beacon_committee
 func get_beacon_committee*(
@@ -119,26 +123,29 @@ func get_beacon_committee*(
   let
     epoch = compute_epoch_at_slot(slot)

-  ## This is a somewhat more fragile, but high-ROI, caching setup --
-  ## get_active_validator_indices() is slow to run in a loop and only
-  ## changes once per epoch.
-  if epoch notin cache.active_validator_indices_cache:
-    cache.active_validator_indices_cache[epoch] =
-      get_active_validator_indices(state, epoch)
+  try:
+    ## This is a somewhat more fragile, but high-ROI, caching setup --
+    ## get_active_validator_indices() is slow to run in a loop and only
+    ## changes once per epoch.
+    if epoch notin cache.active_validator_indices_cache:
+      cache.active_validator_indices_cache[epoch] =
+        get_active_validator_indices(state, epoch)

-  # Constant throughout an epoch
-  if epoch notin cache.committee_count_cache:
-    cache.committee_count_cache[epoch] =
-      get_committee_count_at_slot(state, slot)
+    # Constant throughout an epoch
+    if epoch notin cache.committee_count_cache:
+      cache.committee_count_cache[epoch] =
+        get_committee_count_at_slot(state, slot)

-  compute_committee(
-    cache.active_validator_indices_cache[epoch],
-    get_seed(state, epoch, DOMAIN_BEACON_ATTESTER),
-    (slot mod SLOTS_PER_EPOCH) * cache.committee_count_cache[epoch] +
-      index.uint64,
-    cache.committee_count_cache[epoch] * SLOTS_PER_EPOCH,
-    cache
-  )
+    compute_committee(
+      cache.active_validator_indices_cache[epoch],
+      get_seed(state, epoch, DOMAIN_BEACON_ATTESTER),
+      (slot mod SLOTS_PER_EPOCH) * cache.committee_count_cache[epoch] +
+        index.uint64,
+      cache.committee_count_cache[epoch] * SLOTS_PER_EPOCH,
+      cache
+    )
+  except KeyError:
+    raiseAssert "values are added to cache before using them"

 # Not from spec
 func get_empty_per_epoch_cache*(): StateCache =
diff --git a/beacon_chain/ssz.nim b/beacon_chain/ssz.nim
index dd88f764e..800b24c89 100644
--- a/beacon_chain/ssz.nim
+++ b/beacon_chain/ssz.nim
@@ -8,6 +8,10 @@
 # SSZ Serialization (simple serialize)
 # See https://github.com/ethereum/eth2.0-specs/blob/master/specs/simple-serialize.md

+# TODO Cannot override push, even though the function is annotated
+# nim-beacon-chain/beacon_chain/ssz.nim(212, 18) Error: can raise an unlisted exception: IOError
+#{.push raises: [Defect].}
+
 import
   stew/shims/macros, options, algorithm, options,
   stew/[bitops2, bitseqs, endians2, objects, varints, ptrops, ranges/ptr_arith], stint,
@@ -72,15 +76,15 @@ template sizePrefixed*[TT](x: TT): untyped =

 proc init*(T: type SszReader,
            stream: InputStream,
-           maxObjectSize = defaultMaxObjectSize): T =
+           maxObjectSize = defaultMaxObjectSize): T {.raises: [Defect].} =
   T(stream: stream, maxObjectSize: maxObjectSize)

-proc mount*(F: type SSZ, stream: InputStream, T: type): T =
+proc mount*(F: type SSZ, stream: InputStream, T: type): T {.raises: [Defect].} =
   mixin readValue
   var reader = init(SszReader, stream)
   reader.readValue(T)

-method formatMsg*(err: ref SszSizeMismatchError, filename: string): string {.gcsafe.} =
+method formatMsg*(err: ref SszSizeMismatchError, filename: string): string {.gcsafe, raises: [Defect].} =
   # TODO: implement proper error string
   "Serialisation error while processing " & filename

@@ -111,7 +115,7 @@ template toSszType*(x: auto): auto =
   elif useListType and x is List: seq[x.T](x)
   else: x

-proc writeFixedSized(c: var WriteCursor, x: auto) =
+proc writeFixedSized(c: var WriteCursor, x: auto) {.raises: [Defect, IOError].} =
   mixin toSszType

   when x is byte:
@@ -146,7 +150,7 @@ template supports*(_: type SSZ, T: type): bool =
   mixin toSszType
   anonConst compiles(fixedPortionSize toSszType(default(T)))

-func init*(T: type SszWriter, stream: OutputStream): T =
+func init*(T: type SszWriter, stream: OutputStream): T {.raises: [Defect].} =
   result.stream = stream

 template enumerateSubFields(holder, fieldVar, body: untyped) =
@@ -157,7 +161,7 @@ template enumerateSubFields(holder, fieldVar, body: untyped) =

 proc writeVarSizeType(w: var SszWriter, value: auto) {.gcsafe.}

-proc beginRecord*(w: var SszWriter, TT: type): auto =
+proc beginRecord*(w: var SszWriter, TT: type): auto {.raises: [Defect].} =
   type T = TT
   when isFixedSize(T):
     FixedSizedWriterCtx()
@@ -193,7 +197,7 @@ template endRecord*(w: var SszWriter, ctx: var auto) =
   when ctx is VarSizedWriterCtx:
     finalize ctx.fixedParts

-proc writeVarSizeType(w: var SszWriter, value: auto) =
+proc writeVarSizeType(w: var SszWriter, value: auto) {.raises: [Defect, IOError].} =
   trs "STARTING VAR SIZE TYPE"
   mixin toSszType
   type T = type toSszType(value)
@@ -230,7 +234,7 @@ proc writeVarSizeType(w: var SszWriter, value: auto) =
       writeField w, ctx, astToStr(field), field
     endRecord w, ctx

-proc writeValue*(w: var SszWriter, x: auto) {.gcsafe.} =
+proc writeValue*(w: var SszWriter, x: auto) {.gcsafe, raises: [Defect, IOError].} =
   mixin toSszType
   type T = type toSszType(x)

@@ -241,7 +245,7 @@ proc writeValue*(w: var SszWriter, x: auto) {.gcsafe.} =
   else:
     unsupported type(x)

-proc writeValue*[T](w: var SszWriter, x: SizePrefixed[T]) =
+proc writeValue*[T](w: var SszWriter, x: SizePrefixed[T]) {.raises: [Defect, IOError].} =
   var cursor = w.stream.delayVarSizeWrite(10)
   let initPos = w.stream.pos
   w.writeValue T(x)
@@ -260,7 +264,7 @@ template fromSszBytes*[T; N](_: type TypeWithMaxLen[T, N],
   mixin fromSszBytes
   fromSszBytes(T, bytes)

-proc readValue*[T](r: var SszReader, val: var T) =
+proc readValue*[T](r: var SszReader, val: var T) {.raises: [Defect, MalformedSszError, SszSizeMismatchError].} =
   when isFixedSize(T):
     const minimalSize = fixedPortionSize(T)
     if r.stream.readable(minimalSize):
@@ -272,7 +276,7 @@ proc readValue*[T](r: var SszReader, val: var T) =
     # the dynamic portion to consume the right number of bytes.
     val = readSszValue(r.stream.read(r.stream.endPos), T)

-proc readValue*[T](r: var SszReader, val: var SizePrefixed[T]) =
+proc readValue*[T](r: var SszReader, val: var SizePrefixed[T]) {.raises: [Defect].} =
   let length = r.stream.readVarint(uint64)
   if length > r.maxObjectSize:
     raise newException(SszMaxSizeExceeded,
@@ -415,19 +419,23 @@ func mixInLength(root: Eth2Digest, length: int): Eth2Digest =

 func merkleizeSerializedChunks(merkleizer: SszChunksMerkleizer,
                                obj: auto): Eth2Digest =
-
-  var hashingStream = newSszHashingStream merkleizer
-  {.noSideEffect.}:
-    # We assume there are no side-effects here, because the
-    # SszHashingStream is keeping all of its output in memory.
-    hashingStream.writeFixedSized obj
-    hashingStream.flush
-  merkleizer.getFinalHash
+  try:
+    var hashingStream = newSszHashingStream merkleizer
+    {.noSideEffect.}:
+      # We assume there are no side-effects here, because the
+      # SszHashingStream is keeping all of its output in memory.
+      hashingStream.writeFixedSized obj
+      hashingStream.flush
+    merkleizer.getFinalHash
+  except IOError as e:
+    # Hashing shouldn't raise in theory but because of abstraction
+    # tax in the faststreams library, we have to do this at runtime
+    raiseAssert($e.msg)

 func merkleizeSerializedChunks(obj: auto): Eth2Digest =
   merkleizeSerializedChunks(SszChunksMerkleizer(), obj)

-func hash_tree_root*(x: auto): Eth2Digest {.gcsafe.}
+func hash_tree_root*(x: auto): Eth2Digest {.gcsafe, raises: [Defect].}

 template merkleizeFields(body: untyped): Eth2Digest {.dirty.} =
   var merkleizer {.inject.} = SszChunksMerkleizer()
@@ -546,7 +554,7 @@ func maxChunksCount(T: type, maxLen: static int64): int64 {.compileTime.} =
   else:
     unsupported T # This should never happen

-func hash_tree_root*(x: auto): Eth2Digest =
+func hash_tree_root*(x: auto): Eth2Digest {.raises: [Defect].} =
   trs "STARTING HASH TREE ROOT FOR TYPE ", name(type(x))
   mixin toSszType
   when x is SignedBeaconBlock:
diff --git a/beacon_chain/ssz/bytes_reader.nim b/beacon_chain/ssz/bytes_reader.nim
index bdef60519..fe93f4fba 100644
--- a/beacon_chain/ssz/bytes_reader.nim
+++ b/beacon_chain/ssz/bytes_reader.nim
@@ -1,3 +1,6 @@
+{.push raises: [Defect].}
+{.pragma: raisesssz, raises: [Defect, MalformedSszError, SszSizeMismatchError].}
+
 import
   typetraits, options,
   stew/[bitseqs, endians2, objects, bitseqs], serialization/testing/tracing,
@@ -10,19 +13,19 @@ template setOutputSize[R, T](a: var array[R, T], length: int) =
   if length != a.len:
     raise newException(MalformedSszError, "SSZ input of insufficient size")

-proc setOutputSize[T](s: var seq[T], length: int) {.inline.} =
+proc setOutputSize[T](s: var seq[T], length: int) {.inline, raisesssz.} =
   if sizeof(T) * length > maxListAllocation:
     raise newException(MalformedSszError, "SSZ list size is too large to fit in memory")
   s.setLen length

-proc setOutputSize(s: var string, length: int) {.inline.} =
+proc setOutputSize(s: var string, length: int) {.inline, raisesssz.} =
   if length > maxListAllocation:
     raise newException(MalformedSszError, "SSZ string is too large to fit in memory")
   s.setLen length

 # fromSszBytes copies the wire representation to a Nim variable,
 # assuming there's enough data in the buffer
-func fromSszBytes*(T: type SomeInteger, data: openarray[byte]): T =
+func fromSszBytes*(T: type SomeInteger, data: openarray[byte]): T {.raisesssz.} =
   ## Convert directly to bytes the size of the int. (e.g. ``uint16 = 2 bytes``)
   ## All integers are serialized as **little endian**.
   if data.len < sizeof(result):
@@ -30,14 +33,14 @@ func fromSszBytes*(T: type SomeInteger, data: openarray[byte]): T =

   T.fromBytesLE(data)

-func fromSszBytes*(T: type bool, data: openarray[byte]): T =
+func fromSszBytes*(T: type bool, data: openarray[byte]): T {.raisesssz.} =
   # TODO: spec doesn't say what to do if the value is >1 - we'll use the C
   # definition for now, but maybe this should be a parse error instead?
   if data.len == 0 or data[0] > byte(1):
     raise newException(MalformedSszError, "invalid boolean value")
   data[0] == 1

-func fromSszBytes*(T: type Eth2Digest, data: openarray[byte]): T =
+func fromSszBytes*(T: type Eth2Digest, data: openarray[byte]): T {.raisesssz.} =
   if data.len < sizeof(result.data):
     raise newException(MalformedSszError, "SSZ input of insufficient size")
   copyMem(result.data.addr, unsafeAddr data[0], sizeof(result.data))
@@ -48,16 +51,16 @@ template fromSszBytes*(T: type Slot, bytes: openarray[byte]): Slot =
 template fromSszBytes*(T: type Epoch, bytes: openarray[byte]): Epoch =
   Epoch fromSszBytes(uint64, bytes)

-template fromSszBytes*(T: type enum, bytes: openarray[byte]): auto =
+template fromSszBytes*(T: type enum, bytes: openarray[byte]): auto =
   T fromSszBytes(uint64, bytes)

 template fromSszBytes*(T: type BitSeq, bytes: openarray[byte]): auto =
   BitSeq @bytes

-func fromSszBytes*[N](T: type BitList[N], bytes: openarray[byte]): auto =
+func fromSszBytes*[N](T: type BitList[N], bytes: openarray[byte]): auto {.raisesssz.} =
   BitList[N] @bytes

-func fromSszBytes*[N](T: type BitArray[N], bytes: openarray[byte]): T =
+func fromSszBytes*[N](T: type BitArray[N], bytes: openarray[byte]): T {.raisesssz.} =
   # A bit vector doesn't have a marker bit, but we'll use the helper from
   # nim-stew to determine the position of the leading (marker) bit.
   # If it's outside the BitArray size, we have an overflow:
@@ -68,7 +71,7 @@ func fromSszBytes*[N](T: type BitArray[N], bytes: openarray[byte]): T =

 proc `[]`[T, U, V](s: openArray[T], x: HSlice[U, V]) {.error:
   "Please don't use openarray's [] as it allocates a result sequence".}

-func readSszValue*(input: openarray[byte], T: type): T =
+func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
   mixin fromSszBytes, toSszType
   type T {.used.} = type(result)
diff --git a/beacon_chain/ssz/dynamic_navigator.nim b/beacon_chain/ssz/dynamic_navigator.nim
index a336c9ea7..93a0624fc 100644
--- a/beacon_chain/ssz/dynamic_navigator.nim
+++ b/beacon_chain/ssz/dynamic_navigator.nim
@@ -1,3 +1,6 @@
+{.push raises: [Defect].}
+{.pragma: raisesssz, raises: [Defect, IOError, MalformedSszError, SszSizeMismatchError].}
+
 import
   strutils, parseutils,
   faststreams/output_stream, json_serialization/writer,
@@ -18,8 +21,7 @@ type
     fieldType: TypeInfo
     navigator: proc (m: MemRange): MemRange {. gcsafe
                                                noSideEffect
-                                               raises: [Defect,
-                                                        MalformedSszError] }
+                                               raisesssz }
   TypeInfo = ref object
     case kind: ObjKind
     of Record:
@@ -28,20 +30,19 @@ type
       elemType: TypeInfo
       navigator: proc (m: MemRange, idx: int): MemRange {. gcsafe
                                                            noSideEffect
-                                                           raises: [Defect,
-                                                                    MalformedSszError] }
+                                                           raisesssz }
     else:
      discard

    jsonPrinter: proc (m: MemRange,
                       outStream: OutputStream,
-                      pretty: bool) {.gcsafe.}
+                      pretty: bool) {.gcsafe, raisesssz.}

  DynamicSszNavigator* = object
    m: MemRange
    typ: TypeInfo

-proc jsonPrinterImpl[T](m: MemRange, outStream: OutputStream, pretty: bool) =
+proc jsonPrinterImpl[T](m: MemRange, outStream: OutputStream, pretty: bool) {.raisesssz.} =
   var typedNavigator = sszMount(m, T)
   var jsonWriter = init(JsonWriter, outStream, pretty)
   # TODO: it should be possible to serialize the navigator object
@@ -55,12 +56,12 @@ func findField(fields: seq[FieldInfo], name: string): FieldInfo =
     if field.name == name:
       return field

-func indexableNavigatorImpl[T](m: MemRange, idx: int): MemRange =
+func indexableNavigatorImpl[T](m: MemRange, idx: int): MemRange {.raisesssz.} =
   var typedNavigator = sszMount(m, T)
   getMemRange(typedNavigator[idx])

 func fieldNavigatorImpl[RecordType; FieldType;
-                        fieldName: static string](m: MemRange): MemRange {.raises: [MalformedSszError].} =
+                        fieldName: static string](m: MemRange): MemRange {.raisesssz.} =
   # TODO: Make sure this doesn't fail with a Defect when
   # navigating to an inactive field in a case object.
   var typedNavigator = sszMount(m, RecordType)
@@ -97,12 +98,12 @@ func genTypeInfo(T: type): TypeInfo =

   result.jsonPrinter = jsonPrinterImpl[T]

-func `[]`*(n: DynamicSszNavigator, idx: int): DynamicSszNavigator =
+func `[]`*(n: DynamicSszNavigator, idx: int): DynamicSszNavigator {.raisesssz.} =
   doAssert n.typ.kind == Indexable
   DynamicSszNavigator(m: n.typ.navigator(n.m, idx), typ: n.typ.elemType)

 func navigate*(n: DynamicSszNavigator, path: string): DynamicSszNavigator {.
-     raises: [Defect, ValueError, MalformedSszError] .} =
+     raises: [Defect, KeyError, IOError, MalformedSszError, SszSizeMismatchError, ValueError] .} =
   case n.typ.kind
   of Record:
     let fieldInfo = n.typ.fields.findField(path)
@@ -129,11 +130,11 @@ template navigatePathImpl(nav, iterabalePathFragments: untyped) =
       return

 func navigatePath*(n: DynamicSszNavigator, path: string): DynamicSszNavigator {.
-     raises: [Defect, ValueError, MalformedSszError] .} =
+     raises: [Defect, IOError, ValueError, MalformedSszError, SszSizeMismatchError] .} =
   navigatePathImpl n, split(path, '/')

 func navigatePath*(n: DynamicSszNavigator, path: openarray[string]): DynamicSszNavigator {.
-     raises: [Defect, ValueError, MalformedSszError] .} =
+     raises: [Defect, IOError, ValueError, MalformedSszError, SszSizeMismatchError] .} =
   navigatePathImpl n, path

 func init*(T: type DynamicSszNavigator,
@@ -141,10 +142,10 @@ func init*(T: type DynamicSszNavigator,
   T(m: MemRange(startAddr: unsafeAddr bytes[0], length: bytes.len),
     typ: typeInfo(Navigated))

-proc writeJson*(n: DynamicSszNavigator, outStream: OutputStream, pretty = true) =
+proc writeJson*(n: DynamicSszNavigator, outStream: OutputStream, pretty = true) {.raisesssz.} =
   n.typ.jsonPrinter(n.m, outStream, pretty)

-func toJson*(n: DynamicSszNavigator, pretty = true): string =
+func toJson*(n: DynamicSszNavigator, pretty = true): string {.raisesssz.} =
   var outStream = memoryOutput()
   {.noSideEffect.}:
     # We are assuming that there are no side-effects here
     # from a file or a network device.
     writeJson(n, outStream, pretty)
   outStream.getOutput(string)
-
diff --git a/beacon_chain/ssz/navigator.nim b/beacon_chain/ssz/navigator.nim
index 6f8473e98..2c923e571 100644
--- a/beacon_chain/ssz/navigator.nim
+++ b/beacon_chain/ssz/navigator.nim
@@ -1,3 +1,6 @@
+{.push raises: [Defect].}
+{.pragma: raisesssz, raises: [Defect, MalformedSszError, SszSizeMismatchError].}
+
 import
   stew/[ptrops, objects], stew/ranges/ptr_arith,
   ./types, ./bytes_reader
@@ -35,7 +38,7 @@ template toOpenArray(m: MemRange): auto =

 func navigateToField*[T](n: SszNavigator[T],
                          fieldName: static string,
-                         FieldType: type): SszNavigator[FieldType] =
+                         FieldType: type): SszNavigator[FieldType] {.raisesssz.} =
   mixin toSszType
   type SszFieldType = type toSszType(default FieldType)

@@ -67,7 +70,7 @@ template `.`*[T](n: SszNavigator[T], field: untyped): auto =
   type FieldType = type(default(RecType).field)
   navigateToField(n, astToStr(field), FieldType)

-func indexVarSizeList(m: MemRange, idx: int): MemRange =
+func indexVarSizeList(m: MemRange, idx: int): MemRange {.raisesssz.} =
   template readOffset(pos): int =
     int fromSszBytes(uint32, makeOpenArray(offset(m.startAddr, pos), offsetSize))

@@ -114,7 +117,7 @@ template `[]`*[T](n: SszNavigator[seq[T]], idx: int): SszNavigator[T] =
 template `[]`*[R, T](n: SszNavigator[array[R, T]], idx: int): SszNavigator[T] =
   indexList(n, idx, T)

-func `[]`*[T](n: SszNavigator[T]): T =
+func `[]`*[T](n: SszNavigator[T]): T {.raisesssz.} =
   mixin toSszType, fromSszBytes
   type SszRepr = type(toSszType default(T))
   when type(SszRepr) is type(T):
@@ -122,6 +125,6 @@ func `[]`*[T](n: SszNavigator[T]): T =
   else:
     fromSszBytes(T, toOpenArray(n.m))

-converter derefNavigator*[T](n: SszNavigator[T]): T =
+converter derefNavigator*[T](n: SszNavigator[T]): T {.raisesssz.} =
   n[]

diff --git a/beacon_chain/ssz/types.nim b/beacon_chain/ssz/types.nim
index 474ad283a..5c9171006 100644
--- a/beacon_chain/ssz/types.nim
+++ b/beacon_chain/ssz/types.nim
@@ -1,3 +1,5 @@
+{.push raises: [Defect].}
+
 import
   tables, options,
   stew/shims/macros, stew/[objects, bitseqs],
@@ -185,9 +187,12 @@ proc fieldInfos*(RecordType: type): seq[tuple[name: string,
         fieldOffset = val[]
         val[] += fieldSize
       do:
-        let parentBranch = nestedUnder.getOrDefault(fieldCaseDiscriminator, "")
-        fieldOffset = offsetInBranch[parentBranch]
-        offsetInBranch[branchKey] = fieldOffset + fieldSize
+        try:
+          let parentBranch = nestedUnder.getOrDefault(fieldCaseDiscriminator, "")
+          fieldOffset = offsetInBranch[parentBranch]
+          offsetInBranch[branchKey] = fieldOffset + fieldSize
+        except KeyError as e:
+          raiseAssert e.msg

     result.add((fieldName, fieldOffset, fixedSize, branchKey))

diff --git a/beacon_chain/state_transition.nim b/beacon_chain/state_transition.nim
index 937441ecf..42687c41f 100644
--- a/beacon_chain/state_transition.nim
+++ b/beacon_chain/state_transition.nim
@@ -30,6 +30,8 @@
 # improvements to be made - other than that, keep things similar to spec for
 # now.

+{.push raises: [Defect].}
+
 import
   chronicles,
   ./extras, ./ssz, metrics,
@@ -95,11 +97,17 @@ proc process_slots*(state: var BeaconState, slot: Slot) {.nbench.}=
     let is_epoch_transition = (state.slot + 1) mod SLOTS_PER_EPOCH == 0
     if is_epoch_transition:
       # Note: Genesis epoch = 0, no need to test if before Genesis
-      beacon_previous_validators.set(get_epoch_validator_count(state))
+      try:
+        beacon_previous_validators.set(get_epoch_validator_count(state))
+      except Exception as e: # TODO https://github.com/status-im/nim-metrics/pull/22
+        trace "Couldn't update metrics", msg = e.msg
       process_epoch(state)
     state.slot += 1
     if is_epoch_transition:
-      beacon_current_validators.set(get_epoch_validator_count(state))
+      try:
+        beacon_current_validators.set(get_epoch_validator_count(state))
+      except Exception as e: # TODO https://github.com/status-im/nim-metrics/pull/22
+        trace "Couldn't update metrics", msg = e.msg

 # https://github.com/ethereum/eth2.0-specs/blob/v0.11.1/specs/phase0/beacon-chain.md#verify_block_signature
 proc verify_block_signature*(
@@ -234,11 +242,17 @@ proc process_slots*(state: var HashedBeaconState, slot: Slot) =
     let is_epoch_transition = (state.data.slot + 1) mod SLOTS_PER_EPOCH == 0
     if is_epoch_transition:
       # Note: Genesis epoch = 0, no need to test if before Genesis
-      beacon_previous_validators.set(get_epoch_validator_count(state.data))
+      try:
+        beacon_previous_validators.set(get_epoch_validator_count(state.data))
+      except Exception as e: # TODO https://github.com/status-im/nim-metrics/pull/22
+        trace "Couldn't update metrics", msg = e.msg
       process_epoch(state.data)
     state.data.slot += 1
     if is_epoch_transition:
-      beacon_current_validators.set(get_epoch_validator_count(state.data))
+      try:
+        beacon_current_validators.set(get_epoch_validator_count(state.data))
+      except Exception as e: # TODO https://github.com/status-im/nim-metrics/pull/22
+        trace "Couldn't update metrics", msg = e.msg
     state.root = hash_tree_root(state.data)

 proc state_transition*(
diff --git a/beacon_chain/validator_keygen.nim b/beacon_chain/validator_keygen.nim
index 8ed302dc4..91ac14442 100644
--- a/beacon_chain/validator_keygen.nim
+++ b/beacon_chain/validator_keygen.nim
@@ -44,7 +44,7 @@ proc generateDeposits*(totalValidators: int,
     if randomKeys:
       (pubKey, privKey) = crypto.newKeyPair().tryGet()
     else:
-      privKey = makeInteropPrivKey(i)
+      privKey = makeInteropPrivKey(i).tryGet()
       pubKey = privKey.toPubKey()

     let dp = makeDeposit(pubKey, privKey)
diff --git a/beacon_chain/version.nim b/beacon_chain/version.nim
index c7159e8ee..0638a0c06 100644
--- a/beacon_chain/version.nim
+++ b/beacon_chain/version.nim
@@ -1,3 +1,5 @@
+{.push raises: [Defect].}
+
 when not defined(nimscript):
   import times
   let copyrights* = "Copyright (c) 2019-" & $(now().utc.year) & " Status Research & Development GmbH"
@@ -23,4 +25,3 @@ const

   fullVersionStr* =
     versionAsStr & " (" & gitRevision & ")"
-
diff --git a/tests/fork_choice/interpreter.nim b/tests/fork_choice/interpreter.nim
index 987ab36c9..f9406245d 100644
--- a/tests/fork_choice/interpreter.nim
+++ b/tests/fork_choice/interpreter.nim
@@ -9,12 +9,12 @@ import
   # Standard library
   std/strformat, std/tables, std/options,
   # Status libraries
-  stew/[result, endians2],
+  stew/[results, endians2],
   # Internals
   ../../beacon_chain/spec/[datatypes, digest],
   ../../beacon_chain/fork_choice/[fork_choice, fork_choice_types]

-export result, datatypes, digest, fork_choice, fork_choice_types, tables, options
+export results, datatypes, digest, fork_choice, fork_choice_types, tables, options

 func fakeHash*(index: SomeInteger): Eth2Digest =
   ## Create fake hashes
diff --git a/tests/test_interop.nim b/tests/test_interop.nim
index efa04ce24..3684870b7 100644
--- a/tests/test_interop.nim
+++ b/tests/test_interop.nim
@@ -119,7 +119,7 @@ suiteReport "Interop":
   timedTest "Mocked start private key":
     for i, k in privateKeys:
       let
-        key = makeInteropPrivKey(i)
+        key = makeInteropPrivKey(i)[]
         v = k.parse(UInt256, 16)

       check:
@@ -144,7 +144,7 @@ suiteReport "Interop":
     for i in 0..<64:
       let
-        privKey = makeInteropPrivKey(i)
+        privKey = makeInteropPrivKey(i)[]

       deposits.add(makeDeposit(privKey.toPubKey(), privKey))

     const genesis_time = 1570500000
diff --git a/tests/test_sync_manager.nim b/tests/test_sync_manager.nim
index bfc5d30d2..bf3c7fcba 100644
--- a/tests/test_sync_manager.nim
+++ b/tests/test_sync_manager.nim
@@ -1,3 +1,5 @@
+{.used.}
+
 import unittest
 import chronos
 import ../beacon_chain/sync_manager
diff --git a/vendor/nim-chronicles b/vendor/nim-chronicles
index 114cdccaa..3e7f422f1 160000
--- a/vendor/nim-chronicles
+++ b/vendor/nim-chronicles
@@ -1 +1 @@
-Subproject commit 114cdccaa087c54abcc4dad9ed3366af247de79e
+Subproject commit 3e7f422f11754732df673ed280389024e1ed3cb1
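
The same exception-tracking pattern recurs throughout the patch: a module is annotated with {.push raises: [Defect].}, and calls whose declared exceptions cannot actually fire (for example strformat's `&`, which is declared as raising ValueError) are wrapped so the "impossible" error becomes an assertion rather than an unlisted exception. A minimal self-contained sketch of that idea (the module and proc names below are illustrative, not part of the patch):

{.push raises: [Defect].}

import strformat

func topicName(forkDigest: string, suffix: string): string =
  # strformat's `&` is declared as raising ValueError, but with a
  # well-formed literal format string it cannot fail at runtime, so the
  # unreachable exception is converted into a Defect via raiseAssert.
  try:
    &"/eth2/{forkDigest}/{suffix}"
  except ValueError as e:
    raiseAssert e.msg

Errors that genuinely can occur at runtime are handled differently in the diff itself: the metrics updates and the status-bar writer catch the exception and log or discard it instead of asserting, since a failure there should not abort block processing.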