Merge branch 'stable' into unstable
Commit 82677c66ce in https://github.com/status-im/nimbus-eth2.git
.gitmodules (vendored) | 4

@@ -223,3 +223,7 @@
 	url = https://github.com/status-im/nim-toml-serialization.git
 	ignore = untracked
 	branch = master
+[submodule "vendor/merge-testnets"]
+	path = vendor/merge-testnets
+	url = https://github.com/eth-clients/merge-testnets.git
+	branch = main
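The new `vendor/merge-testnets` submodule has to be checked out locally before the Ropsten metadata can be compiled in. A minimal sketch using plain git (the repository's usual `make update` flow should also pick it up; both assume you are in the repository root):

```bash
# Fetch only the newly added submodule and confirm the pinned revision.
git submodule update --init vendor/merge-testnets
git -C vendor/merge-testnets rev-parse HEAD
```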
CHANGELOG.md | 18

@@ -1,3 +1,21 @@
+2022-05-20 v22.5.1
+==================
+
+Nimbus `v22.5.1` is a `low-urgency` maintenance release addressing a Web3 compatibility regression and introducing Ropsten testnet support.
+
+### Improvements:
+
+* Support for the Ropsten testnet, intended for merge testing
+  https://github.com/status-im/nimbus-eth2/pull/3648
+
+### Fixes:
+
+* Restore compatibility with certain Web3 endpoints
+  https://github.com/status-im/nimbus-eth2/pull/3645
+
+* More spec-compliant handling of JSON fields in REST, for better compatibility with added and optional fields
+  https://github.com/status-im/nimbus-eth2/pull/3647
+
 2022-05-17 v22.5.0
 ==================
 
Makefile | 27

@@ -26,6 +26,7 @@ BASE_PORT := 9000
 BASE_REST_PORT := 5052
 BASE_METRICS_PORT := 8008
 
+ROPSTEN_WEB3_URL := "--web3-url=wss://ropsten.infura.io/ws/v3/809a18497dd74102b5f37d25aae3c85a"
 GOERLI_WEB3_URL := "--web3-url=wss://goerli.infura.io/ws/v3/809a18497dd74102b5f37d25aae3c85a"
 GNOSIS_WEB3_URLS := "--web3-url=wss://rpc.gnosischain.com/wss --web3-url=wss://xdai.poanetwork.dev/wss"
 

@@ -399,6 +400,32 @@ prater-dev-deposit: | prater-build deposit_contract
 clean-prater:
 	$(call CLEAN_NETWORK,prater)
 
+###
+### Ropsten
+###
+ropsten-build: | nimbus_beacon_node nimbus_signing_node
+
+# https://www.gnu.org/software/make/manual/html_node/Call-Function.html#Call-Function
+ropsten: | ropsten-build
+	$(call CONNECT_TO_NETWORK,ropsten,nimbus_beacon_node,$(ROPSTEN_WEB3_URL))
+
+ropsten-vc: | ropsten-build nimbus_validator_client
+	$(call CONNECT_TO_NETWORK_WITH_VALIDATOR_CLIENT,ropsten,nimbus_beacon_node,$(ROPSTEN_WEB3_URL))
+
+ifneq ($(LOG_LEVEL), TRACE)
+ropsten-dev:
+	+ "$(MAKE)" LOG_LEVEL=TRACE $@
+else
+ropsten-dev: | ropsten-build
+	$(call CONNECT_TO_NETWORK_IN_DEV_MODE,ropsten,nimbus_beacon_node,$(ROPSTEN_WEB3_URL))
+endif
+
+ropsten-dev-deposit: | ropsten-build deposit_contract
+	$(call MAKE_DEPOSIT,ropsten,$(ROPSTEN_WEB3_URL))
+
+clean-ropsten:
+	$(call CLEAN_NETWORK,ropsten)
+
 ###
 ### Gnosis chain binary
 ###
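With these targets in place, joining Ropsten mirrors the existing Prater workflow. A usage sketch, assuming the commands are run from the repository root (the Infura endpoint baked into `ROPSTEN_WEB3_URL` is only a default and can be overridden):

```bash
# Build the required binaries and connect to Ropsten.
make ropsten

# Same, but with a separate validator client process.
make ropsten-vc

# Make a test deposit, then remove the local Ropsten data when finished.
make ropsten-dev-deposit
make clean-ropsten
```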
@@ -1292,6 +1292,7 @@ proc startEth1Syncing(m: Eth1Monitor, delayBeforeStart: Duration) {.async.} =
       providerNetwork = awaitWithRetries m.dataProvider.web3.provider.net_version()
       expectedNetwork = case m.eth1Network.get
         of mainnet: "1"
+        of ropsten: "3"
         of rinkeby: "4"
         of goerli: "5"
     if expectedNetwork != providerNetwork:
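The check above compares the execution client's reported network id with the one expected for the configured `Eth1Network`; Ropsten's id is "3". To see what your own endpoint reports, a plain `net_version` JSON-RPC call works (sketch only; the URL is a placeholder for your execution client):

```bash
# A Ropsten execution client should answer with result "3".
curl -s -X POST -H 'Content-Type: application/json' \
  --data '{"jsonrpc":"2.0","method":"net_version","params":[],"id":1}' \
  http://localhost:8545
```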
@@ -37,6 +37,7 @@
 
   Eth1Network* = enum
     mainnet
+    ropsten
     rinkeby
     goerli
 

@@ -86,6 +87,7 @@
 
 const
   eth2NetworksDir = currentSourcePath.parentDir.replace('\\', '/') & "/../../vendor/eth2-networks"
+  mergeTestnetsDir = currentSourcePath.parentDir.replace('\\', '/') & "/../../vendor/merge-testnets"
 
 proc readBootstrapNodes*(path: string): seq[string] {.raises: [IOError, Defect].} =
   # Read a list of ENR values from a YAML file containing a flat list of entries

@@ -115,6 +117,7 @@ proc loadEth2NetworkMetadata*(path: string, eth1Network = none(Eth1Network)): Et
     genesisPath = path & "/genesis.ssz"
     genesisDepositsSnapshotPath = path & "/genesis_deposit_contract_snapshot.ssz"
     configPath = path & "/config.yaml"
+    deployBlockPath = path & "/deploy_block.txt"
     depositContractBlockPath = path & "/deposit_contract_block.txt"
    bootstrapNodesPath = path & "/bootstrap_nodes.txt"
    bootEnrPath = path & "/boot_enr.yaml"

@@ -134,8 +137,16 @@ proc loadEth2NetworkMetadata*(path: string, eth1Network = none(Eth1Network)): Et
       readFile(depositContractBlockPath).strip
     else:
       ""
+
+    deployBlock = if fileExists(deployBlockPath):
+      readFile(deployBlockPath).strip
+    else:
+      ""
+
     depositContractDeployedAt = if depositContractBlock.len > 0:
       BlockHashOrNumber.init(depositContractBlock)
+    elif deployBlock.len > 0:
+      BlockHashOrNumber.init(deployBlock)
     else:
       BlockHashOrNumber(isHash: false, number: 1)
 

@@ -217,11 +228,16 @@ template eth2Network(path: string, eth1Network: Eth1Network): Eth2NetworkMetadat
   loadCompileTimeNetworkMetadata(eth2NetworksDir & "/" & path,
                                  some eth1Network)
 
+template mergeTestnet(path: string, eth1Network: Eth1Network): Eth2NetworkMetadata =
+  loadCompileTimeNetworkMetadata(mergeTestnetsDir & "/" & path,
+                                 some eth1Network)
+
 when not defined(gnosisChainBinary):
   when const_preset == "mainnet":
     const
       mainnetMetadata* = eth2Network("shared/mainnet", mainnet)
       praterMetadata* = eth2Network("shared/prater", goerli)
+      ropstenMetadata = mergeTestnet("ropsten-beacon-chain", ropsten)
 
   proc getMetadataForNetwork*(networkName: string): Eth2NetworkMetadata {.raises: [Defect, IOError].} =
     template loadRuntimeMetadata: auto =

@@ -242,6 +258,8 @@ when not defined(gnosisChainBinary):
         mainnetMetadata
       of "prater":
         praterMetadata
+      of "ropsten":
+        ropstenMetadata
       else:
         loadRuntimeMetadata()
   else:
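For the compile-time `ropstenMetadata` constant to resolve, the pinned `vendor/merge-testnets/ropsten-beacon-chain` directory must provide the per-network files that `loadEth2NetworkMetadata` looks for. A quick, non-authoritative sanity check (which files actually exist depends on the submodule revision):

```bash
# List the metadata files the loader knows about, if present.
ls vendor/merge-testnets/ropsten-beacon-chain | \
  grep -E 'genesis\.ssz|config\.yaml|deploy_block\.txt|deposit_contract_block\.txt|bootstrap_nodes\.txt|boot_enr\.yaml'
```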
@@ -405,7 +405,7 @@ proc init*(T: type BeaconNode,
         quit 1
 
   let optJwtSecret =
-    block:
+    if cfg.BELLATRIX_FORK_EPOCH != FAR_FUTURE_EPOCH:
       let jwtSecret = rng[].checkJwtSecret(
         string(config.dataDir), config.jwtSecret)
       if jwtSecret.isErr:

@@ -414,6 +414,8 @@ proc init*(T: type BeaconNode,
         quit 1
 
       some jwtSecret.get
+    else:
+      none(seq[byte])
 
   template getDepositContractSnapshot: auto =
     if depositContractSnapshot.isSome:

@@ -427,7 +429,8 @@ proc init*(T: type BeaconNode,
       if snapshotRes.isErr:
         fatal "Failed to locate the deposit contract deployment block",
               depositContract = cfg.DEPOSIT_CONTRACT_ADDRESS,
-              deploymentBlock = $depositContractDeployedAt
+              deploymentBlock = $depositContractDeployedAt,
+              err = snapshotRes.error
         quit 1
       else:
         some snapshotRes.get
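Because the JWT secret is now only required when `BELLATRIX_FORK_EPOCH` is actually scheduled (as on Ropsten), nodes preparing for the merge need a secret shared with their execution client. A hedged sketch, assuming the path is passed through the `--jwt-secret` option that `config.jwtSecret` above refers to:

```bash
# Generate a random 32-byte (64 hex character) secret for both clients to share.
openssl rand -hex 32 > "$HOME/.nimbus-jwt.hex"
# Assumed invocation detail: point the beacon node and the execution client at the
# same file, e.g. --jwt-secret="$HOME/.nimbus-jwt.hex" on the Nimbus side.
```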
@@ -7,7 +7,8 @@
 import std/typetraits
 import stew/[assign2, results, base10, byteutils], presto/common,
        libp2p/peerid, serialization, json_serialization,
-       json_serialization/std/[options, net, sets]
+       json_serialization/std/[options, net, sets],
+       chronicles
 import ".."/[eth2_ssz_serialization, forks, keystore],
        ".."/datatypes/[phase0, altair, bellatrix],
        ".."/mev/bellatrix_mev,

@@ -16,7 +17,7 @@ import ".."/[eth2_ssz_serialization, forks, keystore],
 import nimcrypto/utils as ncrutils
 
 export
-  eth2_ssz_serialization, results, peerid, common, serialization,
+  eth2_ssz_serialization, results, peerid, common, serialization, chronicles,
   json_serialization, options, net, sets, rest_types, slashing_protection_common
 
 from web3/ethtypes import BlockHash
@@ -24,6 +25,33 @@ export ethtypes.BlockHash
 
 Json.createFlavor RestJson
 
+## The RestJson format implements JSON serialization in the way specified
+## by the Beacon API:
+##
+## https://ethereum.github.io/beacon-APIs/
+##
+## In this format, we must always set `allowUnknownFields = true` in the
+## decode calls in order to conform to the following spec:
+##
+## All JSON responses return the requested data under a data key in the top
+## level of their response. Additional metadata may or may not be present
+## in other keys at the top level of the response, dependent on the endpoint.
+## The rules that require an increase in version number are as follows:
+##
+## - no field that is listed in an endpoint shall be removed without an increase
+##   in the version number
+##
+## - no field that is listed in an endpoint shall be altered in terms of format
+##   (e.g. from a string to an array) without an increase in the version number
+##
+## Note that it is possible for a field to be added to an endpoint's data or
+## metadata without an increase in the version number.
+##
+## TODO nim-json-serializations should allow setting up this policy per format
+##
+## This also means that when new fields are introduced to the object definitions
+## below, one must use the `Option[T]` type.
+
 const
   DecimalSet = {'0' .. '9'}
     # Base10 (decimal) set of chars
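The policy described above is easy to observe against a running node: every Beacon API response nests its payload under a top-level `data` key, and extra keys or fields may appear without a version bump, which is exactly why decoding now always allows unknown fields. A rough illustration, assuming a local node with the REST API enabled on the default port from the Makefile (5052):

```bash
# The interesting payload is under "data"; any additional top-level keys are legal.
curl -s http://localhost:5052/eth/v1/config/spec | head -c 300
```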
@@ -93,12 +121,6 @@ type
       Web3SignerSignatureResponse |
       Web3SignerStatusResponse
 
-  # These types may be extended with additional fields in the future.
-  # Locally unknown fields are silently ignored when decoding them.
-  ExtensibleDecodeTypes* =
-    GetSpecResponse |
-    GetSpecVCResponse
-
   SszDecodeTypes* =
     GetPhase0StateSszResponse |
     GetPhase0BlockSszResponse
@@ -338,7 +360,9 @@ proc decodeJsonString*[T](t: typedesc[T],
                           data: JsonString,
                           requireAllFields = true): Result[T, cstring] =
   try:
-    ok(RestJson.decode(string(data), T, requireAllFields = requireAllFields))
+    ok(RestJson.decode(string(data), T,
+                       requireAllFields = requireAllFields,
+                       allowUnknownFields = true))
   except SerializationError:
     err("Unable to deserialize data")
 
@@ -664,6 +688,13 @@ proc readValue*(
     raiseUnexpectedValue(
       reader, "Expected a valid hex string with " & $value.len() & " bytes")
 
+template unrecognizedFieldWarning =
+  # TODO: There should be a different notification mechanism for informing the
+  #       caller of a deserialization routine for unexpected fields.
+  #       The chronicles import in this module should be removed.
+  debug "JSON field not recognized by the current version of Nimbus. Consider upgrading",
+        fieldName, typeName = typetraits.name(typeof value)
+
 ## ForkedBeaconBlock
 proc readValue*[BlockType: Web3SignerForkedBeaconBlock|ForkedBeaconBlock](
     reader: var JsonReader[RestJson],
@@ -694,7 +725,7 @@ proc readValue*[BlockType: Web3SignerForkedBeaconBlock|ForkedBeaconBlock](
                                "ForkedBeaconBlock")
       data = some(reader.readValue(JsonString))
     else:
-      reader.raiseUnexpectedField(fieldName, "ForkedBeaconBlock")
+      unrecognizedFieldWarning()
 
   if version.isNone():
     reader.raiseUnexpectedValue("Field version is missing")

@@ -705,8 +736,10 @@ proc readValue*[BlockType: Web3SignerForkedBeaconBlock|ForkedBeaconBlock](
   of BeaconBlockFork.Phase0:
     let res =
       try:
-        some(RestJson.decode(string(data.get()), phase0.BeaconBlock,
-                             requireAllFields = true))
+        some(RestJson.decode(string(data.get()),
+                             phase0.BeaconBlock,
+                             requireAllFields = true,
+                             allowUnknownFields = true))
       except SerializationError:
         none[phase0.BeaconBlock]()
     if res.isNone():

@@ -715,8 +748,10 @@ proc readValue*[BlockType: Web3SignerForkedBeaconBlock|ForkedBeaconBlock](
   of BeaconBlockFork.Altair:
     let res =
       try:
-        some(RestJson.decode(string(data.get()), altair.BeaconBlock,
-                             requireAllFields = true))
+        some(RestJson.decode(string(data.get()),
+                             altair.BeaconBlock,
+                             requireAllFields = true,
+                             allowUnknownFields = true))
       except SerializationError:
         none[altair.BeaconBlock]()
     if res.isNone():

@@ -725,8 +760,10 @@ proc readValue*[BlockType: Web3SignerForkedBeaconBlock|ForkedBeaconBlock](
   of BeaconBlockFork.Bellatrix:
     let res =
       try:
-        some(RestJson.decode(string(data.get()), bellatrix.BeaconBlock,
-                             requireAllFields = true))
+        some(RestJson.decode(string(data.get()),
+                             bellatrix.BeaconBlock,
+                             requireAllFields = true,
+                             allowUnknownFields = true))
       except SerializationError:
         none[bellatrix.BeaconBlock]()
     if res.isNone():
@@ -835,8 +872,7 @@ proc readValue*(reader: var JsonReader[RestJson],
                                  "RestPublishedBeaconBlockBody")
       execution_payload = some(reader.readValue(ExecutionPayload))
     else:
-      # Ignore unknown fields
-      discard
+      unrecognizedFieldWarning()
 
   if randao_reveal.isNone():
     reader.raiseUnexpectedValue("Field `randao_reveal` is missing")

@@ -949,8 +985,7 @@ proc readValue*(reader: var JsonReader[RestJson],
                                  "RestPublishedBeaconBlock")
       blockBody = some(reader.readValue(RestPublishedBeaconBlockBody))
     else:
-      # Ignore unknown fields
-      discard
+      unrecognizedFieldWarning()
 
   if slot.isNone():
     reader.raiseUnexpectedValue("Field `slot` is missing")

@@ -1017,8 +1052,7 @@ proc readValue*(reader: var JsonReader[RestJson],
                                  "RestPublishedSignedBeaconBlock")
       signature = some(reader.readValue(ValidatorSig))
     else:
-      # Ignore unknown fields
-      discard
+      unrecognizedFieldWarning()
 
   if signature.isNone():
     reader.raiseUnexpectedValue("Field `signature` is missing")
@@ -1081,7 +1115,7 @@ proc readValue*(reader: var JsonReader[RestJson],
                                "ForkedSignedBeaconBlock")
       data = some(reader.readValue(JsonString))
     else:
-      reader.raiseUnexpectedField(fieldName, "ForkedSignedBeaconBlock")
+      unrecognizedFieldWarning()
 
   if version.isNone():
     reader.raiseUnexpectedValue("Field version is missing")

@@ -1092,8 +1126,10 @@ proc readValue*(reader: var JsonReader[RestJson],
   of BeaconBlockFork.Phase0:
     let res =
       try:
-        some(RestJson.decode(string(data.get()), phase0.SignedBeaconBlock,
-                             requireAllFields = true))
+        some(RestJson.decode(string(data.get()),
+                             phase0.SignedBeaconBlock,
+                             requireAllFields = true,
+                             allowUnknownFields = true))
       except SerializationError:
         none[phase0.SignedBeaconBlock]()
     if res.isNone():

@@ -1102,8 +1138,10 @@ proc readValue*(reader: var JsonReader[RestJson],
   of BeaconBlockFork.Altair:
     let res =
       try:
-        some(RestJson.decode(string(data.get()), altair.SignedBeaconBlock,
-                             requireAllFields = true))
+        some(RestJson.decode(string(data.get()),
+                             altair.SignedBeaconBlock,
+                             requireAllFields = true,
+                             allowUnknownFields = true))
       except SerializationError:
         none[altair.SignedBeaconBlock]()
     if res.isNone():

@@ -1112,8 +1150,10 @@ proc readValue*(reader: var JsonReader[RestJson],
   of BeaconBlockFork.Bellatrix:
     let res =
       try:
-        some(RestJson.decode(string(data.get()), bellatrix.SignedBeaconBlock,
-                             requireAllFields = true))
+        some(RestJson.decode(string(data.get()),
+                             bellatrix.SignedBeaconBlock,
+                             requireAllFields = true,
+                             allowUnknownFields = true))
       except SerializationError:
         none[bellatrix.SignedBeaconBlock]()
     if res.isNone():
@@ -1165,7 +1205,7 @@ proc readValue*(reader: var JsonReader[RestJson],
                                "ForkedBeaconState")
       data = some(reader.readValue(JsonString))
     else:
-      reader.raiseUnexpectedField(fieldName, "ForkedBeaconState")
+      unrecognizedFieldWarning()
 
   if version.isNone():
     reader.raiseUnexpectedValue("Field version is missing")

@@ -1188,7 +1228,10 @@ proc readValue*(reader: var JsonReader[RestJson],
   of BeaconStateFork.Phase0:
     try:
       tmp[].phase0Data.data = RestJson.decode(
-        string(data.get()), phase0.BeaconState, requireAllFields = true)
+        string(data.get()),
+        phase0.BeaconState,
+        requireAllFields = true,
+        allowUnknownFields = true)
     except SerializationError:
       reader.raiseUnexpectedValue("Incorrect phase0 beacon state format")
 

@@ -1196,7 +1239,10 @@ proc readValue*(reader: var JsonReader[RestJson],
   of BeaconStateFork.Altair:
     try:
       tmp[].altairData.data = RestJson.decode(
-        string(data.get()), altair.BeaconState, requireAllFields = true)
+        string(data.get()),
+        altair.BeaconState,
+        requireAllFields = true,
+        allowUnknownFields = true)
     except SerializationError:
       reader.raiseUnexpectedValue("Incorrect altair beacon state format")
 

@@ -1204,7 +1250,10 @@ proc readValue*(reader: var JsonReader[RestJson],
   of BeaconStateFork.Bellatrix:
     try:
       tmp[].bellatrixData.data = RestJson.decode(
-        string(data.get()), bellatrix.BeaconState, requireAllFields = true)
+        string(data.get()),
+        bellatrix.BeaconState,
+        requireAllFields = true,
+        allowUnknownFields = true)
     except SerializationError:
       reader.raiseUnexpectedValue("Incorrect altair beacon state format")
     toValue(bellatrixData)
@@ -1382,8 +1431,7 @@ proc readValue*(reader: var JsonReader[RestJson],
         dataName = fieldName
         data = some(reader.readValue(JsonString))
       else:
-        # We ignore all unknown fields.
-        discard
+        unrecognizedFieldWarning()
 
   if requestKind.isNone():
     reader.raiseUnexpectedValue("Field `type` is missing")

@@ -1620,10 +1668,11 @@ proc readValue*(reader: var JsonReader[RestJson],
           reader.raiseUnexpectedValue("Invalid `status` value")
       )
     else:
-      # We ignore all unknown fields.
-      discard
+      unrecognizedFieldWarning()
 
   if status.isNone():
     reader.raiseUnexpectedValue("Field `status` is missing")
 
   value = RemoteKeystoreStatus(status: status.get(), message: message)
 
 ## ScryptSalt
@@ -1675,8 +1724,7 @@ proc readValue*(reader: var JsonReader[RestJson], value: var Pbkdf2Params) {.
                                  "Pbkdf2Params")
       salt = some(reader.readValue(Pbkdf2Salt))
     else:
-      # Ignore unknown field names.
-      discard
+      unrecognizedFieldWarning()
 
   if dklen.isNone():
     reader.raiseUnexpectedValue("Field `dklen` is missing")

@@ -1749,8 +1797,7 @@ proc readValue*(reader: var JsonReader[RestJson], value: var ScryptParams) {.
                                  "ScryptParams")
       salt = some(reader.readValue(ScryptSalt))
     else:
-      # Ignore unknown field names.
-      discard
+      unrecognizedFieldWarning()
 
   if dklen.isNone():
     reader.raiseUnexpectedValue("Field `dklen` is missing")

@@ -1833,8 +1880,7 @@ proc readValue*(reader: var JsonReader[RestJson], value: var Keystore) {.
         reader.raiseUnexpectedValue("Unexpected negative `version` value")
       version = some(res)
     else:
-      # Ignore unknown field names.
-      discard
+      unrecognizedFieldWarning()
 
   if crypto.isNone():
     reader.raiseUnexpectedValue("Field `crypto` is missing")

@@ -1888,8 +1934,7 @@ proc readValue*(reader: var JsonReader[RestJson],
                                  "KeystoresAndSlashingProtection")
       slashing = some(reader.readValue(SPDIR))
     else:
-      # Ignore unknown field names.
-      discard
+      unrecognizedFieldWarning()
 
   if len(keystores) == 0:
     reader.raiseUnexpectedValue("Missing `keystores` value")
@@ -1912,7 +1957,7 @@ proc decodeBody*[T](t: typedesc[T],
     return err("Unsupported content type")
   let data =
     try:
-      RestJson.decode(body.data, T)
+      RestJson.decode(body.data, T, allowUnknownFields = true)
     except SerializationError as exc:
      return err("Unable to deserialize data")
    except CatchableError:

@@ -1959,11 +2004,10 @@ proc encodeBytes*[T: EncodeArrays](value: T,
 
 proc decodeBytes*[T: DecodeTypes](t: typedesc[T], value: openArray[byte],
                                   contentType: string): RestResult[T] =
-  const isExtensibleType = t is ExtensibleDecodeTypes
   case contentType
   of "application/json":
     try:
-      ok RestJson.decode(value, T, allowUnknownFields = isExtensibleType)
+      ok RestJson.decode(value, T, allowUnknownFields = true)
     except SerializationError:
       err("Serialization error")
   else:
@@ -169,6 +169,7 @@ when const_preset == "mainnet":
       # canonical network names include:
       # * 'mainnet' - there can be only one
       # * 'prater' - testnet
+      # * 'ropsten' - testnet
       # Must match the regex: [a-z0-9\-]
       CONFIG_NAME: "mainnet",
 

@@ -271,6 +272,7 @@ elif const_preset == "minimal":
       # canonical network names include:
       # * 'mainnet' - there can be only one
       # * 'prater' - testnet
+      # * 'ropsten' - testnet
       # Must match the regex: [a-z0-9\-]
       CONFIG_NAME: "minimal",
 
@@ -15,7 +15,7 @@ when not defined(nimscript):
 const
   versionMajor* = 22
   versionMinor* = 5
-  versionBuild* = 0
+  versionBuild* = 1
 
   versionBlob* = "stateofus" # Single word - ends up in the default graffiti
 
docker/dist/README.md.tpl (vendored) | 2

@@ -67,5 +67,5 @@ The `prater` testnet runs on
 
 ```bash
 # using a local Goerli instance
-WEB3_URL="ws://localhost:8545" ./run-mainnet-node.sh --max-peers=150
+WEB3_URL="ws://localhost:8545" ./run-prater-node.sh --max-peers=150
 ```
@@ -178,7 +178,7 @@ Each era is identified by when it ends. Thus, the genesis era is era `0`, follow
 
 `.era` file names follow a simple convention: `<config-name>-<era-number>-<era-count>-<short-historical-root>.era`:
 
-* `config-name` is the `CONFIG_NAME` field of the runtime configuration (`mainnet`, `prater`, etc)
+* `config-name` is the `CONFIG_NAME` field of the runtime configuration (`mainnet`, `prater`, `ropsten`, etc)
 * `era-number` is the number of the _first_ era stored in the file - for example, the genesis era file has number 0 - as a 5-digit 0-filled decimal integer
 * `short-era-root` is the first 4 bytes of the last historical root in the _last_ state in the era file, lower-case hex-encoded (8 characters), except the genesis era which instead uses the `genesis_validators_root` field from the genesis state.
   * The root is available as `state.historical_roots[era - 1]` except for genesis, which is `state.genesis_validators_root`
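As a purely illustrative (hypothetical) example of the convention, a file named `mainnet-00256-00001-8a3f2c1d.era` would contain a single era starting at era 256 for the `mainnet` configuration, with `8a3f2c1d` being the lower-case hex encoding of the first 4 bytes of the corresponding historical root.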
@@ -48,6 +48,12 @@
 - [Migrate from another client](./migration.md)
 - [Validate with a Raspberry Pi](./pi-guide.md)
 
+# Security
+- [Security Issues / Responsible Disclosure](./security_issues.md)
+- [Security Audit](./audit.md)
+- [Reproducible Builds](./distribution_internals.md)
+
+
 # Downloads
 - [Download binaries](./binaries.md)
 - [Download Docker images](./docker.md)

@@ -62,7 +68,5 @@
 - [For developers](./developers.md)
 - [Contribute](./contribute.md)
 - [Resources](./resources.md)
-- [Binary distribution internals](./distribution_internals.md)
 - [Prater testnet](./prater.md)
-- [Security Audit](./audit.md)
 - [FAQ](./faq.md)
@@ -32,9 +32,9 @@ The following options are available:
 
 Where:
 
-- The `network` can either be `mainnet` or `prater`
+- The `network` can either be `mainnet`, `prater` or `ropsten`
 
-- The default location of the `db` is either `build/data/shared_mainnet_0/db` or `build/data/shared_prater_0/db`
+- The default location of the `db` is either `build/data/shared_mainnet_0/db`, `build/data/shared_prater_0/db` or `build/data/shared_ropsten_0/db`
 
 
 Near the bottom, you should see
docs/the_nimbus_book/src/security_issues.md (new file) | 24

@@ -0,0 +1,24 @@
+## Security related issues
+**For any security related issues, follow responsible disclosure standards. Do not file public issues.**
+
+
+*Please file a report at the ethereum [bug bounty program](https://ethereum.org/en/bug-bounty/) in order to receive a reward for your findings.*
+
+
+*When in doubt, please send an encrypted email to security@status.im and ask ([gpg key](https://github.com/status-im/status-security/blob/master/pgp-keys/security%40status.im.asc)).*
+
+
+*Security related issues are (sufficient but not necessary criteria):*
+
+- Soundness of protocols (consensus model, p2p protocols): consensus liveness and integrity.
+- Errors and failures in the cryptographic primitives
+- RCE vulnerabilities
+- Any issues causing consensus splits from the rest of the network
+- Denial of service (DOS) vectors
+- Broken Access Control
+- Memory Errors
+- Security Misconfiguration
+- Vulnerable Dependencies
+- Authentication Failures
+- Data Integrity Failures
+- Logging and Monitoring Vulnerabilities
run-ropsten-beacon-node.sh (new executable file) | 12

@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2020-2021 Status Research & Development GmbH. Licensed under
+# either of:
+# - Apache License, version 2.0
+# - MIT license
+# at your option. This file may not be copied, modified, or distributed except
+# according to those terms.
+
+cd "$(dirname $0)"
+# Allow the binary to receive signals directly.
+exec scripts/run-beacon-node.sh nimbus_beacon_node ropsten $@
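The wrapper simply forwards its arguments to `scripts/run-beacon-node.sh`, so additional beacon-node options can be appended directly. A hedged usage sketch (the websocket endpoint below is a placeholder for your own Ropsten execution client):

```bash
# Join Ropsten via the helper script; extra flags are passed through to the beacon node.
./run-ropsten-beacon-node.sh --web3-url=ws://localhost:8546
```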
vendor/merge-testnets (vendored submodule) | 1

@@ -0,0 +1 @@
+Subproject commit 5b1b44aa912dd3433ba30d381345659c53918955