2022-06-19 05:57:52 +00:00
|
|
|
# beacon_chain
|
2023-01-12 17:11:38 +00:00
|
|
|
# Copyright (c) 2018-2023 Status Research & Development GmbH
|
2021-08-03 15:17:11 +00:00
|
|
|
# Licensed and distributed under either of
|
|
|
|
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
|
|
|
|
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
|
|
|
|
# at your option. This file may not be copied, modified, or distributed except according to those terms.
|
|
|
|
|
2022-09-29 21:00:53 +00:00
|
|
|
import std/[typetraits, strutils]
|
2022-01-08 20:06:34 +00:00
|
|
|
import stew/[assign2, results, base10, byteutils], presto/common,
|
2022-02-07 20:36:09 +00:00
|
|
|
libp2p/peerid, serialization, json_serialization,
|
2022-05-20 15:25:26 +00:00
|
|
|
json_serialization/std/[options, net, sets],
|
|
|
|
chronicles
|
2022-02-07 20:36:09 +00:00
|
|
|
import ".."/[eth2_ssz_serialization, forks, keystore],
|
2022-06-20 05:53:39 +00:00
|
|
|
".."/../consensus_object_pools/block_pools_types,
|
2022-01-06 11:25:35 +00:00
|
|
|
".."/datatypes/[phase0, altair, bellatrix],
|
2023-02-06 18:07:30 +00:00
|
|
|
".."/mev/[bellatrix_mev, capella_mev],
|
2022-02-07 20:36:09 +00:00
|
|
|
".."/../validators/slashing_protection_common,
|
2021-12-22 12:37:31 +00:00
|
|
|
"."/[rest_types, rest_keymanager_types]
|
2022-02-07 20:36:09 +00:00
|
|
|
import nimcrypto/utils as ncrutils
|
2021-11-09 19:21:36 +00:00
|
|
|
|
2022-11-22 11:56:05 +00:00
|
|
|
from ".."/datatypes/capella import SignedBeaconBlock
|
2023-02-25 01:03:34 +00:00
|
|
|
from ".."/datatypes/deneb import BeaconState
|
2022-11-22 11:56:05 +00:00
|
|
|
|
2021-08-18 18:57:58 +00:00
|
|
|
export
|
2022-05-20 15:25:26 +00:00
|
|
|
eth2_ssz_serialization, results, peerid, common, serialization, chronicles,
|
2022-02-07 20:36:09 +00:00
|
|
|
json_serialization, options, net, sets, rest_types, slashing_protection_common
|
2021-03-23 22:50:18 +00:00
|
|
|
|
2021-11-02 18:23:31 +00:00
|
|
|
from web3/ethtypes import BlockHash
|
|
|
|
export ethtypes.BlockHash
|
|
|
|
|
2023-04-16 06:07:07 +00:00
|
|
|
func decodeMediaType*(
    contentType: Opt[ContentTypeData]): Result[MediaType, string] =
  ## Extract the media type from an optional ``Content-Type`` header value.
  ## Fails when the header is absent or carries a wildcard media type.
  if contentType.isSome and not isWildCard(contentType.get.mediaType):
    ok contentType.get.mediaType
  else:
    err("Missing or incorrect Content-Type")
|
|
|
|
|
|
|
|
func decodeEthConsensusVersion*(
    value: string): Result[ConsensusFork, string] =
  ## Map an ``Eth-Consensus-Version`` header value (case-insensitive) to
  ## the matching `ConsensusFork`; errors out on unknown fork names.
  let lowered = value.toLowerAscii()
  for fork in ConsensusFork:
    if ($fork).toLowerAscii() == lowered:
      return ok fork
  err("Unsupported Eth-Consensus-Version: " & value)
|
|
|
|
|
2021-08-03 15:17:11 +00:00
|
|
|
Json.createFlavor RestJson
## The RestJson format implements JSON serialization in the way specified
## by the Beacon API:
##
## https://ethereum.github.io/beacon-APIs/
##
## In this format, we must always set `allowUnknownFields = true` in the
## decode calls in order to conform to the following spec:
##
## All JSON responses return the requested data under a data key in the top
## level of their response. Additional metadata may or may not be present
## in other keys at the top level of the response, dependent on the endpoint.
## The rules that require an increase in version number are as follows:
##
## - no field that is listed in an endpoint shall be removed without an increase
## in the version number
##
## - no field that is listed in an endpoint shall be altered in terms of format
## (e.g. from a string to an array) without an increase in the version number
##
## Note that it is possible for a field to be added to an endpoint's data or
## metadata without an increase in the version number.
##
## TODO nim-json-serializations should allow setting up this policy per format
##
## This also means that when new fields are introduced to the object definitions
## below, one must use the `Option[T]` type.
|
|
|
|
|
2021-08-03 15:17:11 +00:00
|
|
|
const
  DecimalSet = {'0' .. '9'}
    # Base10 (decimal) set of chars
  ValidatorKeySize = RawPubKeySize * 2
    # Size of `ValidatorPubKey` hexadecimal value (without 0x)
  ValidatorSigSize = RawSigSize * 2
    # Size of `ValidatorSig` hexadecimal value (without 0x)
  RootHashSize = sizeof(Eth2Digest) * 2
    # Size of `xxx_root` hexadecimal value (without 0x)

  # Media types accepted and produced by the REST API endpoints
  ApplicationJsonMediaType* = MediaType.init("application/json")
  TextPlainMediaType* = MediaType.init("text/plain")
  OctetStreamMediaType* = MediaType.init("application/octet-stream")
  UrlEncodedMediaType* = MediaType.init("application/x-www-form-urlencoded")
|
|
|
|
|
2021-08-03 15:17:11 +00:00
|
|
|
type
  # Placeholder type for endpoints that accept an empty request body.
  EmptyBody* = object

  # Request-body types that may be encoded as JSON.
  EncodeTypes* =
    AttesterSlashing |
    DeleteKeystoresBody |
    EmptyBody |
    ImportDistributedKeystoresBody |
    ImportRemoteKeystoresBody |
    KeystoresAndSlashingProtection |
    PrepareBeaconProposer |
    ProposerSlashing |
    SetFeeRecipientRequest |
    SetGasLimitRequest |
    bellatrix_mev.SignedBlindedBeaconBlock |
    capella_mev.SignedBlindedBeaconBlock |
    SignedValidatorRegistrationV1 |
    SignedVoluntaryExit |
    Web3SignerRequest

  # Request-body types that may be encoded as SSZ (application/octet-stream).
  EncodeOctetTypes* =
    altair.SignedBeaconBlock |
    bellatrix.SignedBeaconBlock |
    capella.SignedBeaconBlock |
    phase0.SignedBeaconBlock

  # Sequence types that may be encoded as top-level JSON arrays.
  EncodeArrays* =
    seq[Attestation] |
    seq[PrepareBeaconProposer] |
    seq[RemoteKeystoreInfo] |
    seq[RestCommitteeSubscription] |
    seq[RestSignedContributionAndProof] |
    seq[RestSyncCommitteeMessage] |
    seq[RestSyncCommitteeSubscription] |
    seq[SignedAggregateAndProof] |
    seq[SignedValidatorRegistrationV1] |
    seq[ValidatorIndex]

  # Response types that may be decoded from JSON.
  DecodeTypes* =
    DataEnclosedObject |
    DataMetaEnclosedObject |
    DataRootEnclosedObject |
    DataOptimisticObject |
    DataVersionEnclosedObject |
    GetBlockV2Response |
    GetDistributedKeystoresResponse |
    GetKeystoresResponse |
    GetRemoteKeystoresResponse |
    GetStateForkResponse |
    GetStateV2Response |
    KeymanagerGenericError |
    KeystoresAndSlashingProtection |
    ListFeeRecipientResponse |
    PrepareBeaconProposer |
    ProduceBlockResponseV2 |
    ProduceBlindedBlockResponse |
    RestIndexedErrorMessage |
    RestErrorMessage |
    RestValidator |
    Web3SignerErrorResponse |
    Web3SignerKeysResponse |
    Web3SignerSignatureResponse |
    Web3SignerStatusResponse |
    GetStateRootResponse |
    GetBlockRootResponse |
    SomeForkedLightClientObject |
    seq[SomeForkedLightClientObject]

  # A payload together with the fork version it was produced under,
  # both as a JSON version label and as an SSZ fork digest context.
  RestVersioned*[T] = object
    data*: T
    jsonVersion*: ConsensusFork
    sszContext*: ForkDigest
|
|
|
|
|
2023-01-20 14:14:37 +00:00
|
|
|
{.push raises: [].}
|
2021-08-23 10:41:48 +00:00
|
|
|
|
|
|
|
proc prepareJsonResponse*(t: typedesc[RestApiResponse], d: auto): seq[byte] =
  ## Serialize `d` inside a ``{"data": ...}`` envelope and return the raw
  ## JSON bytes; yields an empty sequence when serialization fails.
  var payload: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("data", d)
    writer.endRecord()
    payload = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # `payload` stays empty, matching previous behaviour
  payload
|
|
|
|
|
2023-01-12 17:11:38 +00:00
|
|
|
proc prepareJsonStringResponse*[T: SomeForkedLightClientObject](
    t: typedesc[RestApiResponse], d: RestVersioned[T]): string =
  ## Render a versioned light client object as a ``{"version", "data"}``
  ## JSON record. Returns an empty string when the wrapped fork is `None`
  ## (nothing is written at all) or when serialization fails.
  let res =
    block:
      var default: string
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        withForkyObject(d.data):
          # Compile-time dispatch on the light client data fork; the
          # `None` branch intentionally emits nothing.
          when lcDataFork > LightClientDataFork.None:
            writer.beginRecord()
            writer.writeField("version", d.jsonVersion.toString())
            writer.writeField("data", forkyObject)
            writer.endRecord()
        stream.getOutput(string)
      except SerializationError:
        default
      except IOError:
        default
  res
|
|
|
|
|
2021-09-22 12:17:15 +00:00
|
|
|
proc prepareJsonStringResponse*(t: typedesc[RestApiResponse], d: auto): string =
  ## Serialize `d` directly (no envelope) to a JSON string; yields an
  ## empty string when serialization fails.
  var rendered: string
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.writeValue(d)
    rendered = stream.getOutput(string)
  except SerializationError, IOError:
    discard  # `rendered` stays empty, matching previous behaviour
  rendered
|
|
|
|
|
2021-08-23 10:41:48 +00:00
|
|
|
proc jsonResponseWRoot*(t: typedesc[RestApiResponse], data: auto,
                        dependent_root: Eth2Digest,
                        execOpt: Option[bool]): RestApiResponse =
  ## JSON envelope with ``dependent_root``, an optional
  ## ``execution_optimistic`` flag and the ``data`` payload.
  var body: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("dependent_root", dependent_root)
    if execOpt.isSome():
      writer.writeField("execution_optimistic", execOpt.get())
    writer.writeField("data", data)
    writer.endRecord()
    body = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.response(body, Http200, "application/json")
|
|
|
|
|
|
|
|
proc jsonResponse*(t: typedesc[RestApiResponse], data: auto): RestApiResponse =
  ## Standard Beacon API success response: ``{"data": ...}`` with an
  ## ``application/json`` content type and HTTP 200.
  var body: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("data", data)
    writer.endRecord()
    body = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.response(body, Http200, "application/json")
|
2021-03-23 22:50:18 +00:00
|
|
|
|
2022-06-20 05:53:39 +00:00
|
|
|
proc jsonResponseBlock*(t: typedesc[RestApiResponse],
                        data: ForkedSignedBeaconBlock,
                        execOpt: Option[bool]): RestApiResponse =
  ## Serialize a forked signed beacon block into the Beacon API envelope
  ## (``version``, optional ``execution_optimistic``, ``data``) and mirror
  ## the fork name in the ``eth-consensus-version`` response header.
  let
    headers = [("eth-consensus-version", data.kind.toString())]
    res =
      block:
        var default: seq[byte]
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.beginRecord()
          writer.writeField("version", data.kind.toString())
          if execOpt.isSome():
            writer.writeField("execution_optimistic", execOpt.get())
          withBlck(data):
            # `withBlck` unpacks the fork-specific block as `blck`
            writer.writeField("data", blck)
          writer.endRecord()
          stream.getOutput(seq[byte])
        except SerializationError:
          default
        except IOError:
          default
  RestApiResponse.response(res, Http200, "application/json", headers = headers)
|
2022-06-20 05:53:39 +00:00
|
|
|
|
|
|
|
proc jsonResponseState*(t: typedesc[RestApiResponse],
                        data: ForkedHashedBeaconState,
                        execOpt: Option[bool]): RestApiResponse =
  ## Serialize a forked beacon state into the Beacon API envelope
  ## (``version``, optional ``execution_optimistic``, ``data``) and mirror
  ## the fork name in the ``eth-consensus-version`` response header.
  let
    headers = [("eth-consensus-version", data.kind.toString())]
    res =
      block:
        var default: seq[byte]
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.beginRecord()
          writer.writeField("version", data.kind.toString())
          if execOpt.isSome():
            writer.writeField("execution_optimistic", execOpt.get())
          withState(data):
            # `withState` unpacks the fork-specific state as `forkyState`
            writer.writeField("data", forkyState.data)
          writer.endRecord()
          stream.getOutput(seq[byte])
        except SerializationError:
          default
        except IOError:
          default
  RestApiResponse.response(res, Http200, "application/json", headers = headers)
|
|
|
|
|
|
|
|
proc jsonResponseWOpt*(t: typedesc[RestApiResponse], data: auto,
                       execOpt: Option[bool]): RestApiResponse =
  ## JSON envelope with an optional ``execution_optimistic`` flag followed
  ## by the ``data`` payload.
  var body: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    if execOpt.isSome():
      writer.writeField("execution_optimistic", execOpt.get())
    writer.writeField("data", data)
    writer.endRecord()
    body = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.response(body, Http200, "application/json")
|
|
|
|
|
2022-10-04 11:38:09 +00:00
|
|
|
proc jsonResponseWVersion*(t: typedesc[RestApiResponse], data: auto,
                           version: ConsensusFork): RestApiResponse =
  ## JSON envelope with an explicit ``version`` field; the same fork name
  ## is also exposed via the ``eth-consensus-version`` header.
  let headers = [("eth-consensus-version", version.toString())]
  var body: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("version", version.toString())
    writer.writeField("data", data)
    writer.endRecord()
    body = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.response(body, Http200, "application/json", headers = headers)
|
|
|
|
|
2023-01-12 17:11:38 +00:00
|
|
|
proc jsonResponseVersioned*[T: SomeForkedLightClientObject](
    t: typedesc[RestApiResponse],
    entries: openArray[RestVersioned[T]]): RestApiResponse =
  ## Serialize versioned light client objects as a JSON array of
  ## ``{"version", "data"}`` records; entries with fork `None` are
  ## skipped entirely.
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        for e in writer.stepwiseArrayCreation(entries):
          withForkyObject(e.data):
            # Only forks beyond `None` carry serializable payloads
            when lcDataFork > LightClientDataFork.None:
              writer.beginRecord()
              writer.writeField("version", e.jsonVersion.toString())
              writer.writeField("data", forkyObject)
              writer.endRecord()
        stream.getOutput(seq[byte])
      except SerializationError:
        default
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json")
|
|
|
|
|
2021-08-27 09:00:06 +00:00
|
|
|
proc jsonResponsePlain*(t: typedesc[RestApiResponse],
                        data: auto): RestApiResponse =
  ## Serialize `data` directly (no envelope) as an ``application/json``
  ## response body.
  var body: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.writeValue(data)
    body = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.response(body, Http200, "application/json")
|
|
|
|
|
2021-04-04 07:23:36 +00:00
|
|
|
proc jsonResponseWMeta*(t: typedesc[RestApiResponse],
                        data: auto, meta: auto): RestApiResponse =
  ## JSON envelope with both ``data`` and ``meta`` top-level keys.
  var body: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("data", data)
    writer.writeField("meta", meta)
    writer.endRecord()
    body = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.response(body, Http200, "application/json")
|
2021-03-23 22:50:18 +00:00
|
|
|
|
2021-07-13 11:15:07 +00:00
|
|
|
proc jsonMsgResponse*(t: typedesc[RestApiResponse],
                      msg: string = ""): RestApiResponse =
  ## Success message response: ``{"code": 200, "message": msg}``.
  var body: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("code", 200)
    writer.writeField("message", msg)
    writer.endRecord()
    body = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.response(body, Http200, "application/json")
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
proc jsonError*(t: typedesc[RestApiResponse], status: HttpCode = Http200,
                msg: string = ""): RestApiResponse =
  ## Beacon API error response: ``{"code": status, "message": msg}``.
  var body: string
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("code", int(status.toInt()))
    writer.writeField("message", msg)
    writer.endRecord()
    body = stream.getOutput(string)
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.error(status, body, "application/json")
|
|
|
|
|
|
|
|
proc jsonError*(t: typedesc[RestApiResponse], status: HttpCode = Http200,
                msg: string = "", stacktrace: string): RestApiResponse =
  ## Error response carrying a single stacktrace (wrapped into the
  ## plural ``stacktraces`` array); omitted when empty.
  var body: string
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("code", int(status.toInt()))
    writer.writeField("message", msg)
    if len(stacktrace) > 0:
      writer.writeField("stacktraces", [stacktrace])
    writer.endRecord()
    body = stream.getOutput(string)
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.error(status, body, "application/json")
|
|
|
|
|
|
|
|
proc jsonError*(t: typedesc[RestApiResponse], status: HttpCode = Http200,
                msg: string = "",
                stacktraces: openArray[string]): RestApiResponse =
  ## Error response carrying multiple stacktraces (always written, even
  ## when the array is empty).
  var body: string
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("code", int(status.toInt()))
    writer.writeField("message", msg)
    writer.writeField("stacktraces", stacktraces)
    writer.endRecord()
    body = stream.getOutput(string)
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.error(status, body, "application/json")
|
|
|
|
|
|
|
|
proc jsonErrorList*(t: typedesc[RestApiResponse],
                    status: HttpCode = Http200,
                    msg: string = "", failures: auto): RestApiResponse =
  ## Error response carrying a ``failures`` list (e.g. per-item results
  ## of a batch operation).
  var body: string
  try:
    var stream = memoryOutput()
    var writer = JsonWriter[RestJson].init(stream)
    writer.beginRecord()
    writer.writeField("code", int(status.toInt()))
    writer.writeField("message", msg)
    writer.writeField("failures", failures)
    writer.endRecord()
    body = stream.getOutput(string)
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.error(status, body, "application/json")
|
|
|
|
|
2023-01-12 17:11:38 +00:00
|
|
|
proc sszResponseVersioned*[T: SomeForkedLightClientObject](
    t: typedesc[RestApiResponse],
    entries: openArray[RestVersioned[T]]): RestApiResponse =
  ## Serialize versioned light client objects as a framed SSZ stream.
  ## Each entry is: uint64 little-endian length prefix | fork digest
  ## context bytes | SSZ payload. Entries with fork `None` are skipped.
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        for e in entries:
          withForkyUpdate(e.data):
            when lcDataFork > LightClientDataFork.None:
              # Reserve the length-prefix slot now; it is back-filled
              # once the full frame size is known.
              var cursor = stream.delayFixedSizeWrite(sizeof(uint64))
              let initPos = stream.pos
              stream.write e.sszContext.data
              var writer = SszWriter.init(stream)
              writer.writeValue forkyUpdate
              # Length covers context bytes + SSZ payload (not the prefix)
              cursor.finalWrite (stream.pos - initPos).uint64.toBytesLE()
        stream.getOutput(seq[byte])
      except SerializationError:
        default
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/octet-stream")
|
|
|
|
|
2022-09-23 15:51:04 +00:00
|
|
|
proc sszResponsePlain*(t: typedesc[RestApiResponse], res: seq[byte],
                       headers: openArray[RestKeyValueTuple] = []
                      ): RestApiResponse =
  ## Return already-encoded SSZ bytes verbatim as an
  ## ``application/octet-stream`` response.
  RestApiResponse.response(res, Http200, "application/octet-stream",
                           headers = headers)
|
|
|
|
|
2022-06-20 05:53:39 +00:00
|
|
|
proc sszResponse*(t: typedesc[RestApiResponse], data: auto,
                  headers: openArray[RestKeyValueTuple] = []
                 ): RestApiResponse =
  ## SSZ-encode `data` and return it as an ``application/octet-stream``
  ## response; the body is empty when encoding fails.
  var encoded: seq[byte]
  try:
    var stream = memoryOutput()
    var writer = SszWriter.init(stream)
    writer.writeValue(data)
    encoded = stream.getOutput(seq[byte])
  except SerializationError, IOError:
    discard  # empty body on serialization failure
  RestApiResponse.response(encoded, Http200, "application/octet-stream",
                           headers = headers)
|
2021-09-16 13:32:32 +00:00
|
|
|
|
2022-04-08 16:22:49 +00:00
|
|
|
template hexOriginal(data: openArray[byte]): string =
  ## Lowercase 0x-prefixed hex encoding, as mandated by the Beacon API.
  to0xHex(data)
|
2021-03-23 22:50:18 +00:00
|
|
|
|
2021-11-30 01:20:21 +00:00
|
|
|
proc decodeJsonString*[T](t: typedesc[T],
                          data: JsonString): Result[T, cstring] =
  ## Decode a raw JSON fragment into `T`, requiring all declared fields
  ## but tolerating unknown ones (per the Beacon API spec).
  let decoded =
    try:
      RestJson.decode(string(data), T,
                      requireAllFields = true,
                      allowUnknownFields = true)
    except SerializationError:
      return err("Unable to deserialize data")
  ok(decoded)
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
## uint64
proc writeValue*(w: var JsonWriter[RestJson], value: uint64) {.
     raises: [IOError, Defect].} =
  ## uint64 values are encoded as base-10 JSON strings (Beacon API rule).
  w.writeValue(Base10.toString(value))

proc readValue*(reader: var JsonReader[RestJson], value: var uint64) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parse a base-10 string into uint64, rejecting anything else.
  let raw = reader.readValue(string)
  let parsed = Base10.decode(uint64, raw)
  if parsed.isErr():
    reader.raiseUnexpectedValue($parsed.error() & ": " & raw)
  value = parsed.get()
|
2021-03-23 22:50:18 +00:00
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## uint8
proc writeValue*(w: var JsonWriter[RestJson], value: uint8) {.
     raises: [IOError, Defect].} =
  ## uint8 values are encoded as base-10 JSON strings.
  w.writeValue(Base10.toString(value))

proc readValue*(reader: var JsonReader[RestJson], value: var uint8) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parse a base-10 string into uint8, rejecting anything else.
  let raw = reader.readValue(string)
  let parsed = Base10.decode(uint8, raw)
  if parsed.isErr():
    reader.raiseUnexpectedValue($parsed.error() & ": " & raw)
  value = parsed.get()
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## JustificationBits
proc writeValue*(w: var JsonWriter[RestJson], value: JustificationBits) {.
     raises: [IOError, Defect].} =
  ## Encoded as a 0x-prefixed single-byte hex string.
  w.writeValue hexOriginal([uint8(value)])

proc readValue*(reader: var JsonReader[RestJson], value: var JustificationBits) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Decode a single-byte hex string into justification bits.
  let hex = reader.readValue(string)
  try:
    value = JustificationBits(hexToByteArray(hex, 1)[0])
  except ValueError:
    reader.raiseUnexpectedValue(
      "The `justification_bits` value must be a hex string")
|
2021-11-02 18:23:31 +00:00
|
|
|
|
2022-01-06 07:38:40 +00:00
|
|
|
## UInt256
proc writeValue*(w: var JsonWriter[RestJson], value: UInt256) {.
     raises: [IOError, Defect].} =
  ## UInt256 values are encoded as decimal strings.
  w.writeValue(toString(value))

proc readValue*(reader: var JsonReader[RestJson], value: var UInt256) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parse a decimal string into UInt256.
  let raw = reader.readValue(string)
  try:
    value = parse(raw, UInt256, 10)
  except ValueError:
    reader.raiseUnexpectedValue(
      "UInt256 value should be a valid decimal string")
|
2021-07-13 11:15:07 +00:00
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
## Slot
proc writeValue*(writer: var JsonWriter[RestJson], value: Slot) {.
     raises: [IOError, Defect].} =
  ## Slots are encoded as base-10 JSON strings.
  writer.writeValue(Base10.toString(uint64(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var Slot) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parse a base-10 string into a Slot.
  let raw = reader.readValue(string)
  let parsed = Base10.decode(uint64, raw)
  if parsed.isErr():
    reader.raiseUnexpectedValue($parsed.error())
  value = Slot(parsed.get())
|
|
|
|
|
|
|
|
## Epoch
proc writeValue*(writer: var JsonWriter[RestJson], value: Epoch) {.
     raises: [IOError, Defect].} =
  ## Epochs are encoded as base-10 JSON strings.
  writer.writeValue(Base10.toString(uint64(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var Epoch) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parse a base-10 string into an Epoch.
  let raw = reader.readValue(string)
  let parsed = Base10.decode(uint64, raw)
  if parsed.isErr():
    reader.raiseUnexpectedValue($parsed.error())
  value = Epoch(parsed.get())
|
|
|
|
|
2022-09-23 18:29:31 +00:00
|
|
|
## EpochParticipationFlags
proc writeValue*(writer: var JsonWriter[RestJson],
                 epochFlags: EpochParticipationFlags)
                {.raises: [IOError, Defect].} =
  ## Each participation-flag byte is emitted as a base-10 JSON string
  ## inside a top-level array.
  for flag in writer.stepwiseArrayCreation(epochFlags.asList):
    writer.writeValue $flag
|
|
|
|
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson],
                epochFlags: var EpochParticipationFlags)
               {.raises: [SerializationError, IOError, Defect].} =
  ## Decode an array of string-encoded uint8 participation flags,
  ## rejecting non-numeric strings, values above 255 and lists that
  ## exceed the SSZ list limit.
  for e in reader.readArray(string):
    let parsed = try:
      parseBiggestUInt(e)
    except ValueError:
      # The unused `as err` binding was dropped; the message is enough.
      reader.raiseUnexpectedValue(
        "A string-encoded 8-bit unsigned integer value expected")

    if parsed > uint8.high:
      reader.raiseUnexpectedValue(
        "The unsigned integer value should fit in 8 bits")

    if not epochFlags.asList.add(uint8(parsed)):
      reader.raiseUnexpectedValue(
        "The participation flags list size exceeds limit")
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
## ValidatorIndex
proc writeValue*(writer: var JsonWriter[RestJson], value: ValidatorIndex)
                {.raises: [IOError, Defect].} =
  ## Validator indices are encoded as base-10 JSON strings.
  writeValue(writer, Base10.toString(uint64(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var ValidatorIndex)
               {.raises: [IOError, SerializationError, Defect].} =
  ## Parse a base-10 string into a ValidatorIndex, bounds-checked
  ## against VALIDATOR_REGISTRY_LIMIT.
  let svalue = reader.readValue(string)
  let res = Base10.decode(uint64, svalue)
  if res.isOk():
    let v = res.get()
    if v < VALIDATOR_REGISTRY_LIMIT:
      value = ValidatorIndex(v)
    else:
      # Fixed "bigger then" -> "bigger than" in the error message
      reader.raiseUnexpectedValue(
        "Validator index is bigger than VALIDATOR_REGISTRY_LIMIT")
  else:
    reader.raiseUnexpectedValue($res.error())
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## IndexInSyncCommittee
proc writeValue*(writer: var JsonWriter[RestJson], value: IndexInSyncCommittee)
                {.raises: [IOError, Defect].} =
  ## Sync committee positions are encoded as base-10 JSON strings.
  writer.writeValue(Base10.toString(distinctBase(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var IndexInSyncCommittee)
               {.raises: [IOError, SerializationError, Defect].} =
  ## Parse a base-10 string, bounds-checked against SYNC_COMMITTEE_SIZE.
  let raw = reader.readValue(string)
  let parsed = Base10.decode(uint64, raw)
  if parsed.isErr():
    reader.raiseUnexpectedValue($parsed.error())
  let idx = parsed.get()
  if idx >= SYNC_COMMITTEE_SIZE:
    reader.raiseUnexpectedValue(
      "Index in committee is bigger than SYNC_COMMITTEE_SIZE")
  value = IndexInSyncCommittee(idx)
|
|
|
|
|
2021-04-06 08:00:26 +00:00
|
|
|
## RestValidatorIndex
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: RestValidatorIndex) {.
     raises: [IOError, Defect].} =
  ## Emits the index as a base-10 string.
  writeValue(writer, Base10.toString(uint64(value)))

proc readValue*(reader: var JsonReader[RestJson],
                value: var RestValidatorIndex) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses a base-10 string; unlike ValidatorIndex, no upper bound is
  ## enforced here (range checks happen later, at the point of use).
  let parsed = Base10.decode(uint64, reader.readValue(string))
  if parsed.isErr():
    reader.raiseUnexpectedValue($parsed.error())
  else:
    value = RestValidatorIndex(parsed.get())
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
## CommitteeIndex
proc writeValue*(writer: var JsonWriter[RestJson], value: CommitteeIndex) {.
     raises: [IOError, Defect].} =
  ## Delegates to the uint64 writer via the checked accessor.
  writeValue(writer, value.asUInt64)

proc readValue*(reader: var JsonReader[RestJson], value: var CommitteeIndex) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Reads a uint64 and validates it through CommitteeIndex.init, which
  ## performs the range check.
  var raw: uint64
  reader.readValue(raw)

  let checked = CommitteeIndex.init(raw)
  if checked.isErr():
    reader.raiseUnexpectedValue($checked.error())
  else:
    value = checked.get()
|
|
|
|
|
|
|
|
## ValidatorSig
proc writeValue*(writer: var JsonWriter[RestJson], value: ValidatorSig) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the raw signature bytes (0x-prefixed).
  writeValue(writer, hexOriginal(toRaw(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var ValidatorSig) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses a hex-encoded signature; fromHex performs format validation.
  let decoded = ValidatorSig.fromHex(reader.readValue(string))
  if decoded.isErr():
    reader.raiseUnexpectedValue($decoded.error())
  else:
    value = decoded.get()
|
|
|
|
|
|
|
|
## TrustedSig
proc writeValue*(writer: var JsonWriter[RestJson], value: TrustedSig) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the raw signature bytes (0x-prefixed).
  writeValue(writer, hexOriginal(toRaw(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var TrustedSig) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses hex via the ValidatorSig validator, then reinterprets the
  ## result as TrustedSig.
  # NOTE(review): the cast assumes TrustedSig and ValidatorSig share an
  # identical memory layout — confirm against the type definitions.
  let decoded = ValidatorSig.fromHex(reader.readValue(string))
  if decoded.isErr():
    reader.raiseUnexpectedValue($decoded.error())
  else:
    value = cast[TrustedSig](decoded.get())
|
|
|
|
|
|
|
|
## ValidatorPubKey
proc writeValue*(writer: var JsonWriter[RestJson], value: ValidatorPubKey) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the raw public-key bytes (0x-prefixed).
  writeValue(writer, hexOriginal(toRaw(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var ValidatorPubKey) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses a hex-encoded public key; fromHex performs format validation.
  let decoded = ValidatorPubKey.fromHex(reader.readValue(string))
  if decoded.isErr():
    reader.raiseUnexpectedValue($decoded.error())
  else:
    value = decoded.get()
|
|
|
|
|
|
|
|
## BitSeq
proc readValue*(reader: var JsonReader[RestJson], value: var BitSeq) {.
     raises: [IOError, SerializationError, Defect].} =
  ## A BitSeq travels as a hex string of its underlying byte payload.
  let hexData = reader.readValue(string)
  try:
    value = BitSeq hexToSeqByte(hexData)
  except ValueError:
    raiseUnexpectedValue(reader, "A BitSeq value should be a valid hex string")

proc writeValue*(writer: var JsonWriter[RestJson], value: BitSeq) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the underlying bytes (0x-prefixed).
  writeValue(writer, hexOriginal(value.bytes()))

## BitList
proc readValue*(reader: var JsonReader[RestJson], value: var BitList) {.
     raises: [IOError, SerializationError, Defect].} =
  ## A BitList is read through its BitSeq representation.
  type T = type(value)
  value = T readValue(reader, BitSeq)

proc writeValue*(writer: var JsonWriter[RestJson], value: BitList) {.
     raises: [IOError, Defect].} =
  ## A BitList is written through its BitSeq representation.
  writeValue(writer, BitSeq value)
|
|
|
|
|
2021-09-27 08:14:43 +00:00
|
|
|
## BitArray
proc readValue*(reader: var JsonReader[RestJson], value: var BitArray) {.
     raises: [IOError, SerializationError, Defect].} =
  ## A fixed-size bit array transported as a hex string of its bytes.
  let hexData = readValue(reader, string)
  try:
    hexToByteArray(hexData, value.bytes)
  except ValueError:
    raiseUnexpectedValue(reader,
                         "A BitArray value should be a valid hex string")

proc writeValue*(writer: var JsonWriter[RestJson], value: BitArray) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the underlying bytes (0x-prefixed).
  writeValue(writer, hexOriginal(value.bytes))
|
|
|
|
|
2021-11-02 18:23:31 +00:00
|
|
|
## BlockHash
proc readValue*(reader: var JsonReader[RestJson], value: var BlockHash) {.
     raises: [IOError, SerializationError, Defect].} =
  ## A block hash transported as a fixed-length hex string.
  let hexData = reader.readValue(string)
  try:
    hexToByteArray(hexData, distinctBase(value))
  except ValueError:
    raiseUnexpectedValue(reader,
                         "BlockHash value should be a valid hex string")

proc writeValue*(writer: var JsonWriter[RestJson], value: BlockHash) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the underlying bytes (0x-prefixed).
  writeValue(writer, hexOriginal(distinctBase(value)))
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
## Eth2Digest
proc readValue*(reader: var JsonReader[RestJson], value: var Eth2Digest) {.
     raises: [IOError, SerializationError, Defect].} =
  ## A 32-byte digest transported as a hex string.
  let hexData = reader.readValue(string)
  try:
    hexToByteArray(hexData, value.data)
  except ValueError:
    raiseUnexpectedValue(reader,
                         "Eth2Digest value should be a valid hex string")

proc writeValue*(writer: var JsonWriter[RestJson], value: Eth2Digest) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the digest bytes (0x-prefixed).
  writeValue(writer, hexOriginal(value.data))

## BloomLogs
proc readValue*(reader: var JsonReader[RestJson], value: var BloomLogs) {.
     raises: [IOError, SerializationError, Defect].} =
  ## A logs-bloom filter transported as a hex string of its bytes.
  let hexData = reader.readValue(string)
  try:
    hexToByteArray(hexData, value.data)
  except ValueError:
    raiseUnexpectedValue(reader,
                         "BloomLogs value should be a valid hex string")

proc writeValue*(writer: var JsonWriter[RestJson], value: BloomLogs) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the bloom bytes (0x-prefixed).
  writeValue(writer, hexOriginal(value.data))
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
## HashArray
proc readValue*(reader: var JsonReader[RestJson], value: var HashArray) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Deserializes through the plain underlying array.
  reader.readValue(value.data)

proc writeValue*(writer: var JsonWriter[RestJson], value: HashArray) {.
     raises: [IOError, Defect].} =
  ## Serializes the plain underlying array.
  writer.writeValue(value.data)

## HashList
proc readValue*(reader: var JsonReader[RestJson], value: var HashList) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Deserializes through the plain underlying list, then invalidates the
  ## cached hashes since `data` was written to directly.
  reader.readValue(value.data)
  value.resetCache()

proc writeValue*(writer: var JsonWriter[RestJson], value: HashList) {.
     raises: [IOError, Defect].} =
  ## Serializes the plain underlying list.
  writer.writeValue(value.data)
|
|
|
|
|
|
|
|
## Eth1Address
proc readValue*(reader: var JsonReader[RestJson], value: var Eth1Address) {.
     raises: [IOError, SerializationError, Defect].} =
  ## A 20-byte execution-layer address transported as a hex string.
  let hexData = reader.readValue(string)
  try:
    hexToByteArray(hexData, distinctBase(value))
  except ValueError:
    raiseUnexpectedValue(reader,
                         "Eth1Address value should be a valid hex string")

proc writeValue*(writer: var JsonWriter[RestJson], value: Eth1Address) {.
     raises: [IOError, Defect].} =
  ## Hex-encodes the address bytes (0x-prefixed).
  writeValue(writer, hexOriginal(distinctBase(value)))
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## GraffitiBytes
proc writeValue*(writer: var JsonWriter[RestJson], value: GraffitiBytes)
    {.raises: [IOError, Defect].} =
  ## Hex-encodes the 32 graffiti bytes (0x-prefixed).
  writeValue(writer, hexOriginal(distinctBase(value)))

proc readValue*(reader: var JsonReader[RestJson], T: type GraffitiBytes): T
    {.raises: [IOError, SerializationError, Defect].} =
  ## Delegates parsing/validation to `init(GraffitiBytes, ...)`; a
  ## ValueError from init is surfaced as a deserialization error.
  let raw = reader.readValue(string)
  try:
    GraffitiBytes.init(raw)
  except ValueError as err:
    reader.raiseUnexpectedValue err.msg
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## Version | ForkDigest | DomainType | GraffitiBytes
proc readValue*(
    reader: var JsonReader[RestJson],
    value: var (Version | ForkDigest | DomainType | GraffitiBytes)) {.
    raises: [IOError, SerializationError, Defect].} =
  ## All of these are fixed-size byte arrays transported as hex strings;
  ## the expected length is derived from the concrete target type.
  let hexData = reader.readValue(string)
  try:
    hexToByteArray(hexData, distinctBase(value))
  except ValueError:
    raiseUnexpectedValue(
      reader, "Expected a valid hex string with " & $value.len() & " bytes")
|
2021-03-23 22:50:18 +00:00
|
|
|
|
2022-05-20 15:25:26 +00:00
|
|
|
template unrecognizedFieldWarning =
  ## Logs (at trace level) a JSON field that the current deserializer does
  ## not recognize.  Relies on `fieldName` and `value` being visible at the
  ## instantiation site (callers iterate readObjectFields with an
  ## {.inject.}ed `fieldName`).
  # TODO: There should be a different notification mechanism for informing the
  # caller of a deserialization routine for unexpected fields.
  # The chronicles import in this module should be removed.
  trace "JSON field not recognized by the current version of Nimbus. Consider upgrading",
        fieldName, typeName = typetraits.name(typeof value)
|
|
|
|
|
2021-08-27 09:00:06 +00:00
|
|
|
## ForkedBeaconBlock
template prepareForkedBlockReading(
    reader: var JsonReader[RestJson], value: untyped,
    version: var Option[ConsensusFork],
    data: var Option[JsonString],
    blockTypeName: cstring) =
  ## Shared first pass for all forked-block readers: walks the JSON object,
  ## fills `version` from the "version" field and stashes the raw payload
  ## ("block", "block_header" or "data") into `data` for fork-specific
  ## decoding by the caller.  Raises on duplicate or missing fields.
  ## `fieldName` is {.inject.}ed so unrecognizedFieldWarning (which reads
  ## `fieldName` and `value` from the instantiation scope) works here.
  for fieldName {.inject.} in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome():
        reader.raiseUnexpectedField("Multiple version fields found",
                                    blockTypeName)
      # Version tags are matched case-insensitively.
      let vres = reader.readValue(string).toLowerAscii()
      case vres
      of "phase0":
        version = some(ConsensusFork.Phase0)
      of "altair":
        version = some(ConsensusFork.Altair)
      of "bellatrix":
        version = some(ConsensusFork.Bellatrix)
      of "capella":
        version = some(ConsensusFork.Capella)
      of "deneb":
        version = some(ConsensusFork.Deneb)
      else:
        reader.raiseUnexpectedValue("Incorrect version field value")
    of "block", "block_header", "data":
      # The payload is kept as a raw JSON string; the caller re-decodes it
      # once the fork version is known.
      if data.isSome():
        reader.raiseUnexpectedField("Multiple block or block_header fields found",
                                    blockTypeName)
      data = some(reader.readValue(JsonString))
    else:
      unrecognizedFieldWarning()

  if version.isNone():
    reader.raiseUnexpectedValue("Field version is missing")
  if data.isNone():
    reader.raiseUnexpectedValue("Field data is missing")
|
|
|
|
|
2022-08-01 06:41:47 +00:00
|
|
|
proc readValue*[BlockType: ForkedBeaconBlock](
    reader: var JsonReader[RestJson],
    value: var BlockType) {.raises: [IOError, SerializationError, Defect].} =
  ## Decodes a {"version": ..., "data"/"block"/"block_header": ...} envelope
  ## into a ForkedBeaconBlock, re-decoding the stashed payload with the
  ## fork-specific BeaconBlock type selected by `version`.
  var
    version: Option[ConsensusFork]
    data: Option[JsonString]

  prepareForkedBlockReading(reader, value, version, data, "ForkedBeaconBlock")

  # version/data are guaranteed non-empty by prepareForkedBlockReading.
  case version.get():
  of ConsensusFork.Phase0:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             phase0.BeaconBlock,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[phase0.BeaconBlock]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect phase0 block format")
    # The `.BlockType` conversion supports instantiations where BlockType
    # differs from ForkedBeaconBlock itself.
    value = ForkedBeaconBlock.init(res.get()).BlockType
  of ConsensusFork.Altair:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             altair.BeaconBlock,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[altair.BeaconBlock]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect altair block format")
    value = ForkedBeaconBlock.init(res.get()).BlockType
  of ConsensusFork.Bellatrix:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             bellatrix.BeaconBlock,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[bellatrix.BeaconBlock]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect bellatrix block format")
    value = ForkedBeaconBlock.init(res.get()).BlockType
  of ConsensusFork.Capella:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             capella.BeaconBlock,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[capella.BeaconBlock]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect capella block format")
    value = ForkedBeaconBlock.init(res.get()).BlockType
  of ConsensusFork.Deneb:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             deneb.BeaconBlock,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[deneb.BeaconBlock]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect deneb block format")
    value = ForkedBeaconBlock.init(res.get()).BlockType
|
2022-05-17 16:55:03 +00:00
|
|
|
|
2022-11-24 09:14:05 +00:00
|
|
|
proc readValue*[BlockType: ForkedBlindedBeaconBlock](
    reader: var JsonReader[RestJson],
    value: var BlockType
  ) {.raises: [IOError, SerializationError, Defect].} =
  ## Decodes a versioned envelope into a ForkedBlindedBeaconBlock.  Forks
  ## before the merge carry plain BeaconBlocks; bellatrix onwards carry
  ## MEV blinded blocks.  Unlike the ForkedBeaconBlock reader, decode
  ## failures here include the formatted serialization error message.
  var
    version: Option[ConsensusFork]
    data: Option[JsonString]

  prepareForkedBlockReading(reader, value, version, data,
                            "ForkedBlindedBeaconBlock")

  case version.get():
  of ConsensusFork.Phase0:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        phase0.BeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect phase0 block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Phase0,
                                     phase0Data: res)
  of ConsensusFork.Altair:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        altair.BeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect altair block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Altair,
                                     altairData: res)
  of ConsensusFork.Bellatrix:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        bellatrix_mev.BlindedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect bellatrix block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Bellatrix,
                                     bellatrixData: res)
  of ConsensusFork.Capella:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        capella_mev.BlindedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect capella block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Capella,
                                     capellaData: res)
  of ConsensusFork.Deneb:
    # NOTE(review): the deneb branch decodes with capella_mev's
    # BlindedBeaconBlock — presumably denebData shares that type until a
    # dedicated deneb MEV type exists; confirm against the type definition.
    let res =
      try:
        RestJson.decode(string(data.get()),
                        capella_mev.BlindedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect deneb block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Deneb,
                                     denebData: res)
|
2022-12-05 16:29:09 +00:00
|
|
|
|
2022-08-01 06:41:47 +00:00
|
|
|
proc readValue*[BlockType: Web3SignerForkedBeaconBlock](
    reader: var JsonReader[RestJson],
    value: var BlockType) {.raises: [IOError, SerializationError, Defect].} =
  ## Decodes a versioned envelope into a Web3SignerForkedBeaconBlock.
  ## phase0/altair carry full BeaconBlocks; bellatrix onwards carry only a
  ## BeaconBlockHeader (matching the sibling writeValue, which emits a
  ## "block_header" field for those forks per the Web3Signer API).
  var
    version: Option[ConsensusFork]
    data: Option[JsonString]

  prepareForkedBlockReading(reader, value, version, data,
                            "Web3SignerForkedBeaconBlock")

  case version.get():
  of ConsensusFork.Phase0:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             phase0.BeaconBlock,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[phase0.BeaconBlock]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect phase0 block format")
    value = Web3SignerForkedBeaconBlock(
      kind: ConsensusFork.Phase0,
      phase0Data: res.get())
  of ConsensusFork.Altair:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             altair.BeaconBlock,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[altair.BeaconBlock]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect altair block format")
    value = Web3SignerForkedBeaconBlock(
      kind: ConsensusFork.Altair,
      altairData: res.get())
  of ConsensusFork.Bellatrix:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             BeaconBlockHeader,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[BeaconBlockHeader]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect bellatrix block format")
    value = Web3SignerForkedBeaconBlock(
      kind: ConsensusFork.Bellatrix,
      bellatrixData: res.get())
  of ConsensusFork.Capella:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             BeaconBlockHeader,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[BeaconBlockHeader]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect capella block format")
    value = Web3SignerForkedBeaconBlock(
      kind: ConsensusFork.Capella,
      capellaData: res.get())
  of ConsensusFork.Deneb:
    let res =
      try:
        some(RestJson.decode(string(data.get()),
                             BeaconBlockHeader,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[BeaconBlockHeader]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect deneb block format")
    value = Web3SignerForkedBeaconBlock(
      kind: ConsensusFork.Deneb,
      denebData: res.get())
|
2022-12-05 16:29:09 +00:00
|
|
|
|
2022-12-08 21:57:47 +00:00
|
|
|
proc writeValue*[
    BlockType: Web3SignerForkedBeaconBlock](
    writer: var JsonWriter[RestJson],
    value: BlockType) {.raises: [IOError, Defect].} =
  ## Web3Signer wire format: an upper-case "version" tag plus either a full
  ## "block" (phase0/altair) or only a "block_header" (bellatrix onwards).
  ## The upper-case tags below are the compile-time `toUpperAscii` of the
  ## fork names used elsewhere in this module.
  # https://consensys.github.io/web3signer/web3signer-eth2.html#tag/Signing/operation/ETH2_SIGN
  # https://github.com/ConsenSys/web3signer/blob/d51337e96ba5ce410222943556bed7c4856b8e57/core/src/main/java/tech/pegasys/web3signer/core/service/http/handlers/signing/eth2/json/BlockRequestDeserializer.java#L42-L58
  writer.beginRecord()
  case value.kind
  of ConsensusFork.Phase0:
    writer.writeField("version", "PHASE0")
    writer.writeField("block", value.phase0Data)
  of ConsensusFork.Altair:
    writer.writeField("version", "ALTAIR")
    writer.writeField("block", value.altairData)
  of ConsensusFork.Bellatrix:
    writer.writeField("version", "BELLATRIX")
    writer.writeField("block_header", value.bellatrixData)
  of ConsensusFork.Capella:
    writer.writeField("version", "CAPELLA")
    writer.writeField("block_header", value.capellaData)
  of ConsensusFork.Deneb:
    writer.writeField("version", "DENEB")
    writer.writeField("block_header", value.denebData)
  writer.endRecord()
|
2022-05-17 16:55:03 +00:00
|
|
|
|
2022-10-31 17:39:03 +00:00
|
|
|
proc writeValue*[
    BlockType: ForkedBeaconBlock](
    writer: var JsonWriter[RestJson],
    value: BlockType) {.raises: [IOError, Defect].} =
  ## Serializes a forked block as {"version": ..., "data": ...} with a
  ## lower-case version tag, per the beacon API convention.

  template forkIdentifier(id: string): auto =
    # For ForkedBeaconBlock itself the tag stays lower-case; the upper-case
    # branch presumably serves other instantiations of this generic (given
    # the constraint, which types reach it is not visible here) —
    # TODO confirm.
    when BlockType is ForkedBeaconBlock:
      id
    else:
      (static toUpperAscii id)

  writer.beginRecord()
  case value.kind
  of ConsensusFork.Phase0:
    writer.writeField("version", forkIdentifier "phase0")
    writer.writeField("data", value.phase0Data)
  of ConsensusFork.Altair:
    writer.writeField("version", forkIdentifier "altair")
    writer.writeField("data", value.altairData)
  of ConsensusFork.Bellatrix:
    writer.writeField("version", forkIdentifier "bellatrix")
    writer.writeField("data", value.bellatrixData)
  of ConsensusFork.Capella:
    writer.writeField("version", forkIdentifier "capella")
    writer.writeField("data", value.capellaData)
  of ConsensusFork.Deneb:
    writer.writeField("version", forkIdentifier "deneb")
    writer.writeField("data", value.denebData)
  writer.endRecord()
|
|
|
|
|
2022-02-13 15:21:55 +00:00
|
|
|
## RestPublishedBeaconBlockBody
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson],
|
|
|
|
value: var RestPublishedBeaconBlockBody) {.
|
|
|
|
raises: [IOError, SerializationError, Defect].} =
|
|
|
|
var
|
|
|
|
randao_reveal: Option[ValidatorSig]
|
|
|
|
eth1_data: Option[Eth1Data]
|
|
|
|
graffiti: Option[GraffitiBytes]
|
|
|
|
proposer_slashings: Option[
|
|
|
|
List[ProposerSlashing, Limit MAX_PROPOSER_SLASHINGS]]
|
|
|
|
attester_slashings: Option[
|
|
|
|
List[AttesterSlashing, Limit MAX_ATTESTER_SLASHINGS]]
|
|
|
|
attestations: Option[List[Attestation, Limit MAX_ATTESTATIONS]]
|
|
|
|
deposits: Option[List[Deposit, Limit MAX_DEPOSITS]]
|
|
|
|
voluntary_exits: Option[
|
|
|
|
List[SignedVoluntaryExit, Limit MAX_VOLUNTARY_EXITS]]
|
|
|
|
sync_aggregate: Option[SyncAggregate]
|
2023-02-03 15:12:11 +00:00
|
|
|
execution_payload: Option[RestExecutionPayload]
|
|
|
|
bls_to_execution_changes: Option[SignedBLSToExecutionChangeList]
|
2022-02-13 15:21:55 +00:00
|
|
|
|
|
|
|
for fieldName in readObjectFields(reader):
|
|
|
|
case fieldName
|
|
|
|
of "randao_reveal":
|
|
|
|
if randao_reveal.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `randao_reveal` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
randao_reveal = some(reader.readValue(ValidatorSig))
|
|
|
|
of "eth1_data":
|
|
|
|
if eth1_data.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `eth1_data` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
eth1_data = some(reader.readValue(Eth1Data))
|
|
|
|
of "graffiti":
|
|
|
|
if graffiti.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `graffiti` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
graffiti = some(reader.readValue(GraffitiBytes))
|
|
|
|
of "proposer_slashings":
|
|
|
|
if proposer_slashings.isSome():
|
|
|
|
reader.raiseUnexpectedField(
|
|
|
|
"Multiple `proposer_slashings` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
proposer_slashings = some(
|
|
|
|
reader.readValue(List[ProposerSlashing, Limit MAX_PROPOSER_SLASHINGS]))
|
|
|
|
of "attester_slashings":
|
|
|
|
if attester_slashings.isSome():
|
|
|
|
reader.raiseUnexpectedField(
|
|
|
|
"Multiple `attester_slashings` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
attester_slashings = some(
|
|
|
|
reader.readValue(List[AttesterSlashing, Limit MAX_ATTESTER_SLASHINGS]))
|
|
|
|
of "attestations":
|
|
|
|
if attestations.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `attestations` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
attestations = some(
|
|
|
|
reader.readValue(List[Attestation, Limit MAX_ATTESTATIONS]))
|
|
|
|
of "deposits":
|
|
|
|
if deposits.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `deposits` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
deposits = some(reader.readValue(List[Deposit, Limit MAX_DEPOSITS]))
|
|
|
|
of "voluntary_exits":
|
|
|
|
if voluntary_exits.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `voluntary_exits` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
voluntary_exits = some(
|
|
|
|
reader.readValue(List[SignedVoluntaryExit, Limit MAX_VOLUNTARY_EXITS]))
|
|
|
|
of "sync_aggregate":
|
|
|
|
if sync_aggregate.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `sync_aggregate` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
sync_aggregate = some(reader.readValue(SyncAggregate))
|
|
|
|
of "execution_payload":
|
|
|
|
if execution_payload.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `execution_payload` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
2023-02-03 15:12:11 +00:00
|
|
|
execution_payload = some(reader.readValue(RestExecutionPayload))
|
|
|
|
of "bls_to_execution_changes":
|
|
|
|
if bls_to_execution_changes.isSome():
|
|
|
|
reader.raiseUnexpectedField("Multiple `bls_to_execution_changes` fields found",
|
|
|
|
"RestPublishedBeaconBlockBody")
|
|
|
|
bls_to_execution_changes = some(reader.readValue(SignedBLSToExecutionChangeList))
|
2022-02-13 15:21:55 +00:00
|
|
|
else:
|
2022-05-20 15:25:26 +00:00
|
|
|
unrecognizedFieldWarning()
|
2022-02-13 15:21:55 +00:00
|
|
|
|
|
|
|
if randao_reveal.isNone():
|
|
|
|
reader.raiseUnexpectedValue("Field `randao_reveal` is missing")
|
|
|
|
if eth1_data.isNone():
|
|
|
|
reader.raiseUnexpectedValue("Field `eth1_data` is missing")
|
|
|
|
if graffiti.isNone():
|
|
|
|
reader.raiseUnexpectedValue("Field `graffiti` is missing")
|
|
|
|
if proposer_slashings.isNone():
|
|
|
|
reader.raiseUnexpectedValue("Field `proposer_slashings` is missing")
|
|
|
|
if attester_slashings.isNone():
|
|
|
|
reader.raiseUnexpectedValue("Field `attester_slashings` is missing")
|
|
|
|
if attestations.isNone():
|
|
|
|
reader.raiseUnexpectedValue("Field `attestations` is missing")
|
|
|
|
if deposits.isNone():
|
|
|
|
reader.raiseUnexpectedValue("Field `deposits` is missing")
|
|
|
|
if voluntary_exits.isNone():
|
|
|
|
reader.raiseUnexpectedValue("Field `voluntary_exits` is missing")
|
|
|
|
|
|
|
|
let bodyKind =
|
2023-02-03 15:12:11 +00:00
|
|
|
if execution_payload.isSome() and
|
|
|
|
execution_payload.get().withdrawals.isSome() and
|
|
|
|
bls_to_execution_changes.isSome() and
|
|
|
|
sync_aggregate.isSome():
|
|
|
|
ConsensusFork.Capella
|
|
|
|
elif execution_payload.isSome() and sync_aggregate.isSome():
|
2023-01-28 19:53:41 +00:00
|
|
|
ConsensusFork.Bellatrix
|
2022-02-13 15:21:55 +00:00
|
|
|
elif execution_payload.isNone() and sync_aggregate.isSome():
|
2023-01-28 19:53:41 +00:00
|
|
|
ConsensusFork.Altair
|
2022-02-13 15:21:55 +00:00
|
|
|
else:
|
2023-01-28 19:53:41 +00:00
|
|
|
ConsensusFork.Phase0
|
2022-02-13 15:21:55 +00:00
|
|
|
|
2023-02-03 15:12:11 +00:00
|
|
|
template ep_src: auto = execution_payload.get()
|
|
|
|
template copy_ep_bellatrix(ep_dst: auto) =
|
|
|
|
assign(ep_dst.parent_hash, ep_src.parent_hash)
|
|
|
|
assign(ep_dst.fee_recipient, ep_src.fee_recipient)
|
|
|
|
assign(ep_dst.state_root, ep_src.state_root)
|
|
|
|
assign(ep_dst.receipts_root, ep_src.receipts_root)
|
|
|
|
assign(ep_dst.logs_bloom, ep_src.logs_bloom)
|
|
|
|
assign(ep_dst.prev_randao, ep_src.prev_randao)
|
|
|
|
assign(ep_dst.block_number, ep_src.block_number)
|
|
|
|
assign(ep_dst.gas_limit, ep_src.gas_limit)
|
|
|
|
assign(ep_dst.gas_used, ep_src.gas_used)
|
|
|
|
assign(ep_dst.timestamp, ep_src.timestamp)
|
2023-02-14 06:48:39 +00:00
|
|
|
assign(ep_dst.extra_data, ep_src.extra_data)
|
|
|
|
assign(ep_dst.base_fee_per_gas, ep_src.base_fee_per_gas)
|
2023-02-03 15:12:11 +00:00
|
|
|
assign(ep_dst.block_hash, ep_src.block_hash)
|
|
|
|
assign(ep_dst.transactions, ep_src.transactions)
|
|
|
|
|
2022-02-13 15:21:55 +00:00
|
|
|
case bodyKind
|
2023-01-28 19:53:41 +00:00
|
|
|
of ConsensusFork.Phase0:
|
2022-02-13 15:21:55 +00:00
|
|
|
value = RestPublishedBeaconBlockBody(
|
2023-01-28 19:53:41 +00:00
|
|
|
kind: ConsensusFork.Phase0,
|
2022-02-13 15:21:55 +00:00
|
|
|
phase0Body: phase0.BeaconBlockBody(
|
|
|
|
randao_reveal: randao_reveal.get(),
|
|
|
|
eth1_data: eth1_data.get(),
|
|
|
|
graffiti: graffiti.get(),
|
|
|
|
proposer_slashings: proposer_slashings.get(),
|
|
|
|
attester_slashings: attester_slashings.get(),
|
|
|
|
attestations: attestations.get(),
|
|
|
|
deposits: deposits.get(),
|
|
|
|
voluntary_exits: voluntary_exits.get()
|
|
|
|
)
|
|
|
|
)
|
2023-01-28 19:53:41 +00:00
|
|
|
of ConsensusFork.Altair:
|
2022-02-13 15:21:55 +00:00
|
|
|
value = RestPublishedBeaconBlockBody(
|
2023-01-28 19:53:41 +00:00
|
|
|
kind: ConsensusFork.Altair,
|
2022-02-13 15:21:55 +00:00
|
|
|
altairBody: altair.BeaconBlockBody(
|
|
|
|
randao_reveal: randao_reveal.get(),
|
|
|
|
eth1_data: eth1_data.get(),
|
|
|
|
graffiti: graffiti.get(),
|
|
|
|
proposer_slashings: proposer_slashings.get(),
|
|
|
|
attester_slashings: attester_slashings.get(),
|
|
|
|
attestations: attestations.get(),
|
|
|
|
deposits: deposits.get(),
|
|
|
|
voluntary_exits: voluntary_exits.get(),
|
|
|
|
sync_aggregate: sync_aggregate.get()
|
|
|
|
)
|
|
|
|
)
|
2023-01-28 19:53:41 +00:00
|
|
|
of ConsensusFork.Bellatrix:
|
2022-02-13 15:21:55 +00:00
|
|
|
value = RestPublishedBeaconBlockBody(
|
2023-01-28 19:53:41 +00:00
|
|
|
kind: ConsensusFork.Bellatrix,
|
2022-02-13 15:21:55 +00:00
|
|
|
bellatrixBody: bellatrix.BeaconBlockBody(
|
|
|
|
randao_reveal: randao_reveal.get(),
|
|
|
|
eth1_data: eth1_data.get(),
|
|
|
|
graffiti: graffiti.get(),
|
|
|
|
proposer_slashings: proposer_slashings.get(),
|
|
|
|
attester_slashings: attester_slashings.get(),
|
|
|
|
attestations: attestations.get(),
|
|
|
|
deposits: deposits.get(),
|
|
|
|
voluntary_exits: voluntary_exits.get(),
|
|
|
|
sync_aggregate: sync_aggregate.get(),
|
|
|
|
)
|
|
|
|
)
|
2023-02-03 15:12:11 +00:00
|
|
|
copy_ep_bellatrix(value.bellatrixBody.execution_payload)
|
2023-01-28 19:53:41 +00:00
|
|
|
of ConsensusFork.Capella:
|
2023-02-03 15:12:11 +00:00
|
|
|
value = RestPublishedBeaconBlockBody(
|
|
|
|
kind: ConsensusFork.Capella,
|
|
|
|
capellaBody: capella.BeaconBlockBody(
|
|
|
|
randao_reveal: randao_reveal.get(),
|
|
|
|
eth1_data: eth1_data.get(),
|
|
|
|
graffiti: graffiti.get(),
|
|
|
|
proposer_slashings: proposer_slashings.get(),
|
|
|
|
attester_slashings: attester_slashings.get(),
|
|
|
|
attestations: attestations.get(),
|
|
|
|
deposits: deposits.get(),
|
|
|
|
voluntary_exits: voluntary_exits.get(),
|
|
|
|
sync_aggregate: sync_aggregate.get(),
|
|
|
|
bls_to_execution_changes: bls_to_execution_changes.get()
|
|
|
|
)
|
|
|
|
)
|
|
|
|
copy_ep_bellatrix(value.capellaBody.execution_payload)
|
|
|
|
assign(
|
|
|
|
value.capellaBody.execution_payload.withdrawals,
|
|
|
|
ep_src.withdrawals.get())
|
2023-03-04 13:35:39 +00:00
|
|
|
of ConsensusFork.Deneb:
|
2023-02-23 10:37:45 +00:00
|
|
|
reader.raiseUnexpectedValue($denebImplementationMissing)
|
2022-02-13 15:21:55 +00:00
|
|
|
|
|
|
|
## RestPublishedBeaconBlock
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestPublishedBeaconBlock) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses a beacon block as published by a validator client. The consensus
  ## fork is not carried explicitly; it is taken from the nested `body`
  ## object, whose own parser infers it from the fields that are present.
  var
    slot: Option[Slot]
    proposer_index: Option[uint64]
    parent_root: Option[Eth2Digest]
    state_root: Option[Eth2Digest]
    blockBody: Option[RestPublishedBeaconBlockBody]

  # Reject a duplicated field, otherwise decode it into `target`.
  template setOnce(name: static string, target, FieldType: untyped) =
    if target.isSome():
      reader.raiseUnexpectedField(
        "Multiple `" & name & "` fields found", "RestPublishedBeaconBlock")
    target = some(reader.readValue(FieldType))

  for fieldName in readObjectFields(reader):
    case fieldName
    of "slot":
      setOnce("slot", slot, Slot)
    of "proposer_index":
      setOnce("proposer_index", proposer_index, uint64)
    of "parent_root":
      setOnce("parent_root", parent_root, Eth2Digest)
    of "state_root":
      setOnce("state_root", state_root, Eth2Digest)
    of "body":
      setOnce("body", blockBody, RestPublishedBeaconBlockBody)
    else:
      unrecognizedFieldWarning()

  # All envelope fields are mandatory.
  template requireField(name: static string, target: untyped) =
    if target.isNone():
      reader.raiseUnexpectedValue("Field `" & name & "` is missing")

  requireField("slot", slot)
  requireField("proposer_index", proposer_index)
  requireField("parent_root", parent_root)
  requireField("state_root", state_root)
  requireField("body", blockBody)

  let body = blockBody.get()
  value = RestPublishedBeaconBlock(
    case body.kind
    of ConsensusFork.Phase0:
      ForkedBeaconBlock.init(
        phase0.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.phase0Body))
    of ConsensusFork.Altair:
      ForkedBeaconBlock.init(
        altair.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.altairBody))
    of ConsensusFork.Bellatrix:
      ForkedBeaconBlock.init(
        bellatrix.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.bellatrixBody))
    of ConsensusFork.Capella:
      ForkedBeaconBlock.init(
        capella.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.capellaBody))
    of ConsensusFork.Deneb:
      ForkedBeaconBlock.init(
        deneb.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.denebBody))
  )
|
|
|
|
|
|
|
|
## RestPublishedSignedBeaconBlock
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestPublishedSignedBeaconBlock) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses the `{"message": ..., "signature": ...}` wrapper published by a
  ## validator client and pairs the signature with the fork-specific block.
  var
    signature: Option[ValidatorSig]
    message: Option[RestPublishedBeaconBlock]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "message":
      if message.isSome():
        reader.raiseUnexpectedField("Multiple `message` fields found",
                                    "RestPublishedSignedBeaconBlock")
      message = some(reader.readValue(RestPublishedBeaconBlock))
    of "signature":
      if signature.isSome():
        reader.raiseUnexpectedField("Multiple `signature` fields found",
                                    "RestPublishedSignedBeaconBlock")
      signature = some(reader.readValue(ValidatorSig))
    else:
      unrecognizedFieldWarning()

  if signature.isNone():
    reader.raiseUnexpectedValue("Field `signature` is missing")
  if message.isNone():
    reader.raiseUnexpectedValue("Field `message` is missing")

  # `RestPublishedBeaconBlock` is a distinct alias of `ForkedBeaconBlock`;
  # unwrap it to dispatch on the parsed fork.
  let blck = ForkedBeaconBlock(message.get())
  value = RestPublishedSignedBeaconBlock(
    case blck.kind
    of ConsensusFork.Phase0:
      ForkedSignedBeaconBlock.init(
        phase0.SignedBeaconBlock(
          message: blck.phase0Data,
          signature: signature.get()))
    of ConsensusFork.Altair:
      ForkedSignedBeaconBlock.init(
        altair.SignedBeaconBlock(
          message: blck.altairData,
          signature: signature.get()))
    of ConsensusFork.Bellatrix:
      ForkedSignedBeaconBlock.init(
        bellatrix.SignedBeaconBlock(
          message: blck.bellatrixData,
          signature: signature.get()))
    of ConsensusFork.Capella:
      ForkedSignedBeaconBlock.init(
        capella.SignedBeaconBlock(
          message: blck.capellaData,
          signature: signature.get()))
    of ConsensusFork.Deneb:
      ForkedSignedBeaconBlock.init(
        deneb.SignedBeaconBlock(
          message: blck.denebData,
          signature: signature.get()))
  )
|
|
|
|
|
2021-08-27 09:00:06 +00:00
|
|
|
## ForkedSignedBeaconBlock
proc readValue*(reader: var JsonReader[RestJson],
                value: var ForkedSignedBeaconBlock) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses the versioned `{"version": ..., "data": ...}` envelope, decoding
  ## `data` as the signed block type matching `version` and caching the
  ## block root afterwards.
  var
    version: Option[ConsensusFork]
    data: Option[JsonString]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome():
        reader.raiseUnexpectedField("Multiple version fields found",
                                    "ForkedSignedBeaconBlock")
      let vres = reader.readValue(string)
      version = case vres
      of "phase0": some(ConsensusFork.Phase0)
      of "altair": some(ConsensusFork.Altair)
      of "bellatrix": some(ConsensusFork.Bellatrix)
      of "capella": some(ConsensusFork.Capella)
      of "deneb": some(ConsensusFork.Deneb)
      else: reader.raiseUnexpectedValue("Incorrect version field value")
    of "data":
      if data.isSome():
        reader.raiseUnexpectedField("Multiple data fields found",
                                    "ForkedSignedBeaconBlock")
      data = some(reader.readValue(JsonString))
    else:
      unrecognizedFieldWarning()

  if version.isNone():
    reader.raiseUnexpectedValue("Field version is missing")
  if data.isNone():
    reader.raiseUnexpectedValue("Field data is missing")

  # Decode `data` as the fork-specific signed block type, reporting a
  # fork-specific error message on malformed input.
  template decodeBlock(BlockType: untyped, forkName: static string) =
    let res =
      try:
        some(RestJson.decode(string(data.get()), BlockType,
                             requireAllFields = true,
                             allowUnknownFields = true))
      except SerializationError:
        none[BlockType]()
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect " & forkName & " block format")
    value = ForkedSignedBeaconBlock.init(res.get())

  case version.get():
  of ConsensusFork.Phase0:
    decodeBlock(phase0.SignedBeaconBlock, "phase0")
  of ConsensusFork.Altair:
    decodeBlock(altair.SignedBeaconBlock, "altair")
  of ConsensusFork.Bellatrix:
    decodeBlock(bellatrix.SignedBeaconBlock, "bellatrix")
  of ConsensusFork.Capella:
    decodeBlock(capella.SignedBeaconBlock, "capella")
  of ConsensusFork.Deneb:
    decodeBlock(deneb.SignedBeaconBlock, "deneb")

  # The cached root is not part of the JSON representation; recompute it.
  withBlck(value):
    blck.root = hash_tree_root(blck.message)
|
2021-08-27 09:00:06 +00:00
|
|
|
|
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: ForkedSignedBeaconBlock) {.
     raises: [IOError, Defect].} =
  ## Emits the standard `{"version": ..., "data": ...}` REST envelope for a
  ## forked signed beacon block: version string first, then the payload.
  writer.beginRecord()
  writer.writeField(
    "version",
    case value.kind
    of ConsensusFork.Phase0: "phase0"
    of ConsensusFork.Altair: "altair"
    of ConsensusFork.Bellatrix: "bellatrix"
    of ConsensusFork.Capella: "capella"
    of ConsensusFork.Deneb: "deneb")
  case value.kind
  of ConsensusFork.Phase0:
    writer.writeField("data", value.phase0Data)
  of ConsensusFork.Altair:
    writer.writeField("data", value.altairData)
  of ConsensusFork.Bellatrix:
    writer.writeField("data", value.bellatrixData)
  of ConsensusFork.Capella:
    writer.writeField("data", value.capellaData)
  of ConsensusFork.Deneb:
    writer.writeField("data", value.denebData)
  writer.endRecord()
|
|
|
|
|
2021-11-05 07:34:34 +00:00
|
|
|
# ForkedHashedBeaconState is used where a `ForkedBeaconState` normally would
# be used, mainly because caching the hash early on is easier to do
proc readValue*(reader: var JsonReader[RestJson],
                value: var ForkedHashedBeaconState) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses the versioned state envelope, decoding `data` as the beacon
  ## state type matching `version` and refreshing the cached state root.
  var
    version: Option[ConsensusFork]
    data: Option[JsonString]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome():
        reader.raiseUnexpectedField("Multiple version fields found",
                                    "ForkedBeaconState")
      let vres = reader.readValue(string)
      version = case vres
      of "phase0": some(ConsensusFork.Phase0)
      of "altair": some(ConsensusFork.Altair)
      of "bellatrix": some(ConsensusFork.Bellatrix)
      of "capella": some(ConsensusFork.Capella)
      of "deneb": some(ConsensusFork.Deneb)
      else: reader.raiseUnexpectedValue("Incorrect version field value")
    of "data":
      if data.isSome():
        reader.raiseUnexpectedField("Multiple data fields found",
                                    "ForkedBeaconState")
      data = some(reader.readValue(JsonString))
    else:
      unrecognizedFieldWarning()

  if version.isNone():
    reader.raiseUnexpectedValue("Field version is missing")
  if data.isNone():
    reader.raiseUnexpectedValue("Field data is missing")

  # Use a temporary to avoid stack instances and `value` mutation in case of
  # exception
  let tmp = (ref ForkedHashedBeaconState)(kind: version.get())

  # Publish the decoded branch of `tmp` into `value`, then recompute the
  # cached root of that branch.
  template toValue(field: untyped) =
    if tmp[].kind == value.kind:
      assign(value.field, tmp[].field)
    else:
      value = tmp[] # slow, but rare (hopefully)
    value.field.root = hash_tree_root(value.field.data)

  # Decode `data` into the matching branch of `tmp`, raising with a
  # fork-specific message on malformed input.
  template decodeState(field, StateType: untyped, forkName: static string) =
    try:
      tmp[].field.data = RestJson.decode(
        string(data.get()),
        StateType,
        requireAllFields = true,
        allowUnknownFields = true)
    except SerializationError:
      reader.raiseUnexpectedValue(
        "Incorrect " & forkName & " beacon state format")
    toValue(field)

  case version.get():
  of ConsensusFork.Phase0:
    decodeState(phase0Data, phase0.BeaconState, "phase0")
  of ConsensusFork.Altair:
    decodeState(altairData, altair.BeaconState, "altair")
  of ConsensusFork.Bellatrix:
    decodeState(bellatrixData, bellatrix.BeaconState, "bellatrix")
  of ConsensusFork.Capella:
    decodeState(capellaData, capella.BeaconState, "capella")
  of ConsensusFork.Deneb:
    decodeState(denebData, deneb.BeaconState, "deneb")
|
2021-09-16 13:32:32 +00:00
|
|
|
|
2022-06-20 05:38:56 +00:00
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson], value: ForkedHashedBeaconState)
    {.raises: [IOError, Defect].} =
  ## Emits the `{"version": ..., "data": ...}` REST envelope for a forked
  ## beacon state; only the inner `.data` is serialized, never the cached
  ## root.
  writer.beginRecord()
  writer.writeField(
    "version",
    case value.kind
    of ConsensusFork.Phase0: "phase0"
    of ConsensusFork.Altair: "altair"
    of ConsensusFork.Bellatrix: "bellatrix"
    of ConsensusFork.Capella: "capella"
    of ConsensusFork.Deneb: "deneb")
  case value.kind
  of ConsensusFork.Phase0:
    writer.writeField("data", value.phase0Data.data)
  of ConsensusFork.Altair:
    writer.writeField("data", value.altairData.data)
  of ConsensusFork.Bellatrix:
    writer.writeField("data", value.bellatrixData.data)
  of ConsensusFork.Capella:
    writer.writeField("data", value.capellaData.data)
  of ConsensusFork.Deneb:
    writer.writeField("data", value.denebData.data)
  writer.endRecord()
|
|
|
|
|
2023-04-16 06:07:07 +00:00
|
|
|
## SomeForkedLightClientObject
proc readValue*[T: SomeForkedLightClientObject](
    reader: var JsonReader[RestJson], value: var T) {.
    raises: [IOError, SerializationError, Defect].} =
  ## Parses the versioned envelope of a forked light client object,
  ## mapping the consensus fork in `version` to a light client data fork
  ## and decoding `data` as the corresponding forky type.
  var
    version: Opt[ConsensusFork]
    data: Opt[JsonString]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome:
        reader.raiseUnexpectedField("Multiple version fields found", T.name)
      let fork =
        decodeEthConsensusVersion(reader.readValue(string)).valueOr:
          reader.raiseUnexpectedValue("Incorrect version field value")
      version.ok fork
    of "data":
      if data.isSome:
        reader.raiseUnexpectedField("Multiple data fields found", T.name)
      data.ok reader.readValue(JsonString)
    else:
      unrecognizedFieldWarning()

  if version.isNone:
    reader.raiseUnexpectedValue("Field version is missing")
  if data.isNone:
    reader.raiseUnexpectedValue("Field data is missing")

  withLcDataFork(lcDataForkAtConsensusFork(version.get)):
    when lcDataFork > LightClientDataFork.None:
      value = T(kind: lcDataFork)
      try:
        value.forky(lcDataFork) = RestJson.decode(
          string(data.get()),
          T.Forky(lcDataFork),
          requireAllFields = true,
          allowUnknownFields = true)
      except SerializationError:
        reader.raiseUnexpectedValue("Incorrect format (" & $lcDataFork & ")")
    else:
      # Consensus forks that predate light client data cannot be decoded
      reader.raiseUnexpectedValue("Unsupported fork " & $version.get)
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## Web3SignerRequest
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: Web3SignerRequest) {.
     raises: [IOError, Defect].} =
  ## Serializes a signing request in the Web3Signer REST wire format.
  ##
  ## Every request kind except `Deposit` carries mandatory fork information,
  ## so the shared preamble (type / fork_info / optional signingRoot, in that
  ## order) is factored into a template; each branch then only writes its
  ## kind-specific payload field. Field order and all strings are unchanged
  ## from the previous hand-expanded version.
  template writeForkedCommon(typeName: static string) =
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", typeName)
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)

  writer.beginRecord()
  case value.kind
  of Web3SignerRequestKind.AggregationSlot:
    writeForkedCommon("AGGREGATION_SLOT")
    writer.writeField("aggregation_slot", value.aggregationSlot)
  of Web3SignerRequestKind.AggregateAndProof:
    writeForkedCommon("AGGREGATE_AND_PROOF")
    writer.writeField("aggregate_and_proof", value.aggregateAndProof)
  of Web3SignerRequestKind.Attestation:
    writeForkedCommon("ATTESTATION")
    writer.writeField("attestation", value.attestation)
  of Web3SignerRequestKind.Block:
    writeForkedCommon("BLOCK")
    writer.writeField("block", value.blck)
  of Web3SignerRequestKind.BlockV2:
    writeForkedCommon("BLOCK_V2")
    # https://github.com/ConsenSys/web3signer/blob/41c0cbfabcb1fca9587b59e058b7eb29f152c60c/core/src/main/resources/openapi-specs/eth2/signing/schemas.yaml#L418-L497
    writer.writeField("beacon_block", value.beaconBlock)
    if isSome(value.proofs):
      writer.writeField("proofs", value.proofs.get())
  of Web3SignerRequestKind.Deposit:
    # `DEPOSIT` is the only request kind that carries no fork information
    writer.writeField("type", "DEPOSIT")
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("deposit", value.deposit)
  of Web3SignerRequestKind.RandaoReveal:
    writeForkedCommon("RANDAO_REVEAL")
    writer.writeField("randao_reveal", value.randaoReveal)
  of Web3SignerRequestKind.VoluntaryExit:
    writeForkedCommon("VOLUNTARY_EXIT")
    writer.writeField("voluntary_exit", value.voluntaryExit)
  of Web3SignerRequestKind.SyncCommitteeMessage:
    writeForkedCommon("SYNC_COMMITTEE_MESSAGE")
    writer.writeField("sync_committee_message", value.syncCommitteeMessage)
  of Web3SignerRequestKind.SyncCommitteeSelectionProof:
    writeForkedCommon("SYNC_COMMITTEE_SELECTION_PROOF")
    writer.writeField("sync_aggregator_selection_data",
                      value.syncAggregatorSelectionData)
  of Web3SignerRequestKind.SyncCommitteeContributionAndProof:
    writeForkedCommon("SYNC_COMMITTEE_CONTRIBUTION_AND_PROOF")
    writer.writeField("contribution_and_proof",
                      value.syncCommitteeContributionAndProof)
  of Web3SignerRequestKind.ValidatorRegistration:
    # https://consensys.github.io/web3signer/web3signer-eth2.html#operation/ETH2_SIGN
    writeForkedCommon("VALIDATOR_REGISTRATION")
    writer.writeField("validator_registration", value.validatorRegistration)
  writer.endRecord()
|
2021-11-30 01:20:21 +00:00
|
|
|
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson],
                value: var Web3SignerRequest) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Decodes a Web3Signer signing request. The discriminating `type` field
  ## selects the request kind; the matching data field (whose JSON name
  ## varies per kind) is captured raw first and decoded once the kind is
  ## known.
  var
    requestKind: Option[Web3SignerRequestKind]
    forkInfo: Option[Web3SignerForkInfo]
    signingRoot: Option[Eth2Digest]
    data: Option[JsonString]
    proofs: seq[Web3SignerMerkleProof]
    dataName: string

  for fieldName in readObjectFields(reader):
    case fieldName
    of "type":
      if requestKind.isSome():
        reader.raiseUnexpectedField("Multiple `type` fields found",
                                    "Web3SignerRequest")
      let typeName = reader.readValue(string)
      requestKind = some(
        case typeName
        of "AGGREGATION_SLOT":
          Web3SignerRequestKind.AggregationSlot
        of "AGGREGATE_AND_PROOF":
          Web3SignerRequestKind.AggregateAndProof
        of "ATTESTATION":
          Web3SignerRequestKind.Attestation
        of "BLOCK":
          Web3SignerRequestKind.Block
        of "BLOCK_V2":
          Web3SignerRequestKind.BlockV2
        of "DEPOSIT":
          Web3SignerRequestKind.Deposit
        of "RANDAO_REVEAL":
          Web3SignerRequestKind.RandaoReveal
        of "VOLUNTARY_EXIT":
          Web3SignerRequestKind.VoluntaryExit
        of "SYNC_COMMITTEE_MESSAGE":
          Web3SignerRequestKind.SyncCommitteeMessage
        of "SYNC_COMMITTEE_SELECTION_PROOF":
          Web3SignerRequestKind.SyncCommitteeSelectionProof
        of "SYNC_COMMITTEE_CONTRIBUTION_AND_PROOF":
          Web3SignerRequestKind.SyncCommitteeContributionAndProof
        of "VALIDATOR_REGISTRATION":
          Web3SignerRequestKind.ValidatorRegistration
        else:
          reader.raiseUnexpectedValue("Unexpected `type` value")
      )
    of "fork_info":
      if forkInfo.isSome():
        reader.raiseUnexpectedField("Multiple `fork_info` fields found",
                                    "Web3SignerRequest")
      forkInfo = some(reader.readValue(Web3SignerForkInfo))
    of "signingRoot":
      if signingRoot.isSome():
        reader.raiseUnexpectedField("Multiple `signingRoot` fields found",
                                    "Web3SignerRequest")
      signingRoot = some(reader.readValue(Eth2Digest))
    of "proofs":
      let newProofs = reader.readValue(seq[Web3SignerMerkleProof])
      proofs.add(newProofs)
    of "aggregation_slot", "aggregate_and_proof", "block", "beacon_block",
       "randao_reveal", "voluntary_exit", "sync_committee_message",
       "sync_aggregator_selection_data", "contribution_and_proof",
       "attestation", "deposit", "validator_registration":
      # Kind-specific payload: keep it as raw JSON until `type` is known.
      if data.isSome():
        reader.raiseUnexpectedField("Multiple data fields found",
                                    "Web3SignerRequest")
      dataName = fieldName
      data = some(reader.readValue(JsonString))
    else:
      unrecognizedFieldWarning()

  if requestKind.isNone():
    reader.raiseUnexpectedValue("Field `type` is missing")

  value =
    case requestKind.get()
    of Web3SignerRequestKind.AggregationSlot:
      if dataName != "aggregation_slot":
        reader.raiseUnexpectedValue("Field `aggregation_slot` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerAggregationSlotData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `aggregation_slot` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.AggregationSlot,
        forkInfo: forkInfo, signingRoot: signingRoot, aggregationSlot: data
      )
    of Web3SignerRequestKind.AggregateAndProof:
      if dataName != "aggregate_and_proof":
        reader.raiseUnexpectedValue("Field `aggregate_and_proof` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(AggregateAndProof, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `aggregate_and_proof` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.AggregateAndProof,
        forkInfo: forkInfo, signingRoot: signingRoot, aggregateAndProof: data
      )
    of Web3SignerRequestKind.Attestation:
      if dataName != "attestation":
        reader.raiseUnexpectedValue("Field `attestation` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(AttestationData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `attestation` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.Attestation,
        forkInfo: forkInfo, signingRoot: signingRoot, attestation: data
      )
    of Web3SignerRequestKind.Block:
      if dataName != "block":
        reader.raiseUnexpectedValue("Field `block` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(phase0.BeaconBlock, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `block` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.Block,
        forkInfo: forkInfo, signingRoot: signingRoot, blck: data
      )
    of Web3SignerRequestKind.BlockV2:
      # https://github.com/ConsenSys/web3signer/blob/41834a927088f1bde7a097e17d19e954d0058e54/core/src/main/resources/openapi-specs/eth2/signing/schemas.yaml#L421-L425 (branch v22.7.0)
      # It's the "beacon_block" field even when it's not a block, but a header
      if dataName != "beacon_block":
        reader.raiseUnexpectedValue("Field `beacon_block` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerForkedBeaconBlock, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `beacon_block` format")
          res.get()
      # Merkle proofs are only attached when the signer supplied them.
      if len(proofs) > 0:
        Web3SignerRequest(
          kind: Web3SignerRequestKind.BlockV2,
          forkInfo: forkInfo, signingRoot: signingRoot, beaconBlock: data,
          proofs: Opt.some(proofs)
        )
      else:
        Web3SignerRequest(
          kind: Web3SignerRequestKind.BlockV2,
          forkInfo: forkInfo, signingRoot: signingRoot, beaconBlock: data
        )
    of Web3SignerRequestKind.Deposit:
      # Note: deposits do not require `fork_info`.
      if dataName != "deposit":
        reader.raiseUnexpectedValue("Field `deposit` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerDepositData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `deposit` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.Deposit,
        signingRoot: signingRoot, deposit: data
      )
    of Web3SignerRequestKind.RandaoReveal:
      if dataName != "randao_reveal":
        reader.raiseUnexpectedValue("Field `randao_reveal` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerRandaoRevealData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `randao_reveal` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.RandaoReveal,
        forkInfo: forkInfo, signingRoot: signingRoot, randaoReveal: data
      )
    of Web3SignerRequestKind.VoluntaryExit:
      if dataName != "voluntary_exit":
        reader.raiseUnexpectedValue("Field `voluntary_exit` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(VoluntaryExit, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `voluntary_exit` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.VoluntaryExit,
        forkInfo: forkInfo, signingRoot: signingRoot, voluntaryExit: data
      )
    of Web3SignerRequestKind.SyncCommitteeMessage:
      if dataName != "sync_committee_message":
        reader.raiseUnexpectedValue(
          "Field `sync_committee_message` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerSyncCommitteeMessageData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `sync_committee_message` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.SyncCommitteeMessage,
        forkInfo: forkInfo, signingRoot: signingRoot,
        syncCommitteeMessage: data
      )
    of Web3SignerRequestKind.SyncCommitteeSelectionProof:
      if dataName != "sync_aggregator_selection_data":
        reader.raiseUnexpectedValue(
          "Field `sync_aggregator_selection_data` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(SyncAggregatorSelectionData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `sync_aggregator_selection_data` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.SyncCommitteeSelectionProof,
        forkInfo: forkInfo, signingRoot: signingRoot,
        syncAggregatorSelectionData: data
      )
    of Web3SignerRequestKind.SyncCommitteeContributionAndProof:
      if dataName != "contribution_and_proof":
        reader.raiseUnexpectedValue(
          "Field `contribution_and_proof` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(ContributionAndProof, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `contribution_and_proof` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.SyncCommitteeContributionAndProof,
        forkInfo: forkInfo, signingRoot: signingRoot,
        syncCommitteeContributionAndProof: data
      )
    of Web3SignerRequestKind.ValidatorRegistration:
      if dataName != "validator_registration":
        reader.raiseUnexpectedValue(
          "Field `validator_registration` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res =
            decodeJsonString(Web3SignerValidatorRegistration, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `validator_registration` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.ValidatorRegistration,
        forkInfo: forkInfo, signingRoot: signingRoot,
        validatorRegistration: data
      )
|
2021-11-30 01:20:21 +00:00
|
|
|
|
2022-02-07 20:36:09 +00:00
|
|
|
## RemoteKeystoreStatus
|
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: RemoteKeystoreStatus) {.raises: [IOError, Defect].} =
  ## Serializes a remote keystore status; `message` is emitted only when
  ## present.
  writer.beginRecord()
  writer.writeField("status", $value.status)
  if value.message.isSome():
    writer.writeField("message", value.message.get())
  writer.endRecord()
|
|
|
|
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson],
                value: var RemoteKeystoreStatus) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Parses a remote keystore status object. `status` is mandatory and must
  ## be one of the known keymanager API status strings; `message` is
  ## optional.
  var
    message: Option[string]
    status: Option[KeystoreStatus]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "message":
      if message.isSome():
        reader.raiseUnexpectedField("Multiple `message` fields found",
                                    "RemoteKeystoreStatus")
      message = some(reader.readValue(string))
    of "status":
      if status.isSome():
        reader.raiseUnexpectedField("Multiple `status` fields found",
                                    "RemoteKeystoreStatus")
      let statusName = reader.readValue(string)
      status = some(
        case statusName
        of "error": KeystoreStatus.error
        of "not_active": KeystoreStatus.notActive
        of "not_found": KeystoreStatus.notFound
        of "deleted": KeystoreStatus.deleted
        of "duplicate": KeystoreStatus.duplicate
        of "imported": KeystoreStatus.imported
        else: reader.raiseUnexpectedValue("Invalid `status` value")
      )
    else:
      unrecognizedFieldWarning()

  if status.isNone():
    reader.raiseUnexpectedValue("Field `status` is missing")

  value = RemoteKeystoreStatus(status: status.get(), message: message)
|
|
|
|
|
|
|
|
## ScryptSalt
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson], value: var ScryptSalt) {.
     raises: [SerializationError, IOError, Defect].} =
  ## Parses a hex-encoded scrypt salt.
  # An empty decode result covers both an empty input string and
  # undecodable hex — presumably `ncrutils.fromHex` yields an empty
  # sequence on malformed input; verify against nimcrypto.
  let parsed = ncrutils.fromHex(reader.readValue(string))
  if len(parsed) == 0:
    reader.raiseUnexpectedValue("Invalid scrypt salt value")
  value = ScryptSalt(parsed)
|
|
|
|
|
|
|
|
## Pbkdf2Params
|
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson], value: Pbkdf2Params) {.
     raises: [IOError, Defect].} =
  ## Serializes PBKDF2 parameters; the numeric fields are written as raw
  ## JSON numbers via `Base10`/`JsonString` to avoid quoting.
  writer.beginRecord()
  writer.writeField("dklen", JsonString(Base10.toString(value.dklen)))
  writer.writeField("c", JsonString(Base10.toString(value.c)))
  writer.writeField("prf", value.prf)
  writer.writeField("salt", value.salt)
  writer.endRecord()
|
|
|
|
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson], value: var Pbkdf2Params) {.
     raises: [SerializationError, IOError, Defect].} =
  ## Parses PBKDF2 parameters; all four fields (`dklen`, `c`, `prf`,
  ## `salt`) are mandatory and may appear at most once.
  var
    dklen: Option[uint64]
    c: Option[uint64]
    prf: Option[PrfKind]
    salt: Option[Pbkdf2Salt]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "dklen":
      if dklen.isSome():
        reader.raiseUnexpectedField("Multiple `dklen` fields found",
                                    "Pbkdf2Params")
      dklen = some(reader.readValue(uint64))
    of "c":
      if c.isSome():
        reader.raiseUnexpectedField("Multiple `c` fields found",
                                    "Pbkdf2Params")
      c = some(reader.readValue(uint64))
    of "prf":
      if prf.isSome():
        reader.raiseUnexpectedField("Multiple `prf` fields found",
                                    "Pbkdf2Params")
      prf = some(reader.readValue(PrfKind))
    of "salt":
      if salt.isSome():
        reader.raiseUnexpectedField("Multiple `salt` fields found",
                                    "Pbkdf2Params")
      salt = some(reader.readValue(Pbkdf2Salt))
    else:
      unrecognizedFieldWarning()

  if dklen.isNone():
    reader.raiseUnexpectedValue("Field `dklen` is missing")
  if c.isNone():
    reader.raiseUnexpectedValue("Field `c` is missing")
  if prf.isNone():
    reader.raiseUnexpectedValue("Field `prf` is missing")
  if salt.isNone():
    reader.raiseUnexpectedValue("Field `salt` is missing")

  value = Pbkdf2Params(
    dklen: dklen.get(),
    c: c.get(),
    prf: prf.get(),
    salt: salt.get()
  )
|
|
|
|
|
|
|
|
## ScryptParams
|
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson], value: ScryptParams) {.
     raises: [IOError, Defect].} =
  ## Serializes scrypt parameters; numeric fields are written as raw JSON
  ## numbers via `Base10`/`JsonString` to avoid quoting.
  writer.beginRecord()
  writer.writeField("dklen", JsonString(Base10.toString(value.dklen)))
  writer.writeField("n", JsonString(Base10.toString(uint64(value.n))))
  writer.writeField("p", JsonString(Base10.toString(uint64(value.p))))
  writer.writeField("r", JsonString(Base10.toString(uint64(value.r))))
  writer.writeField("salt", value.salt)
  writer.endRecord()
|
|
|
|
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson], value: var ScryptParams) {.
     raises: [SerializationError, IOError, Defect].} =
  ## Parses scrypt parameters; all five fields (`dklen`, `n`, `p`, `r`,
  ## `salt`) are mandatory, may appear at most once, and the cost factors
  ## must be non-negative.
  var
    dklen: Option[uint64]
    n, p, r: Option[int]
    salt: Option[ScryptSalt]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "dklen":
      if dklen.isSome():
        reader.raiseUnexpectedField("Multiple `dklen` fields found",
                                    "ScryptParams")
      dklen = some(reader.readValue(uint64))
    of "n":
      if n.isSome():
        reader.raiseUnexpectedField("Multiple `n` fields found",
                                    "ScryptParams")
      let nval = reader.readValue(int)
      if nval < 0:
        reader.raiseUnexpectedValue("Unexpected negative `n` value")
      n = some(nval)
    of "p":
      if p.isSome():
        reader.raiseUnexpectedField("Multiple `p` fields found",
                                    "ScryptParams")
      let pval = reader.readValue(int)
      if pval < 0:
        reader.raiseUnexpectedValue("Unexpected negative `p` value")
      p = some(pval)
    of "r":
      if r.isSome():
        reader.raiseUnexpectedField("Multiple `r` fields found",
                                    "ScryptParams")
      let rval = reader.readValue(int)
      if rval < 0:
        reader.raiseUnexpectedValue("Unexpected negative `r` value")
      r = some(rval)
    of "salt":
      if salt.isSome():
        reader.raiseUnexpectedField("Multiple `salt` fields found",
                                    "ScryptParams")
      salt = some(reader.readValue(ScryptSalt))
    else:
      unrecognizedFieldWarning()

  if dklen.isNone():
    reader.raiseUnexpectedValue("Field `dklen` is missing")
  if n.isNone():
    reader.raiseUnexpectedValue("Field `n` is missing")
  if p.isNone():
    reader.raiseUnexpectedValue("Field `p` is missing")
  if r.isNone():
    reader.raiseUnexpectedValue("Field `r` is missing")
  if salt.isNone():
    reader.raiseUnexpectedValue("Field `salt` is missing")

  value = ScryptParams(
    dklen: dklen.get(),
    n: n.get(), p: p.get(), r: r.get(),
    salt: salt.get()
  )
|
|
|
|
|
|
|
|
## Keystore
|
|
|
|
# Deliberately unimplemented: keystore encoding must go through Json.encode.
proc writeValue*(writer: var JsonWriter[RestJson], value: Keystore) {.
     error: "keystores must be converted to json with Json.encode(keystore). " &
            "There is no REST-specific encoding" .}
|
2022-02-07 20:36:09 +00:00
|
|
|
|
|
|
|
# Deliberately unimplemented: keystore decoding must go through parseKeystore.
proc readValue*(reader: var JsonReader[RestJson], value: var Keystore) {.
     error: "Keystores must be loaded with `parseKeystore`. " &
            "There is no REST-specific encoding".}
|
2022-02-07 20:36:09 +00:00
|
|
|
|
|
|
|
## KeystoresAndSlashingProtection
|
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: KeystoresAndSlashingProtection) {.
     raises: [IOError, SerializationError, Defect].} =
  ## Serializes a keymanager import request: each keystore and the optional
  ## slashing-protection database are emitted as pre-encoded JSON strings.
  writer.beginRecord()
  var encodedKeystores: seq[string]
  for keystore in value.keystores:
    encodedKeystores.add(Json.encode(keystore))
  writer.writeField("keystores", encodedKeystores)
  writer.writeField("passwords", value.passwords)
  if value.slashing_protection.isSome():
    let slashingProtection = RestJson.encode(value.slashing_protection.get)
    writer.writeField("slashing_protection", slashingProtection)
  writer.endRecord()
|
|
|
|
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson],
                value: var KeystoresAndSlashingProtection) {.
     raises: [SerializationError, IOError, Defect].} =
  ## Parses a keymanager import request. Keystores and the optional
  ## slashing-protection database arrive as JSON-encoded strings and are
  ## decoded in a second pass once all fields have been read.
  var
    strKeystores: seq[string]
    passwords: seq[string]
    strSlashing: Option[string]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "keystores":
      strKeystores = reader.readValue(seq[string])
    of "passwords":
      passwords = reader.readValue(seq[string])
    of "slashing_protection":
      if strSlashing.isSome():
        reader.raiseUnexpectedField(
          "Multiple `slashing_protection` fields found",
          "KeystoresAndSlashingProtection")
      strSlashing = some(reader.readValue(string))
    else:
      unrecognizedFieldWarning()

  if len(strKeystores) == 0:
    reader.raiseUnexpectedValue("Missing or empty `keystores` value")
  if len(passwords) == 0:
    reader.raiseUnexpectedValue("Missing or empty `passwords` value")

  let keystores =
    block:
      var res: seq[Keystore]
      for item in strKeystores:
        let key =
          try:
            parseKeystore(item)
          # Fix: the exception was previously bound (`as exc`) but never
          # used, producing an unused-variable warning.
          except SerializationError:
            # TODO re-raise the exception by adjusting the column index, so the user
            # will get an accurate syntax error within the larger message
            reader.raiseUnexpectedValue("Invalid keystore format")
        res.add(key)
      res

  let slashing =
    if strSlashing.isSome():
      let db =
        try:
          RestJson.decode(strSlashing.get(),
                          SPDIR,
                          requireAllFields = true,
                          allowUnknownFields = true)
        except SerializationError:
          reader.raiseUnexpectedValue("Invalid slashing protection format")
      some(db)
    else:
      none[SPDIR]()

  value = KeystoresAndSlashingProtection(
    keystores: keystores, passwords: passwords, slashing_protection: slashing
  )
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## RestActivityItem
|
2022-07-21 16:54:07 +00:00
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: RestActivityItem) {.
     raises: [IOError, Defect].} =
  ## Serializes a validator activity record.
  writer.beginRecord()
  writer.writeField("index", value.index)
  writer.writeField("epoch", value.epoch)
  writer.writeField("active", value.active)
  writer.endRecord()
|
|
|
|
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestActivityItem) {.
     raises: [SerializationError, IOError, Defect].} =
  ## Parses a validator activity record; `index`, `epoch` and `active` are
  ## all mandatory.
  var index: Option[ValidatorIndex]
  var epoch: Option[Epoch]
  var active: Option[bool]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "index":
      if index.isSome():
        reader.raiseUnexpectedField(
          "Multiple `index` fields found", "RestActivityItem")
      index = some(reader.readValue(ValidatorIndex))
    of "epoch":
      if epoch.isSome():
        reader.raiseUnexpectedField(
          "Multiple `epoch` fields found", "RestActivityItem")
      epoch = some(reader.readValue(Epoch))
    of "active":
      if active.isSome():
        reader.raiseUnexpectedField(
          "Multiple `active` fields found", "RestActivityItem")
      active = some(reader.readValue(bool))
    else:
      # Fix: a bare `discard` left the unknown field's value unconsumed,
      # desynchronizing the reader on any unrecognized field; consume and
      # ignore it instead (same skip used by other readers in this file).
      discard reader.readValue(JsonString)

  if index.isNone():
    reader.raiseUnexpectedValue("Missing or empty `index` value")
  if epoch.isNone():
    reader.raiseUnexpectedValue("Missing or empty `epoch` value")
  if active.isNone():
    reader.raiseUnexpectedValue("Missing or empty `active` value")

  value = RestActivityItem(index: index.get(), epoch: epoch.get(),
                           active: active.get())
|
|
|
|
|
2022-12-06 11:29:00 +00:00
|
|
|
## RestLivenessItem
|
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: RestLivenessItem) {.
     raises: [IOError, Defect].} =
  ## Serializes a validator liveness record.
  writer.beginRecord()
  writer.writeField("index", value.index)
  writer.writeField("is_live", value.is_live)
  writer.endRecord()
|
|
|
|
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestLivenessItem) {.
     raises: [SerializationError, IOError, Defect].} =
  ## Parses a validator liveness record; `index` and `is_live` are both
  ## mandatory.
  var index: Option[ValidatorIndex]
  var isLive: Option[bool]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "index":
      if index.isSome():
        reader.raiseUnexpectedField(
          "Multiple `index` fields found", "RestLivenessItem")
      index = some(reader.readValue(ValidatorIndex))
    of "is_live":
      if isLive.isSome():
        reader.raiseUnexpectedField(
          "Multiple `is_live` fields found", "RestLivenessItem")
      isLive = some(reader.readValue(bool))
    else:
      # Fix: a bare `discard` left the unknown field's value unconsumed,
      # desynchronizing the reader on any unrecognized field; consume and
      # ignore it instead (same skip used by other readers in this file).
      discard reader.readValue(JsonString)

  if index.isNone():
    reader.raiseUnexpectedValue("Missing or empty `index` value")
  if isLive.isNone():
    reader.raiseUnexpectedValue("Missing or empty `is_live` value")

  value = RestLivenessItem(index: index.get(), is_live: isLive.get())
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## HeadChangeInfoObject
|
2022-06-20 05:53:39 +00:00
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: HeadChangeInfoObject) {.
     raises: [IOError, Defect].} =
  ## Serializes a `head` event payload; `execution_optimistic` is emitted
  ## only when the optimistic flag is known.
  writer.beginRecord()
  writer.writeField("slot", value.slot)
  writer.writeField("block", value.block_root)
  writer.writeField("state", value.state_root)
  writer.writeField("epoch_transition", value.epoch_transition)
  writer.writeField("previous_duty_dependent_root",
                    value.previous_duty_dependent_root)
  writer.writeField("current_duty_dependent_root",
                    value.current_duty_dependent_root)
  if value.optimistic.isSome():
    writer.writeField("execution_optimistic", value.optimistic.get())
  writer.endRecord()
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## ReorgInfoObject
|
2022-06-20 05:53:39 +00:00
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: ReorgInfoObject) {.
     raises: [IOError, Defect].} =
  ## Serializes a `chain_reorg` event payload; `execution_optimistic` is
  ## emitted only when the optimistic flag is known.
  writer.beginRecord()
  writer.writeField("slot", value.slot)
  writer.writeField("depth", value.depth)
  writer.writeField("old_head_block", value.old_head_block)
  writer.writeField("new_head_block", value.new_head_block)
  writer.writeField("old_head_state", value.old_head_state)
  writer.writeField("new_head_state", value.new_head_state)
  if value.optimistic.isSome():
    writer.writeField("execution_optimistic", value.optimistic.get())
  writer.endRecord()
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## FinalizationInfoObject
|
2022-06-20 05:53:39 +00:00
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: FinalizationInfoObject) {.
     raises: [IOError, Defect].} =
  ## Serializes a `finalized_checkpoint` event payload;
  ## `execution_optimistic` is emitted only when the optimistic flag is
  ## known.
  writer.beginRecord()
  writer.writeField("block", value.block_root)
  writer.writeField("state", value.state_root)
  writer.writeField("epoch", value.epoch)
  if value.optimistic.isSome():
    writer.writeField("execution_optimistic", value.optimistic.get())
  writer.endRecord()
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## EventBeaconBlockObject
|
2022-06-20 05:53:39 +00:00
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: EventBeaconBlockObject) {.
     raises: [IOError, Defect].} =
  ## Serializes a `block` event payload; `execution_optimistic` is emitted
  ## only when the optimistic flag is known.
  writer.beginRecord()
  writer.writeField("slot", value.slot)
  writer.writeField("block", value.block_root)
  if value.optimistic.isSome():
    writer.writeField("execution_optimistic", value.optimistic.get())
  writer.endRecord()
|
|
|
|
|
2023-04-11 23:17:48 +00:00
|
|
|
## RestNodeValidity
|
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: RestNodeValidity) {.
     raises: [IOError, Defect].} =
  ## Serialized as a bare string (its `$` representation) rather than an
  ## object.
  writer.writeValue($value)
|
|
|
|
|
2022-10-03 22:29:07 +00:00
|
|
|
## RestSyncInfo
|
2022-06-20 05:53:39 +00:00
|
|
|
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: RestSyncInfo) {.
     raises: [IOError, Defect].} =
  ## Serializes node sync status; `is_optimistic` and `el_offline` are
  ## emitted only when known.
  writer.beginRecord()
  writer.writeField("head_slot", value.head_slot)
  writer.writeField("sync_distance", value.sync_distance)
  writer.writeField("is_syncing", value.is_syncing)
  if value.is_optimistic.isSome():
    writer.writeField("is_optimistic", value.is_optimistic.get())
  if value.el_offline.isSome():
    writer.writeField("el_offline", value.el_offline.get())
  writer.endRecord()
|
|
|
|
|
2022-09-29 20:55:18 +00:00
|
|
|
## RestErrorMessage
|
2022-09-28 18:47:15 +00:00
|
|
|
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestErrorMessage) {.
     raises: [SerializationError, IOError, Defect].} =
  ## Parses a REST error response. `code` may arrive either as a JSON
  ## number or as a numeric string; both forms are accepted. Unknown
  ## fields are skipped.
  var
    code: Opt[int]
    message: Opt[string]
    stacktraces: Option[seq[string]]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "code":
      if code.isSome():
        reader.raiseUnexpectedField("Multiple `code` fields found",
                                    "RestErrorMessage")
      # First attempt: a plain JSON number. On a serialization error the
      # value is re-read as a string below — this relies on the reader
      # allowing a retry after a failed readValue; preserved as-is from
      # the original logic.
      let ires =
        try:
          let res = reader.readValue(int)
          if res < 0:
            reader.raiseUnexpectedValue("Invalid `code` field value")
          Opt.some(res)
        except SerializationError:
          Opt.none(int)
      if ires.isNone():
        # Second attempt: a numeric string such as "404".
        let sres =
          try: parseInt(reader.readValue(string))
          except ValueError:
            reader.raiseUnexpectedValue("Invalid `code` field format")
        if sres < 0:
          reader.raiseUnexpectedValue("Invalid `code` field value")
        code = Opt.some(sres)
      else:
        code = ires
    of "message":
      if message.isSome():
        reader.raiseUnexpectedField("Multiple `message` fields found",
                                    "RestErrorMessage")
      message = Opt.some(reader.readValue(string))
    of "stacktraces":
      if stacktraces.isSome():
        reader.raiseUnexpectedField("Multiple `stacktraces` fields found",
                                    "RestErrorMessage")
      stacktraces = some(reader.readValue(seq[string]))
    else:
      # We ignore all additional fields.
      discard reader.readValue(JsonString)

  if code.isNone():
    reader.raiseUnexpectedValue("Missing or invalid `code` value")
  if message.isNone():
    reader.raiseUnexpectedValue("Missing or invalid `message` value")

  value = RestErrorMessage(
    code: code.get(), message: message.get(),
    stacktraces: stacktraces
  )
|
|
|
|
|
2021-08-03 15:17:11 +00:00
|
|
|
proc parseRoot(value: string): Result[Eth2Digest, cstring] =
  ## Converts a hex-encoded string into an `Eth2Digest`; returns an error
  ## result instead of raising when the input is not valid hex.
  try:
    let bytes = hexToByteArray[32](value)
    ok(Eth2Digest(data: bytes))
  except ValueError:
    err("Unable to decode root value")
|
|
|
|
|
2022-09-29 21:00:53 +00:00
|
|
|
proc decodeBody*(
       t: typedesc[RestPublishedSignedBeaconBlock],
       body: ContentBody,
       version: string
     ): Result[RestPublishedSignedBeaconBlock, cstring] =
  ## Decodes a signed beacon block from a REST request body.
  ##
  ## JSON bodies are self-describing and are decoded directly. SSZ
  ## (octet-stream) bodies are not self-describing, so the consensus fork
  ## must be supplied via `version` (the `Eth-Consensus-Version` header).
  ## Any other media type is rejected.
  if body.contentType == ApplicationJsonMediaType:
    let data =
      try:
        RestJson.decode(body.data, RestPublishedSignedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        debug "Failed to deserialize REST JSON data",
              err = exc.formatMsg("<data>"),
              data = string.fromBytes(body.data)
        return err("Unable to deserialize data")
      except CatchableError:
        return err("Unexpected deserialization error")
    ok(data)
  elif body.contentType == OctetStreamMediaType:
    let consensusFork = ? ConsensusFork.decodeString(version)
    # The five fork branches previously duplicated the same decode/wrap
    # sequence verbatim; factor it into a local template. A `return` inside
    # the template exits `decodeBody` itself.
    template decodeSszBlock(BlockType: untyped): untyped =
      let blck =
        try:
          SSZ.decode(body.data, BlockType)
        except SerializationError:
          return err("Unable to deserialize data")
        except CatchableError:
          return err("Unexpected deserialization error")
      ok(RestPublishedSignedBeaconBlock(ForkedSignedBeaconBlock.init(blck)))
    case consensusFork
    of ConsensusFork.Phase0:
      decodeSszBlock(phase0.SignedBeaconBlock)
    of ConsensusFork.Altair:
      decodeSszBlock(altair.SignedBeaconBlock)
    of ConsensusFork.Bellatrix:
      decodeSszBlock(bellatrix.SignedBeaconBlock)
    of ConsensusFork.Capella:
      decodeSszBlock(capella.SignedBeaconBlock)
    of ConsensusFork.Deneb:
      decodeSszBlock(deneb.SignedBeaconBlock)
  else:
    return err("Unsupported or invalid content media type")
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
proc decodeBody*[T](t: typedesc[T],
                    body: ContentBody): Result[T, cstring] =
  ## Decodes a JSON request body into `T`. Only `application/json` is
  ## accepted; all declared fields are required, unknown fields are ignored.
  if body.contentType != ApplicationJsonMediaType:
    return err("Unsupported content type")
  let data =
    try:
      RestJson.decode(body.data, T,
                      requireAllFields = true,
                      allowUnknownFields = true)
    except SerializationError as exc:
      # Log the decoding failure with the offending payload for debugging.
      debug "Failed to deserialize REST JSON data",
            err = exc.formatMsg("<data>"),
            data = string.fromBytes(body.data)
      return err("Unable to deserialize data")
    except CatchableError:
      return err("Unexpected deserialization error")
  ok(data)
|
2021-04-12 16:05:13 +00:00
|
|
|
|
2022-11-08 18:08:43 +00:00
|
|
|
proc decodeBodyJsonOrSsz*[T](t: typedesc[T],
                             body: ContentBody): Result[T, cstring] =
  ## Decodes a request body into `T` from either JSON or SSZ, selected by
  ## the request's Content-Type. Any other media type is rejected.
  if body.contentType == ApplicationJsonMediaType:
    let data =
      try:
        RestJson.decode(body.data, T,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        debug "Failed to deserialize REST JSON data",
              err = exc.formatMsg("<data>"),
              data = string.fromBytes(body.data)
        return err("Unable to deserialize data")
      except CatchableError:
        return err("Unexpected deserialization error")
    ok(data)
  elif body.contentType == OctetStreamMediaType:
    # SSZ path: `T` itself determines the layout, no version hint needed.
    let blck =
      try:
        SSZ.decode(body.data, T)
      except SerializationError:
        return err("Unable to deserialize data")
      except CatchableError:
        return err("Unexpected deserialization error")
    ok(blck)
  else:
    return err("Unsupported content type")
|
|
|
|
|
2021-07-13 11:15:07 +00:00
|
|
|
proc encodeBytes*[T: EncodeTypes](value: T,
                                  contentType: string): RestResult[seq[byte]] =
  ## Encodes `value` as a request body. Only "application/json" is
  ## supported for `EncodeTypes`.
  case contentType
  of "application/json":
    let data =
      block:
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.writeValue(value)
          stream.getOutput(seq[byte])
        except IOError:
          return err("Input/output error")
        except SerializationError:
          return err("Serialization error")
    ok(data)
  else:
    err("Content-Type not supported")
|
|
|
|
|
|
|
|
proc encodeBytes*[T: EncodeArrays](value: T,
                                   contentType: string): RestResult[seq[byte]] =
  ## Encodes an array-like `value` as a JSON array body. Only
  ## "application/json" is supported for `EncodeArrays`.
  case contentType
  of "application/json":
    let data =
      block:
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          # `writeArray` (not `writeValue`) — top-level JSON array output.
          writer.writeArray(value)
          stream.getOutput(seq[byte])
        except IOError:
          return err("Input/output error")
        except SerializationError:
          return err("Serialization error")
    ok(data)
  else:
    err("Content-Type not supported")
|
|
|
|
|
2022-09-29 21:00:53 +00:00
|
|
|
proc encodeBytes*[T: EncodeOctetTypes](
    value: T,
    contentType: string
  ): RestResult[seq[byte]] =
  ## Encodes `value` as either JSON or SSZ, selected by `contentType`.
  ## `EncodeOctetTypes` are types with both representations.
  case contentType
  of "application/json":
    let data =
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.writeValue(value)
        stream.getOutput(seq[byte])
      except IOError:
        return err("Input/output error")
      except SerializationError:
        return err("Serialization error")
    ok(data)
  of "application/octet-stream":
    let data =
      try:
        SSZ.encode(value)
      except CatchableError:
        return err("Serialization error")
    ok(data)
  else:
    err("Content-Type not supported")
|
|
|
|
|
2022-09-19 09:17:29 +00:00
|
|
|
proc decodeBytes*[T: DecodeTypes](
    t: typedesc[T],
    value: openArray[byte],
    contentType: Opt[ContentTypeData]
  ): RestResult[T] =
  ## Decodes a REST response body into `T`. A missing Content-Type is
  ## treated as JSON; wildcard media types are rejected outright.

  let mediaType =
    if contentType.isNone():
      # No Content-Type header: assume JSON.
      ApplicationJsonMediaType
    else:
      if isWildCard(contentType.get().mediaType):
        return err("Incorrect Content-Type")
      contentType.get().mediaType

  if mediaType == ApplicationJsonMediaType:
    try:
      ok RestJson.decode(value, T,
                         requireAllFields = true,
                         allowUnknownFields = true)
    except SerializationError as exc:
      debug "Failed to deserialize REST JSON data",
            err = exc.formatMsg("<data>"),
            data = string.fromBytes(value)
      err("Serialization error")
  else:
    err("Content-Type not supported")
|
|
|
|
|
|
|
|
proc encodeString*(value: string): RestResult[string] =
  ## Identity encoder: a string is already in wire form.
  ok(value)
|
|
|
|
|
2023-04-16 06:07:07 +00:00
|
|
|
proc encodeString*(
    value:
      uint64 |
      SyncCommitteePeriod |
      Epoch |
      Slot |
      CommitteeIndex |
      SyncSubcommitteeIndex): RestResult[string] =
  ## Encodes any uint64-backed index/counter type as a decimal string.
  ok(Base10.toString(uint64(value)))
|
|
|
|
|
|
|
|
proc encodeString*(value: ValidatorSig): RestResult[string] =
  ## Encodes a validator signature as "0x"-prefixed hex.
  ok(hexOriginal(toRaw(value)))
|
|
|
|
|
|
|
|
proc encodeString*(value: GraffitiBytes): RestResult[string] =
  ## Encodes graffiti as "0x"-prefixed hex of its raw 32 bytes.
  ok(hexOriginal(distinctBase(value)))
|
|
|
|
|
|
|
|
proc encodeString*(value: Eth2Digest): RestResult[string] =
  ## Encodes a 32-byte root as "0x"-prefixed hex.
  ok(hexOriginal(value.data))
|
|
|
|
|
|
|
|
proc encodeString*(value: ValidatorIdent): RestResult[string] =
  ## Renders a validator identifier either as a decimal index or as a
  ## "0x"-prefixed public key, depending on the identifier kind.
  let encoded =
    case value.kind
    of ValidatorQueryKind.Index:
      Base10.toString(uint64(value.index))
    of ValidatorQueryKind.Key:
      hexOriginal(toRaw(value.key))
  ok(encoded)
|
|
|
|
|
2021-11-30 01:20:21 +00:00
|
|
|
proc encodeString*(value: ValidatorPubKey): RestResult[string] =
  ## Encodes a validator public key as "0x"-prefixed hex.
  ok(hexOriginal(toRaw(value)))
|
|
|
|
|
2021-07-13 11:15:07 +00:00
|
|
|
proc encodeString*(value: StateIdent): RestResult[string] =
  ## Renders a state identifier in REST form: a decimal slot, a hex root,
  ## or one of the well-known named states.
  let encoded =
    case value.kind
    of StateQueryKind.Slot:
      Base10.toString(uint64(value.slot))
    of StateQueryKind.Root:
      hexOriginal(value.root.data)
    of StateQueryKind.Named:
      case value.value
      of StateIdentType.Head: "head"
      of StateIdentType.Genesis: "genesis"
      of StateIdentType.Finalized: "finalized"
      of StateIdentType.Justified: "justified"
  ok(encoded)
|
2021-08-03 15:17:11 +00:00
|
|
|
|
|
|
|
proc encodeString*(value: BlockIdent): RestResult[string] =
  ## Renders a block identifier in REST form: a decimal slot, a hex root,
  ## or one of the well-known named blocks.
  let encoded =
    case value.kind
    of BlockQueryKind.Slot:
      Base10.toString(uint64(value.slot))
    of BlockQueryKind.Root:
      hexOriginal(value.root.data)
    of BlockQueryKind.Named:
      case value.value
      of BlockIdentType.Head: "head"
      of BlockIdentType.Genesis: "genesis"
      of BlockIdentType.Finalized: "finalized"
  ok(encoded)
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[PeerStateKind],
                   value: string): Result[PeerStateKind, cstring] =
  ## Parses the textual peer connection state used by the REST API.
  let state =
    case value
    of "disconnected": PeerStateKind.Disconnected
    of "connecting": PeerStateKind.Connecting
    of "connected": PeerStateKind.Connected
    of "disconnecting": PeerStateKind.Disconnecting
    else: return err("Incorrect peer's state value")
  ok(state)
|
|
|
|
|
|
|
|
proc encodeString*(value: PeerStateKind): Result[string, cstring] =
  ## Renders a peer connection state as its REST wire string.
  let encoded =
    case value
    of PeerStateKind.Disconnected: "disconnected"
    of PeerStateKind.Connecting: "connecting"
    of PeerStateKind.Connected: "connected"
    of PeerStateKind.Disconnecting: "disconnecting"
  ok(encoded)
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[PeerDirectKind],
                   value: string): Result[PeerDirectKind, cstring] =
  ## Parses the textual peer connection direction used by the REST API.
  let direction =
    case value
    of "inbound": PeerDirectKind.Inbound
    of "outbound": PeerDirectKind.Outbound
    else: return err("Incorrect peer's direction value")
  ok(direction)
|
|
|
|
|
|
|
|
proc encodeString*(value: PeerDirectKind): Result[string, cstring] =
  ## Renders a peer connection direction as its REST wire string.
  let encoded =
    case value
    of PeerDirectKind.Inbound: "inbound"
    of PeerDirectKind.Outbound: "outbound"
  ok(encoded)
|
|
|
|
|
2022-04-08 16:22:49 +00:00
|
|
|
proc encodeString*(peerid: PeerId): Result[string, cstring] =
  ## Encodes a libp2p peer id via its `$` (base58) representation.
  ok($peerid)
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[EventTopic],
                   value: string): Result[EventTopic, cstring] =
  ## Maps an event-stream topic name onto the corresponding `EventTopic`.
  let topic =
    case value
    of "head": EventTopic.Head
    of "block": EventTopic.Block
    of "attestation": EventTopic.Attestation
    of "voluntary_exit": EventTopic.VoluntaryExit
    of "finalized_checkpoint": EventTopic.FinalizedCheckpoint
    of "chain_reorg": EventTopic.ChainReorg
    of "contribution_and_proof": EventTopic.ContributionAndProof
    of "light_client_finality_update": EventTopic.LightClientFinalityUpdate
    of "light_client_optimistic_update": EventTopic.LightClientOptimisticUpdate
    else: return err("Incorrect event's topic value")
  ok(topic)
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[ValidatorSig],
                   value: string): Result[ValidatorSig, cstring] =
  ## Parses a validator signature from its "0x"-prefixed hex string.
  if len(value) != ValidatorSigSize + 2:
    return err("Incorrect validator signature value length")
  # Bug fix: the original used `and`, which only rejected the prefix when
  # BOTH characters were wrong (e.g. "1x…" slipped through). The prefix is
  # invalid if EITHER character mismatches.
  if value[0] != '0' or value[1] != 'x':
    return err("Incorrect validator signature encoding")
  ValidatorSig.fromHex(value)
|
|
|
|
|
2021-11-30 01:20:21 +00:00
|
|
|
proc decodeString*(t: typedesc[ValidatorPubKey],
                   value: string): Result[ValidatorPubKey, cstring] =
  ## Parses a validator public key from its "0x"-prefixed hex string.
  if len(value) != ValidatorKeySize + 2:
    return err("Incorrect validator's key value length")
  # Bug fix: `and` -> `or`; the prefix check must fail when either of the
  # first two characters differs from "0x", not only when both do.
  if value[0] != '0' or value[1] != 'x':
    err("Incorrect validator's key encoding")
  else:
    ValidatorPubKey.fromHex(value)
|
|
|
|
|
2021-08-03 15:17:11 +00:00
|
|
|
proc decodeString*(t: typedesc[GraffitiBytes],
                   value: string): Result[GraffitiBytes, cstring] =
  ## Parses graffiti; `GraffitiBytes.init` raises `ValueError` on bad
  ## input, which is converted into an error result here.
  try:
    result = ok(GraffitiBytes.init(value))
  except ValueError:
    result = err("Unable to decode graffiti value")
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[string],
                   value: string): Result[string, cstring] =
  ## Identity decoder for plain string parameters.
  ok(value)
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[Slot], value: string): Result[Slot, cstring] =
  ## Parses a base-10 slot number; propagates `Base10` decode errors.
  let num = ? Base10.decode(uint64, value)
  ok(Slot(num))
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[Epoch], value: string): Result[Epoch, cstring] =
  ## Parses a base-10 epoch number; propagates `Base10` decode errors.
  let num = ? Base10.decode(uint64, value)
  ok(Epoch(num))
|
|
|
|
|
2022-06-19 05:57:52 +00:00
|
|
|
proc decodeString*(t: typedesc[SyncCommitteePeriod],
                   value: string): Result[SyncCommitteePeriod, cstring] =
  ## Parses a base-10 sync committee period; propagates decode errors.
  let num = ? Base10.decode(uint64, value)
  ok(SyncCommitteePeriod(num))
|
|
|
|
|
2021-09-23 22:13:25 +00:00
|
|
|
proc decodeString*(t: typedesc[uint64],
                   value: string): Result[uint64, cstring] =
  ## Parses a base-10 unsigned integer.
  Base10.decode(uint64, value)
|
|
|
|
|
2021-08-03 15:17:11 +00:00
|
|
|
proc decodeString*(t: typedesc[StateIdent],
                   value: string): Result[StateIdent, cstring] =
  ## Parses a state identifier: a "0x"-prefixed state root, a decimal slot
  ## number, or one of the named states
  ## ("head" / "genesis" / "finalized" / "justified").
  if len(value) > 2:
    if (value[0] == '0') and (value[1] == 'x'):
      # Hex prefix: must be a full-length state root.
      if len(value) != RootHashSize + 2:
        err("Incorrect state root value length")
      else:
        let res = ? parseRoot(value)
        ok(StateIdent(kind: StateQueryKind.Root, root: res))
    elif (value[0] in DecimalSet) and (value[1] in DecimalSet):
      # Starts with two digits: interpret as a slot number.
      let res = ? Base10.decode(uint64, value)
      ok(StateIdent(kind: StateQueryKind.Slot, slot: Slot(res)))
    else:
      case value
      of "head":
        ok(StateIdent(kind: StateQueryKind.Named,
                      value: StateIdentType.Head))
      of "genesis":
        ok(StateIdent(kind: StateQueryKind.Named,
                      value: StateIdentType.Genesis))
      of "finalized":
        ok(StateIdent(kind: StateQueryKind.Named,
                      value: StateIdentType.Finalized))
      of "justified":
        ok(StateIdent(kind: StateQueryKind.Named,
                      value: StateIdentType.Justified))
      else:
        err("Incorrect state identifier value")
  else:
    # One- or two-character inputs can only be small slot numbers.
    let res = ? Base10.decode(uint64, value)
    ok(StateIdent(kind: StateQueryKind.Slot, slot: Slot(res)))
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[BlockIdent],
                   value: string): Result[BlockIdent, cstring] =
  ## Parses a block identifier: a "0x"-prefixed block root, a decimal slot
  ## number, or one of the named blocks ("head" / "genesis" / "finalized").
  if len(value) > 2:
    if (value[0] == '0') and (value[1] == 'x'):
      # Hex prefix: must be a full-length block root.
      if len(value) != RootHashSize + 2:
        err("Incorrect block root value length")
      else:
        let res = ? parseRoot(value)
        ok(BlockIdent(kind: BlockQueryKind.Root, root: res))
    elif (value[0] in DecimalSet) and (value[1] in DecimalSet):
      # Starts with two digits: interpret as a slot number.
      let res = ? Base10.decode(uint64, value)
      ok(BlockIdent(kind: BlockQueryKind.Slot, slot: Slot(res)))
    else:
      case value
      of "head":
        ok(BlockIdent(kind: BlockQueryKind.Named,
                      value: BlockIdentType.Head))
      of "genesis":
        ok(BlockIdent(kind: BlockQueryKind.Named,
                      value: BlockIdentType.Genesis))
      of "finalized":
        ok(BlockIdent(kind: BlockQueryKind.Named,
                      value: BlockIdentType.Finalized))
      else:
        err("Incorrect block identifier value")
  else:
    # One- or two-character inputs can only be small slot numbers.
    let res = ? Base10.decode(uint64, value)
    ok(BlockIdent(kind: BlockQueryKind.Slot, slot: Slot(res)))
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[ValidatorIdent],
                   value: string): Result[ValidatorIdent, cstring] =
  ## Parses a validator identifier: a "0x"-prefixed public key or a
  ## decimal validator index.
  if len(value) > 2:
    if (value[0] == '0') and (value[1] == 'x'):
      # Hex prefix: must be a full-length public key.
      if len(value) != ValidatorKeySize + 2:
        err("Incorrect validator's key value length")
      else:
        let res = ? ValidatorPubKey.fromHex(value)
        ok(ValidatorIdent(kind: ValidatorQueryKind.Key,
                          key: res))
    elif (value[0] in DecimalSet) and (value[1] in DecimalSet):
      # Starts with two digits: interpret as a validator index.
      let res = ? Base10.decode(uint64, value)
      ok(ValidatorIdent(kind: ValidatorQueryKind.Index,
                        index: RestValidatorIndex(res)))
    else:
      err("Incorrect validator identifier value")
  else:
    # One- or two-character inputs can only be small validator indices.
    let res = ? Base10.decode(uint64, value)
    ok(ValidatorIdent(kind: ValidatorQueryKind.Index,
                      index: RestValidatorIndex(res)))
|
|
|
|
|
2022-04-08 16:22:49 +00:00
|
|
|
proc decodeString*(t: typedesc[PeerId],
                   value: string): Result[PeerId, cstring] =
  ## Parses a libp2p peer id; `PeerId.init` already returns a result.
  PeerId.init(value)
|
2021-08-03 15:17:11 +00:00
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[CommitteeIndex],
                   value: string): Result[CommitteeIndex, cstring] =
  ## Parses a committee index; `CommitteeIndex.init` performs range checks.
  let res = ? Base10.decode(uint64, value)
  CommitteeIndex.init(res)
|
2021-08-03 15:17:11 +00:00
|
|
|
|
2021-10-20 16:32:46 +00:00
|
|
|
proc decodeString*(t: typedesc[SyncSubcommitteeIndex],
                   value: string): Result[SyncSubcommitteeIndex, cstring] =
  ## Parses a sync subcommittee index; `init` performs range checks.
  let res = ? Base10.decode(uint64, value)
  SyncSubcommitteeIndex.init(res)
|
2021-10-19 17:44:05 +00:00
|
|
|
|
2021-08-03 15:17:11 +00:00
|
|
|
proc decodeString*(t: typedesc[Eth2Digest],
                   value: string): Result[Eth2Digest, cstring] =
  ## Parses a 32-byte root from its "0x"-prefixed hex string.
  if len(value) != RootHashSize + 2:
    return err("Incorrect root value length")
  # Bug fix: `and` -> `or`; the original only rejected the prefix when both
  # of the first two characters were wrong, so e.g. "0y…"/"1x…" reached
  # `parseRoot` instead of failing here with the precise error message.
  if value[0] != '0' or value[1] != 'x':
    return err("Incorrect root value encoding")
  parseRoot(value)
|
|
|
|
|
|
|
|
proc decodeString*(t: typedesc[ValidatorFilter],
                   value: string): Result[ValidatorFilter, cstring] =
  ## Parses a validator status filter. Accepts each concrete status name
  ## plus the aggregate names ("pending", "active", "exited", "withdrawal"),
  ## which expand to the set of their sub-statuses.
  let filter =
    case value
    of "pending_initialized":
      {ValidatorFilterKind.PendingInitialized}
    of "pending_queued":
      {ValidatorFilterKind.PendingQueued}
    of "active_ongoing":
      {ValidatorFilterKind.ActiveOngoing}
    of "active_exiting":
      {ValidatorFilterKind.ActiveExiting}
    of "active_slashed":
      {ValidatorFilterKind.ActiveSlashed}
    of "exited_unslashed":
      {ValidatorFilterKind.ExitedUnslashed}
    of "exited_slashed":
      {ValidatorFilterKind.ExitedSlashed}
    of "withdrawal_possible":
      {ValidatorFilterKind.WithdrawalPossible}
    of "withdrawal_done":
      {ValidatorFilterKind.WithdrawalDone}
    of "pending":
      {ValidatorFilterKind.PendingInitialized,
       ValidatorFilterKind.PendingQueued}
    of "active":
      {ValidatorFilterKind.ActiveOngoing,
       ValidatorFilterKind.ActiveExiting,
       ValidatorFilterKind.ActiveSlashed}
    of "exited":
      {ValidatorFilterKind.ExitedUnslashed,
       ValidatorFilterKind.ExitedSlashed}
    of "withdrawal":
      {ValidatorFilterKind.WithdrawalPossible,
       ValidatorFilterKind.WithdrawalDone}
    else:
      return err("Incorrect validator state identifier value")
  ok(filter)
|
2022-09-29 21:00:53 +00:00
|
|
|
|
2023-01-28 19:53:41 +00:00
|
|
|
proc decodeString*(t: typedesc[ConsensusFork],
                   value: string): Result[ConsensusFork, cstring] =
  ## Case-insensitively parses a consensus fork name, as carried in the
  ## `Eth-Consensus-Version` header.
  let fork =
    case toLowerAscii(value)
    of "phase0": ConsensusFork.Phase0
    of "altair": ConsensusFork.Altair
    of "bellatrix": ConsensusFork.Bellatrix
    of "capella": ConsensusFork.Capella
    of "deneb": ConsensusFork.Deneb
    else: return err("Unsupported or invalid beacon block fork version")
  ok(fork)
|