# beacon_chain
# Copyright (c) 2018-2024 Status Research & Development GmbH
# Licensed and distributed under either of
#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [].}

import std/[typetraits, strutils]
import results, stew/[assign2, base10, byteutils, endians2], presto/common,
       libp2p/peerid, serialization, json_serialization,
       json_serialization/std/[net, sets],
       json_serialization/stew/results as jsonSerializationResults,
       stint, chronicles
import ".."/[eth2_ssz_serialization, forks, keystore],
       ".."/../consensus_object_pools/block_pools_types,
       ".."/datatypes/[phase0, altair, bellatrix],
       ".."/mev/[bellatrix_mev, capella_mev],
       ".."/../validators/slashing_protection_common,
       "."/[rest_types, rest_keymanager_types]
import nimcrypto/utils as ncrutils

from ".."/datatypes/capella import SignedBeaconBlock
from ".."/datatypes/deneb import BeaconState

export
  eth2_ssz_serialization, results, peerid, common, serialization, chronicles,
  json_serialization, net, sets, rest_types, slashing_protection_common,
  jsonSerializationResults, rest_keymanager_types

from web3/primitives import BlockHash, BlockNumber
export primitives.BlockHash, primitives.BlockNumber

func decodeMediaType*(
    contentType: Opt[ContentTypeData]): Result[MediaType, string] =
  if contentType.isNone or isWildCard(contentType.get.mediaType):
    return err("Missing or incorrect Content-Type")
  ok contentType.get.mediaType

type
  EmptyBody* = object

createJsonFlavor RestJson

RestJson.useDefaultSerializationFor(
  AttestationData,
  BLSToExecutionChange,
  BeaconBlockHeader,
  BlobSidecar,
  BlobSidecarInfoObject,
  BlobsBundle,
  Checkpoint,
  ConsolidationRequest,
  ContributionAndProof,
  DataEnclosedObject,
  DataMetaEnclosedObject,
  DataOptimisticAndFinalizedObject,
  DataOptimisticObject,
  DataRootEnclosedObject,
  DataVersionEnclosedObject,
  DeleteKeystoresBody,
  DeleteKeystoresResponse,
  DeleteRemoteKeystoresResponse,
  DenebSignedBlockContents,
  Deposit,
  DepositData,
  DepositRequest,
  DepositTreeSnapshot,
  DistributedKeystoreInfo,
  ElectraSignedBlockContents,
  EmptyBody,
  Eth1Data,
  EventBeaconBlockObject,
  Fork,
  GetBlockAttestationsResponse,
  GetBlockHeaderResponse,
  GetBlockHeadersResponse,
  GetDepositContractResponse,
  GetDepositSnapshotResponse,
  GetDistributedKeystoresResponse,
  GetEpochCommitteesResponse,
  GetEpochSyncCommitteesResponse,
  GetForkChoiceResponse,
  GetForkScheduleResponse,
  GetGenesisResponse,
  GetHeaderResponseDeneb,
  GetHeaderResponseElectra,
  GetKeystoresResponse,
  GetNextWithdrawalsResponse,
  GetPoolAttesterSlashingsResponse,
  GetPoolProposerSlashingsResponse,
  GetPoolVoluntaryExitsResponse,
  GetRemoteKeystoresResponse,
  GetSpecVCResponse,
  GetStateFinalityCheckpointsResponse,
  GetStateForkResponse,
  GetStateRandaoResponse,
  GetStateRootResponse,
  GetStateValidatorBalancesResponse,
  GetStateValidatorResponse,
  GetStateValidatorsResponse,
  GetValidatorGasLimitResponse,
  HistoricalSummary,
  ImportDistributedKeystoresBody,
  ImportRemoteKeystoresBody,
  KeymanagerGenericError,
  KeystoreInfo,
  ListFeeRecipientResponse,
  ListGasLimitResponse,
  GetGraffitiResponse,
  GraffitiResponse,
  PendingAttestation,
  PendingBalanceDeposit,
  PendingConsolidation,
  PendingPartialWithdrawal,
  PostKeystoresResponse,
  PrepareBeaconProposer,
  ProposerSlashing,
  RemoteKeystoreInfo,
  RemoteSignerInfo,
  RequestItemStatus,
  RestAttesterDuty,
  RestBeaconCommitteeSelection,
  RestBeaconStatesCommittees,
  RestBeaconStatesFinalityCheckpoints,
  RestBlockHeader,
  RestBlockHeaderInfo,
  RestChainHeadV2,
  RestCommitteeSubscription,
  RestContributionAndProof,
  RestDepositContract,
  RestEpochRandao,
  RestEpochSyncCommittee,
  RestExecutionPayload,
  RestExtraData,
  RestGenesis,
  RestIndexedErrorMessage,
  RestIndexedErrorMessageItem,
  RestMetadata,
  RestNetworkIdentity,
  RestNimbusTimestamp1,
  RestNimbusTimestamp2,
  RestNode,
  RestNodeExtraData,
  RestNodePeer,
  RestNodeVersion,
  RestPeerCount,
  RestProposerDuty,
  RestRoot,
  RestSignedBlockHeader,
  RestSignedContributionAndProof,
  RestSyncCommitteeContribution,
  RestSyncCommitteeDuty,
  RestSyncCommitteeMessage,
  RestSyncCommitteeSelection,
  RestSyncCommitteeSubscription,
  RestSyncInfo,
  RestValidator,
  RestValidatorBalance,
  SPDIR,
  SPDIR_Meta,
  SPDIR_SignedAttestation,
  SPDIR_SignedBlock,
  SPDIR_Validator,
  SetFeeRecipientRequest,
  SetGasLimitRequest,
  SetGraffitiRequest,
  SignedBLSToExecutionChange,
  SignedBeaconBlockHeader,
  SignedContributionAndProof,
  SignedValidatorRegistrationV1,
  SignedVoluntaryExit,
  SubmitBlindedBlockResponseDeneb,
  SubmitBlindedBlockResponseElectra,
  SyncAggregate,
  SyncAggregatorSelectionData,
  SyncCommittee,
  SyncCommitteeContribution,
  SyncCommitteeMessage,
  Validator,
  ValidatorRegistrationV1,
  VoluntaryExit,
  Web3SignerAggregationSlotData,
  Web3SignerDepositData,
  Web3SignerErrorResponse,
  Web3SignerForkInfo,
  Web3SignerMerkleProof,
  Web3SignerRandaoRevealData,
  Web3SignerSignatureResponse,
  Web3SignerStatusResponse,
  Web3SignerSyncCommitteeMessageData,
  Web3SignerValidatorRegistration,
  Withdrawal,
  WithdrawalRequest,
  altair.BeaconBlock,
  altair.BeaconBlockBody,
  altair.BeaconState,
  altair.LightClientBootstrap,
  altair.LightClientFinalityUpdate,
  altair.LightClientHeader,
  altair.LightClientOptimisticUpdate,
  altair.LightClientUpdate,
  altair.SignedBeaconBlock,
  bellatrix.BeaconBlock,
  bellatrix.BeaconBlockBody,
  bellatrix.BeaconState,
  bellatrix.ExecutionPayload,
  bellatrix.ExecutionPayloadHeader,
  bellatrix.SignedBeaconBlock,
  bellatrix_mev.BlindedBeaconBlockBody,
  bellatrix_mev.BlindedBeaconBlock,
  bellatrix_mev.SignedBlindedBeaconBlock,
  capella.BeaconBlock,
  capella.BeaconBlockBody,
  capella.BeaconState,
  capella.ExecutionPayload,
  capella.ExecutionPayloadHeader,
  capella.LightClientBootstrap,
  capella.LightClientFinalityUpdate,
  capella.LightClientHeader,
  capella.LightClientOptimisticUpdate,
  capella.LightClientUpdate,
  capella.SignedBeaconBlock,
  capella_mev.BlindedBeaconBlock,
  capella_mev.BlindedBeaconBlockBody,
  capella_mev.SignedBlindedBeaconBlock,
  deneb.BeaconBlock,
  deneb.BeaconBlockBody,
  deneb.BeaconState,
  deneb.BlockContents,
  deneb.ExecutionPayload,
  deneb.ExecutionPayloadHeader,
  deneb.LightClientBootstrap,
  deneb.LightClientFinalityUpdate,
  deneb.LightClientHeader,
  deneb.LightClientOptimisticUpdate,
  deneb.LightClientUpdate,
  deneb.SignedBeaconBlock,
  deneb_mev.BlindedBeaconBlock,
  deneb_mev.BlindedBeaconBlockBody,
  deneb_mev.BuilderBid,
  deneb_mev.ExecutionPayloadAndBlobsBundle,
  deneb_mev.SignedBlindedBeaconBlock,
  deneb_mev.SignedBuilderBid,
  electra.Attestation,
  electra.AttesterSlashing,
  electra.BeaconBlock,
  electra.BeaconState,
  electra.BeaconBlockBody,
  electra.BlockContents,
  electra.ExecutionPayload,
  electra.ExecutionPayloadHeader,
  electra.IndexedAttestation,
  electra.LightClientBootstrap,
  electra.LightClientFinalityUpdate,
  electra.LightClientHeader,
  electra.LightClientOptimisticUpdate,
  electra.LightClientUpdate,
  electra.SignedBeaconBlock,
  electra.TrustedAttestation,
  electra_mev.BlindedBeaconBlock,
  electra_mev.BlindedBeaconBlockBody,
  electra_mev.BuilderBid,
  electra_mev.ExecutionPayloadAndBlobsBundle,
  electra_mev.SignedBlindedBeaconBlock,
  electra_mev.SignedBuilderBid,
  phase0.AggregateAndProof,
  phase0.Attestation,
  phase0.AttesterSlashing,
  phase0.BeaconBlock,
  phase0.BeaconBlockBody,
  phase0.BeaconState,
  phase0.IndexedAttestation,
  phase0.SignedAggregateAndProof,
  phase0.SignedBeaconBlock,
  phase0.TrustedAttestation
)

# TODO
# Tuples are widely used in the responses of the REST server
# If we switch to concrete types there, it would be possible
# to remove this overly generic definition.
template writeValue*(w: JsonWriter[RestJson], value: tuple) =
  writeRecordValue(w, value)

## The RestJson format implements JSON serialization in the way specified
## by the Beacon API:
##
## https://ethereum.github.io/beacon-APIs/
##
## In this format, we must always set `allowUnknownFields = true` in the
## decode calls in order to conform to the following spec:
##
## All JSON responses return the requested data under a data key in the top
## level of their response. Additional metadata may or may not be present
## in other keys at the top level of the response, dependent on the endpoint.
## The rules that require an increase in version number are as follows:
##
## - no field that is listed in an endpoint shall be removed without an increase
## in the version number
##
## - no field that is listed in an endpoint shall be altered in terms of format
## (e.g. from a string to an array) without an increase in the version number
##
## Note that it is possible for a field to be added to an endpoint's data or
## metadata without an increase in the version number.
##
## TODO nim-json-serializations should allow setting up this policy per format
##
## This also means that when new fields are introduced to the object definitions
## below, one must use the `Option[T]` type.

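# Illustrative decode sketch (not part of the module's API): how a Beacon API
# JSON body is typically parsed with this flavor. The `payload` literal and its
# field values are made-up examples; the decode flags mirror the policy above.
#
#   let payload = """{"data": {"genesis_time": "1590832934",
#                              "genesis_validators_root": "0x0000000000000000000000000000000000000000000000000000000000000000",
#                              "genesis_fork_version": "0x00000000"}}"""
#   let genesis = RestJson.decode(payload, GetGenesisResponse,
#                                 requireAllFields = true,
#                                 allowUnknownFields = true)
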
const
  DecimalSet = {'0' .. '9'}
    # Base10 (decimal) set of chars
  ValidatorKeySize = RawPubKeySize * 2
    # Size of `ValidatorPubKey` hexadecimal value (without 0x)
  ValidatorSigSize = RawSigSize * 2
    # Size of `ValidatorSig` hexadecimal value (without 0x)
  RootHashSize = sizeof(Eth2Digest) * 2
    # Size of `xxx_root` hexadecimal value (without 0x)

  ApplicationJsonMediaType* = MediaType.init("application/json")
  TextPlainMediaType* = MediaType.init("text/plain")
  OctetStreamMediaType* = MediaType.init("application/octet-stream")
  UrlEncodedMediaType* = MediaType.init("application/x-www-form-urlencoded")

  UnableDecodeVersionError = "Unable to decode version"
  UnableDecodeError = "Unable to decode data"
  UnexpectedDecodeError = "Unexpected decoding error"

type
  EncodeTypes* =
    BlobSidecarInfoObject |
    DeleteKeystoresBody |
    EmptyBody |
    ImportDistributedKeystoresBody |
    ImportRemoteKeystoresBody |
    KeystoresAndSlashingProtection |
    PrepareBeaconProposer |
    ProposerSlashing |
    SetFeeRecipientRequest |
    SetGasLimitRequest |
    bellatrix_mev.SignedBlindedBeaconBlock |
    capella_mev.SignedBlindedBeaconBlock |
    deneb_mev.SignedBlindedBeaconBlock |
    electra_mev.SignedBlindedBeaconBlock |
    phase0.AttesterSlashing |
    SignedValidatorRegistrationV1 |
    SignedVoluntaryExit |
    Web3SignerRequest |
    RestNimbusTimestamp1 |
    SetGraffitiRequest

  EncodeOctetTypes* =
    altair.SignedBeaconBlock |
    bellatrix.SignedBeaconBlock |
    capella.SignedBeaconBlock |
    phase0.SignedBeaconBlock |
    DenebSignedBlockContents |
    ElectraSignedBlockContents |
    ForkedMaybeBlindedBeaconBlock

  EncodeArrays* =
    seq[phase0.Attestation] |
    seq[PrepareBeaconProposer] |
    seq[RemoteKeystoreInfo] |
    seq[RestCommitteeSubscription] |
    seq[RestSignedContributionAndProof] |
    seq[RestSyncCommitteeMessage] |
    seq[RestSyncCommitteeSubscription] |
    seq[phase0.SignedAggregateAndProof] |
    seq[SignedValidatorRegistrationV1] |
    seq[ValidatorIndex] |
    seq[RestBeaconCommitteeSelection] |
    seq[RestSyncCommitteeSelection]

  DecodeTypes* =
    DataEnclosedObject |
    DataMetaEnclosedObject |
    DataRootEnclosedObject |
    DataOptimisticObject |
    DataVersionEnclosedObject |
    DataOptimisticAndFinalizedObject |
    GetBlockV2Response |
    GetDistributedKeystoresResponse |
    GetKeystoresResponse |
    GetRemoteKeystoresResponse |
    GetStateForkResponse |
    GetStateV2Response |
    KeymanagerGenericError |
    KeystoresAndSlashingProtection |
    ListFeeRecipientResponse |
    PrepareBeaconProposer |
    RestIndexedErrorMessage |
    RestErrorMessage |
    RestValidator |
    Web3SignerErrorResponse |
    Web3SignerKeysResponse |
    Web3SignerSignatureResponse |
    Web3SignerStatusResponse |
    GetStateRootResponse |
    GetBlockRootResponse |
    SomeForkedLightClientObject |
    seq[SomeForkedLightClientObject] |
    RestNimbusTimestamp1 |
    RestNimbusTimestamp2 |
    GetGraffitiResponse

  DecodeConsensysTypes* = ProduceBlindedBlockResponse

  RestVersioned*[T] = object
    data*: T
    jsonVersion*: ConsensusFork
    sszContext*: ForkDigest

  RestBlockTypes* = phase0.BeaconBlock | altair.BeaconBlock |
                    bellatrix.BeaconBlock | capella.BeaconBlock |
                    deneb.BlockContents | deneb_mev.BlindedBeaconBlock |
                    electra.BlockContents | electra_mev.BlindedBeaconBlock

func readStrictHexChar(c: char, radix: static[uint8]): Result[int8, cstring] =
  ## Converts a hex char to an int
  const
    lowerLastChar = chr(ord('a') + radix - 11'u8)
    capitalLastChar = chr(ord('A') + radix - 11'u8)
  case c
  of '0' .. '9': ok(int8 ord(c) - ord('0'))
  of 'a' .. lowerLastChar: ok(int8 ord(c) - ord('a') + 10)
  of 'A' .. capitalLastChar: ok(int8 ord(c) - ord('A') + 10)
  else: err("Invalid hexadecimal character encountered!")

func readStrictDecChar(c: char, radix: static[uint8]): Result[int8, cstring] =
  const lastChar = char(ord('0') + radix - 1'u8)
  case c
  of '0' .. lastChar: ok(int8 ord(c) - ord('0'))
  else: err("Invalid decimal character encountered!")

func skipPrefixes(str: string,
                  radix: range[2..16]): Result[int, cstring] =
  ## Returns the index of the first meaningful char in `str` by skipping
  ## the "0x" prefix
  if len(str) < 2:
    return ok(0)

  return
    if str[0] == '0':
      if str[1] in {'x', 'X'}:
        if radix != 16:
          return err("Parsing mismatch, 0x prefix is only valid for a " &
                     "hexadecimal number (base 16)")
        ok(2)
      elif str[1] in {'o', 'O'}:
        if radix != 8:
          return err("Parsing mismatch, 0o prefix is only valid for an " &
                     "octal number (base 8)")
        ok(2)
      elif str[1] in {'b', 'B'}:
        if radix == 2:
          ok(2)
        elif radix == 16:
          # allow something like "0bcdef12345" which is a valid hex
          ok(0)
        else:
          err("Parsing mismatch, 0b prefix is only valid for a binary number " &
              "(base 2), or hex number")
      else:
        ok(0)
    else:
      ok(0)

func strictParse*[bits: static[int]](input: string,
                                     T: typedesc[StUint[bits]],
                                     radix: static[uint8] = 10
                                    ): Result[T, cstring] {.raises: [].} =
  var res: T
  static: doAssert (radix >= 2) and (radix <= 16),
    "Only base from 2..16 are supported"

  const
    base = radix.uint8.stuint(bits)
    zero = 0.uint8.stuint(bits)

  var currentIndex =
    block:
      let res = skipPrefixes(input, radix)
      if res.isErr():
        return err(res.error)
      res.get()

  while currentIndex < len(input):
    let value =
      when radix <= 10:
        ? readStrictDecChar(input[currentIndex], radix)
      else:
        ? readStrictHexChar(input[currentIndex], radix)
    let mres = res * base
    if (res != zero) and (mres div base != res):
      return err("Overflow error")
    let ares = mres + value.stuint(bits)
    if ares < mres:
      return err("Overflow error")
    res = ares
    inc(currentIndex)
  ok(res)

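# Usage sketch (illustrative only): strict parsing of string-encoded numbers.
# `strictParse` rejects stray characters and overflows instead of truncating.
#
#   doAssert strictParse("123", UInt256).get() == 123.u256
#   doAssert strictParse("0xff", UInt256, 16).get() == 255.u256
#   doAssert strictParse("12x3", UInt256).isErr()
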
proc prepareJsonResponse*(t: typedesc[RestApiResponse], d: auto): seq[byte] =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("data", d)
        writer.endRecord()
        stream.getOutput(seq[byte])
      except IOError:
        default
  res

proc prepareJsonStringResponse*[T: SomeForkedLightClientObject](
    t: typedesc[RestApiResponse], d: RestVersioned[T]): string =
  let res =
    block:
      var default: string
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        withForkyObject(d.data):
          when lcDataFork > LightClientDataFork.None:
            writer.beginRecord()
            writer.writeField("version", d.jsonVersion.toString())
            writer.writeField("data", forkyObject)
            writer.endRecord()
        stream.getOutput(string)
      except IOError:
        default
  res

proc prepareJsonStringResponse*(t: typedesc[RestApiResponse], d: auto): string =
  let res =
    block:
      var default: string
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.writeValue(d)
        stream.getOutput(string)
      except IOError:
        default
  res

proc jsonResponseWRoot*(t: typedesc[RestApiResponse], data: auto,
                        dependent_root: Eth2Digest,
                        execOpt: Opt[bool]): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("dependent_root", dependent_root)
        if execOpt.isSome():
          writer.writeField("execution_optimistic", execOpt.get())
        writer.writeField("data", data)
        writer.endRecord()
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json")

proc jsonResponse*(t: typedesc[RestApiResponse], data: auto): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("data", data)
        writer.endRecord()
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json")

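# Shape sketch (illustrative only): `jsonResponse` produces the Beacon API
# `data` envelope. For example, a hypothetical call such as
#
#   RestApiResponse.jsonResponse((root: state_root))
#
# yields a body along the lines of `{"data":{"root":"0x…"}}`, served with
# status 200 and the `application/json` content type.
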
proc jsonResponseBlock*(t: typedesc[RestApiResponse],
                        data: ForkySignedBlindedBeaconBlock,
                        consensusFork: ConsensusFork,
                        execOpt: Opt[bool],
                        finalized: bool): RestApiResponse =
  let
    headers = [("eth-consensus-version", consensusFork.toString())]
    res =
      block:
        var default: seq[byte]
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.beginRecord()
          writer.writeField("version", consensusFork.toString())
          if execOpt.isSome():
            writer.writeField("execution_optimistic", execOpt.get())
          writer.writeField("finalized", finalized)
          writer.writeField("data", data)
          writer.endRecord()
          stream.getOutput(seq[byte])
        except IOError:
          default
  RestApiResponse.response(res, Http200, "application/json", headers = headers)

proc jsonResponseBlock*(t: typedesc[RestApiResponse],
                        data: ForkedSignedBeaconBlock,
                        execOpt: Opt[bool],
                        finalized: bool): RestApiResponse =
  let
    headers = [("eth-consensus-version", data.kind.toString())]
    res =
      block:
        var default: seq[byte]
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.beginRecord()
          writer.writeField("version", data.kind.toString())
          if execOpt.isSome():
            writer.writeField("execution_optimistic", execOpt.get())
          writer.writeField("finalized", finalized)
          withBlck(data):
            writer.writeField("data", forkyBlck)
          writer.endRecord()
          stream.getOutput(seq[byte])
        except IOError:
          default
  RestApiResponse.response(res, Http200, "application/json", headers = headers)

proc jsonResponseState*(t: typedesc[RestApiResponse],
                        data: ForkedHashedBeaconState,
                        execOpt: Opt[bool]): RestApiResponse =
  let
    headers = [("eth-consensus-version", data.kind.toString())]
    res =
      block:
        var default: seq[byte]
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.beginRecord()
          writer.writeField("version", data.kind.toString())
          if execOpt.isSome():
            writer.writeField("execution_optimistic", execOpt.get())
          withState(data):
            writer.writeField("data", forkyState.data)
          writer.endRecord()
          stream.getOutput(seq[byte])
        except IOError:
          default
  RestApiResponse.response(res, Http200, "application/json", headers = headers)

proc jsonResponseWOpt*(t: typedesc[RestApiResponse], data: auto,
                       execOpt: Opt[bool]): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        if execOpt.isSome():
          writer.writeField("execution_optimistic", execOpt.get())
        writer.writeField("data", data)
        writer.endRecord()
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json")

proc jsonResponseFinalized*(t: typedesc[RestApiResponse], data: auto,
                            exec: Opt[bool],
                            finalized: bool): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        if exec.isSome():
          writer.writeField("execution_optimistic", exec.get())
        writer.writeField("finalized", finalized)
        writer.writeField("data", data)
        writer.endRecord()
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json")

proc jsonResponseWVersion*(t: typedesc[RestApiResponse], data: auto,
                           version: ConsensusFork): RestApiResponse =
  let
    headers = [("eth-consensus-version", version.toString())]
    res =
      block:
        var default: seq[byte]
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.beginRecord()
          writer.writeField("version", version.toString())
          writer.writeField("data", data)
          writer.endRecord()
          stream.getOutput(seq[byte])
        except IOError:
          default
  RestApiResponse.response(res, Http200, "application/json", headers = headers)

proc jsonResponseVersioned*[T: SomeForkedLightClientObject](
    t: typedesc[RestApiResponse],
    entries: openArray[RestVersioned[T]]): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        for e in writer.stepwiseArrayCreation(entries):
          withForkyObject(e.data):
            when lcDataFork > LightClientDataFork.None:
              writer.beginRecord()
              writer.writeField("version", e.jsonVersion.toString())
              writer.writeField("data", forkyObject)
              writer.endRecord()
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json")

proc jsonResponsePlain*(t: typedesc[RestApiResponse],
                        data: auto): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.writeValue(data)
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json")

proc jsonResponsePlain*(t: typedesc[RestApiResponse],
                        data: auto, headers: HttpTable): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.writeValue(data)
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json", headers = headers)

proc jsonResponseWMeta*(t: typedesc[RestApiResponse],
                        data: auto, meta: auto): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("data", data)
        writer.writeField("meta", meta)
        writer.endRecord()
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json")

proc jsonMsgResponse*(t: typedesc[RestApiResponse],
                      msg: string = ""): RestApiResponse =
  let data =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("code", 200)
        writer.writeField("message", msg)
        writer.endRecord()
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(data, Http200, "application/json")

proc jsonError*(t: typedesc[RestApiResponse], status: HttpCode = Http200,
                msg: string = ""): RestApiResponse =
  let data =
    block:
      var default: string
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("code", int(status.toInt()))
        writer.writeField("message", msg)
        writer.endRecord()
        stream.getOutput(string)
      except IOError:
        default
  RestApiResponse.error(status, data, "application/json")

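# Shape sketch (illustrative only): a hypothetical call such as
#
#   RestApiResponse.jsonError(Http400, "Invalid block ID")
#
# produces the standard Beacon API error envelope
# `{"code": 400, "message": "Invalid block ID"}` with HTTP status 400.
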
proc jsonError*(t: typedesc[RestApiResponse], status: HttpCode = Http200,
                msg: string = "", stacktrace: string): RestApiResponse =
  let data =
    block:
      var default: string
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("code", int(status.toInt()))
        writer.writeField("message", msg)
        if len(stacktrace) > 0:
          writer.writeField("stacktraces", [stacktrace])
        writer.endRecord()
        stream.getOutput(string)
      except IOError:
        default
  RestApiResponse.error(status, data, "application/json")

proc jsonError*(t: typedesc[RestApiResponse], status: HttpCode = Http200,
                msg: string = "",
                stacktraces: openArray[string]): RestApiResponse =
  let data =
    block:
      var default: string
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("code", int(status.toInt()))
        writer.writeField("message", msg)
        writer.writeField("stacktraces", stacktraces)
        writer.endRecord()
        stream.getOutput(string)
      except IOError:
        default
  RestApiResponse.error(status, data, "application/json")

proc jsonError*(t: typedesc[RestApiResponse],
                rmsg: RestErrorMessage): RestApiResponse =
  let data =
    block:
      var default: string
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("code", rmsg.code)
        writer.writeField("message", rmsg.message)
        if rmsg.stacktraces.isSome():
          writer.writeField("stacktraces", rmsg.stacktraces)
        writer.endRecord()
        stream.getOutput(string)
      except IOError:
        default
  RestApiResponse.error(rmsg.code.toHttpCode().get(), data, "application/json")

proc jsonErrorList*(t: typedesc[RestApiResponse],
                    status: HttpCode = Http200,
                    msg: string = "", failures: auto): RestApiResponse =
  let data =
    block:
      var default: string
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.beginRecord()
        writer.writeField("code", int(status.toInt()))
        writer.writeField("message", msg)
        writer.writeField("failures", failures)
        writer.endRecord()
        stream.getOutput(string)
      except IOError:
        default
  RestApiResponse.error(status, data, "application/json")

proc sszResponseVersioned*[T: SomeForkedLightClientObject](
    t: typedesc[RestApiResponse],
    entries: openArray[RestVersioned[T]]): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        for e in entries:
          withForkyUpdate(e.data):
            when lcDataFork > LightClientDataFork.None:
              var cursor = stream.delayFixedSizeWrite(sizeof(uint64))
              let initPos = stream.pos
              stream.write e.sszContext.data
              var writer = SszWriter.init(stream)
              writer.writeValue forkyUpdate
              cursor.finalWrite (stream.pos - initPos).uint64.toBytesLE()
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/octet-stream")

proc sszResponsePlain*(t: typedesc[RestApiResponse], res: seq[byte],
                       headers: openArray[RestKeyValueTuple] = []
                      ): RestApiResponse =
  RestApiResponse.response(res, Http200, "application/octet-stream",
                           headers = headers)

proc sszResponse*(t: typedesc[RestApiResponse], data: auto,
                  headers: openArray[RestKeyValueTuple] = []
                 ): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = SszWriter.init(stream)
        writer.writeValue(data)
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/octet-stream",
                           headers = headers)

proc sszResponse*(t: typedesc[RestApiResponse], data: auto,
                  headers: HttpTable): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = SszWriter.init(stream)
        writer.writeValue(data)
        stream.getOutput(seq[byte])
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/octet-stream",
                           headers = headers)

template hexOriginal(data: openArray[byte]): string =
  to0xHex(data)

proc decodeJsonString*[T](t: typedesc[T],
                          data: JsonString): Result[T, cstring] =
  try:
    ok(RestJson.decode(string(data), T,
                       requireAllFields = true,
                       allowUnknownFields = true))
  except SerializationError:
    err("Unable to deserialize data")

## uint64
proc writeValue*(
    w: var JsonWriter[RestJson], value: uint64) {.raises: [IOError].} =
  writeValue(w, Base10.toString(value))

proc readValue*(reader: var JsonReader[RestJson], value: var uint64) {.
    raises: [IOError, SerializationError].} =
  let svalue = reader.readValue(string)
  let res = Base10.decode(uint64, svalue)
  if res.isOk():
    value = res.get()
  else:
    reader.raiseUnexpectedValue($res.error() & ": " & svalue)

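# Wire-format sketch (illustrative only): per the Beacon API, 64-bit integers
# travel as JSON strings, so a hypothetical round trip looks roughly like
#
#   doAssert RestJson.encode(12345'u64) == "\"12345\""
#   doAssert RestJson.decode("\"12345\"", uint64) == 12345'u64
#
# rather than the bare JSON number 12345.
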
## uint8
proc writeValue*(
    w: var JsonWriter[RestJson], value: uint8) {.raises: [IOError].} =
  writeValue(w, Base10.toString(value))

proc readValue*(reader: var JsonReader[RestJson], value: var uint8) {.
    raises: [IOError, SerializationError].} =
  let svalue = reader.readValue(string)
  let res = Base10.decode(uint8, svalue)
  if res.isOk():
    value = res.get()
  else:
    reader.raiseUnexpectedValue($res.error() & ": " & svalue)

## BlockNumber
proc writeValue*(
    w: var JsonWriter[RestJson], value: BlockNumber) {.raises: [IOError].} =
  w.writeValue(distinctBase(value))

proc readValue*(
    reader: var JsonReader[RestJson],
    value: var BlockNumber) {.raises: [IOError, SerializationError].} =
  reader.readValue(distinctBase(value))

## RestNumeric
proc writeValue*(w: var JsonWriter[RestJson],
                 value: RestNumeric) {.raises: [IOError].} =
  writeValue(w, int(value))

proc readValue*(reader: var JsonReader[RestJson],
                value: var RestNumeric) {.
    raises: [IOError, SerializationError].} =
  value = RestNumeric(reader.readValue(int))

## JustificationBits
proc writeValue*(
    w: var JsonWriter[RestJson], value: JustificationBits
) {.raises: [IOError].} =
  w.writeValue hexOriginal([uint8(value)])

proc readValue*(reader: var JsonReader[RestJson], value: var JustificationBits) {.
    raises: [IOError, SerializationError].} =
  let hex = reader.readValue(string)
  try:
    value = JustificationBits(hexToByteArray(hex, 1)[0])
  except ValueError:
    raiseUnexpectedValue(reader,
                         "The `justification_bits` value must be a hex string")

## UInt256
proc writeValue*(
    w: var JsonWriter[RestJson], value: UInt256) {.raises: [IOError].} =
  writeValue(w, toString(value))

proc readValue*(reader: var JsonReader[RestJson], value: var UInt256) {.
    raises: [IOError, SerializationError].} =
  let svalue = reader.readValue(string)
  try:
    value = parse(svalue, UInt256, 10)
  except ValueError:
    raiseUnexpectedValue(reader,
                         "UInt256 value should be a valid decimal string")

## Gwei
proc writeValue*(
    writer: var JsonWriter[RestJson], value: Gwei) {.raises: [IOError].} =
  writer.writeValue(distinctBase(value))

proc readValue*(
    reader: var JsonReader[RestJson],
    value: var Gwei) {.raises: [IOError, SerializationError].} =
  reader.readValue(distinctBase(value))

## Slot
proc writeValue*(
    writer: var JsonWriter[RestJson], value: Slot) {.raises: [IOError].} =
  writeValue(writer, Base10.toString(uint64(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var Slot) {.
    raises: [IOError, SerializationError].} =
  let svalue = reader.readValue(string)
  let res = Base10.decode(uint64, svalue)
  if res.isOk():
    value = Slot(res.get())
  else:
    reader.raiseUnexpectedValue($res.error())

## Epoch
proc writeValue*(
    writer: var JsonWriter[RestJson], value: Epoch) {.raises: [IOError].} =
  writeValue(writer, Base10.toString(uint64(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var Epoch) {.
    raises: [IOError, SerializationError].} =
  let svalue = reader.readValue(string)
  let res = Base10.decode(uint64, svalue)
  if res.isOk():
    value = Epoch(res.get())
  else:
    reader.raiseUnexpectedValue($res.error())

## EpochParticipationFlags
proc writeValue*(
    writer: var JsonWriter[RestJson], epochFlags: EpochParticipationFlags
) {.raises: [IOError].} =
  for e in writer.stepwiseArrayCreation(epochFlags.asList):
    writer.writeValue $e

proc readValue*(reader: var JsonReader[RestJson],
                epochFlags: var EpochParticipationFlags)
               {.raises: [SerializationError, IOError].} =
  for e in reader.readArray(string):
    let parsed = try:
      parseBiggestUInt(e)
    except ValueError:
      reader.raiseUnexpectedValue(
        "A string-encoded 8-bit unsigned integer value expected")

    if parsed > uint8.high:
      reader.raiseUnexpectedValue(
        "The unsigned integer value should fit in 8 bits")

    if not epochFlags.asList.add(uint8(parsed)):
      reader.raiseUnexpectedValue(
        "The participation flags list size exceeds limit")

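# Wire-format sketch (illustrative only): participation flags are serialized as
# an array of string-encoded bytes, e.g. a hypothetical list [0, 1, 3] becomes
# `["0","1","3"]` on the wire; bare numbers or values above 255 are rejected by
# the reader above.
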
## ValidatorIndex
proc writeValue*(
    writer: var JsonWriter[RestJson], value: ValidatorIndex
) {.raises: [IOError].} =
  writeValue(writer, Base10.toString(uint64(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var ValidatorIndex)
               {.raises: [IOError, SerializationError].} =
  let svalue = reader.readValue(string)
  let res = Base10.decode(uint64, svalue)
  if res.isOk():
    let v = res.get()
    if v < VALIDATOR_REGISTRY_LIMIT:
      value = ValidatorIndex(v)
    else:
      reader.raiseUnexpectedValue(
        "Validator index is bigger than VALIDATOR_REGISTRY_LIMIT")
  else:
    reader.raiseUnexpectedValue($res.error())

## IndexInSyncCommittee
proc writeValue*(
    writer: var JsonWriter[RestJson], value: IndexInSyncCommittee
) {.raises: [IOError].} =
  writeValue(writer, Base10.toString(distinctBase(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var IndexInSyncCommittee)
               {.raises: [IOError, SerializationError].} =
  let svalue = reader.readValue(string)
  let res = Base10.decode(uint64, svalue)
  if res.isOk():
    let v = res.get()
    if v < SYNC_COMMITTEE_SIZE:
      value = IndexInSyncCommittee(v)
    else:
      reader.raiseUnexpectedValue(
        "Index in committee is bigger than SYNC_COMMITTEE_SIZE")
  else:
    reader.raiseUnexpectedValue($res.error())

## RestValidatorIndex
proc writeValue*(
    writer: var JsonWriter[RestJson], value: RestValidatorIndex
) {.raises: [IOError].} =
  writeValue(writer, Base10.toString(uint64(value)))

proc readValue*(reader: var JsonReader[RestJson],
                value: var RestValidatorIndex) {.
    raises: [IOError, SerializationError].} =
  let svalue = reader.readValue(string)
  let res = Base10.decode(uint64, svalue)
  if res.isOk():
    let v = res.get()
    value = RestValidatorIndex(v)
  else:
    reader.raiseUnexpectedValue($res.error())

## CommitteeIndex
proc writeValue*(
    writer: var JsonWriter[RestJson], value: CommitteeIndex
) {.raises: [IOError].} =
  writeValue(writer, value.asUInt64)

proc readValue*(reader: var JsonReader[RestJson], value: var CommitteeIndex) {.
    raises: [IOError, SerializationError].} =
  var v: uint64
  reader.readValue(v)

  let res = CommitteeIndex.init(v)
  if res.isOk():
    value = res.get()
  else:
    reader.raiseUnexpectedValue($res.error())

## ValidatorSig
proc writeValue*(
    writer: var JsonWriter[RestJson], value: ValidatorSig
) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(toRaw(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var ValidatorSig) {.
    raises: [IOError, SerializationError].} =
  let hexValue = reader.readValue(string)
  let res = ValidatorSig.fromHex(hexValue)
  if res.isOk():
    value = res.get()
  else:
    reader.raiseUnexpectedValue($res.error())

## TrustedSig
proc writeValue*(
    writer: var JsonWriter[RestJson], value: TrustedSig
) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(toRaw(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var TrustedSig) {.
    raises: [IOError, SerializationError].} =
  let hexValue = reader.readValue(string)
  let res = ValidatorSig.fromHex(hexValue)
  if res.isOk():
    value = cast[TrustedSig](res.get())
  else:
    reader.raiseUnexpectedValue($res.error())

## ValidatorPubKey
proc writeValue*(
    writer: var JsonWriter[RestJson], value: ValidatorPubKey
) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(toRaw(value)))

proc readValue*(reader: var JsonReader[RestJson], value: var ValidatorPubKey) {.
    raises: [IOError, SerializationError].} =
  let hexValue = reader.readValue(string)
  let res = ValidatorPubKey.fromHex(hexValue)
  if res.isOk():
    value = res.get()
  else:
    reader.raiseUnexpectedValue($res.error())

proc readValue*(reader: var JsonReader[RestJson], value: var HashedValidatorPubKey) {.
|
|
|
|
raises: [IOError, SerializationError].} =
|
|
|
|
var key: ValidatorPubKey
|
|
|
|
readValue(reader, key)
|
|
|
|
|
|
|
|
value = HashedValidatorPubKey.init(key)
|
|
|
|
|
|
|
|
proc writeValue*(
|
|
|
|
writer: var JsonWriter[RestJson], value: HashedValidatorPubKey) {.raises: [IOError].} =
|
|
|
|
writeValue(writer, value.pubkey)
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
## BitSeq
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson], value: var BitSeq) {.
|
2023-08-25 09:29:07 +00:00
|
|
|
raises: [IOError, SerializationError].} =
|
2021-03-23 22:50:18 +00:00
|
|
|
try:
|
|
|
|
value = BitSeq hexToSeqByte(reader.readValue(string))
|
|
|
|
except ValueError:
|
|
|
|
raiseUnexpectedValue(reader, "A BitSeq value should be a valid hex string")
|
|
|
|
|
2023-08-19 15:11:56 +00:00
|
|
|
proc writeValue*(
|
|
|
|
writer: var JsonWriter[RestJson], value: BitSeq) {.raises: [IOError].} =
|
2021-06-10 17:51:15 +00:00
|
|
|
writeValue(writer, hexOriginal(value.bytes()))
|
2021-03-23 22:50:18 +00:00
|
|
|
|
|
|
|
## BitList
|
2021-08-23 10:41:48 +00:00
|
|
|
proc readValue*(reader: var JsonReader[RestJson], value: var BitList) {.
|
2023-08-25 09:29:07 +00:00
|
|
|
raises: [IOError, SerializationError].} =
|
2021-03-23 22:50:18 +00:00
|
|
|
type T = type(value)
|
|
|
|
value = T readValue(reader, BitSeq)
|
|
|
|
|
2023-08-19 15:11:56 +00:00
|
|
|
proc writeValue*(
|
|
|
|
writer: var JsonWriter[RestJson], value: BitList) {.raises: [IOError].} =
|
2021-03-23 22:50:18 +00:00
|
|
|
writeValue(writer, BitSeq value)
|
|
|
|
|
2021-09-27 08:14:43 +00:00
|
|
|
## BitArray
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson], value: var BitArray) {.
|
2023-08-25 09:29:07 +00:00
|
|
|
raises: [IOError, SerializationError].} =
|
2021-09-27 08:14:43 +00:00
|
|
|
try:
|
|
|
|
hexToByteArray(readValue(reader, string), value.bytes)
|
|
|
|
except ValueError:
|
|
|
|
raiseUnexpectedValue(reader,
|
|
|
|
"A BitArray value should be a valid hex string")
|
|
|
|
|
2023-08-19 15:11:56 +00:00
|
|
|
proc writeValue*(
|
|
|
|
writer: var JsonWriter[RestJson], value: BitArray) {.raises: [IOError].} =
|
2021-09-27 08:14:43 +00:00
|
|
|
writeValue(writer, hexOriginal(value.bytes))
|
|
|
|
|
2021-11-02 18:23:31 +00:00
|
|
|
## BlockHash
|
|
|
|
proc readValue*(reader: var JsonReader[RestJson], value: var BlockHash) {.
|
2023-08-25 09:29:07 +00:00
|
|
|
raises: [IOError, SerializationError].} =
|
2021-11-02 18:23:31 +00:00
|
|
|
try:
|
|
|
|
hexToByteArray(reader.readValue(string), distinctBase(value))
|
|
|
|
except ValueError:
|
|
|
|
raiseUnexpectedValue(reader,
|
|
|
|
"BlockHash value should be a valid hex string")
|
|
|
|
|
2023-08-19 15:11:56 +00:00
|
|
|
proc writeValue*(
|
|
|
|
writer: var JsonWriter[RestJson], value: BlockHash) {.raises: [IOError].} =
|
2021-11-02 18:23:31 +00:00
|
|
|
writeValue(writer, hexOriginal(distinctBase(value)))
|
|
|
|
|
2021-03-23 22:50:18 +00:00
|
|
|
## Eth2Digest
proc readValue*(reader: var JsonReader[RestJson], value: var Eth2Digest) {.
    raises: [IOError, SerializationError].} =
  try:
    hexToByteArray(reader.readValue(string), value.data)
  except ValueError:
    raiseUnexpectedValue(reader,
                         "Eth2Digest value should be a valid hex string")

proc writeValue*(
    writer: var JsonWriter[RestJson], value: Eth2Digest) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(value.data))

## BloomLogs
proc readValue*(reader: var JsonReader[RestJson], value: var BloomLogs) {.
    raises: [IOError, SerializationError].} =
  try:
    hexToByteArray(reader.readValue(string), value.data)
  except ValueError:
    raiseUnexpectedValue(reader,
                         "BloomLogs value should be a valid hex string")

proc writeValue*(
    writer: var JsonWriter[RestJson], value: BloomLogs) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(value.data))

## HashArray
proc readValue*(reader: var JsonReader[RestJson], value: var HashArray) {.
    raises: [IOError, SerializationError].} =
  readValue(reader, value.data)

proc writeValue*(
    writer: var JsonWriter[RestJson], value: HashArray) {.raises: [IOError].} =
  writeValue(writer, value.data)

## HashList
proc readValue*(reader: var JsonReader[RestJson], value: var HashList) {.
    raises: [IOError, SerializationError].} =
  readValue(reader, value.data)
  value.resetCache()

proc writeValue*(
    writer: var JsonWriter[RestJson], value: HashList) {.raises: [IOError].} =
  writeValue(writer, value.data)

## Eth1Address
proc readValue*(reader: var JsonReader[RestJson], value: var Eth1Address) {.
    raises: [IOError, SerializationError].} =
  try:
    hexToByteArray(reader.readValue(string), distinctBase(value))
  except ValueError:
    raiseUnexpectedValue(reader,
                         "Eth1Address value should be a valid hex string")

proc writeValue*(
    writer: var JsonWriter[RestJson], value: Eth1Address
) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(distinctBase(value)))

## Blob
## https://github.com/ethereum/beacon-APIs/blob/v2.4.2/types/primitive.yaml#L129-L133
proc readValue*(reader: var JsonReader[RestJson], value: var Blob) {.
    raises: [IOError, SerializationError].} =
  try:
    hexToByteArray(reader.readValue(string), distinctBase(value))
  except ValueError:
    raiseUnexpectedValue(reader,
                         "Blob value should be a valid hex string")

proc writeValue*(
    writer: var JsonWriter[RestJson], value: Blob
) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(distinctBase(value)))

## KzgCommitment and KzgProof; both are the same type, but this makes it
## explicit.
## https://github.com/ethereum/beacon-APIs/blob/v2.4.2/types/primitive.yaml#L135-L146
proc readValue*(reader: var JsonReader[RestJson],
                value: var (KzgCommitment|KzgProof)) {.
    raises: [IOError, SerializationError].} =
  try:
    hexToByteArray(reader.readValue(string), distinctBase(value.bytes))
  except ValueError:
    raiseUnexpectedValue(reader,
                         "KzgCommitment value should be a valid hex string")

proc writeValue*(
    writer: var JsonWriter[RestJson], value: KzgCommitment | KzgProof
) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(distinctBase(value.bytes)))

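# Illustrative note (not normative): `KzgCommitment` and `KzgProof` are both
# 48-byte values, so they serialize like a compressed BLS key: "0x" followed
# by 96 hex characters. Because the reader is shared between the two types, a
# malformed proof reports the generic "KzgCommitment value ..." error text as
# well.
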
## GraffitiBytes
proc writeValue*(
    writer: var JsonWriter[RestJson], value: GraffitiBytes
) {.raises: [IOError].} =
  writeValue(writer, hexOriginal(distinctBase(value)))

proc readValue*(reader: var JsonReader[RestJson], T: type GraffitiBytes): T
    {.raises: [IOError, SerializationError].} =
  try:
    init(GraffitiBytes, reader.readValue(string))
  except ValueError as err:
    reader.raiseUnexpectedValue err.msg

## Version | ForkDigest | DomainType | GraffitiBytes | RestWithdrawalPrefix
proc readValue*(
    reader: var JsonReader[RestJson],
    value: var (Version | ForkDigest | DomainType | GraffitiBytes |
                RestWithdrawalPrefix)) {.
    raises: [IOError, SerializationError].} =
  try:
    hexToByteArray(reader.readValue(string), distinctBase(value))
  except ValueError:
    raiseUnexpectedValue(
      reader, "Expected a valid hex string with " & $value.len() & " bytes")

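# Illustrative note (not normative): the reader above works for any
# fixed-length byte type, so the expected string length follows from the type:
# the 4-byte `Version`, `ForkDigest` and `DomainType` values take 8 hex
# characters, while the 32-byte `GraffitiBytes` takes 64, always 0x-prefixed.
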
template unrecognizedFieldWarning(fieldNameParam, typeNameParam: string) =
  # TODO: There should be a different notification mechanism for informing the
  #       caller of a deserialization routine about unexpected fields.
  #       The chronicles import in this module should be removed.
  trace "JSON field not recognized by the current version of Nimbus. Consider upgrading",
        fieldName = fieldNameParam, typeName = typeNameParam

template unrecognizedFieldIgnore =
  discard readValue(reader, JsonString)

## ForkedBeaconBlock
template prepareForkedBlockReading(blockType: typedesc,
                                   reader: var JsonReader[RestJson],
                                   version: var Opt[ConsensusFork],
                                   data: var Opt[JsonString],
                                   blinded: var Opt[bool],
                                   payloadValue: var Opt[Uint256],
                                   blockValue: var Opt[Uint256]) =
  for fieldName {.inject.} in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome():
        reader.raiseUnexpectedField("Multiple version fields found",
                                    blockType.name)
      let vres = reader.readValue(string).toLowerAscii()
      version = ConsensusFork.init(vres)
      if version.isNone():
        reader.raiseUnexpectedValue("Incorrect version field value")
    of "data":
      when (blockType is ForkedBlindedBeaconBlock) or
           (blockType is ProduceBlockResponseV3):
        if data.isSome():
          reader.raiseUnexpectedField(
            "Multiple '" & fieldName & "' fields found", blockType.name)
        data = Opt.some(reader.readValue(JsonString))
      else:
        unrecognizedFieldWarning(fieldName, blockType.name)
    of "block_header", "block":
      when (blockType is Web3SignerForkedBeaconBlock):
        if data.isSome():
          reader.raiseUnexpectedField(
            "Multiple '" & fieldName & "' fields found", blockType.name)
        data = Opt.some(reader.readValue(JsonString))
      else:
        unrecognizedFieldWarning(fieldName, blockType.name)
    of "execution_payload_blinded":
      when (blockType is ProduceBlockResponseV3):
        if blinded.isSome():
          reader.raiseUnexpectedField(
            "Multiple `execution_payload_blinded` fields found", blockType.name)
        blinded = Opt.some(reader.readValue(bool))
      else:
        unrecognizedFieldWarning(fieldName, blockType.name)
    of "execution_payload_value":
      when (blockType is ProduceBlockResponseV3):
        if payloadValue.isSome():
          reader.raiseUnexpectedField(
            "Multiple `execution_payload_value` fields found", blockType.name)
        payloadValue = Opt.some(reader.readValue(Uint256))
      else:
        unrecognizedFieldWarning(fieldName, blockType.name)
    of "consensus_block_value":
      when (blockType is ProduceBlockResponseV3):
        if blockValue.isSome():
          reader.raiseUnexpectedField(
            "Multiple `consensus_block_value` fields found", blockType.name)
        blockValue = Opt.some(reader.readValue(Uint256))
      else:
        unrecognizedFieldWarning(fieldName, blockType.name)
    else:
      unrecognizedFieldWarning(fieldName, blockType.name)

  if version.isNone():
    reader.raiseUnexpectedValue("Field `version` is missing")
  if data.isNone():
    reader.raiseUnexpectedValue("Field `data` is missing")

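# Illustrative envelope shape handled by `prepareForkedBlockReading` (not
# normative; which fields are accepted depends on the concrete block type):
#   {"version": "deneb", "data": { ...fork-specific block... }}
# plus, for `ProduceBlockResponseV3` only, the `execution_payload_blinded`,
# `execution_payload_value` and `consensus_block_value` fields. `version` is
# matched case-insensitively; `version` and `data` are mandatory.
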
proc readValue*[BlockType: ForkedBlindedBeaconBlock](
    reader: var JsonReader[RestJson],
    value: var BlockType
) {.raises: [IOError, SerializationError].} =
  var
    version: Opt[ConsensusFork]
    data: Opt[JsonString]
    blinded: Opt[bool]
    payloadValue: Opt[Uint256]
    blockValue: Opt[Uint256]

  prepareForkedBlockReading(BlockType, reader, version, data, blinded,
                            payloadValue, blockValue)

  case version.get():
  of ConsensusFork.Phase0:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        phase0.BeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect phase0 block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")

    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Phase0,
                                     phase0Data: res)
  of ConsensusFork.Altair:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        altair.BeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect altair block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Altair,
                                     altairData: res)
  of ConsensusFork.Bellatrix:
    reader.raiseUnexpectedValue("Bellatrix blinded block format unsupported")
  of ConsensusFork.Capella:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        capella_mev.BlindedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect capella block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Capella,
                                     capellaData: res)
  of ConsensusFork.Deneb:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        deneb_mev.BlindedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect deneb block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Deneb,
                                     denebData: res)
  of ConsensusFork.Electra:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        electra_mev.BlindedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        reader.raiseUnexpectedValue("Incorrect electra block format, [" &
                                    exc.formatMsg("BlindedBlock") & "]")
    value = ForkedBlindedBeaconBlock(kind: ConsensusFork.Electra,
                                     electraData: res)

proc readValue*[BlockType: Web3SignerForkedBeaconBlock](
    reader: var JsonReader[RestJson],
    value: var BlockType) {.raises: [IOError, SerializationError].} =
  var
    version: Opt[ConsensusFork]
    data: Opt[JsonString]
    blinded: Opt[bool]
    payloadValue: Opt[Uint256]
    blockValue: Opt[Uint256]

  prepareForkedBlockReading(BlockType, reader, version, data, blinded,
                            payloadValue, blockValue)

  if version.get() <= ConsensusFork.Altair:
    reader.raiseUnexpectedValue(
      "Web3Signer implementation supports Bellatrix and newer")

  let res =
    try:
      RestJson.decode(string(data.get()),
                      BeaconBlockHeader,
                      requireAllFields = true,
                      allowUnknownFields = true)
    except SerializationError:
      reader.raiseUnexpectedValue("Incorrect block header format")

  value = Web3SignerForkedBeaconBlock(kind: version.get(), data: res)

proc writeValue*[BlockType: Web3SignerForkedBeaconBlock](
    writer: var JsonWriter[RestJson], value: BlockType) {.raises: [IOError].} =
  # https://consensys.github.io/web3signer/web3signer-eth2.html#tag/Signing/operation/ETH2_SIGN
  # https://github.com/ConsenSys/web3signer/blob/d51337e96ba5ce410222943556bed7c4856b8e57/core/src/main/java/tech/pegasys/web3signer/core/service/http/handlers/signing/eth2/json/BlockRequestDeserializer.java#L42-L58
  writer.beginRecord()
  writer.writeField("version", value.kind.toString.toUpperAscii)
  writer.writeField("block_header", value.data)
  writer.endRecord()

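# Illustrative request body produced by the writer above (not normative):
#   {"version": "CAPELLA", "block_header": { ...BeaconBlockHeader... }}
# i.e. the fork name is upper-cased and, from Bellatrix onwards, only the
# beacon block header is exchanged with Web3Signer rather than the full block.
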
proc writeValue*[BlockType: ForkedBeaconBlock](
    writer: var JsonWriter[RestJson], value: BlockType) {.raises: [IOError].} =

  template forkIdentifier(id: string): auto =
    when BlockType is ForkedBeaconBlock:
      id
    else:
      (static toUpperAscii id)

  writer.beginRecord()
  case value.kind
  of ConsensusFork.Phase0:
    writer.writeField("version", forkIdentifier "phase0")
    writer.writeField("data", value.phase0Data)
  of ConsensusFork.Altair:
    writer.writeField("version", forkIdentifier "altair")
    writer.writeField("data", value.altairData)
  of ConsensusFork.Bellatrix:
    writer.writeField("version", forkIdentifier "bellatrix")
    writer.writeField("data", value.bellatrixData)
  of ConsensusFork.Capella:
    writer.writeField("version", forkIdentifier "capella")
    writer.writeField("data", value.capellaData)
  of ConsensusFork.Deneb:
    writer.writeField("version", forkIdentifier "deneb")
    writer.writeField("data", value.denebData)
  writer.endRecord()

## RestPublishedBeaconBlockBody
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestPublishedBeaconBlockBody) {.
    raises: [IOError, SerializationError].} =
  var
    randao_reveal: Opt[ValidatorSig]
    eth1_data: Opt[Eth1Data]
    graffiti: Opt[GraffitiBytes]
    proposer_slashings:
      Opt[List[ProposerSlashing, Limit MAX_PROPOSER_SLASHINGS]]
    attester_slashings:
      Opt[List[phase0.AttesterSlashing, Limit MAX_ATTESTER_SLASHINGS]]
    attestations: Opt[List[phase0.Attestation, Limit MAX_ATTESTATIONS]]
    deposits: Opt[List[Deposit, Limit MAX_DEPOSITS]]
    voluntary_exits: Opt[List[SignedVoluntaryExit, Limit MAX_VOLUNTARY_EXITS]]
    sync_aggregate: Opt[SyncAggregate]
    execution_payload: Opt[RestExecutionPayload]
    bls_to_execution_changes: Opt[SignedBLSToExecutionChangeList]
    blob_kzg_commitments: Opt[KzgCommitments]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "randao_reveal":
      if randao_reveal.isSome():
        reader.raiseUnexpectedField("Multiple `randao_reveal` fields found",
                                    "RestPublishedBeaconBlockBody")
      randao_reveal = Opt.some(reader.readValue(ValidatorSig))
    of "eth1_data":
      if eth1_data.isSome():
        reader.raiseUnexpectedField("Multiple `eth1_data` fields found",
                                    "RestPublishedBeaconBlockBody")
      eth1_data = Opt.some(reader.readValue(Eth1Data))
    of "graffiti":
      if graffiti.isSome():
        reader.raiseUnexpectedField("Multiple `graffiti` fields found",
                                    "RestPublishedBeaconBlockBody")
      graffiti = Opt.some(reader.readValue(GraffitiBytes))
    of "proposer_slashings":
      if proposer_slashings.isSome():
        reader.raiseUnexpectedField(
          "Multiple `proposer_slashings` fields found",
          "RestPublishedBeaconBlockBody")
      proposer_slashings = Opt.some(
        reader.readValue(List[ProposerSlashing, Limit MAX_PROPOSER_SLASHINGS]))
    of "attester_slashings":
      if attester_slashings.isSome():
        reader.raiseUnexpectedField(
          "Multiple `attester_slashings` fields found",
          "RestPublishedBeaconBlockBody")
      attester_slashings = Opt.some(
        reader.readValue(
          List[phase0.AttesterSlashing, Limit MAX_ATTESTER_SLASHINGS]))
    of "attestations":
      if attestations.isSome():
        reader.raiseUnexpectedField("Multiple `attestations` fields found",
                                    "RestPublishedBeaconBlockBody")
      attestations = Opt.some(
        reader.readValue(List[phase0.Attestation, Limit MAX_ATTESTATIONS]))
    of "deposits":
      if deposits.isSome():
        reader.raiseUnexpectedField("Multiple `deposits` fields found",
                                    "RestPublishedBeaconBlockBody")
      deposits = Opt.some(reader.readValue(List[Deposit, Limit MAX_DEPOSITS]))
    of "voluntary_exits":
      if voluntary_exits.isSome():
        reader.raiseUnexpectedField("Multiple `voluntary_exits` fields found",
                                    "RestPublishedBeaconBlockBody")
      voluntary_exits = Opt.some(
        reader.readValue(List[SignedVoluntaryExit, Limit MAX_VOLUNTARY_EXITS]))
    of "sync_aggregate":
      if sync_aggregate.isSome():
        reader.raiseUnexpectedField("Multiple `sync_aggregate` fields found",
                                    "RestPublishedBeaconBlockBody")
      sync_aggregate = Opt.some(reader.readValue(SyncAggregate))
    of "execution_payload":
      if execution_payload.isSome():
        reader.raiseUnexpectedField("Multiple `execution_payload` fields found",
                                    "RestPublishedBeaconBlockBody")
      execution_payload = Opt.some(reader.readValue(RestExecutionPayload))
    of "bls_to_execution_changes":
      if bls_to_execution_changes.isSome():
        reader.raiseUnexpectedField("Multiple `bls_to_execution_changes` fields found",
                                    "RestPublishedBeaconBlockBody")
      bls_to_execution_changes = Opt.some(
        reader.readValue(SignedBLSToExecutionChangeList))
    of "blob_kzg_commitments":
      if blob_kzg_commitments.isSome():
        reader.raiseUnexpectedField("Multiple `blob_kzg_commitments` fields found",
                                    "RestPublishedBeaconBlockBody")
      blob_kzg_commitments = Opt.some(reader.readValue(KzgCommitments))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if randao_reveal.isNone():
    reader.raiseUnexpectedValue("Field `randao_reveal` is missing")
  if eth1_data.isNone():
    reader.raiseUnexpectedValue("Field `eth1_data` is missing")
  if graffiti.isNone():
    reader.raiseUnexpectedValue("Field `graffiti` is missing")
  if proposer_slashings.isNone():
    reader.raiseUnexpectedValue("Field `proposer_slashings` is missing")
  if attester_slashings.isNone():
    reader.raiseUnexpectedValue("Field `attester_slashings` is missing")
  if attestations.isNone():
    reader.raiseUnexpectedValue("Field `attestations` is missing")
  if deposits.isNone():
    reader.raiseUnexpectedValue("Field `deposits` is missing")
  if voluntary_exits.isNone():
    reader.raiseUnexpectedValue("Field `voluntary_exits` is missing")

  let bodyKind =
    if execution_payload.isSome() and
       execution_payload.get().blob_gas_used.isSome() and
       blob_kzg_commitments.isSome():
      ConsensusFork.Deneb
    elif execution_payload.isSome() and
         execution_payload.get().withdrawals.isSome() and
         bls_to_execution_changes.isSome() and
         sync_aggregate.isSome():
      ConsensusFork.Capella
    elif execution_payload.isSome() and sync_aggregate.isSome():
      ConsensusFork.Bellatrix
    elif execution_payload.isNone() and sync_aggregate.isSome():
      ConsensusFork.Altair
    else:
      ConsensusFork.Phase0

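  # Note on the fork detection above (descriptive only): since the published
  # body carries no explicit version here, the fork is inferred from which
  # optional fields are present - `blob_gas_used` plus `blob_kzg_commitments`
  # imply Deneb, `withdrawals` plus `bls_to_execution_changes` imply Capella,
  # an execution payload alone implies Bellatrix, a sync aggregate without a
  # payload implies Altair, and anything else falls back to phase0.
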
  template ep_src: auto = execution_payload.get()
  template copy_ep_bellatrix(ep_dst: auto) =
    assign(ep_dst.parent_hash, ep_src.parent_hash)
    assign(ep_dst.fee_recipient, ep_src.fee_recipient)
    assign(ep_dst.state_root, ep_src.state_root)
    assign(ep_dst.receipts_root, ep_src.receipts_root)
    assign(ep_dst.logs_bloom, ep_src.logs_bloom)
    assign(ep_dst.prev_randao, ep_src.prev_randao)
    assign(ep_dst.block_number, ep_src.block_number)
    assign(ep_dst.gas_limit, ep_src.gas_limit)
    assign(ep_dst.gas_used, ep_src.gas_used)
    assign(ep_dst.timestamp, ep_src.timestamp)
    assign(ep_dst.extra_data, ep_src.extra_data)
    assign(ep_dst.base_fee_per_gas, ep_src.base_fee_per_gas)
    assign(ep_dst.block_hash, ep_src.block_hash)
    assign(ep_dst.transactions, ep_src.transactions)

  case bodyKind
  of ConsensusFork.Phase0:
    value = RestPublishedBeaconBlockBody(
      kind: ConsensusFork.Phase0,
      phase0Body: phase0.BeaconBlockBody(
        randao_reveal: randao_reveal.get(),
        eth1_data: eth1_data.get(),
        graffiti: graffiti.get(),
        proposer_slashings: proposer_slashings.get(),
        attester_slashings: attester_slashings.get(),
        attestations: attestations.get(),
        deposits: deposits.get(),
        voluntary_exits: voluntary_exits.get()
      )
    )
  of ConsensusFork.Altair:
    value = RestPublishedBeaconBlockBody(
      kind: ConsensusFork.Altair,
      altairBody: altair.BeaconBlockBody(
        randao_reveal: randao_reveal.get(),
        eth1_data: eth1_data.get(),
        graffiti: graffiti.get(),
        proposer_slashings: proposer_slashings.get(),
        attester_slashings: attester_slashings.get(),
        attestations: attestations.get(),
        deposits: deposits.get(),
        voluntary_exits: voluntary_exits.get(),
        sync_aggregate: sync_aggregate.get()
      )
    )
  of ConsensusFork.Bellatrix:
    value = RestPublishedBeaconBlockBody(
      kind: ConsensusFork.Bellatrix,
      bellatrixBody: bellatrix.BeaconBlockBody(
        randao_reveal: randao_reveal.get(),
        eth1_data: eth1_data.get(),
        graffiti: graffiti.get(),
        proposer_slashings: proposer_slashings.get(),
        attester_slashings: attester_slashings.get(),
        attestations: attestations.get(),
        deposits: deposits.get(),
        voluntary_exits: voluntary_exits.get(),
        sync_aggregate: sync_aggregate.get(),
      )
    )
    copy_ep_bellatrix(value.bellatrixBody.execution_payload)
  of ConsensusFork.Capella:
    value = RestPublishedBeaconBlockBody(
      kind: ConsensusFork.Capella,
      capellaBody: capella.BeaconBlockBody(
        randao_reveal: randao_reveal.get(),
        eth1_data: eth1_data.get(),
        graffiti: graffiti.get(),
        proposer_slashings: proposer_slashings.get(),
        attester_slashings: attester_slashings.get(),
        attestations: attestations.get(),
        deposits: deposits.get(),
        voluntary_exits: voluntary_exits.get(),
        sync_aggregate: sync_aggregate.get(),
        bls_to_execution_changes: bls_to_execution_changes.get()
      )
    )
    copy_ep_bellatrix(value.capellaBody.execution_payload)
    assign(
      value.capellaBody.execution_payload.withdrawals,
      ep_src.withdrawals.get())
  of ConsensusFork.Deneb:
    value = RestPublishedBeaconBlockBody(
      kind: ConsensusFork.Deneb,
      denebBody: deneb.BeaconBlockBody(
        randao_reveal: randao_reveal.get(),
        eth1_data: eth1_data.get(),
        graffiti: graffiti.get(),
        proposer_slashings: proposer_slashings.get(),
        attester_slashings: attester_slashings.get(),
        attestations: attestations.get(),
        deposits: deposits.get(),
        voluntary_exits: voluntary_exits.get(),
        sync_aggregate: sync_aggregate.get(),
        bls_to_execution_changes: bls_to_execution_changes.get(),
        blob_kzg_commitments: blob_kzg_commitments.get()
      )
    )
    copy_ep_bellatrix(value.denebBody.execution_payload)
    assign(
      value.denebBody.execution_payload.withdrawals,
      ep_src.withdrawals.get())
    assign(
      value.denebBody.execution_payload.blob_gas_used,
      ep_src.blob_gas_used.get())
    assign(
      value.denebBody.execution_payload.excess_blob_gas,
      ep_src.excess_blob_gas.get())
  of ConsensusFork.Electra:
    value = RestPublishedBeaconBlockBody(
      kind: ConsensusFork.Electra,
      electraBody: electra.BeaconBlockBody(
        randao_reveal: randao_reveal.get(),
        eth1_data: eth1_data.get(),
        graffiti: graffiti.get(),
        proposer_slashings: proposer_slashings.get(),
        #attester_slashings: attester_slashings.get(),
        #attestations: attestations.get(),
        deposits: deposits.get(),
        voluntary_exits: voluntary_exits.get(),
        sync_aggregate: sync_aggregate.get(),
        bls_to_execution_changes: bls_to_execution_changes.get(),
        blob_kzg_commitments: blob_kzg_commitments.get()
      )
    )
    copy_ep_bellatrix(value.electraBody.execution_payload)
    assign(
      value.electraBody.execution_payload.withdrawals,
      ep_src.withdrawals.get())
    assign(
      value.electraBody.execution_payload.blob_gas_used,
      ep_src.blob_gas_used.get())
    assign(
      value.electraBody.execution_payload.excess_blob_gas,
      ep_src.excess_blob_gas.get())

    debugComment "electra support missing, including attslashing/atts"

## RestPublishedBeaconBlock
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestPublishedBeaconBlock) {.
    raises: [IOError, SerializationError].} =
  var
    slot: Opt[Slot]
    proposer_index: Opt[uint64]
    parent_root: Opt[Eth2Digest]
    state_root: Opt[Eth2Digest]
    blockBody: Opt[RestPublishedBeaconBlockBody]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "slot":
      if slot.isSome():
        reader.raiseUnexpectedField("Multiple `slot` fields found",
                                    "RestPublishedBeaconBlock")
      slot = Opt.some(reader.readValue(Slot))
    of "proposer_index":
      if proposer_index.isSome():
        reader.raiseUnexpectedField("Multiple `proposer_index` fields found",
                                    "RestPublishedBeaconBlock")
      proposer_index = Opt.some(reader.readValue(uint64))
    of "parent_root":
      if parent_root.isSome():
        reader.raiseUnexpectedField("Multiple `parent_root` fields found",
                                    "RestPublishedBeaconBlock")
      parent_root = Opt.some(reader.readValue(Eth2Digest))
    of "state_root":
      if state_root.isSome():
        reader.raiseUnexpectedField("Multiple `state_root` fields found",
                                    "RestPublishedBeaconBlock")
      state_root = Opt.some(reader.readValue(Eth2Digest))
    of "body":
      if blockBody.isSome():
        reader.raiseUnexpectedField("Multiple `body` fields found",
                                    "RestPublishedBeaconBlock")
      blockBody = Opt.some(reader.readValue(RestPublishedBeaconBlockBody))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if slot.isNone():
    reader.raiseUnexpectedValue("Field `slot` is missing")
  if proposer_index.isNone():
    reader.raiseUnexpectedValue("Field `proposer_index` is missing")
  if parent_root.isNone():
    reader.raiseUnexpectedValue("Field `parent_root` is missing")
  if state_root.isNone():
    reader.raiseUnexpectedValue("Field `state_root` is missing")
  if blockBody.isNone():
    reader.raiseUnexpectedValue("Field `body` is missing")

  let body = blockBody.get()
  value = RestPublishedBeaconBlock(
    case body.kind
    of ConsensusFork.Phase0:
      ForkedBeaconBlock.init(
        phase0.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.phase0Body
        )
      )
    of ConsensusFork.Altair:
      ForkedBeaconBlock.init(
        altair.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.altairBody
        )
      )
    of ConsensusFork.Bellatrix:
      ForkedBeaconBlock.init(
        bellatrix.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.bellatrixBody
        )
      )
    of ConsensusFork.Capella:
      ForkedBeaconBlock.init(
        capella.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.capellaBody
        )
      )
    of ConsensusFork.Deneb:
      ForkedBeaconBlock.init(
        deneb.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.denebBody
        )
      )
    of ConsensusFork.Electra:
      ForkedBeaconBlock.init(
        electra.BeaconBlock(
          slot: slot.get(),
          proposer_index: proposer_index.get(),
          parent_root: parent_root.get(),
          state_root: state_root.get(),
          body: body.electraBody
        )
      )
  )

## RestPublishedSignedBeaconBlock
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestPublishedSignedBeaconBlock) {.
    raises: [IOError, SerializationError].} =
  var signature: Opt[ValidatorSig]
  var message: Opt[RestPublishedBeaconBlock]
  for fieldName in readObjectFields(reader):
    case fieldName
    of "message":
      if message.isSome():
        reader.raiseUnexpectedField("Multiple `message` fields found",
                                    "RestPublishedSignedBeaconBlock")
      message = Opt.some(reader.readValue(RestPublishedBeaconBlock))
    of "signature":
      if signature.isSome():
        reader.raiseUnexpectedField("Multiple `signature` fields found",
                                    "RestPublishedSignedBeaconBlock")
      signature = Opt.some(reader.readValue(ValidatorSig))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if signature.isNone():
    reader.raiseUnexpectedValue("Field `signature` is missing")
  if message.isNone():
    reader.raiseUnexpectedValue("Field `message` is missing")

  let blck = ForkedBeaconBlock(message.get())
  value = RestPublishedSignedBeaconBlock ForkedSignedBeaconBlock.init(
    blck, blck.hash_tree_root(), signature.get())

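# Illustrative pre-Deneb publish payload accepted above (not normative):
#   {"message": { ...beacon block... }, "signature": "0x..."}
# The block root is recomputed locally via `hash_tree_root` rather than being
# taken from the request.
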
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestPublishedSignedBlockContents) {.
    raises: [IOError, SerializationError].} =
  var signature: Opt[ValidatorSig]
  var message: Opt[RestPublishedBeaconBlock]
  var signed_message: Opt[RestPublishedSignedBeaconBlock]
  var signed_block_data: Opt[JsonString]
  var kzg_proofs: Opt[deneb.KzgProofs]
  var blobs: Opt[deneb.Blobs]

  # Pre-Deneb, there were always the same two top-level fields
  # ('signature' and 'message'). For Deneb, there's a different set of
  # top-level fields: 'signed_block', 'kzg_proofs' and 'blobs'. The
  # first of these is the same as the pre-Deneb object.
  for fieldName in readObjectFields(reader):
    case fieldName
    of "message":
      if message.isSome():
        reader.raiseUnexpectedField("Multiple `message` fields found",
                                    "RestPublishedSignedBlockContents")
      message = Opt.some(reader.readValue(RestPublishedBeaconBlock))
    of "signature":
      if signature.isSome():
        reader.raiseUnexpectedField("Multiple `signature` fields found",
                                    "RestPublishedSignedBlockContents")
      signature = Opt.some(reader.readValue(ValidatorSig))
    of "signed_block":
      if signed_block_data.isSome():
        reader.raiseUnexpectedField("Multiple `signed_block` fields found",
                                    "RestPublishedSignedBlockContents")
      signed_block_data = Opt.some(reader.readValue(JsonString))
      if message.isSome() or signature.isSome():
        reader.raiseUnexpectedField(
          "Found `signed_block` field alongside message or signature fields",
          "RestPublishedSignedBlockContents")
      signed_message =
        try:
          Opt.some(RestJson.decode(string(signed_block_data.get()),
                                   RestPublishedSignedBeaconBlock,
                                   requireAllFields = true,
                                   allowUnknownFields = true))
        except SerializationError:
          Opt.none(RestPublishedSignedBeaconBlock)
      if signed_message.isNone():
        reader.raiseUnexpectedValue("Incorrect signed_block format")
    of "kzg_proofs":
      if kzg_proofs.isSome():
        reader.raiseUnexpectedField(
          "Multiple `kzg_proofs` fields found",
          "RestPublishedSignedBlockContents")
      if signature.isSome():
        reader.raiseUnexpectedField(
          "Found `kzg_proofs` field alongside signature field",
          "RestPublishedSignedBlockContents")
      kzg_proofs = Opt.some(reader.readValue(deneb.KzgProofs))
    of "blobs":
      if blobs.isSome():
        reader.raiseUnexpectedField(
          "Multiple `blobs` fields found",
          "RestPublishedSignedBlockContents")
      if signature.isSome():
        reader.raiseUnexpectedField(
          "Found `blobs` field alongside signature field",
          "RestPublishedSignedBlockContents")
      blobs = Opt.some(reader.readValue(deneb.Blobs))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if signed_message.isSome():
    if message.isSome():
      reader.raiseUnexpectedValue("Field `message` found but unsupported")
    if signature.isSome():
      reader.raiseUnexpectedValue("Field `signature` found but unsupported")
    if kzg_proofs.isNone():
      reader.raiseUnexpectedValue("Field `kzg_proofs` is missing")
    if blobs.isNone():
      reader.raiseUnexpectedValue("Field `blobs` is missing")
    if kzg_proofs.get.len != blobs.get.len:
      reader.raiseUnexpectedValue("Length mismatch of `kzg_proofs` and `blobs`")

    withBlck(distinctBase(signed_message.get)):
      when consensusFork >= ConsensusFork.Deneb:
        template kzg_commitments: untyped =
          forkyBlck.message.body.blob_kzg_commitments
        if kzg_proofs.get().len != kzg_commitments.len:
          reader.raiseUnexpectedValue(
            "Length mismatch of `kzg_proofs` and `blob_kzg_commitments`")
        value = RestPublishedSignedBlockContents.init(
          consensusFork.BlockContents(
            `block`: forkyBlck.message,
            kzg_proofs: kzg_proofs.get(),
            blobs: blobs.get()),
          forkyBlck.root, forkyBlck.signature)
      else:
        reader.raiseUnexpectedValue("`signed_message` supported post-Deneb")
  else:
    if signature.isNone():
      reader.raiseUnexpectedValue("Field `signature` is missing")
    if message.isNone():
      reader.raiseUnexpectedValue("Field `message` is missing")
    if kzg_proofs.isSome():
      reader.raiseUnexpectedValue("Field `kzg_proofs` found but unsupported")
    if blobs.isSome():
      reader.raiseUnexpectedValue("Field `blobs` found but unsupported")

    withBlck(distinctBase(message.get)):
      when consensusFork < ConsensusFork.Deneb:
        value = RestPublishedSignedBlockContents.init(
          forkyBlck, forkyBlck.hash_tree_root(), signature.get)
      else:
        reader.raiseUnexpectedValue("`message` support stopped at Deneb")

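# Illustrative Deneb-and-later publish payload accepted above (not normative):
#   {"signed_block": {"message": {...}, "signature": "0x..."},
#    "kzg_proofs": [...], "blobs": [...]}
# `kzg_proofs` and `blobs` must have equal length and must also match the
# number of `blob_kzg_commitments` inside the signed block.
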
## ForkedSignedBeaconBlock
proc readValue*(reader: var JsonReader[RestJson],
                value: var ForkedSignedBeaconBlock) {.
    raises: [IOError, SerializationError].} =
  var
    version: Opt[ConsensusFork]
    data: Opt[JsonString]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome():
        reader.raiseUnexpectedField("Multiple version fields found",
                                    "ForkedSignedBeaconBlock")
      let vres = reader.readValue(string)
      case vres
      of "phase0":
        version = Opt.some(ConsensusFork.Phase0)
      of "altair":
        version = Opt.some(ConsensusFork.Altair)
      of "bellatrix":
        version = Opt.some(ConsensusFork.Bellatrix)
      of "capella":
        version = Opt.some(ConsensusFork.Capella)
      of "deneb":
        version = Opt.some(ConsensusFork.Deneb)
      else:
        reader.raiseUnexpectedValue("Incorrect version field value")
    of "data":
      if data.isSome():
        reader.raiseUnexpectedField("Multiple data fields found",
                                    "ForkedSignedBeaconBlock")
      data = Opt.some(reader.readValue(JsonString))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if version.isNone():
    reader.raiseUnexpectedValue("Field version is missing")
  if data.isNone():
    reader.raiseUnexpectedValue("Field data is missing")

  case version.get():
  of ConsensusFork.Phase0:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        phase0.SignedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError:
        reader.raiseUnexpectedValue("Incorrect phase0 block format")

    value = ForkedSignedBeaconBlock.init(res)
  of ConsensusFork.Altair:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        altair.SignedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError:
        reader.raiseUnexpectedValue("Incorrect altair block format")

    value = ForkedSignedBeaconBlock.init(res)
  of ConsensusFork.Bellatrix:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        bellatrix.SignedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError:
        reader.raiseUnexpectedValue("Incorrect bellatrix block format")

    value = ForkedSignedBeaconBlock.init(res)
  of ConsensusFork.Capella:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        capella.SignedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError:
        reader.raiseUnexpectedValue("Incorrect capella block format")

    value = ForkedSignedBeaconBlock.init(res)
  of ConsensusFork.Deneb:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        deneb.SignedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError:
        reader.raiseUnexpectedValue("Incorrect deneb block format")

    value = ForkedSignedBeaconBlock.init(res)
  of ConsensusFork.Electra:
    let res =
      try:
        RestJson.decode(string(data.get()),
                        electra.SignedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError:
        reader.raiseUnexpectedValue("Incorrect electra block format")

    value = ForkedSignedBeaconBlock.init(res)
  withBlck(value):
    forkyBlck.root = hash_tree_root(forkyBlck.message)

proc writeValue*(
    writer: var JsonWriter[RestJson], value: ForkedSignedBeaconBlock
) {.raises: [IOError].} =
  writer.beginRecord()
  case value.kind
  of ConsensusFork.Phase0:
    writer.writeField("version", "phase0")
    writer.writeField("data", value.phase0Data)
  of ConsensusFork.Altair:
    writer.writeField("version", "altair")
    writer.writeField("data", value.altairData)
  of ConsensusFork.Bellatrix:
    writer.writeField("version", "bellatrix")
    writer.writeField("data", value.bellatrixData)
  of ConsensusFork.Capella:
    writer.writeField("version", "capella")
    writer.writeField("data", value.capellaData)
  of ConsensusFork.Deneb:
    writer.writeField("version", "deneb")
    writer.writeField("data", value.denebData)
  of ConsensusFork.Electra:
    writer.writeField("version", "electra")
    writer.writeField("data", value.electraData)
  writer.endRecord()

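# Illustrative output of the writer above (not normative):
#   {"version": "capella", "data": { ...capella.SignedBeaconBlock... }}
# which mirrors the versioned envelope accepted by the corresponding
# `readValue`.
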
# ForkedHashedBeaconState is used where a `ForkedBeaconState` normally would
# be used, mainly because caching the hash early on is easier to do
proc readValue*(reader: var JsonReader[RestJson],
                value: var ForkedHashedBeaconState) {.
    raises: [IOError, SerializationError].} =
  var
    version: Opt[ConsensusFork]
    data: Opt[JsonString]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome():
        reader.raiseUnexpectedField("Multiple version fields found",
                                    "ForkedBeaconState")
      let vres = reader.readValue(string)
      version = case vres
        of "phase0": Opt.some(ConsensusFork.Phase0)
        of "altair": Opt.some(ConsensusFork.Altair)
        of "bellatrix": Opt.some(ConsensusFork.Bellatrix)
        of "capella": Opt.some(ConsensusFork.Capella)
        of "deneb": Opt.some(ConsensusFork.Deneb)
        of "electra": Opt.some(ConsensusFork.Electra)
        else: reader.raiseUnexpectedValue("Incorrect version field value")
    of "data":
      if data.isSome():
        reader.raiseUnexpectedField("Multiple data fields found",
                                    "ForkedBeaconState")
      data = Opt.some(reader.readValue(JsonString))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if version.isNone():
    reader.raiseUnexpectedValue("Field version is missing")
  if data.isNone():
    reader.raiseUnexpectedValue("Field data is missing")

  # Use a temporary to avoid stack instances and `value` mutation in case of
  # exception
  let
    tmp = (ref ForkedHashedBeaconState)(kind: version.get())

  template toValue(field: untyped) =
    if tmp[].kind == value.kind:
      assign(value.field, tmp[].field)
    else:
      value = tmp[] # slow, but rare (hopefully)
    value.field.root = hash_tree_root(value.field.data)

  case version.get():
  of ConsensusFork.Phase0:
    try:
      tmp[].phase0Data.data = RestJson.decode(
        string(data.get()),
        phase0.BeaconState,
        requireAllFields = true,
        allowUnknownFields = true)
    except SerializationError:
      reader.raiseUnexpectedValue("Incorrect phase0 beacon state format")

    toValue(phase0Data)
  of ConsensusFork.Altair:
    try:
      tmp[].altairData.data = RestJson.decode(
        string(data.get()),
        altair.BeaconState,
        requireAllFields = true,
        allowUnknownFields = true)
    except SerializationError:
      reader.raiseUnexpectedValue("Incorrect altair beacon state format")

    toValue(altairData)
  of ConsensusFork.Bellatrix:
    try:
      tmp[].bellatrixData.data = RestJson.decode(
        string(data.get()),
        bellatrix.BeaconState,
        requireAllFields = true,
        allowUnknownFields = true)
    except SerializationError:
      reader.raiseUnexpectedValue("Incorrect bellatrix beacon state format")
    toValue(bellatrixData)
  of ConsensusFork.Capella:
    try:
      tmp[].capellaData.data = RestJson.decode(
        string(data.get()),
        capella.BeaconState,
        requireAllFields = true,
        allowUnknownFields = true)
    except SerializationError:
      reader.raiseUnexpectedValue("Incorrect capella beacon state format")
    toValue(capellaData)
  of ConsensusFork.Deneb:
    try:
      tmp[].denebData.data = RestJson.decode(
        string(data.get()),
        deneb.BeaconState,
        requireAllFields = true,
        allowUnknownFields = true)
    except SerializationError:
      reader.raiseUnexpectedValue("Incorrect deneb beacon state format")
    toValue(denebData)
  of ConsensusFork.Electra:
    try:
      tmp[].electraData.data = RestJson.decode(
        string(data.get()),
        electra.BeaconState,
        requireAllFields = true,
        allowUnknownFields = true)
    except SerializationError:
      reader.raiseUnexpectedValue("Incorrect electra beacon state format")
    toValue(electraData)

proc writeValue*(
    writer: var JsonWriter[RestJson], value: ForkedHashedBeaconState
) {.raises: [IOError].} =
  writer.beginRecord()
  case value.kind
  of ConsensusFork.Phase0:
    writer.writeField("version", "phase0")
    writer.writeField("data", value.phase0Data.data)
  of ConsensusFork.Altair:
    writer.writeField("version", "altair")
    writer.writeField("data", value.altairData.data)
  of ConsensusFork.Bellatrix:
    writer.writeField("version", "bellatrix")
    writer.writeField("data", value.bellatrixData.data)
  of ConsensusFork.Capella:
    writer.writeField("version", "capella")
    writer.writeField("data", value.capellaData.data)
  of ConsensusFork.Deneb:
    writer.writeField("version", "deneb")
    writer.writeField("data", value.denebData.data)
  of ConsensusFork.Electra:
    writer.writeField("version", "electra")
    writer.writeField("data", value.electraData.data)
  writer.endRecord()

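# Sketch of the envelope emitted by the writer above (state fields elided):
#   {"version": "deneb", "data": {...deneb.BeaconState fields...}}
# The matching reader dispatches on the same "version" string to pick the
# fork-specific BeaconState type.
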
## SomeForkedLightClientObject
proc readValue*[T: SomeForkedLightClientObject](
    reader: var JsonReader[RestJson], value: var T) {.
    raises: [IOError, SerializationError].} =
  var
    version: Opt[ConsensusFork]
    data: Opt[JsonString]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome:
        reader.raiseUnexpectedField("Multiple version fields found", T.name)
      let consensusFork =
        ConsensusFork.decodeString(reader.readValue(string)).valueOr:
          reader.raiseUnexpectedValue("Incorrect version field value")
      version.ok consensusFork
    of "data":
      if data.isSome:
        reader.raiseUnexpectedField("Multiple data fields found", T.name)
      data.ok reader.readValue(JsonString)
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if version.isNone:
    reader.raiseUnexpectedValue("Field version is missing")
  if data.isNone:
    reader.raiseUnexpectedValue("Field data is missing")

  withLcDataFork(lcDataForkAtConsensusFork(version.get)):
    when lcDataFork > LightClientDataFork.None:
      try:
        value = T.init(RestJson.decode(
          string(data.get()),
          T.Forky(lcDataFork),
          requireAllFields = true,
          allowUnknownFields = true))
      except SerializationError:
        reader.raiseUnexpectedValue("Incorrect format (" & $lcDataFork & ")")
    else:
      reader.raiseUnexpectedValue("Unsupported fork " & $version.get)

## Web3SignerRequest
proc writeValue*(
    writer: var JsonWriter[RestJson], value: Web3SignerRequest
) {.raises: [IOError].} =
  writer.beginRecord()
  case value.kind
  of Web3SignerRequestKind.AggregationSlot:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "AGGREGATION_SLOT")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("aggregation_slot", value.aggregationSlot)
  of Web3SignerRequestKind.AggregateAndProof:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "AGGREGATE_AND_PROOF")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("aggregate_and_proof", value.aggregateAndProof)
  of Web3SignerRequestKind.Attestation:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "ATTESTATION")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("attestation", value.attestation)
  of Web3SignerRequestKind.BlockV2:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "BLOCK_V2")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)

    # https://github.com/Consensys/web3signer/blob/2d956c019663ac70f60640d23196d1d321c1b1fa/core/src/main/resources/openapi-specs/eth2/signing/schemas.yaml#L483-L500
    writer.writeField("beacon_block", value.beaconBlockHeader)

    if isSome(value.proofs):
      writer.writeField("proofs", value.proofs.get())
  of Web3SignerRequestKind.Deposit:
    writer.writeField("type", "DEPOSIT")
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("deposit", value.deposit)
  of Web3SignerRequestKind.RandaoReveal:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "RANDAO_REVEAL")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("randao_reveal", value.randaoReveal)
  of Web3SignerRequestKind.VoluntaryExit:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "VOLUNTARY_EXIT")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("voluntary_exit", value.voluntaryExit)
  of Web3SignerRequestKind.SyncCommitteeMessage:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "SYNC_COMMITTEE_MESSAGE")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("sync_committee_message", value.syncCommitteeMessage)
  of Web3SignerRequestKind.SyncCommitteeSelectionProof:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "SYNC_COMMITTEE_SELECTION_PROOF")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("sync_aggregator_selection_data",
                      value.syncAggregatorSelectionData)
  of Web3SignerRequestKind.SyncCommitteeContributionAndProof:
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "SYNC_COMMITTEE_CONTRIBUTION_AND_PROOF")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("contribution_and_proof",
                      value.syncCommitteeContributionAndProof)
  of Web3SignerRequestKind.ValidatorRegistration:
    # https://consensys.github.io/web3signer/web3signer-eth2.html#operation/ETH2_SIGN
    doAssert(value.forkInfo.isSome(),
             "forkInfo should be set for this type of request")
    writer.writeField("type", "VALIDATOR_REGISTRATION")
    writer.writeField("fork_info", value.forkInfo.get())
    if isSome(value.signingRoot):
      writer.writeField("signingRoot", value.signingRoot)
    writer.writeField("validator_registration", value.validatorRegistration)
  writer.endRecord()

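# Sketch of the JSON produced for an AGGREGATION_SLOT signing request (values
# elided); the other request kinds follow the same "type" / "fork_info" /
# optional "signingRoot" / payload-field pattern:
#   {"type": "AGGREGATION_SLOT", "fork_info": {...}, "signingRoot": "0x...",
#    "aggregation_slot": {...}}
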
proc readValue*(reader: var JsonReader[RestJson],
                value: var Web3SignerRequest) {.
    raises: [IOError, SerializationError].} =
  var
    requestKind: Opt[Web3SignerRequestKind]
    forkInfo: Opt[Web3SignerForkInfo]
    signingRoot: Opt[Eth2Digest]
    data: Opt[JsonString]
    proofs: seq[Web3SignerMerkleProof]
    dataName: string

  for fieldName in readObjectFields(reader):
    case fieldName
    of "type":
      if requestKind.isSome():
        reader.raiseUnexpectedField("Multiple `type` fields found",
                                    "Web3SignerRequest")
      let vres = reader.readValue(string)
      requestKind = Opt.some(
        case vres
        of "AGGREGATION_SLOT":
          Web3SignerRequestKind.AggregationSlot
        of "AGGREGATE_AND_PROOF":
          Web3SignerRequestKind.AggregateAndProof
        of "ATTESTATION":
          Web3SignerRequestKind.Attestation
        of "BLOCK_V2":
          Web3SignerRequestKind.BlockV2
        of "DEPOSIT":
          Web3SignerRequestKind.Deposit
        of "RANDAO_REVEAL":
          Web3SignerRequestKind.RandaoReveal
        of "VOLUNTARY_EXIT":
          Web3SignerRequestKind.VoluntaryExit
        of "SYNC_COMMITTEE_MESSAGE":
          Web3SignerRequestKind.SyncCommitteeMessage
        of "SYNC_COMMITTEE_SELECTION_PROOF":
          Web3SignerRequestKind.SyncCommitteeSelectionProof
        of "SYNC_COMMITTEE_CONTRIBUTION_AND_PROOF":
          Web3SignerRequestKind.SyncCommitteeContributionAndProof
        of "VALIDATOR_REGISTRATION":
          Web3SignerRequestKind.ValidatorRegistration
        else:
          reader.raiseUnexpectedValue("Unexpected `type` value")
      )
    of "fork_info":
      if forkInfo.isSome():
        reader.raiseUnexpectedField("Multiple `fork_info` fields found",
                                    "Web3SignerRequest")
      forkInfo = Opt.some(reader.readValue(Web3SignerForkInfo))
    of "signingRoot":
      if signingRoot.isSome():
        reader.raiseUnexpectedField("Multiple `signingRoot` fields found",
                                    "Web3SignerRequest")
      signingRoot = Opt.some(reader.readValue(Eth2Digest))
    of "proofs":
      let newProofs = reader.readValue(seq[Web3SignerMerkleProof])
      proofs.add(newProofs)
    of "aggregation_slot", "aggregate_and_proof", "block", "beacon_block",
       "randao_reveal", "voluntary_exit", "sync_committee_message",
       "sync_aggregator_selection_data", "contribution_and_proof",
       "attestation", "deposit", "validator_registration":
      if data.isSome():
        reader.raiseUnexpectedField("Multiple data fields found",
                                    "Web3SignerRequest")
      dataName = fieldName
      data = Opt.some(reader.readValue(JsonString))

    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if requestKind.isNone():
    reader.raiseUnexpectedValue("Field `type` is missing")

  value =
    case requestKind.get()
    of Web3SignerRequestKind.AggregationSlot:
      if dataName != "aggregation_slot":
        reader.raiseUnexpectedValue("Field `aggregation_slot` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerAggregationSlotData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `aggregation_slot` format")
          res.get()
      Web3SignerRequest(kind: Web3SignerRequestKind.AggregationSlot,
        forkInfo: forkInfo, signingRoot: signingRoot, aggregationSlot: data
      )
    of Web3SignerRequestKind.AggregateAndProof:
      if dataName != "aggregate_and_proof":
        reader.raiseUnexpectedValue("Field `aggregate_and_proof` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(phase0.AggregateAndProof, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `aggregate_and_proof` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.AggregateAndProof,
        forkInfo: forkInfo, signingRoot: signingRoot, aggregateAndProof: data
      )
    of Web3SignerRequestKind.Attestation:
      if dataName != "attestation":
        reader.raiseUnexpectedValue("Field `attestation` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(AttestationData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `attestation` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.Attestation,
        forkInfo: forkInfo, signingRoot: signingRoot, attestation: data
      )
    of Web3SignerRequestKind.BlockV2:
      # https://github.com/ConsenSys/web3signer/blob/41834a927088f1bde7a097e17d19e954d0058e54/core/src/main/resources/openapi-specs/eth2/signing/schemas.yaml#L421-L425 (branch v22.7.0)
      # It's the "beacon_block" field even when it's not a block, but a header
      if dataName != "beacon_block":
        reader.raiseUnexpectedValue("Field `beacon_block` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerForkedBeaconBlock, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `beacon_block` format")
          res.get()
      if len(proofs) > 0:
        Web3SignerRequest(
          kind: Web3SignerRequestKind.BlockV2,
          forkInfo: forkInfo, signingRoot: signingRoot, beaconBlockHeader: data,
          proofs: Opt.some(proofs)
        )
      else:
        Web3SignerRequest(
          kind: Web3SignerRequestKind.BlockV2,
          forkInfo: forkInfo, signingRoot: signingRoot, beaconBlockHeader: data
        )
    of Web3SignerRequestKind.Deposit:
      if dataName != "deposit":
        reader.raiseUnexpectedValue("Field `deposit` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerDepositData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `deposit` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.Deposit,
        signingRoot: signingRoot, deposit: data
      )
    of Web3SignerRequestKind.RandaoReveal:
      if dataName != "randao_reveal":
        reader.raiseUnexpectedValue("Field `randao_reveal` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerRandaoRevealData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `randao_reveal` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.RandaoReveal,
        forkInfo: forkInfo, signingRoot: signingRoot, randaoReveal: data
      )
    of Web3SignerRequestKind.VoluntaryExit:
      if dataName != "voluntary_exit":
        reader.raiseUnexpectedValue("Field `voluntary_exit` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(VoluntaryExit, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `voluntary_exit` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.VoluntaryExit,
        forkInfo: forkInfo, signingRoot: signingRoot, voluntaryExit: data
      )
    of Web3SignerRequestKind.SyncCommitteeMessage:
      if dataName != "sync_committee_message":
        reader.raiseUnexpectedValue(
          "Field `sync_committee_message` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(Web3SignerSyncCommitteeMessageData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `sync_committee_message` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.SyncCommitteeMessage,
        forkInfo: forkInfo, signingRoot: signingRoot,
        syncCommitteeMessage: data
      )
    of Web3SignerRequestKind.SyncCommitteeSelectionProof:
      if dataName != "sync_aggregator_selection_data":
        reader.raiseUnexpectedValue(
          "Field `sync_aggregator_selection_data` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(SyncAggregatorSelectionData, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `sync_aggregator_selection_data` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.SyncCommitteeSelectionProof,
        forkInfo: forkInfo, signingRoot: signingRoot,
        syncAggregatorSelectionData: data
      )
    of Web3SignerRequestKind.SyncCommitteeContributionAndProof:
      if dataName != "contribution_and_proof":
        reader.raiseUnexpectedValue(
          "Field `contribution_and_proof` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res = decodeJsonString(ContributionAndProof, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `contribution_and_proof` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.SyncCommitteeContributionAndProof,
        forkInfo: forkInfo, signingRoot: signingRoot,
        syncCommitteeContributionAndProof: data
      )
    of Web3SignerRequestKind.ValidatorRegistration:
      if dataName != "validator_registration":
        reader.raiseUnexpectedValue(
          "Field `validator_registration` is missing")
      if forkInfo.isNone():
        reader.raiseUnexpectedValue("Field `fork_info` is missing")
      let data =
        block:
          let res =
            decodeJsonString(Web3SignerValidatorRegistration, data.get())
          if res.isErr():
            reader.raiseUnexpectedValue(
              "Incorrect field `validator_registration` format")
          res.get()
      Web3SignerRequest(
        kind: Web3SignerRequestKind.ValidatorRegistration,
        forkInfo: forkInfo, signingRoot: signingRoot,
        validatorRegistration: data
      )

## RemoteKeystoreStatus
proc writeValue*(
    writer: var JsonWriter[RestJson], value: RemoteKeystoreStatus
) {.raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("status", $value.status)
  if value.message.isSome():
    writer.writeField("message", value.message.get())
  writer.endRecord()

proc readValue*(reader: var JsonReader[RestJson],
                value: var RemoteKeystoreStatus) {.
    raises: [IOError, SerializationError].} =
  var message: Opt[string]
  var status: Opt[KeystoreStatus]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "message":
      if message.isSome():
        reader.raiseUnexpectedField("Multiple `message` fields found",
                                    "RemoteKeystoreStatus")
      message = Opt.some(reader.readValue(string))
    of "status":
      if status.isSome():
        reader.raiseUnexpectedField("Multiple `status` fields found",
                                    "RemoteKeystoreStatus")
      let res = reader.readValue(string)
      status = Opt.some(
        case res
        of "error":
          KeystoreStatus.error
        of "not_active":
          KeystoreStatus.notActive
        of "not_found":
          KeystoreStatus.notFound
        of "deleted":
          KeystoreStatus.deleted
        of "duplicate":
          KeystoreStatus.duplicate
        of "imported":
          KeystoreStatus.imported
        else:
          reader.raiseUnexpectedValue("Invalid `status` value")
      )
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if status.isNone():
    reader.raiseUnexpectedValue("Field `status` is missing")

  value = RemoteKeystoreStatus(status: status.get(), message: message)

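# Sketch of the wire format handled above: {"status": "imported"} or
# {"status": "error", "message": "..."}; "message" is only written when set.
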
## ScryptSalt
proc readValue*(reader: var JsonReader[RestJson], value: var ScryptSalt) {.
    raises: [SerializationError, IOError].} =
  let res = ncrutils.fromHex(reader.readValue(string))
  if len(res) == 0:
    reader.raiseUnexpectedValue("Invalid scrypt salt value")
  value = ScryptSalt(res)

## Pbkdf2Params
proc writeValue*(
    writer: var JsonWriter[RestJson], value: Pbkdf2Params
) {.raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("dklen", JsonString(Base10.toString(value.dklen)))
  writer.writeField("c", JsonString(Base10.toString(value.c)))
  writer.writeField("prf", value.prf)
  writer.writeField("salt", value.salt)
  writer.endRecord()

proc readValue*(reader: var JsonReader[RestJson], value: var Pbkdf2Params) {.
    raises: [SerializationError, IOError].} =
  var
    dklen: Opt[uint64]
    c: Opt[uint64]
    prf: Opt[PrfKind]
    salt: Opt[Pbkdf2Salt]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "dklen":
      if dklen.isSome():
        reader.raiseUnexpectedField("Multiple `dklen` fields found",
                                    "Pbkdf2Params")
      dklen = Opt.some(reader.readValue(uint64))
    of "c":
      if c.isSome():
        reader.raiseUnexpectedField("Multiple `c` fields found",
                                    "Pbkdf2Params")
      c = Opt.some(reader.readValue(uint64))
    of "prf":
      if prf.isSome():
        reader.raiseUnexpectedField("Multiple `prf` fields found",
                                    "Pbkdf2Params")
      prf = Opt.some(reader.readValue(PrfKind))
    of "salt":
      if salt.isSome():
        reader.raiseUnexpectedField("Multiple `salt` fields found",
                                    "Pbkdf2Params")
      salt = Opt.some(reader.readValue(Pbkdf2Salt))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if dklen.isNone():
    reader.raiseUnexpectedValue("Field `dklen` is missing")
  if c.isNone():
    reader.raiseUnexpectedValue("Field `c` is missing")
  if prf.isNone():
    reader.raiseUnexpectedValue("Field `prf` is missing")
  if salt.isNone():
    reader.raiseUnexpectedValue("Field `salt` is missing")

  value = Pbkdf2Params(
    dklen: dklen.get(),
    c: c.get(),
    prf: prf.get(),
    salt: salt.get()
  )

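# Note: "dklen" and "c" are emitted as raw JSON numbers (via Base10 wrapped in
# JsonString) rather than quoted strings, e.g. (sketch, prf/salt elided):
#   {"dklen": 32, "c": 262144, "prf": "...", "salt": "..."}
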
## ScryptParams
proc writeValue*(
    writer: var JsonWriter[RestJson], value: ScryptParams
) {.raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("dklen", JsonString(Base10.toString(value.dklen)))
  writer.writeField("n", JsonString(Base10.toString(uint64(value.n))))
  writer.writeField("p", JsonString(Base10.toString(uint64(value.p))))
  writer.writeField("r", JsonString(Base10.toString(uint64(value.r))))
  writer.writeField("salt", value.salt)
  writer.endRecord()

proc readValue*(reader: var JsonReader[RestJson], value: var ScryptParams) {.
    raises: [SerializationError, IOError].} =
  var
    dklen: Opt[uint64]
    n, p, r: Opt[int]
    salt: Opt[ScryptSalt]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "dklen":
      if dklen.isSome():
        reader.raiseUnexpectedField("Multiple `dklen` fields found",
                                    "ScryptParams")
      dklen = Opt.some(reader.readValue(uint64))
    of "n":
      if n.isSome():
        reader.raiseUnexpectedField("Multiple `n` fields found",
                                    "ScryptParams")
      let res = reader.readValue(int)
      if res < 0:
        reader.raiseUnexpectedValue("Unexpected negative `n` value")
      n = Opt.some(res)
    of "p":
      if p.isSome():
        reader.raiseUnexpectedField("Multiple `p` fields found",
                                    "ScryptParams")
      let res = reader.readValue(int)
      if res < 0:
        reader.raiseUnexpectedValue("Unexpected negative `p` value")
      p = Opt.some(res)
    of "r":
      if r.isSome():
        reader.raiseUnexpectedField("Multiple `r` fields found",
                                    "ScryptParams")
      let res = reader.readValue(int)
      if res < 0:
        reader.raiseUnexpectedValue("Unexpected negative `r` value")
      r = Opt.some(res)
    of "salt":
      if salt.isSome():
        reader.raiseUnexpectedField("Multiple `salt` fields found",
                                    "ScryptParams")
      salt = Opt.some(reader.readValue(ScryptSalt))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if dklen.isNone():
    reader.raiseUnexpectedValue("Field `dklen` is missing")
  if n.isNone():
    reader.raiseUnexpectedValue("Field `n` is missing")
  if p.isNone():
    reader.raiseUnexpectedValue("Field `p` is missing")
  if r.isNone():
    reader.raiseUnexpectedValue("Field `r` is missing")
  if salt.isNone():
    reader.raiseUnexpectedValue("Field `salt` is missing")

  value = ScryptParams(
    dklen: dklen.get(),
    n: n.get(), p: p.get(), r: r.get(),
    salt: salt.get()
  )

## Keystore
proc writeValue*(
    writer: var JsonWriter[RestJson], value: Keystore
) {.error: "keystores must be converted to json with Json.encode(keystore). " &
           "There is no REST-specific encoding" .}

proc readValue*(reader: var JsonReader[RestJson], value: var Keystore) {.
    error: "Keystores must be loaded with `parseKeystore`. " &
           "There is no REST-specific encoding".}

## KeystoresAndSlashingProtection
proc writeValue*(
    writer: var JsonWriter[RestJson], value: KeystoresAndSlashingProtection
) {.raises: [IOError].} =
  writer.beginRecord()
  let keystores =
    block:
      var res: seq[string]
      for keystore in value.keystores:
        let encoded = Json.encode(keystore)
        res.add(encoded)
      res
  writer.writeField("keystores", keystores)
  writer.writeField("passwords", value.passwords)
  if value.slashing_protection.isSome():
    let slashingProtection = RestJson.encode(value.slashing_protection.get)
    writer.writeField("slashing_protection", slashingProtection)
  writer.endRecord()

proc readValue*(reader: var JsonReader[RestJson],
                value: var KeystoresAndSlashingProtection) {.
    raises: [SerializationError, IOError].} =
  var
    strKeystores: seq[string]
    passwords: seq[string]
    strSlashing: Opt[string]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "keystores":
      strKeystores = reader.readValue(seq[string])
    of "passwords":
      passwords = reader.readValue(seq[string])
    of "slashing_protection":
      if strSlashing.isSome():
        reader.raiseUnexpectedField(
          "Multiple `slashing_protection` fields found",
          "KeystoresAndSlashingProtection")
      strSlashing = Opt.some(reader.readValue(string))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  if len(strKeystores) == 0:
    reader.raiseUnexpectedValue("Missing or empty `keystores` value")
  if len(passwords) == 0:
    reader.raiseUnexpectedValue("Missing or empty `passwords` value")

  let keystores =
    block:
      var res: seq[Keystore]
      for item in strKeystores:
        let key =
          try:
            parseKeystore(item)
          except SerializationError:
            # TODO re-raise the exception by adjusting the column index, so the user
            # will get an accurate syntax error within the larger message
            reader.raiseUnexpectedValue("Invalid keystore format")
        res.add(key)
      res

  let slashing =
    if strSlashing.isSome():
      let db =
        try:
          RestJson.decode(strSlashing.get(),
                          SPDIR,
                          requireAllFields = true,
                          allowUnknownFields = true)
        except SerializationError:
          reader.raiseUnexpectedValue("Invalid slashing protection format")
      Opt.some(db)
    else:
      Opt.none(SPDIR)

  value = KeystoresAndSlashingProtection(
    keystores: keystores, passwords: passwords, slashing_protection: slashing
  )

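# Note: each keystore (and the slashing-protection DB, when present) is first
# serialized to a JSON string and then embedded in the outer object, so the
# wire format carries them as escaped JSON strings rather than nested objects;
# the reader above reverses this with parseKeystore / RestJson.decode.
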
## RestActivityItem
proc writeValue*(
    writer: var JsonWriter[RestJson], value: RestActivityItem
) {.raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("index", value.index)
  writer.writeField("epoch", value.epoch)
  writer.writeField("active", value.active)
  writer.endRecord()

proc readValue*(reader: var JsonReader[RestJson],
                value: var RestActivityItem) {.
    raises: [SerializationError, IOError].} =
  var index: Opt[ValidatorIndex]
  var epoch: Opt[Epoch]
  var active: Opt[bool]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "index":
      if index.isSome():
        reader.raiseUnexpectedField(
          "Multiple `index` fields found", "RestActivityItem")
      index = Opt.some(reader.readValue(ValidatorIndex))
    of "epoch":
      if epoch.isSome():
        reader.raiseUnexpectedField(
          "Multiple `epoch` fields found", "RestActivityItem")
      epoch = Opt.some(reader.readValue(Epoch))
    of "active":
      if active.isSome():
        reader.raiseUnexpectedField(
          "Multiple `active` fields found", "RestActivityItem")
      active = Opt.some(reader.readValue(bool))
    else:
      unrecognizedFieldIgnore()

  if index.isNone():
    reader.raiseUnexpectedValue("Missing or empty `index` value")
  if epoch.isNone():
    reader.raiseUnexpectedValue("Missing or empty `epoch` value")
  if active.isNone():
    reader.raiseUnexpectedValue("Missing or empty `active` value")

  value = RestActivityItem(index: index.get(), epoch: epoch.get(),
                           active: active.get())

## RestLivenessItem
proc writeValue*(
    writer: var JsonWriter[RestJson], value: RestLivenessItem
) {.raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("index", value.index)
  writer.writeField("is_live", value.is_live)
  writer.endRecord()

proc readValue*(reader: var JsonReader[RestJson],
                value: var RestLivenessItem) {.
    raises: [SerializationError, IOError].} =
  var index: Opt[ValidatorIndex]
  var isLive: Opt[bool]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "index":
      if index.isSome():
        reader.raiseUnexpectedField(
          "Multiple `index` fields found", "RestLivenessItem")
      index = Opt.some(reader.readValue(ValidatorIndex))
    of "is_live":
      if isLive.isSome():
        reader.raiseUnexpectedField(
          "Multiple `is_live` fields found", "RestLivenessItem")
      isLive = Opt.some(reader.readValue(bool))
    else:
      unrecognizedFieldIgnore()

  if index.isNone():
    reader.raiseUnexpectedValue("Missing or empty `index` value")
  if isLive.isNone():
    reader.raiseUnexpectedValue("Missing or empty `is_live` value")

  value = RestLivenessItem(index: index.get(), is_live: isLive.get())

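# Sketch of a liveness entry, assuming the usual beacon-API string encoding of
# validator indices: {"index": "123", "is_live": true}
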
## HeadChangeInfoObject
proc writeValue*(
    writer: var JsonWriter[RestJson], value: HeadChangeInfoObject
) {.raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("slot", value.slot)
  writer.writeField("block", value.block_root)
  writer.writeField("state", value.state_root)
  writer.writeField("epoch_transition", value.epoch_transition)
  writer.writeField("previous_duty_dependent_root",
                    value.previous_duty_dependent_root)
  writer.writeField("current_duty_dependent_root",
                    value.current_duty_dependent_root)
  if value.optimistic.isSome():
    writer.writeField("execution_optimistic", value.optimistic.get())
  writer.endRecord()

## ReorgInfoObject
proc writeValue*(
    writer: var JsonWriter[RestJson], value: ReorgInfoObject
) {.raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("slot", value.slot)
  writer.writeField("depth", value.depth)
  writer.writeField("old_head_block", value.old_head_block)
  writer.writeField("new_head_block", value.new_head_block)
  writer.writeField("old_head_state", value.old_head_state)
  writer.writeField("new_head_state", value.new_head_state)
  if value.optimistic.isSome():
    writer.writeField("execution_optimistic", value.optimistic.get())
  writer.endRecord()

## FinalizationInfoObject
proc writeValue*(
    writer: var JsonWriter[RestJson], value: FinalizationInfoObject
) {.raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("block", value.block_root)
  writer.writeField("state", value.state_root)
  writer.writeField("epoch", value.epoch)
  if value.optimistic.isSome():
    writer.writeField("execution_optimistic", value.optimistic.get())
  writer.endRecord()

## RestNodeValidity
proc writeValue*(
    writer: var JsonWriter[RestJson], value: RestNodeValidity
) {.raises: [IOError].} =
  writer.writeValue($value)

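# Sketch of the `head` event payload produced above (values elided):
#   {"slot": "...", "block": "0x...", "state": "0x...",
#    "epoch_transition": false, "previous_duty_dependent_root": "0x...",
#    "current_duty_dependent_root": "0x...", "execution_optimistic": false}
# ReorgInfoObject and FinalizationInfoObject follow the same flat layout with
# their respective fields.
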
## RestErrorMessage
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestErrorMessage) {.
    raises: [SerializationError, IOError].} =
  var
    code: Opt[int]
    message: Opt[string]
    stacktraces: Opt[seq[string]]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "code":
      if code.isSome():
        reader.raiseUnexpectedField("Multiple `code` fields found",
                                    "RestErrorMessage")
      let ires =
        try:
          let res = reader.readValue(int)
          if res < 0:
            reader.raiseUnexpectedValue("Invalid `code` field value")
          Opt.some(res)
        except SerializationError:
          Opt.none(int)
      if ires.isNone():
        let sres =
          try: parseInt(reader.readValue(string))
          except ValueError:
            reader.raiseUnexpectedValue("Invalid `code` field format")
        if sres < 0:
          reader.raiseUnexpectedValue("Invalid `code` field value")
        code = Opt.some(sres)
      else:
        code = ires
    of "message":
      if message.isSome():
        reader.raiseUnexpectedField("Multiple `message` fields found",
                                    "RestErrorMessage")
      message = Opt.some(reader.readValue(string))
    of "stacktraces":
      if stacktraces.isSome():
        reader.raiseUnexpectedField("Multiple `stacktraces` fields found",
                                    "RestErrorMessage")
      stacktraces = Opt.some(reader.readValue(seq[string]))
    else:
      unrecognizedFieldIgnore()

  if code.isNone():
    reader.raiseUnexpectedValue("Missing or invalid `code` value")
  if message.isNone():
    reader.raiseUnexpectedValue("Missing or invalid `message` value")

  value = RestErrorMessage(
    code: code.get(), message: message.get(),
    stacktraces: stacktraces
  )

proc writeValue*(writer: var JsonWriter[RestJson], value: RestErrorMessage) {.
    raises: [IOError].} =
  writer.beginRecord()
  writer.writeField("code", value.code)
  writer.writeField("message", value.message)
  if value.stacktraces.isSome():
    writer.writeField("stacktraces", value.stacktraces.get())
  writer.endRecord()

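# The reader above accepts "code" either as a JSON number or as a numeric
# string, so both of these parse to the same value (sketch):
#   {"code": 400, "message": "..."}   and   {"code": "400", "message": "..."}
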
## VCRuntimeConfig
proc readValue*(reader: var JsonReader[RestJson],
                value: var VCRuntimeConfig) {.
    raises: [SerializationError, IOError].} =
  for fieldName in readObjectFields(reader):
    let fieldValue = reader.readValue(string)
    if value.hasKeyOrPut(toUpperAscii(fieldName), fieldValue):
      let msg = "Multiple `" & fieldName & "` fields found"
      reader.raiseUnexpectedField(msg, "VCRuntimeConfig")

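# Field names are normalised to upper case on insertion, so later lookups of
# spec constants (for example "SECONDS_PER_SLOT") do not depend on the exact
# casing the server used.
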
## ForkedMaybeBlindedBeaconBlock
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: ProduceBlockResponseV3) {.raises: [IOError].} =
  writer.beginRecord()
  withForkyMaybeBlindedBlck(value):
    writer.writeField("version", consensusFork.toString())
    writer.writeField("execution_payload_blinded", isBlinded)
    if value.executionValue.isSome():
      writer.writeField("execution_payload_value",
                        $(value.executionValue.get()))
    if value.consensusValue.isSome():
      writer.writeField("consensus_block_value",
                        $(value.consensusValue.get()))
    writer.writeField("data", forkyMaybeBlindedBlck)
  writer.endRecord()

proc readValue*(reader: var JsonReader[RestJson],
                value: var ProduceBlockResponseV3) {.
    raises: [SerializationError, IOError].} =
  var
    version: Opt[ConsensusFork]
    blinded: Opt[bool]
    executionValue: Opt[UInt256]
    consensusValue: Opt[UInt256]
    data: Opt[JsonString]

  prepareForkedBlockReading(ProduceBlockResponseV3, reader, version, data,
                            blinded, executionValue, consensusValue)

  if blinded.isNone():
    reader.raiseUnexpectedValue("Field `execution_payload_blinded` is missing")
  if executionValue.isNone():
    reader.raiseUnexpectedValue("Field `execution_payload_value` is missing")
  # TODO (cheatfate): At some point we should add check for missing
  # `consensus_block_value` too
  if data.isNone():
    reader.raiseUnexpectedValue("Field `data` is missing")

  withConsensusFork(version.get):
    when consensusFork >= ConsensusFork.Deneb:
      if blinded.get:
        value = ForkedMaybeBlindedBeaconBlock.init(
          RestJson.decode(
            string(data.get()), consensusFork.BlindedBlockContents,
            requireAllFields = true, allowUnknownFields = true),
          executionValue, consensusValue)
      else:
        value = ForkedMaybeBlindedBeaconBlock.init(
          RestJson.decode(
            string(data.get()), consensusFork.BlockContents,
            requireAllFields = true, allowUnknownFields = true),
          executionValue, consensusValue)
    elif consensusFork >= ConsensusFork.Bellatrix:
      if blinded.get:
        reader.raiseUnexpectedValue(
          "`execution_payload_blinded` unsupported for `version`")
      value = ForkedMaybeBlindedBeaconBlock.init(
        RestJson.decode(
          string(data.get()), consensusFork.BlockContents,
          requireAllFields = true, allowUnknownFields = true),
        executionValue, consensusValue)
    else:
      if blinded.get:
        reader.raiseUnexpectedValue(
          "`execution_payload_blinded` unsupported for `version`")
      value = ForkedMaybeBlindedBeaconBlock.init(
        RestJson.decode(
          string(data.get()), consensusFork.BlockContents,
          requireAllFields = true, allowUnknownFields = true))

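# Sketch of the produceBlockV3 response envelope handled above (values elided):
#   {"version": "deneb", "execution_payload_blinded": false,
#    "execution_payload_value": "...", "consensus_block_value": "...",
#    "data": {...BlockContents or BlindedBlockContents...}}
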
proc parseRoot(value: string): Result[Eth2Digest, cstring] =
  try:
    ok(Eth2Digest(data: hexToByteArray[32](value)))
  except ValueError:
    err("Unable to decode root value")

## GraffitiString
proc writeValue*(writer: var JsonWriter[RestJson], value: GraffitiString) {.
    raises: [IOError].} =
  writeValue(writer, $value)

proc readValue*(reader: var JsonReader[RestJson], T: type GraffitiString): T {.
    raises: [IOError, SerializationError].} =
  let res = init(GraffitiString, reader.readValue(string))
  if res.isErr():
    reader.raiseUnexpectedValue res.error
  res.get

proc decodeBody*(
    t: typedesc[RestPublishedSignedBeaconBlock],
    body: ContentBody,
    version: string
): Result[RestPublishedSignedBeaconBlock, RestErrorMessage] =
  if body.contentType == ApplicationJsonMediaType:
    let data =
      try:
        RestJson.decode(body.data, RestPublishedSignedBeaconBlock,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        debug "Failed to decode JSON data",
              err = exc.formatMsg("<data>"),
              data = string.fromBytes(body.data)
        return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                         [version, exc.formatMsg("<data>")]))
      except CatchableError as exc:
        return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                         [version, $exc.msg]))
    ok(data)
  elif body.contentType == OctetStreamMediaType:
    let consensusFork = ConsensusFork.decodeString(version).valueOr:
      return err(RestErrorMessage.init(Http400, UnableDecodeVersionError,
                                       [version, $error]))
    case consensusFork
    of ConsensusFork.Phase0:
      let blck =
        try:
          SSZ.decode(body.data, phase0.SignedBeaconBlock)
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBeaconBlock(ForkedSignedBeaconBlock.init(blck)))
    of ConsensusFork.Altair:
      let blck =
        try:
          SSZ.decode(body.data, altair.SignedBeaconBlock)
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBeaconBlock(ForkedSignedBeaconBlock.init(blck)))
    of ConsensusFork.Bellatrix:
      let blck =
        try:
          SSZ.decode(body.data, bellatrix.SignedBeaconBlock)
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBeaconBlock(ForkedSignedBeaconBlock.init(blck)))
    of ConsensusFork.Capella:
      let blck =
        try:
          SSZ.decode(body.data, capella.SignedBeaconBlock)
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBeaconBlock(ForkedSignedBeaconBlock.init(blck)))
    of ConsensusFork.Deneb:
      let blck =
        try:
          SSZ.decode(body.data, deneb.SignedBeaconBlock)
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBeaconBlock(ForkedSignedBeaconBlock.init(blck)))
    of ConsensusFork.Electra:
      let blck =
        try:
          SSZ.decode(body.data, electra.SignedBeaconBlock)
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBeaconBlock(ForkedSignedBeaconBlock.init(blck)))
  else:
    err(RestErrorMessage.init(Http415, "Invalid content type",
                              [version, $body.contentType]))

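# Content negotiation above: JSON bodies are decoded fork-agnostically by the
# RestPublishedSignedBeaconBlock reader, while SSZ (octet-stream) bodies rely
# on the `version` argument (the consensus-version string supplied by the
# caller) to pick the fork-specific SignedBeaconBlock type.
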
proc decodeBody*(
    t: typedesc[RestPublishedSignedBlockContents],
    body: ContentBody,
    version: string
): Result[RestPublishedSignedBlockContents, RestErrorMessage] =
  if body.contentType == ApplicationJsonMediaType:
    let data =
      try:
        RestJson.decode(body.data, RestPublishedSignedBlockContents,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        debug "Failed to decode JSON data",
              err = exc.formatMsg("<data>"),
              data = string.fromBytes(body.data)
        return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                         [version, exc.formatMsg("<data>")]))
      except CatchableError as exc:
        return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                         [version, $exc.msg]))
    ok(data)
  elif body.contentType == OctetStreamMediaType:
    let consensusFork = ConsensusFork.decodeString(version).valueOr:
      return err(RestErrorMessage.init(Http400, UnableDecodeVersionError,
                                       [version, $error]))
    case consensusFork
    of ConsensusFork.Phase0:
      let blck =
        try:
          var res = SSZ.decode(body.data, phase0.SignedBeaconBlock)
          res.root = hash_tree_root(res.message)
          res
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBlockContents(
        kind: ConsensusFork.Phase0, phase0Data: blck))
    of ConsensusFork.Altair:
      let blck =
        try:
          var res = SSZ.decode(body.data, altair.SignedBeaconBlock)
          res.root = hash_tree_root(res.message)
          res
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBlockContents(
        kind: ConsensusFork.Altair, altairData: blck))
    of ConsensusFork.Bellatrix:
      let blck =
        try:
          var res = SSZ.decode(body.data, bellatrix.SignedBeaconBlock)
          res.root = hash_tree_root(res.message)
          res
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBlockContents(
        kind: ConsensusFork.Bellatrix, bellatrixData: blck))
    of ConsensusFork.Capella:
      let blck =
        try:
          var res = SSZ.decode(body.data, capella.SignedBeaconBlock)
          res.root = hash_tree_root(res.message)
          res
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBlockContents(
        kind: ConsensusFork.Capella, capellaData: blck))
    of ConsensusFork.Deneb:
      let blckContents =
        try:
          var res = SSZ.decode(body.data, DenebSignedBlockContents)
          res.signed_block.root = hash_tree_root(res.signed_block.message)
          res
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBlockContents(
        kind: ConsensusFork.Deneb, denebData: blckContents))
    of ConsensusFork.Electra:
      let blckContents =
        try:
          var res = SSZ.decode(body.data, ElectraSignedBlockContents)
          res.signed_block.root = hash_tree_root(res.signed_block.message)
          res
        except SerializationError as exc:
          return err(RestErrorMessage.init(Http400, UnableDecodeError,
                                           [version, exc.formatMsg("<data>")]))
        except CatchableError as exc:
          return err(RestErrorMessage.init(Http400, UnexpectedDecodeError,
                                           [version, $exc.msg]))
      ok(RestPublishedSignedBlockContents(
        kind: ConsensusFork.Electra, electraData: blckContents))
  else:
    err(RestErrorMessage.init(Http415, "Invalid content type",
                              [version, $body.contentType]))

2021-03-23 22:50:18 +00:00
|
|
|
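# Note on the simpler body decoder below: `decodeBody` accepts JSON only. Any
# other Content-Type is rejected up front, and JSON parsing failures are
# reported as flat `cstring` errors rather than structured REST errors.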
proc decodeBody*[T](t: typedesc[T],
                    body: ContentBody): Result[T, cstring] =
  if body.contentType != ApplicationJsonMediaType:
    return err("Unsupported content type")
  let data =
    try:
      RestJson.decode(body.data, T,
                      requireAllFields = true,
                      allowUnknownFields = true)
    except SerializationError as exc:
      debug "Failed to deserialize REST JSON data",
            err = exc.formatMsg("<data>"),
            data = string.fromBytes(body.data)
      return err("Unable to deserialize data")
    except CatchableError:
      return err("Unexpected deserialization error")
  ok(data)

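# `decodeBodyJsonOrSsz` dispatches on the request Content-Type: JSON bodies go
# through `RestJson`, `application/octet-stream` bodies through `SSZ.decode`,
# and anything else is answered with HTTP 415. Failures are returned as
# structured `RestErrorMessage` values suitable for the REST API.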
proc decodeBodyJsonOrSsz*[T](t: typedesc[T],
                             body: ContentBody): Result[T, RestErrorMessage] =
  if body.contentType == ApplicationJsonMediaType:
    let data =
      try:
        RestJson.decode(body.data, T,
                        requireAllFields = true,
                        allowUnknownFields = true)
      except SerializationError as exc:
        debug "Failed to decode JSON data",
              err = exc.formatMsg("<data>"),
              data = string.fromBytes(body.data)
        return err(
          RestErrorMessage.init(Http400, UnableDecodeError,
                                [exc.formatMsg("<data>")]))
      except CatchableError as exc:
        return err(
          RestErrorMessage.init(Http400, UnexpectedDecodeError, [$exc.msg]))
    ok(data)
  elif body.contentType == OctetStreamMediaType:
    let blck =
      try:
        SSZ.decode(body.data, T)
      except SerializationError as exc:
        return err(
          RestErrorMessage.init(Http400, UnableDecodeError,
                                [exc.formatMsg("<data>")]))
      except CatchableError as exc:
        return err(
          RestErrorMessage.init(Http400, UnexpectedDecodeError, [$exc.msg]))
    ok(blck)
  else:
    err(RestErrorMessage.init(Http415, "Invalid content type",
                              [$body.contentType]))

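# Encoding helpers. The `encodeBytes` overloads below render a value to the
# requested Content-Type: only `application/json` is supported for plain
# `EncodeTypes` and `EncodeArrays`, while `EncodeOctetTypes` additionally
# supports SSZ via `application/octet-stream` (see further below).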
proc encodeBytes*[T: EncodeTypes](value: T,
                                  contentType: string): RestResult[seq[byte]] =
  case contentType
  of "application/json":
    let data =
      block:
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.writeValue(value)
          stream.getOutput(seq[byte])
        except IOError:
          return err("Input/output error")
        except SerializationError:
          return err("Serialization error")
    ok(data)
  else:
    err("Content-Type not supported")

proc encodeBytes*[T: EncodeArrays](value: T,
                                   contentType: string): RestResult[seq[byte]] =
  case contentType
  of "application/json":
    let data =
      block:
        try:
          var stream = memoryOutput()
          var writer = JsonWriter[RestJson].init(stream)
          writer.writeArray(value)
          stream.getOutput(seq[byte])
        except IOError:
          return err("Input/output error")
        except SerializationError:
          return err("Serialization error")
    ok(data)
  else:
    err("Content-Type not supported")

proc encodeBytes*[T: EncodeOctetTypes](
    value: T,
    contentType: string
): RestResult[seq[byte]] =
  case contentType
  of "application/json":
    let data =
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.writeValue(value)
        stream.getOutput(seq[byte])
      except IOError:
        return err("Input/output error")
      except SerializationError:
        return err("Serialization error")
    ok(data)
  of "application/octet-stream":
    let data =
      try:
        SSZ.encode(value)
      except CatchableError:
        return err("Serialization error")
    ok(data)
  else:
    err("Content-Type not supported")

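# `readSszResBytes` wraps `readSszBytes` so that SSZ decoding failures surface
# as `RestResult` errors instead of exceptions, distinguishing size mismatches
# from otherwise malformed SSZ payloads.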
func readSszResBytes(T: typedesc[RestBlockTypes],
                     data: openArray[byte]): RestResult[T] =
  var res: T
  try:
    readSszBytes(data, res)
    ok(res)
  except SszSizeMismatchError:
    err("Incorrect SSZ object's size")
  except SszError:
    err("Invalid SSZ object")

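# `decodeBytes` for consensus response types: JSON responses are decoded with
# `RestJson`, while SSZ responses additionally need the `Eth-Consensus-Version`
# value (`consensusVersion`) to select the fork-specific blinded block type.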
proc decodeBytes*[T: DecodeConsensysTypes](
    t: typedesc[T],
    value: openArray[byte],
    contentType: Opt[ContentTypeData],
    consensusVersion: string
): RestResult[T] =
  let mediaType =
    if contentType.isNone() or
       isWildCard(contentType.get().mediaType):
      return err("Invalid/missing Content-Type value")
    else:
      contentType.get().mediaType

  if mediaType == ApplicationJsonMediaType:
    try:
      ok(RestJson.decode(value, T,
                         requireAllFields = true,
                         allowUnknownFields = true))
    except SerializationError as exc:
      debug "Failed to deserialize REST JSON data",
            err = exc.formatMsg("<data>"),
            data = string.fromBytes(value)
      return err("Serialization error")
  elif mediaType == OctetStreamMediaType:
    when t is ProduceBlindedBlockResponse:
      let fork = ConsensusFork.decodeString(consensusVersion).valueOr:
        return err("Invalid or Unsupported consensus version")
      case fork
      of ConsensusFork.Electra:
        let
          blck = ? readSszResBytes(electra_mev.BlindedBeaconBlock, value)
          forked = ForkedBlindedBeaconBlock(
            kind: ConsensusFork.Electra, electraData: blck)
        ok(ProduceBlindedBlockResponse(forked))
      of ConsensusFork.Deneb:
        let
          blck = ? readSszResBytes(deneb_mev.BlindedBeaconBlock, value)
          forked = ForkedBlindedBeaconBlock(
            kind: ConsensusFork.Deneb, denebData: blck)
        ok(ProduceBlindedBlockResponse(forked))
      of ConsensusFork.Phase0 .. ConsensusFork.Capella:
        err("Unable to decode blinded block for pre-Deneb forks")
  else:
    err("Unsupported Content-Type")

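# Decoder for the `produceBlockV3` response. Besides the Content-Type, the
# interpretation of an SSZ payload depends on the response headers:
# `Eth-Consensus-Version` (fork), `Eth-Execution-Payload-Blinded` (blinded or
# full block) and the execution/consensus block-value headers, which are
# parsed as decimal `UInt256` values.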
proc decodeBytes*[T: ProduceBlockResponseV3](
    t: typedesc[T],
    value: openArray[byte],
    contentType: Opt[ContentTypeData],
    headerConsensusVersion: string,
    headerBlinded: string,
    headerPayloadValue: string,
    headerConsensusValue: string): RestResult[T] =
  let
    mediaType =
      if contentType.isNone():
        ApplicationJsonMediaType
      else:
        if isWildCard(contentType.get().mediaType):
          return err("Incorrect Content-Type")
        contentType.get().mediaType

  if mediaType == ApplicationJsonMediaType:
    try:
      ok(RestJson.decode(value, T,
                         requireAllFields = true,
                         allowUnknownFields = true))
    except SerializationError as exc:
      debug "Failed to deserialize REST JSON data",
            err = exc.formatMsg("<data>"),
            data = string.fromBytes(value)
      return err("Serialization error")
  elif mediaType == OctetStreamMediaType:
    let
      fork = ConsensusFork.decodeString(headerConsensusVersion).valueOr:
        return err("Invalid or Unsupported consensus version")
      blinded =
        block:
          var toCheck = headerBlinded.toLowerAscii()
          if toCheck == "true":
            true
          elif toCheck == "false":
            false
          else:
            return err("Incorrect `Eth-Execution-Payload-Blinded` header value")
      executionValue =
        try:
          Opt.some parse(headerPayloadValue, UInt256, 10)
        except ValueError:
          return err("Incorrect `Eth-Execution-Payload-Value` header value")
      consensusValue =
        if len(headerConsensusValue) == 0:
          # TODO (cheatfate): We should not allow empty `consensus-value`.
          Opt.none(UInt256)
        else:
          try:
            Opt.some parse(headerConsensusValue, UInt256, 10)
          except ValueError:
            return err("Incorrect `Eth-Consensus-Block-Value` header value")
    withConsensusFork(fork):
      when consensusFork >= ConsensusFork.Deneb:
        if blinded:
          let contents =
            ? readSszResBytes(consensusFork.BlindedBlockContents, value)
          ok(
            ForkedMaybeBlindedBeaconBlock.init(
              contents, executionValue, consensusValue))
        else:
          let contents = ? readSszResBytes(consensusFork.BlockContents, value)
          ok(
            ForkedMaybeBlindedBeaconBlock.init(
              contents, executionValue, consensusValue))
      elif consensusFork >= ConsensusFork.Bellatrix:
        if blinded:
          return err("`Eth-Execution-Payload-Blinded` unsupported for " &
                     "`Eth-Consensus-Version`")
        let contents = ? readSszResBytes(consensusFork.BlockContents, value)
        ok(
          ForkedMaybeBlindedBeaconBlock.init(
            contents, executionValue, consensusValue))
      else:
        if blinded:
          return err("`Eth-Execution-Payload-Blinded` unsupported for " &
                     "`Eth-Consensus-Version`")
        let contents = ? readSszResBytes(consensusFork.BlockContents, value)
        ok(ForkedMaybeBlindedBeaconBlock.init(contents))
  else:
    err("Unsupported Content-Type")

proc decodeBytes*[T: DecodeTypes](
    t: typedesc[T],
    value: openArray[byte],
    contentType: Opt[ContentTypeData]
): RestResult[T] =
  let mediaType =
    if contentType.isNone():
      ApplicationJsonMediaType
    else:
      if isWildCard(contentType.get().mediaType):
        return err("Incorrect Content-Type")
      contentType.get().mediaType

  if mediaType == ApplicationJsonMediaType:
    try:
      ok RestJson.decode(value, T,
                         requireAllFields = true,
                         allowUnknownFields = true)
    except SerializationError as exc:
      debug "Failed to deserialize REST JSON data",
            err = exc.formatMsg("<data>"),
            data = string.fromBytes(value)
      err("Serialization error")
  else:
    err("Content-Type not supported")

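# String encoders for URL path segments and query parameters. Numeric wrappers
# are rendered in base 10 (e.g. Slot(100) encodes as "100"), while byte-based
# values go through `hexOriginal`, i.e. a 0x-prefixed hex string.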
proc encodeString*(value: string): RestResult[string] =
  ok(value)

proc encodeString*(
    value:
      uint64 |
      SyncCommitteePeriod |
      Epoch |
      Slot |
      CommitteeIndex |
      SyncSubcommitteeIndex): RestResult[string] =
  ok(Base10.toString(uint64(value)))

proc encodeString*(value: ValidatorSig): RestResult[string] =
  ok(hexOriginal(toRaw(value)))

proc encodeString*(value: GraffitiBytes): RestResult[string] =
  ok(hexOriginal(distinctBase(value)))

proc encodeString*(value: Eth2Digest): RestResult[string] =
  ok(hexOriginal(value.data))

proc encodeString*(value: ValidatorIdent): RestResult[string] =
  case value.kind
  of ValidatorQueryKind.Index:
    ok(Base10.toString(uint64(value.index)))
  of ValidatorQueryKind.Key:
    ok(hexOriginal(toRaw(value.key)))

proc encodeString*(value: ValidatorPubKey): RestResult[string] =
  ok(hexOriginal(toRaw(value)))

proc encodeString*(value: StateIdent): RestResult[string] =
  case value.kind
  of StateQueryKind.Slot:
    ok(Base10.toString(uint64(value.slot)))
  of StateQueryKind.Root:
    ok(hexOriginal(value.root.data))
  of StateQueryKind.Named:
    case value.value
    of StateIdentType.Head:
      ok("head")
    of StateIdentType.Genesis:
      ok("genesis")
    of StateIdentType.Finalized:
      ok("finalized")
    of StateIdentType.Justified:
      ok("justified")

proc encodeString*(value: BroadcastValidationType): RestResult[string] =
  case value
  of BroadcastValidationType.Gossip:
    ok("gossip")
  of BroadcastValidationType.Consensus:
    ok("consensus")
  of BroadcastValidationType.ConsensusAndEquivocation:
    ok("consensus_and_equivocation")

proc encodeString*(value: BlockIdent): RestResult[string] =
  case value.kind
  of BlockQueryKind.Slot:
    ok(Base10.toString(uint64(value.slot)))
  of BlockQueryKind.Root:
    ok(hexOriginal(value.root.data))
  of BlockQueryKind.Named:
    case value.value
    of BlockIdentType.Head:
      ok("head")
    of BlockIdentType.Genesis:
      ok("genesis")
    of BlockIdentType.Finalized:
      ok("finalized")

proc decodeString*(t: typedesc[PeerStateKind],
                   value: string): Result[PeerStateKind, cstring] =
  case value
  of "disconnected":
    ok(PeerStateKind.Disconnected)
  of "connecting":
    ok(PeerStateKind.Connecting)
  of "connected":
    ok(PeerStateKind.Connected)
  of "disconnecting":
    ok(PeerStateKind.Disconnecting)
  else:
    err("Incorrect peer's state value")

proc encodeString*(value: PeerStateKind): Result[string, cstring] =
  case value
  of PeerStateKind.Disconnected:
    ok("disconnected")
  of PeerStateKind.Connecting:
    ok("connecting")
  of PeerStateKind.Connected:
    ok("connected")
  of PeerStateKind.Disconnecting:
    ok("disconnecting")

proc decodeString*(t: typedesc[PeerDirectKind],
                   value: string): Result[PeerDirectKind, cstring] =
  case value
  of "inbound":
    ok(PeerDirectKind.Inbound)
  of "outbound":
    ok(PeerDirectKind.Outbound)
  else:
    err("Incorrect peer's direction value")

proc encodeString*(value: PeerDirectKind): Result[string, cstring] =
  case value
  of PeerDirectKind.Inbound:
    ok("inbound")
  of PeerDirectKind.Outbound:
    ok("outbound")

proc encodeString*(peerid: PeerId): Result[string, cstring] =
  ok($peerid)

proc decodeString*(t: typedesc[EventTopic],
                   value: string): Result[EventTopic, cstring] =
  case value
  of "head":
    ok(EventTopic.Head)
  of "block":
    ok(EventTopic.Block)
  of "attestation":
    ok(EventTopic.Attestation)
  of "voluntary_exit":
    ok(EventTopic.VoluntaryExit)
  of "bls_to_execution_change":
    ok(EventTopic.BLSToExecutionChange)
  of "proposer_slashing":
    ok(EventTopic.ProposerSlashing)
  of "attester_slashing":
    ok(EventTopic.AttesterSlashing)
  of "blob_sidecar":
    ok(EventTopic.BlobSidecar)
  of "finalized_checkpoint":
    ok(EventTopic.FinalizedCheckpoint)
  of "chain_reorg":
    ok(EventTopic.ChainReorg)
  of "contribution_and_proof":
    ok(EventTopic.ContributionAndProof)
  of "light_client_finality_update":
    ok(EventTopic.LightClientFinalityUpdate)
  of "light_client_optimistic_update":
    ok(EventTopic.LightClientOptimisticUpdate)
  else:
    err("Incorrect event's topic value")

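# Encodes a set of event topics as the comma-separated list used by the events
# API, e.g. {EventTopic.Head, EventTopic.Block} is rendered as "head,block";
# an empty set is rejected.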
proc encodeString*(value: set[EventTopic]): Result[string, cstring] =
  var res: string
  if EventTopic.Head in value:
    res.add("head,")
  if EventTopic.Block in value:
    res.add("block,")
  if EventTopic.Attestation in value:
    res.add("attestation,")
  if EventTopic.VoluntaryExit in value:
    res.add("voluntary_exit,")
  if EventTopic.BLSToExecutionChange in value:
    res.add("bls_to_execution_change,")
  if EventTopic.ProposerSlashing in value:
    res.add("proposer_slashing,")
  if EventTopic.AttesterSlashing in value:
    res.add("attester_slashing,")
  if EventTopic.BlobSidecar in value:
    res.add("blob_sidecar,")
  if EventTopic.FinalizedCheckpoint in value:
    res.add("finalized_checkpoint,")
  if EventTopic.ChainReorg in value:
    res.add("chain_reorg,")
  if EventTopic.ContributionAndProof in value:
    res.add("contribution_and_proof,")
  if EventTopic.LightClientFinalityUpdate in value:
    res.add("light_client_finality_update,")
  if EventTopic.LightClientOptimisticUpdate in value:
    res.add("light_client_optimistic_update,")
  if len(res) == 0:
    return err("Topics set must not be empty")
  res.setLen(len(res) - 1)
  ok(res)

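# Collapses a validator status filter into the API's status strings, preferring
# the grouped names when a whole group is selected, e.g.
#   toList({ValidatorFilterKind.PendingInitialized,
#           ValidatorFilterKind.PendingQueued}) == @["pending"]
#   toList({ValidatorFilterKind.ActiveOngoing}) == @["active_ongoing"]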
proc toList*(value: set[ValidatorFilterKind]): seq[string] =
  const
    pendingSet = {ValidatorFilterKind.PendingInitialized,
                  ValidatorFilterKind.PendingQueued}
    activeSet = {ValidatorFilterKind.ActiveOngoing,
                 ValidatorFilterKind.ActiveExiting,
                 ValidatorFilterKind.ActiveSlashed}
    exitedSet = {ValidatorFilterKind.ExitedUnslashed,
                 ValidatorFilterKind.ExitedSlashed}
    withdrawSet = {ValidatorFilterKind.WithdrawalPossible,
                   ValidatorFilterKind.WithdrawalDone}
  var
    res: seq[string]
    v = value

  template processSet(argSet, argName: untyped): untyped =
    if argSet * v == argSet:
      res.add(argName)
      v.excl(argSet)

  template processSingle(argSingle, argName): untyped =
    if argSingle in v:
      res.add(argName)

  processSet(pendingSet, "pending")
  processSet(activeSet, "active")
  processSet(exitedSet, "exited")
  processSet(withdrawSet, "withdrawal")
  processSingle(ValidatorFilterKind.PendingInitialized, "pending_initialized")
  processSingle(ValidatorFilterKind.PendingQueued, "pending_queued")
  processSingle(ValidatorFilterKind.ActiveOngoing, "active_ongoing")
  processSingle(ValidatorFilterKind.ActiveExiting, "active_exiting")
  processSingle(ValidatorFilterKind.ActiveSlashed, "active_slashed")
  processSingle(ValidatorFilterKind.ExitedUnslashed, "exited_unslashed")
  processSingle(ValidatorFilterKind.ExitedSlashed, "exited_slashed")
  processSingle(ValidatorFilterKind.WithdrawalPossible, "withdrawal_possible")
  processSingle(ValidatorFilterKind.WithdrawalDone, "withdrawal_done")
  res

proc decodeString*(t: typedesc[ValidatorSig],
                   value: string): Result[ValidatorSig, cstring] =
  if len(value) != ValidatorSigSize + 2:
    return err("Incorrect validator signature value length")
  if value[0] != '0' or value[1] != 'x':
    return err("Incorrect validator signature encoding")
  ValidatorSig.fromHex(value)

proc decodeString*(t: typedesc[ValidatorPubKey],
                   value: string): Result[ValidatorPubKey, cstring] =
  if len(value) != ValidatorKeySize + 2:
    return err("Incorrect validator's key value length")
  if value[0] != '0' or value[1] != 'x':
    err("Incorrect validator's key encoding")
  else:
    ValidatorPubKey.fromHex(value)

proc decodeString*(t: typedesc[GraffitiBytes],
                   value: string): Result[GraffitiBytes, cstring] =
  try:
    ok(GraffitiBytes.init(value))
  except ValueError:
    err("Unable to decode graffiti value")

proc decodeString*(t: typedesc[string],
                   value: string): Result[string, cstring] =
  ok(value)

proc decodeString*(t: typedesc[Slot], value: string): Result[Slot, cstring] =
  let res = ? Base10.decode(uint64, value)
  ok(Slot(res))

proc decodeString*(t: typedesc[Epoch], value: string): Result[Epoch, cstring] =
  let res = ? Base10.decode(uint64, value)
  ok(Epoch(res))

proc decodeString*(t: typedesc[SyncCommitteePeriod],
                   value: string): Result[SyncCommitteePeriod, cstring] =
  let res = ? Base10.decode(uint64, value)
  ok(SyncCommitteePeriod(res))

proc decodeString*(t: typedesc[uint64],
                   value: string): Result[uint64, cstring] =
  Base10.decode(uint64, value)

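# State identifiers accept three forms: a 0x-prefixed 32-byte state root, a
# decimal slot number, or one of the named identifiers "head", "genesis",
# "finalized" and "justified". Block identifiers below follow the same scheme,
# minus "justified".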
proc decodeString*(t: typedesc[StateIdent],
                   value: string): Result[StateIdent, cstring] =
  if len(value) > 2:
    if (value[0] == '0') and (value[1] == 'x'):
      if len(value) != RootHashSize + 2:
        err("Incorrect state root value length")
      else:
        let res = ? parseRoot(value)
        ok(StateIdent(kind: StateQueryKind.Root, root: res))
    elif (value[0] in DecimalSet) and (value[1] in DecimalSet):
      let res = ? Base10.decode(uint64, value)
      ok(StateIdent(kind: StateQueryKind.Slot, slot: Slot(res)))
    else:
      case value
      of "head":
        ok(StateIdent(kind: StateQueryKind.Named,
                      value: StateIdentType.Head))
      of "genesis":
        ok(StateIdent(kind: StateQueryKind.Named,
                      value: StateIdentType.Genesis))
      of "finalized":
        ok(StateIdent(kind: StateQueryKind.Named,
                      value: StateIdentType.Finalized))
      of "justified":
        ok(StateIdent(kind: StateQueryKind.Named,
                      value: StateIdentType.Justified))
      else:
        err("Incorrect state identifier value")
  else:
    let res = ? Base10.decode(uint64, value)
    ok(StateIdent(kind: StateQueryKind.Slot, slot: Slot(res)))

proc decodeString*(t: typedesc[BlockIdent],
                   value: string): Result[BlockIdent, cstring] =
  if len(value) > 2:
    if (value[0] == '0') and (value[1] == 'x'):
      if len(value) != RootHashSize + 2:
        err("Incorrect block root value length")
      else:
        let res = ? parseRoot(value)
        ok(BlockIdent(kind: BlockQueryKind.Root, root: res))
    elif (value[0] in DecimalSet) and (value[1] in DecimalSet):
      let res = ? Base10.decode(uint64, value)
      ok(BlockIdent(kind: BlockQueryKind.Slot, slot: Slot(res)))
    else:
      case value
      of "head":
        ok(BlockIdent(kind: BlockQueryKind.Named,
                      value: BlockIdentType.Head))
      of "genesis":
        ok(BlockIdent(kind: BlockQueryKind.Named,
                      value: BlockIdentType.Genesis))
      of "finalized":
        ok(BlockIdent(kind: BlockQueryKind.Named,
                      value: BlockIdentType.Finalized))
      else:
        err("Incorrect block identifier value")
  else:
    let res = ? Base10.decode(uint64, value)
    ok(BlockIdent(kind: BlockQueryKind.Slot, slot: Slot(res)))

proc decodeString*(t: typedesc[BroadcastValidationType],
                   value: string): Result[BroadcastValidationType, cstring] =
  case value
  of "gossip":
    ok(BroadcastValidationType.Gossip)
  of "consensus":
    ok(BroadcastValidationType.Consensus)
  of "consensus_and_equivocation":
    ok(BroadcastValidationType.ConsensusAndEquivocation)
  else:
    err("Incorrect broadcast validation type value")

proc decodeString*(t: typedesc[ValidatorIdent],
                   value: string): Result[ValidatorIdent, cstring] =
  if len(value) > 2:
    if (value[0] == '0') and (value[1] == 'x'):
      if len(value) != ValidatorKeySize + 2:
        err("Incorrect validator's key value length")
      else:
        let res = ? ValidatorPubKey.fromHex(value)
        ok(ValidatorIdent(kind: ValidatorQueryKind.Key,
                          key: res))
    elif (value[0] in DecimalSet) and (value[1] in DecimalSet):
      let res = ? Base10.decode(uint64, value)
      ok(ValidatorIdent(kind: ValidatorQueryKind.Index,
                        index: RestValidatorIndex(res)))
    else:
      err("Incorrect validator identifier value")
  else:
    let res = ? Base10.decode(uint64, value)
    ok(ValidatorIdent(kind: ValidatorQueryKind.Index,
                      index: RestValidatorIndex(res)))

proc decodeString*(t: typedesc[PeerId],
                   value: string): Result[PeerId, cstring] =
  PeerId.init(value)

proc decodeString*(t: typedesc[CommitteeIndex],
                   value: string): Result[CommitteeIndex, cstring] =
  let res = ? Base10.decode(uint64, value)
  CommitteeIndex.init(res)

proc decodeString*(t: typedesc[SyncSubcommitteeIndex],
                   value: string): Result[SyncSubcommitteeIndex, cstring] =
  let res = ? Base10.decode(uint64, value)
  SyncSubcommitteeIndex.init(res)

proc decodeString*(t: typedesc[Eth2Digest],
                   value: string): Result[Eth2Digest, cstring] =
  if len(value) != RootHashSize + 2:
    return err("Incorrect root value length")
  if value[0] != '0' or value[1] != 'x':
    return err("Incorrect root value encoding")
  parseRoot(value)

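# Validator status filters accept both the fine-grained names and the grouped
# ones; e.g. "pending" expands to {PendingInitialized, PendingQueued}, while
# "pending_queued" selects only that single state.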
proc decodeString*(t: typedesc[ValidatorFilter],
                   value: string): Result[ValidatorFilter, cstring] =
  case value
  of "pending_initialized":
    ok({ValidatorFilterKind.PendingInitialized})
  of "pending_queued":
    ok({ValidatorFilterKind.PendingQueued})
  of "active_ongoing":
    ok({ValidatorFilterKind.ActiveOngoing})
  of "active_exiting":
    ok({ValidatorFilterKind.ActiveExiting})
  of "active_slashed":
    ok({ValidatorFilterKind.ActiveSlashed})
  of "exited_unslashed":
    ok({ValidatorFilterKind.ExitedUnslashed})
  of "exited_slashed":
    ok({ValidatorFilterKind.ExitedSlashed})
  of "withdrawal_possible":
    ok({ValidatorFilterKind.WithdrawalPossible})
  of "withdrawal_done":
    ok({ValidatorFilterKind.WithdrawalDone})
  of "pending":
    ok({
      ValidatorFilterKind.PendingInitialized,
      ValidatorFilterKind.PendingQueued
    })
  of "active":
    ok({
      ValidatorFilterKind.ActiveOngoing,
      ValidatorFilterKind.ActiveExiting,
      ValidatorFilterKind.ActiveSlashed
    })
  of "exited":
    ok({
      ValidatorFilterKind.ExitedUnslashed,
      ValidatorFilterKind.ExitedSlashed
    })
  of "withdrawal":
    ok({
      ValidatorFilterKind.WithdrawalPossible,
      ValidatorFilterKind.WithdrawalDone
    })
  else:
    err("Incorrect validator state identifier value")

proc decodeString*(t: typedesc[ConsensusFork],
                   value: string): Result[ConsensusFork, cstring] =
  case toLowerAscii(value)
  of "phase0": ok(ConsensusFork.Phase0)
  of "altair": ok(ConsensusFork.Altair)
  of "bellatrix": ok(ConsensusFork.Bellatrix)
  of "capella": ok(ConsensusFork.Capella)
  of "deneb": ok(ConsensusFork.Deneb)
  of "electra": ok(ConsensusFork.Electra)
  else: err("Unsupported or invalid beacon block fork version")

proc decodeString*(t: typedesc[EventBeaconBlockObject],
                   value: string): Result[EventBeaconBlockObject, string] =
  try:
    ok(RestJson.decode(value, t,
                       requireAllFields = true,
                       allowUnknownFields = true))
  except SerializationError as exc:
    err(exc.formatMsg("<data>"))

## ValidatorIdent
proc writeValue*(w: var JsonWriter[RestJson],
                 value: ValidatorIdent) {.raises: [IOError].} =
  writeValue(w, value.encodeString().get())

proc readValue*(reader: var JsonReader[RestJson],
                value: var ValidatorIdent) {.
     raises: [IOError, SerializationError].} =
  value = decodeString(ValidatorIdent, reader.readValue(string)).valueOr:
    raise newException(SerializationError, $error)

## RestValidatorRequest
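# The accepted JSON body has the shape (illustrative)
#   {"ids": ["1", "0x<pubkey>"], "statuses": ["active", "exited_slashed"]}
# where each id is a validator index or public key and duplicate status values
# are rejected during parsing.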
proc readValue*(reader: var JsonReader[RestJson],
                value: var RestValidatorRequest) {.
     raises: [IOError, SerializationError].} =
  var
    statuses: Opt[seq[string]]
    ids: Opt[seq[string]]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "ids":
      if ids.isSome():
        reader.raiseUnexpectedField("Multiple `ids` fields found",
                                    "RestValidatorRequest")
      ids = Opt.some(reader.readValue(seq[string]))
    of "statuses":
      if statuses.isSome():
        reader.raiseUnexpectedField("Multiple `statuses` fields found",
                                    "RestValidatorRequest")
      statuses = Opt.some(reader.readValue(seq[string]))
    else:
      unrecognizedFieldWarning(fieldName, typeof(value).name)

  let
    validatorIds =
      block:
        # Uniqueness of the identifiers is checked at a higher layer.
        if ids.isSome():
          var res: seq[ValidatorIdent]
          for item in ids.get():
            let value = decodeString(ValidatorIdent, item).valueOr:
              reader.raiseUnexpectedValue($error)
            res.add(value)
          Opt.some(res)
        else:
          Opt.none(seq[ValidatorIdent])
    filter =
      block:
        if statuses.isSome():
          var res: ValidatorFilter
          for item in statuses.get():
            let value = decodeString(ValidatorFilter, item).valueOr:
              reader.raiseUnexpectedValue($error)
            # Check that each status value is unique.
            if value * res != {}:
              reader.raiseUnexpectedValue(
                "The `statuses` array should consist of only unique values")
            res.incl(value)
          Opt.some(res)
        else:
          Opt.none(ValidatorFilter)

  value = RestValidatorRequest(ids: validatorIds, status: filter)

proc writeValue*(writer: var JsonWriter[RestJson],
                 value: RestValidatorRequest) {.raises: [IOError].} =
  writer.beginRecord()
  if value.ids.isSome():
    var res: seq[string]
    for item in value.ids.get():
      res.add(item.encodeString().get())
    writer.writeField("ids", res)
  if value.status.isSome():
    let res = value.status.get().toList()
    if len(res) > 0:
      writer.writeField("statuses", res)
  writer.endRecord()