REST API produceBlockV3 implementation (#5474)

Co-authored-by: Etan Kissling <etan@status.im>
Co-authored-by: Jacek Sieka <jacek@status.im>

parent 3a527d622d
commit e2e4912645

@@ -455,8 +455,9 @@ OK: 1/1 Fail: 0/1 Skip: 0/1
```diff
+ RestErrorMessage parser tests OK
+ RestErrorMessage writer tests OK
+ strictParse(Stuint) tests OK
```
OK: 2/2 Fail: 0/2 Skip: 0/2
OK: 3/3 Fail: 0/3 Skip: 0/3

## Shufflings
```diff
+ Accelerated shuffling computation OK

@@ -721,4 +722,4 @@ OK: 2/2 Fail: 0/2 Skip: 0/2
OK: 9/9 Fail: 0/9 Skip: 0/9

---TOTAL---
OK: 410/415 Fail: 0/415 Skip: 5/415
OK: 411/416 Fail: 0/416 Skip: 5/416

@@ -408,7 +408,7 @@ proc installValidatorApiHandlers*(router: var RestRouter, node: BeaconNode) =
        let data =
          when consensusFork >= ConsensusFork.Deneb:
            let blobsBundle = message.blobsBundleOpt.get()
            DenebBlockContents(
            deneb.BlockContents(
              `block`: forkyBlck,
              kzg_proofs: blobsBundle.proofs,
              blobs: blobsBundle.blobs)

@@ -519,31 +519,175 @@ proc installValidatorApiHandlers*(router: var RestRouter, node: BeaconNode) =
          Http500, "Unable to initialize payload builder client: " & $error)
      contextFork = node.dag.cfg.consensusForkAtEpoch(node.currentSlot.epoch)

    case contextFork
    of ConsensusFork.Deneb:
      # TODO
      # We should return a block with sidecars here
      # https://github.com/ethereum/beacon-APIs/pull/302/files
      return RestApiResponse.jsonError(
        Http400, "Deneb builder API beacon API not yet supported: " & $denebImplementationMissing)
    of ConsensusFork.Capella:
      let res = await makeBlindedBeaconBlockForHeadAndSlot[
        capella_mev.BlindedBeaconBlock](
        node, payloadBuilderClient, qrandao, proposer, qgraffiti, qhead, qslot)
      if res.isErr():
        return RestApiResponse.jsonError(Http400, res.error())
      return responseVersioned(res.get().blindedBlckPart, contextFork)
    of ConsensusFork.Bellatrix:
      return RestApiResponse.jsonError(Http400, "Pre-Capella builder API unsupported")
    of ConsensusFork.Altair, ConsensusFork.Phase0:
      # Pre-Bellatrix, this endpoint will return a BeaconBlock
      let res = await makeBeaconBlockForHeadAndSlot(
        bellatrix.ExecutionPayloadForSigning, node, qrandao,
        proposer, qgraffiti, qhead, qslot)
      if res.isErr():
        return RestApiResponse.jsonError(Http400, res.error())
      withBlck(res.get().blck):
        return responseVersioned(forkyBlck, contextFork)
    withConsensusFork(contextFork):
      when consensusFork >= ConsensusFork.Capella:
        let res = await makeBlindedBeaconBlockForHeadAndSlot[
          consensusFork.BlindedBeaconBlock](
          node, payloadBuilderClient, qrandao,
          proposer, qgraffiti, qhead, qslot)
        if res.isErr():
          return RestApiResponse.jsonError(Http400, res.error())
        return responseVersioned(res.get().blindedBlckPart, contextFork)
      elif consensusFork >= ConsensusFork.Bellatrix:
        return RestApiResponse.jsonError(
          Http400, "Pre-Capella builder API unsupported")
      else:
        # Pre-Bellatrix, this endpoint will return a BeaconBlock
        let res = await makeBeaconBlockForHeadAndSlot(
          bellatrix.ExecutionPayloadForSigning, node, qrandao,
          proposer, qgraffiti, qhead, qslot)
        if res.isErr():
          return RestApiResponse.jsonError(Http400, res.error())
        withBlck(res.get().blck):
          return responseVersioned(forkyBlck, contextFork)

  func getMaybeBlindedHeaders(
      consensusFork: ConsensusFork,
      isBlinded: bool,
      executionValue: Opt[UInt256],
      consensusValue: Opt[UInt256]): HttpTable =
    var res = HttpTable.init()
    res.add("eth-consensus-version", consensusFork.toString())
    if isBlinded:
      res.add("eth-execution-payload-blinded", "true")
    else:
      res.add("eth-execution-payload-blinded", "false")
    if executionValue.isSome():
      res.add("eth-execution-payload-value", $(executionValue.get()))
    if consensusValue.isSome():
      res.add("eth-consensus-block-value", $(consensusValue.get()))
    res

  # https://ethereum.github.io/beacon-APIs/#/Validator/produceBlockV3
  router.api(MethodGet, "/eth/v3/validator/blocks/{slot}") do (
    slot: Slot, randao_reveal: Option[ValidatorSig],
    graffiti: Option[GraffitiBytes],
    skip_randao_verification: Option[string]) -> RestApiResponse:
    let
      contentType = preferredContentType(jsonMediaType, sszMediaType).valueOr:
        return RestApiResponse.jsonError(Http406, ContentNotAcceptableError)
      qslot = block:
        if slot.isErr():
          return RestApiResponse.jsonError(Http400, InvalidSlotValueError,
                                           $slot.error())
        let res = slot.get()

        if res <= node.dag.finalizedHead.slot:
          return RestApiResponse.jsonError(Http400, InvalidSlotValueError,
                                           "Slot already finalized")
        let wallTime =
          node.beaconClock.now() + MAXIMUM_GOSSIP_CLOCK_DISPARITY
        if res > wallTime.slotOrZero:
          return RestApiResponse.jsonError(Http400, InvalidSlotValueError,
                                           "Slot cannot be in the future")
        res
      qskip_randao_verification =
        if skip_randao_verification.isNone():
          false
        else:
          let res = skip_randao_verification.get()
          if res.isErr() or res.get() != "":
            return RestApiResponse.jsonError(
              Http400, InvalidSkipRandaoVerificationValue)
          true
      qrandao =
        if randao_reveal.isNone():
          return RestApiResponse.jsonError(Http400,
                                           MissingRandaoRevealValue)
        else:
          let res = randao_reveal.get()
          if res.isErr():
            return RestApiResponse.jsonError(Http400,
                                             InvalidRandaoRevealValue,
                                             $res.error())
          res.get()
      qgraffiti =
        if graffiti.isNone():
          defaultGraffitiBytes()
        else:
          let res = graffiti.get()
          if res.isErr():
            return RestApiResponse.jsonError(Http400,
                                             InvalidGraffitiBytesValue,
                                             $res.error())
          res.get()
      qhead =
        block:
          let res = node.getSyncedHead(qslot)
          if res.isErr():
            return RestApiResponse.jsonError(Http503, BeaconNodeInSyncError,
                                             $res.error())
          let tres = res.get()
          if not tres.executionValid:
            return RestApiResponse.jsonError(Http503, BeaconNodeInSyncError)
          tres
      proposer = node.dag.getProposer(qhead, qslot).valueOr:
        return RestApiResponse.jsonError(Http400, ProposerNotFoundError)

    if not node.verifyRandao(
        qslot, proposer, qrandao, qskip_randao_verification):
      return RestApiResponse.jsonError(Http400, InvalidRandaoRevealValue)

    withConsensusFork(node.dag.cfg.consensusForkAtEpoch(qslot.epoch)):
      when consensusFork >= ConsensusFork.Capella:
        let
          message = (await node.makeMaybeBlindedBeaconBlockForHeadAndSlot(
            consensusFork, qrandao, qgraffiti, qhead, qslot)).valueOr:
              # HTTP 400 error is only for incorrect parameters.
              return RestApiResponse.jsonError(Http500, error)
          headers = consensusFork.getMaybeBlindedHeaders(
            message.blck.isBlinded,
            message.executionValue,
            message.consensusValue)

        if contentType == sszMediaType:
          if message.blck.isBlinded:
            RestApiResponse.sszResponse(message.blck.blindedData, headers)
          else:
            RestApiResponse.sszResponse(message.blck.data, headers)
        elif contentType == jsonMediaType:
          let forked =
            if message.blck.isBlinded:
              ForkedMaybeBlindedBeaconBlock.init(
                message.blck.blindedData,
                message.executionValue,
                message.consensusValue)
            else:
              ForkedMaybeBlindedBeaconBlock.init(
                message.blck.data,
                message.executionValue,
                message.consensusValue)
          RestApiResponse.jsonResponsePlain(forked, headers)
        else:
          raiseAssert "preferredContentType() returns invalid content type"
      else:
        when consensusFork >= ConsensusFork.Bellatrix:
          type PayloadType = consensusFork.ExecutionPayloadForSigning
        else:
          type PayloadType = bellatrix.ExecutionPayloadForSigning
        let
          message = (await PayloadType.makeBeaconBlockForHeadAndSlot(
            node, qrandao, proposer, qgraffiti, qhead, qslot)).valueOr:
              return RestApiResponse.jsonError(Http500, error)
          executionValue = Opt.some(UInt256(message.blockValue))
          consensusValue = Opt.none(UInt256)
          headers = consensusFork.getMaybeBlindedHeaders(
            isBlinded = false, executionValue, consensusValue)

        doAssert message.blck.kind == consensusFork
        template forkyBlck: untyped = message.blck.forky(consensusFork)
        if contentType == sszMediaType:
          RestApiResponse.sszResponse(forkyBlck, headers)
        elif contentType == jsonMediaType:
          let forked =
            when consensusFork >= ConsensusFork.Bellatrix:
              ForkedMaybeBlindedBeaconBlock.init(
                forkyBlck, executionValue, consensusValue)
            else:
              ForkedMaybeBlindedBeaconBlock.init(forkyBlck)
          RestApiResponse.jsonResponsePlain(forked, headers)
        else:
          raiseAssert "preferredContentType() returns invalid content type"

  # https://ethereum.github.io/beacon-APIs/#/Validator/produceAttestationData
  router.api(MethodGet, "/eth/v1/validator/attestation_data") do (

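For reference, a minimal sketch (not part of this commit) of how a client of the new V3 endpoint might branch on the headers emitted by `getMaybeBlindedHeaders` above; the `headers` table here is a plain string map standing in for the real HTTP response headers.

```nim
import std/tables

# Hypothetical helper: decide whether the returned block is blinded.
proc isBlindedResponse(headers: Table[string, string]): bool =
  headers.getOrDefault("eth-execution-payload-blinded", "false") == "true"

let headers = {
  "eth-consensus-version": "capella",
  "eth-execution-payload-blinded": "false",
  "eth-execution-payload-value": "1000000000"
}.toTable

if isBlindedResponse(headers):
  echo "blinded block: submit it through the builder/relay flow"
else:
  echo "full block: sign and publish directly, fork = ",
    headers["eth-consensus-version"]
```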
@@ -1023,12 +1023,6 @@ func checkForkConsistency*(cfg: RuntimeConfig) =
  assertForkEpochOrder(cfg.BELLATRIX_FORK_EPOCH, cfg.CAPELLA_FORK_EPOCH)
  assertForkEpochOrder(cfg.CAPELLA_FORK_EPOCH, cfg.DENEB_FORK_EPOCH)

# This is a readily/uniquely searchable token of where a false assertion is
# due to a Deneb implementation missing. checkForkConsistency() checks that
# Nimbus does not run any non-FAR_FUTURE_EPOCH Deneb network, so such cases
# won't be hit.
const denebImplementationMissing* = false

func ofLen*[T, N](ListType: type List[T, N], n: int): ListType =
  if n < N:
    distinctBase(result).setLen(n)

@@ -501,6 +501,11 @@ type
    SigVerifiedBeaconBlockBody |
    TrustedBeaconBlockBody

  BlockContents* = object
    `block`*: BeaconBlock
    kzg_proofs*: KzgProofs
    blobs*: Blobs

# TODO: There should be only a single generic HashedBeaconState definition
func initHashedBeaconState*(s: BeaconState): HashedBeaconState =
  HashedBeaconState(data: s)

@@ -10,7 +10,7 @@
import std/[typetraits, strutils]
import stew/[assign2, results, base10, byteutils, endians2], presto/common,
       libp2p/peerid, serialization, json_serialization,
       json_serialization/std/[net, sets],
       json_serialization/std/[net, sets], stint,
       chronicles
import ".."/[eth2_ssz_serialization, forks, keystore],
       ".."/../consensus_object_pools/block_pools_types,

@@ -110,7 +110,8 @@ type
    bellatrix.SignedBeaconBlock |
    capella.SignedBeaconBlock |
    phase0.SignedBeaconBlock |
    DenebSignedBlockContents
    DenebSignedBlockContents |
    ForkedMaybeBlindedBeaconBlock

  EncodeArrays* =
    seq[Attestation] |

@@ -167,9 +168,94 @@ type

  RestBlockTypes* = phase0.BeaconBlock | altair.BeaconBlock |
                    bellatrix.BeaconBlock | capella.BeaconBlock |
                    DenebBlockContents | capella_mev.BlindedBeaconBlock |
                    deneb.BlockContents | capella_mev.BlindedBeaconBlock |
                    deneb_mev.BlindedBeaconBlock

func readStrictHexChar(c: char, radix: static[uint8]): Result[int8, cstring] =
  ## Converts an hex char to an int
  const
    lowerLastChar = chr(ord('a') + radix - 11'u8)
    capitalLastChar = chr(ord('A') + radix - 11'u8)
  case c
  of '0' .. '9': ok(int8 ord(c) - ord('0'))
  of 'a' .. lowerLastChar: ok(int8 ord(c) - ord('a') + 10)
  of 'A' .. capitalLastChar: ok(int8 ord(c) - ord('A') + 10)
  else: err("Invalid hexadecimal character encountered!")

func readStrictDecChar(c: char, radix: static[uint8]): Result[int8, cstring] =
  const lastChar = char(ord('0') + radix - 1'u8)
  case c
  of '0' .. lastChar: ok(int8 ord(c) - ord('0'))
  else: err("Invalid decimal character encountered!")

func skipPrefixes(str: string,
                  radix: range[2..16]): Result[int, cstring] =
  ## Returns the index of the first meaningful char in `hexStr` by skipping
  ## "0x" prefix
  if len(str) < 2:
    return ok(0)

  return
    if str[0] == '0':
      if str[1] in {'x', 'X'}:
        if radix != 16:
          return err("Parsing mismatch, 0x prefix is only valid for a " &
                     "hexadecimal number (base 16)")
        ok(2)
      elif str[1] in {'o', 'O'}:
        if radix != 8:
          return err("Parsing mismatch, 0o prefix is only valid for an " &
                     "octal number (base 8)")
        ok(2)
      elif str[1] in {'b', 'B'}:
        if radix == 2:
          ok(2)
        elif radix == 16:
          # allow something like "0bcdef12345" which is a valid hex
          ok(0)
        else:
          err("Parsing mismatch, 0b prefix is only valid for a binary number " &
              "(base 2), or hex number")
      else:
        ok(0)
    else:
      ok(0)

func strictParse*[bits: static[int]](input: string,
                                     T: typedesc[StUint[bits]],
                                     radix: static[uint8] = 10
                                    ): Result[T, cstring] {.raises: [].} =
  var res: T
  static: doAssert (radix >= 2) and (radix <= 16),
                   "Only base from 2..16 are supported"

  const
    base = radix.uint8.stuint(bits)
    zero = 0.uint8.stuint(256)

  var currentIndex =
    block:
      let res = skipPrefixes(input, radix)
      if res.isErr():
        return err(res.error)
      res.get()

  while currentIndex < len(input):
    let value =
      when radix <= 10:
        ? readStrictDecChar(input[currentIndex], radix)
      else:
        ? readStrictHexChar(input[currentIndex], radix)
    let mres = res * base
    if (res != zero) and (mres div base != res):
      return err("Overflow error")
    let ares = mres + value.stuint(bits)
    if ares < mres:
      return err("Overflow error")
    res = ares
    inc(currentIndex)
  ok(res)

proc prepareJsonResponse*(t: typedesc[RestApiResponse], d: auto): seq[byte] =
  let res =
    block:

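A brief usage sketch for `strictParse` above (illustrative only; assumes `stint` and the surrounding module are available). Unlike a lenient parser, it rejects overflowing values and stray characters instead of truncating:

```nim
import std/strutils
import stint

# Decimal and 0x-prefixed hex inputs parse into UInt256.
doAssert strictParse("1000000000", UInt256, 10).get() == 1000000000.u256
doAssert strictParse("0xff", UInt256, 16).get() == 255.u256

# Values wider than 256 bits and invalid digits are reported as errors.
doAssert strictParse("0x" & repeat('f', 65), UInt256, 16).isErr()
doAssert strictParse("123a", UInt256, 10).isErr()
```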
@@ -416,6 +502,22 @@ proc jsonResponsePlain*(t: typedesc[RestApiResponse],
        default
  RestApiResponse.response(res, Http200, "application/json")

proc jsonResponsePlain*(t: typedesc[RestApiResponse],
                        data: auto, headers: HttpTable): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = JsonWriter[RestJson].init(stream)
        writer.writeValue(data)
        stream.getOutput(seq[byte])
      except SerializationError:
        default
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/json", headers = headers)

proc jsonResponseWMeta*(t: typedesc[RestApiResponse],
                        data: auto, meta: auto): RestApiResponse =
  let res =

@@ -605,6 +707,23 @@ proc sszResponse*(t: typedesc[RestApiResponse], data: auto,
  RestApiResponse.response(res, Http200, "application/octet-stream",
                           headers = headers)

proc sszResponse*(t: typedesc[RestApiResponse], data: auto,
                  headers: HttpTable): RestApiResponse =
  let res =
    block:
      var default: seq[byte]
      try:
        var stream = memoryOutput()
        var writer = SszWriter.init(stream)
        writer.writeValue(data)
        stream.getOutput(seq[byte])
      except SerializationError:
        default
      except IOError:
        default
  RestApiResponse.response(res, Http200, "application/octet-stream",
                           headers = headers)

template hexOriginal(data: openArray[byte]): string =
  to0xHex(data)

@@ -1205,11 +1324,11 @@ proc readValue*[BlockType: ProduceBlockResponseV2](
    let res =
      try:
        Opt.some(RestJson.decode(string(data.get()),
                                 DenebBlockContents,
                                 deneb.BlockContents,
                                 requireAllFields = true,
                                 allowUnknownFields = true))
      except SerializationError:
        Opt.none(DenebBlockContents)
        Opt.none(deneb.BlockContents)
    if res.isNone():
      reader.raiseUnexpectedValue("Incorrect deneb block format")
    value = ProduceBlockResponseV2(kind: ConsensusFork.Deneb,

@@ -3074,6 +3193,120 @@ proc readValue*(reader: var JsonReader[RestJson],
      let msg = "Multiple `" & fieldName & "` fields found"
      reader.raiseUnexpectedField(msg, "VCRuntimeConfig")

## ForkedMaybeBlindedBeaconBlock
proc writeValue*(writer: var JsonWriter[RestJson],
                 value: ForkedMaybeBlindedBeaconBlock) {.raises: [IOError].} =
  writer.beginRecord()
  withForkyMaybeBlindedBlck(value):
    writer.writeField("version", consensusFork.toString())
    when isBlinded:
      writer.writeField("execution_payload_blinded", "true")
    else:
      writer.writeField("execution_payload_blinded", "false")
    if value.executionValue.isSome():
      writer.writeField("execution_payload_value",
                        $(value.executionValue.get()))
    if value.consensusValue.isSome():
      writer.writeField("consensus_block_value",
                        $(value.consensusValue.get()))
    writer.writeField("data", forkyMaybeBlindedBlck)
  writer.endRecord()

proc readValue*(reader: var JsonReader[RestJson],
                value: var ForkedMaybeBlindedBeaconBlock) {.
    raises: [SerializationError, IOError].} =
  var
    version: Opt[ConsensusFork]
    blinded: Opt[bool]
    executionValue: Opt[UInt256]
    consensusValue: Opt[UInt256]
    data: Opt[JsonString]

  for fieldName in readObjectFields(reader):
    case fieldName
    of "version":
      if version.isSome():
        reader.raiseUnexpectedField("Multiple `version` fields found",
                                    "ForkedMaybeBlindedBeaconBlock")
      let res = reader.readValue(string)
      version = ConsensusFork.init(res)
      if version.isNone:
        reader.raiseUnexpectedValue("Incorrect `version` field value")
    of "execution_payload_blinded":
      if blinded.isSome():
        reader.raiseUnexpectedField("Multiple `execution_payload_blinded`" &
                                    "fields found",
                                    "ForkedMaybeBlindedBeaconBlock")
      blinded = Opt.some(reader.readValue(bool))
    of "execution_payload_value":
      if executionValue.isSome():
        reader.raiseUnexpectedField("Multiple `execution_payload_value`" &
                                    "fields found",
                                    "ForkedMaybeBlindedBeaconBlock")
      let res = strictParse(reader.readValue(string), UInt256, 10)
      if res.isErr():
        reader.raiseUnexpectedValue($res.error)
      executionValue = Opt.some(res.get())
    of "consensus_block_value":
      if consensusValue.isSome():
        reader.raiseUnexpectedField("Multiple `consensus_block_value`" &
                                    "fields found",
                                    "ForkedMaybeBlindedBeaconBlock")
      let res = strictParse(reader.readValue(string), UInt256, 10)
      if res.isErr():
        reader.raiseUnexpectedValue($res.error)
      consensusValue = Opt.some(res.get())
    of "data":
      if data.isSome():
        reader.raiseUnexpectedField("Multiple `data` fields found",
                                    "ForkedMaybeBlindedBeaconBlock")
      data = Opt.some(reader.readValue(JsonString))
    else:
      unrecognizedFieldWarning()

  if version.isNone():
    reader.raiseUnexpectedValue("Field `version` is missing")
  if blinded.isNone():
    reader.raiseUnexpectedValue("Field `execution_payload_blinded` is missing")
  if executionValue.isNone():
    reader.raiseUnexpectedValue("Field `execution_payload_value` is missing")
  # TODO (cheatfate): At some point we should add check for missing
  # `consensus_block_value` too
  if data.isNone():
    reader.raiseUnexpectedValue("Field `data` is missing")

  withConsensusFork(version.get):
    when consensusFork >= ConsensusFork.Capella:
      if blinded.get:
        value = ForkedMaybeBlindedBeaconBlock.init(
          RestJson.decode(
            string(data.get()), consensusFork.BlindedBlockContents,
            requireAllFields = true, allowUnknownFields = true),
          executionValue, consensusValue)
      else:
        value = ForkedMaybeBlindedBeaconBlock.init(
          RestJson.decode(
            string(data.get()), consensusFork.BlockContents,
            requireAllFields = true, allowUnknownFields = true),
          executionValue, consensusValue)
    elif consensusFork >= ConsensusFork.Bellatrix:
      if blinded.get:
        reader.raiseUnexpectedValue(
          "`execution_payload_blinded` unsupported for `version`")
      value = ForkedMaybeBlindedBeaconBlock.init(
        RestJson.decode(
          string(data.get()), consensusFork.BlockContents,
          requireAllFields = true, allowUnknownFields = true),
        executionValue, consensusValue)
    else:
      if blinded.get:
        reader.raiseUnexpectedValue(
          "`execution_payload_blinded` unsupported for `version`")
      value = ForkedMaybeBlindedBeaconBlock.init(
        RestJson.decode(
          string(data.get()), consensusFork.BlockContents,
          requireAllFields = true, allowUnknownFields = true))

proc parseRoot(value: string): Result[Eth2Digest, cstring] =
  try:
    ok(Eth2Digest(data: hexToByteArray[32](value)))

@@ -3524,7 +3757,7 @@ proc decodeBytes*[T: DecodeConsensysTypes](
    return err("Invalid or Unsupported consensus version")
  case fork
  of ConsensusFork.Deneb:
    let blckContents = ? readSszResBytes(DenebBlockContents, value)
    let blckContents = ? readSszResBytes(deneb.BlockContents, value)
    ok(ProduceBlockResponseV2(kind: ConsensusFork.Deneb,
                              denebData: blckContents))
  of ConsensusFork.Capella:

@@ -341,18 +341,13 @@ type
    of ConsensusFork.Capella: capellaBody*: capella.BeaconBlockBody
    of ConsensusFork.Deneb: denebBody*: deneb.BeaconBlockBody

  DenebBlockContents* = object
    `block`*: deneb.BeaconBlock
    kzg_proofs*: deneb.KzgProofs
    blobs*: deneb.Blobs

  ProduceBlockResponseV2* = object
    case kind*: ConsensusFork
    of ConsensusFork.Phase0: phase0Data*: phase0.BeaconBlock
    of ConsensusFork.Altair: altairData*: altair.BeaconBlock
    of ConsensusFork.Bellatrix: bellatrixData*: bellatrix.BeaconBlock
    of ConsensusFork.Capella: capellaData*: capella.BeaconBlock
    of ConsensusFork.Deneb: denebData*: DenebBlockContents
    of ConsensusFork.Deneb: denebData*: deneb.BlockContents

  VCRuntimeConfig* = Table[string, string]

@@ -940,7 +935,7 @@ func toValidatorIndex*(value: RestValidatorIndex): Result[ValidatorIndex,
      err(ValidatorIndexError.TooHighValue)
  else:
    doAssert(false, "ValidatorIndex type size is incorrect")

template withBlck*(x: ProduceBlockResponseV2,
                   body: untyped): untyped =
  case x.kind

@@ -137,6 +137,10 @@ type
    capella.ExecutionPayloadForSigning |
    deneb.ExecutionPayloadForSigning

  ForkyBlindedBeaconBlock* =
    capella_mev.BlindedBeaconBlock |
    deneb_mev.BlindedBeaconBlock

  ForkedBeaconBlock* = object
    case kind*: ConsensusFork
    of ConsensusFork.Phase0: phase0Data*: phase0.BeaconBlock

@@ -145,6 +149,21 @@ type
    of ConsensusFork.Capella: capellaData*: capella.BeaconBlock
    of ConsensusFork.Deneb: denebData*: deneb.BeaconBlock

  ForkedMaybeBlindedBeaconBlock* = object
    case kind*: ConsensusFork
    of ConsensusFork.Phase0:
      phase0Data*: phase0.BeaconBlock
    of ConsensusFork.Altair:
      altairData*: altair.BeaconBlock
    of ConsensusFork.Bellatrix:
      bellatrixData*: bellatrix.BeaconBlock
    of ConsensusFork.Capella:
      capellaData*: capella_mev.MaybeBlindedBeaconBlock
    of ConsensusFork.Deneb:
      denebData*: deneb_mev.MaybeBlindedBeaconBlock
    consensusValue*: Opt[UInt256]
    executionValue*: Opt[UInt256]

  Web3SignerForkedBeaconBlock* = object
    kind*: ConsensusFork
    data*: BeaconBlockHeader

@@ -420,6 +439,26 @@ template ExecutionPayloadForSigning*(kind: static ConsensusFork): auto =
  else:
    static: raiseAssert "Unreachable"

template BlindedBeaconBlock*(kind: static ConsensusFork): auto =
  when kind == ConsensusFork.Deneb:
    typedesc[deneb_mev.BlindedBeaconBlock]
  elif kind == ConsensusFork.Capella:
    typedesc[capella_mev.BlindedBeaconBlock]
  elif kind == ConsensusFork.Bellatrix:
    static: raiseAssert "Unsupported"
  else:
    static: raiseAssert "Unreachable"

template MaybeBlindedBeaconBlock*(kind: static ConsensusFork): auto =
  when kind == ConsensusFork.Deneb:
    typedesc[deneb_mev.MaybeBlindedBeaconBlock]
  elif kind == ConsensusFork.Capella:
    typedesc[capella_mev.MaybeBlindedBeaconBlock]
  elif kind == ConsensusFork.Bellatrix:
    static: raiseAssert "Unsupported"
  else:
    static: raiseAssert "Unreachable"

template SignedBlindedBeaconBlock*(kind: static ConsensusFork): auto =
  when kind == ConsensusFork.Deneb:
    typedesc[deneb_mev.SignedBlindedBeaconBlock]

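As a side note, a small sketch (not part of this diff, and assuming the forks module above is imported) of how these per-fork type templates resolve when used inside `withConsensusFork`:

```nim
# Illustrative only; the templates pick a concrete typedesc at compile time.
withConsensusFork(ConsensusFork.Capella):
  when consensusFork >= ConsensusFork.Capella:
    # `consensusFork.BlindedBeaconBlock` resolves to
    # capella_mev.BlindedBeaconBlock in this branch.
    var blinded: consensusFork.BlindedBeaconBlock
    discard blinded
```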
@@ -468,6 +507,30 @@ template withConsensusFork*(
    const consensusFork {.inject, used.} = ConsensusFork.Phase0
    body

template BlockContents*(
    kind: static ConsensusFork): auto =
  when kind == ConsensusFork.Deneb:
    typedesc[deneb.BlockContents]
  elif kind == ConsensusFork.Capella:
    typedesc[capella.BeaconBlock]
  elif kind == ConsensusFork.Bellatrix:
    typedesc[bellatrix.BeaconBlock]
  elif kind == ConsensusFork.Altair:
    typedesc[altair.BeaconBlock]
  elif kind == ConsensusFork.Phase0:
    typedesc[phase0.BeaconBlock]
  else:
    {.error: "BlockContents does not support " & $kind.}

template BlindedBlockContents*(
    kind: static ConsensusFork): auto =
  when kind == ConsensusFork.Deneb:
    typedesc[deneb_mev.BlindedBeaconBlock]
  elif kind == ConsensusFork.Capella:
    typedesc[capella_mev.BlindedBeaconBlock]
  else:
    {.error: "BlindedBlockContents does not support " & $kind.}

# TODO when https://github.com/nim-lang/Nim/issues/21086 fixed, use return type
# `ref T`
func new*(T: type ForkedHashedBeaconState, data: phase0.BeaconState):

@@ -612,6 +675,25 @@ template toString*(kind: ConsensusFork): string =
  of ConsensusFork.Deneb:
    "deneb"

template init*(T: typedesc[ConsensusFork], value: string): Opt[ConsensusFork] =
  case value
  of "deneb":
    Opt.some ConsensusFork.Deneb
  of "capella":
    Opt.some ConsensusFork.Capella
  of "bellatrix":
    Opt.some ConsensusFork.Bellatrix
  of "altair":
    Opt.some ConsensusFork.Altair
  of "phase0":
    Opt.some ConsensusFork.Phase0
  else:
    Opt.none(ConsensusFork)

static:
  for fork in ConsensusFork:
    doAssert ConsensusFork.init(fork.toString()).expect("init defined") == fork

template init*(T: type ForkedEpochInfo, info: phase0.EpochInfo): T =
  T(kind: EpochInfoFork.Phase0, phase0Data: info)
template init*(T: type ForkedEpochInfo, info: altair.EpochInfo): T =

@@ -641,7 +723,10 @@ template withState*(x: ForkedHashedBeaconState, body: untyped): untyped =
    body

template forky*(
    x: ForkedHashedBeaconState, kind: static ConsensusFork): untyped =
    x:
      ForkedBeaconBlock |
      ForkedHashedBeaconState,
    kind: static ConsensusFork): untyped =
  when kind == ConsensusFork.Deneb:
    x.denebData
  elif kind == ConsensusFork.Capella:

@@ -884,6 +969,53 @@ chronicles.formatIt ForkedSignedBeaconBlock: it.shortLog
chronicles.formatIt ForkedMsgTrustedSignedBeaconBlock: it.shortLog
chronicles.formatIt ForkedTrustedSignedBeaconBlock: it.shortLog

template withForkyMaybeBlindedBlck*(
    b: ForkedMaybeBlindedBeaconBlock,
    body: untyped): untyped =
  case b.kind
  of ConsensusFork.Deneb:
    const consensusFork {.inject, used.} = ConsensusFork.Deneb
    template d: untyped = b.denebData
    case d.isBlinded:
    of true:
      const isBlinded {.inject, used.} = true
      template forkyMaybeBlindedBlck: untyped {.inject, used.} = d.blindedData
      body
    of false:
      const isBlinded {.inject, used.} = false
      template forkyMaybeBlindedBlck: untyped {.inject, used.} = d.data
      body
  of ConsensusFork.Capella:
    const consensusFork {.inject, used.} = ConsensusFork.Capella
    template d: untyped = b.capellaData
    case d.isBlinded:
    of true:
      const isBlinded {.inject, used.} = true
      template forkyMaybeBlindedBlck: untyped {.inject, used.} = d.blindedData
      body
    of false:
      const isBlinded {.inject, used.} = false
      template forkyMaybeBlindedBlck: untyped {.inject, used.} = d.data
      body
  of ConsensusFork.Bellatrix:
    const
      consensusFork {.inject, used.} = ConsensusFork.Bellatrix
      isBlinded {.inject, used.} = false
    template forkyMaybeBlindedBlck: untyped {.inject, used.} = b.bellatrixData
    body
  of ConsensusFork.Altair:
    const
      consensusFork {.inject, used.} = ConsensusFork.Altair
      isBlinded {.inject, used.} = false
    template forkyMaybeBlindedBlck: untyped {.inject, used.} = b.altairData
    body
  of ConsensusFork.Phase0:
    const
      consensusFork {.inject, used.} = ConsensusFork.Phase0
      isBlinded {.inject, used.} = false
    template forkyMaybeBlindedBlck: untyped {.inject, used.} = b.phase0Data
    body

template withStateAndBlck*(
    s: ForkedHashedBeaconState,
    b: ForkedBeaconBlock | ForkedSignedBeaconBlock |

@@ -1152,3 +1284,68 @@ func historical_summaries*(state: ForkedHashedBeaconState):
      forkyState.data.historical_summaries
    else:
      HashList[HistoricalSummary, Limit HISTORICAL_ROOTS_LIMIT]()

template init*(T: type ForkedMaybeBlindedBeaconBlock,
               blck: phase0.BeaconBlock): T =
  ForkedMaybeBlindedBeaconBlock(
    kind: ConsensusFork.Phase0,
    phase0Data: blck)

template init*(T: type ForkedMaybeBlindedBeaconBlock,
               blck: altair.BeaconBlock): T =
  ForkedMaybeBlindedBeaconBlock(
    kind: ConsensusFork.Altair,
    altairData: blck)

template init*(T: type ForkedMaybeBlindedBeaconBlock,
               blck: bellatrix.BeaconBlock,
               evalue: Opt[UInt256], cvalue: Opt[UInt256]): T =
  ForkedMaybeBlindedBeaconBlock(
    kind: ConsensusFork.Bellatrix,
    bellatrixData: blck,
    consensusValue: cvalue,
    executionValue: evalue)

template init*(T: type ForkedMaybeBlindedBeaconBlock,
               blck: capella.BeaconBlock,
               evalue: Opt[UInt256], cvalue: Opt[UInt256]): T =
  ForkedMaybeBlindedBeaconBlock(
    kind: ConsensusFork.Capella,
    capellaData: capella_mev.MaybeBlindedBeaconBlock(
      isBlinded: false,
      data: blck),
    consensusValue: cvalue,
    executionValue: evalue)

template init*(T: type ForkedMaybeBlindedBeaconBlock,
               blck: capella_mev.BlindedBeaconBlock,
               evalue: Opt[UInt256], cvalue: Opt[UInt256]): T =
  ForkedMaybeBlindedBeaconBlock(
    kind: ConsensusFork.Capella,
    capellaData: capella_mev.MaybeBlindedBeaconBlock(
      isBlinded: true,
      blindedData: blck),
    consensusValue: cvalue,
    executionValue: evalue)

template init*(T: type ForkedMaybeBlindedBeaconBlock,
               blck: deneb.BlockContents,
               evalue: Opt[UInt256], cvalue: Opt[UInt256]): T =
  ForkedMaybeBlindedBeaconBlock(
    kind: ConsensusFork.Deneb,
    denebData: deneb_mev.MaybeBlindedBeaconBlock(
      isBlinded: false,
      data: blck),
    consensusValue: cvalue,
    executionValue: evalue)

template init*(T: type ForkedMaybeBlindedBeaconBlock,
               blck: deneb_mev.BlindedBeaconBlock,
               evalue: Opt[UInt256], cvalue: Opt[UInt256]): T =
  ForkedMaybeBlindedBeaconBlock(
    kind: ConsensusFork.Deneb,
    denebData: deneb_mev.MaybeBlindedBeaconBlock(
      isBlinded: true,
      blindedData: blck),
    consensusValue: cvalue,
    executionValue: evalue)

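A short sketch (not part of the commit) of how the `init` overloads above pair with `withForkyMaybeBlindedBlck`; the empty Capella block and the value figures are placeholders, and the forks module plus `stint` are assumed to be imported:

```nim
# Build a non-blinded Capella entry and read it back fork-agnostically.
let forked = ForkedMaybeBlindedBeaconBlock.init(
  capella.BeaconBlock(),        # placeholder block
  Opt.some(1000000000.u256),    # execution payload value
  Opt.none(UInt256))            # consensus block value unknown

withForkyMaybeBlindedBlck(forked):
  doAssert consensusFork == ConsensusFork.Capella
  doAssert not isBlinded
  discard forkyMaybeBlindedBlck  # capella.BeaconBlock in this branch
```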
@@ -64,6 +64,13 @@ type
    state_root*: Eth2Digest
    body*: BlindedBeaconBlockBody # [Modified in Capella]

  MaybeBlindedBeaconBlock* = object
    case isBlinded*: bool
    of false:
      data*: capella.BeaconBlock
    of true:
      blindedData*: BlindedBeaconBlock

  # https://github.com/ethereum/builder-specs/blob/v0.3.0/specs/bellatrix/builder.md#signedblindedbeaconblock
  # https://github.com/ethereum/builder-specs/blob/v0.3.0/specs/capella/builder.md#blindedbeaconblockbody
  SignedBlindedBeaconBlock* = object

@@ -55,6 +55,13 @@ type
    state_root*: Eth2Digest
    body*: BlindedBeaconBlockBody # [Modified in Deneb]

  MaybeBlindedBeaconBlock* = object
    case isBlinded*: bool
    of false:
      data*: deneb.BlockContents
    of true:
      blindedData*: BlindedBeaconBlock

  # https://github.com/ethereum/builder-specs/blob/v0.3.0/specs/bellatrix/builder.md#signedblindedbeaconblock
  # https://github.com/ethereum/builder-specs/blob/v0.3.0/specs/capella/builder.md#blindedbeaconblockbody
  SignedBlindedBeaconBlock* = object

@@ -717,7 +717,7 @@ proc blindedBlockCheckSlashingAndSign[
proc getUnsignedBlindedBeaconBlock[
    T: capella_mev.SignedBlindedBeaconBlock |
       deneb_mev.SignedBlindedBeaconBlock](
    node: BeaconNode, slot: Slot, validator: AttachedValidator,
    node: BeaconNode, slot: Slot,
    validator_index: ValidatorIndex, forkedBlock: ForkedBeaconBlock,
    executionPayloadHeader: capella.ExecutionPayloadHeader |
                            deneb_mev.BlindedExecutionPayloadAndBlobsBundle):

@@ -826,7 +826,7 @@ proc getBuilderBid[
    SBBB: capella_mev.SignedBlindedBeaconBlock |
          deneb_mev.SignedBlindedBeaconBlock](
    node: BeaconNode, payloadBuilderClient: RestClientRef, head: BlockRef,
    validator: AttachedValidator, slot: Slot, randao: ValidatorSig,
    validator_pubkey: ValidatorPubKey, slot: Slot, randao: ValidatorSig,
    validator_index: ValidatorIndex):
    Future[BlindedBlockResult[SBBB]] {.async.} =
  ## Returns the unsigned blinded block obtained from the Builder API.

@@ -839,7 +839,7 @@ proc getBuilderBid[
    static: doAssert false

  let blindedBlockParts = await getBlindedBlockParts[EPH](
    node, payloadBuilderClient, head, validator.pubkey, slot, randao,
    node, payloadBuilderClient, head, validator_pubkey, slot, randao,
    validator_index, node.graffitiBytes)
  if blindedBlockParts.isErr:
    # Not signed yet, fine to try to fall back on EL

@@ -851,8 +851,7 @@ proc getBuilderBid[
  let (executionPayloadHeader, bidValue, forkedBlck) = blindedBlockParts.get

  let unsignedBlindedBlock = getUnsignedBlindedBeaconBlock[SBBB](
    node, slot, validator, validator_index, forkedBlck,
    executionPayloadHeader)
    node, slot, validator_index, forkedBlck, executionPayloadHeader)

  if unsignedBlindedBlock.isErr:
    return err unsignedBlindedBlock.error()

@@ -894,8 +893,7 @@ proc proposeBlockMEV(
func isEFMainnet(cfg: RuntimeConfig): bool =
  cfg.DEPOSIT_CHAIN_ID == 1 and cfg.DEPOSIT_NETWORK_ID == 1

proc makeBlindedBeaconBlockForHeadAndSlot*[
    BBB: capella_mev.BlindedBeaconBlock](
proc makeBlindedBeaconBlockForHeadAndSlot*[BBB: ForkyBlindedBeaconBlock](
    node: BeaconNode, payloadBuilderClient: RestClientRef,
    randao_reveal: ValidatorSig, validator_index: ValidatorIndex,
    graffiti: GraffitiBytes, head: BlockRef, slot: Slot):

@@ -906,7 +904,9 @@ proc makeBlindedBeaconBlockForHeadAndSlot*[
  ##
  ## This function is used by the validator client, but not the beacon node for
  ## its own validators.
  when BBB is capella_mev.BlindedBeaconBlock:
  when BBB is deneb_mev.BlindedBeaconBlock:
    type EPH = deneb_mev.BlindedExecutionPayloadAndBlobsBundle
  elif BBB is capella_mev.BlindedBeaconBlock:
    type EPH = capella.ExecutionPayloadHeader
  else:
    static: doAssert false

@@ -952,8 +952,9 @@ proc makeBlindedBeaconBlockForHeadAndSlot*[

proc collectBidFutures(
    SBBB: typedesc, EPS: typedesc, node: BeaconNode,
    payloadBuilderClient: RestClientRef, validator: AttachedValidator,
    validator_index: ValidatorIndex, head: BlockRef, slot: Slot,
    payloadBuilderClient: RestClientRef, validator_pubkey: ValidatorPubKey,
    validator_index: ValidatorIndex, graffitiBytes: GraffitiBytes,
    head: BlockRef, slot: Slot,
    randao: ValidatorSig): Future[BlockProposalBidFutures[SBBB]] {.async.} =
  let usePayloadBuilder =
    if not payloadBuilderClient.isNil:

@@ -972,9 +973,8 @@ proc collectBidFutures(
    payloadBuilderBidFut =
      if usePayloadBuilder:
        when not (EPS is bellatrix.ExecutionPayloadForSigning):
          getBuilderBid[SBBB](
            node, payloadBuilderClient, head, validator, slot, randao,
            validator_index)
          getBuilderBid[SBBB](node, payloadBuilderClient, head,
                              validator_pubkey, slot, randao, validator_index)
        else:
          let fut = newFuture[BlindedBlockResult[SBBB]]("builder-bid")
          fut.complete(BlindedBlockResult[SBBB].err(

@@ -986,7 +986,7 @@ proc collectBidFutures(
            "either payload builder disabled or liveness failsafe active"))
        fut
    engineBlockFut = makeBeaconBlockForHeadAndSlot(
      EPS, node, randao, validator_index, node.graffitiBytes, head, slot)
      EPS, node, randao, validator_index, graffitiBytes, head, slot)

  # getBuilderBid times out after BUILDER_PROPOSAL_DELAY_TOLERANCE, with 1 more
  # second for remote validators. makeBeaconBlockForHeadAndSlot times out after

@@ -1000,7 +1000,7 @@ proc collectBidFutures(
        true
      elif usePayloadBuilder:
        info "Payload builder error",
          slot, head = shortLog(head), validator = shortLog(validator),
          slot, head = shortLog(head), validator = shortLog(validator_pubkey),
          err = payloadBuilderBidFut.read().error()
        false
      else:

@@ -1008,7 +1008,7 @@ proc collectBidFutures(
        false
    else:
      info "Payload builder bid future failed",
        slot, head = shortLog(head), validator = shortLog(validator),
        slot, head = shortLog(head), validator = shortLog(validator_pubkey),
        err = payloadBuilderBidFut.error.msg
      false

@@ -1018,12 +1018,12 @@ proc collectBidFutures(
        true
      else:
        info "Engine block building error",
          slot, head = shortLog(head), validator = shortLog(validator),
          slot, head = shortLog(head), validator = shortLog(validator_pubkey),
          err = engineBlockFut.read.error()
        false
    else:
      info "Engine block building failed",
        slot, head = shortLog(head), validator = shortLog(validator),
        slot, head = shortLog(head), validator = shortLog(validator_pubkey),
        err = engineBlockFut.error.msg
      false

@@ -1060,8 +1060,8 @@ proc proposeBlockAux(
    payloadBuilderClient = payloadBuilderClientMaybe.get

  let collectedBids = await collectBidFutures(
    SBBB, EPS, node, payloadBuilderClient, validator, validator_index, head,
    slot, randao)
    SBBB, EPS, node, payloadBuilderClient, validator.pubkey, validator_index,
    node.graffitiBytes, head, slot, randao)

  let useBuilderBlock =
    if collectedBids.builderBidAvailable:

@@ -1887,3 +1887,85 @@ proc registerDuties*(node: BeaconNode, wallSlot: Slot) {.async.} =

        node.consensusManager[].actionTracker.registerDuty(
          slot, subnet_id, validator_index, isAggregator)

proc makeMaybeBlindedBeaconBlockForHeadAndSlotImpl[ResultType](
    node: BeaconNode, consensusFork: static ConsensusFork,
    randao_reveal: ValidatorSig, graffiti: GraffitiBytes,
    head: BlockRef, slot: Slot): Future[ResultType] {.async.} =
  let
    proposer = node.dag.getProposer(head, slot).valueOr:
      return ResultType.err(
        "Unable to get proposer for specific head and slot")
    proposerKey = node.dag.validatorKey(proposer).get().toPubKey()

    payloadBuilderClient =
      node.getPayloadBuilderClient(proposer.distinctBase).valueOr:
        nil
    localBlockValueBoost = node.config.localBlockValueBoost

    collectedBids =
      await collectBidFutures(consensusFork.SignedBlindedBeaconBlock,
                              consensusFork.ExecutionPayloadForSigning,
                              node,
                              payloadBuilderClient, proposerKey,
                              proposer, graffiti, head, slot,
                              randao_reveal)
    useBuilderBlock =
      if collectedBids.builderBidAvailable:
        (not collectedBids.engineBidAvailable) or builderBetterBid(
          localBlockValueBoost,
          collectedBids.payloadBuilderBidFut.read.get().blockValue,
          collectedBids.engineBlockFut.read.get().blockValue)
      else:
        if not(collectedBids.engineBidAvailable):
          return ResultType.err("Engine bid is not available")
        false

    blockResult = block:
      if useBuilderBlock:
        let
          blindedResult = collectedBids.payloadBuilderBidFut.read()
          payloadValue = blindedResult.get().blockValue

        return ResultType.ok((
          blck: consensusFork.MaybeBlindedBeaconBlock(
            isBlinded: true,
            blindedData: blindedResult.get().blindedBlckPart.message),
          executionValue: Opt.some(payloadValue),
          consensusValue: Opt.none(UInt256)))

      collectedBids.engineBlockFut.read().get()

  doAssert blockResult.blck.kind == consensusFork
  template forkyBlck: untyped = blockResult.blck.forky(consensusFork)
  when consensusFork >= ConsensusFork.Deneb:
    let blobsBundle = blockResult.blobsBundleOpt.get()
    doAssert blobsBundle.commitments == forkyBlck.body.blob_kzg_commitments
    ResultType.ok((
      blck: consensusFork.MaybeBlindedBeaconBlock(
        isBlinded: false,
        data: deneb.BlockContents(
          `block`: forkyBlck,
          kzg_proofs: blobsBundle.proofs,
          blobs: blobsBundle.blobs)),
      executionValue: Opt.some(blockResult.blockValue),
      consensusValue: Opt.none(UInt256)))
  else:
    ResultType.ok((
      blck: consensusFork.MaybeBlindedBeaconBlock(
        isBlinded: false,
        data: forkyBlck),
      executionValue: Opt.some(blockResult.blockValue),
      consensusValue: Opt.none(UInt256)))

proc makeMaybeBlindedBeaconBlockForHeadAndSlot*(
    node: BeaconNode, consensusFork: static ConsensusFork,
    randao_reveal: ValidatorSig, graffiti: GraffitiBytes,
    head: BlockRef, slot: Slot): auto =
  type ResultType = Result[tuple[
    blck: consensusFork.MaybeBlindedBeaconBlock,
    executionValue: Opt[UInt256],
    consensusValue: Opt[UInt256]], string]

  makeMaybeBlindedBeaconBlockForHeadAndSlotImpl[ResultType](
    node, consensusFork, randao_reveal, graffiti, head, slot)

@@ -147,3 +147,127 @@ suite "Serialization/deserialization test suite":
        """{"code":500,"message":"data","stacktraces":["s1","s2"]}"""
      jsonErrorList(RestApiResponse, Http408, "data", ["s1", "s2"]) ==
        """{"code":408,"message":"data","failures":["s1","s2"]}"""

  test "strictParse(Stuint) tests":
    const
      GoodVectors16 = [
        ("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
         "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"),
        ("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
         "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"),
        ("0x123456789ABCDEFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
         "123456789abcdefffffffffffffffffffffffffffffffffffffffffffffffff"),
        ("123456789ABCDEFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
         "123456789abcdefffffffffffffffffffffffffffffffffffffffffffffffff")
      ]
      GoodVectors10 = [
        ("115792089237316195423570985008687907853269984665640564039457584007913129639935",
         "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"),
        ("0", "0"),
      ]
      GoodVectors8 = [
        ("0o17777777777777777777777777777777777777777777777777777777777777777777777777777777777777",
         "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff")
      ]
      GoodVectors2 = [
        ("0b1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111",
         "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff")
      ]
      OverflowVectors16 = [
        "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF0",
        "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE",
        "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF0",
        "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE"
      ]
      OverflowVectors10 = [
        "1157920892373161954235709850086879078532699846656405640394575840079131296399350",
        "1157920892373161954235709850086879078532699846656405640394575840079131296399351"
      ]
      OverflowVectors8 = [
        "0o177777777777777777777777777777777777777777777777777777777777777777777777777777777777770",
        "0o177777777777777777777777777777777777777777777777777777777777777777777777777777777777777"
      ]
      OverflowVectors2 = [
        "0b11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111110",
        "0b11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"
      ]
      InvalidCharsVectors16 = [
        "GFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
        "0xGFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
        "0x0123456789ABCDEFZFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
        "0123456789ABCDEFXFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
      ]
      InvalidCharsVectors10 = [
        "11579208923731619542357098500868790785326998466564056403945758400791312963993A",
        "K"
      ]
      InvalidCharsVectors8 = [
        "0o17777777777777777777777777777777777777777777777777777777777777777777777777777777777778"
      ]
      InvalidCharsVectors2 = [
        "0b1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111112"
      ]

    for vector in GoodVectors16:
      let res = strictParse(vector[0], UInt256, 16)
      check:
        res.isOk()
        res.get().toHex() == vector[1]

    for vector in GoodVectors10:
      let res = strictParse(vector[0], UInt256, 10)
      check:
        res.isOk()
        res.get().toHex() == vector[1]

    for vector in GoodVectors8:
      let res = strictParse(vector[0], UInt256, 8)
      check:
        res.isOk()
        res.get().toHex() == vector[1]

    for vector in GoodVectors2:
      let res = strictParse(vector[0], UInt256, 2)
      check:
        res.isOk()
        res.get().toHex() == vector[1]

    for vector in OverflowVectors16:
      let res = strictParse(vector, UInt256, 16)
      check:
        res.isErr()
        res.error == "Overflow error"

    for vector in OverflowVectors10:
      let res = strictParse(vector, UInt256, 10)
      check:
        res.isErr()
        res.error == "Overflow error"

    for vector in OverflowVectors8:
      let res = strictParse(vector, UInt256, 8)
      check:
        res.isErr()
        res.error == "Overflow error"

    for vector in OverflowVectors2:
      let res = strictParse(vector, UInt256, 2)
      check:
        res.isErr()
        res.error == "Overflow error"

    for vector in InvalidCharsVectors16:
      let res = strictParse(vector, UInt256, 16)
      check res.isErr()

    for vector in InvalidCharsVectors10:
      let res = strictParse(vector, UInt256, 10)
      check res.isErr()

    for vector in InvalidCharsVectors8:
      let res = strictParse(vector, UInt256, 8)
      check res.isErr()

    for vector in InvalidCharsVectors2:
      let res = strictParse(vector, UInt256, 2)
      check res.isErr()