Mirror of https://github.com/status-im/nimbus-eth2.git, synced 2025-01-15 00:54:49 +00:00
cleanups, partly from kintsugi branch (#3161)
* cleanups, partly from kintsugi branch
* re-export shortLog(EthBlock) and preserve exception messages in batchVerify and processBatch
Parent: 8d7df05f2e
Commit: e6921f808f
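Before the per-file hunks, here is the exception-handling pattern the commit message refers to, as a minimal self-contained Nim sketch. The proc names are hypothetical stand-ins, not the repo's code; the point is only that `raiseAssert exc.msg` re-raises as a Defect while keeping the original message, whereas the old `newException(Defect, ...)` call replaced it with a fixed string.

```nim
proc flaky() =
  ## Hypothetical stand-in for the batched signature-verification call
  ## that may raise an arbitrary exception.
  raise newException(ValueError, "original failure detail")

proc oldStyle() =
  try:
    flaky()
  except Exception:
    # The original message is discarded; only the fixed text reaches the crash log.
    raise newException(Defect, "Unexpected exception in batchVerify.")

proc newStyle() =
  try:
    flaky()
  except Exception as exc:
    # raiseAssert raises a Defect that carries exc.msg, so the detail is preserved.
    raiseAssert exc.msg
```

The same substitution appears twice below, in `batchVerify` and in `processBatch`.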
@@ -37,7 +37,7 @@ proc batchVerify(quarantine: QuarantineRef, sigs: openArray[SignatureSet]): bool
   try:
     return quarantine.taskpool.batchVerify(quarantine.sigVerifCache, sigs, secureRandomBytes)
   except Exception as exc:
-    raise newException(Defect, "Unexpected exception in batchVerify.")
+    raiseAssert exc.msg

 proc addRawBlock*(
     dag: ChainDAGRef, quarantine: QuarantineRef,
@@ -298,7 +298,7 @@ func compute_time_at_slot(genesis_time: uint64, slot: Slot): uint64 =
   genesis_time + slot * SECONDS_PER_SLOT

 # https://github.com/ethereum/consensus-specs/blob/v1.0.1/specs/phase0/validator.md#get_eth1_data
-func voting_period_start_time*(state: ForkedHashedBeaconState): uint64 =
+func voting_period_start_time(state: ForkedHashedBeaconState): uint64 =
   let eth1_voting_period_start_slot =
     getStateField(state, slot) - getStateField(state, slot) mod
       SLOTS_PER_ETH1_VOTING_PERIOD.uint64
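This hunk, like many of those that follow, only drops the trailing `*` export marker, turning a cross-module symbol into a module-private one. For reference, a two-line sketch of what the marker means in Nim (hypothetical names):

```nim
# hypothetical module
func exportedHelper*(x: uint64): uint64 = x + 1  # `*`: visible to importing modules
func privateHelper(x: uint64): uint64 = x - 1    # no `*`: visible only inside this module
```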
@@ -323,7 +323,7 @@ func shortLog*(b: Eth1Block): string =
     &"{b.number}:{shortLog b.voteData.block_hash}(deposits = {b.voteData.deposit_count})"
   except ValueError as exc: raiseAssert exc.msg

-template findBlock*(chain: Eth1Chain, eth1Data: Eth1Data): Eth1Block =
+template findBlock(chain: Eth1Chain, eth1Data: Eth1Data): Eth1Block =
   getOrDefault(chain.blocksByHash, asBlockHash(eth1Data.block_hash), nil)

 func makeSuccessorWithoutDeposits(existingBlock: Eth1Block,
@@ -355,12 +355,9 @@ proc addBlock*(chain: var Eth1Chain, newBlock: Eth1Block) =
   chain.blocksByHash[newBlock.voteData.block_hash.asBlockHash] = newBlock
   eth1_chain_len.set chain.blocks.len.int64

-func hash*(x: Eth1Data): Hash =
+func hash(x: Eth1Data): Hash =
   hash(x.block_hash)

-template hash*(x: Eth1Block): Hash =
-  hash(x.voteData)
-
 template awaitWithRetries*[T](lazyFutExpr: Future[T],
                               retries = 3,
                               timeout = web3Timeouts): untyped =
@@ -397,7 +394,7 @@ template awaitWithRetries*[T](lazyFutExpr: Future[T],

   read(f)

-proc close*(p: Web3DataProviderRef): Future[void] {.async.} =
+proc close(p: Web3DataProviderRef): Future[void] {.async.} =
   if p.blockHeadersSubscription != nil:
     try:
       awaitWithRetries(p.blockHeadersSubscription.unsubscribe())
@@ -406,11 +403,11 @@ proc close*(p: Web3DataProviderRef): Future[void] {.async.} =

   await p.web3.close()

-proc getBlockByHash*(p: Web3DataProviderRef, hash: BlockHash):
+proc getBlockByHash(p: Web3DataProviderRef, hash: BlockHash):
     Future[BlockObject] =
   return p.web3.provider.eth_getBlockByHash(hash, false)

-proc getBlockByNumber*(p: Web3DataProviderRef,
+proc getBlockByNumber(p: Web3DataProviderRef,
                        number: Eth1BlockNumber): Future[BlockObject] =
   let hexNumber = try: &"0x{number:X}" # No leading 0's!
   except ValueError as exc: raiseAssert exc.msg # Never fails
@@ -559,7 +556,7 @@ when hasDepositRootChecks:
       err = err.msg
     result = DepositCountUnavailable

-proc onBlockHeaders*(p: Web3DataProviderRef,
+proc onBlockHeaders(p: Web3DataProviderRef,
                     blockHeaderHandler: BlockHeaderHandler,
                     errorHandler: SubscriptionErrorHandler) {.async.} =
   info "Waiting for new Eth1 block headers"
@@ -577,7 +574,7 @@ func toDepositContractState*(merkleizer: DepositsMerkleizer): DepositContractSta
   result.branch[0..31] = merkleizer.getCombinedChunks[0..31]
   result.deposit_count[24..31] = merkleizer.getChunkCount().toBytesBE

-func createMerkleizer*(s: DepositContractState): DepositsMerkleizer =
+func createMerkleizer(s: DepositContractState): DepositsMerkleizer =
   DepositsMerkleizer.init(s.branch, s.depositCountU64)

 func createMerkleizer*(s: DepositContractSnapshot): DepositsMerkleizer =
@@ -671,7 +668,7 @@ func lowerBound(chain: Eth1Chain, depositCount: uint64): Eth1Block =
       return
     result = eth1Block

-proc trackFinalizedState*(chain: var Eth1Chain,
+proc trackFinalizedState(chain: var Eth1Chain,
                          finalizedEth1Data: Eth1Data,
                          finalizedStateDepositIndex: uint64): bool =
   # Returns true if the Eth1Monitor is synced to the finalization point
@@ -867,7 +864,7 @@ proc resetState(m: Eth1Monitor) {.async.} =
     await m.dataProvider.close()
     m.dataProvider = nil

-proc stop*(m: Eth1Monitor) {.async.} =
+proc stop(m: Eth1Monitor) {.async.} =
   if m.state == Started:
     m.state = Stopping
     m.stopFut = resetState(m)
@@ -157,7 +157,7 @@ proc processBatch(batchCrypto: ref BatchCrypto) =
         batch.pendingBuffer,
         secureRandomBytes)
     except Exception as exc:
-      raise newException(Defect, "Unexpected exception in batchVerify.")
+      raiseAssert exc.msg

   trace "batch crypto - finished",
     batchSize,
@@ -26,7 +26,6 @@ import
 const
   # https://github.com/ethereum/consensus-specs/blob/v1.1.4/specs/merge/beacon-chain.md#execution
   MAX_BYTES_PER_TRANSACTION* = 1073741824
-  MAX_TRANSACTIONS_PER_PAYLOAD* = 1048576
   BYTES_PER_LOGS_BLOOM = 256
   MAX_EXTRA_DATA_BYTES = 32

@@ -1384,7 +1384,7 @@ proc decodeBytes*[T: DecodeTypes](t: typedesc[T], value: openarray[byte],
   of "application/json":
     try:
       ok RestJson.decode(value, T, allowUnknownFields = isExtensibleType)
-    except SerializationError as exc:
+    except SerializationError:
       err("Serialization error")
   else:
     err("Content-Type not supported")
@@ -88,7 +88,6 @@ proc signData*(client: RestClientRef, identifier: ValidatorPubKey,
     inc(nbc_remote_signer_communication_errors)
     return Web3SignerDataResponse.err(msg)
   except CatchableError as exc:
-    let signDur = Moment.now() - startSignTick
     let msg = "[" & $exc.name & "] " & $exc.msg
     debug "Unexpected error occured while generating signature",
           validator = shortLog(identifier),
@@ -203,16 +203,16 @@ template init*(T: type ForkedEpochInfo, info: altair.EpochInfo): T =
 template withState*(x: ForkedHashedBeaconState, body: untyped): untyped =
   case x.kind
   of BeaconStateFork.Merge:
-    const stateFork {.inject.} = BeaconStateFork.Merge
-    template state: untyped {.inject.} = x.mergeData
+    const stateFork {.inject, used.} = BeaconStateFork.Merge
+    template state: untyped {.inject, used.} = x.mergeData
     body
   of BeaconStateFork.Altair:
-    const stateFork {.inject.} = BeaconStateFork.Altair
-    template state: untyped {.inject.} = x.altairData
+    const stateFork {.inject, used.} = BeaconStateFork.Altair
+    template state: untyped {.inject, used.} = x.altairData
     body
   of BeaconStateFork.Phase0:
-    const stateFork {.inject.} = BeaconStateFork.Phase0
-    template state: untyped {.inject.} = x.phase0Data
+    const stateFork {.inject, used.} = BeaconStateFork.Phase0
+    template state: untyped {.inject, used.} = x.phase0Data
     body

 template withEpochInfo*(x: ForkedEpochInfo, body: untyped): untyped =
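The `{.used.}` additions in this hunk (and in the `withBlck` hunk below) suppress the compiler's unused-symbol hints for symbols a template injects into the caller's scope when a particular body never references them. A minimal sketch of the effect, with hypothetical names rather than the actual fork-dispatch machinery:

```nim
template withPayload(x: int, body: untyped): untyped =
  # Injected helpers; {.used.} silences XDeclaredButNotUsed hints when a
  # caller's body ignores them.
  const kind {.inject, used.} = "demo"
  template payload: untyped {.inject, used.} = x
  body

withPayload(7):
  echo "this body uses neither injected symbol"   # no unused-symbol hint

withPayload(7):
  echo kind, ": ", payload                        # injected symbols remain usable
```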
@@ -297,15 +297,15 @@ template withBlck*(
     body: untyped): untyped =
   case x.kind
   of BeaconBlockFork.Phase0:
-    const stateFork {.inject.} = BeaconStateFork.Phase0
+    const stateFork {.inject, used.} = BeaconStateFork.Phase0
     template blck: untyped {.inject.} = x.phase0Data
     body
   of BeaconBlockFork.Altair:
-    const stateFork {.inject.} = BeaconStateFork.Altair
+    const stateFork {.inject, used.} = BeaconStateFork.Altair
     template blck: untyped {.inject.} = x.altairData
     body
   of BeaconBlockFork.Merge:
-    const stateFork {.inject.} = BeaconStateFork.Merge
+    const stateFork {.inject, used.} = BeaconStateFork.Merge
     template blck: untyped {.inject.} = x.mergeData
     body

@@ -202,6 +202,7 @@ func get_unslashed_participating_balances*(state: altair.BeaconState | merge.Bea
       res.previous_epoch[flag_index] += validator_effective_balance

     # Only TIMELY_TARGET_FLAG_INDEX is used with the current epoch in Altair
+    # and merge
     if is_active_current_epoch and has_flag(
         state.current_epoch_participation[validator_index],
         TIMELY_TARGET_FLAG_INDEX):
@@ -64,7 +64,7 @@ proc readChunkPayload*(conn: Connection, peer: Peer,
   var contextBytes: ForkDigest
   try:
     await conn.readExactly(addr contextBytes, sizeof contextBytes)
-  except CatchableError as e:
+  except CatchableError:
     return neterr UnexpectedEOF

   if contextBytes == peer.network.forkDigests.phase0:
@@ -23,23 +23,6 @@ kintsugi test vectors passed

 # Verify that Nimbus runs through the same examples

-- Ensure `tests/test_merge_vectors.nim` points to the correct Web3 URL, e.g.:
-```
-diff --git a/tests/test_merge_vectors.nim b/tests/test_merge_vectors.nim
-index 7eedb46d..1a573c80 100644
---- a/tests/test_merge_vectors.nim
-+++ b/tests/test_merge_vectors.nim
-@@ -12,7 +12,7 @@ import
-
- suite "Merge test vectors":
-   let web3Provider = (waitFor Web3DataProvider.new(
--    default(Eth1Address), "ws://127.0.0.1:8551")).get
-+    default(Eth1Address), "ws://127.0.0.1:8546")).get
-
-   test "getPayload, executePayload, and forkchoiceUpdated":
-     const feeRecipient =
-```
-
 - Run `./env.sh nim c -r tests/test_merge_vectors.nim`. It should show output akin to:

 ```
@@ -53,4 +53,4 @@ echo \{\
 ~/execution_clients/go-ethereum/build/bin/geth --catalyst --http --ws -http.api "engine" --datadir "${GETHDATADIR}" account import <(echo 45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8)

 # Start the node (and press enter once to unlock the account)
-~/execution_clients/go-ethereum/build/bin/geth --catalyst --http --ws --http.api "eth,net,engine" -ws.api "eth,net,engine" --datadir "${GETHDATADIR}" --allow-insecure-unlock --unlock "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" --password "" --nodiscover console
+~/execution_clients/go-ethereum/build/bin/geth --catalyst --http --ws --ws.port 8551 --http.port 8550 --http.api "eth,net,engine" -ws.api "eth,net,engine" --datadir "${GETHDATADIR}" --allow-insecure-unlock --unlock "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" --password "" --nodiscover console
@@ -12,9 +12,6 @@ import
   ../beacon_chain/spec/datatypes/[phase0, altair],
   ../beacon_chain/spec/eth2_ssz_serialization

-template reject(stmt) =
-  doAssert(not compiles(stmt))
-
 static:
   doAssert isFixedSize(Slot) == true

@@ -198,7 +198,6 @@ func makeAttestation*(
   # montonoic enumerable index, is wasteful and slow. Most test callers
   # want ValidatorIndex, so that's supported too.
   let
-    validator = getStateField(state, validators)[validator_index]
     sac_index = committee.find(validator_index)
     data = makeAttestationData(state, slot, index, beacon_block_root)
