commit 67c2f9ee9e

Makefile
@@ -26,11 +26,11 @@ GENERATOR_VENVS = $(patsubst $(GENERATOR_DIR)/%, $(GENERATOR_DIR)/%venv, $(GENER
 MARKDOWN_FILES = $(wildcard $(SPEC_DIR)/phase0/*.md) \
 $(wildcard $(SPEC_DIR)/altair/*.md) $(wildcard $(SPEC_DIR)/altair/**/*.md) \
 $(wildcard $(SPEC_DIR)/bellatrix/*.md) \
-$(wildcard $(SPEC_DIR)/capella/*.md) \
+$(wildcard $(SPEC_DIR)/capella/*.md) $(wildcard $(SPEC_DIR)/capella/**/*.md) \
 $(wildcard $(SPEC_DIR)/custody/*.md) \
 $(wildcard $(SPEC_DIR)/das/*.md) \
 $(wildcard $(SPEC_DIR)/sharding/*.md) \
-$(wildcard $(SPEC_DIR)/eip4844/*.md) \
+$(wildcard $(SPEC_DIR)/eip4844/*.md) $(wildcard $(SPEC_DIR)/eip4844/**/*.md) \
 $(wildcard $(SSZ_DIR)/*.md)

 COV_HTML_OUT=.htmlcov
README.md

@@ -24,8 +24,8 @@ Features are researched and developed in parallel, and then consolidated into se
 ### In-development Specifications
 | Code Name or Topic | Specs | Notes |
 | - | - | - |
-| Capella (tentative) | <ul><li>Core</li><ul><li>[Beacon chain changes](specs/capella/beacon-chain.md)</li><li>[Capella fork](specs/capella/fork.md)</li></ul><li>Additions</li><ul><li>[Validator additions](specs/capella/validator.md)</li><li>[P2P networking](specs/capella/p2p-interface.md)</li></ul></ul> |
+| Capella (tentative) | <ul><li>Core</li><ul><li>[Beacon chain changes](specs/capella/beacon-chain.md)</li><li>[Capella fork](specs/capella/fork.md)</li></ul><li>Additions</li><ul><li>[Light client sync protocol changes](specs/capella/light-client/sync-protocol.md) ([fork](specs/capella/light-client/fork.md), [full node](specs/capella/light-client/full-node.md), [networking](specs/capella/light-client/p2p-interface.md))</li></ul><ul><li>[Validator additions](specs/capella/validator.md)</li><li>[P2P networking](specs/capella/p2p-interface.md)</li></ul></ul> |
-| EIP4844 (tentative) | <ul><li>Core</li><ul><li>[Beacon Chain changes](specs/eip4844/beacon-chain.md)</li><li>[EIP-4844 fork](specs/eip4844/fork.md)</li><li>[Polynomial commitments](specs/eip4844/polynomial-commitments.md)</li><li>[Fork choice changes](specs/eip4844/fork-choice.md)</li></ul><li>Additions</li><ul><li>[Honest validator guide changes](specs/eip4844/validator.md)</li><li>[P2P networking](specs/eip4844/p2p-interface.md)</li></ul></ul> |
+| EIP4844 (tentative) | <ul><li>Core</li><ul><li>[Beacon Chain changes](specs/eip4844/beacon-chain.md)</li><li>[EIP-4844 fork](specs/eip4844/fork.md)</li><li>[Polynomial commitments](specs/eip4844/polynomial-commitments.md)</li><li>[Fork choice changes](specs/eip4844/fork-choice.md)</li></ul><li>Additions</li><ul><li>[Light client sync protocol changes](specs/eip4844/light-client/sync-protocol.md) ([fork](specs/eip4844/light-client/fork.md), [full node](specs/eip4844/light-client/full-node.md), [networking](specs/eip4844/light-client/p2p-interface.md))</li></ul><ul><li>[Honest validator guide changes](specs/eip4844/validator.md)</li><li>[P2P networking](specs/eip4844/p2p-interface.md)</li></ul></ul> |
 | Sharding (outdated) | <ul><li>Core</li><ul><li>[Beacon Chain changes](specs/sharding/beacon-chain.md)</li></ul><li>Additions</li><ul><li>[P2P networking](specs/sharding/p2p-interface.md)</li></ul></ul> |
 | Custody Game (outdated) | <ul><li>Core</li><ul><li>[Beacon Chain changes](specs/custody_game/beacon-chain.md)</li></ul><li>Additions</li><ul><li>[Honest validator guide changes](specs/custody_game/validator.md)</li></ul></ul> | Dependent on sharding |
 | Data Availability Sampling (outdated) | <ul><li>Core</li><ul><li>[Core types and functions](specs/das/das-core.md)</li><li>[Fork choice changes](specs/das/fork-choice.md)</li></ul><li>Additions</li><ul><li>[P2P Networking](specs/das/p2p-interface.md)</li><li>[Sampling process](specs/das/sampling.md)</li></ul></ul> | <ul><li>Dependent on sharding</li><li>[Technical explainer](https://hackmd.io/@HWeNw8hNRimMm2m2GH56Cw/B1YJPGkpD)</li></ul> |
setup.py
@@ -616,6 +616,21 @@ from eth2spec.bellatrix import {preset_name} as bellatrix
 '''

+    @classmethod
+    def sundry_functions(cls) -> str:
+        return super().sundry_functions() + '\n\n' + '''
+def compute_merkle_proof_for_block_body(body: BeaconBlockBody,
+                                        index: GeneralizedIndex) -> Sequence[Bytes32]:
+    return build_proof(body.get_backing(), index)'''
+
+    @classmethod
+    def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
+        constants = {
+            'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)',
+        }
+        return {**super().hardcoded_ssz_dep_constants(), **constants}
+
 #
 # EIP4844SpecBuilder
 #
@@ -690,6 +705,7 @@ def objects_to_spec(preset_name: str,
     if k in [
         "ceillog2",
         "floorlog2",
+        "compute_merkle_proof_for_block_body",
         "compute_merkle_proof_for_state",
     ]:
         del spec_object.functions[k]
@@ -982,6 +998,10 @@ class PySpecCommand(Command):
         """
         if self.spec_fork in (CAPELLA, EIP4844):
             self.md_doc_paths += """
+                specs/capella/light-client/fork.md
+                specs/capella/light-client/full-node.md
+                specs/capella/light-client/p2p-interface.md
+                specs/capella/light-client/sync-protocol.md
                 specs/capella/beacon-chain.md
                 specs/capella/fork.md
                 specs/capella/fork-choice.md
@@ -990,6 +1010,10 @@ class PySpecCommand(Command):
         """
         if self.spec_fork == EIP4844:
             self.md_doc_paths += """
+                specs/eip4844/light-client/fork.md
+                specs/eip4844/light-client/full-node.md
+                specs/eip4844/light-client/p2p-interface.md
+                specs/eip4844/light-client/sync-protocol.md
                 specs/eip4844/beacon-chain.md
                 specs/eip4844/fork.md
                 specs/eip4844/fork-choice.md
specs/capella/light-client/fork.md (new file)

@@ -0,0 +1,92 @@
# Capella Light Client -- Fork Logic

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
  - [Upgrading light client data](#upgrading-light-client-data)
  - [Upgrading the store](#upgrading-the-store)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This document describes how to upgrade existing light client objects based on the [Altair specification](../../altair/light-client/sync-protocol.md) to Capella. This is necessary when processing pre-Capella data with a post-Capella `LightClientStore`. Note that the data being exchanged over the network protocols uses the original format.

### Upgrading light client data

A Capella `LightClientStore` can still process earlier light client data. In order to do so, that pre-Capella data needs to be locally upgraded to Capella before processing.

```python
def upgrade_lc_header_to_capella(pre: bellatrix.LightClientHeader) -> LightClientHeader:
    return LightClientHeader(
        beacon=pre.beacon,
    )
```

```python
def upgrade_lc_bootstrap_to_capella(pre: bellatrix.LightClientBootstrap) -> LightClientBootstrap:
    return LightClientBootstrap(
        header=upgrade_lc_header_to_capella(pre.header),
        current_sync_committee=pre.current_sync_committee,
        current_sync_committee_branch=pre.current_sync_committee_branch,
    )
```

```python
def upgrade_lc_update_to_capella(pre: bellatrix.LightClientUpdate) -> LightClientUpdate:
    return LightClientUpdate(
        attested_header=upgrade_lc_header_to_capella(pre.attested_header),
        next_sync_committee=pre.next_sync_committee,
        next_sync_committee_branch=pre.next_sync_committee_branch,
        finalized_header=upgrade_lc_header_to_capella(pre.finalized_header),
        finality_branch=pre.finality_branch,
        sync_aggregate=pre.sync_aggregate,
        signature_slot=pre.signature_slot,
    )
```

```python
def upgrade_lc_finality_update_to_capella(pre: bellatrix.LightClientFinalityUpdate) -> LightClientFinalityUpdate:
    return LightClientFinalityUpdate(
        attested_header=upgrade_lc_header_to_capella(pre.attested_header),
        finalized_header=upgrade_lc_header_to_capella(pre.finalized_header),
        finality_branch=pre.finality_branch,
        sync_aggregate=pre.sync_aggregate,
        signature_slot=pre.signature_slot,
    )
```

```python
def upgrade_lc_optimistic_update_to_capella(pre: bellatrix.LightClientOptimisticUpdate) -> LightClientOptimisticUpdate:
    return LightClientOptimisticUpdate(
        attested_header=upgrade_lc_header_to_capella(pre.attested_header),
        sync_aggregate=pre.sync_aggregate,
        signature_slot=pre.signature_slot,
    )
```

### Upgrading the store

Existing `LightClientStore` objects based on Altair MUST be upgraded to Capella before Capella based light client data can be processed. The `LightClientStore` upgrade MAY be performed before `CAPELLA_FORK_EPOCH`.

```python
def upgrade_lc_store_to_capella(pre: bellatrix.LightClientStore) -> LightClientStore:
    if pre.best_valid_update is None:
        best_valid_update = None
    else:
        best_valid_update = upgrade_lc_update_to_capella(pre.best_valid_update)
    return LightClientStore(
        finalized_header=upgrade_lc_header_to_capella(pre.finalized_header),
        current_sync_committee=pre.current_sync_committee,
        next_sync_committee=pre.next_sync_committee,
        best_valid_update=best_valid_update,
        optimistic_header=upgrade_lc_header_to_capella(pre.optimistic_header),
        previous_max_active_participants=pre.previous_max_active_participants,
        current_max_active_participants=pre.current_max_active_participants,
    )
```
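For illustration only (not part of the diff above), a minimal sketch of how a client holding a Bellatrix-era store might apply the upgrade; the wrapper function and its name are assumptions for the example.

```python
# Illustrative only. The upgrade MAY be performed before CAPELLA_FORK_EPOCH and
# MUST have happened before Capella based light client data is processed.
def ensure_capella_store(store):
    if isinstance(store, bellatrix.LightClientStore):
        return upgrade_lc_store_to_capella(store)
    return store  # already a Capella LightClientStore
```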
specs/capella/light-client/full-node.md (new file)

@@ -0,0 +1,78 @@
# Capella Light Client -- Full Node

**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Helper functions](#helper-functions)
  - [`compute_merkle_proof_for_block_body`](#compute_merkle_proof_for_block_body)
  - [Modified `block_to_light_client_header`](#modified-block_to_light_client_header)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This upgrade adds information about the execution payload to light client data as part of the Capella upgrade.

## Helper functions

### `compute_merkle_proof_for_block_body`

```python
def compute_merkle_proof_for_block_body(body: BeaconBlockBody,
                                        index: GeneralizedIndex) -> Sequence[Bytes32]:
    ...
```

### Modified `block_to_light_client_header`

```python
def block_to_light_client_header(block: SignedBeaconBlock) -> LightClientHeader:
    epoch = compute_epoch_at_slot(block.message.slot)

    if epoch >= CAPELLA_FORK_EPOCH:
        payload = block.message.body.execution_payload
        execution_header = ExecutionPayloadHeader(
            parent_hash=payload.parent_hash,
            fee_recipient=payload.fee_recipient,
            state_root=payload.state_root,
            receipts_root=payload.receipts_root,
            logs_bloom=payload.logs_bloom,
            prev_randao=payload.prev_randao,
            block_number=payload.block_number,
            gas_limit=payload.gas_limit,
            gas_used=payload.gas_used,
            timestamp=payload.timestamp,
            extra_data=payload.extra_data,
            base_fee_per_gas=payload.base_fee_per_gas,
            block_hash=payload.block_hash,
            transactions_root=hash_tree_root(payload.transactions),
            withdrawals_root=hash_tree_root(payload.withdrawals),
        )
        execution_branch = compute_merkle_proof_for_block_body(block.message.body, EXECUTION_PAYLOAD_INDEX)
    else:
        # Note that during fork transitions, `finalized_header` may still point to earlier forks.
        # While Bellatrix blocks also contain an `ExecutionPayload` (minus `withdrawals_root`),
        # it was not included in the corresponding light client data. To ensure compatibility
        # with legacy data going through `upgrade_lc_header_to_capella`, leave out execution data.
        execution_header = ExecutionPayloadHeader()
        execution_branch = [Bytes32() for _ in range(floorlog2(EXECUTION_PAYLOAD_INDEX))]

    return LightClientHeader(
        beacon=BeaconBlockHeader(
            slot=block.message.slot,
            proposer_index=block.message.proposer_index,
            parent_root=block.message.parent_root,
            state_root=block.message.state_root,
            body_root=hash_tree_root(block.message.body),
        ),
        execution=execution_header,
        execution_branch=execution_branch,
    )
```
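The `...` body above is elided because the executable spec fills it in; the `setup.py` change earlier in this commit generates it as a thin wrapper around the SSZ backend's `build_proof`. A sketch of that generated form (assuming a remerkleable-style `get_backing()` tree API):

```python
# Sketch of the generated implementation wired up in setup.py above.
def compute_merkle_proof_for_block_body(body: BeaconBlockBody,
                                        index: GeneralizedIndex) -> Sequence[Bytes32]:
    # Build the Merkle proof for `index` directly from the SSZ backing tree.
    return build_proof(body.get_backing(), index)
```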
specs/capella/light-client/p2p-interface.md (new file)

@@ -0,0 +1,99 @@
# Capella Light Client -- Networking

**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Networking](#networking)
  - [The gossip domain: gossipsub](#the-gossip-domain-gossipsub)
    - [Topics and messages](#topics-and-messages)
      - [Global topics](#global-topics)
        - [`light_client_finality_update`](#light_client_finality_update)
        - [`light_client_optimistic_update`](#light_client_optimistic_update)
  - [The Req/Resp domain](#the-reqresp-domain)
    - [Messages](#messages)
      - [GetLightClientBootstrap](#getlightclientbootstrap)
      - [LightClientUpdatesByRange](#lightclientupdatesbyrange)
      - [GetLightClientFinalityUpdate](#getlightclientfinalityupdate)
      - [GetLightClientOptimisticUpdate](#getlightclientoptimisticupdate)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Networking

The [Altair light client networking specification](../../altair/light-client/p2p-interface.md) is extended to exchange [Capella light client data](./sync-protocol.md).

### The gossip domain: gossipsub

#### Topics and messages

##### Global topics

###### `light_client_finality_update`

[0]: # (eth2spec: skip)

| `fork_version` | Message SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientFinalityUpdate` |
| `CAPELLA_FORK_VERSION` and later | `capella.LightClientFinalityUpdate` |

###### `light_client_optimistic_update`

[0]: # (eth2spec: skip)

| `fork_version` | Message SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientOptimisticUpdate` |
| `CAPELLA_FORK_VERSION` and later | `capella.LightClientOptimisticUpdate` |

### The Req/Resp domain

#### Messages

##### GetLightClientBootstrap

[0]: # (eth2spec: skip)

| `fork_version` | Response SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientBootstrap` |
| `CAPELLA_FORK_VERSION` and later | `capella.LightClientBootstrap` |

##### LightClientUpdatesByRange

[0]: # (eth2spec: skip)

| `fork_version` | Response chunk SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientUpdate` |
| `CAPELLA_FORK_VERSION` and later | `capella.LightClientUpdate` |

##### GetLightClientFinalityUpdate

[0]: # (eth2spec: skip)

| `fork_version` | Response SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientFinalityUpdate` |
| `CAPELLA_FORK_VERSION` and later | `capella.LightClientFinalityUpdate` |

##### GetLightClientOptimisticUpdate

[0]: # (eth2spec: skip)

| `fork_version` | Response SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientOptimisticUpdate` |
| `CAPELLA_FORK_VERSION` and later | `capella.LightClientOptimisticUpdate` |
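For illustration only, a minimal sketch of the fork-version dispatch implied by the `GetLightClientBootstrap` table above; the helper name and structure are assumptions, not part of the specification.

```python
# Illustrative sketch mirroring the response-type table above.
def bootstrap_response_ssz_type(fork_version: Version):
    if fork_version == GENESIS_FORK_VERSION:
        raise ValueError("no light client bootstrap before Altair")
    if fork_version in (ALTAIR_FORK_VERSION, BELLATRIX_FORK_VERSION):
        return altair.LightClientBootstrap
    # `CAPELLA_FORK_VERSION` and later
    return capella.LightClientBootstrap
```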
specs/capella/light-client/sync-protocol.md (new file)

@@ -0,0 +1,82 @@
# Capella Light Client -- Sync Protocol

**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Constants](#constants)
- [Containers](#containers)
  - [Modified `LightClientHeader`](#modified-lightclientheader)
- [Helper functions](#helper-functions)
  - [`get_lc_execution_root`](#get_lc_execution_root)
  - [Modified `is_valid_light_client_header`](#modified-is_valid_light_client_header)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This upgrade adds information about the execution payload to light client data as part of the Capella upgrade. It extends the [Altair Light Client specifications](../../altair/light-client/sync-protocol.md). The [fork document](./fork.md) explains how to upgrade existing Altair based deployments to Capella.

Additional documents describe the impact of the upgrade on certain roles:
- [Full node](./full-node.md)
- [Networking](./p2p-interface.md)

## Constants

| Name | Value |
| - | - |
| `EXECUTION_PAYLOAD_INDEX` | `get_generalized_index(BeaconBlockBody, 'execution_payload')` (= 25) |

## Containers

### Modified `LightClientHeader`

```python
class LightClientHeader(Container):
    # Beacon block header
    beacon: BeaconBlockHeader
    # Execution payload header corresponding to `beacon.body_root` (from Capella onward)
    execution: ExecutionPayloadHeader
    execution_branch: Vector[Bytes32, floorlog2(EXECUTION_PAYLOAD_INDEX)]
```

## Helper functions

### `get_lc_execution_root`

```python
def get_lc_execution_root(header: LightClientHeader) -> Root:
    epoch = compute_epoch_at_slot(header.beacon.slot)

    if epoch >= CAPELLA_FORK_EPOCH:
        return hash_tree_root(header.execution)

    return Root()
```

### Modified `is_valid_light_client_header`

```python
def is_valid_light_client_header(header: LightClientHeader) -> bool:
    epoch = compute_epoch_at_slot(header.beacon.slot)

    if epoch < CAPELLA_FORK_EPOCH:
        return (
            header.execution == ExecutionPayloadHeader()
            and header.execution_branch == [Bytes32() for _ in range(floorlog2(EXECUTION_PAYLOAD_INDEX))]
        )

    return is_valid_merkle_branch(
        leaf=get_lc_execution_root(header),
        branch=header.execution_branch,
        depth=floorlog2(EXECUTION_PAYLOAD_INDEX),
        index=get_subtree_index(EXECUTION_PAYLOAD_INDEX),
        root=header.beacon.body_root,
    )
```
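As a worked check on the `EXECUTION_PAYLOAD_INDEX` constant above (a non-normative note): the Capella `BeaconBlockBody` has 11 fields, so its hash tree is padded to 16 leaves, and `execution_payload` is field 9, giving generalized index 16 + 9 = 25. The helpers used by `is_valid_light_client_header` then follow directly:

```python
# Worked example; the field-layout assumption (11 fields, execution_payload at
# position 9) is stated in the note above.
EXECUTION_PAYLOAD_INDEX = 2**4 + 9                 # 16-leaf padded body, field index 9 -> 25
assert floorlog2(EXECUTION_PAYLOAD_INDEX) == 4     # proof depth of `execution_branch`
assert EXECUTION_PAYLOAD_INDEX % 2**4 == 9         # value of get_subtree_index(EXECUTION_PAYLOAD_INDEX)
```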
specs/eip4844/light-client/fork.md (new file)

@@ -0,0 +1,110 @@
# EIP4844 Light Client -- Fork Logic

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
  - [Upgrading light client data](#upgrading-light-client-data)
  - [Upgrading the store](#upgrading-the-store)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This document describes how to upgrade existing light client objects based on the [Capella specification](../../capella/light-client/sync-protocol.md) to EIP4844. This is necessary when processing pre-EIP4844 data with a post-EIP4844 `LightClientStore`. Note that the data being exchanged over the network protocols uses the original format.

### Upgrading light client data

An EIP4844 `LightClientStore` can still process earlier light client data. In order to do so, that pre-EIP4844 data needs to be locally upgraded to EIP4844 before processing.

```python
def upgrade_lc_header_to_eip4844(pre: capella.LightClientHeader) -> LightClientHeader:
    return LightClientHeader(
        beacon=pre.beacon,
        execution=ExecutionPayloadHeader(
            parent_hash=pre.execution.parent_hash,
            fee_recipient=pre.execution.fee_recipient,
            state_root=pre.execution.state_root,
            receipts_root=pre.execution.receipts_root,
            logs_bloom=pre.execution.logs_bloom,
            prev_randao=pre.execution.prev_randao,
            block_number=pre.execution.block_number,
            gas_limit=pre.execution.gas_limit,
            gas_used=pre.execution.gas_used,
            timestamp=pre.execution.timestamp,
            extra_data=pre.execution.extra_data,
            base_fee_per_gas=pre.execution.base_fee_per_gas,
            block_hash=pre.execution.block_hash,
            transactions_root=pre.execution.transactions_root,
            withdrawals_root=pre.execution.withdrawals_root,
        ),
        execution_branch=pre.execution_branch,
    )
```

```python
def upgrade_lc_bootstrap_to_eip4844(pre: capella.LightClientBootstrap) -> LightClientBootstrap:
    return LightClientBootstrap(
        header=upgrade_lc_header_to_eip4844(pre.header),
        current_sync_committee=pre.current_sync_committee,
        current_sync_committee_branch=pre.current_sync_committee_branch,
    )
```

```python
def upgrade_lc_update_to_eip4844(pre: capella.LightClientUpdate) -> LightClientUpdate:
    return LightClientUpdate(
        attested_header=upgrade_lc_header_to_eip4844(pre.attested_header),
        next_sync_committee=pre.next_sync_committee,
        next_sync_committee_branch=pre.next_sync_committee_branch,
        finalized_header=upgrade_lc_header_to_eip4844(pre.finalized_header),
        finality_branch=pre.finality_branch,
        sync_aggregate=pre.sync_aggregate,
        signature_slot=pre.signature_slot,
    )
```

```python
def upgrade_lc_finality_update_to_eip4844(pre: capella.LightClientFinalityUpdate) -> LightClientFinalityUpdate:
    return LightClientFinalityUpdate(
        attested_header=upgrade_lc_header_to_eip4844(pre.attested_header),
        finalized_header=upgrade_lc_header_to_eip4844(pre.finalized_header),
        finality_branch=pre.finality_branch,
        sync_aggregate=pre.sync_aggregate,
        signature_slot=pre.signature_slot,
    )
```

```python
def upgrade_lc_optimistic_update_to_eip4844(pre: capella.LightClientOptimisticUpdate) -> LightClientOptimisticUpdate:
    return LightClientOptimisticUpdate(
        attested_header=upgrade_lc_header_to_eip4844(pre.attested_header),
        sync_aggregate=pre.sync_aggregate,
        signature_slot=pre.signature_slot,
    )
```

### Upgrading the store

Existing `LightClientStore` objects based on Capella MUST be upgraded to EIP4844 before EIP4844 based light client data can be processed. The `LightClientStore` upgrade MAY be performed before `EIP4844_FORK_EPOCH`.

```python
def upgrade_lc_store_to_eip4844(pre: capella.LightClientStore) -> LightClientStore:
    if pre.best_valid_update is None:
        best_valid_update = None
    else:
        best_valid_update = upgrade_lc_update_to_eip4844(pre.best_valid_update)
    return LightClientStore(
        finalized_header=upgrade_lc_header_to_eip4844(pre.finalized_header),
        current_sync_committee=pre.current_sync_committee,
        next_sync_committee=pre.next_sync_committee,
        best_valid_update=best_valid_update,
        optimistic_header=upgrade_lc_header_to_eip4844(pre.optimistic_header),
        previous_max_active_participants=pre.previous_max_active_participants,
        current_max_active_participants=pre.current_max_active_participants,
    )
```
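Since the upgrade above starts from Capella objects, data that predates Capella has to be upgraded in two hops. A hedged sketch (the wrapper is illustrative; whether the Capella helper is reachable as `capella.upgrade_lc_update_to_capella` depends on how the modules are packaged):

```python
# Illustrative only: chain the per-fork upgrade helpers for Bellatrix-era data.
def upgrade_bellatrix_lc_update_to_eip4844(pre: bellatrix.LightClientUpdate) -> LightClientUpdate:
    return upgrade_lc_update_to_eip4844(capella.upgrade_lc_update_to_capella(pre))
```

Note also that the header upgrade above leaves `excess_data_gas` at its SSZ default of zero, which matches the requirement in the EIP4844 `is_valid_light_client_header` that pre-EIP4844 headers carry a zero value.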
specs/eip4844/light-client/full-node.md (new file)

@@ -0,0 +1,74 @@
# EIP4844 Light Client -- Full Node

**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Helper functions](#helper-functions)
  - [Modified `block_to_light_client_header`](#modified-block_to_light_client_header)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This upgrade adds information about the execution payload to light client data as part of the EIP4844 upgrade.

## Helper functions

### Modified `block_to_light_client_header`

```python
def block_to_light_client_header(block: SignedBeaconBlock) -> LightClientHeader:
    epoch = compute_epoch_at_slot(block.message.slot)

    if epoch >= CAPELLA_FORK_EPOCH:
        payload = block.message.body.execution_payload
        execution_header = ExecutionPayloadHeader(
            parent_hash=payload.parent_hash,
            fee_recipient=payload.fee_recipient,
            state_root=payload.state_root,
            receipts_root=payload.receipts_root,
            logs_bloom=payload.logs_bloom,
            prev_randao=payload.prev_randao,
            block_number=payload.block_number,
            gas_limit=payload.gas_limit,
            gas_used=payload.gas_used,
            timestamp=payload.timestamp,
            extra_data=payload.extra_data,
            base_fee_per_gas=payload.base_fee_per_gas,
            block_hash=payload.block_hash,
            transactions_root=hash_tree_root(payload.transactions),
            withdrawals_root=hash_tree_root(payload.withdrawals),
        )

        # [New in EIP4844]
        if epoch >= EIP4844_FORK_EPOCH:
            execution_header.excess_data_gas = payload.excess_data_gas

        execution_branch = compute_merkle_proof_for_block_body(block.message.body, EXECUTION_PAYLOAD_INDEX)
    else:
        # Note that during fork transitions, `finalized_header` may still point to earlier forks.
        # While Bellatrix blocks also contain an `ExecutionPayload` (minus `withdrawals_root`),
        # it was not included in the corresponding light client data. To ensure compatibility
        # with legacy data going through `upgrade_lc_header_to_capella`, leave out execution data.
        execution_header = ExecutionPayloadHeader()
        execution_branch = [Bytes32() for _ in range(floorlog2(EXECUTION_PAYLOAD_INDEX))]

    return LightClientHeader(
        beacon=BeaconBlockHeader(
            slot=block.message.slot,
            proposer_index=block.message.proposer_index,
            parent_root=block.message.parent_root,
            state_root=block.message.state_root,
            body_root=hash_tree_root(block.message.body),
        ),
        execution=execution_header,
        execution_branch=execution_branch,
    )
```
specs/eip4844/light-client/p2p-interface.md (new file)

@@ -0,0 +1,105 @@
# EIP4844 Light Client -- Networking

**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Networking](#networking)
  - [The gossip domain: gossipsub](#the-gossip-domain-gossipsub)
    - [Topics and messages](#topics-and-messages)
      - [Global topics](#global-topics)
        - [`light_client_finality_update`](#light_client_finality_update)
        - [`light_client_optimistic_update`](#light_client_optimistic_update)
  - [The Req/Resp domain](#the-reqresp-domain)
    - [Messages](#messages)
      - [GetLightClientBootstrap](#getlightclientbootstrap)
      - [LightClientUpdatesByRange](#lightclientupdatesbyrange)
      - [GetLightClientFinalityUpdate](#getlightclientfinalityupdate)
      - [GetLightClientOptimisticUpdate](#getlightclientoptimisticupdate)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Networking

The [Capella light client networking specification](../../capella/light-client/p2p-interface.md) is extended to exchange [EIP4844 light client data](./sync-protocol.md).

### The gossip domain: gossipsub

#### Topics and messages

##### Global topics

###### `light_client_finality_update`

[0]: # (eth2spec: skip)

| `fork_version` | Message SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientFinalityUpdate` |
| `CAPELLA_FORK_VERSION` | `capella.LightClientFinalityUpdate` |
| `EIP4844_FORK_VERSION` and later | `eip4844.LightClientFinalityUpdate` |

###### `light_client_optimistic_update`

[0]: # (eth2spec: skip)

| `fork_version` | Message SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientOptimisticUpdate` |
| `CAPELLA_FORK_VERSION` | `capella.LightClientOptimisticUpdate` |
| `EIP4844_FORK_VERSION` and later | `eip4844.LightClientOptimisticUpdate` |

### The Req/Resp domain

#### Messages

##### GetLightClientBootstrap

[0]: # (eth2spec: skip)

| `fork_version` | Response SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientBootstrap` |
| `CAPELLA_FORK_VERSION` | `capella.LightClientBootstrap` |
| `EIP4844_FORK_VERSION` and later | `eip4844.LightClientBootstrap` |

##### LightClientUpdatesByRange

[0]: # (eth2spec: skip)

| `fork_version` | Response chunk SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientUpdate` |
| `CAPELLA_FORK_VERSION` | `capella.LightClientUpdate` |
| `EIP4844_FORK_VERSION` and later | `eip4844.LightClientUpdate` |

##### GetLightClientFinalityUpdate

[0]: # (eth2spec: skip)

| `fork_version` | Response SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientFinalityUpdate` |
| `CAPELLA_FORK_VERSION` | `capella.LightClientFinalityUpdate` |
| `EIP4844_FORK_VERSION` and later | `eip4844.LightClientFinalityUpdate` |

##### GetLightClientOptimisticUpdate

[0]: # (eth2spec: skip)

| `fork_version` | Response SSZ type |
| ------------------------------------------------------ | ------------------------------------- |
| `GENESIS_FORK_VERSION` | n/a |
| `ALTAIR_FORK_VERSION` through `BELLATRIX_FORK_VERSION` | `altair.LightClientOptimisticUpdate` |
| `CAPELLA_FORK_VERSION` | `capella.LightClientOptimisticUpdate` |
| `EIP4844_FORK_VERSION` and later | `eip4844.LightClientOptimisticUpdate` |
specs/eip4844/light-client/sync-protocol.md (new file)

@@ -0,0 +1,87 @@
# EIP4844 Light Client -- Sync Protocol

**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Helper functions](#helper-functions)
  - [Modified `get_lc_execution_root`](#modified-get_lc_execution_root)
  - [Modified `is_valid_light_client_header`](#modified-is_valid_light_client_header)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This upgrade updates light client data to include the EIP4844 changes to the [`ExecutionPayload`](../beacon-chain.md) structure. It extends the [Capella Light Client specifications](../../capella/light-client/sync-protocol.md). The [fork document](./fork.md) explains how to upgrade existing Capella based deployments to EIP4844.

Additional documents describe the impact of the upgrade on certain roles:
- [Full node](./full-node.md)
- [Networking](./p2p-interface.md)

## Helper functions

### Modified `get_lc_execution_root`

```python
def get_lc_execution_root(header: LightClientHeader) -> Root:
    epoch = compute_epoch_at_slot(header.beacon.slot)

    # [New in EIP4844]
    if epoch >= EIP4844_FORK_EPOCH:
        return hash_tree_root(header.execution)

    # [Modified in EIP4844]
    if epoch >= CAPELLA_FORK_EPOCH:
        execution_header = capella.ExecutionPayloadHeader(
            parent_hash=header.execution.parent_hash,
            fee_recipient=header.execution.fee_recipient,
            state_root=header.execution.state_root,
            receipts_root=header.execution.receipts_root,
            logs_bloom=header.execution.logs_bloom,
            prev_randao=header.execution.prev_randao,
            block_number=header.execution.block_number,
            gas_limit=header.execution.gas_limit,
            gas_used=header.execution.gas_used,
            timestamp=header.execution.timestamp,
            extra_data=header.execution.extra_data,
            base_fee_per_gas=header.execution.base_fee_per_gas,
            block_hash=header.execution.block_hash,
            transactions_root=header.execution.transactions_root,
            withdrawals_root=header.execution.withdrawals_root,
        )
        return hash_tree_root(execution_header)

    return Root()
```

### Modified `is_valid_light_client_header`

```python
def is_valid_light_client_header(header: LightClientHeader) -> bool:
    epoch = compute_epoch_at_slot(header.beacon.slot)

    # [New in EIP4844]
    if epoch < EIP4844_FORK_EPOCH:
        if header.execution.excess_data_gas != uint256(0):
            return False

    if epoch < CAPELLA_FORK_EPOCH:
        return (
            header.execution == ExecutionPayloadHeader()
            and header.execution_branch == [Bytes32() for _ in range(floorlog2(EXECUTION_PAYLOAD_INDEX))]
        )

    return is_valid_merkle_branch(
        leaf=get_lc_execution_root(header),
        branch=header.execution_branch,
        depth=floorlog2(EXECUTION_PAYLOAD_INDEX),
        index=get_subtree_index(EXECUTION_PAYLOAD_INDEX),
        root=header.beacon.body_root,
    )
```
specs/eip4844/p2p-interface.md

@@ -85,17 +85,12 @@ This topic is used to propagate new signed and coupled beacon blocks and blobs s

 In addition to the gossip validations for the `beacon_block` topic from prior specifications, the following validations MUST pass before forwarding the `signed_beacon_block_and_blobs_sidecar` on the network.
 Alias `signed_beacon_block = signed_beacon_block_and_blobs_sidecar.beacon_block`, `block = signed_beacon_block.message`, `execution_payload = block.body.execution_payload`.
-- _[REJECT]_ The KZG commitments of the blobs are all correctly encoded compressed BLS G1 points
-  -- i.e. `all(bls.KeyValidate(commitment) for commitment in block.body.blob_kzg_commitments)`
 - _[REJECT]_ The KZG commitments correspond to the versioned hashes in the transactions list
   -- i.e. `verify_kzg_commitments_against_transactions(block.body.execution_payload.transactions, block.body.blob_kzg_commitments)`

 Alias `sidecar = signed_beacon_block_and_blobs_sidecar.blobs_sidecar`.
 - _[IGNORE]_ the `sidecar.beacon_block_slot` is for the current slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance)
   -- i.e. `sidecar.beacon_block_slot == block.slot`.
-- _[REJECT]_ the `sidecar.blobs` are all well formatted, i.e. the `BLSFieldElement` in valid range (`x < BLS_MODULUS`).
-- _[REJECT]_ The KZG proof is a correctly encoded compressed BLS G1 point
-  -- i.e. `bls.KeyValidate(blobs_sidecar.kzg_aggregated_proof)`
 - _[REJECT]_ The KZG commitments in the block are valid against the provided blobs sidecar
   -- i.e. `validate_blobs_sidecar(block.slot, hash_tree_root(block), block.body.blob_kzg_commitments, sidecar)`
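A hedged sketch of the remaining checks wired together; apart from `verify_kzg_commitments_against_transactions` and `validate_blobs_sidecar` (both defined elsewhere in the EIP4844 specs), the container type name, the function name, and the assumption that `validate_blobs_sidecar` raises on failure are illustrative.

```python
# Illustrative only: gossip validation for a signed_beacon_block_and_blobs_sidecar message.
def check_block_and_blobs_sidecar(signed) -> None:
    block = signed.beacon_block.message
    sidecar = signed.blobs_sidecar
    # [REJECT] commitments correspond to the versioned hashes in the transactions list
    assert verify_kzg_commitments_against_transactions(
        block.body.execution_payload.transactions, block.body.blob_kzg_commitments)
    # [IGNORE] sidecar is for the matching slot
    assert sidecar.beacon_block_slot == block.slot
    # [REJECT] commitments are valid against the provided blobs sidecar
    validate_blobs_sidecar(block.slot, hash_tree_root(block),
                           block.body.blob_kzg_commitments, sidecar)
```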
@@ -218,7 +213,7 @@ Each _successful_ `response_chunk` MUST contain a single `BlobsSidecar` payload.
 Clients MUST keep a record of signed blobs sidecars seen on the epoch range
 `[max(current_epoch - MIN_EPOCHS_FOR_BLOBS_SIDECARS_REQUESTS, EIP4844_FORK_EPOCH), current_epoch]`
 where `current_epoch` is defined by the current wall-clock time,
-and clients MUST support serving requests of blocks on this range.
+and clients MUST support serving requests of blobs on this range.

 Peers that are unable to reply to blobs sidecars requests within the `MIN_EPOCHS_FOR_BLOBS_SIDECARS_REQUESTS`
 epoch range SHOULD respond with error code `3: ResourceUnavailable`.
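A small illustrative sketch of the serving window described above (the helper and its name are assumptions):

```python
# Illustrative only: the inclusive epoch range a node is expected to serve,
# written to avoid uint underflow for small current_epoch values.
def blobs_sidecars_serving_range(current_epoch: Epoch):
    if current_epoch < EIP4844_FORK_EPOCH + MIN_EPOCHS_FOR_BLOBS_SIDECARS_REQUESTS:
        start = EIP4844_FORK_EPOCH
    else:
        start = current_epoch - MIN_EPOCHS_FOR_BLOBS_SIDECARS_REQUESTS
    return start, current_epoch  # inclusive bounds
```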
specs/eip4844/polynomial-commitments.md

@@ -21,6 +21,9 @@
   - [BLS12-381 helpers](#bls12-381-helpers)
     - [`hash_to_bls_field`](#hash_to_bls_field)
     - [`bytes_to_bls_field`](#bytes_to_bls_field)
+    - [`validate_kzg_g1`](#validate_kzg_g1)
+    - [`bytes_to_kzg_commitment`](#bytes_to_kzg_commitment)
+    - [`bytes_to_kzg_proof`](#bytes_to_kzg_proof)
     - [`blob_to_polynomial`](#blob_to_polynomial)
     - [`compute_challenges`](#compute_challenges)
     - [`bls_modular_inverse`](#bls_modular_inverse)
@@ -35,6 +38,7 @@
     - [`verify_kzg_proof`](#verify_kzg_proof)
     - [`verify_kzg_proof_impl`](#verify_kzg_proof_impl)
     - [`compute_kzg_proof`](#compute_kzg_proof)
+    - [`compute_kzg_proof_impl`](#compute_kzg_proof_impl)
     - [`compute_aggregated_poly_and_commitment`](#compute_aggregated_poly_and_commitment)
     - [`compute_aggregate_kzg_proof`](#compute_aggregate_kzg_proof)
     - [`verify_aggregate_kzg_proof`](#verify_aggregate_kzg_proof)
@@ -48,17 +52,19 @@ This document specifies basic polynomial operations and KZG polynomial commitmen

 Functions flagged as "Public method" MUST be provided by the underlying KZG library as public functions. All other functions are private functions used internally by the KZG library.

+Public functions MUST accept raw bytes as input and perform the required cryptographic normalization before invoking any internal functions.
+
 ## Custom types

 | Name | SSZ equivalent | Description |
 | - | - | - |
 | `G1Point` | `Bytes48` | |
 | `G2Point` | `Bytes96` | |
-| `BLSFieldElement` | `uint256` | `x < BLS_MODULUS` |
+| `BLSFieldElement` | `uint256` | Validation: `x < BLS_MODULUS` |
-| `KZGCommitment` | `Bytes48` | Same as BLS standard "is valid pubkey" check but also allows `0x00..00` for point-at-infinity |
+| `KZGCommitment` | `Bytes48` | Validation: Perform [BLS standard's](https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-bls-signature-04#section-2.5) "KeyValidate" check but do allow the identity point |
 | `KZGProof` | `Bytes48` | Same as for `KZGCommitment` |
-| `Polynomial` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_BLOB]` | a polynomial in evaluation form |
+| `Polynomial` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_BLOB]` | A polynomial in evaluation form |
-| `Blob` | `ByteVector[BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB]` | a basic blob data |
+| `Blob` | `ByteVector[BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB]` | A basic blob data |

 ## Constants
@@ -66,6 +72,8 @@ Functions flagged as "Public method" MUST be provided by the underlying KZG libr
 | - | - | - |
 | `BLS_MODULUS` | `52435875175126190479447740508185965837690552500527637822603658699938581184513` | Scalar field modulus of BLS12-381 |
 | `BYTES_PER_FIELD_ELEMENT` | `uint64(32)` | Bytes used to encode a BLS scalar field element |
+| `G1_POINT_AT_INFINITY` | `Bytes48(b'\xc0' + b'\x00' * 47)` | Serialized form of the point at infinity on the G1 group |

 ## Preset
@@ -156,7 +164,7 @@ def hash_to_bls_field(data: bytes) -> BLSFieldElement:
 ```python
 def bytes_to_bls_field(b: Bytes32) -> BLSFieldElement:
     """
-    Convert 32-byte value to a BLS scalar field element.
+    Convert untrusted bytes to a trusted and validated BLS scalar field element.
     This function does not accept inputs greater than the BLS modulus.
     """
     field_element = int.from_bytes(b, ENDIANNESS)
@@ -164,6 +172,42 @@ def bytes_to_bls_field(b: Bytes32) -> BLSFieldElement:
     return BLSFieldElement(field_element)
 ```

+#### `validate_kzg_g1`
+
+```python
+def validate_kzg_g1(b: Bytes48) -> None:
+    """
+    Perform BLS validation required by the types `KZGProof` and `KZGCommitment`.
+    """
+    if b == G1_POINT_AT_INFINITY:
+        return
+
+    assert bls.KeyValidate(b)
+```
+
+#### `bytes_to_kzg_commitment`
+
+```python
+def bytes_to_kzg_commitment(b: Bytes48) -> KZGCommitment:
+    """
+    Convert untrusted bytes into a trusted and validated KZGCommitment.
+    """
+    validate_kzg_g1(b)
+    return KZGCommitment(b)
+```
+
+#### `bytes_to_kzg_proof`
+
+```python
+def bytes_to_kzg_proof(b: Bytes48) -> KZGProof:
+    """
+    Convert untrusted bytes into a trusted and validated KZGProof.
+    """
+    validate_kzg_g1(b)
+    return KZGProof(b)
+```
+
 #### `blob_to_polynomial`

 ```python
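A hedged usage sketch of the new byte-normalization helpers above; the wrapper and its use of `try/except AssertionError` are illustrative (the spec functions signal failure via `assert`).

```python
# Illustrative only: normalize untrusted 48-byte input before any internal KZG use.
def try_parse_commitment(raw: Bytes48):
    try:
        return bytes_to_kzg_commitment(raw)  # runs validate_kzg_g1 internally
    except AssertionError:
        return None  # not a valid G1 encoding (and not the allowed point at infinity)
```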
@@ -302,11 +346,13 @@ def evaluate_polynomial_in_evaluation_form(polynomial: Polynomial,
     assert width == FIELD_ELEMENTS_PER_BLOB
     inverse_width = bls_modular_inverse(BLSFieldElement(width))

-    # Make sure we won't divide by zero during division
-    assert z not in ROOTS_OF_UNITY
-
     roots_of_unity_brp = bit_reversal_permutation(ROOTS_OF_UNITY)

+    # If we are asked to evaluate within the domain, we already know the answer
+    if z in roots_of_unity_brp:
+        eval_index = roots_of_unity_brp.index(z)
+        return BLSFieldElement(polynomial[eval_index])
+
     result = 0
     for i in range(width):
         a = BLSFieldElement(int(polynomial[i]) * int(roots_of_unity_brp[i]) % BLS_MODULUS)
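Background on the change above (a standard identity, not spec text): evaluating a polynomial given in evaluation form over the roots-of-unity domain uses the barycentric formula, which divides by `z - ω_i` and is therefore undefined when `z` lies in the domain — hence the removed assertion is replaced by an explicit in-domain shortcut.

```latex
p(z) = \frac{z^{N} - 1}{N} \sum_{i=0}^{N-1} \frac{p(\omega_i)\,\omega_i}{z - \omega_i},
\qquad N = \texttt{FIELD\_ELEMENTS\_PER\_BLOB}
```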
@ -333,47 +379,61 @@ def blob_to_kzg_commitment(blob: Blob) -> KZGCommitment:
|
||||||
#### `verify_kzg_proof`
|
#### `verify_kzg_proof`
|
||||||
|
|
||||||
```python
|
```python
|
||||||
def verify_kzg_proof(polynomial_kzg: KZGCommitment,
|
def verify_kzg_proof(commitment_bytes: Bytes48,
|
||||||
z: Bytes32,
|
z: Bytes32,
|
||||||
y: Bytes32,
|
y: Bytes32,
|
||||||
kzg_proof: KZGProof) -> bool:
|
proof_bytes: Bytes48) -> bool:
|
||||||
"""
|
"""
|
||||||
Verify KZG proof that ``p(z) == y`` where ``p(z)`` is the polynomial represented by ``polynomial_kzg``.
|
Verify KZG proof that ``p(z) == y`` where ``p(z)`` is the polynomial represented by ``polynomial_kzg``.
|
||||||
Receives inputs as bytes.
|
Receives inputs as bytes.
|
||||||
Public method.
|
Public method.
|
||||||
"""
|
"""
|
||||||
return verify_kzg_proof_impl(polynomial_kzg, bytes_to_bls_field(z), bytes_to_bls_field(y), kzg_proof)
|
return verify_kzg_proof_impl(bytes_to_kzg_commitment(commitment_bytes),
|
||||||
|
bytes_to_bls_field(z),
|
||||||
|
bytes_to_bls_field(y),
|
||||||
|
bytes_to_kzg_proof(proof_bytes))
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
#### `verify_kzg_proof_impl`
|
#### `verify_kzg_proof_impl`
|
||||||
|
|
||||||
```python
|
```python
|
||||||
def verify_kzg_proof_impl(polynomial_kzg: KZGCommitment,
|
def verify_kzg_proof_impl(commitment: KZGCommitment,
|
||||||
z: BLSFieldElement,
|
z: BLSFieldElement,
|
||||||
y: BLSFieldElement,
|
y: BLSFieldElement,
|
||||||
kzg_proof: KZGProof) -> bool:
|
proof: KZGProof) -> bool:
|
||||||
"""
|
"""
|
||||||
Verify KZG proof that ``p(z) == y`` where ``p(z)`` is the polynomial represented by ``polynomial_kzg``.
|
Verify KZG proof that ``p(z) == y`` where ``p(z)`` is the polynomial represented by ``polynomial_kzg``.
|
||||||
"""
|
"""
|
||||||
# Verify: P - y = Q * (X - z)
|
# Verify: P - y = Q * (X - z)
|
||||||
X_minus_z = bls.add(bls.bytes96_to_G2(KZG_SETUP_G2[1]), bls.multiply(bls.G2, BLS_MODULUS - z))
|
X_minus_z = bls.add(bls.bytes96_to_G2(KZG_SETUP_G2[1]), bls.multiply(bls.G2, BLS_MODULUS - z))
|
||||||
P_minus_y = bls.add(bls.bytes48_to_G1(polynomial_kzg), bls.multiply(bls.G1, BLS_MODULUS - y))
|
P_minus_y = bls.add(bls.bytes48_to_G1(commitment), bls.multiply(bls.G1, BLS_MODULUS - y))
|
||||||
return bls.pairing_check([
|
return bls.pairing_check([
|
||||||
[P_minus_y, bls.neg(bls.G2)],
|
[P_minus_y, bls.neg(bls.G2)],
|
||||||
[bls.bytes48_to_G1(kzg_proof), X_minus_z]
|
[bls.bytes48_to_G1(proof), X_minus_z]
|
||||||
])
|
])
|
||||||
```
|
```
|
||||||
|
|
||||||
#### `compute_kzg_proof`

```python
def compute_kzg_proof(polynomial: Polynomial, z: BLSFieldElement) -> KZGProof:
def compute_kzg_proof(blob: Blob, z: Bytes32) -> KZGProof:
    """
    Compute KZG proof at point `z` with `polynomial` being in evaluation form.
    Compute KZG proof at point `z` for the polynomial represented by `blob`.
    Do this by computing the quotient polynomial in evaluation form: q(x) = (p(x) - p(z)) / (x - z).
    Public method.
    """
    polynomial = blob_to_polynomial(blob)
    return compute_kzg_proof_impl(polynomial, bytes_to_bls_field(z))
```

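A sketch of the quotient the docstring above describes, taken pointwise over the roots-of-unity domain (notation adapted, not part of the diff):

```latex
% Quotient polynomial in evaluation form, for each domain point \omega_i \neq z:
q(\omega_i) = \frac{p(\omega_i) - p(z)}{\omega_i - z}
```
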
#### `compute_kzg_proof_impl`

```python
def compute_kzg_proof_impl(polynomial: Polynomial, z: BLSFieldElement) -> KZGProof:
    """
    Helper function for compute_kzg_proof() and compute_aggregate_kzg_proof().
    """
    y = evaluate_polynomial_in_evaluation_form(polynomial, z)
    polynomial_shifted = [BLSFieldElement((int(p) - int(y)) % BLS_MODULUS) for p in polynomial]

@ -428,28 +488,31 @@ def compute_aggregate_kzg_proof(blobs: Sequence[Blob]) -> KZGProof:
        blobs,
        commitments
    )
    return compute_kzg_proof(aggregated_poly, evaluation_challenge)
    return compute_kzg_proof_impl(aggregated_poly, evaluation_challenge)
```

#### `verify_aggregate_kzg_proof`

```python
def verify_aggregate_kzg_proof(blobs: Sequence[Blob],
                               expected_kzg_commitments: Sequence[KZGCommitment],
                               commitments_bytes: Sequence[Bytes48],
                               kzg_aggregated_proof: KZGProof) -> bool:
                               aggregated_proof_bytes: Bytes48) -> bool:
    """
    Given a list of blobs and an aggregated KZG proof, verify that they correspond to the provided commitments.

    Public method.
    """
    commitments = [bytes_to_kzg_commitment(c) for c in commitments_bytes]

    aggregated_poly, aggregated_poly_commitment, evaluation_challenge = compute_aggregated_poly_and_commitment(
        blobs,
        expected_kzg_commitments,
        commitments
    )

    # Evaluate aggregated polynomial at `evaluation_challenge` (evaluation function checks for div-by-zero)
    y = evaluate_polynomial_in_evaluation_form(aggregated_poly, evaluation_challenge)

    # Verify aggregated proof
    return verify_kzg_proof_impl(aggregated_poly_commitment, evaluation_challenge, y, kzg_aggregated_proof)
    aggregated_proof = bytes_to_kzg_proof(aggregated_proof_bytes)
    return verify_kzg_proof_impl(aggregated_poly_commitment, evaluation_challenge, y, aggregated_proof)
```

@ -43,7 +43,7 @@ This document is the beacon chain fork choice spec, part of Phase 0. It assumes

## Fork choice

The head block root associated with a `store` is defined as `get_head(store)`. At genesis, let `store = get_forkchoice_store(genesis_state)` and update `store` by running:
The head block root associated with a `store` is defined as `get_head(store)`. At genesis, let `store = get_forkchoice_store(genesis_state, genesis_block)` and update `store` by running:

- `on_tick(store, time)` whenever `time > store.time` where `time` is the current Unix time
- `on_block(store, block)` whenever a block `block: SignedBeaconBlock` is received

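A minimal, hypothetical driver sketch for the two handlers listed above, assuming a `spec` module exposing `get_forkchoice_store`, `on_tick`, `on_block`, and `get_head` (the full fork choice defines further handlers not shown in this hunk):

```python
import time


def run_fork_choice(spec, genesis_state, genesis_block, block_queue):
    # Initialize the store from the genesis anchor state and block.
    store = spec.get_forkchoice_store(genesis_state, genesis_block)
    while True:
        # Advance the store clock whenever wall-clock time moves forward.
        now = int(time.time())
        if now > store.time:
            spec.on_tick(store, now)
        # Feed each newly received SignedBeaconBlock to the store.
        while block_queue:
            spec.on_block(store, block_queue.pop(0))
        head_root = spec.get_head(store)  # current head block root
        time.sleep(1)
```
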
@ -485,4 +485,4 @@ def on_attester_slashing(store: Store, attester_slashing: AttesterSlashing) -> N
    indices = set(attestation_1.attesting_indices).intersection(attestation_2.attesting_indices)
    for index in indices:
        store.equivocating_indices.add(index)
```

@ -1 +1 @@
1.3.0-rc.1
1.3.0-rc.2

@ -3,14 +3,30 @@ from typing import (Any, Dict, List)
from eth_utils import encode_hex
from eth2spec.test.context import (
    spec_state_test_with_matching_config,
    spec_test,
    with_config_overrides,
    with_matching_spec_config,
    with_phases,
    with_presets,
    with_state,
    with_altair_and_later,
)
from eth2spec.test.helpers.attestations import (
    next_slots_with_attestations,
    state_transition_with_full_block,
)
from eth2spec.test.helpers.constants import MINIMAL
from eth2spec.test.helpers.constants import (
    PHASE0, ALTAIR, BELLATRIX, CAPELLA, EIP4844,
    MINIMAL,
    ALL_PHASES,
)
from eth2spec.test.helpers.fork_transition import (
    do_fork,
)
from eth2spec.test.helpers.forks import (
    is_post_capella, is_post_eip4844,
    is_post_fork,
)
from eth2spec.test.helpers.light_client import (
    get_sync_aggregate,
)
@ -20,25 +36,150 @@ from eth2spec.test.helpers.state import (
)


def setup_test(spec, state):
    class LightClientSyncTest(object):
        steps: List[Dict[str, Any]]
        genesis_validators_root: spec.Root
        store: spec.LightClientStore
def get_spec_for_fork_version(spec, fork_version, phases):
    if phases is None:
        return spec
    for fork in [fork for fork in ALL_PHASES if is_post_fork(spec.fork, fork)]:
        if fork == PHASE0:
            fork_version_field = 'GENESIS_FORK_VERSION'
        else:
            fork_version_field = fork.upper() + '_FORK_VERSION'
        if fork_version == getattr(spec.config, fork_version_field):
            return phases[fork]
    raise ValueError("Unknown fork version %s" % fork_version)


def needs_upgrade_to_capella(d_spec, s_spec):
    return is_post_capella(s_spec) and not is_post_capella(d_spec)


def needs_upgrade_to_eip4844(d_spec, s_spec):
    return is_post_eip4844(s_spec) and not is_post_eip4844(d_spec)


def check_lc_header_equal(d_spec, s_spec, data, upgraded):
    assert upgraded.beacon.slot == data.beacon.slot
    assert upgraded.beacon.hash_tree_root() == data.beacon.hash_tree_root()
    if is_post_capella(s_spec):
        if is_post_capella(d_spec):
            assert s_spec.get_lc_execution_root(upgraded) == d_spec.get_lc_execution_root(data)
        else:
            assert s_spec.get_lc_execution_root(upgraded) == s_spec.Root()


def check_lc_bootstrap_equal(d_spec, s_spec, data, upgraded):
    check_lc_header_equal(d_spec, s_spec, data.header, upgraded.header)
    assert upgraded.current_sync_committee == data.current_sync_committee
    assert upgraded.current_sync_committee_branch == data.current_sync_committee_branch


def upgrade_lc_bootstrap_to_store(d_spec, s_spec, data):
    upgraded = data

    if needs_upgrade_to_capella(d_spec, s_spec):
        upgraded = s_spec.upgrade_lc_bootstrap_to_capella(upgraded)
        check_lc_bootstrap_equal(d_spec, s_spec, data, upgraded)

    if needs_upgrade_to_eip4844(d_spec, s_spec):
        upgraded = s_spec.upgrade_lc_bootstrap_to_eip4844(upgraded)
        check_lc_bootstrap_equal(d_spec, s_spec, data, upgraded)

    return upgraded


def check_lc_update_equal(d_spec, s_spec, data, upgraded):
    check_lc_header_equal(d_spec, s_spec, data.attested_header, upgraded.attested_header)
    assert upgraded.next_sync_committee == data.next_sync_committee
    assert upgraded.next_sync_committee_branch == data.next_sync_committee_branch
    check_lc_header_equal(d_spec, s_spec, data.finalized_header, upgraded.finalized_header)
    assert upgraded.sync_aggregate == data.sync_aggregate
    assert upgraded.signature_slot == data.signature_slot


def upgrade_lc_update_to_store(d_spec, s_spec, data):
    upgraded = data

    if needs_upgrade_to_capella(d_spec, s_spec):
        upgraded = s_spec.upgrade_lc_update_to_capella(upgraded)
        check_lc_update_equal(d_spec, s_spec, data, upgraded)

    if needs_upgrade_to_eip4844(d_spec, s_spec):
        upgraded = s_spec.upgrade_lc_update_to_eip4844(upgraded)
        check_lc_update_equal(d_spec, s_spec, data, upgraded)

    return upgraded


def check_lc_store_equal(d_spec, s_spec, data, upgraded):
    check_lc_header_equal(d_spec, s_spec, data.finalized_header, upgraded.finalized_header)
    assert upgraded.current_sync_committee == data.current_sync_committee
    assert upgraded.next_sync_committee == data.next_sync_committee
    if upgraded.best_valid_update is None:
        assert data.best_valid_update is None
    else:
        check_lc_update_equal(d_spec, s_spec, data.best_valid_update, upgraded.best_valid_update)
    check_lc_header_equal(d_spec, s_spec, data.optimistic_header, upgraded.optimistic_header)
    assert upgraded.previous_max_active_participants == data.previous_max_active_participants
    assert upgraded.current_max_active_participants == data.current_max_active_participants


def upgrade_lc_store_to_new_spec(d_spec, s_spec, data):
    upgraded = data

    if needs_upgrade_to_capella(d_spec, s_spec):
        upgraded = s_spec.upgrade_lc_store_to_capella(upgraded)
        check_lc_store_equal(d_spec, s_spec, data, upgraded)

    if needs_upgrade_to_eip4844(d_spec, s_spec):
        upgraded = s_spec.upgrade_lc_store_to_eip4844(upgraded)
        check_lc_store_equal(d_spec, s_spec, data, upgraded)

    return upgraded


class LightClientSyncTest(object):
    steps: List[Dict[str, Any]]
    genesis_validators_root: Any
    s_spec: Any
    store: Any


def get_store_fork_version(s_spec):
    if is_post_eip4844(s_spec):
        return s_spec.config.EIP4844_FORK_VERSION
    if is_post_capella(s_spec):
        return s_spec.config.CAPELLA_FORK_VERSION
    return s_spec.config.ALTAIR_FORK_VERSION


def setup_test(spec, state, s_spec=None, phases=None):
    test = LightClientSyncTest()
    test.steps = []

    if s_spec is None:
        s_spec = spec
    test.s_spec = s_spec

    yield "genesis_validators_root", "meta", "0x" + state.genesis_validators_root.hex()
    test.genesis_validators_root = state.genesis_validators_root

    next_slots(spec, state, spec.SLOTS_PER_EPOCH * 2 - 1)
    trusted_block = state_transition_with_full_block(spec, state, True, True)
    trusted_block_root = trusted_block.message.hash_tree_root()
    bootstrap = spec.create_light_client_bootstrap(state, trusted_block)
    yield "trusted_block_root", "meta", "0x" + trusted_block_root.hex()
    yield "bootstrap", bootstrap
    test.store = spec.initialize_light_client_store(trusted_block_root, bootstrap)
    data_fork_version = spec.compute_fork_version(spec.compute_epoch_at_slot(trusted_block.message.slot))
    data_fork_digest = spec.compute_fork_digest(data_fork_version, test.genesis_validators_root)
    d_spec = get_spec_for_fork_version(spec, data_fork_version, phases)
    data = d_spec.create_light_client_bootstrap(state, trusted_block)
    yield "bootstrap_fork_digest", "meta", encode_hex(data_fork_digest)
    yield "bootstrap", data

    upgraded = upgrade_lc_bootstrap_to_store(d_spec, test.s_spec, data)
    test.store = test.s_spec.initialize_light_client_store(trusted_block_root, upgraded)
    store_fork_version = get_store_fork_version(test.s_spec)
    store_fork_digest = test.s_spec.compute_fork_digest(store_fork_version, test.genesis_validators_root)
    yield "store_fork_digest", "meta", encode_hex(store_fork_digest)

    return test

@ -47,19 +188,33 @@ def finish_test(test):
    yield "steps", test.steps


def get_update_file_name(spec, update):
def get_update_file_name(d_spec, update):
    if spec.is_sync_committee_update(update):
    if d_spec.is_sync_committee_update(update):
        suffix1 = "s"
    else:
        suffix1 = "x"
    if spec.is_finality_update(update):
    if d_spec.is_finality_update(update):
        suffix2 = "f"
    else:
        suffix2 = "x"
    return f"update_{encode_hex(update.attested_header.beacon.hash_tree_root())}_{suffix1}{suffix2}"


def get_checks(store):
def get_checks(s_spec, store):
    if is_post_capella(s_spec):
        return {
            "finalized_header": {
                'slot': int(store.finalized_header.beacon.slot),
                'beacon_root': encode_hex(store.finalized_header.beacon.hash_tree_root()),
                'execution_root': encode_hex(s_spec.get_lc_execution_root(store.finalized_header)),
            },
            "optimistic_header": {
                'slot': int(store.optimistic_header.beacon.slot),
                'beacon_root': encode_hex(store.optimistic_header.beacon.hash_tree_root()),
                'execution_root': encode_hex(s_spec.get_lc_execution_root(store.optimistic_header)),
            },
        }

    return {
        "finalized_header": {
            'slot': int(store.finalized_header.beacon.slot),
@ -74,35 +229,56 @@ def get_checks(store):


def emit_force_update(test, spec, state):
    current_slot = state.slot
    spec.process_light_client_store_force_update(test.store, current_slot)
    test.s_spec.process_light_client_store_force_update(test.store, current_slot)

    yield from []  # Consistently enable `yield from` syntax in calling tests
    test.steps.append({
        "force_update": {
            "current_slot": int(current_slot),
            "checks": get_checks(test.store),
            "checks": get_checks(test.s_spec, test.store),
        }
    })


def emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, with_next=True):
def emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, with_next=True, phases=None):
    update = spec.create_light_client_update(state, block, attested_state, attested_block, finalized_block)
    data_fork_version = spec.compute_fork_version(spec.compute_epoch_at_slot(attested_block.message.slot))
    data_fork_digest = spec.compute_fork_digest(data_fork_version, test.genesis_validators_root)
    d_spec = get_spec_for_fork_version(spec, data_fork_version, phases)
    data = d_spec.create_light_client_update(state, block, attested_state, attested_block, finalized_block)
    if not with_next:
        update.next_sync_committee = spec.SyncCommittee()
        data.next_sync_committee = spec.SyncCommittee()
        update.next_sync_committee_branch = \
        data.next_sync_committee_branch = \
            [spec.Bytes32() for _ in range(spec.floorlog2(spec.NEXT_SYNC_COMMITTEE_INDEX))]
    current_slot = state.slot
    spec.process_light_client_update(test.store, update, current_slot, test.genesis_validators_root)

    yield get_update_file_name(spec, update), update
    upgraded = upgrade_lc_update_to_store(d_spec, test.s_spec, data)
    test.s_spec.process_light_client_update(test.store, upgraded, current_slot, test.genesis_validators_root)

    yield get_update_file_name(d_spec, data), data
    test.steps.append({
        "process_update": {
            "update": get_update_file_name(spec, update),
            "update_fork_digest": encode_hex(data_fork_digest),
            "update": get_update_file_name(d_spec, data),
            "current_slot": int(current_slot),
            "checks": get_checks(test.store),
            "checks": get_checks(test.s_spec, test.store),
        }
    })
    return upgraded


def emit_upgrade_store(test, new_s_spec, phases=None):
    test.store = upgrade_lc_store_to_new_spec(test.s_spec, new_s_spec, test.store)
    test.s_spec = new_s_spec
    store_fork_version = get_store_fork_version(test.s_spec)
    store_fork_digest = test.s_spec.compute_fork_digest(store_fork_version, test.genesis_validators_root)

    yield from []  # Consistently enable `yield from` syntax in calling tests
    test.steps.append({
        "upgrade_store": {
            "store_fork_digest": encode_hex(store_fork_digest),
            "checks": get_checks(test.s_spec, test.store),
        }
    })
    return update


def compute_start_slot_at_sync_committee_period(spec, sync_committee_period):
@ -440,3 +616,217 @@ def test_advance_finality_without_sync_committee(spec, state):

    # Finish test
    yield from finish_test(test)


def run_test_single_fork(spec, phases, state, fork):
    # Start test
    test = yield from setup_test(spec, state, phases=phases)

    # Initial `LightClientUpdate`
    finalized_block = spec.SignedBeaconBlock()
    finalized_block.message.state_root = state.hash_tree_root()
    finalized_state = state.copy()
    attested_block = state_transition_with_full_block(spec, state, True, True)
    attested_state = state.copy()
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)
    yield from emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update is None
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Jump to two slots before fork
    fork_epoch = getattr(phases[fork].config, fork.upper() + '_FORK_EPOCH')
    transition_to(spec, state, spec.compute_start_slot_at_epoch(fork_epoch) - 4)
    attested_block = state_transition_with_full_block(spec, state, True, True)
    attested_state = state.copy()
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)
    update = yield from emit_update(
        test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update == update
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Perform `LightClientStore` upgrade
    yield from emit_upgrade_store(test, phases[fork], phases=phases)
    update = test.store.best_valid_update

    # Final slot before fork, check that importing the pre-fork format still works
    attested_block = block.copy()
    attested_state = state.copy()
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)
    yield from emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update == update
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Upgrade to post-fork spec, attested block is still before the fork
    attested_block = block.copy()
    attested_state = state.copy()
    state, _ = do_fork(state, spec, phases[fork], fork_epoch, with_block=False)
    spec = phases[fork]
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)
    yield from emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update == update
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Another block after the fork, this time attested block is after the fork
    attested_block = block.copy()
    attested_state = state.copy()
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)
    yield from emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update == update
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Jump to next epoch
    transition_to(spec, state, spec.compute_start_slot_at_epoch(fork_epoch + 1) - 2)
    attested_block = state_transition_with_full_block(spec, state, True, True)
    attested_state = state.copy()
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)
    yield from emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update == update
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Finalize the fork
    finalized_block = block.copy()
    finalized_state = state.copy()
    _, _, state = next_slots_with_attestations(spec, state, 2 * spec.SLOTS_PER_EPOCH - 1, True, True)
    attested_block = state_transition_with_full_block(spec, state, True, True)
    attested_state = state.copy()
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)
    yield from emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update is None
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Finish test
    yield from finish_test(test)


@with_phases(phases=[BELLATRIX], other_phases=[CAPELLA])
@spec_test
@with_config_overrides({
    'CAPELLA_FORK_EPOCH': 3,  # `setup_test` advances to epoch 2
}, emit=False)
@with_state
@with_matching_spec_config(emitted_fork=CAPELLA)
@with_presets([MINIMAL], reason="too slow")
def test_capella_fork(spec, phases, state):
    yield from run_test_single_fork(spec, phases, state, CAPELLA)


@with_phases(phases=[CAPELLA], other_phases=[EIP4844])
@spec_test
@with_config_overrides({
    'EIP4844_FORK_EPOCH': 3,  # `setup_test` advances to epoch 2
}, emit=False)
@with_state
@with_matching_spec_config(emitted_fork=EIP4844)
@with_presets([MINIMAL], reason="too slow")
def test_eip4844_fork(spec, phases, state):
    yield from run_test_single_fork(spec, phases, state, EIP4844)


def run_test_multi_fork(spec, phases, state, fork_1, fork_2):
    # Start test
    test = yield from setup_test(spec, state, phases[fork_2], phases)

    # Set up so that finalized is from `spec`, ...
    finalized_block = spec.SignedBeaconBlock()
    finalized_block.message.state_root = state.hash_tree_root()
    finalized_state = state.copy()

    # ..., attested is from `fork_1`, ...
    fork_1_epoch = getattr(phases[fork_1].config, fork_1.upper() + '_FORK_EPOCH')
    transition_to(spec, state, spec.compute_start_slot_at_epoch(fork_1_epoch) - 1)
    state, _ = do_fork(state, spec, phases[fork_1], fork_1_epoch, with_block=False)
    spec = phases[fork_1]
    attested_block = state_transition_with_full_block(spec, state, True, True)
    attested_state = state.copy()

    # ..., and signature is from `fork_2`
    fork_2_epoch = getattr(phases[fork_2].config, fork_2.upper() + '_FORK_EPOCH')
    transition_to(spec, state, spec.compute_start_slot_at_epoch(fork_2_epoch) - 1)
    state, _ = do_fork(state, spec, phases[fork_2], fork_2_epoch, with_block=False)
    spec = phases[fork_2]
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)

    # Check that update applies
    yield from emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update is None
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Finish test
    yield from finish_test(test)


@with_phases(phases=[BELLATRIX], other_phases=[CAPELLA, EIP4844])
@spec_test
@with_config_overrides({
    'CAPELLA_FORK_EPOCH': 3,  # `setup_test` advances to epoch 2
    'EIP4844_FORK_EPOCH': 4,
}, emit=False)
@with_state
@with_matching_spec_config(emitted_fork=EIP4844)
@with_presets([MINIMAL], reason="too slow")
def test_capella_eip4844_fork(spec, phases, state):
    yield from run_test_multi_fork(spec, phases, state, CAPELLA, EIP4844)


def run_test_upgraded_store_with_legacy_data(spec, phases, state, fork):
    # Start test (Legacy bootstrap with an upgraded store)
    test = yield from setup_test(spec, state, phases[fork], phases)

    # Initial `LightClientUpdate` (check that the upgraded store can process it)
    finalized_block = spec.SignedBeaconBlock()
    finalized_block.message.state_root = state.hash_tree_root()
    finalized_state = state.copy()
    attested_block = state_transition_with_full_block(spec, state, True, True)
    attested_state = state.copy()
    sync_aggregate, _ = get_sync_aggregate(spec, state)
    block = state_transition_with_full_block(spec, state, True, True, sync_aggregate=sync_aggregate)
    yield from emit_update(test, spec, state, block, attested_state, attested_block, finalized_block, phases=phases)
    assert test.store.finalized_header.beacon.slot == finalized_state.slot
    assert test.store.next_sync_committee == finalized_state.next_sync_committee
    assert test.store.best_valid_update is None
    assert test.store.optimistic_header.beacon.slot == attested_state.slot

    # Finish test
    yield from finish_test(test)


@with_phases(phases=[ALTAIR, BELLATRIX], other_phases=[CAPELLA])
@spec_test
@with_state
@with_matching_spec_config(emitted_fork=CAPELLA)
@with_presets([MINIMAL], reason="too slow")
def test_capella_store_with_legacy_data(spec, phases, state):
    yield from run_test_upgraded_store_with_legacy_data(spec, phases, state, CAPELLA)


@with_phases(phases=[ALTAIR, BELLATRIX, CAPELLA], other_phases=[EIP4844])
@spec_test
@with_state
@with_matching_spec_config(emitted_fork=EIP4844)
@with_presets([MINIMAL], reason="too slow")
def test_eip4844_store_with_legacy_data(spec, phases, state):
    yield from run_test_upgraded_store_with_legacy_data(spec, phases, state, EIP4844)

@ -147,12 +147,14 @@ def test_success_one_partial_withdrawal(spec, state):

@with_capella_and_later
@spec_state_test
def test_success_max_per_slot(spec, state):
def test_success_mixed_fully_and_partial_withdrawable(spec, state):
    num_full_withdrawals = spec.MAX_WITHDRAWALS_PER_PAYLOAD // 2
    num_partial_withdrawals = spec.MAX_WITHDRAWALS_PER_PAYLOAD - num_full_withdrawals
    fully_withdrawable_indices, partial_withdrawals_indices = prepare_expected_withdrawals(
        spec, state,
        num_full_withdrawals=num_full_withdrawals, num_partial_withdrawals=num_partial_withdrawals)
        num_full_withdrawals=num_full_withdrawals,
        num_partial_withdrawals=num_partial_withdrawals,
    )

    next_slot(spec, state)
    execution_payload = build_empty_execution_payload(spec, state)

@ -0,0 +1,31 @@
from eth2spec.test.context import (
    spec_state_test,
    with_capella_and_later,
    with_test_suite_name,
)
from eth2spec.test.helpers.attestations import (
    state_transition_with_full_block,
)


@with_test_suite_name("BeaconBlockBody")
@with_capella_and_later
@spec_state_test
def test_execution_merkle_proof(spec, state):
    block = state_transition_with_full_block(spec, state, True, False)

    yield "object", block.message.body
    execution_branch = spec.compute_merkle_proof_for_block_body(
        block.message.body, spec.EXECUTION_PAYLOAD_INDEX)
    yield "proof", {
        "leaf": "0x" + block.message.body.execution_payload.hash_tree_root().hex(),
        "leaf_index": spec.EXECUTION_PAYLOAD_INDEX,
        "branch": ['0x' + root.hex() for root in execution_branch]
    }
    assert spec.is_valid_merkle_branch(
        leaf=block.message.body.execution_payload.hash_tree_root(),
        branch=execution_branch,
        depth=spec.floorlog2(spec.EXECUTION_PAYLOAD_INDEX),
        index=spec.get_subtree_index(spec.EXECUTION_PAYLOAD_INDEX),
        root=block.message.body.hash_tree_root(),
    )

@ -1,22 +1,34 @@
from eth2spec.test.helpers.constants import MINIMAL
from eth2spec.test.context import (
    with_capella_and_later, spec_state_test
    with_capella_and_later,
    spec_state_test,
    with_presets,
)
from eth2spec.test.helpers.keys import pubkeys
from eth2spec.test.helpers.state import (
    next_epoch_via_block,
    state_transition_and_sign_block,
    transition_to,
    transition_to_slot_via_block,
    next_slot,
)
from eth2spec.test.helpers.block import (
    build_empty_block_for_next_slot,
    build_empty_block,
)
from eth2spec.test.helpers.bls_to_execution_changes import get_signed_address_change
from eth2spec.test.helpers.state import (
from eth2spec.test.helpers.attestations import (
    next_slot,
    next_epoch_with_attestations,
)
from eth2spec.test.helpers.withdrawals import (
    set_eth1_withdrawal_credential_with_balance,
    set_validator_fully_withdrawable,
    set_validator_partially_withdrawable,
    prepare_expected_withdrawals,
)
from eth2spec.test.helpers.deposits import (
    prepare_state_and_deposit,
)
from eth2spec.test.helpers.voluntary_exits import prepare_signed_exits

|
||||||
|
|
||||||
yield 'blocks', [signed_block_2]
|
yield 'blocks', [signed_block_2]
|
||||||
yield 'post', None
|
yield 'post', None
|
||||||
|
|
||||||
|
|
||||||
|
#
|
||||||
|
# Mix top-ups and withdrawals
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
|
@with_capella_and_later
|
||||||
|
@spec_state_test
|
||||||
|
def test_top_up_and_partial_withdrawable_validator(spec, state):
|
||||||
|
next_withdrawal_validator_index = 0
|
||||||
|
validator_index = next_withdrawal_validator_index + 1
|
||||||
|
|
||||||
|
set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, spec.MAX_EFFECTIVE_BALANCE)
|
||||||
|
validator = state.validators[validator_index]
|
||||||
|
balance = state.balances[validator_index]
|
||||||
|
assert not spec.is_partially_withdrawable_validator(validator, balance)
|
||||||
|
|
||||||
|
# Make a top-up balance to validator
|
||||||
|
amount = spec.MAX_EFFECTIVE_BALANCE // 4
|
||||||
|
deposit = prepare_state_and_deposit(spec, state, validator_index, amount, signed=True)
|
||||||
|
|
||||||
|
yield 'pre', state
|
||||||
|
|
||||||
|
block = build_empty_block_for_next_slot(spec, state)
|
||||||
|
block.body.deposits.append(deposit)
|
||||||
|
|
||||||
|
signed_block = state_transition_and_sign_block(spec, state, block)
|
||||||
|
|
||||||
|
yield 'blocks', [signed_block]
|
||||||
|
yield 'post', state
|
||||||
|
|
||||||
|
# Since withdrawals happen before deposits, it becomes partially withdrawable after state transition.
|
||||||
|
validator = state.validators[validator_index]
|
||||||
|
balance = state.balances[validator_index]
|
||||||
|
assert spec.is_partially_withdrawable_validator(validator, balance)
|
||||||
|
|
||||||
|
|
||||||
|
@with_capella_and_later
|
||||||
|
@spec_state_test
|
||||||
|
def test_top_up_to_fully_withdrawn_validator(spec, state):
|
||||||
|
"""
|
||||||
|
Similar to `teste_process_deposit::test_success_top_up_to_withdrawn_validator` test.
|
||||||
|
"""
|
||||||
|
next_withdrawal_validator_index = 0
|
||||||
|
validator_index = next_withdrawal_validator_index + 1
|
||||||
|
|
||||||
|
# Fully withdraw validator
|
||||||
|
set_validator_fully_withdrawable(spec, state, validator_index)
|
||||||
|
assert state.balances[validator_index] > 0
|
||||||
|
next_epoch_via_block(spec, state)
|
||||||
|
assert state.balances[validator_index] == 0
|
||||||
|
assert state.validators[validator_index].effective_balance > 0
|
||||||
|
next_epoch_via_block(spec, state)
|
||||||
|
assert state.validators[validator_index].effective_balance == 0
|
||||||
|
|
||||||
|
# Make a top-up deposit to validator
|
||||||
|
amount = spec.MAX_EFFECTIVE_BALANCE // 4
|
||||||
|
deposit = prepare_state_and_deposit(spec, state, validator_index, amount, signed=True)
|
||||||
|
|
||||||
|
yield 'pre', state
|
||||||
|
|
||||||
|
block = build_empty_block_for_next_slot(spec, state)
|
||||||
|
block.body.deposits.append(deposit)
|
||||||
|
|
||||||
|
signed_block_1 = state_transition_and_sign_block(spec, state, block)
|
||||||
|
|
||||||
|
assert spec.is_fully_withdrawable_validator(
|
||||||
|
state.validators[validator_index],
|
||||||
|
state.balances[validator_index],
|
||||||
|
spec.get_current_epoch(state)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Apply an empty block
|
||||||
|
signed_block_2 = transition_to_slot_via_block(spec, state, state.slot + 1)
|
||||||
|
|
||||||
|
# With mainnet preset, it holds
|
||||||
|
if len(state.validators) <= spec.MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP:
|
||||||
|
assert not spec.is_fully_withdrawable_validator(
|
||||||
|
state.validators[validator_index],
|
||||||
|
state.balances[validator_index],
|
||||||
|
spec.get_current_epoch(state)
|
||||||
|
)
|
||||||
|
|
||||||
|
yield 'blocks', [signed_block_1, signed_block_2]
|
||||||
|
yield 'post', state
|
||||||
|
|
||||||
|
|
||||||
|
def _insert_validator(spec, state, balance):
|
||||||
|
effective_balance = balance if balance < spec.MAX_EFFECTIVE_BALANCE else spec.MAX_EFFECTIVE_BALANCE
|
||||||
|
validator_index = len(state.validators)
|
||||||
|
validator = spec.Validator(
|
||||||
|
pubkey=pubkeys[validator_index],
|
||||||
|
withdrawal_credentials=spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX + b'\x00' * 11 + b'\x56' * 20,
|
||||||
|
activation_eligibility_epoch=1,
|
||||||
|
activation_epoch=2,
|
||||||
|
exit_epoch=spec.FAR_FUTURE_EPOCH,
|
||||||
|
withdrawable_epoch=spec.FAR_FUTURE_EPOCH,
|
||||||
|
effective_balance=effective_balance,
|
||||||
|
)
|
||||||
|
state.validators.append(validator)
|
||||||
|
state.balances.append(balance)
|
||||||
|
state.previous_epoch_participation.append(spec.ParticipationFlags(0b0000_0000))
|
||||||
|
state.current_epoch_participation.append(spec.ParticipationFlags(0b0000_0000))
|
||||||
|
state.inactivity_scores.append(0)
|
||||||
|
|
||||||
|
return validator_index
|
||||||
|
|
||||||
|
|
||||||
|
def _run_activate_and_partial_withdrawal(spec, state, initial_balance):
|
||||||
|
validator_index = _insert_validator(spec, state, balance=initial_balance)
|
||||||
|
|
||||||
|
# To make it eligibile activation
|
||||||
|
transition_to(spec, state, spec.compute_start_slot_at_epoch(2) - 1)
|
||||||
|
assert not spec.is_active_validator(state.validators[validator_index], spec.get_current_epoch(state))
|
||||||
|
|
||||||
|
yield 'pre', state
|
||||||
|
|
||||||
|
blocks = []
|
||||||
|
# To activate
|
||||||
|
signed_block = transition_to_slot_via_block(spec, state, state.slot + 1)
|
||||||
|
blocks.append(signed_block)
|
||||||
|
|
||||||
|
assert spec.is_active_validator(state.validators[validator_index], spec.get_current_epoch(state))
|
||||||
|
|
||||||
|
if initial_balance > spec.MAX_EFFECTIVE_BALANCE:
|
||||||
|
assert spec.is_partially_withdrawable_validator(
|
||||||
|
state.validators[validator_index], state.balances[validator_index])
|
||||||
|
else:
|
||||||
|
assert not spec.is_partially_withdrawable_validator(
|
||||||
|
state.validators[validator_index], state.balances[validator_index])
|
||||||
|
|
||||||
|
_, new_blocks, state = next_epoch_with_attestations(spec, state, True, True)
|
||||||
|
blocks += new_blocks
|
||||||
|
|
||||||
|
yield 'blocks', blocks
|
||||||
|
yield 'post', state
|
||||||
|
|
||||||
|
|
||||||
|
@with_capella_and_later
|
||||||
|
@with_presets([MINIMAL], reason="too many validators with mainnet config")
|
||||||
|
@spec_state_test
|
||||||
|
def test_activate_and_partial_withdrawal_max_effective_balance(spec, state):
|
||||||
|
yield from _run_activate_and_partial_withdrawal(spec, state, initial_balance=spec.MAX_EFFECTIVE_BALANCE)
|
||||||
|
|
||||||
|
|
||||||
|
@with_capella_and_later
|
||||||
|
@with_presets([MINIMAL], reason="too many validators with mainnet config")
|
||||||
|
@spec_state_test
|
||||||
|
def test_activate_and_partial_withdrawal_overdeposit(spec, state):
|
||||||
|
yield from _run_activate_and_partial_withdrawal(spec, state, initial_balance=spec.MAX_EFFECTIVE_BALANCE + 10000000)
|
||||||
|
|
|
@ -0,0 +1,54 @@
from eth2spec.test.context import (
    ForkMeta,
    always_bls,
    with_fork_metas,
)
from eth2spec.test.helpers.constants import (
    AFTER_DENEB_PRE_POST_FORKS,
)
from eth2spec.test.helpers.fork_transition import (
    OperationType,
    run_transition_with_operation,
)


#
# BLSToExecutionChange
#

@with_fork_metas([ForkMeta(pre_fork_name=pre, post_fork_name=post, fork_epoch=2)
                  for pre, post in AFTER_DENEB_PRE_POST_FORKS])
@always_bls
def test_transition_with_btec_right_after_fork(state, fork_epoch, spec, post_spec, pre_tag, post_tag):
    """
    Create a BLS_TO_EXECUTION_CHANGE right *after* the transition
    """
    yield from run_transition_with_operation(
        state,
        fork_epoch,
        spec,
        post_spec,
        pre_tag,
        post_tag,
        operation_type=OperationType.BLS_TO_EXECUTION_CHANGE,
        operation_at_slot=fork_epoch * spec.SLOTS_PER_EPOCH,
    )


@with_fork_metas([ForkMeta(pre_fork_name=pre, post_fork_name=post, fork_epoch=2)
                  for pre, post in AFTER_DENEB_PRE_POST_FORKS])
@always_bls
def test_transition_with_btec_right_before_fork(state, fork_epoch, spec, post_spec, pre_tag, post_tag):
    """
    Create a BLS_TO_EXECUTION_CHANGE right *before* the transition
    """
    yield from run_transition_with_operation(
        state,
        fork_epoch,
        spec,
        post_spec,
        pre_tag,
        post_tag,
        operation_type=OperationType.BLS_TO_EXECUTION_CHANGE,
        operation_at_slot=fork_epoch * spec.SLOTS_PER_EPOCH - 1,
    )

@ -1,9 +1,13 @@
import random

from eth2spec.test.context import (
    spec_state_test,
    with_eip4844_and_later,
)
from eth2spec.test.helpers.sharding import (
    get_sample_blob,
    get_poly_in_both_forms,
    eval_poly_in_coeff_form,
)


@ -14,7 +18,72 @@ def test_verify_kzg_proof(spec, state):
    blob = get_sample_blob(spec)
    commitment = spec.blob_to_kzg_commitment(blob)
    polynomial = spec.blob_to_polynomial(blob)
    proof = spec.compute_kzg_proof(polynomial, x)
    proof = spec.compute_kzg_proof_impl(polynomial, x)

    y = spec.evaluate_polynomial_in_evaluation_form(polynomial, x)
    assert spec.verify_kzg_proof_impl(commitment, x, y, proof)


@with_eip4844_and_later
@spec_state_test
def test_barycentric_outside_domain(spec, state):
    """
    Test barycentric formula correctness by using it to evaluate a polynomial at a bunch of points outside its domain
    (the roots of unity).

    Then make sure that we would get the same result if we evaluated it from coefficient form without using the
    barycentric formula.
    """
    rng = random.Random(5566)
    poly_coeff, poly_eval = get_poly_in_both_forms(spec)
    roots_of_unity_brp = spec.bit_reversal_permutation(spec.ROOTS_OF_UNITY)

    assert len(poly_coeff) == len(poly_eval) == len(roots_of_unity_brp)
    n_samples = 12

    for _ in range(n_samples):
        # Get a random evaluation point and make sure it's not a root of unity
        z = rng.randint(0, spec.BLS_MODULUS - 1)
        while z in roots_of_unity_brp:
            z = rng.randint(0, spec.BLS_MODULUS - 1)

        # Get p(z) by evaluating poly in coefficient form
        p_z_coeff = eval_poly_in_coeff_form(spec, poly_coeff, z)

        # Get p(z) by evaluating poly in evaluation form
        p_z_eval = spec.evaluate_polynomial_in_evaluation_form(poly_eval, z)

        # Both evaluations should agree
        assert p_z_coeff == p_z_eval


@with_eip4844_and_later
@spec_state_test
def test_barycentric_within_domain(spec, state):
    """
    Test barycentric formula correctness by using it to evaluate a polynomial at all the points of its domain
    (the roots of unity).

    Then make sure that we would get the same result if we evaluated it from coefficient form without using the
    barycentric formula.
    """
    poly_coeff, poly_eval = get_poly_in_both_forms(spec)
    roots_of_unity_brp = spec.bit_reversal_permutation(spec.ROOTS_OF_UNITY)

    assert len(poly_coeff) == len(poly_eval) == len(roots_of_unity_brp)
    n = len(poly_coeff)

    # Iterate over the entire domain
    for i in range(n):
        # Grab a root of unity and use it as the evaluation point
        z = int(roots_of_unity_brp[i])

        # Get p(z) by evaluating poly in coefficient form
        p_z_coeff = eval_poly_in_coeff_form(spec, poly_coeff, z)

        # Get p(z) by evaluating poly in evaluation form
        p_z_eval = spec.evaluate_polynomial_in_evaluation_form(poly_eval, z)

        # The two evaluations should agree and p(z) should also be the i-th "coefficient" of the polynomial in
        # evaluation form
        assert p_z_coeff == p_z_eval == poly_eval[i]

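A sketch of the barycentric identity these tests exercise over the domain of $N$-th roots of unity $\omega_i$, following the formula stated in the spec's `evaluate_polynomial_in_evaluation_form` docstring (notation adapted):

```latex
% Barycentric evaluation of f (given by its values on the domain) at z outside the domain:
f(z) = \frac{z^{N} - 1}{N} \sum_{i=0}^{N-1} \frac{f(\omega_i)\,\omega_i}{z - \omega_i},
\qquad z \notin \{\omega_0, \dots, \omega_{N-1}\}
```
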
|
@ -37,6 +37,12 @@ ALL_FORK_UPGRADES = {
|
||||||
ALL_PRE_POST_FORKS = ALL_FORK_UPGRADES.items()
|
ALL_PRE_POST_FORKS = ALL_FORK_UPGRADES.items()
|
||||||
AFTER_BELLATRIX_UPGRADES = {key: value for key, value in ALL_FORK_UPGRADES.items() if key != PHASE0}
|
AFTER_BELLATRIX_UPGRADES = {key: value for key, value in ALL_FORK_UPGRADES.items() if key != PHASE0}
|
||||||
AFTER_BELLATRIX_PRE_POST_FORKS = AFTER_BELLATRIX_UPGRADES.items()
|
AFTER_BELLATRIX_PRE_POST_FORKS = AFTER_BELLATRIX_UPGRADES.items()
|
||||||
|
AFTER_CAPELLA_UPGRADES = {key: value for key, value in ALL_FORK_UPGRADES.items()
|
||||||
|
if key not in [PHASE0, ALTAIR]}
|
||||||
|
AFTER_CAPELLA_PRE_POST_FORKS = AFTER_CAPELLA_UPGRADES.items()
|
||||||
|
AFTER_DENEB_UPGRADES = {key: value for key, value in ALL_FORK_UPGRADES.items()
|
||||||
|
if key not in [PHASE0, ALTAIR, BELLATRIX]}
|
||||||
|
AFTER_DENEB_PRE_POST_FORKS = AFTER_DENEB_UPGRADES.items()
|
||||||
|
|
||||||
#
|
#
|
||||||
# Config
|
# Config
|
||||||
|
|
|
@ -26,6 +26,8 @@ def get_execution_payload_header(spec, execution_payload):
    )
    if is_post_capella(spec):
        payload_header.withdrawals_root = spec.hash_tree_root(execution_payload.withdrawals)
    if is_post_eip4844(spec):
        payload_header.excess_data_gas = execution_payload.excess_data_gas
    return payload_header


@ -108,7 +110,7 @@ def get_withdrawal_rlp(spec, withdrawal):
        # address
        (Binary(20, 20), withdrawal.address),
        # amount
        (big_endian_int, spec.uint256(withdrawal.amount) * (10**9)),
        (big_endian_int, withdrawal.amount),
    ]

    sedes = List([schema for schema, _ in withdrawal_rlp])

@ -9,6 +9,7 @@ from eth2spec.test.helpers.block import (
    build_empty_block,
    sign_block,
)
from eth2spec.test.helpers.bls_to_execution_changes import get_signed_address_change
from eth2spec.test.helpers.constants import (
    ALTAIR,
    BELLATRIX,

@ -36,6 +37,7 @@ class OperationType(Enum):
    ATTESTER_SLASHING = auto()
    DEPOSIT = auto()
    VOLUNTARY_EXIT = auto()
    BLS_TO_EXECUTION_CHANGE = auto()


def _set_operations_by_dict(block, operation_dict):

@ -267,6 +269,10 @@ def run_transition_with_operation(state,
        selected_validator_index = 0
        signed_exits = prepare_signed_exits(spec, state, [selected_validator_index])
        operation_dict = {'voluntary_exits': signed_exits}
    elif operation_type == OperationType.BLS_TO_EXECUTION_CHANGE:
        selected_validator_index = 0
        bls_to_execution_changes = [get_signed_address_change(spec, state, selected_validator_index)]
        operation_dict = {'bls_to_execution_changes': bls_to_execution_changes}

    def _check_state():
        if operation_type == OperationType.PROPOSER_SLASHING:

@ -288,6 +294,9 @@ def run_transition_with_operation(state,
        elif operation_type == OperationType.VOLUNTARY_EXIT:
            validator = state.validators[selected_validator_index]
            assert validator.exit_epoch < post_spec.FAR_FUTURE_EPOCH
        elif operation_type == OperationType.BLS_TO_EXECUTION_CHANGE:
            validator = state.validators[selected_validator_index]
            assert validator.withdrawal_credentials[:1] == spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX

    yield "pre", state

@ -66,6 +66,38 @@ def get_sample_blob(spec, rng=None):
    return spec.Blob(b)


def eval_poly_in_coeff_form(spec, coeffs, x):
    """
    Evaluate a polynomial in coefficient form at 'x' using Horner's rule
    """
    total = 0
    for a in reversed(coeffs):
        total = (total * x + a) % spec.BLS_MODULUS
    return total % spec.BLS_MODULUS


def get_poly_in_both_forms(spec, rng=None):
    """
    Generate and return a random polynomial in both coefficient form and evaluation form
    """
    if rng is None:
        rng = random.Random(5566)

    roots_of_unity_brp = spec.bit_reversal_permutation(spec.ROOTS_OF_UNITY)

    coeffs = [
        rng.randint(0, spec.BLS_MODULUS - 1)
        for _ in range(spec.FIELD_ELEMENTS_PER_BLOB)
    ]

    evals = [
        eval_poly_in_coeff_form(spec, coeffs, int(z))
        for z in roots_of_unity_brp
    ]

    return coeffs, evals


def get_sample_opaque_tx(spec, blob_count=1, rng=None):
    blobs = []
    blob_kzg_commitments = []

@@ -38,8 +38,9 @@ def transition_to_slot_via_block(spec, state, slot):
     Transition to ``slot`` via an empty block transition
     """
     assert state.slot < slot
-    apply_empty_block(spec, state, slot)
+    signed_block = apply_empty_block(spec, state, slot)
     assert state.slot == slot
+    return signed_block
 
 
 def transition_to_valid_shard_slot(spec, state):
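The only behavioral change here is that the helper now returns the empty signed block it applied, so callers can inspect it. A hedged usage sketch follows; the caller shown is hypothetical and not part of this diff, with `spec` and `state` standing for the usual pyspec fixtures.

```python
# Sketch: a hypothetical caller capturing the block produced by the helper.
def example_usage(spec, state):
    target_slot = state.slot + 4
    signed_block = transition_to_slot_via_block(spec, state, target_slot)
    # The helper asserts state.slot == target_slot internally; the caller can now
    # also check properties of the block that performed the transition.
    assert signed_block.message.slot == target_slot
    return signed_block
```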
@@ -9,11 +9,15 @@ This series of tests provides reference test vectors for validating that a light
 ```yaml
 genesis_validators_root: Bytes32    -- string, hex encoded, with 0x prefix
 trusted_block_root: Bytes32         -- string, hex encoded, with 0x prefix
+bootstrap_fork_digest: string       -- Encoded `ForkDigest`-context of `bootstrap`
+store_fork_digest: string           -- Encoded `ForkDigest`-context of `store` object being tested
 ```
 
 ### `bootstrap.ssz_snappy`
 
-An SSZ-snappy encoded `bootstrap` object of type `LightClientBootstrap` to initialize a local `store` object of type `LightClientStore` using `initialize_light_client_store(trusted_block_root, bootstrap)`.
+An SSZ-snappy encoded `bootstrap` object of type `LightClientBootstrap` to initialize a local `store` object of type `LightClientStore` with `store_fork_digest` using `initialize_light_client_store(trusted_block_root, bootstrap)`. The SSZ type can be determined from `bootstrap_fork_digest`.
+
+If `store_fork_digest` differs from `bootstrap_fork_digest`, the `bootstrap` object may need to be upgraded before initializing the store.
 
 ### `steps.yaml`
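To make the fork-digest handling concrete, a consuming test runner roughly has to decode `bootstrap.ssz_snappy` with the SSZ type matching `bootstrap_fork_digest` and then upgrade the object if the store is configured for a later fork. This is a hedged sketch only: `decode_ssz_snappy`, `BOOTSTRAP_TYPE_BY_FORK`, and `UPGRADE_BOOTSTRAP` are placeholders invented for illustration, not helpers defined by this test format.

```python
# Hypothetical runner-side handling of bootstrap_fork_digest vs. store_fork_digest.
def load_bootstrap(meta, read_file, decode_ssz_snappy,
                   BOOTSTRAP_TYPE_BY_FORK, UPGRADE_BOOTSTRAP):
    bootstrap_fork = meta['bootstrap_fork_digest']  # fork digest identifying the bootstrap's fork
    store_fork = meta['store_fork_digest']          # fork digest the store under test uses

    # Pick the SSZ container class matching the fork the bootstrap was serialized under.
    bootstrap_type = BOOTSTRAP_TYPE_BY_FORK[bootstrap_fork]
    bootstrap = decode_ssz_snappy(read_file('bootstrap.ssz_snappy'), bootstrap_type)

    # If the store runs on a later fork, upgrade the bootstrap before initializing the store.
    if store_fork != bootstrap_fork:
        bootstrap = UPGRADE_BOOTSTRAP[(bootstrap_fork, store_fork)](bootstrap)
    return bootstrap
```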
@@ -27,10 +31,12 @@ Each step includes checks to verify the expected impact on the `store` object.
     finalized_header: {
         slot: int,                -- Integer value from store.finalized_header.beacon.slot
         beacon_root: string,      -- Encoded 32-byte value from store.finalized_header.beacon.hash_tree_root()
+        execution_root: string,   -- From Capella onward; get_lc_execution_root(store.finalized_header)
     }
     optimistic_header: {
         slot: int,                -- Integer value from store.optimistic_header.beacon.slot
         beacon_root: string,      -- Encoded 32-byte value from store.optimistic_header.beacon.hash_tree_root()
+        execution_root: string,   -- From Capella onward; get_lc_execution_root(store.optimistic_header)
     }
 ```
@@ -54,6 +60,7 @@ The function `process_light_client_update(store, update, current_slot, genesis_v
 
 ```yaml
 {
+    update_fork_digest: string -- Encoded `ForkDigest`-context of `update`
     update: string             -- name of the `*.ssz_snappy` file to load
                                   as a `LightClientUpdate` object
     current_slot: int          -- integer, decimal
@@ -61,6 +68,21 @@ The function `process_light_client_update(store, update, current_slot, genesis_v
 }
 ```
 
+If `store_fork_digest` differs from `update_fork_digest`, the `update` object may need to be upgraded before processing the update.
+
 After this step, the `store` object may have been updated.
 
+#### `upgrade_store`
+
+The `store` should be upgraded to reflect the new `store_fork_digest`:
+
+```yaml
+{
+    store_fork_digest: string           -- Encoded `ForkDigest`-context of `store`
+    checks: {<store_attribute>: value}  -- the assertions.
+}
+```
+
 After this step, the `store` object may have been updated.
 
 ## Condition
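Putting the fork-aware steps together, a runner's step loop might look roughly like the sketch below. The step names mirror the format above, but `decode_update`, `UPGRADE_STORE`, and `run_checks` are placeholders invented for illustration; the actual pyspec upgrade helpers are not quoted from this diff.

```python
# Hypothetical step dispatch for the fork-aware light client sync tests.
def run_steps(spec, store, steps, genesis_validators_root,
              decode_update, UPGRADE_STORE, run_checks):
    for step in steps:
        if 'process_update' in step:
            data = step['process_update']
            # The SSZ type of the update file follows update_fork_digest.
            update = decode_update(data['update'], data['update_fork_digest'])
            # If store_fork_digest differs from update_fork_digest, the update may
            # first need to be upgraded to the store's fork (omitted in this sketch).
            spec.process_light_client_update(
                store, update, data['current_slot'], genesis_validators_root)
            run_checks(store, data['checks'])
        elif 'upgrade_store' in step:
            data = step['upgrade_store']
            # Upgrade the local store so later updates are decoded and applied
            # under the new fork indicated by store_fork_digest.
            store = UPGRADE_STORE[data['store_fork_digest']](store)
            run_checks(store, data['checks'])
    return store
```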
@@ -1,5 +1,5 @@
 from eth2spec.test.helpers.constants import ALTAIR, BELLATRIX, CAPELLA, EIP4844
-from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
+from eth2spec.gen_helpers.gen_from_tests.gen import combine_mods, run_state_test_generators
 
 
 if __name__ == "__main__":
@@ -9,7 +9,11 @@ if __name__ == "__main__":
         'update_ranking',
     ]}
     bellatrix_mods = altair_mods
-    capella_mods = bellatrix_mods
+
+    _new_capella_mods = {key: 'eth2spec.test.capella.light_client.test_' + key for key in [
+        'single_merkle_proof',
+    ]}
+    capella_mods = combine_mods(_new_capella_mods, bellatrix_mods)
     eip4844_mods = capella_mods
 
     all_mods = {
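For context, the point of switching from plain aliasing to `combine_mods` is that Capella now contributes its own light client test modules in addition to everything inherited from Bellatrix. The standalone sketch below only approximates the merge semantics assumed here; the real helper lives in `eth2spec.gen_helpers.gen_from_tests.gen` and may differ in detail.

```python
# Approximate, standalone sketch of merging new per-fork mods with inherited ones;
# not the actual eth2spec implementation.
def combine_mods_sketch(new_mods: dict, previous_mods: dict) -> dict:
    combined = dict(previous_mods)
    for key, value in new_mods.items():
        if key in combined:
            # A runner key present in both forks keeps the inherited module(s)
            # and gains the newly added one(s).
            prev = combined[key]
            prev_list = prev if isinstance(prev, list) else [prev]
            new_list = value if isinstance(value, list) else [value]
            combined[key] = prev_list + new_list
        else:
            combined[key] = value
    return combined

# Example shape, mirroring the generator above (module paths shortened):
capella_mods = combine_mods_sketch(
    {'single_merkle_proof': 'eth2spec.test.capella.light_client.test_single_merkle_proof'},
    {'sync': 'eth2spec.test.altair.light_client.test_sync'},
)
```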
@@ -16,6 +16,9 @@ from eth2spec.test.altair.transition import (
     test_slashing as test_altair_slashing,
     test_operations as test_altair_operations,
 )
+from eth2spec.test.eip4844.transition import (
+    test_operations as test_eip4844_operations,
+)
 
 
 def create_provider(tests_src, preset_name: str, pre_fork_name: str, post_fork_name: str) -> gen_typing.TestProvider:
@@ -37,14 +40,14 @@ def create_provider(tests_src, preset_name: str, pre_fork_name: str, post_fork_n
 
 
 if __name__ == "__main__":
-    altair_tests = (
+    all_tests = (
         test_altair_transition,
         test_altair_activations_and_exits,
         test_altair_leaking,
         test_altair_slashing,
         test_altair_operations,
+        test_eip4844_operations,
     )
-    all_tests = altair_tests
     for transition_test_module in all_tests:
         for pre_fork, post_fork in ALL_PRE_POST_FORKS:
             gen_runner.run_generator("transition", [