Merge branch 'dev' into fix-capella-genesis
This commit is contained in:
commit
a1546fdf4f
|
@ -142,7 +142,19 @@ jobs:
|
|||
command: make citest fork=capella
|
||||
- store_test_results:
|
||||
path: tests/core/pyspec/test-reports
|
||||
|
||||
test-eip4844:
|
||||
docker:
|
||||
- image: circleci/python:3.8
|
||||
working_directory: ~/specs-repo
|
||||
steps:
|
||||
- restore_cache:
|
||||
key: v3-specs-repo-{{ .Branch }}-{{ .Revision }}
|
||||
- restore_pyspec_cached_venv
|
||||
- run:
|
||||
name: Run py-tests
|
||||
command: make citest fork=eip4844
|
||||
- store_test_results:
|
||||
path: tests/core/pyspec/test-reports
|
||||
table_of_contents:
|
||||
docker:
|
||||
- image: circleci/node:10.16.3
|
||||
|
@ -260,6 +272,9 @@ workflows:
|
|||
- test-capella:
|
||||
requires:
|
||||
- install_pyspec_test
|
||||
- test-eip4844:
|
||||
requires:
|
||||
- install_pyspec_test
|
||||
- table_of_contents
|
||||
- codespell
|
||||
- lint:
|
||||
|
|
|
@ -19,6 +19,7 @@ tests/core/pyspec/eth2spec/phase0/
|
|||
tests/core/pyspec/eth2spec/altair/
|
||||
tests/core/pyspec/eth2spec/bellatrix/
|
||||
tests/core/pyspec/eth2spec/capella/
|
||||
tests/core/pyspec/eth2spec/eip4844/
|
||||
|
||||
# coverage reports
|
||||
.htmlcov
|
||||
|
|
2
Makefile
2
Makefile
|
@ -136,7 +136,7 @@ codespell:
|
|||
lint: pyspec
|
||||
. venv/bin/activate; cd $(PY_SPEC_DIR); \
|
||||
flake8 --config $(LINTER_CONFIG_FILE) ./eth2spec \
|
||||
&& pylint --disable=all --enable unused-argument ./eth2spec/phase0 ./eth2spec/altair ./eth2spec/bellatrix \
|
||||
&& pylint --disable=all --enable unused-argument ./eth2spec/phase0 ./eth2spec/altair ./eth2spec/bellatrix ./eth2spec/capella \
|
||||
&& mypy --config-file $(LINTER_CONFIG_FILE) -p eth2spec.phase0 -p eth2spec.altair -p eth2spec.bellatrix -p eth2spec.capella
|
||||
|
||||
lint_generators: pyspec
|
||||
|
|
|
@ -47,9 +47,9 @@ BELLATRIX_FORK_EPOCH: 18446744073709551615
|
|||
# Capella
|
||||
CAPELLA_FORK_VERSION: 0x03000000
|
||||
CAPELLA_FORK_EPOCH: 18446744073709551615
|
||||
# Sharding
|
||||
SHARDING_FORK_VERSION: 0x04000000
|
||||
SHARDING_FORK_EPOCH: 18446744073709551615
|
||||
# EIP4844
|
||||
EIP4844_FORK_VERSION: 0x04000000
|
||||
EIP4844_FORK_EPOCH: 18446744073709551615
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -46,9 +46,9 @@ BELLATRIX_FORK_EPOCH: 18446744073709551615
|
|||
# Capella
|
||||
CAPELLA_FORK_VERSION: 0x03000001
|
||||
CAPELLA_FORK_EPOCH: 18446744073709551615
|
||||
# Sharding
|
||||
SHARDING_FORK_VERSION: 0x04000001
|
||||
SHARDING_FORK_EPOCH: 18446744073709551615
|
||||
# EIP4844
|
||||
EIP4844_FORK_VERSION: 0x04000001
|
||||
EIP4844_FORK_EPOCH: 18446744073709551615
|
||||
|
||||
|
||||
# Time parameters
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
# Mainnet preset - Phase0
|
||||
|
||||
# Misc
|
||||
# ---------------------------------------------------------------
|
||||
# `uint64(4096)`
|
||||
FIELD_ELEMENTS_PER_BLOB: 4096
|
||||
# `uint64(2**4)` (= 16)
|
||||
MAX_BLOBS_PER_BLOCK: 16
|
|
@ -0,0 +1,8 @@
|
|||
# Minimal preset - Phase0
|
||||
|
||||
# Misc
|
||||
# ---------------------------------------------------------------
|
||||
# [customized]
|
||||
FIELD_ELEMENTS_PER_BLOB: 4
|
||||
# `uint64(2**4)` (= 16)
|
||||
MAX_BLOBS_PER_BLOCK: 16
|
78
setup.py
78
setup.py
|
@ -45,6 +45,7 @@ PHASE0 = 'phase0'
|
|||
ALTAIR = 'altair'
|
||||
BELLATRIX = 'bellatrix'
|
||||
CAPELLA = 'capella'
|
||||
EIP4844 = 'eip4844'
|
||||
|
||||
|
||||
# The helper functions that are used when defining constants
|
||||
|
@ -230,7 +231,7 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str]) ->
|
|||
|
||||
if not _is_constant_id(name):
|
||||
# Check for short type declarations
|
||||
if value.startswith(("uint", "Bytes", "ByteList", "Union")):
|
||||
if value.startswith(("uint", "Bytes", "ByteList", "Union", "Vector", "List")):
|
||||
custom_types[name] = value
|
||||
continue
|
||||
|
||||
|
@ -304,7 +305,7 @@ class SpecBuilder(ABC):
|
|||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def hardcoded_custom_type_dep_constants(cls) -> Dict[str, str]: # TODO
|
||||
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: # TODO
|
||||
"""
|
||||
The constants that are required for custom types.
|
||||
"""
|
||||
|
@ -432,7 +433,7 @@ get_attesting_indices = cache_this(
|
|||
return {}
|
||||
|
||||
@classmethod
|
||||
def hardcoded_custom_type_dep_constants(cls) -> Dict[str, str]:
|
||||
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]:
|
||||
return {}
|
||||
|
||||
@classmethod
|
||||
|
@ -548,11 +549,11 @@ EXECUTION_ENGINE = NoopExecutionEngine()"""
|
|||
|
||||
|
||||
@classmethod
|
||||
def hardcoded_custom_type_dep_constants(cls) -> str:
|
||||
def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
|
||||
constants = {
|
||||
'MAX_BYTES_PER_TRANSACTION': 'uint64(2**30)',
|
||||
'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value,
|
||||
}
|
||||
return {**super().hardcoded_custom_type_dep_constants(), **constants}
|
||||
return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants}
|
||||
|
||||
|
||||
#
|
||||
|
@ -568,14 +569,57 @@ from eth2spec.bellatrix import {preset_name} as bellatrix
|
|||
'''
|
||||
|
||||
|
||||
#
|
||||
# EIP4844SpecBuilder
|
||||
#
|
||||
class EIP4844SpecBuilder(BellatrixSpecBuilder):
|
||||
fork: str = EIP4844
|
||||
|
||||
@classmethod
|
||||
def imports(cls, preset_name: str):
|
||||
return super().imports(preset_name) + f'''
|
||||
from eth2spec.utils import kzg
|
||||
from eth2spec.bellatrix import {preset_name} as bellatrix
|
||||
'''
|
||||
|
||||
@classmethod
|
||||
def sundry_functions(cls) -> str:
|
||||
return super().sundry_functions() + '''
|
||||
# TODO: for mainnet, load pre-generated trusted setup file to reduce building time.
|
||||
# TESTING_FIELD_ELEMENTS_PER_BLOB is hardcoded copy from minimal presets
|
||||
TESTING_FIELD_ELEMENTS_PER_BLOB = 4
|
||||
TESTING_SECRET = 1337
|
||||
TESTING_KZG_SETUP_G1 = kzg.generate_setup(bls.G1, TESTING_SECRET, TESTING_FIELD_ELEMENTS_PER_BLOB)
|
||||
TESTING_KZG_SETUP_G2 = kzg.generate_setup(bls.G2, TESTING_SECRET, TESTING_FIELD_ELEMENTS_PER_BLOB)
|
||||
TESTING_KZG_SETUP_LAGRANGE = kzg.get_lagrange(TESTING_KZG_SETUP_G1)
|
||||
|
||||
KZG_SETUP_G1 = [bls.G1_to_bytes48(p) for p in TESTING_KZG_SETUP_G1]
|
||||
KZG_SETUP_G2 = [bls.G2_to_bytes96(p) for p in TESTING_KZG_SETUP_G2]
|
||||
KZG_SETUP_LAGRANGE = TESTING_KZG_SETUP_LAGRANGE
|
||||
ROOTS_OF_UNITY = kzg.compute_roots_of_unity(TESTING_FIELD_ELEMENTS_PER_BLOB)
|
||||
|
||||
|
||||
def retrieve_blobs_sidecar(slot: Slot, beacon_block_root: Root) -> BlobsSidecar:
|
||||
pass'''
|
||||
|
||||
@classmethod
|
||||
def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
|
||||
constants = {
|
||||
'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value,
|
||||
'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value,
|
||||
}
|
||||
return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants}
|
||||
|
||||
|
||||
|
||||
spec_builders = {
|
||||
builder.fork: builder
|
||||
for builder in (Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder)
|
||||
for builder in (Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, EIP4844SpecBuilder)
|
||||
}
|
||||
|
||||
|
||||
def is_spec_defined_type(value: str) -> bool:
|
||||
return value.startswith('ByteList') or value.startswith('Union')
|
||||
return value.startswith(('ByteList', 'Union', 'Vector', 'List'))
|
||||
|
||||
|
||||
def objects_to_spec(preset_name: str,
|
||||
|
@ -653,7 +697,7 @@ def objects_to_spec(preset_name: str,
|
|||
ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values())
|
||||
ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_ssz_dep_constants()[x]), builder.hardcoded_ssz_dep_constants()))
|
||||
ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), builder.hardcoded_ssz_dep_constants()))
|
||||
custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_custom_type_dep_constants()[x]), builder.hardcoded_custom_type_dep_constants()))
|
||||
custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_custom_type_dep_constants(spec_object)[x]), builder.hardcoded_custom_type_dep_constants(spec_object)))
|
||||
spec = (
|
||||
builder.imports(preset_name)
|
||||
+ builder.preparations()
|
||||
|
@ -870,14 +914,14 @@ class PySpecCommand(Command):
|
|||
if len(self.md_doc_paths) == 0:
|
||||
print("no paths were specified, using default markdown file paths for pyspec"
|
||||
" build (spec fork: %s)" % self.spec_fork)
|
||||
if self.spec_fork in (PHASE0, ALTAIR, BELLATRIX, CAPELLA):
|
||||
if self.spec_fork in (PHASE0, ALTAIR, BELLATRIX, CAPELLA, EIP4844):
|
||||
self.md_doc_paths = """
|
||||
specs/phase0/beacon-chain.md
|
||||
specs/phase0/fork-choice.md
|
||||
specs/phase0/validator.md
|
||||
specs/phase0/weak-subjectivity.md
|
||||
"""
|
||||
if self.spec_fork in (ALTAIR, BELLATRIX, CAPELLA):
|
||||
if self.spec_fork in (ALTAIR, BELLATRIX, CAPELLA, EIP4844):
|
||||
self.md_doc_paths += """
|
||||
specs/altair/beacon-chain.md
|
||||
specs/altair/bls.md
|
||||
|
@ -886,7 +930,7 @@ class PySpecCommand(Command):
|
|||
specs/altair/p2p-interface.md
|
||||
specs/altair/sync-protocol.md
|
||||
"""
|
||||
if self.spec_fork in (BELLATRIX, CAPELLA):
|
||||
if self.spec_fork in (BELLATRIX, CAPELLA, EIP4844):
|
||||
self.md_doc_paths += """
|
||||
specs/bellatrix/beacon-chain.md
|
||||
specs/bellatrix/fork.md
|
||||
|
@ -903,6 +947,14 @@ class PySpecCommand(Command):
|
|||
specs/capella/validator.md
|
||||
specs/capella/p2p-interface.md
|
||||
"""
|
||||
if self.spec_fork == EIP4844:
|
||||
self.md_doc_paths += """
|
||||
specs/eip4844/beacon-chain.md
|
||||
specs/eip4844/fork.md
|
||||
specs/eip4844/polynomial-commitments.md
|
||||
specs/eip4844/p2p-interface.md
|
||||
specs/eip4844/validator.md
|
||||
"""
|
||||
if len(self.md_doc_paths) == 0:
|
||||
raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork)
|
||||
|
||||
|
@ -1043,7 +1095,7 @@ setup(
|
|||
extras_require={
|
||||
"test": ["pytest>=4.4", "pytest-cov", "pytest-xdist"],
|
||||
"lint": ["flake8==3.7.7", "mypy==0.812", "pylint==2.12.2"],
|
||||
"generator": ["python-snappy==0.5.4"],
|
||||
"generator": ["python-snappy==0.5.4", "filelock"],
|
||||
},
|
||||
install_requires=[
|
||||
"eth-utils>=1.3.0,<2",
|
||||
|
|
|
@ -11,19 +11,22 @@
|
|||
- [Introduction](#introduction)
|
||||
- [Custom types](#custom-types)
|
||||
- [Constants](#constants)
|
||||
- [Blob](#blob)
|
||||
- [Domain types](#domain-types)
|
||||
- [Preset](#preset)
|
||||
- [Execution](#execution)
|
||||
- [Configuration](#configuration)
|
||||
- [Containers](#containers)
|
||||
- [Extended containers](#extended-containers)
|
||||
- [`BeaconBlockBody`](#beaconblockbody)
|
||||
- [Helper functions](#helper-functions)
|
||||
- [Misc](#misc)
|
||||
- [`kzg_to_versioned_hash`](#kzg_to_versioned_hash)
|
||||
- [`kzg_commitment_to_versioned_hash`](#kzg_commitment_to_versioned_hash)
|
||||
- [`tx_peek_blob_versioned_hashes`](#tx_peek_blob_versioned_hashes)
|
||||
- [`verify_kzgs_against_transactions`](#verify_kzgs_against_transactions)
|
||||
- [`verify_kzg_commitments_against_transactions`](#verify_kzg_commitments_against_transactions)
|
||||
- [Beacon chain state transition function](#beacon-chain-state-transition-function)
|
||||
- [Block processing](#block-processing)
|
||||
- [Blob KZGs](#blob-kzgs)
|
||||
- [Blob KZG commitments](#blob-kzg-commitments)
|
||||
- [Testing](#testing)
|
||||
|
||||
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
|
||||
|
@ -39,13 +42,17 @@ This upgrade adds blobs to the beacon chain as part of EIP-4844.
|
|||
| - | - | - |
|
||||
| `Blob` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_BLOB]` | |
|
||||
| `VersionedHash` | `Bytes32` | |
|
||||
| `KZGCommitment` | `Bytes48` | Same as BLS standard "is valid pubkey" check but also allows `0x00..00` for point-at-infinity |
|
||||
|
||||
## Constants
|
||||
|
||||
### Blob
|
||||
|
||||
| Name | Value |
|
||||
| - | - |
|
||||
| `BLOB_TX_TYPE` | `uint8(0x05)` |
|
||||
| `FIELD_ELEMENTS_PER_BLOB` | `4096` |
|
||||
| `FIELD_ELEMENTS_PER_BLOB` | `uint64(4096)` |
|
||||
| `VERSIONED_HASH_VERSION_KZG` | `Bytes1(0x01)` |
|
||||
|
||||
### Domain types
|
||||
|
||||
|
@ -53,6 +60,14 @@ This upgrade adds blobs to the beacon chain as part of EIP-4844.
|
|||
| - | - |
|
||||
| `DOMAIN_BLOBS_SIDECAR` | `DomainType('0x0a000000')` |
|
||||
|
||||
## Preset
|
||||
|
||||
### Execution
|
||||
|
||||
| Name | Value |
|
||||
| - | - |
|
||||
| `MAX_BLOBS_PER_BLOCK` | `uint64(2**4)` (= 16) |
|
||||
|
||||
## Configuration
|
||||
|
||||
|
||||
|
@ -78,43 +93,51 @@ class BeaconBlockBody(Container):
|
|||
sync_aggregate: SyncAggregate
|
||||
# Execution
|
||||
execution_payload: ExecutionPayload
|
||||
blob_kzgs: List[KZGCommitment, MAX_BLOBS_PER_BLOCK] # [New in EIP-4844]
|
||||
blob_kzg_commitments: List[KZGCommitment, MAX_BLOBS_PER_BLOCK] # [New in EIP-4844]
|
||||
```
|
||||
|
||||
## Helper functions
|
||||
|
||||
### Misc
|
||||
|
||||
#### `kzg_to_versioned_hash`
|
||||
#### `kzg_commitment_to_versioned_hash`
|
||||
|
||||
```python
|
||||
def kzg_to_versioned_hash(kzg: KZGCommitment) -> VersionedHash:
|
||||
return BLOB_COMMITMENT_VERSION_KZG + hash(kzg)[1:]
|
||||
def kzg_commitment_to_versioned_hash(kzg_commitment: KZGCommitment) -> VersionedHash:
|
||||
return VERSIONED_HASH_VERSION_KZG + hash(kzg_commitment)[1:]
|
||||
```
|
||||
|
||||
#### `tx_peek_blob_versioned_hashes`
|
||||
|
||||
This function retrieves the hashes from the `SignedBlobTransaction` as defined in EIP-4844, using SSZ offsets.
|
||||
Offsets are little-endian `uint32` values, as defined in the [SSZ specification](../../ssz/simple-serialize.md).
|
||||
See [the full details of `blob_versioned_hashes` offset calculation](https://gist.github.com/protolambda/23bd106b66f6d4bb854ce46044aa3ca3).
|
||||
|
||||
```python
|
||||
def tx_peek_blob_versioned_hashes(opaque_tx: Transaction) -> Sequence[VersionedHash]:
|
||||
assert opaque_tx[0] == BLOB_TX_TYPE
|
||||
message_offset = 1 + uint32.decode_bytes(opaque_tx[1:5])
|
||||
# field offset: 32 + 8 + 32 + 32 + 8 + 4 + 32 + 4 + 4 = 156
|
||||
blob_versioned_hashes_offset = uint32.decode_bytes(opaque_tx[message_offset+156:message_offset+160])
|
||||
return [VersionedHash(opaque_tx[x:x+32]) for x in range(blob_versioned_hashes_offset, len(opaque_tx), 32)]
|
||||
blob_versioned_hashes_offset = (
|
||||
message_offset
|
||||
+ uint32.decode_bytes(opaque_tx[(message_offset + 156):(message_offset + 160)])
|
||||
)
|
||||
return [
|
||||
VersionedHash(opaque_tx[x:(x + 32)])
|
||||
for x in range(blob_versioned_hashes_offset, len(opaque_tx), 32)
|
||||
]
|
||||
```
|
||||
|
||||
#### `verify_kzgs_against_transactions`
|
||||
#### `verify_kzg_commitments_against_transactions`
|
||||
|
||||
```python
|
||||
def verify_kzgs_against_transactions(transactions: Sequence[Transaction], blob_kzgs: Sequence[KZGCommitment]) -> bool:
|
||||
all_versioned_hashes = []
|
||||
for tx in transactions:
|
||||
if tx[0] == BLOB_TX_TYPE:
|
||||
all_versioned_hashes.extend(tx_peek_blob_versioned_hashes(tx))
|
||||
return all_versioned_hashes == [kzg_to_versioned_hash(kzg) for kzg in blob_kzgs]
|
||||
def verify_kzg_commitments_against_transactions(transactions: Sequence[Transaction],
|
||||
kzg_commitments: Sequence[KZGCommitment]) -> bool:
|
||||
all_versioned_hashes = []
|
||||
for tx in transactions:
|
||||
if tx[0] == BLOB_TX_TYPE:
|
||||
all_versioned_hashes += tx_peek_blob_versioned_hashes(tx)
|
||||
return all_versioned_hashes == [kzg_commitment_to_versioned_hash(commitment) for commitment in kzg_commitments]
|
||||
```
|
||||
|
||||
## Beacon chain state transition function
|
||||
|
@ -130,14 +153,14 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None:
|
|||
process_eth1_data(state, block.body)
|
||||
process_operations(state, block.body)
|
||||
process_sync_aggregate(state, block.body.sync_aggregate)
|
||||
process_blob_kzgs(state, block.body) # [New in EIP-4844]
|
||||
process_blob_kzg_commitments(state, block.body) # [New in EIP-4844]
|
||||
```
|
||||
|
||||
#### Blob KZGs
|
||||
#### Blob KZG commitments
|
||||
|
||||
```python
|
||||
def process_blob_kzgs(state: BeaconState, body: BeaconBlockBody):
|
||||
assert verify_kzgs_against_transactions(body.execution_payload.transactions, body.blob_kzgs)
|
||||
def process_blob_kzg_commitments(state: BeaconState, body: BeaconBlockBody):
|
||||
assert verify_kzg_commitments_against_transactions(body.execution_payload.transactions, body.blob_kzg_commitments)
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
@ -145,9 +168,53 @@ def process_blob_kzgs(state: BeaconState, body: BeaconBlockBody):
|
|||
*Note*: The function `initialize_beacon_state_from_eth1` is modified for pure EIP-4844 testing only.
|
||||
|
||||
The `BeaconState` initialization is unchanged, except for the use of the updated `eip4844.BeaconBlockBody` type
|
||||
when initializing the first body-root:
|
||||
when initializing the first body-root.
|
||||
|
||||
```python
|
||||
state.latest_block_header=BeaconBlockHeader(body_root=hash_tree_root(BeaconBlockBody())),
|
||||
```
|
||||
def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32,
|
||||
eth1_timestamp: uint64,
|
||||
deposits: Sequence[Deposit],
|
||||
execution_payload_header: ExecutionPayloadHeader=ExecutionPayloadHeader()
|
||||
) -> BeaconState:
|
||||
fork = Fork(
|
||||
previous_version=EIP4844_FORK_VERSION, # [Modified in EIP-4844] for testing only
|
||||
current_version=EIP4844_FORK_VERSION, # [Modified in EIP-4844]
|
||||
epoch=GENESIS_EPOCH,
|
||||
)
|
||||
state = BeaconState(
|
||||
genesis_time=eth1_timestamp + GENESIS_DELAY,
|
||||
fork=fork,
|
||||
eth1_data=Eth1Data(block_hash=eth1_block_hash, deposit_count=uint64(len(deposits))),
|
||||
latest_block_header=BeaconBlockHeader(body_root=hash_tree_root(BeaconBlockBody())),
|
||||
randao_mixes=[eth1_block_hash] * EPOCHS_PER_HISTORICAL_VECTOR, # Seed RANDAO with Eth1 entropy
|
||||
)
|
||||
|
||||
# Process deposits
|
||||
leaves = list(map(lambda deposit: deposit.data, deposits))
|
||||
for index, deposit in enumerate(deposits):
|
||||
deposit_data_list = List[DepositData, 2**DEPOSIT_CONTRACT_TREE_DEPTH](*leaves[:index + 1])
|
||||
state.eth1_data.deposit_root = hash_tree_root(deposit_data_list)
|
||||
process_deposit(state, deposit)
|
||||
|
||||
# Process activations
|
||||
for index, validator in enumerate(state.validators):
|
||||
balance = state.balances[index]
|
||||
validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE)
|
||||
if validator.effective_balance == MAX_EFFECTIVE_BALANCE:
|
||||
validator.activation_eligibility_epoch = GENESIS_EPOCH
|
||||
validator.activation_epoch = GENESIS_EPOCH
|
||||
|
||||
# Set genesis validators root for domain separation and chain versioning
|
||||
state.genesis_validators_root = hash_tree_root(state.validators)
|
||||
|
||||
# Fill in sync committees
|
||||
# Note: A duplicate committee is assigned for the current and next committee at genesis
|
||||
state.current_sync_committee = get_next_sync_committee(state)
|
||||
state.next_sync_committee = get_next_sync_committee(state)
|
||||
|
||||
# Initialize the execution payload header
|
||||
# If empty, will initialize a chain that has not yet gone through the Merge transition
|
||||
state.latest_execution_payload_header = execution_payload_header
|
||||
|
||||
return state
|
||||
```
|
||||
|
|
|
@ -9,6 +9,9 @@
|
|||
|
||||
- [Introduction](#introduction)
|
||||
- [Configuration](#configuration)
|
||||
- [Helper functions](#helper-functions)
|
||||
- [Misc](#misc)
|
||||
- [Modified `compute_fork_version`](#modified-compute_fork_version)
|
||||
- [Fork to EIP-4844](#fork-to-eip-4844)
|
||||
- [Fork trigger](#fork-trigger)
|
||||
- [Upgrading the state](#upgrading-the-state)
|
||||
|
@ -25,9 +28,29 @@ Warning: this configuration is not definitive.
|
|||
|
||||
| Name | Value |
|
||||
| - | - |
|
||||
| `EIP4844_FORK_VERSION` | `Version('0x03000000')` |
|
||||
| `EIP4844_FORK_VERSION` | `Version('0x04000000')` |
|
||||
| `EIP4844_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** |
|
||||
|
||||
## Helper functions
|
||||
|
||||
### Misc
|
||||
|
||||
#### Modified `compute_fork_version`
|
||||
|
||||
```python
|
||||
def compute_fork_version(epoch: Epoch) -> Version:
|
||||
"""
|
||||
Return the fork version at the given ``epoch``.
|
||||
"""
|
||||
if epoch >= EIP4844_FORK_EPOCH:
|
||||
return EIP4844_FORK_VERSION
|
||||
if epoch >= BELLATRIX_FORK_EPOCH:
|
||||
return BELLATRIX_FORK_VERSION
|
||||
if epoch >= ALTAIR_FORK_EPOCH:
|
||||
return ALTAIR_FORK_VERSION
|
||||
return GENESIS_FORK_VERSION
|
||||
```
|
||||
|
||||
## Fork to EIP-4844
|
||||
|
||||
### Fork trigger
|
||||
|
@ -39,5 +62,54 @@ Note that for the pure EIP-4844 networks, we don't apply `upgrade_to_eip4844` si
|
|||
|
||||
### Upgrading the state
|
||||
|
||||
The `eip4844.BeaconState` format is equal to the `bellatrix.BeaconState` format, no upgrade has to be performed.
|
||||
Since the `eip4844.BeaconState` format is equal to the `bellatrix.BeaconState` format, we only have to update `BeaconState.fork`.
|
||||
|
||||
```python
|
||||
def upgrade_to_eip4844(pre: bellatrix.BeaconState) -> BeaconState:
|
||||
# TODO: if Capella gets scheduled, add sync it with Capella.BeaconState
|
||||
epoch = bellatrix.get_current_epoch(pre)
|
||||
post = BeaconState(
|
||||
# Versioning
|
||||
genesis_time=pre.genesis_time,
|
||||
genesis_validators_root=pre.genesis_validators_root,
|
||||
slot=pre.slot,
|
||||
fork=Fork(
|
||||
previous_version=pre.fork.current_version,
|
||||
current_version=EIP4844_FORK_VERSION, # [Modified in EIP4844]
|
||||
epoch=epoch,
|
||||
),
|
||||
# History
|
||||
latest_block_header=pre.latest_block_header,
|
||||
block_roots=pre.block_roots,
|
||||
state_roots=pre.state_roots,
|
||||
historical_roots=pre.historical_roots,
|
||||
# Eth1
|
||||
eth1_data=pre.eth1_data,
|
||||
eth1_data_votes=pre.eth1_data_votes,
|
||||
eth1_deposit_index=pre.eth1_deposit_index,
|
||||
# Registry
|
||||
validators=pre.validators,
|
||||
balances=pre.balances,
|
||||
# Randomness
|
||||
randao_mixes=pre.randao_mixes,
|
||||
# Slashings
|
||||
slashings=pre.slashings,
|
||||
# Participation
|
||||
previous_epoch_participation=pre.previous_epoch_participation,
|
||||
current_epoch_participation=pre.current_epoch_participation,
|
||||
# Finality
|
||||
justification_bits=pre.justification_bits,
|
||||
previous_justified_checkpoint=pre.previous_justified_checkpoint,
|
||||
current_justified_checkpoint=pre.current_justified_checkpoint,
|
||||
finalized_checkpoint=pre.finalized_checkpoint,
|
||||
# Inactivity
|
||||
inactivity_scores=pre.inactivity_scores,
|
||||
# Sync
|
||||
current_sync_committee=pre.current_sync_committee,
|
||||
next_sync_committee=pre.next_sync_committee,
|
||||
# Execution-layer
|
||||
latest_execution_payload_header=pre.latest_execution_payload_header,
|
||||
)
|
||||
|
||||
return post
|
||||
```
|
||||
|
|
|
@ -10,7 +10,6 @@ The specification of these changes continues in the same format as the network s
|
|||
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
|
||||
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
|
||||
|
||||
- [Preset](#preset)
|
||||
- [Configuration](#configuration)
|
||||
- [Containers](#containers)
|
||||
- [`BlobsSidecar`](#blobssidecar)
|
||||
|
@ -32,13 +31,6 @@ The specification of these changes continues in the same format as the network s
|
|||
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
|
||||
<!-- /TOC -->
|
||||
|
||||
|
||||
## Preset
|
||||
|
||||
| Name | Value |
|
||||
| - | - |
|
||||
| `MAX_BLOBS_PER_BLOCK` | `uint64(2**4)` (= 16) |
|
||||
|
||||
## Configuration
|
||||
|
||||
| Name | Value | Description |
|
||||
|
@ -46,8 +38,6 @@ The specification of these changes continues in the same format as the network s
|
|||
| `MAX_REQUEST_BLOBS_SIDECARS` | `2**7` (= 128) | Maximum number of blobs sidecars in a single request |
|
||||
| `MIN_EPOCHS_FOR_BLOBS_SIDECARS_REQUESTS` | `2**13` (= 8192, ~1.2 months) | The minimum epoch range over which a node must serve blobs sidecars |
|
||||
|
||||
|
||||
|
||||
## Containers
|
||||
|
||||
### `BlobsSidecar`
|
||||
|
@ -68,7 +58,6 @@ class SignedBlobsSidecar(Container):
|
|||
signature: BLSSignature
|
||||
```
|
||||
|
||||
|
||||
## The gossip domain: gossipsub
|
||||
|
||||
Some gossip meshes are upgraded in the fork of EIP4844 to support upgraded types.
|
||||
|
@ -103,9 +92,9 @@ In addition to the gossip validations for this topic from prior specifications,
|
|||
the following validations MUST pass before forwarding the `signed_beacon_block` on the network.
|
||||
Alias `block = signed_beacon_block.message`, `execution_payload = block.body.execution_payload`.
|
||||
- _[REJECT]_ The KZG commitments of the blobs are all correctly encoded compressed BLS G1 Points.
|
||||
-- i.e. `all(bls.KeyValidate(commitment) for commitment in block.body.blob_kzgs)`
|
||||
-- i.e. `all(bls.KeyValidate(commitment) for commitment in block.body.blob_kzg_commitments)`
|
||||
- _[REJECT]_ The KZG commitments correspond to the versioned hashes in the transactions list.
|
||||
-- i.e. `verify_kzgs_against_transactions(block.body.execution_payload.transactions, block.body.blob_kzgs)`
|
||||
-- i.e. `verify_kzg_commitments_against_transactions(block.body.execution_payload.transactions, block.body.blob_kzg_commitments)`
|
||||
|
||||
##### `blobs_sidecar`
|
||||
|
||||
|
@ -113,22 +102,20 @@ This topic is used to propagate data blobs included in any given beacon block.
|
|||
|
||||
The following validations MUST pass before forwarding the `signed_blobs_sidecar` on the network;
|
||||
Alias `sidecar = signed_blobs_sidecar.message`.
|
||||
- _[IGNORE]_ the `sidecar.beacon_block_slot` is for the current slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `blobs_sidecar.beacon_block_slot == current_slot`.
|
||||
- _[IGNORE]_ the `sidecar.beacon_block_slot` is for the current slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `sidecar.beacon_block_slot == current_slot`.
|
||||
- _[REJECT]_ the `sidecar.blobs` are all well formatted, i.e. the `BLSFieldElement` in valid range (`x < BLS_MODULUS`).
|
||||
- _[REJECT]_ The KZG proof is a correctly encoded compressed BLS G1 Point -- i.e. `bls.KeyValidate(blobs_sidecar.kzg_aggregated_proof)
|
||||
- _[REJECT]_ the beacon proposer signature, `signed_blobs_sidecar.signature`, is valid -- i.e.
|
||||
```python
|
||||
domain = get_domain(state, DOMAIN_BLOBS_SIDECAR, blobs_sidecar.beacon_block_slot // SLOTS_PER_EPOCH)
|
||||
signing_root = compute_signing_root(blobs_sidecar, domain)
|
||||
assert bls.Verify(proposer_pubkey, signing_root, signed_blob_header.signature)
|
||||
```
|
||||
where `proposer_pubkey` is the pubkey of the beacon block proposer of `blobs_sidecar.beacon_block_slot`
|
||||
- Let `domain = get_domain(state, DOMAIN_BLOBS_SIDECAR, sidecar.beacon_block_slot // SLOTS_PER_EPOCH)`
|
||||
- Let `signing_root = compute_signing_root(sidecar, domain)`
|
||||
- Verify `bls.Verify(proposer_pubkey, signing_root, signed_blob_header.signature) is True`,
|
||||
where `proposer_pubkey` is the pubkey of the beacon block proposer of `sidecar.beacon_block_slot`
|
||||
- _[IGNORE]_ The sidecar is the first sidecar with valid signature received for the `(proposer_index, sidecar.beacon_block_slot)` combination,
|
||||
where `proposer_index` is the validator index of the beacon block proposer of `blobs_sidecar.beacon_block_slot`
|
||||
where `proposer_index` is the validator index of the beacon block proposer of `sidecar.beacon_block_slot`
|
||||
|
||||
Note that a sidecar may be propagated before or after the corresponding beacon block.
|
||||
|
||||
Once both sidecar and beacon block are received, `verify_blobs_sidecar` can unlock the data-availability fork-choice dependency.
|
||||
Once both sidecar and beacon block are received, `validate_blobs_sidecar` can unlock the data-availability fork-choice dependency.
|
||||
|
||||
### Transitioning the gossip
|
||||
|
||||
|
@ -199,7 +186,7 @@ The response is unsigned, i.e. `BlobsSidecarsByRange`, as the signature of the b
|
|||
may not be available beyond the initial distribution via gossip.
|
||||
|
||||
Before consuming the next response chunk, the response reader SHOULD verify the blobs sidecar is well-formatted and
|
||||
correct w.r.t. the expected KZG commitments through `verify_blobs_sidecar`.
|
||||
correct w.r.t. the expected KZG commitments through `validate_blobs_sidecar`.
|
||||
|
||||
`BlobsSidecarsByRange` is primarily used to sync blobs that may have been missed on gossip.
|
||||
|
||||
|
@ -247,8 +234,6 @@ Clients MUST respond with blobs sidecars that are consistent from a single chain
|
|||
After the initial blobs sidecar, clients MAY stop in the process of responding
|
||||
if their fork choice changes the view of the chain in the context of the request.
|
||||
|
||||
|
||||
|
||||
# Design decision rationale
|
||||
|
||||
## Why are blobs relayed as a sidecar, separate from beacon blocks?
|
||||
|
@ -259,4 +244,3 @@ thus avoiding all blobs being downloaded by all beacon nodes on the network.
|
|||
|
||||
Such sharding design may introduce an updated `BlobsSidecar` to identify the shard,
|
||||
but does not affect the `BeaconBlock` structure.
|
||||
|
||||
|
|
|
@ -16,9 +16,11 @@
|
|||
- [`bls_modular_inverse`](#bls_modular_inverse)
|
||||
- [`div`](#div)
|
||||
- [`lincomb`](#lincomb)
|
||||
- [`matrix_lincomb`](#matrix_lincomb)
|
||||
- [KZG](#kzg)
|
||||
- [`blob_to_kzg`](#blob_to_kzg)
|
||||
- [`blob_to_kzg_commitment`](#blob_to_kzg_commitment)
|
||||
- [`verify_kzg_proof`](#verify_kzg_proof)
|
||||
- [`compute_kzg_proof`](#compute_kzg_proof)
|
||||
- [Polynomials](#polynomials)
|
||||
- [`evaluate_polynomial_in_evaluation_form`](#evaluate_polynomial_in_evaluation_form)
|
||||
|
||||
|
@ -34,6 +36,8 @@ This document specifies basic polynomial operations and KZG polynomial commitmen
|
|||
|
||||
| Name | SSZ equivalent | Description |
|
||||
| - | - | - |
|
||||
| `G1Point` | `Bytes48` | |
|
||||
| `G2Point` | `Bytes96` | |
|
||||
| `BLSFieldElement` | `uint256` | `x < BLS_MODULUS` |
|
||||
| `KZGCommitment` | `Bytes48` | Same as BLS standard "is valid pubkey" check but also allows `0x00..00` for point-at-infinity |
|
||||
| `KZGProof` | `Bytes48` | Same as for `KZGCommitment` |
|
||||
|
@ -54,6 +58,7 @@ but reusing the `mainnet` settings in public networks is a critical security req
|
|||
|
||||
| Name | Value |
|
||||
| - | - |
|
||||
| `KZG_SETUP_G1` | `Vector[G1Point, FIELD_ELEMENTS_PER_BLOB]`, contents TBD |
|
||||
| `KZG_SETUP_G2` | `Vector[G2Point, FIELD_ELEMENTS_PER_BLOB]`, contents TBD |
|
||||
| `KZG_SETUP_LAGRANGE` | `Vector[KZGCommitment, FIELD_ELEMENTS_PER_BLOB]`, contents TBD |
|
||||
|
||||
|
@ -77,30 +82,47 @@ def bls_modular_inverse(x: BLSFieldElement) -> BLSFieldElement:
|
|||
```python
|
||||
def div(x: BLSFieldElement, y: BLSFieldElement) -> BLSFieldElement:
|
||||
"""Divide two field elements: `x` by `y`"""
|
||||
return x * bls_modular_inverse(y) % BLS_MODULUS
|
||||
return (int(x) * int(bls_modular_inverse(y))) % BLS_MODULUS
|
||||
```
|
||||
|
||||
#### `lincomb`
|
||||
|
||||
```python
|
||||
def lincomb(points: List[KZGCommitment], scalars: List[BLSFieldElement]) -> KZGCommitment:
|
||||
def lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElement]) -> KZGCommitment:
|
||||
"""
|
||||
BLS multiscalar multiplication. This function can be optimized using Pippenger's algorithm and variants.
|
||||
"""
|
||||
r = bls.Z1
|
||||
assert len(points) == len(scalars)
|
||||
result = bls.Z1
|
||||
for x, a in zip(points, scalars):
|
||||
r = bls.add(r, bls.multiply(x, a))
|
||||
return r
|
||||
result = bls.add(result, bls.multiply(bls.bytes48_to_G1(x), a))
|
||||
return KZGCommitment(bls.G1_to_bytes48(result))
|
||||
```
|
||||
|
||||
#### `matrix_lincomb`
|
||||
|
||||
```python
|
||||
def matrix_lincomb(vectors: Sequence[Sequence[BLSFieldElement]],
|
||||
scalars: Sequence[BLSFieldElement]) -> Sequence[BLSFieldElement]:
|
||||
"""
|
||||
Given a list of ``vectors``, interpret it as a 2D matrix and compute the linear combination
|
||||
of each column with `scalars`: return the resulting vector.
|
||||
"""
|
||||
result = [0] * len(vectors[0])
|
||||
for v, s in zip(vectors, scalars):
|
||||
for i, x in enumerate(v):
|
||||
result[i] = (result[i] + int(s) * int(x)) % BLS_MODULUS
|
||||
return [BLSFieldElement(x) for x in result]
|
||||
```
|
||||
|
||||
### KZG
|
||||
|
||||
KZG core functions. These are also defined in EIP-4844 execution specs.
|
||||
|
||||
#### `blob_to_kzg`
|
||||
#### `blob_to_kzg_commitment`
|
||||
|
||||
```python
|
||||
def blob_to_kzg(blob: Blob) -> KZGCommitment:
|
||||
def blob_to_kzg_commitment(blob: Blob) -> KZGCommitment:
|
||||
return lincomb(KZG_SETUP_LAGRANGE, blob)
|
||||
```
|
||||
|
||||
|
@ -108,39 +130,67 @@ def blob_to_kzg(blob: Blob) -> KZGCommitment:
|
|||
|
||||
```python
|
||||
def verify_kzg_proof(polynomial_kzg: KZGCommitment,
|
||||
x: BLSFieldElement,
|
||||
z: BLSFieldElement,
|
||||
y: BLSFieldElement,
|
||||
quotient_kzg: KZGProof) -> bool:
|
||||
kzg_proof: KZGProof) -> bool:
|
||||
"""
|
||||
Verify KZG proof that ``p(x) == y`` where ``p(x)`` is the polynomial represented by ``polynomial_kzg``.
|
||||
Verify KZG proof that ``p(z) == y`` where ``p(z)`` is the polynomial represented by ``polynomial_kzg``.
|
||||
"""
|
||||
# Verify: P - y = Q * (X - x)
|
||||
X_minus_x = bls.add(KZG_SETUP_G2[1], bls.multiply(bls.G2, BLS_MODULUS - x))
|
||||
P_minus_y = bls.add(polynomial_kzg, bls.multiply(bls.G1, BLS_MODULUS - y))
|
||||
# Verify: P - y = Q * (X - z)
|
||||
X_minus_z = bls.add(bls.bytes96_to_G2(KZG_SETUP_G2[1]), bls.multiply(bls.G2, BLS_MODULUS - z))
|
||||
P_minus_y = bls.add(bls.bytes48_to_G1(polynomial_kzg), bls.multiply(bls.G1, BLS_MODULUS - y))
|
||||
return bls.pairing_check([
|
||||
[P_minus_y, bls.neg(bls.G2)],
|
||||
[quotient_kzg, X_minus_x]
|
||||
[bls.bytes48_to_G1(kzg_proof), X_minus_z]
|
||||
])
|
||||
```
|
||||
|
||||
#### `compute_kzg_proof`
|
||||
|
||||
```python
|
||||
def compute_kzg_proof(polynomial: Sequence[BLSFieldElement], z: BLSFieldElement) -> KZGProof:
|
||||
"""Compute KZG proof at point `z` with `polynomial` being in evaluation form"""
|
||||
|
||||
# To avoid SSZ overflow/underflow, convert element into int
|
||||
polynomial = [int(i) for i in polynomial]
|
||||
z = int(z)
|
||||
|
||||
# Shift our polynomial first (in evaluation form we can't handle the division remainder)
|
||||
y = evaluate_polynomial_in_evaluation_form(polynomial, z)
|
||||
polynomial_shifted = [(p - int(y)) % BLS_MODULUS for p in polynomial]
|
||||
|
||||
# Make sure we won't divide by zero during division
|
||||
assert z not in ROOTS_OF_UNITY
|
||||
denominator_poly = [(x - z) % BLS_MODULUS for x in ROOTS_OF_UNITY]
|
||||
|
||||
# Calculate quotient polynomial by doing point-by-point division
|
||||
quotient_polynomial = [div(a, b) for a, b in zip(polynomial_shifted, denominator_poly)]
|
||||
return KZGProof(lincomb(KZG_SETUP_LAGRANGE, quotient_polynomial))
|
||||
```
|
||||
|
||||
### Polynomials
|
||||
|
||||
#### `evaluate_polynomial_in_evaluation_form`
|
||||
|
||||
```python
|
||||
def evaluate_polynomial_in_evaluation_form(poly: List[BLSFieldElement], x: BLSFieldElement) -> BLSFieldElement:
|
||||
def evaluate_polynomial_in_evaluation_form(polynomial: Sequence[BLSFieldElement],
|
||||
z: BLSFieldElement) -> BLSFieldElement:
|
||||
"""
|
||||
Evaluate a polynomial (in evaluation form) at an arbitrary point `x`
|
||||
Evaluate a polynomial (in evaluation form) at an arbitrary point `z`
|
||||
Uses the barycentric formula:
|
||||
f(x) = (1 - x**WIDTH) / WIDTH * sum_(i=0)^WIDTH (f(DOMAIN[i]) * DOMAIN[i]) / (x - DOMAIN[i])
|
||||
f(z) = (1 - z**WIDTH) / WIDTH * sum_(i=0)^WIDTH (f(DOMAIN[i]) * DOMAIN[i]) / (z - DOMAIN[i])
|
||||
"""
|
||||
width = len(poly)
|
||||
width = len(polynomial)
|
||||
assert width == FIELD_ELEMENTS_PER_BLOB
|
||||
inverse_width = bls_modular_inverse(width)
|
||||
|
||||
for i in range(width):
|
||||
r += div(poly[i] * ROOTS_OF_UNITY[i], (x - ROOTS_OF_UNITY[i]))
|
||||
r = r * (pow(x, width, BLS_MODULUS) - 1) * inverse_width % BLS_MODULUS
|
||||
# Make sure we won't divide by zero during division
|
||||
assert z not in ROOTS_OF_UNITY
|
||||
|
||||
return r
|
||||
result = 0
|
||||
for i in range(width):
|
||||
result += div(int(polynomial[i]) * int(ROOTS_OF_UNITY[i]), (z - ROOTS_OF_UNITY[i]))
|
||||
result = result * (pow(z, width, BLS_MODULUS) - 1) * inverse_width % BLS_MODULUS
|
||||
return result
|
||||
```
|
||||
|
||||
|
|
|
@ -10,16 +10,22 @@
|
|||
|
||||
- [Introduction](#introduction)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Custom types](#custom-types)
|
||||
- [Containers](#containers)
|
||||
- [`BlobsAndCommmitments`](#blobsandcommmitments)
|
||||
- [`PolynomialAndCommitment`](#polynomialandcommitment)
|
||||
- [Helpers](#helpers)
|
||||
- [`is_data_available`](#is_data_available)
|
||||
- [`hash_to_bls_field`](#hash_to_bls_field)
|
||||
- [`compute_powers`](#compute_powers)
|
||||
- [`vector_lincomb`](#vector_lincomb)
|
||||
- [`verify_blobs_sidecar`](#verify_blobs_sidecar)
|
||||
- [`compute_aggregated_poly_and_commitment`](#compute_aggregated_poly_and_commitment)
|
||||
- [`validate_blobs_sidecar`](#validate_blobs_sidecar)
|
||||
- [`compute_proof_from_blobs`](#compute_proof_from_blobs)
|
||||
- [`get_blobs_and_kzg_commitments`](#get_blobs_and_kzg_commitments)
|
||||
- [Beacon chain responsibilities](#beacon-chain-responsibilities)
|
||||
- [Block proposal](#block-proposal)
|
||||
- [Constructing the `BeaconBlockBody`](#constructing-the-beaconblockbody)
|
||||
- [Blob commitments](#blob-commitments)
|
||||
- [Blob KZG commitments](#blob-kzg-commitments)
|
||||
- [Beacon Block publishing time](#beacon-block-publishing-time)
|
||||
|
||||
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
|
||||
|
@ -37,21 +43,49 @@ All behaviors and definitions defined in this document, and documents it extends
|
|||
All terminology, constants, functions, and protocol mechanics defined in the updated [Beacon Chain doc of EIP4844](./beacon-chain.md) are requisite for this document and used throughout.
|
||||
Please see related Beacon Chain doc before continuing and use them as a reference throughout.
|
||||
|
||||
## Custom types
|
||||
|
||||
| Name | SSZ equivalent | Description |
|
||||
| - | - | - |
|
||||
| `Polynomial` | `List[BLSFieldElement, MAX_BLOBS_PER_BLOCK]` | a polynomial in evaluation form |
|
||||
|
||||
## Containers
|
||||
|
||||
### `BlobsAndCommmitments`
|
||||
|
||||
```python
|
||||
class BlobsAndCommmitments(Container):
|
||||
blobs: List[Blob, MAX_BLOBS_PER_BLOCK]
|
||||
kzg_commitments: List[KZGCommitment, MAX_BLOBS_PER_BLOCK]
|
||||
```
|
||||
|
||||
### `PolynomialAndCommitment`
|
||||
|
||||
```python
|
||||
class PolynomialAndCommitment(Container):
|
||||
polynomial: Polynomial
|
||||
kzg_commitment: KZGCommitment
|
||||
```
|
||||
|
||||
|
||||
## Helpers
|
||||
|
||||
### `is_data_available`
|
||||
|
||||
The implementation of `is_data_available` is meant to change with later sharding upgrades.
|
||||
Initially, it requires every verifying actor to retrieve the matching `BlobsSidecar`,
|
||||
and verify the sidecar with `verify_blobs_sidecar`.
|
||||
and validate the sidecar with `validate_blobs_sidecar`.
|
||||
|
||||
Without the sidecar the block may be processed further optimistically,
|
||||
but MUST NOT be considered valid until a valid `BlobsSidecar` has been downloaded.
|
||||
|
||||
```python
|
||||
def is_data_available(slot: Slot, beacon_block_root: Root, kzgs: Sequence[KZGCommitment]):
|
||||
sidecar = retrieve_blobs_sidecar(slot, beacon_block_root) # implementation dependent, raises an exception if not available
|
||||
verify_blobs_sidecar(slot, beacon_block_root, kzgs, sidecar)
|
||||
def is_data_available(slot: Slot, beacon_block_root: Root, blob_kzg_commitments: Sequence[KZGCommitment]) -> bool:
|
||||
# `retrieve_blobs_sidecar` is implementation dependent, raises an exception if not available.
|
||||
sidecar = retrieve_blobs_sidecar(slot, beacon_block_root)
|
||||
validate_blobs_sidecar(slot, beacon_block_root, blob_kzg_commitments, sidecar)
|
||||
|
||||
return True
|
||||
```
|
||||
|
||||
### `hash_to_bls_field`
|
||||
|
@ -66,7 +100,10 @@ def hash_to_bls_field(x: Container) -> BLSFieldElement:
|
|||
|
||||
### `compute_powers`
|
||||
```python
|
||||
def compute_powers(x: BLSFieldElement, n: uint64) -> List[BLSFieldElement]:
|
||||
def compute_powers(x: BLSFieldElement, n: uint64) -> Sequence[BLSFieldElement]:
|
||||
"""
|
||||
Return ``x`` to power of [0, n-1].
|
||||
"""
|
||||
current_power = 1
|
||||
powers = []
|
||||
for _ in range(n):
|
||||
|
@ -75,50 +112,82 @@ def compute_powers(x: BLSFieldElement, n: uint64) -> List[BLSFieldElement]:
|
|||
return powers
|
||||
```
|
||||
|
||||
### `vector_lincomb`
|
||||
### `compute_aggregated_poly_and_commitment`
|
||||
|
||||
```python
|
||||
def vector_lincomb(vectors: List[List[BLSFieldElement]], scalars: List[BLSFieldElement]) -> List[BLSFieldElement]:
|
||||
def compute_aggregated_poly_and_commitment(
|
||||
blobs: Sequence[BLSFieldElement],
|
||||
kzg_commitments: Sequence[KZGCommitment]) -> Tuple[Polynomial, KZGCommitment]:
|
||||
"""
|
||||
Given a list of vectors, compute the linear combination of each column with `scalars`, and return the resulting
|
||||
vector.
|
||||
Return the aggregated polynomial and aggregated KZG commitment.
|
||||
"""
|
||||
r = [0]*len(vectors[0])
|
||||
for v, a in zip(vectors, scalars):
|
||||
for i, x in enumerate(v):
|
||||
r[i] = (r[i] + a * x) % BLS_MODULUS
|
||||
return [BLSFieldElement(x) for x in r]
|
||||
# Generate random linear combination challenges
|
||||
r = hash_to_bls_field(BlobsAndCommmitments(blobs=blobs, kzg_commitments=kzg_commitments))
|
||||
r_powers = compute_powers(r, len(kzg_commitments))
|
||||
|
||||
# Create aggregated polynomial in evaluation form
|
||||
aggregated_poly = Polynomial(matrix_lincomb(blobs, r_powers))
|
||||
|
||||
# Compute commitment to aggregated polynomial
|
||||
aggregated_poly_commitment = KZGCommitment(lincomb(kzg_commitments, r_powers))
|
||||
|
||||
return aggregated_poly, aggregated_poly_commitment
|
||||
```
|
||||
|
||||
### `verify_blobs_sidecar`
|
||||
### `validate_blobs_sidecar`
|
||||
|
||||
```python
|
||||
def verify_blobs_sidecar(slot: Slot, beacon_block_root: Root,
|
||||
expected_kzgs: Sequence[KZGCommitment], blobs_sidecar: BlobsSidecar) -> None:
|
||||
def validate_blobs_sidecar(slot: Slot,
|
||||
beacon_block_root: Root,
|
||||
expected_kzg_commitments: Sequence[KZGCommitment],
|
||||
blobs_sidecar: BlobsSidecar) -> None:
|
||||
assert slot == blobs_sidecar.beacon_block_slot
|
||||
assert beacon_block_root == blobs_sidecar.beacon_block_root
|
||||
blobs = blobs_sidecar.blobs
|
||||
kzg_aggregated_proof = blobs_sidecar.kzg_aggregated_proof
|
||||
assert len(expected_kzgs) == len(blobs)
|
||||
assert len(expected_kzg_commitments) == len(blobs)
|
||||
|
||||
# Generate random linear combination challenges
|
||||
r = hash_to_bls_field([blobs, expected_kzgs])
|
||||
r_powers = compute_powers(r, len(expected_kzgs))
|
||||
|
||||
# Compute commitment to aggregated polynomial
|
||||
aggregated_poly_commitment = lincomb(expected_kzgs, r_powers)
|
||||
|
||||
# Create aggregated polynomial in evaluation form
|
||||
aggregated_poly = vector_lincomb(blobs, r_powers)
|
||||
aggregated_poly, aggregated_poly_commitment = compute_aggregated_poly_and_commitment(
|
||||
blobs,
|
||||
expected_kzg_commitments,
|
||||
)
|
||||
|
||||
# Generate challenge `x` and evaluate the aggregated polynomial at `x`
|
||||
x = hash_to_bls_field([aggregated_poly, aggregated_poly_commitment])
|
||||
x = hash_to_bls_field(
|
||||
PolynomialAndCommitment(polynomial=aggregated_poly, kzg_commitment=aggregated_poly_commitment)
|
||||
)
|
||||
# Evaluate aggregated polynomial at `x` (evaluation function checks for div-by-zero)
|
||||
y = evaluate_polynomial_in_evaluation_form(aggregated_poly, x)
|
||||
|
||||
# Verify aggregated proof
|
||||
assert verify_kzg_proof(aggregated_poly_commitment, x, y, kzg_aggregated_proof)
|
||||
```
|
||||
|
||||
### `compute_proof_from_blobs`
|
||||
|
||||
```python
|
||||
def compute_proof_from_blobs(blobs: Sequence[BLSFieldElement]) -> KZGProof:
|
||||
commitments = [blob_to_kzg_commitment(blob) for blob in blobs]
|
||||
aggregated_poly, aggregated_poly_commitment = compute_aggregated_poly_and_commitment(blobs, commitments)
|
||||
x = hash_to_bls_field(PolynomialAndCommitment(
|
||||
polynomial=aggregated_poly,
|
||||
kzg_commitment=aggregated_poly_commitment,
|
||||
))
|
||||
return compute_kzg_proof(aggregated_poly, x)
|
||||
```
|
||||
|
||||
### `get_blobs_and_kzg_commitments`
|
||||
|
||||
The interface to retrieve blobs and corresponding kzg commitments.
|
||||
|
||||
Note: This API is *unstable*. `get_blobs_and_kzg_commitments` and `get_payload` may be unified.
|
||||
Implementers may also retrieve blobs individually per transaction.
|
||||
|
||||
```python
|
||||
def get_blobs_and_kzg_commitments(payload_id: PayloadId) -> Tuple[Sequence[BLSFieldElement], Sequence[KZGCommitment]]:
|
||||
...
|
||||
```
|
||||
|
||||
## Beacon chain responsibilities
|
||||
|
||||
All validator responsibilities remain unchanged other than those noted below.
|
||||
|
@ -128,53 +197,51 @@ Namely, the blob handling and the addition of `BlobsSidecar`.
|
|||
|
||||
#### Constructing the `BeaconBlockBody`
|
||||
|
||||
##### Blob commitments
|
||||
##### Blob KZG commitments
|
||||
|
||||
After retrieving the execution payload from the execution engine as specified in Bellatrix,
|
||||
the blobs are retrieved and processed:
|
||||
1. After retrieving the execution payload from the execution engine as specified in Bellatrix,
|
||||
use the `payload_id` to retrieve `blobs` and `blob_kzg_commitments` via `get_blobs_and_kzg_commitments(payload_id)`.
|
||||
2. Validate `blobs` and `blob_kzg_commitments`:
|
||||
|
||||
```python
|
||||
# execution_payload = execution_engine.get_payload(payload_id)
|
||||
# block.body.execution_payload = execution_payload
|
||||
# ...
|
||||
def validate_blobs_and_kzg_commitments(execution_payload: ExecutionPayload,
|
||||
blobs: Sequence[BLSFieldElement],
|
||||
blob_kzg_commitments: Sequence[KZGCommitment]) -> None:
|
||||
# Optionally sanity-check that the KZG commitments match the versioned hashes in the transactions
|
||||
assert verify_kzg_commitments_against_transactions(execution_payload.transactions, blob_kzg_commitments)
|
||||
|
||||
kzgs, blobs = get_blobs(payload_id)
|
||||
|
||||
# Optionally sanity-check that the KZG commitments match the versioned hashes in the transactions
|
||||
assert verify_kzgs_against_transactions(execution_payload.transactions, kzgs)
|
||||
|
||||
# Optionally sanity-check that the KZG commitments match the blobs (as produced by the execution engine)
|
||||
assert len(kzgs) == len(blobs) and [blob_to_kzg(blob) == kzg for blob, kzg in zip(blobs, kzgs)]
|
||||
|
||||
# Update the block body
|
||||
block.body.blob_kzgs = kzgs
|
||||
# Optionally sanity-check that the KZG commitments match the blobs (as produced by the execution engine)
|
||||
assert len(blob_kzg_commitments) == len(blobs)
|
||||
assert [blob_to_kzg_commitment(blob) == commitment for blob, commitment in zip(blobs, blob_kzg_commitments)]
|
||||
```
|
||||
|
||||
The `blobs` should be held with the block in preparation of publishing.
|
||||
Without the `blobs`, the published block will effectively be ignored by honest validators.
|
||||
3. If valid, set `block.body.blob_kzg_commitments = blob_kzg_commitments`.
|
||||
|
||||
Note: This API is *unstable*. `get_blobs` and `get_payload` may be unified.
|
||||
Implementers may also retrieve blobs individually per transaction.
|
||||
Note that the `blobs` should be held with the block in preparation of publishing.
|
||||
Without the `blobs`, the published block will effectively be ignored by honest validators.
|
||||
|
||||
### Beacon Block publishing time
|
||||
|
||||
Before publishing a prepared beacon block proposal, the corresponding blobs are packaged into a sidecar object for distribution to the network:
|
||||
|
||||
```python
|
||||
blobs_sidecar = BlobsSidecar(
|
||||
beacon_block_root=hash_tree_root(beacon_block)
|
||||
beacon_block_slot=beacon_block.slot
|
||||
blobs=blobs,
|
||||
)
|
||||
def get_blobs_sidecar(block: BeaconBlock, blobs: Sequence[Blob]) -> BlobsSidecar:
|
||||
return BlobsSidecar(
|
||||
beacon_block_root=hash_tree_root(block),
|
||||
beacon_block_slot=block.slot,
|
||||
blobs=blobs,
|
||||
kzg_aggregated_proof=compute_proof_from_blobs(blobs),
|
||||
)
|
||||
```
|
||||
|
||||
And then signed:
|
||||
|
||||
```python
|
||||
domain = get_domain(state, DOMAIN_BLOBS_SIDECAR, blobs_sidecar.beacon_block_slot / SLOTS_PER_EPOCH)
|
||||
signing_root = compute_signing_root(blobs_sidecar, domain)
|
||||
signature = bls.Sign(privkey, signing_root)
|
||||
signed_blobs_sidecar = SignedBlobsSidecar(message=blobs_sidecar, signature=signature)
|
||||
def get_signed_blobs_sidecar(state: BeaconState, blobs_sidecar: BlobsSidecar, privkey: int) -> SignedBlobsSidecar:
|
||||
domain = get_domain(state, DOMAIN_BLOBS_SIDECAR, blobs_sidecar.beacon_block_slot // SLOTS_PER_EPOCH)
|
||||
signing_root = compute_signing_root(blobs_sidecar, domain)
|
||||
signature = bls.Sign(privkey, signing_root)
|
||||
return SignedBlobsSidecar(message=blobs_sidecar, signature=signature)
|
||||
```
|
||||
|
||||
This `signed_blobs_sidecar` is then published to the global `blobs_sidecar` topic as soon as the `beacon_block` is published.
|
||||
|
|
|
@ -3,7 +3,9 @@ import time
|
|||
import shutil
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
from filelock import FileLock
|
||||
import sys
|
||||
import json
|
||||
from typing import Iterable, AnyStr, Any, Callable
|
||||
import traceback
|
||||
|
||||
|
@ -111,6 +113,8 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
|
|||
collected_test_count = 0
|
||||
generated_test_count = 0
|
||||
skipped_test_count = 0
|
||||
test_identifiers = []
|
||||
|
||||
provider_start = time.time()
|
||||
for tprov in test_providers:
|
||||
if not collect_only:
|
||||
|
@ -123,12 +127,10 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
|
|||
/ Path(test_case.runner_name) / Path(test_case.handler_name)
|
||||
/ Path(test_case.suite_name) / Path(test_case.case_name)
|
||||
)
|
||||
incomplete_tag_file = case_dir / "INCOMPLETE"
|
||||
|
||||
collected_test_count += 1
|
||||
if collect_only:
|
||||
print(f"Collected test at: {case_dir}")
|
||||
continue
|
||||
print(f"Collected test at: {case_dir}")
|
||||
|
||||
incomplete_tag_file = case_dir / "INCOMPLETE"
|
||||
|
||||
if case_dir.exists():
|
||||
if not args.force and not incomplete_tag_file.exists():
|
||||
|
@ -198,6 +200,15 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
|
|||
shutil.rmtree(case_dir)
|
||||
else:
|
||||
generated_test_count += 1
|
||||
test_identifier = "::".join([
|
||||
test_case.preset_name,
|
||||
test_case.fork_name,
|
||||
test_case.runner_name,
|
||||
test_case.handler_name,
|
||||
test_case.suite_name,
|
||||
test_case.case_name
|
||||
])
|
||||
test_identifiers.append(test_identifier)
|
||||
# Only remove `INCOMPLETE` tag file
|
||||
os.remove(incomplete_tag_file)
|
||||
test_end = time.time()
|
||||
|
@ -216,6 +227,28 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
|
|||
if span > TIME_THRESHOLD_TO_PRINT:
|
||||
summary_message += f" in {span} seconds"
|
||||
print(summary_message)
|
||||
diagnostics = {
|
||||
"collected_test_count": collected_test_count,
|
||||
"generated_test_count": generated_test_count,
|
||||
"skipped_test_count": skipped_test_count,
|
||||
"test_identifiers": test_identifiers,
|
||||
"durations": [f"{span} seconds"],
|
||||
}
|
||||
diagnostics_path = Path(os.path.join(output_dir, "diagnostics.json"))
|
||||
diagnostics_lock = FileLock(os.path.join(output_dir, "diagnostics.json.lock"))
|
||||
with diagnostics_lock:
|
||||
diagnostics_path.touch(exist_ok=True)
|
||||
if os.path.getsize(diagnostics_path) == 0:
|
||||
with open(diagnostics_path, "w+") as f:
|
||||
json.dump(diagnostics, f)
|
||||
else:
|
||||
with open(diagnostics_path, "r+") as f:
|
||||
existing_diagnostics = json.load(f)
|
||||
for k, v in diagnostics.items():
|
||||
existing_diagnostics[k] += v
|
||||
with open(diagnostics_path, "w+") as f:
|
||||
json.dump(existing_diagnostics, f)
|
||||
print(f"wrote diagnostics to {diagnostics_path}")
|
||||
|
||||
|
||||
def dump_yaml_fn(data: Any, name: str, file_mode: str, yaml_encoder: YAML):
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
from eth2spec.test.context import (
|
||||
is_post_capella,
|
||||
is_post_eip4844,
|
||||
spec_configured_state_test,
|
||||
spec_state_test_with_matching_config,
|
||||
with_all_phases,
|
||||
with_phases
|
||||
with_phases,
|
||||
)
|
||||
from eth2spec.test.helpers.constants import ALTAIR
|
||||
|
||||
|
@ -38,9 +40,15 @@ def test_override_config_fork_epoch(spec, state):
|
|||
if state.fork.current_version == spec.config.BELLATRIX_FORK_VERSION:
|
||||
return
|
||||
|
||||
assert spec.config.CAPELLA_FORK_EPOCH == spec.GENESIS_EPOCH
|
||||
if state.fork.current_version == spec.config.CAPELLA_FORK_VERSION:
|
||||
return
|
||||
if is_post_capella(spec):
|
||||
assert spec.config.CAPELLA_FORK_EPOCH == spec.GENESIS_EPOCH
|
||||
if state.fork.current_version == spec.config.CAPELLA_FORK_VERSION:
|
||||
return
|
||||
|
||||
if is_post_eip4844(spec):
|
||||
assert spec.config.EIP4844_FORK_EPOCH == spec.GENESIS_EPOCH
|
||||
if state.fork.current_version == spec.config.EIP4844_FORK_VERSION:
|
||||
return
|
||||
|
||||
assert spec.config.SHARDING_FORK_EPOCH == spec.GENESIS_EPOCH
|
||||
if state.fork.current_version == spec.config.SHARDING_FORK_VERSION:
|
||||
|
|
|
@ -51,7 +51,7 @@ def pytest_addoption(parser):
|
|||
|
||||
def _validate_fork_name(forks):
|
||||
for fork in forks:
|
||||
if fork not in ALL_PHASES:
|
||||
if fork not in set(ALL_PHASES):
|
||||
raise ValueError(
|
||||
f'The given --fork argument "{fork}" is not an available fork.'
|
||||
f' The available forks: {ALL_PHASES}'
|
||||
|
|
|
@ -7,13 +7,14 @@ from eth2spec.phase0 import mainnet as spec_phase0_mainnet, minimal as spec_phas
|
|||
from eth2spec.altair import mainnet as spec_altair_mainnet, minimal as spec_altair_minimal
|
||||
from eth2spec.bellatrix import mainnet as spec_bellatrix_mainnet, minimal as spec_bellatrix_minimal
|
||||
from eth2spec.capella import mainnet as spec_capella_mainnet, minimal as spec_capella_minimal
|
||||
from eth2spec.eip4844 import mainnet as spec_eip4844_mainnet, minimal as spec_eip4844_minimal
|
||||
from eth2spec.utils import bls
|
||||
|
||||
from .exceptions import SkippedTest
|
||||
from .helpers.constants import (
|
||||
PHASE0, ALTAIR, BELLATRIX, CAPELLA,
|
||||
PHASE0, ALTAIR, BELLATRIX, CAPELLA, EIP4844, SHARDING,
|
||||
MINIMAL, MAINNET,
|
||||
ALL_PHASES, FORKS_BEFORE_ALTAIR, FORKS_BEFORE_BELLATRIX, FORKS_BEFORE_CAPELLA,
|
||||
ALL_PHASES, FORKS_BEFORE_ALTAIR, FORKS_BEFORE_BELLATRIX,
|
||||
ALL_FORK_UPGRADES,
|
||||
)
|
||||
from .helpers.typing import SpecForkName, PresetBaseName
|
||||
|
@ -76,12 +77,14 @@ spec_targets: Dict[PresetBaseName, Dict[SpecForkName, Spec]] = {
|
|||
ALTAIR: spec_altair_minimal,
|
||||
BELLATRIX: spec_bellatrix_minimal,
|
||||
CAPELLA: spec_capella_minimal,
|
||||
EIP4844: spec_eip4844_minimal,
|
||||
},
|
||||
MAINNET: {
|
||||
PHASE0: spec_phase0_mainnet,
|
||||
ALTAIR: spec_altair_mainnet,
|
||||
BELLATRIX: spec_bellatrix_mainnet,
|
||||
CAPELLA: spec_capella_mainnet,
|
||||
EIP4844: spec_eip4844_mainnet
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -277,20 +280,34 @@ def spec_configured_state_test(conf):
|
|||
return decorator
|
||||
|
||||
|
||||
def _check_current_version(spec, state, version_name):
|
||||
fork_version_field = version_name.upper() + '_FORK_VERSION'
|
||||
try:
|
||||
fork_version = getattr(spec.config, fork_version_field)
|
||||
except Exception:
|
||||
return False
|
||||
else:
|
||||
return state.fork.current_version == fork_version
|
||||
|
||||
|
||||
def config_fork_epoch_overrides(spec, state):
|
||||
overrides = {}
|
||||
if state.fork.current_version == spec.config.GENESIS_FORK_VERSION:
|
||||
pass
|
||||
elif state.fork.current_version == spec.config.ALTAIR_FORK_VERSION:
|
||||
elif _check_current_version(spec, state, ALTAIR):
|
||||
overrides['ALTAIR_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
elif state.fork.current_version == spec.config.BELLATRIX_FORK_VERSION:
|
||||
elif _check_current_version(spec, state, BELLATRIX):
|
||||
overrides['ALTAIR_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
overrides['BELLATRIX_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
elif state.fork.current_version == spec.config.CAPELLA_FORK_VERSION:
|
||||
elif _check_current_version(spec, state, CAPELLA):
|
||||
overrides['ALTAIR_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
overrides['BELLATRIX_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
overrides['CAPELLA_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
elif state.fork.current_version == spec.config.SHARDING_FORK_VERSION:
|
||||
elif _check_current_version(spec, state, EIP4844):
|
||||
overrides['ALTAIR_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
overrides['BELLATRIX_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
overrides['EIP4844_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
elif _check_current_version(spec, state, SHARDING):
|
||||
overrides['ALTAIR_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
overrides['BELLATRIX_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
overrides['CAPELLA_FORK_EPOCH'] = spec.GENESIS_EPOCH
|
||||
|
@ -576,12 +593,17 @@ def is_post_bellatrix(spec):
|
|||
|
||||
|
||||
def is_post_capella(spec):
|
||||
return spec.fork not in FORKS_BEFORE_CAPELLA
|
||||
return spec.fork == CAPELLA
|
||||
|
||||
|
||||
def is_post_eip4844(spec):
|
||||
return spec.fork == EIP4844
|
||||
|
||||
|
||||
with_altair_and_later = with_all_phases_except([PHASE0])
|
||||
with_bellatrix_and_later = with_all_phases_except([PHASE0, ALTAIR])
|
||||
with_capella_and_later = with_all_phases_except([PHASE0, ALTAIR, BELLATRIX])
|
||||
with_capella_and_later = with_all_phases_except([PHASE0, ALTAIR, BELLATRIX, EIP4844])
|
||||
with_eip4844_and_later = with_all_phases_except([PHASE0, ALTAIR, BELLATRIX, CAPELLA])
|
||||
|
||||
|
||||
def only_generator(reason):
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
from eth2spec.test.helpers.state import (
|
||||
state_transition_and_sign_block
|
||||
)
|
||||
from eth2spec.test.helpers.block import (
|
||||
build_empty_block_for_next_slot
|
||||
)
|
||||
from eth2spec.test.context import (
|
||||
spec_state_test,
|
||||
with_eip4844_and_later,
|
||||
)
|
||||
from eth2spec.test.helpers.sharding import (
|
||||
get_sample_opaque_tx,
|
||||
)
|
||||
|
||||
|
||||
@with_eip4844_and_later
|
||||
@spec_state_test
|
||||
def test_one_blob(spec, state):
|
||||
yield 'pre', state
|
||||
|
||||
block = build_empty_block_for_next_slot(spec, state)
|
||||
opaque_tx, _, blob_kzg_commitments = get_sample_opaque_tx(spec)
|
||||
block.body.blob_kzg_commitments = blob_kzg_commitments
|
||||
block.body.execution_payload.transactions = [opaque_tx]
|
||||
signed_block = state_transition_and_sign_block(spec, state, block)
|
||||
|
||||
yield 'blocks', [signed_block]
|
||||
yield 'post', state
|
||||
|
||||
|
||||
@with_eip4844_and_later
|
||||
@spec_state_test
|
||||
def test_multiple_blobs(spec, state):
|
||||
yield 'pre', state
|
||||
|
||||
block = build_empty_block_for_next_slot(spec, state)
|
||||
opaque_tx, _, blob_kzg_commitments = get_sample_opaque_tx(spec, blob_count=5)
|
||||
block.body.blob_kzg_commitments = blob_kzg_commitments
|
||||
block.body.execution_payload.transactions = [opaque_tx]
|
||||
signed_block = state_transition_and_sign_block(spec, state, block)
|
||||
|
||||
yield 'blocks', [signed_block]
|
||||
yield 'post', state
|
|
@ -0,0 +1,21 @@
|
|||
|
||||
from eth2spec.test.helpers.constants import (
|
||||
EIP4844,
|
||||
MINIMAL,
|
||||
)
|
||||
from eth2spec.test.helpers.sharding import (
|
||||
get_sample_blob,
|
||||
)
|
||||
from eth2spec.test.context import (
|
||||
with_phases,
|
||||
spec_state_test,
|
||||
with_presets,
|
||||
)
|
||||
|
||||
|
||||
@with_phases([EIP4844])
|
||||
@spec_state_test
|
||||
@with_presets([MINIMAL])
|
||||
def test_blob_to_kzg_commitment(spec, state):
|
||||
blob = get_sample_blob(spec)
|
||||
spec.blob_to_kzg_commitment(blob)
|
|
@ -0,0 +1,62 @@
|
|||
from eth2spec.test.helpers.state import (
|
||||
state_transition_and_sign_block,
|
||||
)
|
||||
from eth2spec.test.helpers.block import (
|
||||
build_empty_block_for_next_slot
|
||||
)
|
||||
from eth2spec.test.context import (
|
||||
spec_state_test,
|
||||
with_eip4844_and_later,
|
||||
)
|
||||
from eth2spec.test.helpers.sharding import (
|
||||
get_sample_opaque_tx,
|
||||
get_sample_blob,
|
||||
)
|
||||
from eth2spec.test.helpers.keys import privkeys
|
||||
|
||||
|
||||
@with_eip4844_and_later
|
||||
@spec_state_test
|
||||
def test_verify_kzg_proof(spec, state):
|
||||
x = 3
|
||||
polynomial = get_sample_blob(spec)
|
||||
polynomial = [int(i) for i in polynomial]
|
||||
commitment = spec.blob_to_kzg_commitment(polynomial)
|
||||
|
||||
# Get the proof
|
||||
proof = spec.compute_kzg_proof(polynomial, x)
|
||||
|
||||
y = spec.evaluate_polynomial_in_evaluation_form(polynomial, x)
|
||||
assert spec.verify_kzg_proof(commitment, x, y, proof)
|
||||
|
||||
|
||||
def _run_validate_blobs_sidecar_test(spec, state, blob_count):
|
||||
block = build_empty_block_for_next_slot(spec, state)
|
||||
opaque_tx, blobs, blob_kzg_commitments = get_sample_opaque_tx(spec, blob_count=blob_count)
|
||||
block.body.blob_kzg_commitments = blob_kzg_commitments
|
||||
block.body.execution_payload.transactions = [opaque_tx]
|
||||
state_transition_and_sign_block(spec, state, block)
|
||||
|
||||
blobs_sidecar = spec.get_blobs_sidecar(block, blobs)
|
||||
privkey = privkeys[1]
|
||||
spec.get_signed_blobs_sidecar(state, blobs_sidecar, privkey)
|
||||
expected_commitments = [spec.blob_to_kzg_commitment(blobs[i]) for i in range(blob_count)]
|
||||
spec.validate_blobs_sidecar(block.slot, block.hash_tree_root(), expected_commitments, blobs_sidecar)
|
||||
|
||||
|
||||
@with_eip4844_and_later
|
||||
@spec_state_test
|
||||
def test_validate_blobs_sidecar_one_blob(spec, state):
|
||||
_run_validate_blobs_sidecar_test(spec, state, blob_count=1)
|
||||
|
||||
|
||||
@with_eip4844_and_later
|
||||
@spec_state_test
|
||||
def test_validate_blobs_sidecar_two_blobs(spec, state):
|
||||
_run_validate_blobs_sidecar_test(spec, state, blob_count=2)
|
||||
|
||||
|
||||
@with_eip4844_and_later
|
||||
@spec_state_test
|
||||
def test_validate_blobs_sidecar_ten_blobs(spec, state):
|
||||
_run_validate_blobs_sidecar_test(spec, state, blob_count=10)
|
|
@ -14,9 +14,15 @@ CAPELLA = SpecForkName('capella')
|
|||
SHARDING = SpecForkName('sharding')
|
||||
CUSTODY_GAME = SpecForkName('custody_game')
|
||||
DAS = SpecForkName('das')
|
||||
EIP4844 = SpecForkName('eip4844')
|
||||
|
||||
# The forks that pytest runs with.
|
||||
ALL_PHASES = (PHASE0, ALTAIR, BELLATRIX, CAPELLA)
|
||||
# The forks that pytest can run with.
|
||||
ALL_PHASES = (
|
||||
# Formal forks
|
||||
PHASE0, ALTAIR, BELLATRIX, CAPELLA,
|
||||
# Experimental patches
|
||||
EIP4844,
|
||||
)
|
||||
# The forks that output to the test vectors.
|
||||
TESTGEN_FORKS = (PHASE0, ALTAIR, BELLATRIX)
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from eth2spec.test.helpers.constants import FORKS_BEFORE_CAPELLA
|
||||
from eth2spec.test.context import is_post_capella
|
||||
|
||||
|
||||
def build_empty_execution_payload(spec, state, randao_mix=None):
|
||||
|
@ -28,7 +28,7 @@ def build_empty_execution_payload(spec, state, randao_mix=None):
|
|||
block_hash=spec.Hash32(),
|
||||
transactions=empty_txs,
|
||||
)
|
||||
if spec.fork not in FORKS_BEFORE_CAPELLA:
|
||||
if is_post_capella(spec):
|
||||
num_withdrawals = min(spec.MAX_WITHDRAWALS_PER_PAYLOAD, len(state.withdrawal_queue))
|
||||
payload.withdrawals = state.withdrawal_queue[:num_withdrawals]
|
||||
|
||||
|
@ -55,7 +55,7 @@ def get_execution_payload_header(spec, execution_payload):
|
|||
block_hash=execution_payload.block_hash,
|
||||
transactions_root=spec.hash_tree_root(execution_payload.transactions)
|
||||
)
|
||||
if spec.fork not in FORKS_BEFORE_CAPELLA:
|
||||
if is_post_capella(spec):
|
||||
payload_header.withdrawals_root = spec.hash_tree_root(execution_payload.withdrawals)
|
||||
return payload_header
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@ from eth2spec.test.helpers.constants import (
|
|||
ALTAIR,
|
||||
BELLATRIX,
|
||||
CAPELLA,
|
||||
EIP4844,
|
||||
)
|
||||
from eth2spec.test.helpers.deposits import (
|
||||
prepare_state_and_deposit,
|
||||
|
@ -150,6 +151,8 @@ def do_fork(state, spec, post_spec, fork_epoch, with_block=True, operation_dict=
|
|||
state = post_spec.upgrade_to_bellatrix(state)
|
||||
elif post_spec.fork == CAPELLA:
|
||||
state = post_spec.upgrade_to_capella(state)
|
||||
elif post_spec.fork == EIP4844:
|
||||
state = post_spec.upgrade_to_eip4844(state)
|
||||
|
||||
assert state.fork.epoch == fork_epoch
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from eth2spec.test.helpers.constants import (
|
||||
ALTAIR, BELLATRIX, CAPELLA,
|
||||
FORKS_BEFORE_ALTAIR, FORKS_BEFORE_BELLATRIX, FORKS_BEFORE_CAPELLA,
|
||||
ALTAIR, BELLATRIX, CAPELLA, EIP4844,
|
||||
FORKS_BEFORE_ALTAIR, FORKS_BEFORE_BELLATRIX,
|
||||
)
|
||||
from eth2spec.test.helpers.keys import pubkeys
|
||||
|
||||
|
@ -20,7 +20,7 @@ def build_mock_validator(spec, i: int, balance: int):
|
|||
effective_balance=min(balance - balance % spec.EFFECTIVE_BALANCE_INCREMENT, spec.MAX_EFFECTIVE_BALANCE)
|
||||
)
|
||||
|
||||
if spec.fork not in FORKS_BEFORE_CAPELLA:
|
||||
if spec.fork in (CAPELLA):
|
||||
validator.fully_withdrawn_epoch = spec.FAR_FUTURE_EPOCH
|
||||
|
||||
return validator
|
||||
|
@ -60,6 +60,9 @@ def create_genesis_state(spec, validator_balances, activation_threshold):
|
|||
elif spec.fork == CAPELLA:
|
||||
previous_version = spec.config.BELLATRIX_FORK_VERSION
|
||||
current_version = spec.config.CAPELLA_FORK_VERSION
|
||||
elif spec.fork == EIP4844:
|
||||
previous_version = spec.config.BELLATRIX_FORK_VERSION
|
||||
current_version = spec.config.EIP4844_FORK_VERSION
|
||||
|
||||
state = spec.BeaconState(
|
||||
genesis_time=0,
|
||||
|
|
|
@ -0,0 +1,81 @@
|
|||
import random
|
||||
from eth2spec.utils.ssz.ssz_typing import (
|
||||
Container,
|
||||
Bytes20, Bytes32,
|
||||
ByteList,
|
||||
List,
|
||||
Union,
|
||||
boolean,
|
||||
uint256, uint64,
|
||||
)
|
||||
from eth2spec.utils.ssz.ssz_impl import serialize
|
||||
|
||||
|
||||
#
# Containers from EIP-4844
#
# Generous upper bounds for the SSZ list fields of the test-only blob
# transaction containers below. 2**24 appears chosen as a safe over-estimate
# for test serialization — TODO confirm against the EIP-4844 networking spec.
MAX_CALLDATA_SIZE = 2**24
MAX_VERSIONED_HASHES_LIST_SIZE = 2**24
MAX_ACCESS_LIST_STORAGE_KEYS = 2**24
MAX_ACCESS_LIST_SIZE = 2**24
|
||||
|
||||
|
||||
class AccessTuple(Container):
    """Access-list entry: an address plus the storage keys it touches.

    Field order defines the SSZ serialization and must not change.
    """
    address: Bytes20  # Address = Bytes20
    storage_keys: List[Bytes32, MAX_ACCESS_LIST_STORAGE_KEYS]
|
||||
|
||||
|
||||
class ECDSASignature(Container):
    """ECDSA signature as (y_parity, r, s) components.

    Field order defines the SSZ serialization and must not change.
    """
    y_parity: boolean
    r: uint256
    s: uint256
|
||||
|
||||
|
||||
class BlobTransaction(Container):
    """SSZ shape of an EIP-4844 blob transaction payload (test helper).

    Field order defines the SSZ serialization and must not change.
    """
    chain_id: uint256
    nonce: uint64
    priority_fee_per_gas: uint256
    max_basefee_per_gas: uint256
    gas: uint64
    to: Union[None, Bytes20]  # Address = Bytes20; None presumably means create — TODO confirm
    value: uint256
    data: ByteList[MAX_CALLDATA_SIZE]
    access_list: List[AccessTuple, MAX_ACCESS_LIST_SIZE]
    blob_versioned_hashes: List[Bytes32, MAX_VERSIONED_HASHES_LIST_SIZE]
|
||||
|
||||
|
||||
class SignedBlobTransaction(Container):
    """A BlobTransaction together with its ECDSA signature.

    Field order defines the SSZ serialization and must not change.
    """
    message: BlobTransaction
    signature: ECDSASignature
|
||||
|
||||
|
||||
def get_sample_blob(spec, rng=None):
|
||||
if rng is None:
|
||||
rng = random.Random(5566)
|
||||
|
||||
return spec.Blob([
|
||||
rng.randint(0, spec.BLS_MODULUS - 1)
|
||||
for _ in range(spec.FIELD_ELEMENTS_PER_BLOB)
|
||||
])
|
||||
|
||||
|
||||
def get_sample_opaque_tx(spec, blob_count=1, rng=None):
    """Build a serialized blob transaction and its associated blob data.

    Returns a tuple ``(opaque_tx, blobs, blob_kzg_commitments)`` where
    ``opaque_tx`` is the type-prefixed, SSZ-serialized SignedBlobTransaction.
    """
    blobs = []
    blob_kzg_commitments = []
    blob_versioned_hashes = []
    for _ in range(blob_count):
        blob = get_sample_blob(spec, rng)
        commitment = spec.KZGCommitment(spec.blob_to_kzg_commitment(blob))
        blobs.append(blob)
        blob_kzg_commitments.append(commitment)
        blob_versioned_hashes.append(spec.kzg_commitment_to_versioned_hash(commitment))

    # Only the versioned hashes matter for these tests; other fields keep defaults.
    signed_blob_tx = SignedBlobTransaction(
        message=BlobTransaction(
            blob_versioned_hashes=blob_versioned_hashes,
        )
    )
    # Prefix the SSZ payload with the blob transaction type byte.
    opaque_tx = spec.uint_to_bytes(spec.BLOB_TX_TYPE) + serialize(signed_blob_tx)
    return opaque_tx, blobs, blob_kzg_commitments
|
|
@ -1,7 +1,7 @@
|
|||
from eth2spec.test.context import with_all_phases, spec_state_test
|
||||
from eth2spec.test.helpers.block import build_empty_block_for_next_slot
|
||||
from eth2spec.test.helpers.attestations import get_valid_attestation, sign_attestation
|
||||
from eth2spec.test.helpers.constants import PHASE0, ALTAIR, BELLATRIX, CAPELLA
|
||||
from eth2spec.test.helpers.constants import ALL_PHASES
|
||||
from eth2spec.test.helpers.state import transition_to, state_transition_and_sign_block, next_epoch, next_slot
|
||||
from eth2spec.test.helpers.fork_choice import get_genesis_forkchoice_store
|
||||
|
||||
|
@ -19,7 +19,7 @@ def run_on_attestation(spec, state, store, attestation, valid=True):
|
|||
spec.on_attestation(store, attestation)
|
||||
|
||||
sample_index = indexed_attestation.attesting_indices[0]
|
||||
if spec.fork in (PHASE0, ALTAIR, BELLATRIX, CAPELLA):
|
||||
if spec.fork in ALL_PHASES:
|
||||
latest_message = spec.LatestMessage(
|
||||
epoch=attestation.data.target.epoch,
|
||||
root=attestation.data.beacon_block_root,
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
from py_ecc.bls import G2ProofOfPossession as py_ecc_bls
|
||||
from py_ecc.bls.g2_primatives import signature_to_G2 as _signature_to_G2
|
||||
from py_ecc.optimized_bls12_381 import ( # noqa: F401
|
||||
G1,
|
||||
G2,
|
||||
Z1,
|
||||
Z2,
|
||||
add,
|
||||
multiply,
|
||||
neg,
|
||||
pairing,
|
||||
final_exponentiate,
|
||||
FQ12
|
||||
)
|
||||
from py_ecc.bls.g2_primitives import ( # noqa: F401
|
||||
G1_to_pubkey as G1_to_bytes48,
|
||||
pubkey_to_G1 as bytes48_to_G1,
|
||||
G2_to_signature as G2_to_bytes96,
|
||||
signature_to_G2 as bytes96_to_G2,
|
||||
)
|
||||
|
||||
|
||||
import milagro_bls_binding as milagro_bls # noqa: F401 for BLS switching option
|
||||
|
||||
# Flag to make BLS active or not. Used for testing, do not ignore BLS in production unless you know what you are doing.
|
||||
|
@ -109,3 +129,12 @@ def SkToPk(SK):
|
|||
return bls.SkToPk(SK)
|
||||
else:
|
||||
return bls.SkToPk(SK.to_bytes(32, 'big'))
|
||||
|
||||
|
||||
def pairing_check(values):
    """Return True iff e(q1, p1) * e(q2, p2) == 1 for the two (p, q) pairs in ``values``."""
    (p_1, q_1), (p_2, q_2) = values
    # Defer the final exponentiation so it is applied once over the product.
    product = (
        pairing(q_1, p_1, final_exponentiate=False)
        * pairing(q_2, p_2, final_exponentiate=False)
    )
    return final_exponentiate(product) == FQ12.one()
|
||||
|
|
|
@ -0,0 +1,80 @@
|
|||
# Ref:
|
||||
# - https://github.com/ethereum/research/blob/8f084630528ba33d92b2bc05edf5338dd193c6f1/trusted_setup/trusted_setup.py
|
||||
# - https://github.com/asn-d6/kzgverify
|
||||
from py_ecc.optimized_bls12_381 import ( # noqa: F401
|
||||
G1,
|
||||
G2,
|
||||
Z1,
|
||||
Z2,
|
||||
curve_order as BLS_MODULUS,
|
||||
add,
|
||||
multiply,
|
||||
neg,
|
||||
)
|
||||
from eth2spec.utils import bls
|
||||
|
||||
|
||||
# Primitive root of the BLS12-381 scalar field, used to derive roots of unity
# of any order dividing BLS_MODULUS - 1 (value taken from the upstream
# trusted-setup reference scripts — see file header links).
PRIMITIVE_ROOT_OF_UNITY = 7
|
||||
|
||||
|
||||
def generate_setup(generator, secret, length):
    """
    Generate a trusted setup of ``length`` points:
    (generator, generator * secret, generator * secret**2, ...).
    """
    points = [generator]
    while len(points) < length:
        # Each point is the previous one scaled by the secret.
        points.append(multiply(points[-1], secret))
    return tuple(points)
|
||||
|
||||
|
||||
def fft(vals, modulus, domain):
    """
    FFT for group elements: combines the curve points ``vals`` over the
    evaluation ``domain`` (roots of unity; length must be a power of two).

    NOTE(review): ``modulus`` is threaded through the recursion but never used
    in this body — the group operations (add/multiply/neg) act on curve points.
    """
    if len(vals) == 1:
        return vals
    # Recurse on even- and odd-indexed halves; both halves use the squared
    # domain (every second root of unity).
    L = fft(vals[::2], modulus, domain[::2])
    R = fft(vals[1::2], modulus, domain[::2])
    o = [0] * len(vals)
    for i, (x, y) in enumerate(zip(L, R)):
        # Butterfly step: o[i] = x + w^i * y, o[i + n/2] = x - w^i * y.
        y_times_root = multiply(y, domain[i])
        o[i] = add(x, y_times_root)
        o[i + len(L)] = add(x, neg(y_times_root))
    return o
|
||||
|
||||
|
||||
def compute_root_of_unity(length) -> int:
    """
    Generate a w such that ``w**length = 1`` (mod BLS_MODULUS).

    ``length`` must divide the multiplicative group order BLS_MODULUS - 1.
    """
    group_order = BLS_MODULUS - 1
    assert group_order % length == 0
    # Raise the primitive root to (group order / length) to get an
    # element of exact order dividing ``length``.
    return pow(PRIMITIVE_ROOT_OF_UNITY, group_order // length, BLS_MODULUS)
|
||||
|
||||
|
||||
def compute_roots_of_unity(field_elements_per_blob):
    """
    Compute the list [1, w, w**2, ..., w**(n-1)] of n-th roots of unity,
    where n = ``field_elements_per_blob``.

    The order must divide the BLS multiplicative group order, i.e. BLS_MODULUS - 1.
    """
    assert (BLS_MODULUS - 1) % field_elements_per_blob == 0
    root_of_unity = compute_root_of_unity(length=field_elements_per_blob)

    # Accumulate successive powers of the root, starting from w**0 == 1.
    roots = [1]
    for _ in range(field_elements_per_blob - 1):
        roots.append(roots[-1] * root_of_unity % BLS_MODULUS)
    return roots
|
||||
|
||||
|
||||
def get_lagrange(setup):
    """
    Convert a G1 or G2 portion of a setup into the Lagrange basis.

    Uses the inverse-FFT identity IFFT(x) = (1/n) * reverse(FFT(x)), with the
    scalar inverse of n computed via Fermat's little theorem.
    """
    root_of_unity = compute_root_of_unity(len(setup))
    assert pow(root_of_unity, len(setup), BLS_MODULUS) == 1
    domain = [pow(root_of_unity, i, BLS_MODULUS) for i in range(len(setup))]
    # TODO: introduce an IFFT function for simplicity
    fft_output = fft(setup, BLS_MODULUS, domain)
    # 1/n mod BLS_MODULUS via Fermat's little theorem (BLS_MODULUS is prime).
    inv_length = pow(len(setup), BLS_MODULUS - 2, BLS_MODULUS)
    # fft_output[-i] walks the FFT result in reverse; note -0 aliases index 0,
    # which is exactly the reversal the IFFT identity requires.
    return [bls.G1_to_bytes48(multiply(fft_output[-i], inv_length)) for i in range(len(fft_output))]
|
Loading…
Reference in New Issue