Merge pull request #3725 from ethereum/dev

release v1.5.0-alpha.1
This commit is contained in:
Hsiao-Wei Wang 2024-04-27 15:14:15 +08:00 committed by GitHub
commit 594daf424d
55 changed files with 2202 additions and 505 deletions

View File

@ -52,14 +52,13 @@ DENEB_FORK_VERSION: 0x04000000
DENEB_FORK_EPOCH: 269568 # March 13, 2024, 01:55:35pm UTC
# Electra
ELECTRA_FORK_VERSION: 0x05000000
ELECTRA_FORK_EPOCH: 18446744073709551615
ELECTRA_FORK_EPOCH: 18446744073709551615 # temporary stub
# EIP7594
EIP7594_FORK_VERSION: 0x06000000 # temporary stub
EIP7594_FORK_EPOCH: 18446744073709551615
# WHISK
WHISK_FORK_VERSION: 0x08000000 # temporary stub
WHISK_FORK_EPOCH: 18446744073709551615
# EIP7594
EIP7594_FORK_VERSION: 0x06000001
EIP7594_FORK_EPOCH: 18446744073709551615
# Time parameters
# ---------------------------------------------------------------
@ -157,6 +156,7 @@ WHISK_PROPOSER_SELECTION_GAP: 2
# EIP7594
NUMBER_OF_COLUMNS: 128
MAX_CELLS_IN_EXTENDED_MATRIX: 768
DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32
MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384

View File

@ -52,12 +52,12 @@ DENEB_FORK_EPOCH: 18446744073709551615
# Electra
ELECTRA_FORK_VERSION: 0x05000001
ELECTRA_FORK_EPOCH: 18446744073709551615
# WHISK
WHISK_FORK_VERSION: 0x08000001
WHISK_FORK_EPOCH: 18446744073709551615
# EIP7594
EIP7594_FORK_VERSION: 0x06000001
EIP7594_FORK_EPOCH: 18446744073709551615
# WHISK
WHISK_FORK_VERSION: 0x08000001
WHISK_FORK_EPOCH: 18446744073709551615
# Time parameters
# ---------------------------------------------------------------
@ -155,6 +155,7 @@ WHISK_PROPOSER_SELECTION_GAP: 1
# EIP7594
NUMBER_OF_COLUMNS: 128
MAX_CELLS_IN_EXTENDED_MATRIX: 768
DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32
MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384

View File

@ -17,6 +17,7 @@ from eth2spec.capella import {preset_name} as capella
def preparations(cls):
return '''
T = TypeVar('T') # For generic function
TPoint = TypeVar('TPoint') # For generic function. G1 or G2 point.
'''
@classmethod

View File

@ -19,6 +19,7 @@ from eth2spec.deneb import {preset_name} as deneb
'FIELD_ELEMENTS_PER_CELL': spec_object.preset_vars['FIELD_ELEMENTS_PER_CELL'].value,
'FIELD_ELEMENTS_PER_EXT_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_EXT_BLOB'].value,
'NUMBER_OF_COLUMNS': spec_object.config_vars['NUMBER_OF_COLUMNS'].value,
'MAX_CELLS_IN_EXTENDED_MATRIX': spec_object.config_vars['MAX_CELLS_IN_EXTENDED_MATRIX'].value,
}
@classmethod

View File

@ -8,6 +8,8 @@
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Constants](#constants)
- [Misc](#misc)
- [Custom types](#custom-types)
- [Configuration](#configuration)
- [Data size](#data-size)
@ -39,6 +41,16 @@
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->
## Constants
The following values are (non-configurable) constants used throughout the specification.
### Misc
| Name | Value |
| - | - |
| `UINT256_MAX` | `uint256(2**256 - 1)` |
## Custom types
We define the following Python custom types for type hinting and readability:
@ -46,7 +58,7 @@ We define the following Python custom types for type hinting and readability:
| Name | SSZ equivalent | Description |
| - | - | - |
| `DataColumn` | `List[Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK]` | The data of each column in EIP-7594 |
| `ExtendedMatrix` | `List[Cell, MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS]` | The full data of one-dimensional erasure coding extended blobs (in row major format) |
| `ExtendedMatrix` | `List[Cell, MAX_CELLS_IN_EXTENDED_MATRIX]` | The full data of one-dimensional erasure coding extended blobs (in row major format). |
## Configuration
@ -54,7 +66,8 @@ We define the following Python custom types for type hinting and readability:
| Name | Value | Description |
| - | - | - |
| `NUMBER_OF_COLUMNS` | `uint64(FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL)` (= 128) | Number of columns in the extended data matrix. |
| `NUMBER_OF_COLUMNS` | `uint64(CELLS_PER_EXT_BLOB)` (= 128) | Number of columns in the extended data matrix. |
| `MAX_CELLS_IN_EXTENDED_MATRIX` | `uint64(MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS)` (= 768) | The data size of `ExtendedMatrix`. |
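A non-normative arithmetic sketch of the derived values above, assuming the Deneb mainnet preset values `CELLS_PER_EXT_BLOB = 128` and `MAX_BLOBS_PER_BLOCK = 6` (these preset values are assumptions, not part of this diff):

```python
# Non-normative arithmetic check, assuming mainnet preset values.
CELLS_PER_EXT_BLOB = 128   # FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL = 8192 // 64
MAX_BLOBS_PER_BLOCK = 6

NUMBER_OF_COLUMNS = CELLS_PER_EXT_BLOB                                   # 128
MAX_CELLS_IN_EXTENDED_MATRIX = MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS   # 768

assert NUMBER_OF_COLUMNS == 128
assert MAX_CELLS_IN_EXTENDED_MATRIX == 768
```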
### Networking
@ -95,8 +108,11 @@ def get_custody_columns(node_id: NodeID, custody_subnet_count: uint64) -> Sequen
subnet_ids = []
i = 0
while len(subnet_ids) < custody_subnet_count:
if node_id == UINT256_MAX:
node_id = 0
subnet_id = (
bytes_to_uint64(hash(uint_to_bytes(uint64(node_id + i)))[0:8])
bytes_to_uint64(hash(uint_to_bytes(uint256(node_id + i)))[0:8])
% DATA_COLUMN_SIDECAR_SUBNET_COUNT
)
if subnet_id not in subnet_ids:
@ -105,11 +121,11 @@ def get_custody_columns(node_id: NodeID, custody_subnet_count: uint64) -> Sequen
assert len(subnet_ids) == len(set(subnet_ids))
columns_per_subnet = NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT
return [
return sorted([
ColumnIndex(DATA_COLUMN_SIDECAR_SUBNET_COUNT * i + subnet_id)
for i in range(columns_per_subnet)
for subnet_id in subnet_ids
]
])
```
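A hedged usage sketch (not part of the diff): deriving the custody columns for a node, assuming `CUSTODY_REQUIREMENT` from this document's custody settings is in scope; `example_node_id` is an arbitrary illustrative value.

```python
# Illustrative only; `example_node_id` is an arbitrary value, not from the source.
example_node_id = NodeID(2**200 + 12345)
columns = get_custody_columns(example_node_id, CUSTODY_REQUIREMENT)

# The result is deterministic, sorted, duplicate-free, and its size is
# custody_subnet_count * NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT.
assert list(columns) == sorted(set(columns))
assert len(columns) == CUSTODY_REQUIREMENT * NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT
assert all(column < NUMBER_OF_COLUMNS for column in columns)
```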
#### `compute_extended_matrix`
@ -135,21 +151,16 @@ def recover_matrix(cells_dict: Dict[Tuple[BlobIndex, CellID], Cell], blob_count:
"""
Return the recovered ``ExtendedMatrix``.
This helper demonstrates how to apply ``recover_polynomial``.
This helper demonstrates how to apply ``recover_all_cells``.
The data structure for storing cells is implementation-dependent.
"""
extended_matrix = []
for blob_index in range(blob_count):
cell_ids = [cell_id for b_index, cell_id in cells_dict.keys() if b_index == blob_index]
cells = [cells_dict[(blob_index, cell_id)] for cell_id in cell_ids]
cells_bytes = [[bls_field_to_bytes(element) for element in cell] for cell in cells]
full_polynomial = recover_polynomial(cell_ids, cells_bytes)
cells_from_full_polynomial = [
full_polynomial[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL]
for i in range(CELLS_PER_BLOB)
]
extended_matrix.extend(cells_from_full_polynomial)
all_cells_for_row = recover_all_cells(cell_ids, cells)
extended_matrix.extend(all_cells_for_row)
return ExtendedMatrix(extended_matrix)
```
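A hedged usage sketch for `recover_matrix`, assuming `blobs` is a list of valid blobs and the other helpers of this document are in scope: keeping only half of each row's cells is enough to recover the full matrix.

```python
# Illustrative only: keep the first half of each row's cells, then recover the rest.
cells_dict = {}
for blob_index, blob in enumerate(blobs):
    cells = compute_cells(blob)
    for cell_id in range(CELLS_PER_EXT_BLOB // 2):
        cells_dict[(BlobIndex(blob_index), CellID(cell_id))] = cells[cell_id]

matrix = recover_matrix(cells_dict, blob_count=len(blobs))
assert len(matrix) == len(blobs) * CELLS_PER_EXT_BLOB
```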
@ -164,7 +175,7 @@ def get_data_column_sidecars(signed_block: SignedBeaconBlock,
block.body,
get_generalized_index(BeaconBlockBody, 'blob_kzg_commitments'),
)
cells_and_proofs = [compute_cells_and_proofs(blob) for blob in blobs]
cells_and_proofs = [compute_cells_and_kzg_proofs(blob) for blob in blobs]
blob_count = len(blobs)
cells = [cells_and_proofs[i][0] for i in range(blob_count)]
proofs = [cells_and_proofs[i][1] for i in range(blob_count)]

View File

@ -28,7 +28,7 @@ Warning: this configuration is not definitive.
| Name | Value |
| - | - |
| `EIP7594_FORK_VERSION` | `Version('0x05000000')` |
| `EIP7594_FORK_VERSION` | `Version('0x06000000')` |
| `EIP7594_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** |
## Helper functions

View File

@ -74,7 +74,7 @@ def verify_data_column_sidecar_kzg_proofs(sidecar: DataColumnSidecar) -> bool:
row_ids = [RowIndex(i) for i in range(len(sidecar.column))]
# KZG batch verifies that the cells match the corresponding commitments and proofs
return verify_cell_proof_batch(
return verify_cell_kzg_proof_batch(
row_commitments=sidecar.kzg_commitments,
row_indices=row_ids, # all rows
column_indices=[sidecar.index],

View File

@ -13,7 +13,8 @@
- [Cells](#cells)
- [Helper functions](#helper-functions)
- [BLS12-381 helpers](#bls12-381-helpers)
- [`bytes_to_cell`](#bytes_to_cell)
- [`cell_to_coset_evals`](#cell_to_coset_evals)
- [`coset_evals_to_cell`](#coset_evals_to_cell)
- [Linear combinations](#linear-combinations)
- [`g2_lincomb`](#g2_lincomb)
- [FFTs](#ffts)
@ -36,16 +37,16 @@
- [`coset_for_cell`](#coset_for_cell)
- [Cells](#cells-1)
- [Cell computation](#cell-computation)
- [`compute_cells_and_proofs`](#compute_cells_and_proofs)
- [`compute_cells_and_kzg_proofs`](#compute_cells_and_kzg_proofs)
- [`compute_cells`](#compute_cells)
- [Cell verification](#cell-verification)
- [`verify_cell_proof`](#verify_cell_proof)
- [`verify_cell_proof_batch`](#verify_cell_proof_batch)
- [`verify_cell_kzg_proof`](#verify_cell_kzg_proof)
- [`verify_cell_kzg_proof_batch`](#verify_cell_kzg_proof_batch)
- [Reconstruction](#reconstruction)
- [`construct_vanishing_polynomial`](#construct_vanishing_polynomial)
- [`recover_shifted_data`](#recover_shifted_data)
- [`recover_original_data`](#recover_original_data)
- [`recover_polynomial`](#recover_polynomial)
- [`recover_all_cells`](#recover_all_cells)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->
@ -63,7 +64,9 @@ Public functions MUST accept raw bytes as input and perform the required cryptog
| Name | SSZ equivalent | Description |
| - | - | - |
| `PolynomialCoeff` | `List[BLSFieldElement, FIELD_ELEMENTS_PER_EXT_BLOB]` | A polynomial in coefficient form |
| `Cell` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The unit of blob data that can come with their own KZG proofs |
| `Coset` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The evaluation domain of a cell |
| `CosetEvals` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The internal representation of a cell (the evaluations over its Coset) |
| `Cell` | `ByteVector[BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL]` | The unit of blob data that can come with its own KZG proof |
| `CellID` | `uint64` | Cell identifier |
| `RowIndex` | `uint64` | Row identifier |
| `ColumnIndex` | `uint64` | Column identifier |
@ -84,21 +87,40 @@ Cells are the smallest unit of blob data that can come with their own KZG proofs
| `FIELD_ELEMENTS_PER_EXT_BLOB` | `2 * FIELD_ELEMENTS_PER_BLOB` | Number of field elements in a Reed-Solomon extended blob |
| `FIELD_ELEMENTS_PER_CELL` | `uint64(64)` | Number of field elements in a cell |
| `BYTES_PER_CELL` | `FIELD_ELEMENTS_PER_CELL * BYTES_PER_FIELD_ELEMENT` | The number of bytes in a cell |
| `CELLS_PER_BLOB` | `FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL` | The number of cells in a blob |
| `CELLS_PER_EXT_BLOB` | `FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL` | The number of cells in an extended blob |
| `RANDOM_CHALLENGE_KZG_CELL_BATCH_DOMAIN` | `b'RCKZGCBATCH__V1_'` |
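A non-normative arithmetic check of the cell constants, assuming the Deneb preset values `FIELD_ELEMENTS_PER_BLOB = 4096` and `BYTES_PER_FIELD_ELEMENT = 32` (assumptions, not part of this diff):

```python
FIELD_ELEMENTS_PER_BLOB = 4096
BYTES_PER_FIELD_ELEMENT = 32

FIELD_ELEMENTS_PER_EXT_BLOB = 2 * FIELD_ELEMENTS_PER_BLOB                     # 8192
FIELD_ELEMENTS_PER_CELL = 64
BYTES_PER_CELL = FIELD_ELEMENTS_PER_CELL * BYTES_PER_FIELD_ELEMENT            # 2048 bytes
CELLS_PER_EXT_BLOB = FIELD_ELEMENTS_PER_EXT_BLOB // FIELD_ELEMENTS_PER_CELL   # 128

assert BYTES_PER_CELL == 2048
assert CELLS_PER_EXT_BLOB == 128
```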
## Helper functions
### BLS12-381 helpers
#### `bytes_to_cell`
#### `cell_to_coset_evals`
```python
def bytes_to_cell(cell_bytes: Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]) -> Cell:
def cell_to_coset_evals(cell: Cell) -> CosetEvals:
"""
Convert untrusted bytes into a Cell.
Convert an untrusted ``Cell`` into a trusted ``CosetEvals``.
"""
return [bytes_to_bls_field(element) for element in cell_bytes]
evals = []
for i in range(FIELD_ELEMENTS_PER_CELL):
start = i * BYTES_PER_FIELD_ELEMENT
end = (i + 1) * BYTES_PER_FIELD_ELEMENT
value = bytes_to_bls_field(cell[start:end])
evals.append(value)
return CosetEvals(evals)
```
#### `coset_evals_to_cell`
```python
def coset_evals_to_cell(coset_evals: CosetEvals) -> Cell:
"""
Convert a trusted ``CosetEvals`` into an untrusted ``Cell``.
"""
cell = []
for i in range(FIELD_ELEMENTS_PER_CELL):
cell += bls_field_to_bytes(coset_evals[i])
return Cell(cell)
```
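A hedged round-trip sketch: `cell_to_coset_evals` and `coset_evals_to_cell` are inverses on valid cells, since both walk the same `FIELD_ELEMENTS_PER_CELL` slots (assumes `cell` is any valid `Cell`, e.g. one returned by `compute_cells`):

```python
evals = cell_to_coset_evals(cell)
assert len(evals) == FIELD_ELEMENTS_PER_CELL
assert coset_evals_to_cell(evals) == cell
```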
### Linear combinations
@ -106,14 +128,20 @@ def bytes_to_cell(cell_bytes: Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]) -> Cell:
#### `g2_lincomb`
```python
def g2_lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElement]) -> Bytes96:
def g2_lincomb(points: Sequence[G2Point], scalars: Sequence[BLSFieldElement]) -> Bytes96:
"""
BLS multiscalar multiplication in G2. This function can be optimized using Pippenger's algorithm and variants.
BLS multiscalar multiplication in G2. This can be naively implemented using double-and-add.
"""
assert len(points) == len(scalars)
result = bls.Z2()
for x, a in zip(points, scalars):
result = bls.add(result, bls.multiply(bls.bytes96_to_G2(x), a))
if len(points) == 0:
return bls.G2_to_bytes96(bls.Z2())
points_g2 = []
for point in points:
points_g2.append(bls.bytes96_to_G2(point))
result = bls.multi_exp(points_g2, scalars)
return Bytes96(bls.G2_to_bytes96(result))
```
@ -214,7 +242,7 @@ def divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff) -> Polynomial
"""
Long polynomial division for two coefficient form polynomials ``a`` and ``b``
"""
a = [x for x in a]
a = a.copy() # Make a copy since `a` is passed by reference
o = []
apos = len(a) - 1
bpos = len(b) - 1
@ -223,7 +251,7 @@ def divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff) -> Polynomial
quot = div(a[apos], b[bpos])
o.insert(0, quot)
for i in range(bpos, -1, -1):
a[diff + i] = (int(a[diff + i]) - int(b[i]) * int(quot)) % BLS_MODULUS
a[diff + i] = (int(a[diff + i]) - int(b[i] + BLS_MODULUS) * int(quot)) % BLS_MODULUS
apos -= 1
diff -= 1
return [x % BLS_MODULUS for x in o]
@ -264,7 +292,7 @@ def interpolate_polynomialcoeff(xs: Sequence[BLSFieldElement], ys: Sequence[BLSF
if j != i:
weight_adjustment = bls_modular_inverse(int(xs[i]) - int(xs[j]))
summand = multiply_polynomialcoeff(
summand, [(- int(weight_adjustment) * int(xs[j])) % BLS_MODULUS, weight_adjustment]
summand, [((BLS_MODULUS - int(weight_adjustment)) * int(xs[j])) % BLS_MODULUS, weight_adjustment]
)
r = add_polynomialcoeff(r, summand)
@ -280,7 +308,7 @@ def vanishing_polynomialcoeff(xs: Sequence[BLSFieldElement]) -> PolynomialCoeff:
"""
p = [1]
for x in xs:
p = multiply_polynomialcoeff(p, [-int(x), 1])
p = multiply_polynomialcoeff(p, [-int(x) + BLS_MODULUS, 1])
return p
```
@ -306,21 +334,28 @@ Extended KZG functions for multiproofs
```python
def compute_kzg_proof_multi_impl(
polynomial_coeff: PolynomialCoeff,
zs: Sequence[BLSFieldElement]) -> Tuple[KZGProof, Sequence[BLSFieldElement]]:
zs: Coset) -> Tuple[KZGProof, CosetEvals]:
"""
Helper function that computes multi-evaluation KZG proofs.
Compute a KZG multi-evaluation proof for a set of `k` points.
This is done by committing to the following quotient polynomial:
Q(X) = (f(X) - I(X)) / Z(X)
Where:
- I(X) is the degree `k-1` polynomial that agrees with f(X) at all `k` points
- Z(X) is the degree `k` polynomial that evaluates to zero on all `k` points
We further note that since the degree of I(X) is less than the degree of Z(X),
the computation can be simplified in monomial form to Q(X) = f(X) / Z(X)
"""
# For all x_i, compute p(x_i) - p(z)
# Evaluate the polynomial at each of the given points
ys = [evaluate_polynomialcoeff(polynomial_coeff, z) for z in zs]
interpolation_polynomial = interpolate_polynomialcoeff(zs, ys)
polynomial_shifted = add_polynomialcoeff(polynomial_coeff, neg_polynomialcoeff(interpolation_polynomial))
# For all x_i, compute (x_i - z)
# Compute Z(X)
denominator_poly = vanishing_polynomialcoeff(zs)
# Compute the quotient polynomial directly in evaluation form
quotient_polynomial = divide_polynomialcoeff(polynomial_shifted, denominator_poly)
# Compute the quotient polynomial directly in monomial form
quotient_polynomial = divide_polynomialcoeff(polynomial_coeff, denominator_poly)
return KZGProof(g1_lincomb(KZG_SETUP_G1_MONOMIAL[:len(quotient_polynomial)], quotient_polynomial)), ys
```
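A non-normative check of the simplification noted in the docstring, assuming the other polynomial helpers of this document are in scope and `blob` is any valid `Blob`: because `deg(I) < deg(Z)`, long division of `f(X)` by `Z(X)` yields the same quotient as long division of `f(X) - I(X)` by `Z(X)`.

```python
polynomial_coeff = polynomial_eval_to_coeff(blob_to_polynomial(blob))
zs = coset_for_cell(CellID(0))
ys = [evaluate_polynomialcoeff(polynomial_coeff, z) for z in zs]
interpolation_polynomial = interpolate_polynomialcoeff(zs, ys)
shifted = add_polynomialcoeff(polynomial_coeff, neg_polynomialcoeff(interpolation_polynomial))
denominator_poly = vanishing_polynomialcoeff(zs)

# The quotient is unchanged when the low-degree interpolation polynomial is subtracted first.
assert divide_polynomialcoeff(polynomial_coeff, denominator_poly) == \
    divide_polynomialcoeff(shifted, denominator_poly)
```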
@ -329,16 +364,30 @@ def compute_kzg_proof_multi_impl(
```python
def verify_kzg_proof_multi_impl(commitment: KZGCommitment,
zs: Sequence[BLSFieldElement],
ys: Sequence[BLSFieldElement],
zs: Coset,
ys: CosetEvals,
proof: KZGProof) -> bool:
"""
Helper function that verifies a KZG multiproof
Verify a KZG multi-evaluation proof for a set of `k` points.
This is done by checking if the following equation holds:
Q(X) Z(X) = f(X) - I(X)
Where:
f(X) is the polynomial that we want to verify opens at `k` points to `k` values
Q(X) is the quotient polynomial computed by the prover
I(X) is the degree `k-1` polynomial that evaluates to `ys` at all `zs` points
Z(X) is the polynomial that evaluates to zero on all `k` points
The verifier receives the commitments to Q(X) and f(X), so they check that the equation
holds by using the following pairing equation:
e([Q(X)]_1, [Z(X)]_2) == e([f(X)]_1 - [I(X)]_1, [1]_2)
"""
assert len(zs) == len(ys)
# Compute [Z(X)]_2
zero_poly = g2_lincomb(KZG_SETUP_G2_MONOMIAL[:len(zs) + 1], vanishing_polynomialcoeff(zs))
# Compute [I(X)]_1
interpolated_poly = g1_lincomb(KZG_SETUP_G1_MONOMIAL[:len(zs)], interpolate_polynomialcoeff(zs, ys))
return (bls.pairing_check([
@ -355,44 +404,46 @@ def verify_kzg_proof_multi_impl(commitment: KZGCommitment,
#### `coset_for_cell`
```python
def coset_for_cell(cell_id: CellID) -> Cell:
def coset_for_cell(cell_id: CellID) -> Coset:
"""
Get the coset for a given ``cell_id``
"""
assert cell_id < CELLS_PER_BLOB
assert cell_id < CELLS_PER_EXT_BLOB
roots_of_unity_brp = bit_reversal_permutation(
compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)
)
return Cell(roots_of_unity_brp[FIELD_ELEMENTS_PER_CELL * cell_id:FIELD_ELEMENTS_PER_CELL * (cell_id + 1)])
return Coset(roots_of_unity_brp[FIELD_ELEMENTS_PER_CELL * cell_id:FIELD_ELEMENTS_PER_CELL * (cell_id + 1)])
```
## Cells
### Cell computation
#### `compute_cells_and_proofs`
#### `compute_cells_and_kzg_proofs`
```python
def compute_cells_and_proofs(blob: Blob) -> Tuple[
Vector[Cell, CELLS_PER_BLOB],
Vector[KZGProof, CELLS_PER_BLOB]]:
def compute_cells_and_kzg_proofs(blob: Blob) -> Tuple[
Vector[Cell, CELLS_PER_EXT_BLOB],
Vector[KZGProof, CELLS_PER_EXT_BLOB]]:
"""
Compute all the cell proofs for one blob. This is an inefficient O(n^2) algorithm,
Compute all the cell proofs for an extended blob. This is an inefficient O(n^2) algorithm,
for a performant implementation, the FK20 algorithm, which runs in O(n log n), should be
used instead.
Public method.
"""
assert len(blob) == BYTES_PER_BLOB
polynomial = blob_to_polynomial(blob)
polynomial_coeff = polynomial_eval_to_coeff(polynomial)
cells = []
proofs = []
for i in range(CELLS_PER_BLOB):
for i in range(CELLS_PER_EXT_BLOB):
coset = coset_for_cell(i)
proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset)
cells.append(ys)
cells.append(coset_evals_to_cell(ys))
proofs.append(proof)
return cells, proofs
@ -401,53 +452,64 @@ def compute_cells_and_proofs(blob: Blob) -> Tuple[
#### `compute_cells`
```python
def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_BLOB]:
def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]:
"""
Compute the cell data for a blob (without computing the proofs).
Compute the cell data for an extended blob (without computing the proofs).
Public method.
"""
assert len(blob) == BYTES_PER_BLOB
polynomial = blob_to_polynomial(blob)
polynomial_coeff = polynomial_eval_to_coeff(polynomial)
extended_data = fft_field(polynomial_coeff + [0] * FIELD_ELEMENTS_PER_BLOB,
compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB))
extended_data_rbo = bit_reversal_permutation(extended_data)
return [extended_data_rbo[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL]
for i in range(CELLS_PER_BLOB)]
cells = []
for cell_id in range(CELLS_PER_EXT_BLOB):
start = cell_id * FIELD_ELEMENTS_PER_CELL
end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL
cells.append(coset_evals_to_cell(extended_data_rbo[start:end]))
return cells
```
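A hedged usage sketch, mirroring the `test_compute_extended_matrix` assertions further below: the first half of the returned cells re-encodes the original blob, because the extension is systematic in bit-reversed order (assumes `blob` is a valid `Blob`):

```python
cells = compute_cells(blob)
assert len(cells) == CELLS_PER_EXT_BLOB

original_evals = []
for cell in cells[:CELLS_PER_EXT_BLOB // 2]:
    original_evals.extend(cell_to_coset_evals(cell))
assert b''.join(bls_field_to_bytes(x) for x in original_evals) == blob
```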
### Cell verification
#### `verify_cell_proof`
#### `verify_cell_kzg_proof`
```python
def verify_cell_proof(commitment_bytes: Bytes48,
cell_id: CellID,
cell_bytes: Vector[Bytes32, FIELD_ELEMENTS_PER_CELL],
proof_bytes: Bytes48) -> bool:
def verify_cell_kzg_proof(commitment_bytes: Bytes48,
cell_id: CellID,
cell: Cell,
proof_bytes: Bytes48) -> bool:
"""
Check a cell proof
Public method.
"""
assert len(commitment_bytes) == BYTES_PER_COMMITMENT
assert cell_id < CELLS_PER_EXT_BLOB
assert len(cell) == BYTES_PER_CELL
assert len(proof_bytes) == BYTES_PER_PROOF
coset = coset_for_cell(cell_id)
return verify_kzg_proof_multi_impl(
bytes_to_kzg_commitment(commitment_bytes),
coset,
bytes_to_cell(cell_bytes),
cell_to_coset_evals(cell),
bytes_to_kzg_proof(proof_bytes))
```
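A hedged usage sketch, following the same flow as `test_verify_cell_kzg_proof` further below (`blob` is any valid `Blob`):

```python
commitment = blob_to_kzg_commitment(blob)
cells, proofs = compute_cells_and_kzg_proofs(blob)

cell_id = CellID(0)
assert verify_cell_kzg_proof(commitment, cell_id, cells[cell_id], proofs[cell_id])
```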
#### `verify_cell_proof_batch`
#### `verify_cell_kzg_proof_batch`
```python
def verify_cell_proof_batch(row_commitments_bytes: Sequence[Bytes48],
row_indices: Sequence[RowIndex],
column_indices: Sequence[ColumnIndex],
cells_bytes: Sequence[Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]],
proofs_bytes: Sequence[Bytes48]) -> bool:
def verify_cell_kzg_proof_batch(row_commitments_bytes: Sequence[Bytes48],
row_indices: Sequence[RowIndex],
column_indices: Sequence[ColumnIndex],
cells: Sequence[Cell],
proofs_bytes: Sequence[Bytes48]) -> bool:
"""
Verify a set of cells, given their corresponding proofs and their coordinates (row_id, column_id) in the blob
matrix. The list of all commitments is also provided in row_commitments_bytes.
@ -462,19 +524,29 @@ def verify_cell_proof_batch(row_commitments_bytes: Sequence[Bytes48],
Public method.
"""
assert len(cells_bytes) == len(proofs_bytes) == len(row_indices) == len(column_indices)
assert len(cells) == len(proofs_bytes) == len(row_indices) == len(column_indices)
for commitment_bytes in row_commitments_bytes:
assert len(commitment_bytes) == BYTES_PER_COMMITMENT
for row_index in row_indices:
assert row_index < len(row_commitments_bytes)
for column_index in column_indices:
assert column_index < CELLS_PER_EXT_BLOB
for cell in cells:
assert len(cell) == BYTES_PER_CELL
for proof_bytes in proofs_bytes:
assert len(proof_bytes) == BYTES_PER_PROOF
# Get commitments via row IDs
commitments_bytes = [row_commitments_bytes[row_index] for row_index in row_indices]
# Get objects from bytes
commitments = [bytes_to_kzg_commitment(commitment_bytes) for commitment_bytes in commitments_bytes]
cells = [bytes_to_cell(cell_bytes) for cell_bytes in cells_bytes]
cosets_evals = [cell_to_coset_evals(cell) for cell in cells]
proofs = [bytes_to_kzg_proof(proof_bytes) for proof_bytes in proofs_bytes]
return all(
verify_kzg_proof_multi_impl(commitment, coset_for_cell(column_index), cell, proof)
for commitment, column_index, cell, proof in zip(commitments, column_indices, cells, proofs)
verify_kzg_proof_multi_impl(commitment, coset_for_cell(column_index), coset_evals, proof)
for commitment, column_index, coset_evals, proof in zip(commitments, column_indices, cosets_evals, proofs)
)
```
@ -491,11 +563,11 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[
corresponds to a missing field element.
"""
# Get the small domain
roots_of_unity_reduced = compute_roots_of_unity(CELLS_PER_BLOB)
roots_of_unity_reduced = compute_roots_of_unity(CELLS_PER_EXT_BLOB)
# Compute polynomial that vanishes at all the missing cells (over the small domain)
short_zero_poly = vanishing_polynomialcoeff([
roots_of_unity_reduced[reverse_bits(missing_cell_id, CELLS_PER_BLOB)]
roots_of_unity_reduced[reverse_bits(missing_cell_id, CELLS_PER_EXT_BLOB)]
for missing_cell_id in missing_cell_ids
])
@ -510,7 +582,7 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[
zero_poly_eval_brp = bit_reversal_permutation(zero_poly_eval)
# Sanity check
for cell_id in range(CELLS_PER_BLOB):
for cell_id in range(CELLS_PER_EXT_BLOB):
start = cell_id * FIELD_ELEMENTS_PER_CELL
end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL
if cell_id in missing_cell_ids:
@ -518,7 +590,7 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[
else: # cell_id in cell_ids
assert all(a != 0 for a in zero_poly_eval_brp[start:end])
return zero_poly_coeff, zero_poly_eval, zero_poly_eval_brp
return zero_poly_coeff, zero_poly_eval
```
### `recover_shifted_data`
@ -588,14 +660,14 @@ def recover_original_data(eval_shifted_extended_evaluation: Sequence[BLSFieldEle
return reconstructed_data
```
### `recover_polynomial`
### `recover_all_cells`
```python
def recover_polynomial(cell_ids: Sequence[CellID],
cells_bytes: Sequence[Vector[Bytes32, FIELD_ELEMENTS_PER_CELL]]) -> Polynomial:
def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequence[Cell]:
"""
Recover original polynomial from FIELD_ELEMENTS_PER_EXT_BLOB evaluations, half of which can be missing. This
algorithm uses FFTs to recover cells faster than using Lagrange implementation, as can be seen here:
Recover all of the cells in the extended blob from FIELD_ELEMENTS_PER_EXT_BLOB evaluations,
half of which can be missing.
This algorithm uses FFTs to recover cells faster than a Lagrange interpolation implementation, as can be seen here:
https://ethresear.ch/t/reed-solomon-erasure-code-recovery-in-n-log-2-n-time-with-ffts/3039
A faster version thanks to Qi Zhou can be found here:
@ -603,24 +675,27 @@ def recover_polynomial(cell_ids: Sequence[CellID],
Public method.
"""
assert len(cell_ids) == len(cells_bytes)
assert len(cell_ids) == len(cells)
# Check we have enough cells to be able to perform the reconstruction
assert CELLS_PER_BLOB / 2 <= len(cell_ids) <= CELLS_PER_BLOB
assert CELLS_PER_EXT_BLOB / 2 <= len(cell_ids) <= CELLS_PER_EXT_BLOB
# Check for duplicates
assert len(cell_ids) == len(set(cell_ids))
# Check that each cell is the correct length
for cell in cells:
assert len(cell) == BYTES_PER_CELL
# Get the extended domain
roots_of_unity_extended = compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)
# Convert from bytes to cells
cells = [bytes_to_cell(cell_bytes) for cell_bytes in cells_bytes]
# Convert cells to coset evals
cosets_evals = [cell_to_coset_evals(cell) for cell in cells]
missing_cell_ids = [cell_id for cell_id in range(CELLS_PER_BLOB) if cell_id not in cell_ids]
zero_poly_coeff, zero_poly_eval, zero_poly_eval_brp = construct_vanishing_polynomial(missing_cell_ids)
missing_cell_ids = [cell_id for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids]
zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_ids)
eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data(
cell_ids,
cells,
cosets_evals,
zero_poly_eval,
zero_poly_coeff,
roots_of_unity_extended,
@ -633,10 +708,14 @@ def recover_polynomial(cell_ids: Sequence[CellID],
roots_of_unity_extended,
)
for cell_id, cell in zip(cell_ids, cells):
for cell_id, coset_evals in zip(cell_ids, cosets_evals):
start = cell_id * FIELD_ELEMENTS_PER_CELL
end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL
assert reconstructed_data[start:end] == cell
assert reconstructed_data[start:end] == coset_evals
return reconstructed_data
reconstructed_data_as_cells = [
coset_evals_to_cell(reconstructed_data[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL])
for i in range(CELLS_PER_EXT_BLOB)]
return reconstructed_data_as_cells
```

View File

@ -18,6 +18,7 @@
- [`reverse_bits`](#reverse_bits)
- [`bit_reversal_permutation`](#bit_reversal_permutation)
- [BLS12-381 helpers](#bls12-381-helpers)
- [`multi_exp`](#multi_exp)
- [`hash_to_bls_field`](#hash_to_bls_field)
- [`bytes_to_bls_field`](#bytes_to_bls_field)
- [`bls_field_to_bytes`](#bls_field_to_bytes)
@ -146,6 +147,18 @@ def bit_reversal_permutation(sequence: Sequence[T]) -> Sequence[T]:
### BLS12-381 helpers
#### `multi_exp`
This function performs a multi-scalar multiplication between `points` and `integers`. `points` can either be in G1 or G2.
```python
def multi_exp(points: Sequence[TPoint],
integers: Sequence[uint64]) -> Sequence[TPoint]:
# pylint: disable=unused-argument
...
```
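Since the body above is left abstract, here is a hedged, non-normative reference sketch of what an unoptimized implementation could look like, using the `bls.add`/`bls.multiply` primitives that appear elsewhere in this diff; real clients would use an optimized multi-scalar multiplication such as Pippenger's algorithm. The name `naive_multi_exp` and the `zero_point` parameter are illustrative, not part of the spec.

```python
# Non-normative sketch; `zero_point` would be bls.Z1() for G1 inputs or bls.Z2() for G2 inputs.
def naive_multi_exp(points: Sequence[TPoint],
                    integers: Sequence[uint64],
                    zero_point: TPoint) -> TPoint:
    assert len(points) == len(integers)
    result = zero_point
    for point, scalar in zip(points, integers):
        result = bls.add(result, bls.multiply(point, scalar))
    return result
```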
#### `hash_to_bls_field`
```python
@ -274,12 +287,18 @@ def div(x: BLSFieldElement, y: BLSFieldElement) -> BLSFieldElement:
```python
def g1_lincomb(points: Sequence[KZGCommitment], scalars: Sequence[BLSFieldElement]) -> KZGCommitment:
"""
BLS multiscalar multiplication. This function can be optimized using Pippenger's algorithm and variants.
BLS multiscalar multiplication in G1. This can be naively implemented using double-and-add.
"""
assert len(points) == len(scalars)
result = bls.Z1()
for x, a in zip(points, scalars):
result = bls.add(result, bls.multiply(bls.bytes48_to_G1(x), a))
if len(points) == 0:
return bls.G1_to_bytes48(bls.Z1())
points_g1 = []
for point in points:
points_g1.append(bls.bytes48_to_G1(point))
result = bls.multi_exp(points_g1, scalars)
return KZGCommitment(bls.G1_to_bytes48(result))
```

View File

@ -229,7 +229,7 @@ class PendingPartialWithdrawal(Container):
```
#### `ExecutionLayerWithdrawalRequest`
*Note*: The container is new in EIP7251.
*Note*: The container is new in EIP7251:EIP7002.
```python
class ExecutionLayerWithdrawalRequest(Container):

View File

@ -149,15 +149,18 @@ def upgrade_to_electra(pre: deneb.BeaconState) -> BeaconState:
deposit_receipts_start_index=UNSET_DEPOSIT_RECEIPTS_START_INDEX,
# [New in Electra:EIP7251]
deposit_balance_to_consume=0,
exit_balance_to_consume=get_activation_exit_churn_limit(pre),
exit_balance_to_consume=0,
earliest_exit_epoch=earliest_exit_epoch,
consolidation_balance_to_consume=get_consolidation_churn_limit(pre),
consolidation_balance_to_consume=0,
earliest_consolidation_epoch=compute_activation_exit_epoch(get_current_epoch(pre)),
pending_balance_deposits=[],
pending_partial_withdrawals=[],
pending_consolidations=[],
)
post.exit_balance_to_consume = get_activation_exit_churn_limit(post)
post.consolidation_balance_to_consume = get_consolidation_churn_limit(post)
# [New in Electra:EIP7251]
# add validators that are not yet active to pending balance deposits
pre_activation = sorted([

View File

@ -1 +1 @@
1.5.0-alpha.0
1.5.0-alpha.1

View File

@ -338,26 +338,30 @@ def run_randomized_non_validated_execution_fields_test(spec, state, execution_va
@with_bellatrix_and_later
@spec_state_test
def test_randomized_non_validated_execution_fields_first_payload__execution_valid(spec, state):
rng = Random(1111)
state = build_state_with_incomplete_transition(spec, state)
yield from run_randomized_non_validated_execution_fields_test(spec, state)
yield from run_randomized_non_validated_execution_fields_test(spec, state, rng=rng)
@with_bellatrix_and_later
@spec_state_test
def test_randomized_non_validated_execution_fields_regular_payload__execution_valid(spec, state):
rng = Random(2222)
state = build_state_with_complete_transition(spec, state)
yield from run_randomized_non_validated_execution_fields_test(spec, state)
yield from run_randomized_non_validated_execution_fields_test(spec, state, rng=rng)
@with_bellatrix_and_later
@spec_state_test
def test_invalid_randomized_non_validated_execution_fields_first_payload__execution_invalid(spec, state):
rng = Random(3333)
state = build_state_with_incomplete_transition(spec, state)
yield from run_randomized_non_validated_execution_fields_test(spec, state, execution_valid=False)
yield from run_randomized_non_validated_execution_fields_test(spec, state, execution_valid=False, rng=rng)
@with_bellatrix_and_later
@spec_state_test
def test_invalid_randomized_non_validated_execution_fields_regular_payload__execution_invalid(spec, state):
rng = Random(4444)
state = build_state_with_complete_transition(spec, state)
yield from run_randomized_non_validated_execution_fields_test(spec, state, execution_valid=False)
yield from run_randomized_non_validated_execution_fields_test(spec, state, execution_valid=False, rng=rng)

View File

@ -0,0 +1,89 @@
import random
from eth2spec.test.context import (
single_phase,
spec_test,
with_eip7594_and_later,
)
def _run_get_custody_columns(spec, rng, node_id=None, custody_subnet_count=None):
if node_id is None:
node_id = rng.randint(0, 2**256 - 1)
if custody_subnet_count is None:
custody_subnet_count = rng.randint(0, spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT)
result = spec.get_custody_columns(node_id, custody_subnet_count)
yield 'node_id', 'meta', node_id
yield 'custody_subnet_count', 'meta', custody_subnet_count
assert len(result) == len(set(result))
assert len(result) == (
custody_subnet_count * spec.config.NUMBER_OF_COLUMNS // spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT
)
assert all(i < spec.config.NUMBER_OF_COLUMNS for i in result)
python_list_result = [int(i) for i in result]
yield 'result', 'meta', python_list_result
@with_eip7594_and_later
@spec_test
@single_phase
def test_get_custody_columns__min_node_id_min_custody_subnet_count(spec):
rng = random.Random(1111)
yield from _run_get_custody_columns(spec, rng, node_id=0, custody_subnet_count=0)
@with_eip7594_and_later
@spec_test
@single_phase
def test_get_custody_columns__min_node_id_max_custody_subnet_count(spec):
rng = random.Random(1111)
yield from _run_get_custody_columns(
spec, rng, node_id=0,
custody_subnet_count=spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT)
@with_eip7594_and_later
@spec_test
@single_phase
def test_get_custody_columns__max_node_id_min_custody_subnet_count(spec):
rng = random.Random(1111)
yield from _run_get_custody_columns(spec, rng, node_id=2**256 - 1, custody_subnet_count=0)
@with_eip7594_and_later
@spec_test
@single_phase
def test_get_custody_columns__max_node_id_max_custody_subnet_count(spec):
rng = random.Random(1111)
yield from _run_get_custody_columns(
spec, rng, node_id=2**256 - 1,
custody_subnet_count=spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT,
)
@with_eip7594_and_later
@spec_test
@single_phase
def test_get_custody_columns__1(spec):
rng = random.Random(1111)
yield from _run_get_custody_columns(spec, rng)
@with_eip7594_and_later
@spec_test
@single_phase
def test_get_custody_columns__2(spec):
rng = random.Random(2222)
yield from _run_get_custody_columns(spec, rng)
@with_eip7594_and_later
@spec_test
@single_phase
def test_get_custody_columns__3(spec):
rng = random.Random(3333)
yield from _run_get_custody_columns(spec, rng)

View File

@ -18,16 +18,17 @@ def test_compute_extended_matrix(spec):
blob_count = 2
input_blobs = [get_sample_blob(spec, rng=rng) for _ in range(blob_count)]
extended_matrix = spec.compute_extended_matrix(input_blobs)
assert len(extended_matrix) == spec.CELLS_PER_BLOB * blob_count
assert len(extended_matrix) == spec.CELLS_PER_EXT_BLOB * blob_count
rows = [extended_matrix[i:(i + spec.CELLS_PER_BLOB)] for i in range(0, len(extended_matrix), spec.CELLS_PER_BLOB)]
rows = [extended_matrix[i:(i + spec.CELLS_PER_EXT_BLOB)]
for i in range(0, len(extended_matrix), spec.CELLS_PER_EXT_BLOB)]
assert len(rows) == blob_count
assert len(rows[0]) == spec.CELLS_PER_BLOB
assert len(rows[0]) == spec.CELLS_PER_EXT_BLOB
for blob_index, row in enumerate(rows):
extended_blob = []
for cell in row:
extended_blob.extend(cell)
extended_blob.extend(spec.cell_to_coset_evals(cell))
blob_part = extended_blob[0:len(extended_blob) // 2]
blob = b''.join([spec.bls_field_to_bytes(x) for x in blob_part])
assert blob == input_blobs[blob_index]
@ -40,7 +41,7 @@ def test_recover_matrix(spec):
rng = random.Random(5566)
# Number of samples we will be recovering from
N_SAMPLES = spec.CELLS_PER_BLOB // 2
N_SAMPLES = spec.CELLS_PER_EXT_BLOB // 2
blob_count = 2
cells_dict = {}
@ -54,9 +55,9 @@ def test_recover_matrix(spec):
cell_ids = []
# First figure out just the indices of the cells
for _ in range(N_SAMPLES):
cell_id = rng.randint(0, spec.CELLS_PER_BLOB - 1)
cell_id = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1)
while cell_id in cell_ids:
cell_id = rng.randint(0, spec.CELLS_PER_BLOB - 1)
cell_id = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1)
cell_ids.append(cell_id)
cell = cells[cell_id]
cells_dict[(blob_index, cell_id)] = cell

View File

@ -31,35 +31,32 @@ def test_fft(spec):
@with_eip7594_and_later
@spec_test
@single_phase
def test_verify_cell_proof(spec):
def test_verify_cell_kzg_proof(spec):
blob = get_sample_blob(spec)
commitment = spec.blob_to_kzg_commitment(blob)
cells, proofs = spec.compute_cells_and_proofs(blob)
cells_bytes = [[spec.bls_field_to_bytes(element) for element in cell] for cell in cells]
cells, proofs = spec.compute_cells_and_kzg_proofs(blob)
cell_id = 0
assert spec.verify_cell_proof(commitment, cell_id, cells_bytes[cell_id], proofs[cell_id])
assert spec.verify_cell_kzg_proof(commitment, cell_id, cells[cell_id], proofs[cell_id])
cell_id = 1
assert spec.verify_cell_proof(commitment, cell_id, cells_bytes[cell_id], proofs[cell_id])
assert spec.verify_cell_kzg_proof(commitment, cell_id, cells[cell_id], proofs[cell_id])
@with_eip7594_and_later
@spec_test
@single_phase
def test_verify_cell_proof_batch(spec):
def test_verify_cell_kzg_proof_batch(spec):
blob = get_sample_blob(spec)
commitment = spec.blob_to_kzg_commitment(blob)
cells, proofs = spec.compute_cells_and_proofs(blob)
cells_bytes = [[spec.bls_field_to_bytes(element) for element in cell] for cell in cells]
cells, proofs = spec.compute_cells_and_kzg_proofs(blob)
assert len(cells) == len(proofs)
assert spec.verify_cell_proof_batch(
assert spec.verify_cell_kzg_proof_batch(
row_commitments_bytes=[commitment],
row_indices=[0, 0],
column_indices=[0, 4],
cells_bytes=[cells_bytes[0], cells_bytes[4]],
cells=[cells[0], cells[4]],
proofs_bytes=[proofs[0], proofs[4]],
)
@ -67,41 +64,39 @@ def test_verify_cell_proof_batch(spec):
@with_eip7594_and_later
@spec_test
@single_phase
def test_recover_polynomial(spec):
def test_recover_all_cells(spec):
rng = random.Random(5566)
# Number of samples we will be recovering from
N_SAMPLES = spec.CELLS_PER_BLOB // 2
N_SAMPLES = spec.CELLS_PER_EXT_BLOB // 2
# Get the data we will be working with
blob = get_sample_blob(spec)
# Get the data in evaluation form
original_polynomial = spec.blob_to_polynomial(blob)
# Extend data with Reed-Solomon and split the extended data in cells
cells = spec.compute_cells(blob)
cells_bytes = [[spec.bls_field_to_bytes(element) for element in cell] for cell in cells]
# Compute the cells we will be recovering from
cell_ids = []
# First figure out just the indices of the cells
for i in range(N_SAMPLES):
j = rng.randint(0, spec.CELLS_PER_BLOB - 1)
j = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1)
while j in cell_ids:
j = rng.randint(0, spec.CELLS_PER_BLOB - 1)
j = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1)
cell_ids.append(j)
# Now the cells themselves
known_cells_bytes = [cells_bytes[cell_id] for cell_id in cell_ids]
known_cells = [cells[cell_id] for cell_id in cell_ids]
# Recover the data
recovered_data = spec.recover_polynomial(cell_ids, known_cells_bytes)
# Recover all of the cells
recovered_cells = spec.recover_all_cells(cell_ids, known_cells)
recovered_data = [x for xs in recovered_cells for x in xs]
# Check that the original data match the non-extended portion of the recovered data
assert original_polynomial == recovered_data[:len(recovered_data) // 2]
blob_byte_array = [b for b in blob]
assert blob_byte_array == recovered_data[:len(recovered_data) // 2]
# Now flatten the cells and check that they match the entirety of the recovered data
flattened_cells = [x for xs in cells for x in xs]
assert flattened_cells == recovered_data
# Check that the recovered cells match the original cells
assert cells == recovered_cells
@with_eip7594_and_later

View File

@ -18,6 +18,7 @@ def test_invariants(spec):
assert spec.config.MAX_REQUEST_DATA_COLUMN_SIDECARS == (
spec.config.MAX_REQUEST_BLOCKS_DENEB * spec.config.NUMBER_OF_COLUMNS
)
assert spec.config.MAX_CELLS_IN_EXTENDED_MATRIX == spec.MAX_BLOBS_PER_BLOCK * spec.config.NUMBER_OF_COLUMNS
@with_eip7594_and_later

View File

@ -404,251 +404,6 @@ def test_consolidation_balance_through_two_churn_epochs(spec, state):
assert state.consolidation_balance_to_consume == expected_balance
@with_electra_and_later
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
def test_multiple_consolidations_below_churn(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
yield "pre", state
# Prepare a bunch of consolidations, based on the current state
consolidations = []
for i in range(3):
source_index = 2 * i
target_index = 2 * i + 1
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch,
source_index=source_index,
target_index=target_index,
),
source_privkey,
target_privkey,
)
consolidations.append(signed_consolidation)
# Now run all the consolidations
for consolidation in consolidations:
# the function yields data, but we are just interested in running it here, ignore yields.
for _ in run_consolidation_processing(spec, state, consolidation):
continue
yield "post", state
expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
assert state.earliest_consolidation_epoch == expected_exit_epoch
assert (
state.consolidation_balance_to_consume
== consolidation_churn_limit - 3 * spec.MIN_ACTIVATION_BALANCE
)
for i in range(3):
assert state.validators[2 * i].exit_epoch == expected_exit_epoch
@with_electra_and_later
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
def test_multiple_consolidations_equal_churn(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
yield "pre", state
# Prepare a bunch of consolidations, based on the current state
consolidations = []
for i in range(4):
source_index = 2 * i
target_index = 2 * i + 1
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch,
source_index=source_index,
target_index=target_index,
),
source_privkey,
target_privkey,
)
consolidations.append(signed_consolidation)
# Now run all the consolidations
for consolidation in consolidations:
# the function yields data, but we are just interested in running it here, ignore yields.
for _ in run_consolidation_processing(spec, state, consolidation):
continue
yield "post", state
expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
assert state.earliest_consolidation_epoch == expected_exit_epoch
assert state.consolidation_balance_to_consume == 0
for i in range(4):
assert state.validators[2 * i].exit_epoch == expected_exit_epoch
@with_electra_and_later
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
def test_multiple_consolidations_above_churn(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
# Prepare a bunch of consolidations, based on the current state
consolidations = []
for i in range(4):
source_index = 2 * i
target_index = 2 * i + 1
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch,
source_index=source_index,
target_index=target_index,
),
source_privkey,
target_privkey,
)
consolidations.append(signed_consolidation)
# Now run all the consolidations
for consolidation in consolidations:
# the function yields data, but we are just interested in running it here, ignore yields.
for _ in run_consolidation_processing(spec, state, consolidation):
continue
# consolidate an additional validator
source_index = spec.get_active_validator_indices(state, current_epoch)[-2]
target_index = spec.get_active_validator_indices(state, current_epoch)[-1]
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch, source_index=source_index, target_index=target_index
),
source_privkey,
target_privkey,
)
# This is the interesting part of the test: on a pre-state with full consolidation queue,
# when processing an additional consolidation, it results in an exit in a later epoch
yield from run_consolidation_processing(spec, state, signed_consolidation)
expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
assert state.earliest_consolidation_epoch == expected_exit_epoch + 1
assert (
state.consolidation_balance_to_consume
== consolidation_churn_limit - spec.MIN_ACTIVATION_BALANCE
)
assert state.validators[source_index].exit_epoch == expected_exit_epoch + 1
for i in range(4):
assert state.validators[2 * i].exit_epoch == expected_exit_epoch
@with_electra_and_later
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
def test_multiple_consolidations_equal_twice_churn(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
yield "pre", state
# Prepare a bunch of consolidations, based on the current state
consolidations = []
for i in range(8):
source_index = 2 * i
target_index = 2 * i + 1
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch,
source_index=source_index,
target_index=target_index,
),
source_privkey,
target_privkey,
)
consolidations.append(signed_consolidation)
# Now run all the consolidations
for consolidation in consolidations:
# the function yields data, but we are just interested in running it here, ignore yields.
for _ in run_consolidation_processing(spec, state, consolidation):
continue
yield "post", state
first_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
assert state.consolidation_balance_to_consume == 0
assert state.earliest_consolidation_epoch == first_exit_epoch + 1
for i in range(4):
assert state.validators[2 * i].exit_epoch == first_exit_epoch
for i in range(4, 8):
assert state.validators[2 * i].exit_epoch == first_exit_epoch + 1
# Failing tests
@with_electra_and_later
@ -856,9 +611,43 @@ def test_invalid_different_credentials(spec, state):
@with_electra_and_later
@spec_state_test
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
@always_bls
def test_invalid_source_signature(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
current_epoch = spec.get_current_epoch(state)
source_index = spec.get_active_validator_indices(state, current_epoch)[0]
target_index = spec.get_active_validator_indices(state, current_epoch)[1]
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch, source_index=source_index, target_index=target_index
),
source_privkey,
target_privkey,
)
# Set earliest consolidation epoch to the expected exit epoch
expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
state.earliest_consolidation_epoch = expected_exit_epoch
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
source_privkey = pubkey_to_privkey[state.validators[0].pubkey]
target_privkey = pubkey_to_privkey[state.validators[1].pubkey]
@ -872,17 +661,53 @@ def test_invalid_source_signature(spec, state):
source_privkey,
target_privkey,
)
# Change the pubkey of the source validator, invalidating its signature
state.validators[0].pubkey = state.validators[1].pubkey
yield from run_consolidation_processing(
spec, state, signed_consolidation, valid=False
)
@with_electra_and_later
@spec_state_test
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
@always_bls
def test_invalid_target_signature(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
current_epoch = spec.get_current_epoch(state)
source_index = spec.get_active_validator_indices(state, current_epoch)[0]
target_index = spec.get_active_validator_indices(state, current_epoch)[1]
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch, source_index=source_index, target_index=target_index
),
source_privkey,
target_privkey,
)
# Set earliest consolidation epoch to the expected exit epoch
expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
state.earliest_consolidation_epoch = expected_exit_epoch
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
source_privkey = pubkey_to_privkey[state.validators[0].pubkey]
target_privkey = pubkey_to_privkey[state.validators[1].pubkey]
@ -896,8 +721,10 @@ def test_invalid_target_signature(spec, state):
source_privkey,
target_privkey,
)
# Change the pubkey of the target validator, invalidating its signature
state.validators[1].pubkey = state.validators[2].pubkey
yield from run_consolidation_processing(
spec, state, signed_consolidation, valid=False
)

View File

@ -7,6 +7,6 @@ from eth2spec.test.phase0.epoch_processing.test_process_effective_balance_update
@with_electra_and_later
@spec_state_test
def test_effective_balance_hysteresis_with_compounding_credentials(spec, state):
run_test_effective_balance_hysteresis(
yield from run_test_effective_balance_hysteresis(
spec, state, with_compounding_credentials=True
)

View File

@ -0,0 +1,82 @@
from eth2spec.test.context import (
with_phases,
with_custom_state,
with_presets,
spec_test, with_state,
low_balances, misc_balances, large_validator_set,
)
from eth2spec.test.utils import with_meta_tags
from eth2spec.test.helpers.constants import (
DENEB, ELECTRA,
MINIMAL,
)
from eth2spec.test.helpers.state import (
next_epoch,
next_epoch_via_block,
)
from eth2spec.test.helpers.electra.fork import (
ELECTRA_FORK_TEST_META_TAGS,
run_fork_test,
)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_state
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_fork_base_state(spec, phases, state):
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_state
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_fork_next_epoch(spec, phases, state):
next_epoch(spec, state)
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_state
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_fork_next_epoch_with_block(spec, phases, state):
next_epoch_via_block(spec, state)
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_state
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_fork_many_next_epoch(spec, phases, state):
for _ in range(3):
next_epoch(spec, state)
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@spec_test
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_fork_random_low_balances(spec, phases, state):
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@with_custom_state(balances_fn=misc_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@spec_test
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_fork_random_misc_balances(spec, phases, state):
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@with_presets([MINIMAL],
reason="mainnet config leads to larger validator set than limit of public/private keys pre-generated")
@with_custom_state(balances_fn=large_validator_set, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@spec_test
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_fork_random_large_validator_set(spec, phases, state):
yield from run_fork_test(phases[ELECTRA], state)

View File

@ -0,0 +1,84 @@
from random import Random
from eth2spec.test.context import (
with_phases,
with_custom_state,
with_presets,
spec_test, with_state,
low_balances, misc_balances, large_validator_set,
)
from eth2spec.test.utils import with_meta_tags
from eth2spec.test.helpers.constants import (
DENEB, ELECTRA,
MINIMAL,
)
from eth2spec.test.helpers.electra.fork import (
ELECTRA_FORK_TEST_META_TAGS,
run_fork_test,
)
from eth2spec.test.helpers.random import randomize_state
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_state
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_electra_fork_random_0(spec, phases, state):
randomize_state(spec, state, rng=Random(1010))
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_state
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_electra_fork_random_1(spec, phases, state):
randomize_state(spec, state, rng=Random(2020))
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_state
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_electra_fork_random_2(spec, phases, state):
randomize_state(spec, state, rng=Random(3030))
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_state
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_electra_fork_random_3(spec, phases, state):
randomize_state(spec, state, rng=Random(4040))
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_electra_fork_random_low_balances(spec, phases, state):
randomize_state(spec, state, rng=Random(5050))
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@spec_test
@with_custom_state(balances_fn=misc_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_electra_fork_random_misc_balances(spec, phases, state):
randomize_state(spec, state, rng=Random(6060))
yield from run_fork_test(phases[ELECTRA], state)
@with_phases(phases=[DENEB], other_phases=[ELECTRA])
@with_presets([MINIMAL],
reason="mainnet config leads to larger validator set than limit of public/private keys pre-generated")
@spec_test
@with_custom_state(balances_fn=large_validator_set, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@with_meta_tags(ELECTRA_FORK_TEST_META_TAGS)
def test_electra_fork_random_large_validator_set(spec, phases, state):
randomize_state(spec, state, rng=Random(7070))
yield from run_fork_test(phases[ELECTRA], state)

View File

@ -1 +1,2 @@
from .test_consolidation import * # noqa: F401 F403
from .test_deposit_transition import * # noqa: F401 F403

View File

@ -0,0 +1,271 @@
from eth2spec.test.context import (
with_electra_and_later,
with_presets,
spec_test,
single_phase,
with_custom_state,
scaled_churn_balances_exceed_activation_exit_churn_limit,
default_activation_threshold,
)
from eth2spec.test.helpers.block import (
build_empty_block_for_next_slot
)
from eth2spec.test.helpers.consolidations import (
sign_consolidation,
)
from eth2spec.test.helpers.constants import MINIMAL
from eth2spec.test.helpers.keys import pubkey_to_privkey
from eth2spec.test.helpers.state import (
state_transition_and_sign_block,
)
from eth2spec.test.helpers.withdrawals import (
set_eth1_withdrawal_credential_with_balance,
)
@with_electra_and_later
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
def test_multiple_consolidations_below_churn(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
yield "pre", state
# Prepare a bunch of consolidations, each of them in a block, based on the current state
blocks = []
consolidation_count = 3
for i in range(consolidation_count):
source_index = 2 * i
target_index = 2 * i + 1
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch,
source_index=source_index,
target_index=target_index,
),
source_privkey,
target_privkey,
)
block = build_empty_block_for_next_slot(spec, state)
block.body.consolidations = [signed_consolidation]
signed_block = state_transition_and_sign_block(spec, state, block)
blocks.append(signed_block)
yield "blocks", blocks
yield "post", state
expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
assert state.earliest_consolidation_epoch == expected_exit_epoch
assert (
state.consolidation_balance_to_consume
== consolidation_churn_limit - 3 * spec.MIN_ACTIVATION_BALANCE
)
for i in range(consolidation_count):
assert state.validators[2 * i].exit_epoch == expected_exit_epoch
@with_electra_and_later
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
def test_multiple_consolidations_equal_churn(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
yield "pre", state
# Prepare a bunch of consolidations, each of them in a block, based on the current state
blocks = []
consolidation_count = 4
for i in range(consolidation_count):
source_index = 2 * i
target_index = 2 * i + 1
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch,
source_index=source_index,
target_index=target_index,
),
source_privkey,
target_privkey,
)
block = build_empty_block_for_next_slot(spec, state)
block.body.consolidations = [signed_consolidation]
signed_block = state_transition_and_sign_block(spec, state, block)
blocks.append(signed_block)
yield "blocks", blocks
yield "post", state
expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
assert state.earliest_consolidation_epoch == expected_exit_epoch
assert state.consolidation_balance_to_consume == 0
for i in range(consolidation_count):
assert state.validators[2 * i].exit_epoch == expected_exit_epoch
@with_electra_and_later
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
def test_multiple_consolidations_above_churn(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
    current_epoch = spec.get_current_epoch(state)
    yield "pre", state
    # Prepare a bunch of consolidations, each of them in a block, based on the current state
blocks = []
consolidation_count = 4
for i in range(consolidation_count):
source_index = 2 * i
target_index = 2 * i + 1
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch,
source_index=source_index,
target_index=target_index,
),
source_privkey,
target_privkey,
)
block = build_empty_block_for_next_slot(spec, state)
block.body.consolidations = [signed_consolidation]
signed_block = state_transition_and_sign_block(spec, state, block)
blocks.append(signed_block)
# consolidate an additional validator
source_index = spec.get_active_validator_indices(state, current_epoch)[-2]
target_index = spec.get_active_validator_indices(state, current_epoch)[-1]
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
# This is the interesting part of the test: on a pre-state with full consolidation queue,
# when processing an additional consolidation, it results in an exit in a later epoch
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch, source_index=source_index, target_index=target_index
),
source_privkey,
target_privkey,
)
block = build_empty_block_for_next_slot(spec, state)
block.body.consolidations = [signed_consolidation]
signed_block = state_transition_and_sign_block(spec, state, block)
blocks.append(signed_block)
yield "blocks", blocks
yield "post", state
expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
assert state.earliest_consolidation_epoch == expected_exit_epoch + 1
assert (
state.consolidation_balance_to_consume
== consolidation_churn_limit - spec.MIN_ACTIVATION_BALANCE
)
assert state.validators[source_index].exit_epoch == expected_exit_epoch + 1
for i in range(consolidation_count):
assert state.validators[2 * i].exit_epoch == expected_exit_epoch
@with_electra_and_later
@with_presets([MINIMAL], "need sufficient consolidation churn limit")
@with_custom_state(
balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit,
threshold_fn=default_activation_threshold,
)
@spec_test
@single_phase
def test_multiple_consolidations_equal_twice_churn(spec, state):
# This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn
consolidation_churn_limit = spec.get_consolidation_churn_limit(state)
# Set the consolidation balance to consume equal to churn limit
state.consolidation_balance_to_consume = consolidation_churn_limit
current_epoch = spec.get_current_epoch(state)
yield "pre", state
# Prepare a bunch of consolidations, each of them in a block, based on the current state
blocks = []
consolidation_count = 8
for i in range(consolidation_count):
source_index = 2 * i
target_index = 2 * i + 1
source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey]
target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey]
# Set source and target withdrawal credentials to the same eth1 credential
set_eth1_withdrawal_credential_with_balance(spec, state, source_index)
set_eth1_withdrawal_credential_with_balance(spec, state, target_index)
signed_consolidation = sign_consolidation(
spec,
state,
spec.Consolidation(
epoch=current_epoch,
source_index=source_index,
target_index=target_index,
),
source_privkey,
target_privkey,
)
block = build_empty_block_for_next_slot(spec, state)
block.body.consolidations = [signed_consolidation]
signed_block = state_transition_and_sign_block(spec, state, block)
blocks.append(signed_block)
yield "blocks", blocks
yield "post", state
first_exit_epoch = spec.compute_activation_exit_epoch(current_epoch)
assert state.consolidation_balance_to_consume == 0
assert state.earliest_consolidation_epoch == first_exit_epoch + 1
for i in range(consolidation_count // 2):
assert state.validators[2 * i].exit_epoch == first_exit_epoch
for i in range(consolidation_count // 2, consolidation_count):
assert state.validators[2 * i].exit_epoch == first_exit_epoch + 1

View File

@ -26,7 +26,7 @@ EIP7594 = SpecForkName('eip7594')
#
# The forks that are deployed on Mainnet
MAINNET_FORKS = (PHASE0, ALTAIR, BELLATRIX, CAPELLA)
MAINNET_FORKS = (PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB)
LATEST_FORK = MAINNET_FORKS[-1]
# The forks that pytest can run with.
# Note: when adding a new fork here, all tests from previous forks with decorator `with_X_and_later`
@ -40,9 +40,9 @@ ALL_PHASES = (
EIP7594,
)
# The forks that have light client specs
LIGHT_CLIENT_TESTING_FORKS = (*[item for item in MAINNET_FORKS if item != PHASE0], DENEB)
LIGHT_CLIENT_TESTING_FORKS = (*[item for item in MAINNET_FORKS if item != PHASE0],)
# The forks that output to the test vectors.
TESTGEN_FORKS = (*MAINNET_FORKS, DENEB, ELECTRA, WHISK)
TESTGEN_FORKS = (*MAINNET_FORKS, ELECTRA, EIP7594, WHISK)
# Forks allowed in the test runner `--fork` flag, to fail fast in case of typos
ALLOWED_TEST_RUNNER_FORKS = (*ALL_PHASES, WHISK)

View File

@ -36,6 +36,8 @@ def run_fork_test(post_spec, pre_state):
'current_sync_committee', 'next_sync_committee',
# Withdrawals
'next_withdrawal_index', 'next_withdrawal_validator_index',
# Deep history valid from Capella onwards
'historical_summaries',
]
for field in stable_fields:
assert getattr(pre_state, field) == getattr(post_state, field)

View File

@ -0,0 +1,65 @@
from eth2spec.test.helpers.constants import (
ELECTRA,
)
ELECTRA_FORK_TEST_META_TAGS = {
'fork': ELECTRA,
}
def run_fork_test(post_spec, pre_state):
yield 'pre', pre_state
post_state = post_spec.upgrade_to_electra(pre_state)
# Stable fields
stable_fields = [
'genesis_time', 'genesis_validators_root', 'slot',
# History
'latest_block_header', 'block_roots', 'state_roots', 'historical_roots',
# Eth1
'eth1_data', 'eth1_data_votes', 'eth1_deposit_index',
# Registry
# NOTE: 'validators', 'balances' could be changed.
# Randomness
'randao_mixes',
# Slashings
'slashings',
# Participation
'previous_epoch_participation', 'current_epoch_participation',
# Finality
'justification_bits', 'previous_justified_checkpoint', 'current_justified_checkpoint', 'finalized_checkpoint',
# Inactivity
'inactivity_scores',
# Sync
'current_sync_committee', 'next_sync_committee',
# Withdrawals
'next_withdrawal_index', 'next_withdrawal_validator_index',
# Deep history valid from Capella onwards
'historical_summaries',
]
for field in stable_fields:
assert getattr(pre_state, field) == getattr(post_state, field)
# Modified fields
modified_fields = ['fork', 'latest_execution_payload_header']
for field in modified_fields:
assert getattr(pre_state, field) != getattr(post_state, field)
assert len(pre_state.validators) == len(post_state.validators)
for pre_validator, post_validator in zip(pre_state.validators, post_state.validators):
stable_validator_fields = [
'pubkey', 'withdrawal_credentials',
'slashed',
'exit_epoch', 'withdrawable_epoch',
]
for field in stable_validator_fields:
assert getattr(pre_validator, field) == getattr(post_validator, field)
assert pre_state.fork.current_version == post_state.fork.previous_version
assert post_state.fork.current_version == post_spec.config.ELECTRA_FORK_VERSION
assert post_state.fork.epoch == post_spec.get_current_epoch(post_state)
yield 'post', post_state

View File

@ -0,0 +1,155 @@
from hashlib import sha256
from eth_utils import (
encode_hex,
int_to_big_endian,
)
from eth2spec.utils import bls
from eth2spec.eip7594 import spec
###############################################################################
# Helper functions
###############################################################################
def expect_exception(func, *args):
try:
func(*args)
except Exception:
pass
else:
raise Exception("should have raised exception")
def bls_add_one(x):
"""
Adds "one" (actually bls.G1()) to a compressed group element.
Useful to compute definitely incorrect proofs.
"""
return bls.G1_to_bytes48(
bls.add(bls.bytes48_to_G1(x), bls.G1())
)
def hash(x):
return sha256(x).digest()
def make_id(*args):
values_str = "_".join(str(arg) for arg in args)
return hash(bytes(values_str, "utf-8"))[:8].hex()
def field_element_bytes(x):
return int.to_bytes(x % spec.BLS_MODULUS, 32, spec.KZG_ENDIANNESS)
def field_element_bytes_unchecked(x):
return int.to_bytes(x, 32, spec.KZG_ENDIANNESS)
def encode_hex_list(a):
return [encode_hex(x) for x in a]
def int_to_hex(n: int, byte_length: int = None) -> str:
byte_value = int_to_big_endian(n)
if byte_length:
byte_value = byte_value.rjust(byte_length, b'\x00')
return encode_hex(byte_value)
def evaluate_blob_at(blob, z):
return field_element_bytes(
spec.evaluate_polynomial_in_evaluation_form(spec.blob_to_polynomial(blob), spec.bytes_to_bls_field(z))
)
###############################################################################
# Global variables
###############################################################################
BLS_MODULUS_BYTES = spec.BLS_MODULUS.to_bytes(32, spec.KZG_ENDIANNESS)
# Field Elements
FE_VALID1 = field_element_bytes(0)
FE_VALID2 = field_element_bytes(1)
FE_VALID3 = field_element_bytes(2)
FE_VALID4 = field_element_bytes(pow(5, 1235, spec.BLS_MODULUS))
FE_VALID5 = field_element_bytes(spec.BLS_MODULUS - 1)
FE_VALID6 = field_element_bytes(spec.compute_roots_of_unity(spec.FIELD_ELEMENTS_PER_BLOB)[1])
VALID_FIELD_ELEMENTS = [FE_VALID1, FE_VALID2, FE_VALID3, FE_VALID4, FE_VALID5, FE_VALID6]
FE_INVALID_EQUAL_TO_MODULUS = field_element_bytes_unchecked(spec.BLS_MODULUS)
FE_INVALID_MODULUS_PLUS_ONE = field_element_bytes_unchecked(spec.BLS_MODULUS + 1)
FE_INVALID_UINT256_MAX = field_element_bytes_unchecked(2**256 - 1)
FE_INVALID_UINT256_MID = field_element_bytes_unchecked(2**256 - 2**128)
FE_INVALID_LENGTH_PLUS_ONE = VALID_FIELD_ELEMENTS[0] + b"\x00"
FE_INVALID_LENGTH_MINUS_ONE = VALID_FIELD_ELEMENTS[0][:-1]
INVALID_FIELD_ELEMENTS = [FE_INVALID_EQUAL_TO_MODULUS, FE_INVALID_MODULUS_PLUS_ONE,
FE_INVALID_UINT256_MAX, FE_INVALID_UINT256_MID,
FE_INVALID_LENGTH_PLUS_ONE, FE_INVALID_LENGTH_MINUS_ONE]
# Blobs
BLOB_ALL_ZEROS = spec.Blob()
BLOB_ALL_TWOS = spec.Blob(b''.join([field_element_bytes(2) for n in range(4096)]))
BLOB_RANDOM_VALID1 = spec.Blob(b''.join([field_element_bytes(pow(2, n + 256, spec.BLS_MODULUS)) for n in range(4096)]))
BLOB_RANDOM_VALID2 = spec.Blob(b''.join([field_element_bytes(pow(3, n + 256, spec.BLS_MODULUS)) for n in range(4096)]))
BLOB_RANDOM_VALID3 = spec.Blob(b''.join([field_element_bytes(pow(5, n + 256, spec.BLS_MODULUS)) for n in range(4096)]))
BLOB_ALL_MODULUS_MINUS_ONE = spec.Blob(b''.join([field_element_bytes(spec.BLS_MODULUS - 1) for n in range(4096)]))
BLOB_ALMOST_ZERO = spec.Blob(b''.join([field_element_bytes(1 if n == 3211 else 0) for n in range(4096)]))
BLOB_INVALID = spec.Blob(b'\xFF' * 4096 * 32)
BLOB_INVALID_CLOSE = spec.Blob(b''.join(
[BLS_MODULUS_BYTES if n == 2111 else field_element_bytes(0) for n in range(4096)]
))
BLOB_INVALID_LENGTH_PLUS_ONE = BLOB_RANDOM_VALID1 + b"\x00"
BLOB_INVALID_LENGTH_MINUS_ONE = BLOB_RANDOM_VALID1[:-1]
VALID_BLOBS = [BLOB_ALL_ZEROS, BLOB_ALL_TWOS, BLOB_RANDOM_VALID1, BLOB_RANDOM_VALID2,
BLOB_RANDOM_VALID3, BLOB_ALL_MODULUS_MINUS_ONE, BLOB_ALMOST_ZERO]
INVALID_BLOBS = [BLOB_INVALID, BLOB_INVALID_CLOSE, BLOB_INVALID_LENGTH_PLUS_ONE, BLOB_INVALID_LENGTH_MINUS_ONE]
# Commitments
VALID_COMMITMENTS = [spec.blob_to_kzg_commitment(blob) for blob in VALID_BLOBS]
# Points
G1 = bls.G1_to_bytes48(bls.G1())
G1_INVALID_TOO_FEW_BYTES = G1[:-1]
G1_INVALID_TOO_MANY_BYTES = G1 + b"\x00"
G1_INVALID_P1_NOT_IN_G1 = bytes.fromhex("8123456789abcdef0123456789abcdef0123456789abcdef" +
"0123456789abcdef0123456789abcdef0123456789abcdef")
G1_INVALID_P1_NOT_ON_CURVE = bytes.fromhex("8123456789abcdef0123456789abcdef0123456789abcdef" +
"0123456789abcdef0123456789abcdef0123456789abcde0")
INVALID_G1_POINTS = [G1_INVALID_TOO_FEW_BYTES, G1_INVALID_TOO_MANY_BYTES,
G1_INVALID_P1_NOT_IN_G1, G1_INVALID_P1_NOT_ON_CURVE]
# Individual Cells
CELL_RANDOM_VALID1 = b"".join([field_element_bytes(pow(2, n + 256, spec.BLS_MODULUS))
for n in range(spec.FIELD_ELEMENTS_PER_CELL)])
CELL_RANDOM_VALID2 = b"".join([field_element_bytes(pow(3, n + 256, spec.BLS_MODULUS))
for n in range(spec.FIELD_ELEMENTS_PER_CELL)])
CELL_RANDOM_VALID3 = b"".join([field_element_bytes(pow(5, n + 256, spec.BLS_MODULUS))
for n in range(spec.FIELD_ELEMENTS_PER_CELL)])
CELL_ALL_MAX_VALUE = b"".join([field_element_bytes_unchecked(2 ** 256 - 1)
for n in range(spec.FIELD_ELEMENTS_PER_CELL)])
CELL_ONE_INVALID_FIELD = b"".join([field_element_bytes_unchecked(spec.BLS_MODULUS)
if n == 7 else field_element_bytes(0)
for n in range(spec.FIELD_ELEMENTS_PER_CELL)])
CELL_INVALID_TOO_FEW_BYTES = CELL_RANDOM_VALID1[:-1]
CELL_INVALID_TOO_MANY_BYTES = CELL_RANDOM_VALID2 + b"\x00"
VALID_INDIVIDUAL_RANDOM_CELL_BYTES = [CELL_RANDOM_VALID1, CELL_RANDOM_VALID2, CELL_RANDOM_VALID3]
INVALID_INDIVIDUAL_CELL_BYTES = [CELL_ALL_MAX_VALUE, CELL_ONE_INVALID_FIELD, CELL_INVALID_TOO_FEW_BYTES,
CELL_INVALID_TOO_MANY_BYTES]
# Cells & Proofs
VALID_CELLS_AND_PROOFS = [] # Saved in case02_compute_cells_and_kzg_proofs

View File

@ -11,6 +11,8 @@ from py_ecc.optimized_bls12_381 import ( # noqa: F401
pairing as py_ecc_pairing,
final_exponentiate as py_ecc_final_exponentiate,
FQ12 as py_ecc_GT,
FQ,
FQ2,
)
from py_ecc.bls.g2_primitives import ( # noqa: F401
curve_order as BLS_MODULUS,
@ -225,6 +227,45 @@ def multiply(point, scalar):
return py_ecc_mul(point, scalar)
def multi_exp(points, integers):
"""
Performs a multi-scalar multiplication between
`points` and `integers`.
`points` can either be in G1 or G2.
"""
# Since this method accepts either G1 or G2, we need to know
# the type of the point to return. Hence, we need at least one point.
if not points or not integers:
raise Exception("Cannot call multi_exp with zero points or zero integers")
if bls == arkworks_bls or bls == fastest_bls:
# Convert integers into arkworks Scalars
scalars = []
for integer in integers:
int_as_bytes = integer.to_bytes(32, 'little')
scalars.append(arkworks_Scalar.from_le_bytes(int_as_bytes))
# Check if we need to perform a G1 or G2 multiexp
if isinstance(points[0], arkworks_G1):
return arkworks_G1.multiexp_unchecked(points, scalars)
elif isinstance(points[0], arkworks_G2):
return arkworks_G2.multiexp_unchecked(points, scalars)
else:
raise Exception("Invalid point type")
result = None
if isinstance(points[0][0], FQ):
result = Z1()
elif isinstance(points[0][0], FQ2):
result = Z2()
else:
raise Exception("Invalid point type")
for point, scalar in zip(points, integers):
result = add(result, multiply(point, scalar))
return result
def neg(point):
"""
Returns the point negation of `point`

View File

@ -0,0 +1,13 @@
# KZG tests for EIP-7594
A test type for KZG libraries. Tests all the public interfaces that a KZG library is required to implement for EIP-7594, as defined in `polynomial-commitments-sampling.md`.
We do not recommend rolling your own crypto or using an untested KZG library.
The KZG test suite runner has the following handlers:
- [`compute_cells`](./compute_cells.md)
- [`compute_cells_and_kzg_proofs`](./compute_cells_and_kzg_proofs.md)
- [`verify_cell_kzg_proof`](./verify_cell_kzg_proof.md)
- [`verify_cell_kzg_proof_batch`](./verify_cell_kzg_proof_batch.md)
- [`recover_all_cells`](./recover_all_cells.md)
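As a non-normative illustration, a consumer of these vectors might iterate over a handler's test cases and compare its library against each `data.yaml`, as sketched below. The `run_case` callback and the local vector layout are assumptions, not part of this specification, and hex decoding/re-encoding of inputs and outputs is elided.

```python
# Hypothetical consumer sketch (not part of the spec or the test generator).
from pathlib import Path
import yaml

def check_handler_cases(handler_dir: Path, run_case):
    """`run_case(inputs)` returns the library's re-encoded output, or raises on invalid input."""
    for case_dir in sorted(p for p in handler_dir.iterdir() if p.is_dir()):
        data = yaml.safe_load((case_dir / "data.yaml").read_text())
        try:
            output = run_case(data["input"])
        except Exception:
            output = None  # an expected `null` output means the call must error
        assert output == data["output"], f"{handler_dir.name}: {case_dir.name} failed"
```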

View File

@ -0,0 +1,22 @@
# Test format: Compute cells
Compute the cells for a given `blob`.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input:
blob: Blob -- the data blob
output: List[Cell] -- the cells
```
- `Blob` is a 131072-byte hexadecimal string, prefixed with `0x`.
- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`.
All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`.
## Condition
The `compute_cells` handler should compute the cells (chunks of an extended blob) for `blob`, and the result should match the expected `output`. If the blob is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), it should error, i.e. the output should be `null`.
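As a non-normative illustration, the sketch below uses the executable spec (the `from eth2spec.eip7594 import spec` import used elsewhere in this repository) to show the expected shape: a valid blob yields `CELLS_PER_EXT_BLOB` cells, and an invalid blob must raise rather than return.

```python
# Illustrative sketch only; a library under test should reproduce this behaviour.
from eth2spec.eip7594 import spec

valid_blob = spec.Blob()  # the all-zero blob is a valid blob
cells = spec.compute_cells(valid_blob)
assert len(cells) == spec.CELLS_PER_EXT_BLOB  # each cell serializes to 2048 bytes

invalid_blob = spec.Blob(b"\xff" * 4096 * 32)  # 32-byte chunks are not canonical field elements
try:
    spec.compute_cells(invalid_blob)
except Exception:
    pass  # correct: the corresponding test vector has `output: null`
else:
    raise AssertionError("an invalid blob must be rejected")
```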

View File

@ -0,0 +1,23 @@
# Test format: Compute cells and KZG proofs
Compute the cells and cell KZG proofs for a given `blob`.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input:
blob: Blob -- the data blob
output: Tuple[List[Cell], List[KZGProof]] -- the cells and proofs
```
- `Blob` is a 131072-byte hexadecimal string, prefixed with `0x`.
- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`.
- `KZGProof` is a 48-byte hexadecimal string, prefixed with `0x`.
All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`.
## Condition
The `compute_cells_and_kzg_proofs` handler should compute the cells (chunks of an extended blob) and cell KZG proofs for `blob`, and the result should match the expected `output`. If the blob is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), it should error, i.e. the output should be `null`.
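As a non-normative cross-check using the executable spec (`from eth2spec.eip7594 import spec`, as imported elsewhere in this repository), the cells and proofs produced by this handler are mutually consistent with `verify_cell_kzg_proof`: each proof verifies the matching cell against the blob's commitment.

```python
# Illustrative consistency sketch between this handler and verify_cell_kzg_proof.
from eth2spec.eip7594 import spec

blob = spec.Blob()  # any valid blob
commitment = spec.blob_to_kzg_commitment(blob)
cells, proofs = spec.compute_cells_and_kzg_proofs(blob)
assert len(cells) == len(proofs) == spec.CELLS_PER_EXT_BLOB
for cell_id, (cell, proof) in enumerate(zip(cells, proofs)):
    assert spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof)
```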

View File

@ -0,0 +1,23 @@
# Test format: Recover all cells
Recover all cells given at least 50% of the original `cells`.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input:
cell_ids: List[CellID] -- the cell identifier for each cell
cells: List[Cell] -- the partial collection of cells
output: List[Cell] -- all cells, including recovered cells
```
- `CellID` is an unsigned 64-bit integer.
- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`.
All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`.
## Condition
The `recover_all_cells` handler should recover missing cells, and the result should match the expected `output`. If any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element) or any `cell_id` is invalid (e.g. equal to or greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`.
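As a non-normative illustration of the recovery property behind this format, the sketch below (executable spec import assumed, as used elsewhere in this repository) drops every other cell of an extended blob and recovers the full set.

```python
# Illustrative sketch: any 50% of the cells is enough to recover all of them.
from eth2spec.eip7594 import spec

blob = spec.Blob(b"".join(
    int.to_bytes(pow(2, n + 256, spec.BLS_MODULUS), 32, spec.KZG_ENDIANNESS)
    for n in range(spec.FIELD_ELEMENTS_PER_BLOB)
))
cells = spec.compute_cells(blob)
cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2))  # keep every other cell
partial_cells = [cells[cell_id] for cell_id in cell_ids]
assert spec.recover_all_cells(cell_ids, partial_cells) == cells
```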

View File

@ -0,0 +1,26 @@
# Test format: Verify cell KZG proof
Use the cell KZG `proof` to verify that the KZG `commitment` for a given `cell` is correct.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input:
commitment: Bytes48 -- the KZG commitment
cell_id: CellID -- the identifier for the cell
cell: Cell -- the cell
proof: Bytes48 -- the KZG proof for the cell
output: bool -- true (correct proof) or false (incorrect proof)
```
- `Bytes48` is a 48-byte hexadecimal string, prefixed with `0x`.
- `CellID` is an unsigned 64-bit integer.
- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`.
All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`.
## Condition
The `verify_cell_kzg_proof` handler should verify that `commitment` is a correct KZG commitment to `cell` by using the cell KZG proof `proof`, and the result should match the expected `output`. If the commitment or proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), `cell` is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or `cell_id` is invalid (e.g. equal to or greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`.
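As a non-normative illustration of the three possible outcomes (`true`, `false`, `null`), using the executable spec as imported elsewhere in this repository:

```python
# Illustrative sketch of the tri-state semantics of this handler.
from eth2spec.eip7594 import spec

blob = spec.Blob(b"".join(
    int.to_bytes(pow(3, n + 256, spec.BLS_MODULUS), 32, spec.KZG_ENDIANNESS)
    for n in range(spec.FIELD_ELEMENTS_PER_BLOB)
))
commitment = spec.blob_to_kzg_commitment(blob)
cells, proofs = spec.compute_cells_and_kzg_proofs(blob)

assert spec.verify_cell_kzg_proof(commitment, 0, cells[0], proofs[0])      # output: true
assert not spec.verify_cell_kzg_proof(commitment, 0, cells[1], proofs[0])  # wrong cell data -> false
try:
    spec.verify_cell_kzg_proof(commitment, spec.CELLS_PER_EXT_BLOB, cells[0], proofs[0])
except Exception:
    pass  # out-of-range cell_id -> output: null
else:
    raise AssertionError("an invalid cell_id must be rejected")
```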

View File

@ -0,0 +1,28 @@
# Test format: Verify cell KZG proof batch
Use the cell KZG `proofs` to verify that the KZG `row_commitments` for the given `cells` are correct.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input:
row_commitments: List[Bytes48] -- the KZG commitments
row_indices: List[RowIndex] -- the commitment index for each cell
column_indices: List[ColumnIndex] -- the column index for each cell
cells: List[Cell] -- the cells
proofs: List[Bytes48] -- the KZG proof for each cell
output: bool -- true (all proofs are correct) or false (some proofs incorrect)
```
- `Bytes48` is a 48-byte hexadecimal string, prefixed with `0x`.
- `RowIndex` is an unsigned 64-bit integer.
- `ColumnIndex` is an unsigned 64-bit integer.
- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`.
All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`.
## Condition
The `verify_cell_kzg_proof_batch` handler should verify that `row_commitments` are correct KZG commitments to `cells` by using the cell KZG proofs `proofs`, and the result should match the expected `output`. If any of the commitments or proofs are invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or any `row_index` or `column_index` is invalid (e.g. a `row_index` with no corresponding commitment, or a `column_index` equal to or greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`.
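As a non-normative illustration of how the batch inputs fit together, the sketch below (executable spec import assumed, as used elsewhere in this repository) verifies one cell from each of two blobs: `row_indices[i]` selects the commitment for `cells[i]`, and `column_indices[i]` gives that cell's position within its extended blob.

```python
# Illustrative sketch: batch verification of one cell from each of two blobs.
from eth2spec.eip7594 import spec

blobs = [
    spec.Blob(),  # all-zero blob
    spec.Blob(b"".join(
        int.to_bytes(pow(2, n + 256, spec.BLS_MODULUS), 32, spec.KZG_ENDIANNESS)
        for n in range(spec.FIELD_ELEMENTS_PER_BLOB)
    )),
]
row_commitments = [spec.blob_to_kzg_commitment(blob) for blob in blobs]
cells_and_proofs = [spec.compute_cells_and_kzg_proofs(blob) for blob in blobs]

row_indices = [0, 1]     # cells[i] is committed to by row_commitments[row_indices[i]]
column_indices = [0, 0]  # both cells sit at column 0 of their respective rows
cells = [cells_and_proofs[0][0][0], cells_and_proofs[1][0][0]]
proofs = [cells_and_proofs[0][1][0], cells_and_proofs[1][1][0]]
assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
```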

View File

@ -0,0 +1,6 @@
# Networking tests
The aim of the networking tests is to set a baseline for what really needs to pass, i.e. the essentials.
Handlers:
- [`get_custody_columns`](./get_custody_columns.md): `get_custody_columns` helper tests

View File

@ -0,0 +1,14 @@
# `get_custody_columns` tests
`get_custody_columns` tests provide a sanity check of the correctness of the `get_custody_columns` helper.
## Test case format
### `meta.yaml`
```yaml
description: string -- optional: description of test case, purely for debugging purposes.
node_id: int -- argument: the NodeId input.
custody_subnet_count: int -- argument: the count of custody subnets.
result: list of int -- output: the list of resulting column indices.
```
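As a non-normative sketch of the check this format implies (exact argument types elided; the executable spec import is the one used elsewhere in this repository), a runner loads `meta.yaml` and compares the helper's output against `result`:

```python
# Illustrative sketch of a get_custody_columns test-case check.
from eth2spec.eip7594 import spec

def check_case(meta):
    columns = spec.get_custody_columns(meta["node_id"], meta["custody_subnet_count"])
    assert list(columns) == meta["result"]
```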

View File

@ -1,7 +1,7 @@
from typing import Iterable
from eth2spec.test.helpers.constants import (
PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB,
PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB, ELECTRA,
MINIMAL, MAINNET,
)
from eth2spec.test.helpers.typing import SpecForkName, PresetBaseName
@ -9,6 +9,7 @@ from eth2spec.test.altair.fork import test_altair_fork_basic, test_altair_fork_r
from eth2spec.test.bellatrix.fork import test_bellatrix_fork_basic, test_bellatrix_fork_random
from eth2spec.test.capella.fork import test_capella_fork_basic, test_capella_fork_random
from eth2spec.test.deneb.fork import test_deneb_fork_basic, test_deneb_fork_random
from eth2spec.test.electra.fork import test_electra_fork_basic, test_electra_fork_random
from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from eth2spec.gen_helpers.gen_from_tests.gen import generate_from_tests
@ -42,6 +43,8 @@ def _get_fork_tests_providers():
yield create_provider(test_capella_fork_random, preset, BELLATRIX, CAPELLA)
yield create_provider(test_deneb_fork_basic, preset, CAPELLA, DENEB)
yield create_provider(test_deneb_fork_random, preset, CAPELLA, DENEB)
yield create_provider(test_electra_fork_basic, preset, DENEB, ELECTRA)
yield create_provider(test_electra_fork_random, preset, DENEB, ELECTRA)
if __name__ == "__main__":

View File

@ -1,118 +1,37 @@
"""
KZG 4844 test vectors generator
KZG test vectors generator for EIP-4844
"""
from hashlib import sha256
from typing import Tuple, Iterable, Any, Callable, Dict
from eth_utils import (
encode_hex,
int_to_big_endian,
)
from eth_utils import encode_hex
from eth2spec.utils import bls
from eth2spec.deneb import spec
from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from eth2spec.test.helpers.constants import DENEB
from eth2spec.test.helpers.typing import SpecForkName
from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from eth2spec.deneb import spec
from eth2spec.test.utils.kzg_tests import (
BLOB_ALL_TWOS,
BLOB_ALL_ZEROS,
BLOB_RANDOM_VALID1,
G1,
INVALID_BLOBS,
INVALID_FIELD_ELEMENTS,
INVALID_G1_POINTS,
VALID_BLOBS,
VALID_FIELD_ELEMENTS,
bls_add_one,
encode_hex_list,
expect_exception,
field_element_bytes,
hash,
)
from eth2spec.utils import bls
def expect_exception(func, *args):
try:
func(*args)
except Exception:
pass
else:
raise Exception("should have raised exception")
def field_element_bytes(x):
return int.to_bytes(x % spec.BLS_MODULUS, 32, spec.KZG_ENDIANNESS)
def field_element_bytes_unchecked(x):
return int.to_bytes(x, 32, spec.KZG_ENDIANNESS)
def encode_hex_list(a):
return [encode_hex(x) for x in a]
def bls_add_one(x):
"""
Adds "one" (actually bls.G1()) to a compressed group element.
Useful to compute definitely incorrect proofs.
"""
return bls.G1_to_bytes48(
bls.add(bls.bytes48_to_G1(x), bls.G1())
)
def evaluate_blob_at(blob, z):
return field_element_bytes(
spec.evaluate_polynomial_in_evaluation_form(spec.blob_to_polynomial(blob), spec.bytes_to_bls_field(z))
)
BLS_MODULUS_BYTES = spec.BLS_MODULUS.to_bytes(32, spec.KZG_ENDIANNESS)
G1 = bls.G1_to_bytes48(bls.G1())
G1_INVALID_TOO_FEW_BYTES = G1[:-1]
G1_INVALID_TOO_MANY_BYTES = G1 + b"\x00"
G1_INVALID_P1_NOT_IN_G1 = bytes.fromhex("8123456789abcdef0123456789abcdef0123456789abcdef" +
"0123456789abcdef0123456789abcdef0123456789abcdef")
G1_INVALID_P1_NOT_ON_CURVE = bytes.fromhex("8123456789abcdef0123456789abcdef0123456789abcdef" +
"0123456789abcdef0123456789abcdef0123456789abcde0")
INVALID_G1_POINTS = [G1_INVALID_TOO_FEW_BYTES, G1_INVALID_TOO_MANY_BYTES,
G1_INVALID_P1_NOT_IN_G1, G1_INVALID_P1_NOT_ON_CURVE]
BLOB_ALL_ZEROS = spec.Blob()
BLOB_ALL_TWOS = spec.Blob(b''.join([field_element_bytes(2) for n in range(4096)]))
BLOB_RANDOM_VALID1 = spec.Blob(b''.join([field_element_bytes(pow(2, n + 256, spec.BLS_MODULUS)) for n in range(4096)]))
BLOB_RANDOM_VALID2 = spec.Blob(b''.join([field_element_bytes(pow(3, n + 256, spec.BLS_MODULUS)) for n in range(4096)]))
BLOB_RANDOM_VALID3 = spec.Blob(b''.join([field_element_bytes(pow(5, n + 256, spec.BLS_MODULUS)) for n in range(4096)]))
BLOB_ALL_MODULUS_MINUS_ONE = spec.Blob(b''.join([field_element_bytes(spec.BLS_MODULUS - 1) for n in range(4096)]))
BLOB_ALMOST_ZERO = spec.Blob(b''.join([field_element_bytes(1 if n == 3211 else 0) for n in range(4096)]))
BLOB_INVALID = spec.Blob(b'\xFF' * 4096 * 32)
BLOB_INVALID_CLOSE = spec.Blob(b''.join(
[BLS_MODULUS_BYTES if n == 2111 else field_element_bytes(0) for n in range(4096)]
))
BLOB_INVALID_LENGTH_PLUS_ONE = BLOB_RANDOM_VALID1 + b"\x00"
BLOB_INVALID_LENGTH_MINUS_ONE = BLOB_RANDOM_VALID1[:-1]
VALID_BLOBS = [BLOB_ALL_ZEROS, BLOB_ALL_TWOS, BLOB_RANDOM_VALID1, BLOB_RANDOM_VALID2,
BLOB_RANDOM_VALID3, BLOB_ALL_MODULUS_MINUS_ONE, BLOB_ALMOST_ZERO]
INVALID_BLOBS = [BLOB_INVALID, BLOB_INVALID_CLOSE, BLOB_INVALID_LENGTH_PLUS_ONE, BLOB_INVALID_LENGTH_MINUS_ONE]
FE_VALID1 = field_element_bytes(0)
FE_VALID2 = field_element_bytes(1)
FE_VALID3 = field_element_bytes(2)
FE_VALID4 = field_element_bytes(pow(5, 1235, spec.BLS_MODULUS))
FE_VALID5 = field_element_bytes(spec.BLS_MODULUS - 1)
FE_VALID6 = field_element_bytes(spec.compute_roots_of_unity(spec.FIELD_ELEMENTS_PER_BLOB)[1])
VALID_FIELD_ELEMENTS = [FE_VALID1, FE_VALID2, FE_VALID3, FE_VALID4, FE_VALID5, FE_VALID6]
FE_INVALID_EQUAL_TO_MODULUS = field_element_bytes_unchecked(spec.BLS_MODULUS)
FE_INVALID_MODULUS_PLUS_ONE = field_element_bytes_unchecked(spec.BLS_MODULUS + 1)
FE_INVALID_UINT256_MAX = field_element_bytes_unchecked(2**256 - 1)
FE_INVALID_UINT256_MID = field_element_bytes_unchecked(2**256 - 2**128)
FE_INVALID_LENGTH_PLUS_ONE = VALID_FIELD_ELEMENTS[0] + b"\x00"
FE_INVALID_LENGTH_MINUS_ONE = VALID_FIELD_ELEMENTS[0][:-1]
INVALID_FIELD_ELEMENTS = [FE_INVALID_EQUAL_TO_MODULUS, FE_INVALID_MODULUS_PLUS_ONE,
FE_INVALID_UINT256_MAX, FE_INVALID_UINT256_MID,
FE_INVALID_LENGTH_PLUS_ONE, FE_INVALID_LENGTH_MINUS_ONE]
def hash(x):
return sha256(x).digest()
def int_to_hex(n: int, byte_length: int = None) -> str:
byte_value = int_to_big_endian(n)
if byte_length:
byte_value = byte_value.rjust(byte_length, b'\x00')
return encode_hex(byte_value)
###############################################################################
# Test cases for blob_to_kzg_commitment
###############################################################################
def case01_blob_to_kzg_commitment():
# Valid cases
@ -138,6 +57,10 @@ def case01_blob_to_kzg_commitment():
}
###############################################################################
# Test cases for compute_kzg_proof
###############################################################################
def case02_compute_kzg_proof():
# Valid cases
for blob in VALID_BLOBS:
@ -179,6 +102,10 @@ def case02_compute_kzg_proof():
}
###############################################################################
# Test cases for verify_kzg_proof
###############################################################################
def case03_verify_kzg_proof():
# Valid cases
for blob in VALID_BLOBS:
@ -341,6 +268,10 @@ def case03_verify_kzg_proof():
}
###############################################################################
# Test cases for compute_blob_kzg_proof
###############################################################################
def case04_compute_blob_kzg_proof():
# Valid cases
for blob in VALID_BLOBS:
@ -382,6 +313,10 @@ def case04_compute_blob_kzg_proof():
}
###############################################################################
# Test cases for verify_blob_kzg_proof
###############################################################################
def case05_verify_blob_kzg_proof():
# Valid cases
for blob in VALID_BLOBS:
@ -503,6 +438,10 @@ def case05_verify_blob_kzg_proof():
}
###############################################################################
# Test cases for verify_blob_kzg_proof_batch
###############################################################################
def case06_verify_blob_kzg_proof_batch():
# Valid cases
proofs = []
@ -627,6 +566,10 @@ def case06_verify_blob_kzg_proof_batch():
}
###############################################################################
# Main logic
###############################################################################
def create_provider(fork_name: SpecForkName,
handler_name: str,
test_case_fn: Callable[[], Iterable[Tuple[str, Dict[str, Any]]]]) -> gen_typing.TestProvider:

View File

@ -0,0 +1,3 @@
# KZG Test Generator for EIP-7594
These tests are specific to the API required for implementing PeerDAS polynomial commitment sampling.

View File

@ -0,0 +1,837 @@
"""
KZG test vectors generator for EIP-7594
"""
from typing import Tuple, Iterable, Any, Callable, Dict
from eth_utils import encode_hex
from eth2spec.eip7594 import spec
from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from eth2spec.test.helpers.constants import EIP7594
from eth2spec.test.helpers.typing import SpecForkName
from eth2spec.test.utils.kzg_tests import (
BLOB_RANDOM_VALID1,
BLOB_RANDOM_VALID2,
BLOB_RANDOM_VALID3,
CELL_RANDOM_VALID1,
CELL_RANDOM_VALID2,
INVALID_BLOBS,
INVALID_G1_POINTS,
INVALID_INDIVIDUAL_CELL_BYTES,
VALID_BLOBS,
VALID_CELLS_AND_PROOFS,
VALID_COMMITMENTS,
VALID_INDIVIDUAL_RANDOM_CELL_BYTES,
bls_add_one,
encode_hex_list,
expect_exception,
make_id,
)
from eth2spec.utils import bls
###############################################################################
# Test cases for compute_cells
###############################################################################
def case01_compute_cells():
# Valid cases
for blob in VALID_BLOBS:
cells = spec.compute_cells(blob)
identifier = make_id(blob)
yield f'compute_cells_case_valid_{identifier}', {
'input': {
'blob': encode_hex(blob),
},
'output': encode_hex_list(cells)
}
# Edge case: Invalid blobs
for blob in INVALID_BLOBS:
expect_exception(spec.compute_cells, blob)
identifier = make_id(blob)
yield f'compute_cells_case_invalid_blob_{identifier}', {
'input': {
'blob': encode_hex(blob)
},
'output': None
}
###############################################################################
# Test cases for compute_cells_and_kzg_proofs
###############################################################################
def case02_compute_cells_and_kzg_proofs():
# Valid cases
for blob in VALID_BLOBS:
cells, proofs = spec.compute_cells_and_kzg_proofs(blob)
        # Save cells & proofs here to avoid recomputing them in later test cases.
VALID_CELLS_AND_PROOFS.append((cells, proofs))
identifier = make_id(blob)
yield f'compute_cells_and_kzg_proofs_case_valid_{identifier}', {
'input': {
'blob': encode_hex(blob),
},
'output': (encode_hex_list(cells), encode_hex_list(proofs))
}
# Edge case: Invalid blobs
for blob in INVALID_BLOBS:
expect_exception(spec.compute_cells_and_kzg_proofs, blob)
identifier = make_id(blob)
yield f'compute_cells_and_kzg_proofs_case_invalid_blob_{identifier}', {
'input': {
'blob': encode_hex(blob)
},
'output': None
}
###############################################################################
# Test cases for verify_cell_kzg_proof
###############################################################################
def case03_verify_cell_kzg_proof():
# Valid cases
for i in range(len(VALID_BLOBS)):
cells, proofs = VALID_CELLS_AND_PROOFS[i]
commitment = VALID_COMMITMENTS[i]
cell_id = (2 ** i - 1) % spec.CELLS_PER_EXT_BLOB
cell = cells[cell_id]
proof = proofs[cell_id]
assert spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof)
identifier = make_id(commitment, cell_id, cell, proof)
yield f'verify_cell_kzg_proof_case_valid_{identifier}', {
'input': {
'commitment': encode_hex(commitment),
'cell_id': cell_id,
'cell': encode_hex(cell),
'proof': encode_hex(proof),
},
'output': True
}
# Incorrect commitment
for i in range(len(VALID_BLOBS)):
cells, proofs = VALID_CELLS_AND_PROOFS[i]
commitment = bls_add_one(VALID_COMMITMENTS[i])
cell_id = 99 % spec.CELLS_PER_EXT_BLOB
cell = cells[cell_id]
proof = proofs[cell_id]
assert not spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof)
identifier = make_id(commitment, cell_id, cell, proof)
yield f'verify_cell_kzg_proof_case_incorrect_commitment_{identifier}', {
'input': {
'commitment': encode_hex(commitment),
'cell_id': cell_id,
'cell': encode_hex(cell),
'proof': encode_hex(proof),
},
'output': False
}
# Incorrect cell
for i in range(len(VALID_INDIVIDUAL_RANDOM_CELL_BYTES)):
cell_id = 16 % spec.CELLS_PER_EXT_BLOB
commitment = VALID_COMMITMENTS[i]
cells, proofs = VALID_CELLS_AND_PROOFS[i]
cell = VALID_INDIVIDUAL_RANDOM_CELL_BYTES[i]
proof = proofs[cell_id]
assert not spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof)
identifier = make_id(commitment, cell_id, cell, proof)
yield f'verify_cell_kzg_proof_case_incorrect_cell_{identifier}', {
'input': {
'commitment': encode_hex(commitment),
'cell_id': cell_id,
'cell': encode_hex(cell),
'proof': encode_hex(proof),
},
'output': False
}
# Incorrect proof
for i in range(len(VALID_BLOBS)):
cell_id = 91 % spec.CELLS_PER_EXT_BLOB
commitment = VALID_COMMITMENTS[i]
cells, proofs = VALID_CELLS_AND_PROOFS[i]
cell = cells[cell_id]
proof = bls_add_one(proofs[cell_id])
assert not spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof)
identifier = make_id(commitment, cell_id, cell, proof)
yield f'verify_cell_kzg_proof_case_incorrect_proof_{identifier}', {
'input': {
'commitment': encode_hex(commitment),
'cell_id': cell_id,
'cell': encode_hex(cell),
'proof': encode_hex(proof),
},
'output': False
}
# Edge case: Invalid commitment
for commitment in INVALID_G1_POINTS:
cells, proofs = VALID_CELLS_AND_PROOFS[0]
cell_id = 81 % spec.CELLS_PER_EXT_BLOB
cell = cells[cell_id]
proof = proofs[cell_id]
expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof)
identifier = make_id(commitment, cell_id, cell, proof)
yield f'verify_cell_kzg_proof_case_invalid_commitment_{identifier}', {
'input': {
'commitment': encode_hex(commitment),
'cell_id': cell_id,
'cell': encode_hex(cell),
'proof': encode_hex(proof),
},
'output': None
}
# Edge case: Invalid cell_id
for cell_id in [spec.CELLS_PER_EXT_BLOB, spec.CELLS_PER_EXT_BLOB + 1]:
cells, proofs = VALID_CELLS_AND_PROOFS[1]
commitment = VALID_COMMITMENTS[1]
cell = cells[0]
proof = proofs[0]
expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof)
identifier = make_id(commitment, cell_id, cell, proof)
yield f'verify_cell_kzg_proof_case_invalid_cell_id_{identifier}', {
'input': {
'commitment': encode_hex(commitment),
'cell_id': cell_id,
'cell': encode_hex(cell),
'proof': encode_hex(proof),
},
'output': None
}
# Edge case: Invalid cell
for cell in INVALID_INDIVIDUAL_CELL_BYTES:
cell_id = 32 % spec.CELLS_PER_EXT_BLOB
commitment = VALID_COMMITMENTS[2]
cells, proofs = VALID_CELLS_AND_PROOFS[2]
proof = proofs[cell_id]
expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof)
identifier = make_id(commitment, cell_id, cell, proof)
yield f'verify_cell_kzg_proof_case_invalid_cell_{identifier}', {
'input': {
'commitment': encode_hex(commitment),
'cell_id': cell_id,
'cell': encode_hex(cell),
'proof': encode_hex(proof),
},
'output': None
}
# Edge case: Invalid proof
for proof in INVALID_G1_POINTS:
cells, _ = VALID_CELLS_AND_PROOFS[3]
commitment = VALID_COMMITMENTS[3]
cell_id = 36 % spec.CELLS_PER_EXT_BLOB
cell = cells[cell_id]
expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof)
identifier = make_id(commitment, cell_id, cell, proof)
yield f'verify_cell_kzg_proof_case_invalid_proof_{identifier}', {
'input': {
'commitment': encode_hex(commitment),
'cell_id': cell_id,
'cell': encode_hex(cell),
'proof': encode_hex(proof),
},
'output': None
}
###############################################################################
# Test cases for verify_cell_kzg_proof_batch
###############################################################################
def case04_verify_cell_kzg_proof_batch():
# Valid cases
for i in range(len(VALID_BLOBS)):
cells, proofs = VALID_CELLS_AND_PROOFS[i]
row_commitments = [VALID_COMMITMENTS[i]]
row_indices = [0] * spec.CELLS_PER_EXT_BLOB
column_indices = list(range(spec.CELLS_PER_EXT_BLOB))
assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_valid_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': True
}
# Valid: zero cells
cells, row_commitments, row_indices, column_indices, proofs = [], [], [], [], []
assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_valid_zero_cells_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': True
}
# Valid: Verify cells from multiple blobs
cells0, proofs0 = VALID_CELLS_AND_PROOFS[0]
cells1, proofs1 = VALID_CELLS_AND_PROOFS[1]
row_commitments = VALID_COMMITMENTS[:2]
row_indices = [0, 1]
column_indices = [0, 0]
cells = [cells0[0], cells1[0]]
proofs = [proofs0[0], proofs1[0]]
assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_valid_multiple_blobs_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': True
}
# Valid: Unused row commitments
cells, proofs = VALID_CELLS_AND_PROOFS[2]
cells, proofs = cells[:3], proofs[:3]
# Provide list of all commitments
row_commitments = VALID_COMMITMENTS
row_indices = [2] * len(cells)
column_indices = list(range(len(cells)))
assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_valid_unused_row_commitments_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': True
}
# Valid: Same cell multiple times
row_commitments = [VALID_COMMITMENTS[3]]
num_duplicates = 3
row_indices = [0] * num_duplicates
column_indices = [0] * num_duplicates
cells = [VALID_CELLS_AND_PROOFS[3][0][0]] * num_duplicates
proofs = [VALID_CELLS_AND_PROOFS[3][1][0]] * num_duplicates
assert spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_valid_same_cell_multiple_times_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': True
}
# Incorrect row commitment
cells, proofs = VALID_CELLS_AND_PROOFS[5]
cells, proofs = cells[:1], proofs[:1]
# Change commitment so it's wrong
row_commitments = [bls_add_one(VALID_COMMITMENTS[5])]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
assert not spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_incorrect_row_commitment_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': False
}
# Incorrect cell
cells, proofs = VALID_CELLS_AND_PROOFS[6]
cells, proofs = cells[:1], proofs[:1]
row_commitments = [VALID_COMMITMENTS[6]]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
# Change last cell so it's wrong
cells[-1] = CELL_RANDOM_VALID2
assert not spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_incorrect_cell_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': False
}
# Incorrect proof
cells, proofs = VALID_CELLS_AND_PROOFS[0]
cells, proofs = cells[:1], proofs[:1]
row_commitments = [VALID_COMMITMENTS[0]]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
# Change last proof so it's wrong
proofs[-1] = bls_add_one(proofs[-1])
assert not spec.verify_cell_kzg_proof_batch(row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_incorrect_proof_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': False
}
# Edge case: Invalid row commitment
for i, commitment in enumerate(INVALID_G1_POINTS):
cells, proofs = VALID_CELLS_AND_PROOFS[i % len(INVALID_G1_POINTS)]
cells, proofs = cells[:1], proofs[:1]
# Set row_commitments to the invalid commitment
row_commitments = [commitment]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_row_commitment_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Invalid row_index
cells, proofs = VALID_CELLS_AND_PROOFS[0]
cells, proofs = cells[:1], proofs[:1]
row_commitments = [VALID_COMMITMENTS[0]]
row_indices = [0] * len(cells)
# Set first row index to an invalid value
row_indices[0] = 1
column_indices = list(range(len(cells)))
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_row_index_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Invalid column_index
cells, proofs = VALID_CELLS_AND_PROOFS[1]
cells, proofs = cells[:1], proofs[:1]
row_commitments = [VALID_COMMITMENTS[1]]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
# Set first column index to an invalid value
column_indices[0] = spec.CELLS_PER_EXT_BLOB
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_column_index_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Invalid cell
for i, cell in enumerate(INVALID_INDIVIDUAL_CELL_BYTES):
cells, proofs = VALID_CELLS_AND_PROOFS[i % len(INVALID_INDIVIDUAL_CELL_BYTES)]
cells, proofs = cells[:1], proofs[:1]
row_commitments = [VALID_COMMITMENTS[i % len(INVALID_INDIVIDUAL_CELL_BYTES)]]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
# Set first cell to the invalid cell
cells[0] = cell
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_cell_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Invalid proof
for i, proof in enumerate(INVALID_G1_POINTS):
cells, proofs = VALID_CELLS_AND_PROOFS[i % len(INVALID_G1_POINTS)]
cells, proofs = cells[:1], proofs[:1]
row_commitments = [VALID_COMMITMENTS[i % len(INVALID_G1_POINTS)]]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
# Set first proof to the invalid proof
proofs[0] = proof
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_proof_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Missing a row commitment
cells, proofs = VALID_CELLS_AND_PROOFS[0]
cells, proofs = cells[:1], proofs[:1]
# Do not include the row commitment
row_commitments = []
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_missing_row_commitment_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Missing a row index
cells, proofs = VALID_CELLS_AND_PROOFS[1]
cells, proofs = cells[:2], proofs[:2]
row_commitments = [VALID_COMMITMENTS[1]]
# Leave off one of the row indices
row_indices = [0] * (len(cells) - 1)
column_indices = list(range(len(cells)))
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_missing_row_index_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Missing a column index
cells, proofs = VALID_CELLS_AND_PROOFS[2]
cells, proofs = cells[:2], proofs[:2]
row_commitments = [VALID_COMMITMENTS[2]]
row_indices = [0] * len(cells)
# Leave off one of the column indices
column_indices = list(range(len(cells) - 1))
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_missing_column_index_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Missing a cell
cells, proofs = VALID_CELLS_AND_PROOFS[3]
cells, proofs = cells[:2], proofs[:2]
row_commitments = [VALID_COMMITMENTS[3]]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
    # Remove the last cell
cells = cells[:-1]
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_missing_cell_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
# Edge case: Missing a proof
cells, proofs = VALID_CELLS_AND_PROOFS[4]
cells, proofs = cells[:2], proofs[:2]
row_commitments = [VALID_COMMITMENTS[4]]
row_indices = [0] * len(cells)
column_indices = list(range(len(cells)))
# Remove the last proof
proofs = proofs[:-1]
expect_exception(spec.verify_cell_kzg_proof_batch, row_commitments, row_indices, column_indices, cells, proofs)
identifier = make_id(row_commitments, row_indices, column_indices, cells, proofs)
yield f'verify_cell_kzg_proof_batch_case_invalid_missing_proof_{identifier}', {
'input': {
'row_commitments': encode_hex_list(row_commitments),
'row_indices': row_indices,
'column_indices': column_indices,
'cells': encode_hex_list(cells),
'proofs': encode_hex_list(proofs),
},
'output': None
}
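# Illustrative sketch (not used by the generator): how a consumer might replay one of the
# `verify_cell_kzg_proof_batch` cases yielded above. It assumes only the dict shape produced
# by the yields in this function; `data` is the parsed content of a single case, and an
# `output` of None means the call itself is expected to raise.
def replay_verify_cell_kzg_proof_batch(data):
    def decode(s):
        # Hex strings above are produced by encode_hex_list, i.e. '0x'-prefixed
        return bytes.fromhex(s[2:] if s.startswith('0x') else s)
    inp = data['input']
    try:
        result = spec.verify_cell_kzg_proof_batch(
            [decode(c) for c in inp['row_commitments']],
            inp['row_indices'],
            inp['column_indices'],
            [decode(c) for c in inp['cells']],
            [decode(p) for p in inp['proofs']],
        )
    except Exception:
        assert data['output'] is None
    else:
        assert result == data['output']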
###############################################################################
# Test cases for recover_all_cells
###############################################################################
def case05_recover_all_cells():
# Valid: No missing cells
blob = BLOB_RANDOM_VALID1
cells = spec.compute_cells(blob)
cell_ids = list(range(spec.CELLS_PER_EXT_BLOB))
recovered_cells = spec.recover_all_cells(cell_ids, cells)
assert recovered_cells == cells
identifier = make_id(cell_ids, cells)
yield f'recover_all_cells_case_valid_no_missing_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(cells),
},
'output': encode_hex_list(recovered_cells)
}
# Valid: Half missing cells (every other cell)
blob = BLOB_RANDOM_VALID2
cells = spec.compute_cells(blob)
cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
recovered_cells = spec.recover_all_cells(cell_ids, partial_cells)
assert recovered_cells == cells
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_valid_half_missing_every_other_cell_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': encode_hex_list(recovered_cells)
}
# Valid: Half missing cells (first half)
blob = BLOB_RANDOM_VALID3
cells = spec.compute_cells(blob)
cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB // 2))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
recovered_cells = spec.recover_all_cells(cell_ids, partial_cells)
assert recovered_cells == cells
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_valid_half_missing_first_half_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': encode_hex_list(recovered_cells)
}
# Valid: Half missing cells (second half)
blob = BLOB_RANDOM_VALID1
cells = spec.compute_cells(blob)
cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2, spec.CELLS_PER_EXT_BLOB))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
recovered_cells = spec.recover_all_cells(cell_ids, partial_cells)
assert recovered_cells == cells
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_valid_half_missing_second_half_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': encode_hex_list(recovered_cells)
}
# Edge case: All cells are missing
cell_ids, partial_cells = [], []
expect_exception(spec.recover_all_cells, cell_ids, partial_cells)
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_invalid_all_cells_are_missing_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': None
}
# Edge case: More than half missing
blob = BLOB_RANDOM_VALID2
cells = spec.compute_cells(blob)
cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2 - 1))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
expect_exception(spec.recover_all_cells, cell_ids, partial_cells)
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_invalid_more_than_half_missing_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': None
}
# Edge case: Invalid cell_id
blob = BLOB_RANDOM_VALID1
cells = spec.compute_cells(blob)
cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
# Replace first cell_id with an invalid value
cell_ids[0] = spec.CELLS_PER_EXT_BLOB
expect_exception(spec.recover_all_cells, cell_ids, partial_cells)
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_invalid_cell_id_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': None
}
# Edge case: Invalid cell
blob = BLOB_RANDOM_VALID2
for cell in INVALID_INDIVIDUAL_CELL_BYTES:
cells = spec.compute_cells(blob)
cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
# Replace first cell with an invalid value
partial_cells[0] = cell
expect_exception(spec.recover_all_cells, cell_ids, partial_cells)
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_invalid_cell_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': None
}
# Edge case: More cell_ids than cells
blob = BLOB_RANDOM_VALID3
cells = spec.compute_cells(blob)
cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
# Add another cell_id
cell_ids.append(spec.CELLS_PER_EXT_BLOB - 1)
expect_exception(spec.recover_all_cells, cell_ids, partial_cells)
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_invalid_more_cell_ids_than_cells_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': None
}
# Edge case: More cells than cell_ids
blob = BLOB_RANDOM_VALID1
cells = spec.compute_cells(blob)
cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
# Add another cell
partial_cells.append(CELL_RANDOM_VALID1)
expect_exception(spec.recover_all_cells, cell_ids, partial_cells)
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_invalid_more_cells_than_cell_ids_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': None
}
# Edge case: Duplicate cell_id
blob = BLOB_RANDOM_VALID2
cells = spec.compute_cells(blob)
cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2))
partial_cells = [cells[cell_id] for cell_id in cell_ids]
# Replace first cell_id with the second cell_id
cell_ids[0] = cell_ids[1]
expect_exception(spec.recover_all_cells, cell_ids, partial_cells)
identifier = make_id(cell_ids, partial_cells)
yield f'recover_all_cells_case_invalid_duplicate_cell_id_{identifier}', {
'input': {
'cell_ids': cell_ids,
'cells': encode_hex_list(partial_cells),
},
'output': None
}
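# Illustrative property check (not emitted as a test vector): the threshold exercised by the
# cases above is that exactly half of the extended cells suffices to recover all cells, and
# one cell fewer must fail. Only helpers already used in this generator are assumed.
def check_recovery_threshold(blob):
    cells = spec.compute_cells(blob)
    kept_ids = list(range(1, spec.CELLS_PER_EXT_BLOB, 2))  # the odd-indexed half
    assert len(kept_ids) == spec.CELLS_PER_EXT_BLOB // 2
    assert spec.recover_all_cells(kept_ids, [cells[i] for i in kept_ids]) == cells
    # With one cell fewer than half, recovery is expected to raise
    expect_exception(spec.recover_all_cells, kept_ids[:-1], [cells[i] for i in kept_ids[:-1]])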
###############################################################################
# Main logic
###############################################################################
def create_provider(fork_name: SpecForkName,
handler_name: str,
test_case_fn: Callable[[], Iterable[Tuple[str, Dict[str, Any]]]]) -> gen_typing.TestProvider:
def prepare_fn() -> None:
# Nothing to load / change in spec. Maybe in future forks.
# Put the tests into the general config category, to not require any particular configuration.
return
def cases_fn() -> Iterable[gen_typing.TestCase]:
for data in test_case_fn():
(case_name, case_content) = data
yield gen_typing.TestCase(
fork_name=fork_name,
preset_name='general',
runner_name='kzg',
handler_name=handler_name,
suite_name='kzg-mainnet',
case_name=case_name,
case_fn=lambda: [('data', 'data', case_content)]
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
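# Illustrative helper (not used by the generator): where the cases built by create_provider
# are expected to land on disk. The layout is inferred from the provider fields above
# (preset 'general', runner 'kzg', suite 'kzg-mainnet') and the ('data', 'data', ...) part
# yielded by case_fn; the output directory and the exact file name are assumptions that
# depend on how the generator is invoked.
def list_generated_vectors(output_dir):
    import glob
    pattern = f"{output_dir}/general/eip7594/kzg/*/kzg-mainnet/*/data.yaml"
    return sorted(glob.glob(pattern))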
if __name__ == "__main__":
bls.use_arkworks()
gen_runner.run_generator("kzg_7594", [
# EIP-7594
create_provider(EIP7594, 'compute_cells', case01_compute_cells),
create_provider(EIP7594, 'compute_cells_and_kzg_proofs', case02_compute_cells_and_kzg_proofs),
create_provider(EIP7594, 'verify_cell_kzg_proof', case03_verify_cell_kzg_proof),
create_provider(EIP7594, 'verify_cell_kzg_proof_batch', case04_verify_cell_kzg_proof_batch),
create_provider(EIP7594, 'recover_all_cells', case05_recover_all_cells),
])

View File

@ -0,0 +1,2 @@
pytest>=4.4
../../../[generator]

View File

@ -0,0 +1,5 @@
# Networking tests
The purpose of this test-generator is to provide test-vectors for validating the correct implementation of the networking protocol.
Test-format documentation can be found [here](../../formats/networking/README.md).
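As a rough illustration, a consumer could replay a `get_custody_columns` vector along the following lines; the field names (`node_id`, `custody_subnet_count`, `result`) and file layout are assumptions here and should be checked against the format documentation linked above.

```python
import yaml
from eth2spec.eip7594 import minimal as spec  # or mainnet, matching the vector's preset

def run_get_custody_columns_case(meta_path):
    with open(meta_path) as f:
        meta = yaml.safe_load(f)
    columns = spec.get_custody_columns(int(meta['node_id']), int(meta['custody_subnet_count']))
    assert [int(c) for c in columns] == [int(c) for c in meta['result']]
```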

View File

View File

@ -0,0 +1,14 @@
from eth2spec.test.helpers.constants import EIP7594
from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
if __name__ == "__main__":
eip7594_mods = {key: 'eth2spec.test.eip7594.networking.test_' + key for key in [
'get_custody_columns',
]}
all_mods = {
EIP7594: eip7594_mods
}
run_state_test_generators(runner_name="networking", all_mods=all_mods)
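# Illustrative note: additional EIP-7594 networking handlers would be registered by adding
# their test module names to `eip7594_mods` above, e.g. (hypothetical module name):
#
#     eip7594_mods = {key: 'eth2spec.test.eip7594.networking.test_' + key for key in [
#         'get_custody_columns',
#         'custody_subnet_assignment',  # hypothetical future module
#     ]}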

View File

@ -0,0 +1,2 @@
pytest>=4.4
../../../[generator]