Migrating to latest crypto APIs

This commit is contained in:
Hsiao-Wei Wang 2024-01-12 00:36:06 +08:00
parent 93dddd15f2
commit 504b4f9036
No known key found for this signature in database
GPG Key ID: AE3D6B174F971DE4
4 changed files with 21 additions and 24 deletions

View File

@ -203,8 +203,7 @@ def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[s
for item in [dep, key] + key_list[key_list.index(dep)+1:]:
objects[item] = objects.pop(item)
def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]) -> Dict[str, str]:
def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]:
"""
Takes in old spec and new spec ssz objects, combines them,
and returns the newer versions of the objects in dependency order.
@ -226,7 +225,7 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
config_vars = combine_dicts(spec0.config_vars, spec1.config_vars)
ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants)
func_dep_presets = combine_dicts(spec0.func_dep_presets, spec1.func_dep_presets)
ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types) ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects)
dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses)
return SpecObject(
functions=functions,

View File

@ -35,6 +35,11 @@ EXTRA_SPEC_FILES = {
BELLATRIX: "sync/optimistic.md"
}
DEFAULT_ORDER = (
"beacon-chain",
"polynomial-commitments",
)
def is_post_fork(a, b) -> bool:
"""
@ -62,15 +67,25 @@ def get_fork_directory(fork):
raise FileNotFoundError(f"No directory found for fork: {fork}")
def sort_key(s):
for index, key in enumerate(DEFAULT_ORDER):
if key in s:
return (index, s)
return (len(DEFAULT_ORDER), s)
def get_md_doc_paths(spec_fork: str) -> str:
md_doc_paths = ""
for fork in ALL_FORKS:
if is_post_fork(spec_fork, fork):
# Append all files in fork directory recursively
for root, dirs, files in os.walk(get_fork_directory(fork)): for root, _, files in os.walk(get_fork_directory(fork)):
filepaths = []
for filename in files:
filepath = os.path.join(root, filename)
filepaths.append(filepath)
for filepath in sorted(filepaths, key=sort_key):
if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES:
md_doc_paths += filepath + "\n"
# Append extra files if any

View File

@ -18,7 +18,6 @@ from eth2spec.deneb import {preset_name} as deneb
return {
'FIELD_ELEMENTS_PER_CELL': spec_object.preset_vars['FIELD_ELEMENTS_PER_CELL'].value,
'NUMBER_OF_COLUMNS': spec_object.preset_vars['NUMBER_OF_COLUMNS'].value,
'FIELD_ELEMENTS_PER_CELL': spec_object.preset_vars['FIELD_ELEMENTS_PER_CELL'].value,
}
@classmethod

View File

@ -16,7 +16,6 @@
- [`get_custody_lines`](#get_custody_lines)
- [`compute_extended_data`](#compute_extended_data)
- [`compute_extended_matrix`](#compute_extended_matrix)
- [`compute_samples_and_proofs`](#compute_samples_and_proofs)
- [`get_data_column_sidecars`](#get_data_column_sidecars)
- [Custody](#custody)
- [Custody requirement](#custody-requirement)
@ -43,9 +42,8 @@ We define the following Python custom types for type hinting and readability:
| Name | SSZ equivalent | Description |
| - | - | - |
| `DataCell` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The data unit of a cell in the extended data matrix | | `DataColumn` | `List[Cell, MAX_BLOBS_PER_BLOCK]` | The data of each column in EIP7594 |
| `DataColumn` | `List[DataCell, MAX_BLOBS_PER_BLOCK]` | The data of each column in EIP7594 | | `ExtendedMatrix` | `List[Cell, MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS]` | The full data with blobs and one-dimensional erasure coding extension |
| `ExtendedMatrix` | `List[DataCell, MAX_BLOBS_PER_BLOCK * NUMBER_OF_COLUMNS]` | The full data with blobs and one-dimensional erasure coding extension |
| `FlatExtendedMatrix` | `List[BLSFieldElement, MAX_BLOBS_PER_BLOCK * FIELD_ELEMENTS_PER_BLOB * NUMBER_OF_COLUMNS]` | The flattened format of `ExtendedMatrix` |
| `LineIndex` | `uint64` | The index of the rows or columns in `FlatExtendedMatrix` matrix |
@ -55,7 +53,6 @@ We define the following Python custom types for type hinting and readability:
| Name | Value | Description |
| - | - | - |
| `FIELD_ELEMENTS_PER_CELL` | `uint64(2**6)` (= 64) | Elements per `DataCell` |
| `NUMBER_OF_COLUMNS` | `uint64((FIELD_ELEMENTS_PER_BLOB * 2) // FIELD_ELEMENTS_PER_CELL)` (= 128) | Number of columns in the extended data matrix. |
### Custody setting
@ -95,19 +92,6 @@ def compute_extended_matrix(blobs: Sequence[Blob]) -> FlatExtendedMatrix:
return FlatExtendedMatrix(matrix)
```
#### `compute_samples_and_proofs`
```python
def compute_samples_and_proofs(blob: Blob) -> Tuple[
Vector[DataCell, NUMBER_OF_COLUMNS],
Vector[KZGProof, NUMBER_OF_COLUMNS]]:
"""
Defined in polynomial-commitments-sampling.md
"""
# pylint: disable=unused-argument
...
```
#### `get_data_column_sidecars`
```python
@ -119,7 +103,7 @@ def get_data_column_sidecars(signed_block: SignedBeaconBlock,
block.body,
get_generalized_index(BeaconBlockBody, 'blob_kzg_commitments'),
)
cells_and_proofs = [compute_samples_and_proofs(blob) for blob in blobs] cells_and_proofs = [compute_cells_and_proofs(blob) for blob in blobs]
blob_count = len(blobs)
cells = [cells_and_proofs[i][0] for i in range(blob_count)]
proofs = [cells_and_proofs[i][1] for i in range(blob_count)]