chore: Add type casts reducing the diff for #3697 (#3734)

kevaundray authored on 2024-04-29 14:14:55 +01:00 (committed by GitHub)
parent 186c9435dd
commit e7b49dc670
3 changed files with 11 additions and 11 deletions
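
All of the hunks below apply the same two mechanical patterns to the pyspec code: empty literals gain an explicit element-type annotation, and bare Python ints (loop indices, literals) are wrapped in the spec's alias types before being passed on. As a rough illustration only (the `CellID`-style names below are stand-in `NewType`s for this sketch, not the pyspec's actual SSZ uint64 classes):

```python
# Rough illustration of the two patterns this commit applies; CellID and
# BLSFieldElement are stand-in NewTypes here, not the pyspec's SSZ uint64 classes.
from typing import List, NewType

CellID = NewType("CellID", int)
BLSFieldElement = NewType("BLSFieldElement", int)

def coset_for_cell(cell_id: CellID) -> List[BLSFieldElement]:
    # Dummy body; the real spec function returns the cell's evaluation coset.
    return [BLSFieldElement(int(cell_id))]

# Pattern 1: annotate empty collections so a strict checker knows the element type.
cosets: List[List[BLSFieldElement]] = []

# Pattern 2: cast loop indices to the expected alias at the call site.
for i in range(4):
    cosets.append(coset_for_cell(CellID(i)))  # bare `i` is an `int`, not a `CellID`
```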

Changed file 1 of 3

@@ -105,11 +105,11 @@ class DataColumnSidecar(Container):
 def get_custody_columns(node_id: NodeID, custody_subnet_count: uint64) -> Sequence[ColumnIndex]:
     assert custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT
-    subnet_ids = []
+    subnet_ids: List[uint64] = []
     i = 0
     while len(subnet_ids) < custody_subnet_count:
         if node_id == UINT256_MAX:
-            node_id = 0
+            node_id = NodeID(0)
         subnet_id = (
             bytes_to_uint64(hash(uint_to_bytes(uint256(node_id + i)))[0:8])
@@ -154,10 +154,10 @@ def recover_matrix(cells_dict: Dict[Tuple[BlobIndex, CellID], Cell], blob_count:
     This helper demonstrates how to apply ``recover_all_cells``.
     The data structure for storing cells is implementation-dependent.
     """
-    extended_matrix = []
+    extended_matrix: List[Cell] = []
     for blob_index in range(blob_count):
         cell_ids = [cell_id for b_index, cell_id in cells_dict.keys() if b_index == blob_index]
-        cells = [cells_dict[(blob_index, cell_id)] for cell_id in cell_ids]
+        cells = [cells_dict[(BlobIndex(blob_index), cell_id)] for cell_id in cell_ids]
         all_cells_for_row = recover_all_cells(cell_ids, cells)
         extended_matrix.extend(all_cells_for_row)
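
Since the docstring above leaves the `cells_dict` layout implementation-dependent, here is one possible way a client could assemble it from the data columns it holds and hand it to `recover_matrix`. This is only a sketch under assumptions: the `eth2spec.eip7594.mainnet` import path, the cell-ID-equals-column-index mapping, and the flat return shape reflect my reading of the pyspec, not anything in this commit, and recovery still requires at least half of each row's cells to be present.

```python
# Sketch only: build the implementation-dependent cells_dict from locally held
# data columns and recover the full extended matrix. Assumes the eip7594 pyspec
# is importable as below and that >= 50% of each row's cells are available.
from typing import Dict, Sequence, Tuple

from eth2spec.eip7594 import mainnet as spec


def matrix_from_columns(
    columns: Dict[int, Sequence[spec.Cell]],  # column index -> one cell per blob row
    blob_count: int,
) -> Sequence[spec.Cell]:
    cells_dict: Dict[Tuple[spec.BlobIndex, spec.CellID], spec.Cell] = {}
    for column_index, column in columns.items():
        for blob_index, cell in enumerate(column):
            # Within each blob's extended row, the cell ID is the column index.
            cells_dict[(spec.BlobIndex(blob_index), spec.CellID(column_index))] = cell
    return spec.recover_matrix(cells_dict, spec.uint64(blob_count))
```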

Changed file 2 of 3

@@ -75,11 +75,11 @@ def verify_data_column_sidecar_kzg_proofs(sidecar: DataColumnSidecar) -> bool:
     # KZG batch verifies that the cells match the corresponding commitments and proofs
     return verify_cell_kzg_proof_batch(
-        row_commitments=sidecar.kzg_commitments,
+        row_commitments_bytes=sidecar.kzg_commitments,
         row_indices=row_ids,  # all rows
         column_indices=[sidecar.index],
         cells=sidecar.column,
-        proofs=sidecar.kzg_proofs,
+        proofs_bytes=sidecar.kzg_proofs,
     )
 ```
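
For context on the "KZG batch verifies" comment above: a batch cell verification folds many single-cell checks into one. Each underlying check is the standard multi-point KZG opening check, sketched here in general terms rather than as the spec's exact internals. For a row commitment C to polynomial p, a cell covering coset S with interpolation polynomial I and vanishing polynomial Z_S, and proof π = [q(τ)]₁ with q = (p − I) / Z_S, the verifier accepts iff

    e(C − [I(τ)]₁, [1]₂) = e(π, [Z_S(τ)]₂)

and the batch version combines the per-cell checks with powers of a random challenge, so the whole column is verified against its row commitments at once.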

Changed file 3 of 3

@@ -243,7 +243,7 @@ def divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff) -> Polynomial
     Long polynomial division for two coefficient form polynomials ``a`` and ``b``
     """
     a = a.copy()  # Make a copy since `a` is passed by reference
-    o = []
+    o: List[BLSFieldElement] = []
     apos = len(a) - 1
     bpos = len(b) - 1
     diff = apos - bpos
@@ -441,7 +441,7 @@ def compute_cells_and_kzg_proofs(blob: Blob) -> Tuple[
     proofs = []
     for i in range(CELLS_PER_EXT_BLOB):
-        coset = coset_for_cell(i)
+        coset = coset_for_cell(CellID(i))
         proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset)
         cells.append(coset_evals_to_cell(ys))
         proofs.append(proof)
@@ -470,7 +470,7 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]:
     for cell_id in range(CELLS_PER_EXT_BLOB):
         start = cell_id * FIELD_ELEMENTS_PER_CELL
         end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL
-        cells.append(coset_evals_to_cell(extended_data_rbo[start:end]))
+        cells.append(coset_evals_to_cell(CosetEvals(extended_data_rbo[start:end])))
     return cells
 ```
@@ -572,7 +572,7 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[
     ])
     # Extend vanishing polynomial to full domain using the closed form of the vanishing polynomial over a coset
-    zero_poly_coeff = [0] * FIELD_ELEMENTS_PER_EXT_BLOB
+    zero_poly_coeff = [BLSFieldElement(0)] * FIELD_ELEMENTS_PER_EXT_BLOB
     for i, coeff in enumerate(short_zero_poly):
         zero_poly_coeff[i * FIELD_ELEMENTS_PER_CELL] = coeff
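
A note on the closed form the comment above refers to (my reading of the construction, not part of this commit): for a coset h·H of the group H = {ω^j} of m-th roots of unity, with m = FIELD_ELEMENTS_PER_CELL, the vanishing polynomial is

    ∏_{j=0}^{m−1} (X − h·ω^j) = X^m − h^m,

so the product over all missing cells' cosets is a polynomial in X^m. `short_zero_poly` is that polynomial in the variable Y = X^m, which is why its i-th coefficient is placed at index i · FIELD_ELEMENTS_PER_CELL and every other entry of `zero_poly_coeff` stays at BLSFieldElement(0).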
@@ -690,7 +690,7 @@ def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequ
     # Convert cells to coset evals
     cosets_evals = [cell_to_coset_evals(cell) for cell in cells]
-    missing_cell_ids = [cell_id for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids]
+    missing_cell_ids = [CellID(cell_id) for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids]
     zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_ids)
     eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data(