diff --git a/da/common.py b/da/common.py
index 9f8487d..5f3fa3c 100644
--- a/da/common.py
+++ b/da/common.py
@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from hashlib import sha256
+from hashlib import blake2b
 from itertools import chain, zip_longest, compress
 from typing import List, Generator, Self, Sequence

@@ -42,7 +42,7 @@ class Bitfield(List[bool]):


 def build_blob_id(row_commitments: Sequence[Commitment]) -> BlobId:
-    hasher = sha256()
+    hasher = blake2b(digest_size=32)
     for c in row_commitments:
         hasher.update(bytes(c))
     return hasher.digest()
diff --git a/da/encoder.py b/da/encoder.py
index f976651..9721c66 100644
--- a/da/encoder.py
+++ b/da/encoder.py
@@ -7,7 +7,7 @@ from eth2spec.eip7594.mainnet import KZGCommitment as Commitment, KZGProof as Pr

 from da.common import ChunksMatrix, Chunk, Row
 from da.kzg_rs import kzg, rs
-from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY, BYTES_PER_FIELD_ELEMENT
+from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY, BYTES_PER_FIELD_ELEMENT, BLS_MODULUS
 from da.kzg_rs.poly import Polynomial

 # Domain separation tag
@@ -76,7 +76,7 @@ class DAEncoder:
     def _combined_polynomial(
         polys: Sequence[Polynomial], h: BLSFieldElement
     ) -> Polynomial:
-        combined = Polynomial.zero()
+        combined = Polynomial([0], BLS_MODULUS)
         power = BLSFieldElement(1)
         for poly in polys:
             combined = combined + poly * int(power)
diff --git a/da/test_encoder.py b/da/test_encoder.py
index fdcd353..5d33378 100644
--- a/da/test_encoder.py
+++ b/da/test_encoder.py
@@ -6,6 +6,7 @@ from eth2spec.deneb.mainnet import bytes_to_bls_field

 from da import encoder
 from da.encoder import DAEncoderParams, DAEncoder
+from da.verifier import DAVerifier
 from eth2spec.eip7594.mainnet import BYTES_PER_FIELD_ELEMENT, BLSFieldElement
 from da.kzg_rs.common import BLS_MODULUS, ROOTS_OF_UNITY

@@ -33,24 +34,26 @@ class TestEncoder(TestCase):
         self.assertEqual(columns_len, column_count)
         chunks_size = (len(data) // encoder_params.bytes_per_chunk) // encoder_params.column_count
         self.assertEqual(len(encoded_data.row_commitments), chunks_size)
-        self.assertEqual(len(encoded_data.row_proofs), chunks_size)
-        self.assertEqual(len(encoded_data.row_proofs[0]), column_count)
-        self.assertIsNotNone(encoded_data.aggregated_column_commitment)
-        self.assertEqual(len(encoded_data.aggregated_column_proofs), columns_len)
+        self.assertEqual(len(encoded_data.combined_column_proofs), columns_len)

         # verify rows
-        for row, proofs, commitment in zip(encoded_data.extended_matrix, encoded_data.row_proofs, encoded_data.row_commitments):
-            for i, (chunk, proof) in enumerate(zip(row, proofs)):
-                self.assertTrue(
-                    kzg.verify_element_proof(bytes_to_bls_field(chunk), commitment, proof, i, ROOTS_OF_UNITY)
-                )
+        h = DAVerifier._derive_challenge(encoded_data.row_commitments)
+        com_C = encoded_data.row_commitments[0]
+        power = h
+        for com in encoded_data.row_commitments[1:]:
+            com_C = com_C + com * int(power)
+            power = power * h

-        # verify column aggregation
-        for i, (column, proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.aggregated_column_proofs)):
-            data = DAEncoder.hash_commitment_blake2b31(commitment)
+        for i, (column, proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.combined_column_proofs)):
+            v = BLSFieldElement(0)
+            power = BLSFieldElement(1)
+            for chunk in column.chunks:
+                x = BLSFieldElement(int.from_bytes(bytes(chunk), byteorder="big"))
+                v = v + x * power
+                power = power * h
             kzg.verify_element_proof(
-                bytes_to_bls_field(data),
-                encoded_data.aggregated_column_commitment,
+                v,
+                com_C,
                 proof,
                 i,
                 ROOTS_OF_UNITY
@@ -84,41 +87,14 @@ class TestEncoder(TestCase):
         poly_2 = rs.decode(r2, ROOTS_OF_UNITY, len(poly_1))
         self.assertEqual(poly_1, poly_2)

-    def test_compute_rows_proofs(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        polynomials, commitments = zip(*self.encoder._compute_row_kzg_commitments(chunks_matrix))
-        extended_chunks_matrix = self.encoder._rs_encode_rows(chunks_matrix)
-        original_proofs = self.encoder._compute_rows_proofs(chunks_matrix, polynomials, commitments)
-        extended_proofs = self.encoder._compute_rows_proofs(extended_chunks_matrix, polynomials, commitments)
-        # check original sized matrix
-        for row, poly, commitment, proofs in zip(chunks_matrix, polynomials, commitments, original_proofs):
-            self.assertEqual(len(proofs), len(row))
-            for i, chunk in enumerate(row):
-                self.assertTrue(kzg.verify_element_proof(BLSFieldElement.from_bytes(chunk), commitment, proofs[i], i, ROOTS_OF_UNITY))
-        # check extended matrix
-        for row, poly, commitment, proofs in zip(extended_chunks_matrix, polynomials, commitments, extended_proofs):
-            for i, chunk in enumerate(row):
-                self.assertTrue(kzg.verify_element_proof(BLSFieldElement.from_bytes(chunk), commitment, proofs[i], i, ROOTS_OF_UNITY))
-
-    def test_compute_column_kzg_commitments(self):
+    def test_generate_combined_column_proofs(self):
         chunks_matrix = self.encoder._chunkify_data(self.data)
-        polynomials, commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        self.assertEqual(len(commitments), len(chunks_matrix[0]))
-        self.assertEqual(len(polynomials), len(chunks_matrix[0]))
-
-    def test_generate_aggregated_column_commitments(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        _, column_commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        poly, commitment = self.encoder._compute_aggregated_column_commitment(column_commitments)
-        self.assertIsNotNone(poly)
-        self.assertIsNotNone(commitment)
-
-    def test_generate_aggregated_column_proofs(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        _, column_commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        poly, _ = self.encoder._compute_aggregated_column_commitment(column_commitments)
-        proofs = self.encoder._compute_aggregated_column_proofs(poly, column_commitments)
-        self.assertEqual(len(proofs), len(column_commitments))
+        row_polynomials, row_commitments = zip(*self.encoder._compute_row_kzg_commitments(chunks_matrix))
+        h = self.encoder._derive_challenge(row_commitments)
+        combined_poly = self.encoder._combined_polynomial(row_polynomials, h)
+        proofs = self.encoder._compute_combined_column_proofs(combined_poly)
+        self.assertEqual(len(proofs), len(row_commitments))

     def test_encode(self):
         from random import randbytes
diff --git a/da/test_verifier.py b/da/test_verifier.py
index 101895b..4c2bcc7 100644
--- a/da/test_verifier.py
+++ b/da/test_verifier.py
@@ -5,7 +5,7 @@ from da.encoder import DAEncoder
 from da.kzg_rs import kzg
 from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY
 from da.test_encoder import TestEncoder
-from da.verifier import DAVerifier, DABlob
+from da.verifier import DAVerifier, DAShare


 class TestVerifier(TestCase):
@@ -13,18 +13,6 @@
     def setUp(self):
         self.verifier = DAVerifier()

-    def test_verify_column(self):
-        column = Column(int.to_bytes(i, length=32) for i in range(8))
-        _, column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
-        aggregated_poly, aggregated_column_commitment = kzg.bytes_to_commitment(
-            DAEncoder.hash_commitment_blake2b31(column_commitment), GLOBAL_PARAMETERS
-        )
-        aggregated_proof = kzg.generate_element_proof(0, aggregated_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
-        self.assertTrue(
-            self.verifier._verify_column(
-                column, 0, column_commitment, aggregated_column_commitment, aggregated_proof,
-            )
-        )

     def test_verify(self):
         _ = TestEncoder()
@@ -32,14 +20,11 @@
         encoded_data = _.encoder.encode(_.data)
         for i, column in enumerate(encoded_data.chunked_data.columns):
             verifier = DAVerifier()
-            da_blob = DABlob(
+            da_blob = DAShare(
                 Column(column),
                 i,
-                encoded_data.column_commitments[i],
-                encoded_data.aggregated_column_commitment,
-                encoded_data.aggregated_column_proofs[i],
+                encoded_data.combined_column_proofs[i],
                 encoded_data.row_commitments,
-                [row[i] for row in encoded_data.row_proofs],
             )
             self.assertIsNotNone(verifier.verify(da_blob))

@@ -49,24 +34,18 @@
         encoded_data = _.encoder.encode(_.data)
         columns = enumerate(encoded_data.chunked_data.columns)
         i, column = next(columns)
-        da_blob = DABlob(
+        da_blob = DAShare(
             Column(column),
             i,
-            encoded_data.column_commitments[i],
-            encoded_data.aggregated_column_commitment,
-            encoded_data.aggregated_column_proofs[i],
+            encoded_data.combined_column_proofs[i],
             encoded_data.row_commitments,
-            [row[i] for row in encoded_data.row_proofs],
         )
         self.assertIsNotNone(self.verifier.verify(da_blob))
         for i, column in columns:
-            da_blob = DABlob(
+            da_blob = DAShare(
                 Column(column),
                 i,
-                encoded_data.column_commitments[i],
-                encoded_data.aggregated_column_commitment,
-                encoded_data.aggregated_column_proofs[i],
+                encoded_data.combined_column_proofs[i],
                 encoded_data.row_commitments,
-                [row[i] for row in encoded_data.row_proofs],
             )
             self.assertTrue(self.verifier.verify(da_blob))
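Sketch of the check the new tests exercise: a Fiat-Shamir challenge h is derived from the row
commitments, the row commitments are folded into a single combined commitment, each column is
evaluated as sum_j chunk_j * h^j, and one KZG opening per column is verified against the combined
commitment at that column's root of unity. This is a minimal sketch reusing names that appear in
this diff (DAVerifier._derive_challenge, kzg.verify_element_proof, ROOTS_OF_UNITY); the standalone
helper below is illustrative only, not part of the change.

from eth2spec.eip7594.mainnet import BLSFieldElement

from da.kzg_rs import kzg
from da.kzg_rs.common import ROOTS_OF_UNITY
from da.verifier import DAVerifier


def verify_column_share(column, index, proof, row_commitments) -> bool:
    # Fiat-Shamir challenge derived from the row commitments.
    h = DAVerifier._derive_challenge(row_commitments)

    # Combined commitment: com_c = C_0 + h*C_1 + h^2*C_2 + ...
    com_c = row_commitments[0]
    power = h
    for com in row_commitments[1:]:
        com_c = com_c + com * int(power)
        power = power * h

    # Expected evaluation at this column's root of unity:
    # v = sum_j chunk_j * h^j, where chunk_j is row j's chunk in this column.
    v = BLSFieldElement(0)
    power = BLSFieldElement(1)
    for chunk in column.chunks:
        x = BLSFieldElement(int.from_bytes(bytes(chunk), byteorder="big"))
        v = v + x * power
        power = power * h

    # A single opening per column against the combined commitment.
    return kzg.verify_element_proof(v, com_c, proof, index, ROOTS_OF_UNITY)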