Use square size data for encoder tests

Daniel Sanchez Quiros 2024-03-06 17:18:13 +01:00
parent 6d8a370ee7
commit 3019a92460
3 changed files with 8 additions and 5 deletions


@@ -33,7 +33,7 @@ class DAEncoder:
     def _chunkify_data(self, data: bytes) -> ChunksMatrix:
         size: int = self.params.column_count * self.params.bytes_per_field_element
         return ChunksMatrix(
-            Row([bytes(chunk) for chunk in batched(b, self.params.bytes_per_field_element)])
+            Row(Chunk(bytes(chunk)) for chunk in batched(b, self.params.bytes_per_field_element))
             for b in batched(data, size)
         )
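
The change wraps each field-element slice in a Chunk instead of leaving it as raw bytes. A minimal standalone sketch of the chunkifying step (COLUMN_COUNT and BYTES_PER_FIELD_ELEMENT are illustrative constants mirroring the encoder params; itertools.batched requires Python 3.12):

from itertools import batched

BYTES_PER_FIELD_ELEMENT = 32
COLUMN_COUNT = 16

def chunkify(data: bytes) -> list[list[bytes]]:
    # Each row holds COLUMN_COUNT field elements; each chunk is one element.
    row_size = COLUMN_COUNT * BYTES_PER_FIELD_ELEMENT
    return [
        [bytes(chunk) for chunk in batched(row, BYTES_PER_FIELD_ELEMENT)]
        for row in batched(data, row_size)
    ]

rows = chunkify(bytes(2 * COLUMN_COUNT * BYTES_PER_FIELD_ELEMENT))
assert len(rows) == 2 and all(len(row) == COLUMN_COUNT for row in rows)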


@@ -15,3 +15,6 @@ class TestCommon(TestCase):
         matrix = ChunksMatrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
         expected = ChunksMatrix([[1, 4, 7], [2, 5, 8], [3, 6, 9]])
         self.assertEqual(matrix.transposed(), expected)
+        matrix = ChunksMatrix([[1, 2, 3], [4, 5, 6]])
+        expected = ChunksMatrix([[1, 4], [2, 5], [3, 6]])
+        self.assertEqual(matrix.transposed(), expected)
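
The added assertions cover a non-square matrix, which would expose a transpose that silently assumes as many rows as columns. A sketch of a shape-agnostic transpose, assuming ChunksMatrix iterates over its rows:

def transposed(rows):
    # zip(*rows) pairs up the i-th element of every row, i.e. column i.
    return [list(column) for column in zip(*rows)]

assert transposed([[1, 2, 3], [4, 5, 6]]) == [[1, 4], [2, 5], [3, 6]]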


@@ -14,7 +14,7 @@ class TestEncoder(TestCase):
     def setUp(self):
         self.params: DAEncoderParams = DAEncoderParams(column_count=16, bytes_per_field_element=32)
         self.encoder: DAEncoder = DAEncoder(self.params)
-        self.elements = 100
+        self.elements = 32
         self.data = bytearray(
             chain.from_iterable(
                 randrange(BLS_MODULUS).to_bytes(length=self.params.bytes_per_field_element, byteorder='big')
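
Shrinking the data from 100 to 32 field elements presumably gives the "square size" of the commit title: 100 is not a multiple of column_count=16, so the final row of the chunkified matrix would be ragged, while 32 elements fill exactly two full rows. The arithmetic, with constants mirroring the params above:

COLUMN_COUNT = 16
BYTES_PER_FIELD_ELEMENT = 32

elements = 32
data_len = elements * BYTES_PER_FIELD_ELEMENT       # 1024 bytes of test data
row_size = COLUMN_COUNT * BYTES_PER_FIELD_ELEMENT   # 512 bytes per matrix row
assert data_len % row_size == 0                     # no ragged final row
assert data_len // row_size == 2                    # a 2 x 16 chunk matrix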
@@ -76,9 +76,9 @@
     def test_compute_column_kzg_commitments(self):
         chunks_matrix = self.encoder._chunkify_data(self.data)
-        polynomials, commitments = zip(*self.encoder._compute_row_kzg_commitments(chunks_matrix.transposed()))
-        self.assertEqual(len(commitments), len(chunks_matrix))
-        self.assertEqual(len(polynomials), len(chunks_matrix))
+        polynomials, commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
+        self.assertEqual(len(commitments), len(chunks_matrix[0]))
+        self.assertEqual(len(polynomials), len(chunks_matrix[0]))

     def test_generate_aggregated_column_commitments(self):
         pass
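
The fixed test calls the column variant directly and checks the result count against the number of columns, len(chunks_matrix[0]), rather than the number of rows. A sketch of that shape relation with a hypothetical stand-in for the encoder's helper:

def compute_column_commitments(matrix):
    # Hypothetical stand-in: the real encoder builds a polynomial per column
    # and a KZG commitment over it; only the counts matter here.
    for column in zip(*matrix):
        yield f"poly{column}", f"commit{column}"

matrix = [["a", "b", "c"], ["d", "e", "f"]]          # 2 rows x 3 columns
polynomials, commitments = zip(*compute_column_commitments(matrix))
assert len(commitments) == len(matrix[0]) == 3       # one pair per column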