Da specs main structure (#70)

* Added da encoder pipeline draft

* Remove unnecessary method

* Added verifier module and blob type

* Fill verifier skeleton

* Add comments on implementation, remove unnecessary intermediary method

* Added dispersal skeleton

* Added test modules for dispersal and verifier

* Added test modules for encoder, implemented basic test

* Added missing test cases

* Added missing calls to encode method

* Use single megabyte test in encoder

* Added basic testcases for dispersal

* Added basic testcases for verifier

* Remove duplicated column proofs from encoding schema

* Remove parameters from test functions in encoder

* Extract common types to common module

* Make test pass

* Remove node id dep from mixnet and fix columns access
This commit is contained in:
Daniel Sanchez 2024-02-12 15:35:23 +01:00 committed by GitHub
parent bcde60a97f
commit 9a54d90d14
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 322 additions and 0 deletions

0
da/__init__.py Normal file
View File

38
da/common.py Normal file
View File

@@ -0,0 +1,38 @@
from dataclasses import dataclass
from typing import List, Generator
from eth2spec.eip7594.mainnet import Bytes32
class NodeId(Bytes32):
    """32-byte identifier of a DA node."""
    pass
class Chunk(Bytes32):
    """A single 32-byte element of the chunks matrix (one field element)."""
    pass
class Column(List[Chunk]):
    """A column of the chunks matrix, as dispersed to a single node."""
    pass
class Row(List[Chunk]):
    """A row of the chunks matrix."""
    pass
class ChunksMatrix(List[Row]):
    """Row-major matrix of chunks; iterating the matrix itself yields rows."""

    def columns(self) -> Generator[List[Chunk], None, None]:
        """Yield the columns of the matrix, one list of chunks per column.

        Transposes the row-major storage: the i-th column is the i-th
        element of every row. ``zip(*self)`` stops at the shortest row,
        so all rows are expected to have equal length.
        """
        for column in zip(*self):
            yield list(column)
@dataclass
class Attestation:
    """A node's acknowledgement for a received blob (placeholder, fields TBD)."""
    pass
@dataclass
class Certificate:
    """Availability certificate built from enough attestations (placeholder, fields TBD)."""
    pass

50
da/dispersal.py Normal file
View File

@@ -0,0 +1,50 @@
from typing import List, Optional, Generator
from da.common import Certificate, NodeId
from da.encoder import EncodedData
from da.verifier import DABlob, Attestation
class Dispersal:
    """Distributes encoded DA data across a set of nodes and collects their
    attestations into a certificate once a threshold is reached."""

    def __init__(self, nodes: List[NodeId], threshold: int):
        # nodes: recipients of the dispersed blobs, one column each
        # threshold: minimum number of verified attestations for a certificate
        self.nodes = nodes
        self.threshold = threshold

    def _prepare_data(self, encoded_data: EncodedData) -> Generator[DABlob, None, None]:
        """Yield one DABlob per node, pairing each column of the extended
        matrix with its commitment, row proofs and the aggregated column data."""
        # NOTE(review): these compare row-wise quantities against the node
        # (column) count — confirm that rows == columns is intended here.
        assert len(encoded_data.row_commitments) == len(self.nodes)
        assert len(encoded_data.row_proofs) == len(self.nodes)
        columns = encoded_data.extended_matrix.columns()
        column_commitments = encoded_data.column_commitments
        row_commitments = encoded_data.row_commitments
        rows_proofs = encoded_data.row_proofs
        aggregated_column_commitment = encoded_data.aggregated_column_commitment
        # NOTE(review): EncodedData declares `aggregated_column_proofs`
        # (plural, a list); this singular attribute does not exist on it and
        # will raise AttributeError — confirm which field name is intended.
        aggregated_column_proof = encoded_data.aggregated_column_proof
        for index, (column, column_commitment, row_proofs) in enumerate(zip(columns, column_commitments, rows_proofs)):
            blob = DABlob(
                index,
                column,
                column_commitment,
                aggregated_column_commitment,
                aggregated_column_proof,
                row_commitments,
                row_proofs
            )
            yield blob

    def _send_and_await_response(self, node: NodeId, encoded_data: EncodedData) -> Optional[Attestation]:
        """Send a blob to `node` and wait for its attestation (not yet implemented)."""
        pass

    def _build_certificate(self, attestations: List[Attestation]) -> Optional[Certificate]:
        """Aggregate attestations into a certificate (not yet implemented)."""
        pass

    def _verify_attestation(self, attestation: Attestation) -> bool:
        """Check the validity of a node's attestation (not yet implemented)."""
        pass

    def disperse(self, encoded_data: EncodedData) -> Optional[Certificate]:
        """Send each prepared blob to its node, collect verified attestations,
        and return a certificate if at least `threshold` of them were gathered."""
        attestations = []
        for node, blob in zip(self.nodes, self._prepare_data(encoded_data)):
            if attestation := self._send_and_await_response(node, blob):
                if self._verify_attestation(attestation):
                    attestations.append(attestation)
        if len(attestations) >= self.threshold:
            return self._build_certificate(attestations)

73
da/encoder.py Normal file
View File

@@ -0,0 +1,73 @@
from dataclasses import dataclass
from typing import List
from eth2spec.eip7594.mainnet import KZGCommitment as Commitment, KZGProof as Proof
from da.common import ChunksMatrix
@dataclass
class DAEncoderParams:
    """Configuration for the DA encoding pipeline."""
    # number of columns of the chunks matrix
    column_count: int
    # size in bytes of a single field element (one chunk)
    bytes_per_field_element: int
@dataclass
class EncodedData:
    """Output of the DA encoder: the original data plus the RS-extended
    chunks matrix and all KZG commitments/proofs needed for dispersal."""
    # original payload as passed to encode()
    data: bytearray
    # RS-extended row-major chunks matrix
    extended_matrix: ChunksMatrix
    # one KZG commitment per row
    row_commitments: List[Commitment]
    # per-row lists of KZG proofs
    row_proofs: List[List[Proof]]
    # one KZG commitment per column
    column_commitments: List[Commitment]
    # single commitment aggregating all column commitments
    aggregated_column_commitment: Commitment
    # NOTE(review): dispersal reads `aggregated_column_proof` (singular) from
    # this type — confirm the intended field name/cardinality.
    aggregated_column_proofs: List[Proof]
class DAEncoder:
    """Runs the DA encoding pipeline:
    chunkify -> row commitments -> RS-extend rows -> row proofs ->
    column commitments -> aggregated column commitment -> aggregated proofs.
    All steps are stubs at this stage."""

    def __init__(self, params: DAEncoderParams):
        self.params = params

    def _chunkify_data(self, data: bytearray) -> ChunksMatrix:
        """Split `data` into a row-major matrix of field-element-sized chunks (not yet implemented)."""
        ...

    # NOTE(review): typed as List[bytearray] but encode() passes a
    # ChunksMatrix — confirm the intended parameter type.
    def _compute_row_kzg_commitments(self, rows: List[bytearray]) -> List[Commitment]:
        """Compute one KZG commitment per row (not yet implemented)."""
        ...

    def _rs_encode_rows(self, chunks_matrix: ChunksMatrix) -> ChunksMatrix:
        """Reed-Solomon-extend each row of the matrix (not yet implemented)."""
        ...

    def _compute_rows_proofs(self, chunks_matrix: ChunksMatrix, row_commitments: List[Commitment]) -> List[List[Proof]]:
        """Compute per-row KZG proofs against the row commitments (not yet implemented)."""
        ...

    def _compute_column_kzg_commitments(self, chunks_matrix: ChunksMatrix) -> List[Commitment]:
        """Compute one KZG commitment per column (not yet implemented)."""
        ...

    def _compute_aggregated_column_commitments(
            self, chunks_matrix: ChunksMatrix, column_commitments: List[Commitment]
    ) -> Commitment:
        """Aggregate the column commitments into a single commitment (not yet implemented)."""
        ...

    def _compute_aggregated_column_proofs(
            self,
            chunks_matrix: ChunksMatrix,
            aggregated_column_commitment: Commitment
    ) -> List[Proof]:
        """Compute proofs against the aggregated column commitment (not yet implemented)."""
        ...

    def encode(self, data: bytearray) -> EncodedData:
        """Run the full pipeline over `data` and bundle the results."""
        chunks_matrix = self._chunkify_data(data)
        row_commitments = self._compute_row_kzg_commitments(chunks_matrix)
        extended_matrix = self._rs_encode_rows(chunks_matrix)
        row_proofs = self._compute_rows_proofs(extended_matrix, row_commitments)
        column_commitments = self._compute_column_kzg_commitments(extended_matrix)
        aggregated_column_commitment = self._compute_aggregated_column_commitments(extended_matrix, column_commitments)
        aggregated_column_proofs = self._compute_aggregated_column_proofs(extended_matrix, aggregated_column_commitment)
        result = EncodedData(
            data,
            extended_matrix,
            row_commitments,
            row_proofs,
            column_commitments,
            aggregated_column_commitment,
            aggregated_column_proofs
        )
        return result

20
da/test_dispersal.py Normal file
View File

@@ -0,0 +1,20 @@
from unittest import TestCase
from da.dispersal import Dispersal, DABlob, EncodedData
class TestDispersal(TestCase):
    """Tests for the dispersal module (all placeholders at this stage)."""

    def test_build_certificate_insufficient_attestations(self):
        pass

    def test_build_certificate_enough_attestations(self):
        pass

    def test_prepare_data(self):
        pass

    def test_verify_attestation(self):
        pass

    def test_disperse(self):
        pass

47
da/test_encoder.py Normal file
View File

@@ -0,0 +1,47 @@
from typing import List
from unittest import TestCase
from da import encoder
from da.encoder import DAEncoderParams, Commitment
from eth2spec.eip7594.mainnet import BYTES_PER_FIELD_ELEMENT
class TestEncoder(TestCase):
    """Tests for the DA encoder pipeline (mostly placeholders)."""

    def assert_encoding(self, encoder_params: DAEncoderParams, data: bytearray):
        """Encode `data` with the given params and sanity-check output sizes."""
        encoded_data = encoder.DAEncoder(encoder_params).encode(data)
        self.assertEqual(encoded_data.data, data)
        # NOTE(review): extended_matrix is a list of rows; comparing its
        # length to column_count assumes a square layout — confirm.
        self.assertEqual(len(encoded_data.extended_matrix), encoder_params.column_count)
        # expected number of rows: field elements per column
        chunks_size = (len(data) // encoder_params.bytes_per_field_element) // encoder_params.column_count
        self.assertEqual(len(encoded_data.row_commitments), chunks_size)
        self.assertEqual(len(encoded_data.row_proofs), chunks_size)

    def test_chunkify(self):
        pass

    def test_compute_row_kzg_commitments(self):
        pass

    def test_rs_encode_rows(self):
        pass

    def test_compute_rows_proofs(self):
        pass

    def test_compute_column_kzg_commitments(self):
        pass

    def test_generate_aggregated_column_commitments(self):
        pass

    def test_encode(self):
        # TODO: remove this early return once the encoder stubs are
        # implemented; kept for now so other modules are not disturbed
        return
        from random import randbytes
        # payload sizes: 1, 4, 16 and 64 KiB
        sizes = [pow(2, exp) for exp in range(0, 8, 2)]
        encoder_params = DAEncoderParams(
            column_count=10,
            bytes_per_field_element=BYTES_PER_FIELD_ELEMENT
        )
        for size in sizes:
            data = bytearray(randbytes(size*1024))
            self.assert_encoding(encoder_params, data)

17
da/test_verifier.py Normal file
View File

@@ -0,0 +1,17 @@
from unittest import TestCase
from da.verifier import Attestation, DAVerifier
class TestVerifier(TestCase):
    """Tests for the DA verifier module (all placeholders at this stage)."""

    def test_verify_column(self):
        pass

    def test_verify_chunk(self):
        pass

    def test_build_attestation(self):
        pass

    def test_verify(self):
        pass

77
da/verifier.py Normal file
View File

@@ -0,0 +1,77 @@
from dataclasses import dataclass
from typing import List, Optional
from eth2spec.eip7594.mainnet import (
KZGCommitment as Commitment,
KZGProof as Proof,
BYTES_PER_CELL as BYTES_PER_CHUNK
)
from itertools import batched
from da.common import Column, Chunk, Attestation
@dataclass
class DABlob:
    """The unit a single node receives at dispersal: one column of the
    extended matrix plus the commitments/proofs needed to verify it."""
    # this should be removed, but for now it shows the purpose
    index: int
    # NOTE(review): typed bytearray, but dispersal builds blobs from
    # ChunksMatrix.columns() output — confirm the intended type (Column?).
    column: bytearray
    column_commitment: Commitment
    aggregated_column_commitment: Commitment
    aggregated_column_proof: Proof
    rows_commitments: List[Commitment]
    rows_proofs: List[Proof]
class DAVerifier:
    """Verifies a received DABlob (one column plus its proofs) and, on
    success, produces an Attestation for it."""

    def __init__(self):
        pass

    @staticmethod
    def _verify_column(
            column: Column,
            column_commitment: Commitment,
            aggregated_column_commitment: Commitment,
            aggregated_column_proof: Proof,
            # this is temporary and should be removed
            index: int
    ) -> bool:
        """Check the column against its own commitment and against the
        aggregated column commitment. Placeholder: always returns False."""
        # 1. compute commitment for column
        # 2. If computed column commitment != column commitment, fail
        # 3. compute column hash
        # NOTE(review): placeholder — hash() of a list raises TypeError and
        # bytearray(int) zero-fills; replace with a real byte hash.
        column_hash: bytearray = bytearray(hash(column))
        # 4. Check proof with commitment and proof over the aggregated column commitment
        return False

    @staticmethod
    def _verify_chunk(chunk: Chunk, commitment: Commitment, proof: Proof) -> bool:
        """Verify a single chunk's KZG proof (not yet implemented)."""
        pass

    @staticmethod
    def _verify_chunks(
            chunks: List[Chunk],
            commitments: List[Commitment],
            proofs: List[Proof]
    ) -> bool:
        """Verify each chunk against its matching commitment and proof;
        fail fast on the first invalid chunk."""
        for chunk, commitment, proof in zip(chunks, commitments, proofs):
            if not DAVerifier._verify_chunk(chunk, commitment, proof):
                return False
        return True

    def _build_attestation(self, _blob: DABlob) -> Attestation:
        """Build the attestation for a verified blob (fields TBD)."""
        return Attestation()

    # FIX: was decorated @staticmethod while taking `self` and calling
    # self._build_attestation — the staticmethod binding made it uncallable
    # as intended; it is a plain instance method.
    def verify(self, blob: DABlob) -> Optional[Attestation]:
        """Verify the blob's column and chunk proofs; return an Attestation
        on success, None otherwise."""
        is_column_verified = DAVerifier._verify_column(
            blob.column,
            # FIX: column_commitment was missing from this call, shifting
            # every following argument by one position.
            blob.column_commitment,
            blob.aggregated_column_commitment,
            blob.aggregated_column_proof,
            blob.index
        )
        if not is_column_verified:
            return
        # batched() yields BYTES_PER_CHUNK-sized tuples of the column bytes
        chunks = batched(blob.column, BYTES_PER_CHUNK)
        are_chunks_verified = DAVerifier._verify_chunks(
            chunks, blob.rows_commitments, blob.rows_proofs
        )
        if not are_chunks_verified:
            return
        return self._build_attestation(blob)