DA verifier protocol (#78)

* Implement generator polynomial and RS encoding

* Implement encode/decode + test using FFT (non-working)

* Use Lagrange for interpolation

* Remove FFT, use evaluations instead

* Move and rename KZG and RS test modules

* Update docs

* Added columns property to chunks matrix
Added test for columns

* Added chunkify and test

* Added compute row commitments
Added row commitments size test

* Fix poly from evaluations method

* Implement encode rows and test

* Update encode row test

* Implement compute row proofs (not working on extended data)

* Use same polynomials for commitment and proof creation after extend

* Fix polynomial from/to evaluations

* Use chunks for verification

* Refactor interpolate

* Implement chunks matrix transposed method

* Added compute column kzg commitments

* Use square size data for encoder tests

* Add column type to columns method

* Added compute columns aggregated commitment
Added aggregated commitment test
Fixed and expanded encode test

* Use SHA3 for hashing

* Make encoder hashing method public

* Fill up verifier implementation

* Added verify column test

* Implement verifier verify test
Fixed small issues

* Implement verifier verify test
Extend verify test for all columns
Daniel Sanchez, 2024-03-11 10:01:34 +01:00 (committed by GitHub)
parent 2d3f463bb7
commit 7ffb7cc7ed
4 changed files with 76 additions and 30 deletions
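
Taken together, the commits above wire up an encode → distribute → verify round trip. The following minimal sketch of that flow mirrors the test_verify case in da/test_verifier.py below; the empty signing key b"" is taken straight from the tests (DAVerifier stores it but does not use it yet), and the TestEncoder fixture is reused exactly as the test does.

from da.common import Column
from da.test_encoder import TestEncoder
from da.verifier import DABlob, DAVerifier

# Reuse the encoder test fixture to obtain encoded data, as test_verify does.
fixture = TestEncoder()
fixture.setUp()
encoded = fixture.encoder.encode(fixture.data)

verifier = DAVerifier(b"")  # signing key placeholder; verify() does not use it yet
for i, column in enumerate(encoded.chunked_data.columns):
    blob = DABlob(
        i,                                        # column index (marked temporary in DABlob)
        Column(column),
        encoded.column_commitments[i],
        encoded.aggregated_column_commitment,
        encoded.aggregated_column_proofs[i],
        encoded.row_commitments,
        [row[i] for row in encoded.row_proofs],   # this column's proof from every row
    )
    assert verifier.verify(blob) is not None      # an Attestation on success, None on failure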

da/encoder.py

@@ -20,6 +20,7 @@ class DAEncoderParams:
 @dataclass
 class EncodedData:
     data: bytes
+    chunked_data: ChunksMatrix
     extended_matrix: ChunksMatrix
     row_commitments: List[Commitment]
     row_proofs: List[List[Proof]]
@@ -28,6 +29,7 @@ class EncodedData:
     aggregated_column_proofs: List[Proof]


 class DAEncoder:
     def __init__(self, params: DAEncoderParams):
         self.params = params
@@ -78,7 +80,7 @@ class DAEncoder:
             chunks_matrix: ChunksMatrix, column_commitments: Sequence[Commitment]
     ) -> Tuple[Polynomial, Commitment]:
         data = bytes(chain.from_iterable(
-            DAEncoder._hash_column_and_commitment(column, commitment)
+            DAEncoder.hash_column_and_commitment(column, commitment)
             for column, commitment in zip(chunks_matrix.columns, column_commitments)
         ))
         return kzg.bytes_to_commitment(data, GLOBAL_PARAMETERS)
@@ -107,6 +109,7 @@ class DAEncoder:
         )
         result = EncodedData(
             data,
+            chunks_matrix,
             extended_matrix,
             row_commitments,
             row_proofs,
@@ -117,7 +120,7 @@ class DAEncoder:
         return result

     @staticmethod
-    def _hash_column_and_commitment(column: Column, commitment: Commitment) -> bytes:
+    def hash_column_and_commitment(column: Column, commitment: Commitment) -> bytes:
         # TODO: Check correctness of bytes to blsfieldelement using modulus over the hash
         return (
             int.from_bytes(sha3_256(column.as_bytes() + bytes(commitment)).digest()) % BLS_MODULUS
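
The renamed hash_column_and_commitment helper maps a column and its KZG commitment to a single BLS field element, which is what lets the per-column hashes be aggregated into one commitment. A self-contained sketch of that hash-to-field step, using only the standard library (the modulus is the BLS12-381 scalar field order used throughout these specs; the toy inputs are illustrative):

from hashlib import sha3_256

# Order of the BLS12-381 scalar field (BLS_MODULUS in the specs).
BLS_MODULUS = 52435875175126190479447740508185965837690552500527637822603658699938581184513

def hash_to_field_bytes(column_bytes: bytes, commitment_bytes: bytes) -> bytes:
    digest = sha3_256(column_bytes + commitment_bytes).digest()
    # Reduce modulo the field order: a raw 256-bit digest may exceed
    # BLS_MODULUS and therefore not be a valid BLSFieldElement.
    return (int.from_bytes(digest, "big") % BLS_MODULUS).to_bytes(32, "big")

element = hash_to_field_bytes(b"toy column bytes", b"toy commitment bytes")
assert int.from_bytes(element, "big") < BLS_MODULUS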

da/test_encoder.py

@@ -11,8 +11,8 @@ from eth2spec.eip7594.mainnet import BYTES_PER_FIELD_ELEMENT, BLSFieldElement
 from da.kzg_rs.common import BLS_MODULUS, ROOTS_OF_UNITY
 from da.kzg_rs import kzg, rs


 class TestEncoder(TestCase):
     def setUp(self):
         self.params: DAEncoderParams = DAEncoderParams(column_count=16, bytes_per_field_element=32)
         self.encoder: DAEncoder = DAEncoder(self.params)
@@ -47,7 +47,7 @@ class TestEncoder(TestCase):
         # verify column aggregation
         for i, (column, proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.aggregated_column_proofs)):
-            data = DAEncoder._hash_column_and_commitment(column, commitment)
+            data = DAEncoder.hash_column_and_commitment(column, commitment)
             kzg.verify_element_proof(
                 bytes_to_bls_field(data),
                 encoded_data.aggregated_column_commitment,

da/test_verifier.py

@@ -1,17 +1,46 @@
 from unittest import TestCase
-from da.verifier import Attestation, DAVerifier
+from da.common import Column
+from da.encoder import DAEncoder
+from da.kzg_rs import kzg
+from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY
+from da.test_encoder import TestEncoder
+from da.verifier import Attestation, DAVerifier, DABlob


 class TestVerifier(TestCase):
-    def test_verify_column(self):
-        pass
+    def setUp(self):
+        self.verifier = DAVerifier(b"")

     def test_verify_chunk(self):
         pass

+    def test_verify_column(self):
+        column = Column(int.to_bytes(i, length=32) for i in range(8))
+        _, column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
+        aggregated_poly, aggregated_column_commitment = kzg.bytes_to_commitment(
+            DAEncoder.hash_column_and_commitment(column, column_commitment), GLOBAL_PARAMETERS
+        )
+        aggregated_proof = kzg.generate_element_proof(0, aggregated_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
+        self.assertTrue(
+            self.verifier._verify_column(
+                column, column_commitment, aggregated_column_commitment, aggregated_proof, 0
+            )
+        )
+
     def test_build_attestation(self):
         pass

     def test_verify(self):
-        pass
+        _ = TestEncoder()
+        _.setUp()
+        encoded_data = _.encoder.encode(_.data)
+        for i, column in enumerate(encoded_data.chunked_data.columns):
+            da_blob = DABlob(
+                i,
+                Column(column),
+                encoded_data.column_commitments[i],
+                encoded_data.aggregated_column_commitment,
+                encoded_data.aggregated_column_proofs[i],
+                encoded_data.row_commitments,
+                [row[i] for row in encoded_data.row_proofs],
+            )
+            self.assertIsNotNone(self.verifier.verify(da_blob))
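
test_verify_column rebuilds, for a single column, the aggregation the encoder performs over all of them. A sketch of the multi-column version, based on the _compute_aggregated_column_commitment hunk in da/encoder.py above (the two toy columns follow the shape used in the test):

from itertools import chain

from da.common import Column
from da.encoder import DAEncoder
from da.kzg_rs import kzg
from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY

columns = [
    Column(int.to_bytes(i, length=32) for i in range(8)),
    Column(int.to_bytes(i + 8, length=32) for i in range(8)),
]
# Per-column commitments, then one field-element hash per (column, commitment) pair.
commitments = [kzg.bytes_to_commitment(c.as_bytes(), GLOBAL_PARAMETERS)[1] for c in columns]
data = bytes(chain.from_iterable(
    DAEncoder.hash_column_and_commitment(c, cm) for c, cm in zip(columns, commitments)
))
# Commit to the concatenated hashes; a proof at index i binds column i to position i.
aggregated_poly, aggregated_commitment = kzg.bytes_to_commitment(data, GLOBAL_PARAMETERS)
proofs = [
    kzg.generate_element_proof(i, aggregated_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
    for i in range(len(columns))
]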

da/verifier.py

@@ -1,20 +1,24 @@
 from dataclasses import dataclass
-from typing import List, Optional
+from hashlib import sha3_256
+from typing import List, Optional, Sequence
+from eth2spec.deneb.mainnet import BLSFieldElement
 from eth2spec.eip7594.mainnet import (
     KZGCommitment as Commitment,
     KZGProof as Proof,
-    BYTES_PER_CELL as BYTES_PER_CHUNK
 )
-from itertools import batched
 from da.common import Column, Chunk, Attestation
+from da.encoder import DAEncoder
+from da.kzg_rs import kzg
+from da.kzg_rs.common import ROOTS_OF_UNITY, GLOBAL_PARAMETERS, BLS_MODULUS


 @dataclass
 class DABlob:
     # this should be removed, but for now it shows the purpose
     index: int
-    column: bytearray
+    column: Column
     column_commitment: Commitment
     aggregated_column_commitment: Commitment
     aggregated_column_proof: Proof
@@ -23,8 +27,8 @@ class DABlob:

 class DAVerifier:
-    def __init__(self):
-        pass
+    def __init__(self, sk: bytes):
+        self.sk = sk

     @staticmethod
     def _verify_column(
@@ -32,45 +36,55 @@ class DAVerifier:
             column_commitment: Commitment,
             aggregated_column_commitment: Commitment,
             aggregated_column_proof: Proof,
+            # this is temporary and should be removed
             index: int
     ) -> bool:
         # 1. compute commitment for column
         _, computed_column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
         # 2. If computed column commitment != column commitment, fail
         if column_commitment != computed_column_commitment:
             return False
         # 3. compute column hash
-        column_hash: bytearray = bytearray(hash(column))
+        column_hash = DAEncoder.hash_column_and_commitment(column, column_commitment)
         # 4. Check proof with commitment and proof over the aggregated column commitment
-        return False
+        chunk = BLSFieldElement.from_bytes(column_hash)
+        return kzg.verify_element_proof(
+            chunk, aggregated_column_commitment, aggregated_column_proof, index, ROOTS_OF_UNITY
+        )

     @staticmethod
-    def _verify_chunk(chunk: Chunk, commitment: Commitment, proof: Proof) -> bool:
-        pass
+    def _verify_chunk(chunk: Chunk, commitment: Commitment, proof: Proof, index: int) -> bool:
+        chunk = BLSFieldElement(int.from_bytes(bytes(chunk)) % BLS_MODULUS)
+        return kzg.verify_element_proof(chunk, commitment, proof, index, ROOTS_OF_UNITY)

     @staticmethod
     def _verify_chunks(
-            chunks: List[Chunk],
-            commitments: List[Commitment],
-            proofs: List[Proof]
+            chunks: Sequence[Chunk],
+            commitments: Sequence[Commitment],
+            proofs: Sequence[Proof],
+            index: int
     ) -> bool:
         if not (len(chunks) == len(commitments) == len(proofs)):
             return False
         for chunk, commitment, proof in zip(chunks, commitments, proofs):
-            if not DAVerifier._verify_chunk(chunk, commitment, proof):
+            if not DAVerifier._verify_chunk(chunk, commitment, proof, index):
                 return False
         return True

     def _build_attestation(self, _blob: DABlob) -> Attestation:
         return Attestation()

-    @staticmethod
     def verify(self, blob: DABlob) -> Optional[Attestation]:
         is_column_verified = DAVerifier._verify_column(
-            blob.column, blob.aggregated_column_commitment, blob.aggregated_column_proof, blob.index
+            blob.column,
+            blob.column_commitment,
+            blob.aggregated_column_commitment,
+            blob.aggregated_column_proof,
+            blob.index
         )
         if not is_column_verified:
             return
-        chunks = batched(blob.column, BYTES_PER_CHUNK)
         are_chunks_verified = DAVerifier._verify_chunks(
-            chunks, blob.rows_commitments, blob.rows_proofs
+            blob.column, blob.rows_commitments, blob.rows_proofs, blob.index
         )
         if not are_chunks_verified:
             return
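
A note on the chunk-level check in the final hunk: zip pairs chunk j of the received column with rows_commitments[j] and rows_proofs[j], since chunk j of a column is the entry contributed by row j, while the shared index argument is this column's evaluation position within every row polynomial. Together with _verify_column, the blob is thus bound both vertically (the aggregated column commitment) and horizontally (each row commitment).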