Merge pull request #126 from logos-co/feature/v1.1-da-updates

Update DA encoder/verifier to v1.1
megonen 2025-06-04 18:01:06 +03:00 committed by GitHub
commit 30ef110f24
12 changed files with 173 additions and 279 deletions

da/api/common.py

@@ -3,7 +3,7 @@ from dataclasses import dataclass
 from typing import Optional, List, Sequence
 from da.common import BlobId
-from da.verifier import DABlob
+from da.verifier import DAShare
 @dataclass
@@ -31,7 +31,7 @@ class BlobStore(ABC):
         pass
     @abstractmethod
-    def get_multiple(self, app_id: bytes, indexes: Sequence[int]) -> List[Optional[DABlob]]:
+    def get_multiple(self, app_id: bytes, indexes: Sequence[int]) -> List[Optional[DAShare]]:
        pass
@@ -48,7 +48,7 @@ class DAApi:
         """
         self.store.add(id, metadata)
-    def read(self, app_id, indexes) -> List[Optional[DABlob]]:
+    def read(self, app_id, indexes) -> List[Optional[DAShare]]:
         """
         Read method should accept only `app_id` and a list of indexes. The returned list of
         blobs should be ordered in the same sequence as `indexes` in a request.
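A minimal usage sketch of that ordering contract, reusing MockStore from da/api/test_flow.py; the app_id value and the assumption that a fresh MockStore takes no constructor arguments are mine:

from da.api.common import DAApi
from da.api.test_flow import MockStore

api = DAApi(MockStore())
# Nothing has been written yet, so each index resolves to None; results
# come back in the order of `indexes`, not sorted by index.
shares = api.read(app_id=b"\x00" * 32, indexes=[2, 0, 5])
assert shares == [None, None, None]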

da/api/test_flow.py

@@ -2,6 +2,7 @@ from unittest import TestCase
 from collections import defaultdict
 from da.api.common import *
+from da.verifier import DAShare
 @dataclass
@@ -25,7 +26,7 @@ class MockStore(BlobStore):
         self.app_id_store[metadata.app_id][metadata.index] = cert_id
     # Implements `get_multiple` method from BlobStore abstract class.
-    def get_multiple(self, app_id, indexes) -> List[Optional[DABlob]]:
+    def get_multiple(self, app_id, indexes) -> List[Optional[DAShare]]:
         return [
             self.blob_store.get(self.app_id_store[app_id].get(i), None) if self.app_id_store[app_id].get(i) else None for i in indexes
         ]

da/common.py

@@ -1,12 +1,11 @@
 from dataclasses import dataclass
-from hashlib import sha3_256
+from hashlib import blake2b
 from itertools import chain, zip_longest, compress
 from typing import List, Generator, Self, Sequence
 from eth2spec.eip7594.mainnet import Bytes32, KZGCommitment as Commitment
 from py_ecc.bls import G2ProofOfPossession
 type BlobId = bytes
 class NodeId(Bytes32):
@@ -41,13 +40,13 @@ class Bitfield(List[bool]):
     pass
-def build_blob_id(aggregated_column_commitment: Commitment, row_commitments: Sequence[Commitment]) -> BlobId:
-    hasher = sha3_256()
-    hasher.update(bytes(aggregated_column_commitment))
+def build_blob_id(row_commitments: Sequence[Commitment]) -> BlobId:
+    hasher = blake2b(digest_size=32)
     for c in row_commitments:
         hasher.update(bytes(c))
     return hasher.digest()
 class NomosDaG2ProofOfPossession(G2ProofOfPossession):
     # Domain specific tag for Nomos DA protocol
     DST = b"NOMOS_DA_AVAIL"

da/dispersal.py

@@ -1,9 +1,9 @@
 from dataclasses import dataclass
 from typing import List, Generator
-from da.common import NodeId
+from da.common import NodeId, Column
 from da.encoder import EncodedData
-from da.verifier import DABlob
+from da.verifier import DAShare
 @dataclass
@@ -18,29 +18,21 @@ class Dispersal:
         # sort over public keys
         self.settings.nodes_ids.sort()
-    def _prepare_data(self, encoded_data: EncodedData) -> Generator[DABlob, None, None]:
-        assert len(encoded_data.column_commitments) == len(self.settings.nodes_ids)
-        assert len(encoded_data.aggregated_column_proofs) == len(self.settings.nodes_ids)
+    def _prepare_data(self, encoded_data: EncodedData) -> Generator[DAShare, None, None]:
         columns = encoded_data.extended_matrix.columns
-        column_commitments = encoded_data.column_commitments
         row_commitments = encoded_data.row_commitments
-        rows_proofs = encoded_data.row_proofs
-        aggregated_column_commitment = encoded_data.aggregated_column_commitment
-        aggregated_column_proofs = encoded_data.aggregated_column_proofs
-        blobs_data = zip(columns, column_commitments, zip(*rows_proofs), aggregated_column_proofs)
-        for column_idx, (column, column_commitment, row_proofs, column_proof) in enumerate(blobs_data):
-            blob = DABlob(
-                column,
+        column_proofs = encoded_data.combined_column_proofs
+        blobs_data = zip(columns, column_proofs)
+        for column_idx, (column, proof) in enumerate(blobs_data):
+            blob = DAShare(
+                Column(column),
                 column_idx,
-                column_commitment,
-                aggregated_column_commitment,
-                column_proof,
-                row_commitments,
-                row_proofs
+                proof,
+                row_commitments
             )
             yield blob
-    def _send_and_await_response(self, node: NodeId, blob: DABlob) -> bool:
+    def _send_and_await_response(self, node: NodeId, blob: DAShare) -> bool:
         pass
     def disperse(self, encoded_data: EncodedData):
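A side effect of the new layout: each share now carries one combined proof plus the row commitments, instead of per-row proofs, a per-column commitment, and an aggregated commitment and proof. A back-of-envelope comparison, assuming 48-byte BLS12-381 G1 points and an illustrative row count:

G1 = 48   # bytes per KZG commitment or proof on BLS12-381
r = 32    # illustrative number of rows
old_share = G1 * (3 + 2 * r)  # column commitment + aggregated commitment + aggregated proof
                              # + r row commitments + r row proofs
new_share = G1 * (1 + r)      # one combined column proof + r row commitments
print(old_share, new_share)   # 3216 1584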

da/encoder.py

@@ -1,15 +1,18 @@
 from dataclasses import dataclass
-from itertools import batched, chain
-from typing import List, Sequence, Tuple
-from hashlib import blake2b
+from itertools import batched
+from typing import List, Tuple
 from eth2spec.eip7594.mainnet import KZGCommitment as Commitment, KZGProof as Proof, BLSFieldElement
 from da.common import ChunksMatrix, Chunk, Row
 from da.kzg_rs import kzg, rs
+from da.kzg_rs.bdfg_proving import derive_challenge
 from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY, BYTES_PER_FIELD_ELEMENT
 from da.kzg_rs.poly import Polynomial
+from da.kzg_rs.bdfg_proving import compute_combined_polynomial
+# Domain separation tag
+_DST = b"NOMOS_DA_V1"
@@ -23,10 +26,7 @@ class EncodedData:
     chunked_data: ChunksMatrix
     extended_matrix: ChunksMatrix
     row_commitments: List[Commitment]
-    row_proofs: List[List[Proof]]
-    column_commitments: List[Commitment]
-    aggregated_column_commitment: Commitment
-    aggregated_column_proofs: List[Proof]
+    combined_column_proofs: List[Proof]
 class DAEncoder:
@@ -65,72 +65,25 @@ class DAEncoder:
         )
         return ChunksMatrix(__rs_encode_row(row) for row in chunks_matrix)
-    @staticmethod
-    def _compute_rows_proofs(
-        chunks_matrix: ChunksMatrix,
-        polynomials: Sequence[Polynomial],
-        row_commitments: Sequence[Commitment]
-    ) -> List[List[Proof]]:
-        proofs = []
-        for row, poly, commitment in zip(chunks_matrix, polynomials, row_commitments):
-            proofs.append(
-                [
-                    kzg.generate_element_proof(i, poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
-                    for i in range(len(row))
-                ]
-            )
-        return proofs
-    def _compute_column_kzg_commitments(self, chunks_matrix: ChunksMatrix) -> List[Tuple[Polynomial, Commitment]]:
-        return self._compute_row_kzg_commitments(chunks_matrix.transposed())
-    @staticmethod
-    def _compute_aggregated_column_commitment(
-        column_commitments: Sequence[Commitment]
-    ) -> Tuple[Polynomial, Commitment]:
-        data = bytes(chain.from_iterable(
-            DAEncoder.hash_commitment_blake2b31(commitment)
-            for commitment in column_commitments
-        ))
-        return kzg.bytes_to_commitment(data, GLOBAL_PARAMETERS)
-    @staticmethod
-    def _compute_aggregated_column_proofs(
-        polynomial: Polynomial,
-        column_commitments: Sequence[Commitment],
-    ) -> List[Proof]:
+    def _compute_combined_column_proofs(self, combined_poly: Polynomial) -> List[Proof]:
+        total_cols = self.params.column_count * 2
         return [
-            kzg.generate_element_proof(i, polynomial, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
-            for i in range(len(column_commitments))
+            kzg.generate_element_proof(i, combined_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
+            for i in range(total_cols)
         ]
     def encode(self, data: bytes) -> EncodedData:
         chunks_matrix = self._chunkify_data(data)
         row_polynomials, row_commitments = zip(*self._compute_row_kzg_commitments(chunks_matrix))
         extended_matrix = self._rs_encode_rows(chunks_matrix)
-        row_proofs = self._compute_rows_proofs(extended_matrix, row_polynomials, row_commitments)
-        column_polynomials, column_commitments = zip(*self._compute_column_kzg_commitments(extended_matrix))
-        aggregated_column_polynomial, aggregated_column_commitment = (
-            self._compute_aggregated_column_commitment(column_commitments)
-        )
-        aggregated_column_proofs = self._compute_aggregated_column_proofs(
-            aggregated_column_polynomial, column_commitments
-        )
+        h = derive_challenge(row_commitments)
+        combined_poly = compute_combined_polynomial(row_polynomials, h)
+        combined_column_proofs = self._compute_combined_column_proofs(combined_poly)
         result = EncodedData(
             data,
             chunks_matrix,
             extended_matrix,
             row_commitments,
-            row_proofs,
-            column_commitments,
-            aggregated_column_commitment,
-            aggregated_column_proofs
+            combined_column_proofs
         )
         return result
-    @staticmethod
-    def hash_commitment_blake2b31(commitment: Commitment) -> bytes:
-        return (
-            # digest size must be 31 bytes as we cannot encode 32 without risking overflowing the BLS_MODULUS
-            int.from_bytes(blake2b(bytes(commitment), digest_size=31).digest())
-        ).to_bytes(32, byteorder="big")  # rewrap into 32 padded bytes for the field elements, EC library dependant
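Restated in math (notation mine, not from the repo): with p_i the row polynomials of the chunks matrix, h = derive_challenge(row_commitments), and \omega the evaluation root of unity, the encoder now opens a single combined polynomial once per extended column instead of opening every row at every column:

p(X) = \sum_{i=0}^{\ell-1} h^{i}\, p_i(X), \qquad \pi_j = \mathrm{KZG.open}\big(p,\ \omega^{j}\big), \quad 0 \le j < 2c

where \ell is the row count and c = column_count, so combined_column_proofs holds 2c proofs, one per column of the RS-extended matrix, down from \ell \cdot 2c row proofs plus 2c aggregated-column proofs in v1.0.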

da/kzg_rs/bdfg_proving.py (new file, 61 lines)

@@ -0,0 +1,61 @@
+from hashlib import blake2b
+from typing import List, Sequence
+from da.common import Chunk
+from da.kzg_rs.common import BLS_MODULUS
+from eth2spec.eip7594.mainnet import BLSFieldElement, KZGCommitment as Commitment
+from eth2spec.utils import bls
+from da.kzg_rs.poly import Polynomial
+def derive_challenge(row_commitments: List[Commitment]) -> BLSFieldElement:
+    """
+    Derive a Fiat-Shamir challenge scalar h from the row commitments:
+    h = BLAKE2b-31( DST || bytes(com1) || bytes(com2) || ... )
+    """
+    _DST = b"NOMOS_DA_V1"
+    h = blake2b(digest_size=31)
+    h.update(_DST)
+    for com in row_commitments:
+        h.update(bytes(com))
+    digest31 = h.digest()  # 31 bytes
+    # pad to 32 bytes for field element conversion
+    padded = digest31 + b'\x00'
+    return BLSFieldElement.from_bytes(padded)
+def combine_commitments(row_commitments: List[Commitment], h: BLSFieldElement) -> Commitment:
+    combined_commitment = bls.bytes48_to_G1(row_commitments[0])
+    power = int(h) % BLS_MODULUS
+    for commitment in row_commitments[1:]:
+        commitment = bls.bytes48_to_G1(commitment)
+        combined_commitment = bls.add(combined_commitment, bls.multiply(commitment, power))
+        power = (power * int(h)) % BLS_MODULUS
+    return bls.G1_to_bytes48(combined_commitment)
+def compute_combined_polynomial(
+    polys: Sequence[Polynomial], h: BLSFieldElement
+) -> Polynomial:
+    combined_polynomial = polys[0]
+    h_int = int(h)  # raw integer challenge
+    int_pow = 1
+    for poly in polys[1:]:
+        int_pow = (int_pow * h_int) % BLS_MODULUS
+        combined_polynomial = combined_polynomial + Polynomial([int_pow * coeff for coeff in poly], BLS_MODULUS)
+    return combined_polynomial
+def compute_combined_evaluation(
+    evals: Sequence[Chunk],
+    h: BLSFieldElement
+) -> BLSFieldElement:
+    combined_eval_int = 0
+    power_int = 1
+    h_int = int(h) % BLS_MODULUS
+    for chunk in evals:
+        chunk_int = int.from_bytes(bytes(chunk), byteorder="big")
+        combined_eval_int = (combined_eval_int + chunk_int * power_int) % BLS_MODULUS
+        power_int = (power_int * h_int) % BLS_MODULUS
+    return BLSFieldElement(combined_eval_int)
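A minimal end-to-end sketch of how these helpers compose with the encoder and the KZG backend (a hypothetical check; it assumes the da modules are importable, that the data length exactly fills the chunks matrix, and the parameter values are arbitrary):

from da.encoder import DAEncoder, DAEncoderParams
from da.kzg_rs import kzg
from da.kzg_rs.common import ROOTS_OF_UNITY
from da.kzg_rs.bdfg_proving import (
    derive_challenge, combine_commitments, compute_combined_evaluation,
)

params = DAEncoderParams(column_count=4, bytes_per_chunk=31)
data = bytes(range(31)) * 8  # 248 bytes = 2 rows of 4 chunks, 31 bytes each
encoded = DAEncoder(params).encode(data)

# Verifier-side recomputation for extended column 0:
h = derive_challenge(list(encoded.row_commitments))
combined_commitment = combine_commitments(list(encoded.row_commitments), h)
column = next(iter(encoded.extended_matrix.columns))
v = compute_combined_evaluation(column, h)
assert kzg.verify_element_proof(
    v, combined_commitment, encoded.combined_column_proofs[0], 0, ROOTS_OF_UNITY
)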

da/kzg_rs/poly.py

@@ -1,3 +1,4 @@
 from itertools import zip_longest
 from typing import List, Sequence, Self
@@ -108,4 +109,4 @@ class Polynomial[T]:
         )) % self.modulus
     def evaluation_form(self) -> List[T]:
         return [self.eval(ROOTS_OF_UNITY[i]) for i in range(len(self))]

da/test_dispersal.py

@@ -1,10 +1,9 @@
-from hashlib import sha3_256
 from unittest import TestCase
 from da.encoder import DAEncoderParams, DAEncoder
 from da.test_encoder import TestEncoder
-from da.verifier import DAVerifier, DABlob
-from da.common import NodeId, NomosDaG2ProofOfPossession as bls_pop
+from da.verifier import DAVerifier, DAShare
+from da.common import NodeId
 from da.dispersal import Dispersal, DispersalSettings
@@ -27,7 +26,7 @@ class TestDispersal(TestCase):
         # mock send and await method with local verifiers
         verifiers_res = []
-        def __send_and_await_response(_, blob: DABlob):
+        def __send_and_await_response(_, blob: DAShare):
             verifier = DAVerifier()
             res = verifier.verify(blob)
             verifiers_res.append(res)

da/test_encoder.py

@@ -1,14 +1,15 @@
 from itertools import chain, batched
-from random import randrange, randbytes
+from random import randbytes
 from unittest import TestCase
-from eth2spec.deneb.mainnet import bytes_to_bls_field
 from da import encoder
+from da.common import Column
+from da.kzg_rs.bdfg_proving import derive_challenge, compute_combined_polynomial
 from da.encoder import DAEncoderParams, DAEncoder
+from da.verifier import DAVerifier, DAShare
 from eth2spec.eip7594.mainnet import BYTES_PER_FIELD_ELEMENT, BLSFieldElement
-from da.kzg_rs.common import BLS_MODULUS, ROOTS_OF_UNITY
+from da.kzg_rs.common import ROOTS_OF_UNITY
 from da.kzg_rs import kzg, rs
@@ -33,28 +34,19 @@ class TestEncoder(TestCase):
         self.assertEqual(columns_len, column_count)
         chunks_size = (len(data) // encoder_params.bytes_per_chunk) // encoder_params.column_count
         self.assertEqual(len(encoded_data.row_commitments), chunks_size)
-        self.assertEqual(len(encoded_data.row_proofs), chunks_size)
-        self.assertEqual(len(encoded_data.row_proofs[0]), column_count)
-        self.assertIsNotNone(encoded_data.aggregated_column_commitment)
-        self.assertEqual(len(encoded_data.aggregated_column_proofs), columns_len)
-        # verify rows
-        for row, proofs, commitment in zip(encoded_data.extended_matrix, encoded_data.row_proofs, encoded_data.row_commitments):
-            for i, (chunk, proof) in enumerate(zip(row, proofs)):
-                self.assertTrue(
-                    kzg.verify_element_proof(bytes_to_bls_field(chunk), commitment, proof, i, ROOTS_OF_UNITY)
-                )
-        # verify column aggregation
-        for i, (column, proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.aggregated_column_proofs)):
-            data = DAEncoder.hash_commitment_blake2b31(commitment)
-            kzg.verify_element_proof(
-                bytes_to_bls_field(data),
-                encoded_data.aggregated_column_commitment,
-                proof,
-                i,
-                ROOTS_OF_UNITY
-            )
+        verifier = DAVerifier()
+        # verify columns
+        for idx, (column, column_proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.combined_column_proofs)):
+            share = DAShare(
+                column=Column(column),
+                column_idx=idx,
+                combined_column_proof=column_proof,
+                row_commitments=encoded_data.row_commitments
+            )
+            verifier.verify(share)
     def test_chunkify(self):
         encoder_settings = DAEncoderParams(column_count=2, bytes_per_chunk=31)
@@ -84,41 +76,15 @@ class TestEncoder(TestCase):
         poly_2 = rs.decode(r2, ROOTS_OF_UNITY, len(poly_1))
         self.assertEqual(poly_1, poly_2)
-    def test_compute_rows_proofs(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        polynomials, commitments = zip(*self.encoder._compute_row_kzg_commitments(chunks_matrix))
-        extended_chunks_matrix = self.encoder._rs_encode_rows(chunks_matrix)
-        original_proofs = self.encoder._compute_rows_proofs(chunks_matrix, polynomials, commitments)
-        extended_proofs = self.encoder._compute_rows_proofs(extended_chunks_matrix, polynomials, commitments)
-        # check original sized matrix
-        for row, poly, commitment, proofs in zip(chunks_matrix, polynomials, commitments, original_proofs):
-            self.assertEqual(len(proofs), len(row))
-            for i, chunk in enumerate(row):
-                self.assertTrue(kzg.verify_element_proof(BLSFieldElement.from_bytes(chunk), commitment, proofs[i], i, ROOTS_OF_UNITY))
-        # check extended matrix
-        for row, poly, commitment, proofs in zip(extended_chunks_matrix, polynomials, commitments, extended_proofs):
-            for i, chunk in enumerate(row):
-                self.assertTrue(kzg.verify_element_proof(BLSFieldElement.from_bytes(chunk), commitment, proofs[i], i, ROOTS_OF_UNITY))
-    def test_compute_column_kzg_commitments(self):
+    def test_generate_combined_column_proofs(self):
         chunks_matrix = self.encoder._chunkify_data(self.data)
-        polynomials, commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        self.assertEqual(len(commitments), len(chunks_matrix[0]))
-        self.assertEqual(len(polynomials), len(chunks_matrix[0]))
-    def test_generate_aggregated_column_commitments(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        _, column_commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        poly, commitment = self.encoder._compute_aggregated_column_commitment(column_commitments)
-        self.assertIsNotNone(poly)
-        self.assertIsNotNone(commitment)
-    def test_generate_aggregated_column_proofs(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        _, column_commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        poly, _ = self.encoder._compute_aggregated_column_commitment(column_commitments)
-        proofs = self.encoder._compute_aggregated_column_proofs(poly, column_commitments)
-        self.assertEqual(len(proofs), len(column_commitments))
+        row_polynomials, row_commitments = zip(*self.encoder._compute_row_kzg_commitments(chunks_matrix))
+        h = derive_challenge(row_commitments)
+        combined_poly = compute_combined_polynomial(row_polynomials, h)
+        proofs = self.encoder._compute_combined_column_proofs(combined_poly)
+        expected_extended_columns = self.params.column_count * 2
+        self.assertEqual(len(proofs), expected_extended_columns)
     def test_encode(self):
         from random import randbytes
@@ -134,4 +100,4 @@ class TestEncoder(TestCase):
                 for _ in range(size*encoder_params.column_count)
             )
         )
         self.assert_encoding(encoder_params, data)

da/test_full_flow.py

@@ -2,9 +2,9 @@ from itertools import chain
 from unittest import TestCase
 from typing import List, Optional
-from da.common import NodeId, build_blob_id, NomosDaG2ProofOfPossession as bls_pop
+from da.common import NodeId, build_blob_id
 from da.api.common import DAApi, BlobMetadata, Metadata
-from da.verifier import DAVerifier, DABlob
+from da.verifier import DAVerifier, DAShare
 from da.api.test_flow import MockStore
 from da.dispersal import Dispersal, DispersalSettings
 from da.test_encoder import TestEncoder
@@ -17,11 +17,11 @@ class DAVerifierWApi:
         self.api = DAApi(self.store)
         self.verifier = DAVerifier()
-    def receive_blob(self, blob: DABlob):
+    def receive_blob(self, blob: DAShare):
         if self.verifier.verify(blob):
             # Warning: If aggregated col commitment and row commitment are the same,
             # the build_attestation_message method will produce the same output.
-            blob_id = build_blob_id(blob.aggregated_column_commitment, blob.rows_commitments)
+            blob_id = build_blob_id(blob.row_commitments)
             self.store.populate(blob, blob_id)
     def receive_metadata(self, blob_metadata: BlobMetadata):
@@ -30,7 +30,7 @@ class DAVerifierWApi:
         # in which case all certificates had been already verified by the DA Node.
         self.api.write(blob_metadata.blob_id, blob_metadata.metadata)
-    def read(self, app_id, indexes) -> List[Optional[DABlob]]:
+    def read(self, app_id, indexes) -> List[Optional[DAShare]]:
         return self.api.read(app_id, indexes)
@@ -59,14 +59,14 @@ class TestFullFlow(TestCase):
         encoded_data = DAEncoder(encoding_params).encode(data)
         # mock send and await method with local verifiers
-        def __send_and_await_response(node: int, blob: DABlob):
+        def __send_and_await_response(node: int, blob: DAShare):
             node = self.api_nodes[int.from_bytes(node)]
             node.receive_blob(blob)
         # inject mock send and await method
         self.dispersal._send_and_await_response = __send_and_await_response
         self.dispersal.disperse(encoded_data)
-        blob_id = build_blob_id(encoded_data.aggregated_column_commitment, encoded_data.row_commitments)
+        blob_id = build_blob_id(encoded_data.row_commitments)
         blob_metadata = BlobMetadata(
             blob_id,
             Metadata(app_id, index)
@@ -96,14 +96,14 @@ class TestFullFlow(TestCase):
         encoded_data = DAEncoder(encoding_params).encode(data)
         # mock send and await method with local verifiers
-        def __send_and_await_response(node: int, blob: DABlob):
+        def __send_and_await_response(node: int, blob: DAShare):
             node = self.api_nodes[int.from_bytes(node)]
             return node.receive_blob(blob)
         # inject mock send and await method
         self.dispersal._send_and_await_response = __send_and_await_response
         self.dispersal.disperse(encoded_data)
-        blob_id = build_blob_id(encoded_data.aggregated_column_commitment, encoded_data.row_commitments)
+        blob_id = build_blob_id(encoded_data.row_commitments)
         # Loop through each index and simulate dispersal with the same cert_id but different metadata
         for index in indexes:

da/test_verifier.py

@@ -5,7 +5,7 @@ from da.encoder import DAEncoder
 from da.kzg_rs import kzg
 from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY
 from da.test_encoder import TestEncoder
-from da.verifier import DAVerifier, DABlob
+from da.verifier import DAVerifier, DAShare
 class TestVerifier(TestCase):
@@ -13,33 +13,18 @@ class TestVerifier(TestCase):
     def setUp(self):
         self.verifier = DAVerifier()
-    def test_verify_column(self):
-        column = Column(int.to_bytes(i, length=32) for i in range(8))
-        _, column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
-        aggregated_poly, aggregated_column_commitment = kzg.bytes_to_commitment(
-            DAEncoder.hash_commitment_blake2b31(column_commitment), GLOBAL_PARAMETERS
-        )
-        aggregated_proof = kzg.generate_element_proof(0, aggregated_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
-        self.assertTrue(
-            self.verifier._verify_column(
-                column, 0, column_commitment, aggregated_column_commitment, aggregated_proof,
-            )
-        )
     def test_verify(self):
         _ = TestEncoder()
         _.setUp()
         encoded_data = _.encoder.encode(_.data)
-        for i, column in enumerate(encoded_data.chunked_data.columns):
+        for i, column in enumerate(encoded_data.extended_matrix.columns):
             verifier = DAVerifier()
-            da_blob = DABlob(
+            da_blob = DAShare(
                 Column(column),
                 i,
-                encoded_data.column_commitments[i],
-                encoded_data.aggregated_column_commitment,
-                encoded_data.aggregated_column_proofs[i],
+                encoded_data.combined_column_proofs[i],
                 encoded_data.row_commitments,
-                [row[i] for row in encoded_data.row_proofs],
             )
             self.assertIsNotNone(verifier.verify(da_blob))
@@ -47,26 +32,20 @@ class TestVerifier(TestCase):
         _ = TestEncoder()
         _.setUp()
         encoded_data = _.encoder.encode(_.data)
-        columns = enumerate(encoded_data.chunked_data.columns)
+        columns = enumerate(encoded_data.extended_matrix.columns)
         i, column = next(columns)
-        da_blob = DABlob(
+        da_blob = DAShare(
             Column(column),
             i,
-            encoded_data.column_commitments[i],
-            encoded_data.aggregated_column_commitment,
-            encoded_data.aggregated_column_proofs[i],
+            encoded_data.combined_column_proofs[i],
             encoded_data.row_commitments,
-            [row[i] for row in encoded_data.row_proofs],
         )
         self.assertIsNotNone(self.verifier.verify(da_blob))
         for i, column in columns:
-            da_blob = DABlob(
+            da_blob = DAShare(
                 Column(column),
                 i,
-                encoded_data.column_commitments[i],
-                encoded_data.aggregated_column_commitment,
-                encoded_data.aggregated_column_proofs[i],
+                encoded_data.combined_column_proofs[i],
                 encoded_data.row_commitments,
-                [row[i] for row in encoded_data.row_proofs],
             )
-            self.assertTrue(self.verifier.verify(da_blob))
+            self.assertIsNotNone(self.verifier.verify(da_blob))

da/verifier.py

@@ -1,106 +1,49 @@
 from dataclasses import dataclass
-from hashlib import sha3_256
-from typing import List, Sequence, Set
-from eth2spec.deneb.mainnet import BLSFieldElement
+from typing import List
 from eth2spec.eip7594.mainnet import (
     KZGCommitment as Commitment,
     KZGProof as Proof,
 )
-import da.common
-from da.common import Column, Chunk, BlobId
-from da.encoder import DAEncoder
+from da.common import Column, BlobId, build_blob_id
 from da.kzg_rs import kzg
-from da.kzg_rs.common import ROOTS_OF_UNITY, GLOBAL_PARAMETERS, BLS_MODULUS
+from da.kzg_rs.bdfg_proving import combine_commitments, derive_challenge, compute_combined_evaluation
+from da.kzg_rs.common import ROOTS_OF_UNITY
+# Domain separation tag
+_DST = b"NOMOS_DA_V1"
 @dataclass
-class DABlob:
+class DAShare:
     column: Column
     column_idx: int
-    column_commitment: Commitment
-    aggregated_column_commitment: Commitment
-    aggregated_column_proof: Proof
-    rows_commitments: List[Commitment]
-    rows_proofs: List[Proof]
-    def blob_id(self) -> bytes:
-        return da.common.build_blob_id(self.aggregated_column_commitment, self.rows_commitments)
-    def column_id(self) -> bytes:
-        return sha3_256(self.column.as_bytes()).digest()
+    combined_column_proof: Proof
+    row_commitments: List[Commitment]
+    def blob_id(self) -> BlobId:
+        return build_blob_id(self.row_commitments)
 class DAVerifier:
     @staticmethod
-    def _verify_column(
-        column: Column,
-        column_idx: int,
-        column_commitment: Commitment,
-        aggregated_column_commitment: Commitment,
-        aggregated_column_proof: Proof,
-    ) -> bool:
-        # 1. compute commitment for column
-        _, computed_column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
-        # 2. If computed column commitment != column commitment, fail
-        if column_commitment != computed_column_commitment:
-            return False
-        # 3. compute column hash
-        column_hash = DAEncoder.hash_commitment_blake2b31(column_commitment)
-        # 4. Check proof with commitment and proof over the aggregated column commitment
-        chunk = BLSFieldElement.from_bytes(column_hash)
-        return kzg.verify_element_proof(
-            chunk, aggregated_column_commitment, aggregated_column_proof, column_idx, ROOTS_OF_UNITY
-        )
-    @staticmethod
-    def _verify_chunk(chunk: Chunk, commitment: Commitment, proof: Proof, index: int) -> bool:
-        chunk = BLSFieldElement(int.from_bytes(bytes(chunk)) % BLS_MODULUS)
-        return kzg.verify_element_proof(chunk, commitment, proof, index, ROOTS_OF_UNITY)
-    @staticmethod
-    def _verify_chunks(
-        chunks: Sequence[Chunk],
-        commitments: Sequence[Commitment],
-        proofs: Sequence[Proof],
-        index: int
-    ) -> bool:
-        if not (len(chunks) == len(commitments) == len(proofs)):
-            return False
-        for chunk, commitment, proof in zip(chunks, commitments, proofs):
-            if not DAVerifier._verify_chunk(chunk, commitment, proof, index):
-                return False
-        return True
-    def verify(self, blob: DABlob) -> bool:
+    def verify(blob: DAShare) -> bool:
         """
-        Verify the integrity of the given blob.
-        This function must be idempotent. The implementer should ensure that
-        repeated verification attempts do not result in inconsistent states.
-        Args:
-            blob (DABlob): The blob to verify.
-        Returns:
-            bool: True if the blob is verified successfully, False otherwise.
+        Verifies that blob.column at index blob.column_idx is consistent
+        with the row commitments and the combined column proof.
+        Returns True if verification succeeds, False otherwise.
         """
-        is_column_verified = DAVerifier._verify_column(
-            blob.column,
-            blob.column_idx,
-            blob.column_commitment,
-            blob.aggregated_column_commitment,
-            blob.aggregated_column_proof,
-        )
-        if not is_column_verified:
-            return False
-        are_chunks_verified = DAVerifier._verify_chunks(
-            blob.column, blob.rows_commitments, blob.rows_proofs, blob.column_idx
-        )
-        if not are_chunks_verified:
-            return False
-        # Ensure idempotency: Implementers should define how to avoid redundant verification.
-        return True
+        # 1. Derive challenge
+        h = derive_challenge(blob.row_commitments)
+        # 2. Reconstruct combined commitment: combined_commitment = sum_{i=0..l-1} h^i * row_commitments[i]
+        combined_commitment = combine_commitments(blob.row_commitments, h)
+        # 3. Compute combined evaluation v = sum_{i=0..l-1} (h^i * column_data[i])
+        combined_eval_point = compute_combined_evaluation(blob.column, h)
+        # 4. Verify the single KZG proof for evaluation at point w^{column_idx}
+        return kzg.verify_element_proof(
+            combined_eval_point,
+            combined_commitment,
+            blob.combined_column_proof,
+            blob.column_idx,
+            ROOTS_OF_UNITY
+        )
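In equation form, verify recomputes C and v and runs the standard single-point KZG check at \omega^{j} with j = blob.column_idx (notation mine; the exact pairing arrangement is internal to kzg.verify_element_proof):

C = \sum_{i=0}^{\ell-1} h^{i} C_i, \qquad v = \sum_{i=0}^{\ell-1} h^{i} \cdot \mathrm{column}[i], \qquad e\big(C - [v]_1,\ [1]_2\big) = e\big(\pi,\ [\tau - \omega^{j}]_2\big)

Because KZG commitments are additively homomorphic, C is exactly the commitment to the combined polynomial the encoder opened, so a single proof per column replaces the per-chunk row proofs and the aggregated-column machinery of v1.0.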