Mirror of https://github.com/logos-blockchain/logos-blockchain-specs.git
Synced 2026-01-02 13:13:06 +00:00

Merge pull request #126 from logos-co/feature/v1.1-da-updates

Update DA encoder/verifier to v1.1

Commit 30ef110f24
@@ -3,7 +3,7 @@ from dataclasses import dataclass
 from typing import Optional, List, Sequence

 from da.common import BlobId
-from da.verifier import DABlob
+from da.verifier import DAShare


 @dataclass
@@ -31,7 +31,7 @@ class BlobStore(ABC):
         pass

     @abstractmethod
-    def get_multiple(self, app_id: bytes, indexes: Sequence[int]) -> List[Optional[DABlob]]:
+    def get_multiple(self, app_id: bytes, indexes: Sequence[int]) -> List[Optional[DAShare]]:
         pass

@@ -48,7 +48,7 @@ class DAApi:
         """
         self.store.add(id, metadata)

-    def read(self, app_id, indexes) -> List[Optional[DABlob]]:
+    def read(self, app_id, indexes) -> List[Optional[DAShare]]:
         """
         Read method should accept only `app_id` and a list of indexes. The returned list of
         blobs should be ordered in the same sequence as `indexes` in a request.
@@ -2,6 +2,7 @@ from unittest import TestCase
 from collections import defaultdict

 from da.api.common import *
+from da.verifier import DAShare


 @dataclass
@@ -25,7 +26,7 @@ class MockStore(BlobStore):
         self.app_id_store[metadata.app_id][metadata.index] = cert_id

     # Implements `get_multiple` method from BlobStore abstract class.
-    def get_multiple(self, app_id, indexes) -> List[Optional[DABlob]]:
+    def get_multiple(self, app_id, indexes) -> List[Optional[DAShare]]:
         return [
             self.blob_store.get(self.app_id_store[app_id].get(i), None) if self.app_id_store[app_id].get(i) else None for i in indexes
         ]
@@ -1,12 +1,11 @@
 from dataclasses import dataclass
-from hashlib import sha3_256
+from hashlib import blake2b
 from itertools import chain, zip_longest, compress
 from typing import List, Generator, Self, Sequence

 from eth2spec.eip7594.mainnet import Bytes32, KZGCommitment as Commitment
 from py_ecc.bls import G2ProofOfPossession


 type BlobId = bytes

 class NodeId(Bytes32):
@@ -41,13 +40,13 @@ class Bitfield(List[bool]):
     pass


-def build_blob_id(aggregated_column_commitment: Commitment, row_commitments: Sequence[Commitment]) -> BlobId:
-    hasher = sha3_256()
-    hasher.update(bytes(aggregated_column_commitment))
+def build_blob_id(row_commitments: Sequence[Commitment]) -> BlobId:
+    hasher = blake2b(digest_size=32)
     for c in row_commitments:
         hasher.update(bytes(c))
     return hasher.digest()


 class NomosDaG2ProofOfPossession(G2ProofOfPossession):
     # Domain specific tag for Nomos DA protocol
     DST = b"NOMOS_DA_AVAIL"
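v1.1 derives the blob ID from the row commitments alone, hashed with BLAKE2b-256; the aggregated column commitment is gone. A minimal sketch of the new derivation, using dummy 48-byte stand-ins for real KZG commitments:

    from hashlib import blake2b

    # Dummy 48-byte placeholders for compressed G1 KZG commitments (illustration only).
    row_commitments = [bytes([i]) * 48 for i in range(4)]

    hasher = blake2b(digest_size=32)
    for commitment in row_commitments:
        hasher.update(commitment)
    blob_id = hasher.digest()  # 32-byte BlobId, as in build_blob_id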
@@ -1,9 +1,9 @@
 from dataclasses import dataclass
 from typing import List, Generator

-from da.common import NodeId
+from da.common import NodeId, Column
 from da.encoder import EncodedData
-from da.verifier import DABlob
+from da.verifier import DAShare


 @dataclass
@@ -18,29 +18,21 @@ class Dispersal:
         # sort over public keys
         self.settings.nodes_ids.sort()

-    def _prepare_data(self, encoded_data: EncodedData) -> Generator[DABlob, None, None]:
-        assert len(encoded_data.column_commitments) == len(self.settings.nodes_ids)
-        assert len(encoded_data.aggregated_column_proofs) == len(self.settings.nodes_ids)
+    def _prepare_data(self, encoded_data: EncodedData) -> Generator[DAShare, None, None]:
         columns = encoded_data.extended_matrix.columns
-        column_commitments = encoded_data.column_commitments
         row_commitments = encoded_data.row_commitments
-        rows_proofs = encoded_data.row_proofs
-        aggregated_column_commitment = encoded_data.aggregated_column_commitment
-        aggregated_column_proofs = encoded_data.aggregated_column_proofs
-        blobs_data = zip(columns, column_commitments, zip(*rows_proofs), aggregated_column_proofs)
-        for column_idx, (column, column_commitment, row_proofs, column_proof) in enumerate(blobs_data):
-            blob = DABlob(
-                column,
+        column_proofs = encoded_data.combined_column_proofs
+        blobs_data = zip(columns, column_proofs)
+        for column_idx, (column, proof) in enumerate(blobs_data):
+            blob = DAShare(
+                Column(column),
                 column_idx,
-                column_commitment,
-                aggregated_column_commitment,
-                column_proof,
-                row_commitments,
-                row_proofs
+                proof,
+                row_commitments
             )
             yield blob

-    def _send_and_await_response(self, node: NodeId, blob: DABlob) -> bool:
+    def _send_and_await_response(self, node: NodeId, blob: DAShare) -> bool:
         pass

     def disperse(self, encoded_data: EncodedData):
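With the aggregated column material dropped, each DAShare carries only its column, the column index, one combined proof, and the row commitments shared by every share. A hedged sketch of the loop this enables; `dispersal` and `encoded_data` are assumed to exist, and `disperse` presumably wires this up internally:

    # Hypothetical driver: one DAShare per extended-matrix column, sent to the
    # node whose position matches the column index.
    for node_id, share in zip(dispersal.settings.nodes_ids, dispersal._prepare_data(encoded_data)):
        dispersal._send_and_await_response(node_id, share)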
@@ -1,15 +1,18 @@
 from dataclasses import dataclass
-from itertools import batched, chain
-from typing import List, Sequence, Tuple
+from hashlib import blake2b
+from itertools import batched
+from typing import List, Tuple

 from eth2spec.eip7594.mainnet import KZGCommitment as Commitment, KZGProof as Proof, BLSFieldElement

 from da.common import ChunksMatrix, Chunk, Row
 from da.kzg_rs import kzg, rs
+from da.kzg_rs.bdfg_proving import derive_challenge
 from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY, BYTES_PER_FIELD_ELEMENT
 from da.kzg_rs.poly import Polynomial
+from da.kzg_rs.bdfg_proving import compute_combined_polynomial

+# Domain separation tag
+_DST = b"NOMOS_DA_V1"

 @dataclass
 class DAEncoderParams:
@@ -23,10 +26,7 @@ class EncodedData:
     chunked_data: ChunksMatrix
     extended_matrix: ChunksMatrix
     row_commitments: List[Commitment]
-    row_proofs: List[List[Proof]]
-    column_commitments: List[Commitment]
-    aggregated_column_commitment: Commitment
-    aggregated_column_proofs: List[Proof]
+    combined_column_proofs: List[Proof]


 class DAEncoder:
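The slimmer EncodedData is the payoff: v1.0 carried one row proof per element of the extended matrix plus one aggregated proof per extended column, while v1.1 carries a single combined proof per extended column. A back-of-the-envelope count with illustrative parameters (not taken from the spec):

    # Illustrative proof counts only; rows and column_count are hypothetical.
    rows, column_count = 16, 32
    extended_columns = column_count * 2

    v1_0_proofs = rows * extended_columns + extended_columns  # row_proofs + aggregated_column_proofs
    v1_1_proofs = extended_columns                            # combined_column_proofs only
    print(v1_0_proofs, v1_1_proofs)  # 1088 vs. 64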
@@ -65,72 +65,25 @@ class DAEncoder:
         )
         return ChunksMatrix(__rs_encode_row(row) for row in chunks_matrix)

-    @staticmethod
-    def _compute_rows_proofs(
-            chunks_matrix: ChunksMatrix,
-            polynomials: Sequence[Polynomial],
-            row_commitments: Sequence[Commitment]
-    ) -> List[List[Proof]]:
-        proofs = []
-        for row, poly, commitment in zip(chunks_matrix, polynomials, row_commitments):
-            proofs.append(
-                [
-                    kzg.generate_element_proof(i, poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
-                    for i in range(len(row))
-                ]
-            )
-        return proofs
-
-    def _compute_column_kzg_commitments(self, chunks_matrix: ChunksMatrix) -> List[Tuple[Polynomial, Commitment]]:
-        return self._compute_row_kzg_commitments(chunks_matrix.transposed())
-
-    @staticmethod
-    def _compute_aggregated_column_commitment(
-            column_commitments: Sequence[Commitment]
-    ) -> Tuple[Polynomial, Commitment]:
-        data = bytes(chain.from_iterable(
-            DAEncoder.hash_commitment_blake2b31(commitment)
-            for commitment in column_commitments
-        ))
-        return kzg.bytes_to_commitment(data, GLOBAL_PARAMETERS)
-
-    @staticmethod
-    def _compute_aggregated_column_proofs(
-            polynomial: Polynomial,
-            column_commitments: Sequence[Commitment],
-    ) -> List[Proof]:
+    def _compute_combined_column_proofs(self, combined_poly: Polynomial) -> List[Proof]:
+        total_cols = self.params.column_count * 2
         return [
-            kzg.generate_element_proof(i, polynomial, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
-            for i in range(len(column_commitments))
+            kzg.generate_element_proof(i, combined_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
+            for i in range(total_cols)
         ]

     def encode(self, data: bytes) -> EncodedData:
         chunks_matrix = self._chunkify_data(data)
         row_polynomials, row_commitments = zip(*self._compute_row_kzg_commitments(chunks_matrix))
         extended_matrix = self._rs_encode_rows(chunks_matrix)
-        row_proofs = self._compute_rows_proofs(extended_matrix, row_polynomials, row_commitments)
-        column_polynomials, column_commitments = zip(*self._compute_column_kzg_commitments(extended_matrix))
-        aggregated_column_polynomial, aggregated_column_commitment = (
-            self._compute_aggregated_column_commitment(column_commitments)
-        )
-        aggregated_column_proofs = self._compute_aggregated_column_proofs(
-            aggregated_column_polynomial, column_commitments
-        )
+        h = derive_challenge(row_commitments)
+        combined_poly = compute_combined_polynomial(row_polynomials, h)
+        combined_column_proofs = self._compute_combined_column_proofs(combined_poly)
         result = EncodedData(
             data,
             chunks_matrix,
             extended_matrix,
             row_commitments,
-            row_proofs,
-            column_commitments,
-            aggregated_column_commitment,
-            aggregated_column_proofs
+            combined_column_proofs
         )
         return result

     @staticmethod
     def hash_commitment_blake2b31(commitment: Commitment) -> bytes:
         return (
             # digest size must be 31 bytes as we cannot encode 32 without risking overflowing the BLS_MODULUS
             int.from_bytes(blake2b(bytes(commitment), digest_size=31).digest())
         ).to_bytes(32, byteorder="big")  # rewrap into 32 padded bytes for the field elements, EC library dependant
da/kzg_rs/bdfg_proving.py (new file, 61 lines)
@@ -0,0 +1,61 @@
+from hashlib import blake2b
+from typing import List, Sequence
+
+from da.common import Chunk
+from da.kzg_rs.common import BLS_MODULUS
+
+from eth2spec.eip7594.mainnet import BLSFieldElement, KZGCommitment as Commitment
+from eth2spec.utils import bls
+
+from da.kzg_rs.poly import Polynomial
+
+
+def derive_challenge(row_commitments: List[Commitment]) -> BLSFieldElement:
+    """
+    Derive a Fiat–Shamir challenge scalar h from the row commitments:
+        h = BLAKE2b-31( DST || bytes(com1) || bytes(com2) || ... )
+    """
+    _DST = b"NOMOS_DA_V1"
+    h = blake2b(digest_size=31)
+    h.update(_DST)
+    for com in row_commitments:
+        h.update(bytes(com))
+    digest31 = h.digest()  # 31 bytes
+    # pad to 32 bytes for field element conversion
+    padded = digest31 + b'\x00'
+    return BLSFieldElement.from_bytes(padded)
+
+
+def combine_commitments(row_commitments: List[Commitment], h: BLSFieldElement) -> Commitment:
+    combined_commitment = bls.bytes48_to_G1(row_commitments[0])
+    power = int(h) % BLS_MODULUS
+    for commitment in row_commitments[1:]:
+        commitment = bls.bytes48_to_G1(commitment)
+        combined_commitment = bls.add(combined_commitment, bls.multiply(commitment, power))
+        power = (power * int(h)) % BLS_MODULUS
+    return bls.G1_to_bytes48(combined_commitment)
+
+
+def compute_combined_polynomial(
+        polys: Sequence[Polynomial], h: BLSFieldElement
+) -> Polynomial:
+    combined_polynomial = polys[0]
+    h_int = int(h)  # raw integer challenge
+    int_pow = 1
+    for poly in polys[1:]:
+        int_pow = (int_pow * h_int) % BLS_MODULUS
+        combined_polynomial = combined_polynomial + Polynomial([int_pow * coeff for coeff in poly], BLS_MODULUS)
+    return combined_polynomial
+
+def compute_combined_evaluation(
+        evals: Sequence[Chunk],
+        h: BLSFieldElement
+) -> BLSFieldElement:
+    combined_eval_int = 0
+    power_int = 1
+    h_int = int(h) % BLS_MODULUS
+    for chunk in evals:
+        chunk_int = int.from_bytes(bytes(chunk), byteorder="big")
+        combined_eval_int = (combined_eval_int + chunk_int * power_int) % BLS_MODULUS
+        power_int = (power_int * h_int) % BLS_MODULUS
+    return BLSFieldElement(combined_eval_int)
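The three helpers rest on one identity: evaluating the combined polynomial sum_i h^i * p_i at a point equals the h-weighted sum of the individual evaluations, so a single KZG proof against the combined commitment covers every row at once. A self-contained toy check of that identity over a small prime field (plain integers, not the real BLS_MODULUS):

    # Toy check: eval(sum_i h^i * p_i, x) == sum_i h^i * eval(p_i, x) (mod P).
    P = 101  # hypothetical small prime standing in for BLS_MODULUS

    def poly_eval(coeffs, x):
        # Horner evaluation of a coefficient-form polynomial mod P.
        acc = 0
        for c in reversed(coeffs):
            acc = (acc * x + c) % P
        return acc

    rows = [[3, 1, 4], [1, 5, 9], [2, 6, 5]]  # three toy row polynomials
    h, x = 7, 13

    # Left side: combine the polynomials coefficient-wise, then evaluate.
    combined = [0, 0, 0]
    hp = 1
    for poly in rows:
        for j, c in enumerate(poly):
            combined[j] = (combined[j] + hp * c) % P
        hp = (hp * h) % P

    # Right side: evaluate each row, then combine the evaluations.
    rhs, hp = 0, 1
    for poly in rows:
        rhs = (rhs + hp * poly_eval(poly, x)) % P
        hp = (hp * h) % P

    assert poly_eval(combined, x) == rhs
    print("batching identity holds")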
@@ -1,3 +1,4 @@
+
 from itertools import zip_longest
 from typing import List, Sequence, Self

@@ -108,4 +109,4 @@ class Polynomial[T]:
         )) % self.modulus

     def evaluation_form(self) -> List[T]:
-        return [self.eval(ROOTS_OF_UNITY[i]) for i in range(len(self))]
+        return [self.eval(ROOTS_OF_UNITY[i]) for i in range(len(self))]
@@ -1,10 +1,9 @@
-from hashlib import sha3_256
 from unittest import TestCase

 from da.encoder import DAEncoderParams, DAEncoder
 from da.test_encoder import TestEncoder
-from da.verifier import DAVerifier, DABlob
-from da.common import NodeId, NomosDaG2ProofOfPossession as bls_pop
+from da.verifier import DAVerifier, DAShare
+from da.common import NodeId
 from da.dispersal import Dispersal, DispersalSettings


@@ -27,7 +26,7 @@ class TestDispersal(TestCase):

         # mock send and await method with local verifiers
         verifiers_res = []
-        def __send_and_await_response(_, blob: DABlob):
+        def __send_and_await_response(_, blob: DAShare):
             verifier = DAVerifier()
             res = verifier.verify(blob)
             verifiers_res.append(res)

@@ -1,14 +1,15 @@
-from itertools import chain, batched
-from random import randrange, randbytes
+from random import randbytes
 from unittest import TestCase

 from eth2spec.deneb.mainnet import bytes_to_bls_field

 from da import encoder
+from da.common import Column
+from da.kzg_rs.bdfg_proving import derive_challenge, compute_combined_polynomial
 from da.encoder import DAEncoderParams, DAEncoder
+from da.verifier import DAVerifier, DAShare
 from eth2spec.eip7594.mainnet import BYTES_PER_FIELD_ELEMENT, BLSFieldElement

-from da.kzg_rs.common import BLS_MODULUS, ROOTS_OF_UNITY
+from da.kzg_rs.common import ROOTS_OF_UNITY
 from da.kzg_rs import kzg, rs


@@ -33,28 +34,19 @@ class TestEncoder(TestCase):
         self.assertEqual(columns_len, column_count)
         chunks_size = (len(data) // encoder_params.bytes_per_chunk) // encoder_params.column_count
         self.assertEqual(len(encoded_data.row_commitments), chunks_size)
-        self.assertEqual(len(encoded_data.row_proofs), chunks_size)
-        self.assertEqual(len(encoded_data.row_proofs[0]), column_count)
-        self.assertIsNotNone(encoded_data.aggregated_column_commitment)
-        self.assertEqual(len(encoded_data.aggregated_column_proofs), columns_len)
-
-        # verify rows
-        for row, proofs, commitment in zip(encoded_data.extended_matrix, encoded_data.row_proofs, encoded_data.row_commitments):
-            for i, (chunk, proof) in enumerate(zip(row, proofs)):
-                self.assertTrue(
-                    kzg.verify_element_proof(bytes_to_bls_field(chunk), commitment, proof, i, ROOTS_OF_UNITY)
-                )
-
-        # verify column aggregation
-        for i, (column, proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.aggregated_column_proofs)):
-            data = DAEncoder.hash_commitment_blake2b31(commitment)
-            kzg.verify_element_proof(
-                bytes_to_bls_field(data),
-                encoded_data.aggregated_column_commitment,
-                proof,
-                i,
-                ROOTS_OF_UNITY
+        verifier = DAVerifier()
+        # verify columns
+        for idx, (column, column_proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.combined_column_proofs)):
+            share = DAShare(
+                column=Column(column),
+                column_idx=idx,
+                combined_column_proof=column_proof,
+                row_commitments=encoded_data.row_commitments
             )
+            verifier.verify(share)


     def test_chunkify(self):
         encoder_settings = DAEncoderParams(column_count=2, bytes_per_chunk=31)
@@ -84,41 +76,15 @@ class TestEncoder(TestCase):
         poly_2 = rs.decode(r2, ROOTS_OF_UNITY, len(poly_1))
         self.assertEqual(poly_1, poly_2)

-    def test_compute_rows_proofs(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        polynomials, commitments = zip(*self.encoder._compute_row_kzg_commitments(chunks_matrix))
-        extended_chunks_matrix = self.encoder._rs_encode_rows(chunks_matrix)
-        original_proofs = self.encoder._compute_rows_proofs(chunks_matrix, polynomials, commitments)
-        extended_proofs = self.encoder._compute_rows_proofs(extended_chunks_matrix, polynomials, commitments)
-        # check original sized matrix
-        for row, poly, commitment, proofs in zip(chunks_matrix, polynomials, commitments, original_proofs):
-            self.assertEqual(len(proofs), len(row))
-            for i, chunk in enumerate(row):
-                self.assertTrue(kzg.verify_element_proof(BLSFieldElement.from_bytes(chunk), commitment, proofs[i], i, ROOTS_OF_UNITY))
-        # check extended matrix
-        for row, poly, commitment, proofs in zip(extended_chunks_matrix, polynomials, commitments, extended_proofs):
-            for i, chunk in enumerate(row):
-                self.assertTrue(kzg.verify_element_proof(BLSFieldElement.from_bytes(chunk), commitment, proofs[i], i, ROOTS_OF_UNITY))
-
-    def test_compute_column_kzg_commitments(self):
+    def test_generate_combined_column_proofs(self):
         chunks_matrix = self.encoder._chunkify_data(self.data)
-        polynomials, commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        self.assertEqual(len(commitments), len(chunks_matrix[0]))
-        self.assertEqual(len(polynomials), len(chunks_matrix[0]))
-
-    def test_generate_aggregated_column_commitments(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        _, column_commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        poly, commitment = self.encoder._compute_aggregated_column_commitment(column_commitments)
-        self.assertIsNotNone(poly)
-        self.assertIsNotNone(commitment)
-
-    def test_generate_aggregated_column_proofs(self):
-        chunks_matrix = self.encoder._chunkify_data(self.data)
-        _, column_commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
-        poly, _ = self.encoder._compute_aggregated_column_commitment(column_commitments)
-        proofs = self.encoder._compute_aggregated_column_proofs(poly, column_commitments)
-        self.assertEqual(len(proofs), len(column_commitments))
+        row_polynomials, row_commitments = zip(*self.encoder._compute_row_kzg_commitments(chunks_matrix))
+        h = derive_challenge(row_commitments)
+        combined_poly = compute_combined_polynomial(row_polynomials, h)
+        proofs = self.encoder._compute_combined_column_proofs(combined_poly)
+        expected_extended_columns = self.params.column_count * 2
+        self.assertEqual(len(proofs), expected_extended_columns)

     def test_encode(self):
         from random import randbytes
@@ -134,4 +100,4 @@ class TestEncoder(TestCase):
             for _ in range(size*encoder_params.column_count)
         )
         )
-        self.assert_encoding(encoder_params, data)
+        self.assert_encoding(encoder_params, data)
@@ -2,9 +2,9 @@ from itertools import chain
 from unittest import TestCase
 from typing import List, Optional

-from da.common import NodeId, build_blob_id, NomosDaG2ProofOfPossession as bls_pop
+from da.common import NodeId, build_blob_id
 from da.api.common import DAApi, BlobMetadata, Metadata
-from da.verifier import DAVerifier, DABlob
+from da.verifier import DAVerifier, DAShare
 from da.api.test_flow import MockStore
 from da.dispersal import Dispersal, DispersalSettings
 from da.test_encoder import TestEncoder
@@ -17,11 +17,11 @@ class DAVerifierWApi:
         self.api = DAApi(self.store)
         self.verifier = DAVerifier()

-    def receive_blob(self, blob: DABlob):
+    def receive_blob(self, blob: DAShare):
         if self.verifier.verify(blob):
-            # Warning: If aggregated col commitment and row commitment are the same,
-            # the build_attestation_message method will produce the same output.
-            blob_id = build_blob_id(blob.aggregated_column_commitment, blob.rows_commitments)
+            blob_id = build_blob_id(blob.row_commitments)
             self.store.populate(blob, blob_id)

     def receive_metadata(self, blob_metadata: BlobMetadata):
@@ -30,7 +30,7 @@ class DAVerifierWApi:
         # in which case all certificates had been already verified by the DA Node.
         self.api.write(blob_metadata.blob_id, blob_metadata.metadata)

-    def read(self, app_id, indexes) -> List[Optional[DABlob]]:
+    def read(self, app_id, indexes) -> List[Optional[DAShare]]:
         return self.api.read(app_id, indexes)


@@ -59,14 +59,14 @@ class TestFullFlow(TestCase):
         encoded_data = DAEncoder(encoding_params).encode(data)

         # mock send and await method with local verifiers
-        def __send_and_await_response(node: int, blob: DABlob):
+        def __send_and_await_response(node: int, blob: DAShare):
             node = self.api_nodes[int.from_bytes(node)]
             node.receive_blob(blob)

         # inject mock send and await method
         self.dispersal._send_and_await_response = __send_and_await_response
         self.dispersal.disperse(encoded_data)
-        blob_id = build_blob_id(encoded_data.aggregated_column_commitment, encoded_data.row_commitments)
+        blob_id = build_blob_id(encoded_data.row_commitments)
         blob_metadata = BlobMetadata(
             blob_id,
             Metadata(app_id, index)
@@ -96,14 +96,14 @@ class TestFullFlow(TestCase):
         encoded_data = DAEncoder(encoding_params).encode(data)

         # mock send and await method with local verifiers
-        def __send_and_await_response(node: int, blob: DABlob):
+        def __send_and_await_response(node: int, blob: DAShare):
             node = self.api_nodes[int.from_bytes(node)]
             return node.receive_blob(blob)

         # inject mock send and await method
         self.dispersal._send_and_await_response = __send_and_await_response
         self.dispersal.disperse(encoded_data)
-        blob_id = build_blob_id(encoded_data.aggregated_column_commitment, encoded_data.row_commitments)
+        blob_id = build_blob_id(encoded_data.row_commitments)

         # Loop through each index and simulate dispersal with the same cert_id but different metadata
         for index in indexes:
@@ -5,7 +5,7 @@ from da.encoder import DAEncoder
 from da.kzg_rs import kzg
 from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY
 from da.test_encoder import TestEncoder
-from da.verifier import DAVerifier, DABlob
+from da.verifier import DAVerifier, DAShare


 class TestVerifier(TestCase):
@@ -13,33 +13,18 @@ class TestVerifier(TestCase):
     def setUp(self):
         self.verifier = DAVerifier()

-    def test_verify_column(self):
-        column = Column(int.to_bytes(i, length=32) for i in range(8))
-        _, column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
-        aggregated_poly, aggregated_column_commitment = kzg.bytes_to_commitment(
-            DAEncoder.hash_commitment_blake2b31(column_commitment), GLOBAL_PARAMETERS
-        )
-        aggregated_proof = kzg.generate_element_proof(0, aggregated_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
-        self.assertTrue(
-            self.verifier._verify_column(
-                column, 0, column_commitment, aggregated_column_commitment, aggregated_proof,
-            )
-        )
-
     def test_verify(self):
         _ = TestEncoder()
         _.setUp()
         encoded_data = _.encoder.encode(_.data)
-        for i, column in enumerate(encoded_data.chunked_data.columns):
+        for i, column in enumerate(encoded_data.extended_matrix.columns):
             verifier = DAVerifier()
-            da_blob = DABlob(
+            da_blob = DAShare(
                 Column(column),
                 i,
-                encoded_data.column_commitments[i],
-                encoded_data.aggregated_column_commitment,
-                encoded_data.aggregated_column_proofs[i],
+                encoded_data.combined_column_proofs[i],
                 encoded_data.row_commitments,
-                [row[i] for row in encoded_data.row_proofs],
             )
             self.assertIsNotNone(verifier.verify(da_blob))

@@ -47,26 +32,20 @@ class TestVerifier(TestCase):
         _ = TestEncoder()
         _.setUp()
         encoded_data = _.encoder.encode(_.data)
-        columns = enumerate(encoded_data.chunked_data.columns)
+        columns = enumerate(encoded_data.extended_matrix.columns)
         i, column = next(columns)
-        da_blob = DABlob(
+        da_blob = DAShare(
             Column(column),
             i,
-            encoded_data.column_commitments[i],
-            encoded_data.aggregated_column_commitment,
-            encoded_data.aggregated_column_proofs[i],
+            encoded_data.combined_column_proofs[i],
             encoded_data.row_commitments,
-            [row[i] for row in encoded_data.row_proofs],
         )
         self.assertIsNotNone(self.verifier.verify(da_blob))
         for i, column in columns:
-            da_blob = DABlob(
+            da_blob = DAShare(
                 Column(column),
                 i,
-                encoded_data.column_commitments[i],
-                encoded_data.aggregated_column_commitment,
-                encoded_data.aggregated_column_proofs[i],
+                encoded_data.combined_column_proofs[i],
                 encoded_data.row_commitments,
-                [row[i] for row in encoded_data.row_proofs],
             )
-            self.assertTrue(self.verifier.verify(da_blob))
+            self.assertIsNotNone(self.verifier.verify(da_blob))
da/verifier.py (115 lines changed)
@@ -1,106 +1,49 @@
 from dataclasses import dataclass
-from hashlib import sha3_256
-from typing import List, Sequence, Set
+from typing import List

-from eth2spec.deneb.mainnet import BLSFieldElement
 from eth2spec.eip7594.mainnet import (
     KZGCommitment as Commitment,
     KZGProof as Proof,
 )

-import da.common
-from da.common import Column, Chunk, BlobId
-from da.encoder import DAEncoder
+from da.common import Column, BlobId, build_blob_id
 from da.kzg_rs import kzg
-from da.kzg_rs.common import ROOTS_OF_UNITY, GLOBAL_PARAMETERS, BLS_MODULUS
+from da.kzg_rs.bdfg_proving import combine_commitments, derive_challenge, compute_combined_evaluation
+from da.kzg_rs.common import ROOTS_OF_UNITY

+# Domain separation tag
+_DST = b"NOMOS_DA_V1"

 @dataclass
-class DABlob:
+class DAShare:
     column: Column
     column_idx: int
-    column_commitment: Commitment
-    aggregated_column_commitment: Commitment
-    aggregated_column_proof: Proof
-    rows_commitments: List[Commitment]
-    rows_proofs: List[Proof]
-
-    def blob_id(self) -> bytes:
-        return da.common.build_blob_id(self.aggregated_column_commitment, self.rows_commitments)
-
-    def column_id(self) -> bytes:
-        return sha3_256(self.column.as_bytes()).digest()
+    combined_column_proof: Proof
+    row_commitments: List[Commitment]
+
+    def blob_id(self) -> BlobId:
+        return build_blob_id(self.row_commitments)


 class DAVerifier:
     @staticmethod
-    def _verify_column(
-            column: Column,
-            column_idx: int,
-            column_commitment: Commitment,
-            aggregated_column_commitment: Commitment,
-            aggregated_column_proof: Proof,
-    ) -> bool:
-        # 1. compute commitment for column
-        _, computed_column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
-        # 2. If computed column commitment != column commitment, fail
-        if column_commitment != computed_column_commitment:
-            return False
-        # 3. compute column hash
-        column_hash = DAEncoder.hash_commitment_blake2b31(column_commitment)
-        # 4. Check proof with commitment and proof over the aggregated column commitment
-        chunk = BLSFieldElement.from_bytes(column_hash)
+    def verify(blob: DAShare) -> bool:
+        """
+        Verifies that blob.column at index blob.column_idx is consistent
+        with the row commitments and the combined column proof.
+
+        Returns True if verification succeeds, False otherwise.
+        """
+        # 1. Derive challenge
+        h = derive_challenge(blob.row_commitments)
+        # 2. Reconstruct combined commitment: combined_commitment = sum_{i=0..l-1} h^i * row_commitments[i]
+        combined_commitment = combine_commitments(blob.row_commitments, h)
+        # 3. Compute combined evaluation v = sum_{i=0..l-1} (h^i * column_data[i])
+        combined_eval_point = compute_combined_evaluation(blob.column, h)
+        # 4. Verify the single KZG proof for evaluation at point w^{column_idx}
         return kzg.verify_element_proof(
-            chunk, aggregated_column_commitment, aggregated_column_proof, column_idx, ROOTS_OF_UNITY
+            combined_eval_point,
+            combined_commitment,
+            blob.combined_column_proof,
+            blob.column_idx,
+            ROOTS_OF_UNITY
         )
-
-    @staticmethod
-    def _verify_chunk(chunk: Chunk, commitment: Commitment, proof: Proof, index: int) -> bool:
-        chunk = BLSFieldElement(int.from_bytes(bytes(chunk)) % BLS_MODULUS)
-        return kzg.verify_element_proof(chunk, commitment, proof, index, ROOTS_OF_UNITY)
-
-    @staticmethod
-    def _verify_chunks(
-            chunks: Sequence[Chunk],
-            commitments: Sequence[Commitment],
-            proofs: Sequence[Proof],
-            index: int
-    ) -> bool:
-        if not (len(chunks) == len(commitments) == len(proofs)):
-            return False
-        for chunk, commitment, proof in zip(chunks, commitments, proofs):
-            if not DAVerifier._verify_chunk(chunk, commitment, proof, index):
-                return False
-        return True
-
-    def verify(self, blob: DABlob) -> bool:
-        """
-        Verify the integrity of the given blob.
-
-        This function must be idempotent. The implementer should ensure that
-        repeated verification attempts do not result in inconsistent states.
-
-        Args:
-            blob (DABlob): The blob to verify.
-
-        Returns:
-            bool: True if the blob is verified successfully, False otherwise.
-        """
-        is_column_verified = DAVerifier._verify_column(
-            blob.column,
-            blob.column_idx,
-            blob.column_commitment,
-            blob.aggregated_column_commitment,
-            blob.aggregated_column_proof,
-        )
-        if not is_column_verified:
-            return False
-
-        are_chunks_verified = DAVerifier._verify_chunks(
-            blob.column, blob.rows_commitments, blob.rows_proofs, blob.column_idx
-        )
-        if not are_chunks_verified:
-            return False
-
-        # Ensure idempotency: Implementers should define how to avoid redundant verification.
-        return True
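With the chunk-level checks gone, verification needs only the column, its index, one combined proof, and the row commitments. A condensed end-to-end sketch mirroring test_verifier.py; the parameters and payload here are hypothetical (the tests derive theirs from TestEncoder):

    from da.common import Column
    from da.encoder import DAEncoder, DAEncoderParams
    from da.verifier import DAVerifier, DAShare

    # Hypothetical parameters: 4 columns of 31-byte chunks, 4 rows of data.
    params = DAEncoderParams(column_count=4, bytes_per_chunk=31)
    data = bytes(range(31)) * (4 * 4)

    encoded = DAEncoder(params).encode(data)
    for idx, column in enumerate(encoded.extended_matrix.columns):
        share = DAShare(
            Column(column),
            idx,
            encoded.combined_column_proofs[idx],
            encoded.row_commitments,
        )
        assert DAVerifier().verify(share)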