NomosDA spec v1 updates (#117)

* Modify and rename the hash_commitment method: we now hash only the column commitment itself (see the before/after sketch just below the change stats).

* Fix calls

* Remove certificate from verifier

* Update verifier

* Fix tests

* Fix verifier imports

* Fix more imports

* Fix dispersal

* Fix more imports

* Fix missing parameter in dispersal

* Fix tests

* Full flow renaming

* Disperse encoded data in full flow test

* Make DA verification idempotent (#118)

---------

Co-authored-by: Gusto <bacvinka@gmail.com>
Co-authored-by: gusto <bacv@users.noreply.github.com>
Daniel Sanchez 2025-01-29 10:42:53 +00:00 committed by GitHub
parent 5434fcb315
commit 3f3427ee9f
10 changed files with 119 additions and 232 deletions
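
Taken together, these changes replace the v0 attestation/certificate flow with direct blob identifiers: dispersal no longer aggregates signatures into a Certificate, and verification becomes a plain boolean check that is safe to repeat. A rough before/after sketch (the objects dispersal, encoded_data, verifier and blob are hypothetical stand-ins, wired up as in the tests further down):

    # before (v0): dispersal collected node attestations into a certificate
    #   certificate = dispersal.disperse(encoded_data)
    #   certificate.verify(nodes_public_keys)

    # after (v1): dispersal just sends the blobs; data is addressed by its blob id
    dispersal.disperse(encoded_data)  # returns nothing
    blob_id = build_blob_id(encoded_data.aggregated_column_commitment,
                            encoded_data.row_commitments)
    assert verifier.verify(blob)  # bool; idempotent per #118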

File: da/api/common.py

@@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Optional, List, Sequence
from da.common import Certificate
from da.common import BlobId
from da.verifier import DABlob
@@ -15,16 +15,16 @@ class Metadata:
@dataclass
class VID:
# da certificate id
cert_id: bytes
class BlobMetadata:
# da blob id
blob_id: BlobId
# application + index information
metadata: Metadata
class BlobStore(ABC):
@abstractmethod
def add(self, certificate: Certificate, metadata: Metadata):
def add(self, id: BlobId, metadata: Metadata):
"""
Raises: ValueError if there is already a registered certificate for the given metadata
"""
@@ -39,14 +39,14 @@ class DAApi:
def __init__(self, bs: BlobStore):
self.store = bs
def write(self, certificate: Certificate, metadata: Metadata):
def write(self, id: BlobId, metadata: Metadata):
"""
Write method should be used by a service that is able to retrieve verified certificates
from the latest block. Once a certificate is retrieved, the API creates a relation between
the blob of the original data, the certificate, and the index for the certificate's app_id.
Raises: ValueError if there is already a registered certificate for a given metadata
"""
self.store.add(certificate, metadata)
self.store.add(id, metadata)
def read(self, app_id, indexes) -> List[Optional[DABlob]]:
"""

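For orientation, the new id-keyed write/read path can be exercised roughly as follows. This is a sketch against the MockStore used in the tests below, with the positional Metadata(app_id, index) arguments the tests use; it is not part of the spec itself:

    store = MockStore()
    api = DAApi(store)
    blob_id = b"11" * 32                    # BlobId is plain bytes
    store.populate("hello", blob_id)        # make the blob locally available
    api.write(blob_id, Metadata(1, 1))      # raises ValueError for a duplicate (app_id, index)
    assert api.read(1, [1]) == ["hello"]
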
File: da/api/test_flow.py

@@ -14,8 +14,8 @@ class MockStore(BlobStore):
self.blob_store = {}
self.app_id_store = defaultdict(dict)
def populate(self, blob, cert_id: bytes):
self.blob_store[cert_id] = blob
def populate(self, blob, blob_id: BlobId):
self.blob_store[blob_id] = blob
# Implements `add` method from BlobStore abstract class.
def add(self, cert_id: bytes, metadata: Metadata):
@@ -35,36 +35,36 @@ class MockStore(BlobStore):
class TestFlow(TestCase):
def test_api_write_read(self):
expected_blob = "hello"
cert_id = b"11"*32
blob_id = b"11"*32
app_id = 1
idx = 1
mock_meta = Metadata(1, 1)
mock_store = MockStore()
mock_store.populate(expected_blob, cert_id)
mock_store.populate(expected_blob, blob_id)
api = DAApi(mock_store)
api.write(cert_id, mock_meta)
api.write(blob_id, mock_meta)
blobs = api.read(app_id, [idx])
self.assertEqual([expected_blob], blobs)
def test_same_index(self):
expected_blob = "hello"
cert_id = b"11"*32
blob_id = b"11"*32
app_id = 1
idx = 1
mock_meta = Metadata(1, 1)
mock_store = MockStore()
mock_store.populate(expected_blob, cert_id)
mock_store.populate(expected_blob, blob_id)
api = DAApi(mock_store)
api.write(cert_id, mock_meta)
api.write(blob_id, mock_meta)
with self.assertRaises(ValueError):
api.write(cert_id, mock_meta)
api.write(blob_id, mock_meta)
blobs = api.read(app_id, [idx])
@@ -72,7 +72,7 @@ class TestFlow(TestCase):
def test_multiple_indexes_same_data(self):
expected_blob = "hello"
cert_id = b"11"*32
blob_id = b"11"*32
app_id = 1
idx1 = 1
idx2 = 2
@@ -80,13 +80,13 @@
mock_meta2 = Metadata(app_id, idx2)
mock_store = MockStore()
mock_store.populate(expected_blob, cert_id)
mock_store.populate(expected_blob, blob_id)
api = DAApi(mock_store)
api.write(cert_id, mock_meta1)
mock_store.populate(expected_blob, cert_id)
api.write(cert_id, mock_meta2)
api.write(blob_id, mock_meta1)
mock_store.populate(expected_blob, blob_id)
api.write(blob_id, mock_meta2)
blobs_idx1 = api.read(app_id, [idx1])
blobs_idx2 = api.read(app_id, [idx2])

File: da/common.py

@@ -7,6 +7,8 @@ from eth2spec.eip7594.mainnet import Bytes32, KZGCommitment as Commitment
from py_ecc.bls import G2ProofOfPossession
type BlobId = bytes
class NodeId(Bytes32):
pass
@@ -34,43 +36,12 @@ class ChunksMatrix(List[Row | Column]):
return ChunksMatrix(self.columns)
BLSPublicKey = bytes
BLSPrivateKey = int
BLSSignature = bytes
class Bitfield(List[bool]):
pass
@dataclass
class Attestation:
signature: BLSSignature
@dataclass
class Certificate:
aggregated_signatures: BLSSignature
signers: Bitfield
aggregated_column_commitment: Commitment
row_commitments: List[Commitment]
def id(self) -> bytes:
return build_attestation_message(self.aggregated_column_commitment, self.row_commitments)
def verify(self, nodes_public_keys: List[BLSPublicKey]) -> bool:
"""
List of nodes public keys should be a trusted list of verified proof of possession keys.
Otherwise, we could fall under the Rogue Key Attack
`assert all(bls_pop.PopVerify(pk, proof) for pk, proof in zip(node_public_keys, pops))`
"""
# we sort them as the signers bitfield is sorted by the public keys as well
signers_keys = list(compress(sorted(nodes_public_keys), self.signers))
message = build_attestation_message(self.aggregated_column_commitment, self.row_commitments)
return NomosDaG2ProofOfPossession.AggregateVerify(signers_keys, [message]*len(signers_keys), self.aggregated_signatures)
def build_attestation_message(aggregated_column_commitment: Commitment, row_commitments: Sequence[Commitment]) -> bytes:
def build_blob_id(aggregated_column_commitment: Commitment, row_commitments: Sequence[Commitment]) -> BlobId:
hasher = sha3_256()
hasher.update(bytes(aggregated_column_commitment))
for c in row_commitments:

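The hunk above is cut off mid-function. Matching the build_attestation_message body it renames, the function presumably finishes by absorbing each row commitment and returning the digest; a sketch under that assumption:

    def build_blob_id(aggregated_column_commitment: Commitment,
                      row_commitments: Sequence[Commitment]) -> BlobId:
        hasher = sha3_256()
        hasher.update(bytes(aggregated_column_commitment))
        for c in row_commitments:
            hasher.update(bytes(c))
        return hasher.digest()  # 32-byte sha3-256 digest used as the BlobId
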
File: da/dispersal.py

@@ -1,16 +1,14 @@
from dataclasses import dataclass
from hashlib import sha3_256
from typing import List, Optional, Generator, Sequence
from typing import List, Generator
from da.common import Certificate, NodeId, BLSPublicKey, Bitfield, build_attestation_message, NomosDaG2ProofOfPossession as bls_pop
from da.common import NodeId
from da.encoder import EncodedData
from da.verifier import DABlob, Attestation
from da.verifier import DABlob
@dataclass
class DispersalSettings:
nodes_ids: List[NodeId]
nodes_pubkey: List[BLSPublicKey]
threshold: int
@@ -18,9 +16,7 @@ class Dispersal:
def __init__(self, settings: DispersalSettings):
self.settings = settings
# sort over public keys
self.settings.nodes_ids, self.settings.nodes_pubkey = zip(
*sorted(zip(self.settings.nodes_ids, self.settings.nodes_pubkey), key=lambda x: x[1])
)
self.settings.nodes_ids.sort()
def _prepare_data(self, encoded_data: EncodedData) -> Generator[DABlob, None, None]:
assert len(encoded_data.column_commitments) == len(self.settings.nodes_ids)
@@ -32,9 +28,10 @@ class Dispersal:
aggregated_column_commitment = encoded_data.aggregated_column_commitment
aggregated_column_proofs = encoded_data.aggregated_column_proofs
blobs_data = zip(columns, column_commitments, zip(*rows_proofs), aggregated_column_proofs)
for (column, column_commitment, row_proofs, column_proof) in blobs_data:
for column_idx, (column, column_commitment, row_proofs, column_proof) in enumerate(blobs_data):
blob = DABlob(
column,
column_idx,
column_commitment,
aggregated_column_commitment,
column_proof,
@@ -43,48 +40,14 @@
)
yield blob
def _send_and_await_response(self, node: NodeId, blob: DABlob) -> Optional[Attestation]:
def _send_and_await_response(self, node: NodeId, blob: DABlob) -> bool:
pass
def _build_certificate(
self,
encoded_data: EncodedData,
attestations: Sequence[Attestation],
signers: Bitfield
) -> Certificate:
assert len(attestations) >= self.settings.threshold
assert len(attestations) == signers.count(True)
aggregated = bls_pop.Aggregate([attestation.signature for attestation in attestations])
return Certificate(
aggregated_signatures=aggregated,
signers=signers,
aggregated_column_commitment=encoded_data.aggregated_column_commitment,
row_commitments=encoded_data.row_commitments
)
@staticmethod
def _verify_attestation(public_key: BLSPublicKey, attested_message: bytes, attestation: Attestation) -> bool:
return bls_pop.Verify(public_key, attested_message, attestation.signature)
@staticmethod
def _build_attestation_message(encoded_data: EncodedData) -> bytes:
return build_attestation_message(encoded_data.aggregated_column_commitment, encoded_data.row_commitments)
def disperse(self, encoded_data: EncodedData) -> Optional[Certificate]:
attestations = []
attested_message = self._build_attestation_message(encoded_data)
signed = Bitfield(False for _ in range(len(self.settings.nodes_ids)))
def disperse(self, encoded_data: EncodedData):
blob_data = zip(
range(len(self.settings.nodes_ids)),
self.settings.nodes_ids,
self.settings.nodes_pubkey,
self._prepare_data(encoded_data)
)
for i, node, pk, blob in blob_data:
if attestation := self._send_and_await_response(node, blob):
if self._verify_attestation(pk, attested_message, attestation):
# mark as received
signed[i] = True
attestations.append(attestation)
if len(attestations) >= self.settings.threshold:
return self._build_certificate(encoded_data, attestations, signed)
for node, blob in blob_data:
self._send_and_await_response(node, blob)

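Since disperse no longer returns a certificate, any feedback has to be collected at the transport layer. The tests below do exactly that by injecting a local responder; condensed into a sketch:

    results = []
    def fake_send(node_id, blob):              # hypothetical stand-in for the network round trip
        ok = DAVerifier().verify(blob)
        results.append(ok)
        return ok

    dispersal._send_and_await_response = fake_send
    dispersal.disperse(encoded_data)
    assert all(results)                        # every node verified its column
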
File: da/encoder.py

@@ -5,9 +5,9 @@ from hashlib import blake2b
from eth2spec.eip7594.mainnet import KZGCommitment as Commitment, KZGProof as Proof, BLSFieldElement
from da.common import ChunksMatrix, Chunk, Row, Column
from da.common import ChunksMatrix, Chunk, Row
from da.kzg_rs import kzg, rs
from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY, BLS_MODULUS, BYTES_PER_FIELD_ELEMENT
from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY, BYTES_PER_FIELD_ELEMENT
from da.kzg_rs.poly import Polynomial
@@ -86,11 +86,11 @@ class DAEncoder:
@staticmethod
def _compute_aggregated_column_commitment(
chunks_matrix: ChunksMatrix, column_commitments: Sequence[Commitment]
column_commitments: Sequence[Commitment]
) -> Tuple[Polynomial, Commitment]:
data = bytes(chain.from_iterable(
DAEncoder.hash_column_and_commitment(column, commitment)
for column, commitment in zip(chunks_matrix.columns, column_commitments)
DAEncoder.hash_commitment_blake2b31(commitment)
for commitment in column_commitments
))
return kzg.bytes_to_commitment(data, GLOBAL_PARAMETERS)
@@ -111,7 +111,7 @@ class DAEncoder:
row_proofs = self._compute_rows_proofs(extended_matrix, row_polynomials, row_commitments)
column_polynomials, column_commitments = zip(*self._compute_column_kzg_commitments(extended_matrix))
aggregated_column_polynomial, aggregated_column_commitment = (
self._compute_aggregated_column_commitment(extended_matrix, column_commitments)
self._compute_aggregated_column_commitment(column_commitments)
)
aggregated_column_proofs = self._compute_aggregated_column_proofs(
aggregated_column_polynomial, column_commitments
@@ -129,8 +129,8 @@
return result
@staticmethod
def hash_column_and_commitment(column: Column, commitment: Commitment) -> bytes:
def hash_commitment_blake2b31(commitment: Commitment) -> bytes:
return (
# digest size must be 31 bytes as we cannot encode 32 without risking overflowing the BLS_MODULUS
int.from_bytes(blake2b(column.as_bytes() + bytes(commitment), digest_size=31).digest())
).to_bytes(32, byteorder="big")
int.from_bytes(blake2b(bytes(commitment), digest_size=31).digest())
).to_bytes(32, byteorder="big") # rewrap into 32 padded bytes for the field elements, EC library dependent

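The 31-byte digest is deliberate: 31 bytes is 248 bits, so the resulting integer is always below the roughly 255-bit BLS_MODULUS and maps to a field element without reduction; it is then left-padded back to 32 bytes in the encoding the EC library expects. A standalone sketch of the renamed helper:

    from hashlib import blake2b

    def hash_commitment_blake2b31(commitment) -> bytes:
        digest = blake2b(bytes(commitment), digest_size=31).digest()  # 248 bits < BLS_MODULUS
        return int.from_bytes(digest, byteorder="big").to_bytes(32, byteorder="big")
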
File: da/test_dispersal.py

@@ -4,71 +4,37 @@ from unittest import TestCase
from da.encoder import DAEncoderParams, DAEncoder
from da.test_encoder import TestEncoder
from da.verifier import DAVerifier, DABlob
from da.common import NodeId, Attestation, Bitfield, NomosDaG2ProofOfPossession as bls_pop
from da.dispersal import Dispersal, EncodedData, DispersalSettings
from da.common import NodeId, NomosDaG2ProofOfPossession as bls_pop
from da.dispersal import Dispersal, DispersalSettings
class TestDispersal(TestCase):
def setUp(self):
self.n_nodes = 16
self.nodes_ids = [NodeId(x.to_bytes(length=32, byteorder='big')) for x in range(self.n_nodes)]
self.secret_keys = list(range(1, self.n_nodes+1))
self.public_keys = [bls_pop.SkToPk(sk) for sk in self.secret_keys]
# sort by pk as we do in dispersal
self.secret_keys, self.public_keys = zip(
*sorted(zip(self.secret_keys, self.public_keys), key=lambda x: x[1])
)
dispersal_settings = DispersalSettings(
self.nodes_ids,
self.public_keys,
self.n_nodes // 2 + 1
)
self.dispersal = Dispersal(dispersal_settings)
self.encoder_test = TestEncoder()
self.encoder_test.setUp()
def test_build_certificate_insufficient_attestations(self):
with self.assertRaises(AssertionError):
self.dispersal._build_certificate(None, [], [])
def test_build_certificate_enough_attestations(self):
mock_encoded_data = EncodedData(
None, None, None, [], [], [], bytes(b"f"*48), []
)
mock_message = sha3_256(mock_encoded_data.aggregated_column_commitment).digest()
mock_attestations = [Attestation(bls_pop.Sign(sk, mock_message)) for sk in self.secret_keys]
certificate = self.dispersal._build_certificate(
mock_encoded_data,
mock_attestations,
Bitfield([True for _ in range(len(self.secret_keys))])
)
self.assertIsNotNone(certificate)
self.assertEqual(certificate.aggregated_column_commitment, mock_encoded_data.aggregated_column_commitment)
self.assertEqual(certificate.row_commitments, [])
self.assertIsNotNone(certificate.aggregated_signatures)
self.assertTrue(
certificate.verify(self.public_keys)
)
def test_disperse(self):
data = self.encoder_test.data
encoding_params = DAEncoderParams(column_count=self.n_nodes // 2, bytes_per_chunk=31)
encoded_data = DAEncoder(encoding_params).encode(data)
# mock send and await method with local verifiers
def __send_and_await_response(node: NodeId, blob: DABlob):
sk = self.secret_keys[int.from_bytes(node)]
verifier = DAVerifier(sk, self.public_keys)
return verifier.verify(blob)
verifiers_res = []
def __send_and_await_response(_, blob: DABlob):
verifier = DAVerifier()
res = verifier.verify(blob)
verifiers_res.append(res)
return res
# inject mock send and await method
self.dispersal._send_and_await_response = __send_and_await_response
certificate = self.dispersal.disperse(encoded_data)
self.assertIsNotNone(certificate)
self.assertTrue(certificate.verify(self.public_keys)
)
self.assertEqual(
certificate.signers,
[True if i < self.dispersal.settings.threshold else False for i in range(self.n_nodes)]
)
self.dispersal.disperse(encoded_data)
for res in verifiers_res:
self.assertTrue(res)

File: da/test_encoder.py

@@ -47,7 +47,7 @@ class TestEncoder(TestCase):
# verify column aggregation
for i, (column, proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.aggregated_column_proofs)):
data = DAEncoder.hash_column_and_commitment(column, commitment)
data = DAEncoder.hash_commitment_blake2b31(commitment)
kzg.verify_element_proof(
bytes_to_bls_field(data),
encoded_data.aggregated_column_commitment,
@@ -109,14 +109,14 @@ class TestEncoder(TestCase):
def test_generate_aggregated_column_commitments(self):
chunks_matrix = self.encoder._chunkify_data(self.data)
_, column_commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
poly, commitment = self.encoder._compute_aggregated_column_commitment(chunks_matrix, column_commitments)
poly, commitment = self.encoder._compute_aggregated_column_commitment(column_commitments)
self.assertIsNotNone(poly)
self.assertIsNotNone(commitment)
def test_generate_aggregated_column_proofs(self):
chunks_matrix = self.encoder._chunkify_data(self.data)
_, column_commitments = zip(*self.encoder._compute_column_kzg_commitments(chunks_matrix))
poly, _ = self.encoder._compute_aggregated_column_commitment(chunks_matrix, column_commitments)
poly, _ = self.encoder._compute_aggregated_column_commitment(column_commitments)
proofs = self.encoder._compute_aggregated_column_proofs(poly, column_commitments)
self.assertEqual(len(proofs), len(column_commitments))

File: da/test_full_flow.py

@@ -2,8 +2,8 @@ from itertools import chain
from unittest import TestCase
from typing import List, Optional
from da.common import NodeId, build_attestation_message, BLSPublicKey, NomosDaG2ProofOfPossession as bls_pop
from da.api.common import DAApi, VID, Metadata
from da.common import NodeId, build_blob_id, NomosDaG2ProofOfPossession as bls_pop
from da.api.common import DAApi, BlobMetadata, Metadata
from da.verifier import DAVerifier, DABlob
from da.api.test_flow import MockStore
from da.dispersal import Dispersal, DispersalSettings
@@ -12,24 +12,23 @@ from da.encoder import DAEncoderParams, DAEncoder
class DAVerifierWApi:
def __init__(self, sk: int, public_keys: List[BLSPublicKey]):
def __init__(self):
self.store = MockStore()
self.api = DAApi(self.store)
self.verifier = DAVerifier(sk, public_keys)
self.verifier = DAVerifier()
def receive_blob(self, blob: DABlob):
if attestation := self.verifier.verify(blob):
if self.verifier.verify(blob):
# Warning: if two blobs share the same aggregated column commitment and row commitments,
# the build_blob_id method will produce the same output.
cert_id = build_attestation_message(blob.aggregated_column_commitment, blob.rows_commitments)
self.store.populate(blob, cert_id)
return attestation
blob_id = build_blob_id(blob.aggregated_column_commitment, blob.rows_commitments)
self.store.populate(blob, blob_id)
def receive_cert(self, vid: VID):
def receive_metadata(self, blob_metadata: BlobMetadata):
# Usually the certificate would be verified here,
# but we assume it is already coming from the verified block,
# in which case all certificates have already been verified by the DA Node.
self.api.write(vid.cert_id, vid.metadata)
self.api.write(blob_metadata.blob_id, blob_metadata.metadata)
def read(self, app_id, indexes) -> List[Optional[DABlob]]:
return self.api.read(app_id, indexes)
@@ -39,22 +38,16 @@ class TestFullFlow(TestCase):
def setUp(self):
self.n_nodes = 16
self.nodes_ids = [NodeId(x.to_bytes(length=32, byteorder='big')) for x in range(self.n_nodes)]
self.secret_keys = list(range(1, self.n_nodes+1))
self.public_keys = [bls_pop.SkToPk(sk) for sk in self.secret_keys]
# sort by pk as we do in dispersal
self.secret_keys, self.public_keys = zip(
*sorted(zip(self.secret_keys, self.public_keys), key=lambda x: x[1])
)
dispersal_settings = DispersalSettings(
self.nodes_ids,
self.public_keys,
self.n_nodes
)
self.dispersal = Dispersal(dispersal_settings)
self.encoder_test = TestEncoder()
self.encoder_test.setUp()
self.api_nodes = [DAVerifierWApi(k, self.public_keys) for k in self.secret_keys]
self.api_nodes = [DAVerifierWApi() for _ in range(self.n_nodes)]
def test_full_flow(self):
app_id = int.to_bytes(1)
@@ -68,27 +61,27 @@
# mock send and await method with local verifiers
def __send_and_await_response(node: int, blob: DABlob):
node = self.api_nodes[int.from_bytes(node)]
return node.receive_blob(blob)
node.receive_blob(blob)
# inject mock send and await method
self.dispersal._send_and_await_response = __send_and_await_response
certificate = self.dispersal.disperse(encoded_data)
vid = VID(
certificate.id(),
self.dispersal.disperse(encoded_data)
blob_id = build_blob_id(encoded_data.aggregated_column_commitment, encoded_data.row_commitments)
blob_metadata = BlobMetadata(
blob_id,
Metadata(app_id, index)
)
# verifier
for node in self.api_nodes:
node.receive_cert(vid)
node.receive_metadata(blob_metadata)
# read from api and confirm its working
# notice that we need to sort the api_nodes by their public key to have the blobs sorted in the same fashion
# as we actually do in dispersal.
blobs = list(chain.from_iterable(
node.read(app_id, [index])
for node in sorted(self.api_nodes, key=lambda n: bls_pop.SkToPk(n.verifier.sk))
for node in self.api_nodes
))
original_blobs = list(self.dispersal._prepare_data(encoded_data))
self.assertEqual(blobs, original_blobs)
@@ -109,18 +102,19 @@
# inject mock send and await method
self.dispersal._send_and_await_response = __send_and_await_response
certificate = self.dispersal.disperse(encoded_data)
self.dispersal.disperse(encoded_data)
blob_id = build_blob_id(encoded_data.aggregated_column_commitment, encoded_data.row_commitments)
# Loop through each index and simulate dispersal with the same blob_id but different metadata
for index in indexes:
vid = VID(
certificate.id(),
metadata = BlobMetadata(
blob_id,
Metadata(app_id, index)
)
# verifier
for node in self.api_nodes:
node.receive_cert(vid)
node.receive_metadata(metadata)
# Verify retrieval for each index
for index in indexes:
@@ -128,7 +122,7 @@
# as we actually do in dispersal.
blobs = list(chain.from_iterable(
node.read(app_id, [index])
for node in sorted(self.api_nodes, key=lambda n: bls_pop.SkToPk(n.verifier.sk))
for node in self.api_nodes
))
original_blobs = list(self.dispersal._prepare_data(encoded_data))
self.assertEqual(blobs, original_blobs, f"Failed at index {index}")

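In the v1 flow a block effectively carries (blob_id, metadata) pairs instead of certificates, so the fan-out to API nodes condenses to the following (distilled from the test above, not additional spec behaviour):

    blob_id = build_blob_id(encoded_data.aggregated_column_commitment,
                            encoded_data.row_commitments)
    for node in api_nodes:
        node.receive_metadata(BlobMetadata(blob_id, Metadata(app_id, index)))
    blobs = [node.read(app_id, [index]) for node in api_nodes]
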
File: da/test_verifier.py

@@ -1,28 +1,28 @@
from unittest import TestCase
from da.common import Column, NomosDaG2ProofOfPossession as bls_pop
from da.common import Column
from da.encoder import DAEncoder
from da.kzg_rs import kzg
from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY
from da.test_encoder import TestEncoder
from da.verifier import Attestation, DAVerifier, DABlob
from da.verifier import DAVerifier, DABlob
class TestVerifier(TestCase):
def setUp(self):
self.verifier = DAVerifier(1987, [bls_pop.SkToPk(1987)])
self.verifier = DAVerifier()
def test_verify_column(self):
column = Column(int.to_bytes(i, length=32) for i in range(8))
_, column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
aggregated_poly, aggregated_column_commitment = kzg.bytes_to_commitment(
DAEncoder.hash_column_and_commitment(column, column_commitment), GLOBAL_PARAMETERS
DAEncoder.hash_commitment_blake2b31(column_commitment), GLOBAL_PARAMETERS
)
aggregated_proof = kzg.generate_element_proof(0, aggregated_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
self.assertTrue(
self.verifier._verify_column(
column, column_commitment, aggregated_column_commitment, aggregated_proof, 0
column, 0, column_commitment, aggregated_column_commitment, aggregated_proof,
)
)
@@ -30,12 +30,11 @@ class TestVerifier(TestCase):
_ = TestEncoder()
_.setUp()
encoded_data = _.encoder.encode(_.data)
verifiers_sk = [i for i in range(1000, 1000+len(encoded_data.chunked_data[0]))]
vefiers_pk = [bls_pop.SkToPk(k) for k in verifiers_sk]
for i, column in enumerate(encoded_data.chunked_data.columns):
verifier = DAVerifier(verifiers_sk[i], vefiers_pk)
verifier = DAVerifier()
da_blob = DABlob(
Column(column),
i,
encoded_data.column_commitments[i],
encoded_data.aggregated_column_commitment,
encoded_data.aggregated_column_proofs[i],
@@ -52,6 +51,7 @@ class TestVerifier(TestCase):
i, column = next(columns)
da_blob = DABlob(
Column(column),
i,
encoded_data.column_commitments[i],
encoded_data.aggregated_column_commitment,
encoded_data.aggregated_column_proofs[i],
@@ -62,10 +62,11 @@
for i, column in columns:
da_blob = DABlob(
Column(column),
i,
encoded_data.column_commitments[i],
encoded_data.aggregated_column_commitment,
encoded_data.aggregated_column_proofs[i],
encoded_data.row_commitments,
[row[i] for row in encoded_data.row_proofs],
)
self.assertIsNone(self.verifier.verify(da_blob))
self.assertTrue(self.verifier.verify(da_blob))

File: da/verifier.py

@@ -1,6 +1,6 @@
from dataclasses import dataclass
from hashlib import sha3_256
from typing import List, Optional, Sequence, Set, Dict
from typing import List, Sequence, Set
from eth2spec.deneb.mainnet import BLSFieldElement
from eth2spec.eip7594.mainnet import (
@@ -9,7 +9,7 @@ from eth2spec.eip7594.mainnet import (
)
import da.common
from da.common import Column, Chunk, Attestation, BLSPrivateKey, BLSPublicKey, NomosDaG2ProofOfPossession as bls_pop
from da.common import Column, Chunk, BlobId
from da.encoder import DAEncoder
from da.kzg_rs import kzg
from da.kzg_rs.common import ROOTS_OF_UNITY, GLOBAL_PARAMETERS, BLS_MODULUS
@@ -18,32 +18,28 @@ from da.kzg_rs.common import ROOTS_OF_UNITY, GLOBAL_PARAMETERS, BLS_MODULUS
@dataclass
class DABlob:
column: Column
column_idx: int
column_commitment: Commitment
aggregated_column_commitment: Commitment
aggregated_column_proof: Proof
rows_commitments: List[Commitment]
rows_proofs: List[Proof]
def id(self) -> bytes:
return da.common.build_attestation_message(self.aggregated_column_commitment, self.rows_commitments)
def blob_id(self) -> bytes:
return da.common.build_blob_id(self.aggregated_column_commitment, self.rows_commitments)
def column_id(self) -> bytes:
return sha3_256(self.column.as_bytes()).digest()
class DAVerifier:
def __init__(self, sk: BLSPrivateKey, nodes_pks: List[BLSPublicKey]):
self.attested_blobs: Dict[bytes, (bytes, Attestation)] = dict()
self.sk = sk
self.index = nodes_pks.index(bls_pop.SkToPk(self.sk))
@staticmethod
def _verify_column(
column: Column,
column_idx: int,
column_commitment: Commitment,
aggregated_column_commitment: Commitment,
aggregated_column_proof: Proof,
index: int
) -> bool:
# 1. compute commitment for column
_, computed_column_commitment = kzg.bytes_to_commitment(column.as_bytes(), GLOBAL_PARAMETERS)
@@ -51,11 +47,11 @@ class DAVerifier:
if column_commitment != computed_column_commitment:
return False
# 3. compute column hash
column_hash = DAEncoder.hash_column_and_commitment(column, column_commitment)
column_hash = DAEncoder.hash_commitment_blake2b31(column_commitment)
# 4. Check proof with commitment and proof over the aggregated column commitment
chunk = BLSFieldElement.from_bytes(column_hash)
return kzg.verify_element_proof(
chunk, aggregated_column_commitment, aggregated_column_proof, index, ROOTS_OF_UNITY
chunk, aggregated_column_commitment, aggregated_column_proof, column_idx, ROOTS_OF_UNITY
)
@staticmethod
@@ -77,38 +73,34 @@ class DAVerifier:
return False
return True
def _build_attestation(self, blob: DABlob) -> Attestation:
hasher = sha3_256()
hasher.update(bytes(blob.aggregated_column_commitment))
for c in blob.rows_commitments:
hasher.update(bytes(c))
message = hasher.digest()
return Attestation(signature=bls_pop.Sign(self.sk, message))
def verify(self, blob: DABlob) -> Optional[Attestation]:
blob_id = blob.id()
if previous_attestation := self.attested_blobs.get(blob_id):
column_id, attestation = previous_attestation
# we already attested, it is cached so we return it
if column_id == blob.column_id():
return attestation
# we already attested and they are asking us to attest the same data for a different column
# skip
return None
def verify(self, blob: DABlob) -> bool:
"""
Verify the integrity of the given blob.
This function must be idempotent. The implementer should ensure that
repeated verification attempts do not result in inconsistent states.
Args:
blob (DABlob): The blob to verify.
Returns:
bool: True if the blob is verified successfully, False otherwise.
"""
is_column_verified = DAVerifier._verify_column(
blob.column,
blob.column_idx,
blob.column_commitment,
blob.aggregated_column_commitment,
blob.aggregated_column_proof,
self.index
)
if not is_column_verified:
return
return False
are_chunks_verified = DAVerifier._verify_chunks(
blob.column, blob.rows_commitments, blob.rows_proofs, self.index
blob.column, blob.rows_commitments, blob.rows_proofs, blob.column_idx
)
if not are_chunks_verified:
return
attestation = self._build_attestation(blob)
self.attested_blobs[blob_id] = (blob.column_id(), attestation)
return attestation
return False
# Ensure idempotency: Implementers should define how to avoid redundant verification.
return True
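
A minimal check of the idempotency contract introduced by #118, assuming da_blob is built as in the tests above: verifying the same blob twice must yield the same stable answer.

    verifier = DAVerifier()
    first = verifier.verify(da_blob)
    second = verifier.verify(da_blob)   # repeated verification must not change the outcome
    assert first == second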