diff --git a/da/api/common.py b/da/api/common.py
index 5dbd588..5b7a908 100644
--- a/da/api/common.py
+++ b/da/api/common.py
@@ -3,7 +3,7 @@ from dataclasses import dataclass
 from typing import Optional, List, Sequence
 
 from da.common import BlobId
-from da.verifier import DAShare
+from da.verifier import DABlob
 
 
 @dataclass
diff --git a/da/common.py b/da/common.py
index 1c1804d..9b44f56 100644
--- a/da/common.py
+++ b/da/common.py
@@ -42,33 +42,6 @@ class Bitfield(List[bool]):
     pass
 
 
-@dataclass
-class Attestation:
-    signature: BLSSignature
-
-
-@dataclass
-class Certificate:
-    aggregated_signatures: BLSSignature
-    signers: Bitfield
-    aggregated_column_commitment: Commitment
-    row_commitments: List[Commitment]
-
-    def id(self) -> bytes:
-        return build_blob_id(self.aggregated_column_commitment, self.row_commitments)
-
-    def verify(self, nodes_public_keys: List[BLSPublicKey]) -> bool:
-        """
-        List of nodes public keys should be a trusted list of verified proof of possession keys.
-        Otherwise, we could fall under the Rogue Key Attack
-        `assert all(bls_pop.PopVerify(pk, proof) for pk, proof in zip(node_public_keys, pops))`
-        """
-        # we sort them as the signers bitfield is sorted by the public keys as well
-        signers_keys = list(compress(sorted(nodes_public_keys), self.signers))
-        message = build_blob_id(self.aggregated_column_commitment, self.row_commitments)
-        return NomosDaG2ProofOfPossession.AggregateVerify(signers_keys, [message]*len(signers_keys), self.aggregated_signatures)
-
-
 def build_blob_id(aggregated_column_commitment: Commitment, row_commitments: Sequence[Commitment]) -> BlobId:
     hasher = sha3_256()
     hasher.update(bytes(aggregated_column_commitment))
diff --git a/da/test_full_flow.py b/da/test_full_flow.py
index b463163..7f09fb6 100644
--- a/da/test_full_flow.py
+++ b/da/test_full_flow.py
@@ -2,8 +2,8 @@ from itertools import chain
 from unittest import TestCase
 from typing import List, Optional
 
-from da.common import NodeId, build_blob_id, BLSPublicKey, NomosDaG2ProofOfPossession as bls_pop
-from da.api.common import DAApi, VID, Metadata
+from da.common import NodeId, build_blob_id, NomosDaG2ProofOfPossession as bls_pop
+from da.api.common import DAApi, BlobMetadata, Metadata
 from da.verifier import DAVerifier, DABlob
 from da.api.test_flow import MockStore
 from da.dispersal import Dispersal, DispersalSettings
@@ -25,7 +25,7 @@ class DAVerifierWApi:
             self.store.populate(blob, cert_id)
         return attestation
 
-    def receive_metadata(self, blob_metadata: BlobMetadata):
+    def receive_metadata(self, vid: BlobMetadata):
         # Usually the certificate would be verified here,
         # but we are assuming that it is already coming from the verified block,
         # in which case all certificates have already been verified by the DA Node.
@@ -66,16 +66,16 @@ class TestFullFlow(TestCase):
         # inject mock send and await method
         self.dispersal._send_and_await_response = __send_and_await_response
 
-        self.dispersal.disperse(encoded_data)
-        blob_id = build_blob_id(encoded_data.row_commitments)
-        blob_metadata = BlobMetadata(
-            blob_id,
+        certificate = self.dispersal.disperse(encoded_data)
+
+        vid = BlobMetadata(
+            certificate.id(),
             Metadata(app_id, index)
         )
 
         # verifier
         for node in self.api_nodes:
-            node.receive_metadata(blob_metadata)
+            node.receive_metadata(vid)
 
         # read from api and confirm it's working
         # notice that we need to sort the api_nodes by their public key to have the blobs sorted in the same fashion
@@ -109,7 +109,7 @@ class TestFullFlow(TestCase):
         # Loop through each index and simulate dispersal with the same cert_id but different metadata
         for index in indexes:
             metadata = BlobMetadata(
-                blob_id,
+                certificate.id(),
                 Metadata(app_id, index)
             )
 
diff --git a/da/test_verifier.py b/da/test_verifier.py
index e86d42e..2d09eea 100644
--- a/da/test_verifier.py
+++ b/da/test_verifier.py
@@ -22,7 +22,7 @@ class TestVerifier(TestCase):
         aggregated_proof = kzg.generate_element_proof(0, aggregated_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
         self.assertTrue(
             self.verifier._verify_column(
-                column, column_commitment, aggregated_column_commitment, aggregated_proof, 0
+                column, 0, column_commitment, aggregated_column_commitment, aggregated_proof,
             )
         )
 
@@ -30,9 +30,9 @@
         _ = TestEncoder()
         _.setUp()
         encoded_data = _.encoder.encode(_.data)
-        for i, column in enumerate(encoded_data.extended_matrix.columns):
+        for i, column in enumerate(encoded_data.chunked_data.columns):
             verifier = DAVerifier()
-            da_blob = DAShare(
+            da_blob = DABlob(
                 Column(column),
                 i,
                 encoded_data.combined_column_proofs[i],
@@ -60,4 +60,4 @@ class TestVerifier(TestCase):
             encoded_data.combined_column_proofs[i],
             encoded_data.row_commitments,
         )
-        self.assertIsNotNone(self.verifier.verify(da_blob))
+        self.assertFalse(self.verifier.verify(da_blob))
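For context on the metadata flow the updated test exercises, here is a minimal sketch (not part of the patch) of how a blob id derived from the commitments pairs with Metadata(app_id, index) inside a BlobMetadata record before being handed to the nodes. The dataclass shapes, the final digest() step of build_blob_id, and the demo byte strings are assumptions mirroring what is visible in the hunks above, not the project's actual implementation.

# Illustrative sketch only -- assumed stand-ins, not the patched da/ modules.
from dataclasses import dataclass
from hashlib import sha3_256
from typing import Sequence


@dataclass
class Metadata:
    app_id: bytes
    index: int


@dataclass
class BlobMetadata:
    blob_id: bytes
    metadata: Metadata


def build_blob_id(aggregated_column_commitment: bytes, row_commitments: Sequence[bytes]) -> bytes:
    # Mirrors the signature kept in da/common.py: hash the aggregated column
    # commitment followed by every row commitment. The loop and final digest
    # are assumed, since the hunk context cuts off after the first update().
    hasher = sha3_256()
    hasher.update(bytes(aggregated_column_commitment))
    for commitment in row_commitments:
        hasher.update(bytes(commitment))
    return hasher.digest()


if __name__ == "__main__":
    # Toy byte strings stand in for real KZG commitments.
    blob_id = build_blob_id(b"aggregated-column-commitment", [b"row-0", b"row-1"])
    vid = BlobMetadata(blob_id, Metadata(app_id=b"app", index=0))
    # In the test, each api node would then receive this record,
    # e.g. node.receive_metadata(vid)
    print(vid.blob_id.hex(), vid.metadata.index)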