Fix tests

danielSanchezQ 2025-01-24 15:53:12 +00:00
parent b31c571663
commit 8a12deacfa
5 changed files with 34 additions and 73 deletions

da/api/common.py

@@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
 from dataclasses import dataclass
 from typing import Optional, List, Sequence

-from da.common import Certificate
+from da.common import BlobId
 from da.verifier import DABlob

@@ -15,16 +15,16 @@ class Metadata:
 @dataclass
-class VID:
-    # da certificate id
-    cert_id: bytes
+class BlobMetadata:
+    # da blob id
+    blob_id: BlobId
     # application + index information
     metadata: Metadata


 class BlobStore(ABC):
     @abstractmethod
-    def add(self, certificate: Certificate, metadata: Metadata):
+    def add(self, id: BlobId, metadata: Metadata):
         """
         Raises: ValueError if there is already a registered certificate for the given metadata
         """

@@ -39,14 +39,14 @@ class DAApi:
     def __init__(self, bs: BlobStore):
         self.store = bs

-    def write(self, certificate: Certificate, metadata: Metadata):
+    def write(self, id: BlobId, metadata: Metadata):
         """
         Write method should be used by a service that is able to retrieve verified certificates
         from the latest Block. Once a certificate is retrieved, api creates a relation between
         the blob of an original data, certificate and index for the app_id of the certificate.
         Raises: ValueError if there is already a registered certificate for a given metadata
         """
-        self.store.add(certificate, metadata)
+        self.store.add(id, metadata)

     def read(self, app_id, indexes) -> List[Optional[DABlob]]:
         """

da/api/test_flow.py

@@ -35,36 +35,36 @@ class MockStore(BlobStore):
 class TestFlow(TestCase):
     def test_api_write_read(self):
         expected_blob = "hello"
-        cert_id = b"11"*32
+        blob_id = b"11"*32
         app_id = 1
         idx = 1
         mock_meta = Metadata(1, 1)
         mock_store = MockStore()
-        mock_store.populate(expected_blob, cert_id)
+        mock_store.populate(expected_blob, blob_id)
         api = DAApi(mock_store)

-        api.write(cert_id, mock_meta)
+        api.write(blob_id, mock_meta)
         blobs = api.read(app_id, [idx])

         self.assertEqual([expected_blob], blobs)

     def test_same_index(self):
         expected_blob = "hello"
-        cert_id = b"11"*32
+        blob_id = b"11"*32
         app_id = 1
         idx = 1
         mock_meta = Metadata(1, 1)
         mock_store = MockStore()
-        mock_store.populate(expected_blob, cert_id)
+        mock_store.populate(expected_blob, blob_id)
         api = DAApi(mock_store)

-        api.write(cert_id, mock_meta)
+        api.write(blob_id, mock_meta)
         with self.assertRaises(ValueError):
-            api.write(cert_id, mock_meta)
+            api.write(blob_id, mock_meta)
         blobs = api.read(app_id, [idx])
@@ -72,7 +72,7 @@ class TestFlow(TestCase):
     def test_multiple_indexes_same_data(self):
         expected_blob = "hello"
-        cert_id = b"11"*32
+        blob_id = b"11"*32
         app_id = 1
         idx1 = 1
         idx2 = 2
@@ -80,13 +80,13 @@ class TestFlow(TestCase):
         mock_meta2 = Metadata(app_id, idx2)
         mock_store = MockStore()
-        mock_store.populate(expected_blob, cert_id)
+        mock_store.populate(expected_blob, blob_id)
         api = DAApi(mock_store)

-        api.write(cert_id, mock_meta1)
-        mock_store.populate(expected_blob, cert_id)
-        api.write(cert_id, mock_meta2)
+        api.write(blob_id, mock_meta1)
+        mock_store.populate(expected_blob, blob_id)
+        api.write(blob_id, mock_meta2)

         blobs_idx1 = api.read(app_id, [idx1])
         blobs_idx2 = api.read(app_id, [idx2])
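
Together with the sketch above, the behaviour these tests pin down is: one registration per (app_id, index) pair, a second registration for the same pair raises ValueError, and a single blob id may back several indexes. A condensed walkthrough under the same assumptions:

store = InMemoryStore()
blob_id = b"11" * 32
store.populate("hello", blob_id)

store.add(blob_id, Metadata(app_id=1, index=1))
assert store.read(1, [1]) == ["hello"]            # test_api_write_read

try:
    store.add(blob_id, Metadata(app_id=1, index=1))
except ValueError:
    pass                                           # test_same_index

store.add(blob_id, Metadata(app_id=1, index=2))    # test_multiple_indexes_same_data
assert store.read(1, [1, 2]) == ["hello", "hello"]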

da/common.py

@@ -36,42 +36,11 @@ class ChunksMatrix(List[Row | Column]):
         return ChunksMatrix(self.columns)


-BLSPublicKey = bytes
-BLSPrivateKey = int
-BLSSignature = bytes


 class Bitfield(List[bool]):
     pass


-@dataclass
-class Attestation:
-    signature: BLSSignature
-
-
-@dataclass
-class Certificate:
-    aggregated_signatures: BLSSignature
-    signers: Bitfield
-    aggregated_column_commitment: Commitment
-    row_commitments: List[Commitment]
-
-    def id(self) -> bytes:
-        return build_blob_id(self.aggregated_column_commitment, self.row_commitments)
-
-    def verify(self, nodes_public_keys: List[BLSPublicKey]) -> bool:
-        """
-        List of nodes public keys should be a trusted list of verified proof of possession keys.
-        Otherwise, we could fall under the Rogue Key Attack
-        `assert all(bls_pop.PopVerify(pk, proof) for pk, proof in zip(node_public_keys, pops))`
-        """
-        # we sort them as the signers bitfield is sorted by the public keys as well
-        signers_keys = list(compress(sorted(nodes_public_keys), self.signers))
-        message = build_blob_id(self.aggregated_column_commitment, self.row_commitments)
-        return NomosDaG2ProofOfPossession.AggregateVerify(signers_keys, [message]*len(signers_keys), self.aggregated_signatures)


 def build_blob_id(aggregated_column_commitment: Commitment, row_commitments: Sequence[Commitment]) -> BlobId:
     hasher = sha3_256()
     hasher.update(bytes(aggregated_column_commitment))
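
With Attestation and Certificate removed, build_blob_id is the only identifier left, and the view above cuts its body off after the first update call. A sketch of the full computation; everything past the lines shown is an assumption:

from hashlib import sha3_256
from typing import Sequence

def build_blob_id_sketch(aggregated_column_commitment: bytes,
                         row_commitments: Sequence[bytes]) -> bytes:
    hasher = sha3_256()
    hasher.update(bytes(aggregated_column_commitment))
    # assumed continuation: fold in every row commitment, then return the digest,
    # which now serves as the blob identifier in place of Certificate.id()
    for commitment in row_commitments:
        hasher.update(bytes(commitment))
    return hasher.digest()

# e.g. build_blob_id_sketch(b"\x01" * 48, [b"\x02" * 48, b"\x03" * 48]) -> 32-byte id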

da/test_full_flow.py

@@ -2,8 +2,8 @@ from itertools import chain
 from unittest import TestCase
 from typing import List, Optional

-from da.common import NodeId, build_blob_id, BLSPublicKey, NomosDaG2ProofOfPossession as bls_pop
-from da.api.common import DAApi, VID, Metadata
+from da.common import NodeId, build_blob_id, NomosDaG2ProofOfPossession as bls_pop
+from da.api.common import DAApi, BlobMetadata, Metadata
 from da.verifier import DAVerifier, DABlob
 from da.api.test_flow import MockStore
 from da.dispersal import Dispersal, DispersalSettings
@@ -12,10 +12,10 @@ from da.encoder import DAEncoderParams, DAEncoder
 class DAVerifierWApi:
-    def __init__(self, sk: int, public_keys: List[BLSPublicKey]):
+    def __init__(self):
         self.store = MockStore()
         self.api = DAApi(self.store)
-        self.verifier = DAVerifier(sk, public_keys)
+        self.verifier = DAVerifier()

     def receive_blob(self, blob: DABlob):
         if attestation := self.verifier.verify(blob):
@@ -25,7 +25,7 @@ class DAVerifierWApi:
         self.store.populate(blob, cert_id)
         return attestation

-    def receive_cert(self, vid: VID):
+    def receive_metadata(self, vid: BlobMetadata):
         # Usually the certificate would be verified here,
         # but we are assuming that it is already coming from the verified block,
         # in which case all certificates had been already verified by the DA Node.
@@ -39,22 +39,16 @@ class TestFullFlow(TestCase):
     def setUp(self):
         self.n_nodes = 16
         self.nodes_ids = [NodeId(x.to_bytes(length=32, byteorder='big')) for x in range(self.n_nodes)]
-        self.secret_keys = list(range(1, self.n_nodes+1))
-        self.public_keys = [bls_pop.SkToPk(sk) for sk in self.secret_keys]
         # sort by pk as we do in dispersal
-        self.secret_keys, self.public_keys = zip(
-            *sorted(zip(self.secret_keys, self.public_keys), key=lambda x: x[1])
-        )
         dispersal_settings = DispersalSettings(
             self.nodes_ids,
-            self.public_keys,
             self.n_nodes
         )
         self.dispersal = Dispersal(dispersal_settings)
         self.encoder_test = TestEncoder()
         self.encoder_test.setUp()
-        self.api_nodes = [DAVerifierWApi(k, self.public_keys) for k in self.secret_keys]
+        self.api_nodes = [DAVerifierWApi() for _ in range(self.n_nodes)]

     def test_full_flow(self):
         app_id = int.to_bytes(1)
@@ -74,21 +68,21 @@ class TestFullFlow(TestCase):
         self.dispersal._send_and_await_response = __send_and_await_response
         certificate = self.dispersal.disperse(encoded_data)

-        vid = VID(
+        vid = BlobMetadata(
             certificate.id(),
             Metadata(app_id, index)
         )

         # verifier
         for node in self.api_nodes:
-            node.receive_cert(vid)
+            node.receive_metadata(vid)

         # read from api and confirm it's working
         # notice that we need to sort the api_nodes by their public key to have the blobs sorted in the same fashion
         # as we actually do in dispersal.
         blobs = list(chain.from_iterable(
             node.read(app_id, [index])
-            for node in sorted(self.api_nodes, key=lambda n: bls_pop.SkToPk(n.verifier.sk))
+            for node in self.api_nodes
         ))
         original_blobs = list(self.dispersal._prepare_data(encoded_data))
         self.assertEqual(blobs, original_blobs)
@@ -113,14 +107,14 @@ class TestFullFlow(TestCase):
         # Loop through each index and simulate dispersal with the same cert_id but different metadata
         for index in indexes:
-            vid = VID(
+            metadata = BlobMetadata(
                 certificate.id(),
                 Metadata(app_id, index)
             )

             # verifier
             for node in self.api_nodes:
-                node.receive_cert(vid)
+                node.receive_metadata(metadata)

         # Verify retrieval for each index
         for index in indexes:
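
The integration test thus reduces to wiring without key material: nodes are constructed empty, metadata is pushed by blob id, and blobs come back in dispersal order with no public-key sort. Condensed from the diff above (it reuses names from the repo's da package and from the test body, so it is a sketch, not a standalone script):

from itertools import chain

nodes = [DAVerifierWApi() for _ in range(16)]   # no per-node secret/public keys anymore
vid = BlobMetadata(certificate.id(), Metadata(app_id, index))
for node in nodes:
    node.receive_metadata(vid)                  # renamed from receive_cert
blobs = list(chain.from_iterable(node.read(app_id, [index]) for node in nodes))
# nodes already sit in dispersal order, so the old SkToPk-based sort is unnecessary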

da/test_verifier.py

@@ -11,7 +11,7 @@ from da.verifier import Attestation, DAVerifier, DABlob
 class TestVerifier(TestCase):
     def setUp(self):
-        self.verifier = DAVerifier(1987, [bls_pop.SkToPk(1987)])
+        self.verifier = DAVerifier()

     def test_verify_column(self):
         column = Column(int.to_bytes(i, length=32) for i in range(8))
@@ -22,7 +22,7 @@ class TestVerifier(TestCase):
         aggregated_proof = kzg.generate_element_proof(0, aggregated_poly, GLOBAL_PARAMETERS, ROOTS_OF_UNITY)
         self.assertTrue(
             self.verifier._verify_column(
-                column, column_commitment, aggregated_column_commitment, aggregated_proof, 0
+                column, 0, column_commitment, aggregated_column_commitment, aggregated_proof,
             )
         )
@@ -30,10 +30,8 @@ class TestVerifier(TestCase):
         _ = TestEncoder()
         _.setUp()
         encoded_data = _.encoder.encode(_.data)
-        verifiers_sk = [i for i in range(1000, 1000+len(encoded_data.chunked_data[0]))]
-        vefiers_pk = [bls_pop.SkToPk(k) for k in verifiers_sk]
         for i, column in enumerate(encoded_data.chunked_data.columns):
-            verifier = DAVerifier(verifiers_sk[i], vefiers_pk)
+            verifier = DAVerifier()
             da_blob = DABlob(
                 Column(column),
                 encoded_data.column_commitments[i],
@@ -68,4 +66,4 @@ class TestVerifier(TestCase):
             encoded_data.row_commitments,
             [row[i] for row in encoded_data.row_proofs],
         )
-        self.assertIsNone(self.verifier.verify(da_blob))
+        self.assertFalse(self.verifier.verify(da_blob))
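
The final assertion captures the API change in miniature: DAVerifier.verify used to return an Optional[Attestation] (hence assertIsNone on failure) and now returns a plain bool (hence assertFalse). Side by side, assuming nothing else about the call changed (not standalone; da_blob comes from the test setup above):

# before this commit: the verifier carried keys and answered with an attestation
# verifier = DAVerifier(1987, [bls_pop.SkToPk(1987)])
# result = verifier.verify(da_blob)   # Optional[Attestation]; None on failure

# after this commit: no key material, boolean answer
verifier = DAVerifier()
result = verifier.verify(da_blob)     # bool; False on a tampered or invalid blob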