Remove certificate from verifier

This commit is contained in:
danielSanchezQ 2025-01-24 15:14:02 +00:00
parent 5ecbb60e9f
commit fa4b97926b
4 changed files with 131 additions and 13 deletions

View File

@ -8,6 +8,8 @@ from py_ecc.bls import G2ProofOfPossession
type BlobId = bytes
type BlobId = bytes
class NodeId(Bytes32):
    """32-byte identifier of a DA node (inherits behavior from Bytes32)."""
    pass
@ -40,8 +42,36 @@ class Bitfield(List[bool]):
pass
def build_blob_id(row_commitments: Sequence[Commitment]) -> BlobId:
hasher = blake2b(digest_size=32)
@dataclass
class Attestation:
    # BLS signature produced by a single DA node over the blob-id message.
    signature: BLSSignature
@dataclass
class Certificate:
    """Availability certificate for a dispersed blob.

    Bundles the aggregated BLS signature of the attesting nodes with the
    commitments needed to recompute the attested blob id.
    """
    aggregated_signatures: BLSSignature   # BLS aggregate of the signers' attestations
    signers: Bitfield                     # one bit per node; True marks a signer
    aggregated_column_commitment: Commitment
    row_commitments: List[Commitment]

    def id(self) -> bytes:
        # The certificate id is the blob id derived from its commitments.
        return build_blob_id(self.aggregated_column_commitment, self.row_commitments)

    def verify(self, nodes_public_keys: List[BLSPublicKey]) -> bool:
        """
        List of nodes public keys should be a trusted list of verified proof of possession keys.
        Otherwise, we could fall under the Rogue Key Attack
        `assert all(bls_pop.PopVerify(pk, proof) for pk, proof in zip(node_public_keys, pops))`
        """
        # we sort them as the signers bitfield is sorted by the public keys as well
        signers_keys = list(compress(sorted(nodes_public_keys), self.signers))
        message = build_blob_id(self.aggregated_column_commitment, self.row_commitments)
        # Every signer signed the same message (the blob id), hence the repeated-message list.
        return NomosDaG2ProofOfPossession.AggregateVerify(signers_keys, [message]*len(signers_keys), self.aggregated_signatures)
def build_blob_id(aggregated_column_commitment: Commitment, row_commitments: Sequence[Commitment]) -> BlobId:
    """Return the blob id: sha3-256 over the aggregated column commitment
    followed by every row commitment, in order."""
    payload = b"".join(
        bytes(commitment)
        for commitment in (aggregated_column_commitment, *row_commitments)
    )
    return sha3_256(payload).digest()

View File

@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import List, Generator
from da.common import NodeId, Column
from da.common import Certificate, NodeId, BLSPublicKey, Bitfield, build_blob_id, NomosDaG2ProofOfPossession as bls_pop
from da.encoder import EncodedData
from da.verifier import DAShare
@ -35,7 +35,34 @@ class Dispersal:
def _send_and_await_response(self, node: NodeId, blob: DAShare) -> bool:
pass
def disperse(self, encoded_data: EncodedData):
def _build_certificate(
        self,
        encoded_data: EncodedData,
        attestations: Sequence[Attestation],
        signers: Bitfield
) -> Certificate:
    """Aggregate the collected attestations into a Certificate.

    Preconditions: at least `threshold` attestations were gathered, and
    the signers bitfield has exactly one True bit per attestation.
    """
    assert len(attestations) >= self.settings.threshold
    assert signers.count(True) == len(attestations)
    signatures = [attestation.signature for attestation in attestations]
    return Certificate(
        aggregated_signatures=bls_pop.Aggregate(signatures),
        signers=signers,
        aggregated_column_commitment=encoded_data.aggregated_column_commitment,
        row_commitments=encoded_data.row_commitments
    )
@staticmethod
def _verify_attestation(public_key: BLSPublicKey, attested_message: bytes, attestation: Attestation) -> bool:
    # Verify a single node's BLS signature over the attested message.
    return bls_pop.Verify(public_key, attested_message, attestation.signature)
@staticmethod
def _build_attestation_message(encoded_data: EncodedData) -> bytes:
    # The message every node signs is the blob id derived from the commitments.
    return build_blob_id(encoded_data.aggregated_column_commitment, encoded_data.row_commitments)
def disperse(self, encoded_data: EncodedData) -> Optional[Certificate]:
attestations = []
attested_message = self._build_attestation_message(encoded_data)
signed = Bitfield(False for _ in range(len(self.settings.nodes_ids)))
blob_data = zip(
self.settings.nodes_ids,
self._prepare_data(encoded_data)

View File

@ -2,9 +2,9 @@ from itertools import chain
from unittest import TestCase
from typing import List, Optional
from da.common import NodeId, build_blob_id
from da.api.common import DAApi, BlobMetadata, Metadata
from da.verifier import DAVerifier, DAShare
from da.common import NodeId, build_blob_id, BLSPublicKey, NomosDaG2ProofOfPossession as bls_pop
from da.api.common import DAApi, VID, Metadata
from da.verifier import DAVerifier, DABlob
from da.api.test_flow import MockStore
from da.dispersal import Dispersal, DispersalSettings
from da.test_encoder import TestEncoder
@ -21,8 +21,9 @@ class DAVerifierWApi:
if self.verifier.verify(blob):
# Warning: If aggregated col commitment and row commitment are the same,
# the build_attestation_message method will produce the same output.
blob_id = build_blob_id(blob.row_commitments)
self.store.populate(blob, blob_id)
cert_id = build_blob_id(blob.aggregated_column_commitment, blob.rows_commitments)
self.store.populate(blob, cert_id)
return attestation
def receive_metadata(self, blob_metadata: BlobMetadata):
# Usually the certificate would be verifier here,

View File

@ -6,7 +6,9 @@ from eth2spec.eip7594.mainnet import (
KZGProof as Proof,
)
from da.common import Column, BlobId, build_blob_id
import da.common
from da.common import Column, Chunk, Attestation, BlobId, BLSPublicKey, NomosDaG2ProofOfPossession as bls_pop
from da.encoder import DAEncoder
from da.kzg_rs import kzg
from da.kzg_rs.bdfg_proving import combine_commitments, derive_challenge, compute_combined_evaluation
from da.kzg_rs.common import ROOTS_OF_UNITY
@ -17,14 +19,26 @@ _DST = b"NOMOS_DA_V1"
@dataclass
class DAShare:
    """A single column share delivered to a DA node for verification.

    NOTE(review): this span interleaves two schema versions (diff residue).
    Old-style fields (`column_commitment`, `aggregated_column_commitment`,
    `aggregated_column_proof`, `rows_commitments`, `rows_proofs`) appear
    alongside new-style ones (`column_idx`, `combined_column_proof`,
    `row_commitments`). Confirm against the merged file which set survives.
    """
    column: Column
    column_idx: int
    combined_column_proof: Proof
    row_commitments: List[Commitment]
    column_commitment: Commitment
    aggregated_column_commitment: Commitment
    aggregated_column_proof: Proof
    rows_commitments: List[Commitment]
    rows_proofs: List[Proof]

    def blob_id(self) -> bytes:
        # Old-style id: hash of aggregated column commitment + row commitments.
        return da.common.build_blob_id(self.aggregated_column_commitment, self.rows_commitments)

    def column_id(self) -> bytes:
        # Identifies the concrete column payload, independent of the blob.
        return sha3_256(self.column.as_bytes()).digest()

    # NOTE(review): this redefinition shadows the blob_id above — diff residue.
    # It also calls build_blob_id with a single argument, matching an older
    # signature; only one definition should remain in the merged file.
    def blob_id(self) -> BlobId:
        return build_blob_id(self.row_commitments)
class DAVerifier:
    def __init__(self, nodes_pks: List[BLSPublicKey]):
        """Set up the per-node verifier state.

        NOTE(review): `self.sk` is read below but never assigned in this
        view — presumably set elsewhere or a missing `sk` parameter;
        confirm against the full file.
        """
        # Blob ids this node has already attested (dedup cache).
        self.attested_blobs: Set[BlobId] = set()
        # Position of this node's public key within the trusted key list.
        self.index = nodes_pks.index(bls_pop.SkToPk(self.sk))
@staticmethod
def _verify_column(
column: Column,
@ -49,3 +63,49 @@ class DAVerifier:
blob.column_idx,
ROOTS_OF_UNITY
)
@staticmethod
def _verify_chunk(chunk: Chunk, commitment: Commitment, proof: Proof, index: int) -> bool:
    """Check a single chunk's KZG element proof at position `index`."""
    # Reduce the raw chunk bytes into the BLS scalar field first.
    # NOTE(review): int.from_bytes relies on the 3.11+ big-endian default;
    # confirm this matches the chunk encoding used by the encoder.
    element = BLSFieldElement(int.from_bytes(bytes(chunk)) % BLS_MODULUS)
    return kzg.verify_element_proof(element, commitment, proof, index, ROOTS_OF_UNITY)
@staticmethod
def _verify_chunks(
        chunks: Sequence[Chunk],
        commitments: Sequence[Commitment],
        proofs: Sequence[Proof],
        index: int
) -> bool:
    """Verify every chunk against its commitment and proof at `index`.

    Returns False when the three sequences differ in length or when any
    single chunk proof fails (short-circuits on the first failure).
    """
    if len(chunks) != len(commitments) or len(commitments) != len(proofs):
        return False
    return all(
        DAVerifier._verify_chunk(chunk, commitment, proof, index)
        for chunk, commitment, proof in zip(chunks, commitments, proofs)
    )
def verify(self, blob: DABlob) -> bool:
    """Verify a received share: column proof first, then per-chunk proofs.

    NOTE(review): diff residue — `self.attested_blobs` is declared as a
    `set` in __init__ and `.add()` is used at the end, but the `.get()`
    call and the `(column_id, attestation)` unpacking below belong to an
    older dict-based cache. As written, `.get` on a set would raise
    AttributeError; confirm the merged file keeps only one scheme.
    """
    blob_id = blob.blob_id()
    if previous_attestation := self.attested_blobs.get(blob_id):
        column_id, attestation = previous_attestation
        # we already attested, is cached so we return it
        if column_id == blob.column_id():
            return attestation
        # we already attested and they are asking us to attest the same data different column
        # skip
        return False
    is_column_verified = DAVerifier._verify_column(
        blob.column,
        blob.column_commitment,
        blob.aggregated_column_commitment,
        blob.aggregated_column_proof,
        self.index
    )
    if not is_column_verified:
        return False
    are_chunks_verified = DAVerifier._verify_chunks(
        blob.column, blob.rows_commitments, blob.rows_proofs, self.index
    )
    if not are_chunks_verified:
        return False
    # Record the blob so repeated shares are not re-attested.
    self.attested_blobs.add(blob_id)
    return True