DA: Verifier skip duplicated blobs intents (#88)
* Make verifier skip duplicated blob intents
* Add a case where the verifier data is the same but the column is different
parent 601598f814
commit 8dd2dabb7d
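In short: a DA node should attest a given piece of encoded data at most once. If it is handed the same blob again for the same column, it can safely replay its cached attestation; if it is handed the same data under a different column, it declines. A minimal, runnable sketch of that rule (DedupVerifier, the byte-string ids, and the placeholder signature are illustrative stand-ins, not the spec's types; the real logic is DAVerifier.verify in the diff below):

    from typing import Dict, Optional, Tuple

    Attestation = bytes  # stand-in for the spec's Attestation type

    class DedupVerifier:
        """Toy model: attest each blob id at most once."""

        def __init__(self) -> None:
            # blob id -> (column id the attestation was produced for, attestation)
            self.attested_blobs: Dict[bytes, Tuple[bytes, Attestation]] = {}

        def verify(self, blob_id: bytes, column_id: bytes) -> Optional[Attestation]:
            if cached := self.attested_blobs.get(blob_id):
                cached_column_id, attestation = cached
                if cached_column_id == column_id:
                    return attestation  # same data, same column: replay cached attestation
                return None  # same data, different column: refuse to attest again
            attestation = b"sig:" + blob_id  # placeholder for verification + signing
            self.attested_blobs[blob_id] = (column_id, attestation)
            return attestation

    v = DedupVerifier()
    assert v.verify(b"blob-1", b"col-0") is not None  # first sight: verify and attest
    assert v.verify(b"blob-1", b"col-0") is not None  # retry, same column: cached
    assert v.verify(b"blob-1", b"col-1") is None      # same data, new column: skip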
@@ -26,14 +26,12 @@ class TestVerifier(TestCase):
             )
         )
 
     def test_build_attestation(self):
         pass
 
     def test_verify(self):
         _ = TestEncoder()
         _.setUp()
         encoded_data = _.encoder.encode(_.data)
         for i, column in enumerate(encoded_data.chunked_data.columns):
+            verifier = DAVerifier(1987)
             da_blob = DABlob(
                 i,
                 Column(column),
@@ -43,4 +41,32 @@ class TestVerifier(TestCase):
                 encoded_data.row_commitments,
                 [row[i] for row in encoded_data.row_proofs],
             )
-            self.assertIsNotNone(self.verifier.verify(da_blob))
+            self.assertIsNotNone(verifier.verify(da_blob))
+
+    def test_verify_duplicated_blob(self):
+        _ = TestEncoder()
+        _.setUp()
+        encoded_data = _.encoder.encode(_.data)
+        columns = enumerate(encoded_data.chunked_data.columns)
+        i, column = next(columns)
+        da_blob = DABlob(
+            i,
+            Column(column),
+            encoded_data.column_commitments[i],
+            encoded_data.aggregated_column_commitment,
+            encoded_data.aggregated_column_proofs[i],
+            encoded_data.row_commitments,
+            [row[i] for row in encoded_data.row_proofs],
+        )
+        self.assertIsNotNone(self.verifier.verify(da_blob))
+        for i, column in columns:
+            da_blob = DABlob(
+                i,
+                Column(column),
+                encoded_data.column_commitments[i],
+                encoded_data.aggregated_column_commitment,
+                encoded_data.aggregated_column_proofs[i],
+                encoded_data.row_commitments,
+                [row[i] for row in encoded_data.row_proofs],
+            )
+            self.assertIsNone(self.verifier.verify(da_blob))
@@ -1,6 +1,6 @@
 from dataclasses import dataclass
 from hashlib import sha3_256
-from typing import List, Optional, Sequence
+from typing import List, Optional, Sequence, Set, Dict, Tuple
 
 from eth2spec.deneb.mainnet import BLSFieldElement
 from eth2spec.eip7594.mainnet import (
@@ -9,6 +9,7 @@ from eth2spec.eip7594.mainnet import (
 )
 from py_ecc.bls import G2ProofOfPossession as bls_pop
 
+import da.common
 from da.common import Column, Chunk, Attestation, BLSPrivateKey
 from da.encoder import DAEncoder
 from da.kzg_rs import kzg
@@ -25,9 +26,16 @@ class DABlob:
     rows_commitments: List[Commitment]
     rows_proofs: List[Proof]
 
+    def id(self) -> bytes:
+        return da.common.build_attestation_message(self.aggregated_column_commitment, self.rows_commitments)
+
+    def column_id(self) -> bytes:
+        return sha3_256(self.column.as_bytes()).digest()
+
 
 class DAVerifier:
     def __init__(self, sk: BLSPrivateKey):
+        self.attested_blobs: Dict[bytes, Tuple[bytes, Attestation]] = dict()
         self.sk = sk
 
     @staticmethod
@@ -79,6 +87,15 @@ class DAVerifier:
         return Attestation(signature=bls_pop.Sign(self.sk, message))
 
     def verify(self, blob: DABlob) -> Optional[Attestation]:
+        blob_id = blob.id()
+        if previous_attestation := self.attested_blobs.get(blob_id):
+            column_id, attestation = previous_attestation
+            # we already attested this blob; the attestation is cached, so return it
+            if column_id == blob.column_id():
+                return attestation
+            # we already attested the same data, but for a different column:
+            # skip
+            return None
         is_column_verified = DAVerifier._verify_column(
             blob.column,
             blob.column_commitment,
@@ -93,4 +110,6 @@ class DAVerifier:
         )
         if not are_chunks_verified:
             return
-        return self._build_attestation(blob)
+        attestation = self._build_attestation(blob)
+        self.attested_blobs[blob_id] = (blob.column_id(), attestation)
+        return attestation
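Design note: caching (column_id, attestation) per blob id makes verification idempotent for honest retries (the same column gets the same attestation back) while preventing a node from attesting twice to the same data under different columns. This also appears to be why test_verify builds a fresh DAVerifier(1987) inside its loop: a single shared verifier would dedup every column after the first, which is exactly the behavior test_verify_duplicated_blob pins down.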