Refactor code and tests
This commit is contained in:
parent f735f514e6
commit 6e026f3a7e
da/common.py (18 lines changed)
@@ -4,10 +4,8 @@ from itertools import chain, zip_longest, compress
from typing import List, Generator, Self, Sequence

from eth2spec.eip7594.mainnet import Bytes32, KZGCommitment as Commitment
from eth2spec.eip7594.mainnet import BLSFieldElement
from py_ecc.bls import G2ProofOfPossession


type BlobId = bytes


class NodeId(Bytes32):
@@ -49,22 +47,6 @@ def build_blob_id(row_commitments: Sequence[Commitment]) -> BlobId:
    return hasher.digest()


def derive_challenge(row_commitments: List[Commitment]) -> BLSFieldElement:
    """
    Derive a Fiat–Shamir challenge scalar h from the row commitments:
        h = BLAKE2b-31( DST || bytes(com1) || bytes(com2) || ... )
    """
    _DST = b"NOMOS_DA_V1"
    h = blake2b(digest_size=31)
    h.update(_DST)
    for com in row_commitments:
        h.update(bytes(com))
    digest31 = h.digest()  # 31 bytes
    # pad to 32 bytes for field element conversion
    padded = digest31 + b'\x00'
    return BLSFieldElement.from_bytes(padded)


class NomosDaG2ProofOfPossession(G2ProofOfPossession):
    # Domain specific tag for Nomos DA protocol
    DST = b"NOMOS_DA_AVAIL"
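The derive_challenge helper removed above (and moved into da/kzg_rs/bdfg_proving.py by this commit) hashes a domain-separation tag followed by every row commitment, so the challenge is deterministic and bound to the full commitment set. Below is a minimal self-contained sketch of the same construction, using plain 48-byte stand-ins for the commitments; it is illustrative only. The spec converts the padded digest with BLSFieldElement.from_bytes, while the sketch just uses a little-endian int, which keeps the 31-byte digest below the BLS12-381 scalar modulus.

from hashlib import blake2b

def challenge_int(commitments):
    # Same shape as derive_challenge: BLAKE2b-31( DST || com1 || com2 || ... )
    h = blake2b(digest_size=31)
    h.update(b"NOMOS_DA_V1")
    for com in commitments:
        h.update(com)
    # 31-byte digest padded to 32 bytes; value < 2^248, well under the scalar modulus
    return int.from_bytes(h.digest() + b"\x00", "little")

coms_a = [bytes([i]) * 48 for i in range(3)]   # hypothetical commitment stand-ins
coms_b = [bytes([i]) * 48 for i in range(3)]
assert challenge_int(coms_a) == challenge_int(coms_b)        # deterministic
assert challenge_int(coms_a) != challenge_int(coms_a[::-1])  # bound to order and content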

da/encoder.py

@@ -1,7 +1,6 @@
from dataclasses import dataclass
from itertools import batched
from typing import List, Sequence, Tuple
from hashlib import blake2b
from typing import List, Tuple

from eth2spec.eip7594.mainnet import KZGCommitment as Commitment, KZGProof as Proof, BLSFieldElement

@@ -9,6 +8,7 @@ from da.common import ChunksMatrix, Chunk, Row, derive_challenge
from da.kzg_rs import kzg, rs
from da.kzg_rs.common import GLOBAL_PARAMETERS, ROOTS_OF_UNITY, BYTES_PER_FIELD_ELEMENT, BLS_MODULUS
from da.kzg_rs.poly import Polynomial
from da.kzg_rs.bdfg_proving import compute_combined_polynomial

# Domain separation tag
_DST = b"NOMOS_DA_V1"
@@ -64,18 +64,6 @@ class DAEncoder:
            )
        return ChunksMatrix(__rs_encode_row(row) for row in chunks_matrix)

    @staticmethod
    def _combined_polynomial(
        polys: Sequence[Polynomial], h: BLSFieldElement
    ) -> Polynomial:
        combined_polynomial = polys[0]
        h_int = int(h)  # raw integer challenge
        int_pow = 1
        for poly in polys[1:]:
            int_pow = (int_pow * h_int) % BLS_MODULUS
            combined_polynomial = combined_polynomial + Polynomial({int_pow * coeff for coeff in poly},BLS_MODULUS)
        return combined_polynomial

    def _compute_combined_column_proofs(self, combined_poly: Polynomial) -> List[Proof]:
        total_cols = self.params.column_count * 2
        return [

@@ -88,7 +76,7 @@
        row_polynomials, row_commitments = zip(*self._compute_row_kzg_commitments(chunks_matrix))
        extended_matrix = self._rs_encode_rows(chunks_matrix)
        h = derive_challenge(row_commitments)
        combined_poly = self._combined_polynomial(row_polynomials, h)
        combined_poly = compute_combined_polynomial(row_polynomials, h)
        combined_column_proofs = self._compute_combined_column_proofs(combined_poly)
        result = EncodedData(
            data,
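The encode path now folds all row polynomials into one using powers of the challenge h (combined = p0 + h·p1 + h²·p2 + ...), so a single proof per column covers every row. Here is a toy sketch of that random linear combination with plain integer coefficients; Q, combine and evaluate are illustrative helpers, not part of the spec.

Q = 7919  # toy modulus standing in for BLS_MODULUS

def combine(polys, h):
    # combined[i] = sum_j h**j * polys[j][i]  (mod Q), coefficient-wise
    out = [0] * max(len(p) for p in polys)
    power = 1
    for p in polys:
        for i, c in enumerate(p):
            out[i] = (out[i] + power * c) % Q
        power = (power * h) % Q
    return out

def evaluate(p, x):
    return sum(c * pow(x, i, Q) for i, c in enumerate(p)) % Q

p0, p1 = [1, 2, 3], [4, 5]
h, x = 11, 6
combined = combine([p0, p1], h)
# Evaluations fold the same way, which is what the combined column proofs rely on.
assert evaluate(combined, x) == (evaluate(p0, x) + h * evaluate(p1, x)) % Q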

da/kzg_rs/bdfg_proving.py (new file, 61 lines)
@@ -0,0 +1,61 @@
from hashlib import blake2b
from typing import List, Sequence

from da.common import Chunk
from da.kzg_rs.common import BLS_MODULUS

from eth2spec.eip7594.mainnet import BLSFieldElement, KZGCommitment as Commitment
from eth2spec.utils import bls

from da.kzg_rs.poly import Polynomial


def derive_challenge(row_commitments: List[Commitment]) -> BLSFieldElement:
    """
    Derive a Fiat–Shamir challenge scalar h from the row commitments:
        h = BLAKE2b-31( DST || bytes(com1) || bytes(com2) || ... )
    """
    _DST = b"NOMOS_DA_V1"
    h = blake2b(digest_size=31)
    h.update(_DST)
    for com in row_commitments:
        h.update(bytes(com))
    digest31 = h.digest()  # 31 bytes
    # pad to 32 bytes for field element conversion
    padded = digest31 + b'\x00'
    return BLSFieldElement.from_bytes(padded)


def combine_commitments(row_commitments: List[Commitment], h: BLSFieldElement) -> Commitment:
    combined_commitment = bls.bytes48_to_G1(row_commitments[0])
    power = int(h) % BLS_MODULUS
    for commitment in row_commitments[1:]:
        commitment = bls.bytes48_to_G1(commitment)
        combined_commitment = bls.add(combined_commitment, bls.multiply(commitment, power))
        power = (power * int(h)) % BLS_MODULUS
    return bls.G1_to_bytes48(combined_commitment)


def compute_combined_polynomial(
    polys: Sequence[Polynomial], h: BLSFieldElement
) -> Polynomial:
    combined_polynomial = polys[0]
    h_int = int(h)  # raw integer challenge
    int_pow = 1
    for poly in polys[1:]:
        int_pow = (int_pow * h_int) % BLS_MODULUS
        combined_polynomial = combined_polynomial + Polynomial([int_pow * coeff for coeff in poly], BLS_MODULUS)
    return combined_polynomial


def compute_combined_evaluation(
    evals: Sequence[Chunk],
    h: BLSFieldElement
) -> BLSFieldElement:
    combined_eval_int = 0
    power_int = 1
    h_int = int(h) % BLS_MODULUS
    for chunk in evals:
        chunk_int = int.from_bytes(bytes(chunk), byteorder="big")
        combined_eval_int = (combined_eval_int + chunk_int * power_int) % BLS_MODULUS
        power_int = (power_int * h_int) % BLS_MODULUS
    return BLSFieldElement(combined_eval_int)
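combine_commitments works because a KZG commitment is linear in the committed polynomial: folding commitments with powers of h yields the commitment to exactly the polynomial compute_combined_polynomial builds, so the verifier never needs the row polynomials themselves. A self-contained check of that linearity using py_ecc (already a dependency of these specs), with arbitrary basis points standing in for the structured reference string; illustrative only, not the spec's commitment scheme.

from py_ecc.optimized_bls12_381 import G1, Z1, add, multiply, eq, curve_order

basis = [multiply(G1, k) for k in (2, 3, 5)]   # hypothetical SRS stand-in

def commit(poly):
    # Pedersen-style linear commitment: sum_i poly[i] * basis[i]
    acc = Z1
    for coeff, point in zip(poly, basis):
        acc = add(acc, multiply(point, coeff % curve_order))
    return acc

p0, p1 = [1, 2, 3], [4, 5, 6]
h = 7
folded = [(a + h * b) % curve_order for a, b in zip(p0, p1)]
# commit(p0 + h*p1) == commit(p0) + h*commit(p1): fold the rows or fold the
# commitments, the verifier lands on the same group element either way.
assert eq(commit(folded), add(commit(p0), multiply(commit(p1), h)))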

da/test_encoder.py

@@ -1,16 +1,15 @@
from itertools import chain, batched
from random import randrange, randbytes
from random import randbytes
from unittest import TestCase
from eth2spec.utils import bls
from eth2spec.deneb.mainnet import bytes_to_bls_field

from da import encoder
from da.common import derive_challenge
from da.common import Column
from kzg_rs.bdfg_proving import derive_challenge
from da.encoder import DAEncoderParams, DAEncoder
from da.verifier import DAVerifier
from da.verifier import DAVerifier, DAShare
from eth2spec.eip7594.mainnet import BYTES_PER_FIELD_ELEMENT, BLSFieldElement

from da.kzg_rs.common import BLS_MODULUS, ROOTS_OF_UNITY
from da.kzg_rs.common import ROOTS_OF_UNITY
from da.kzg_rs import kzg, rs
@@ -35,32 +34,19 @@ class TestEncoder(TestCase):
        self.assertEqual(columns_len, column_count)
        chunks_size = (len(data) // encoder_params.bytes_per_chunk) // encoder_params.column_count
        self.assertEqual(len(encoded_data.row_commitments), chunks_size)
        self.assertEqual(len(encoded_data.combined_column_proofs), columns_len)

        # verify rows
        h = derive_challenge(encoded_data.row_commitments)
        combined_commitment = bls.bytes48_to_G1(encoded_data.row_commitments[0])
        power = int(h) % BLS_MODULUS
        for commitment in encoded_data.row_commitments[1:]:
            commitment=bls.bytes48_to_G1(commitment)
            combined_commitment = bls.add(combined_commitment,bls.multiply(commitment,power))
            power = (power * int(h)) % BLS_MODULUS
        combined_commitment = bls.G1_to_bytes48(combined_commitment)
        for i, (column, proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.combined_column_proofs)):
            combined_eval_int = 0
            power_int = 1
            h_int = int(h)
            for data in column:
                chunk_int = int.from_bytes(bytes(data), byteorder="big")
                combined_eval_point = (combined_eval_int + chunk_int * power_int) % BLS_MODULUS
                power_int = (power_int * h_int) % BLS_MODULUS
            kzg.verify_element_proof(
                combined_eval_point,
                combined_commitment,
                proof,
                i,
                ROOTS_OF_UNITY
        verifier = DAVerifier()
        # verify columns
        for idx, (column, column_proof) in enumerate(zip(encoded_data.extended_matrix.columns, encoded_data.combined_column_proofs)):
            share = DAShare(
                column=Column(column),
                column_idx=idx,
                combined_column_proof=column_proof,
                row_commitments=encoded_data.row_commitments
            )
            verifier.verify(share)


    def test_chunkify(self):
        encoder_settings = DAEncoderParams(column_count=2, bytes_per_chunk=31)

da/verifier.py

@@ -1,16 +1,15 @@
from dataclasses import dataclass
from typing import List, Sequence, Set
from eth2spec.utils import bls
from eth2spec.deneb.mainnet import BLSFieldElement
from typing import List

from eth2spec.eip7594.mainnet import (
    KZGCommitment as Commitment,
    KZGProof as Proof,
)

import da.common
from da.common import Column, Chunk, BlobId, build_blob_id, derive_challenge
from da.common import Column, BlobId, build_blob_id
from da.kzg_rs import kzg
from da.kzg_rs.common import ROOTS_OF_UNITY, GLOBAL_PARAMETERS, BLS_MODULUS
from da.kzg_rs.bdfg_proving import combine_commitments, derive_challenge, compute_combined_evaluation
from da.kzg_rs.common import ROOTS_OF_UNITY

# Domain separation tag
_DST = b"NOMOS_DA_V1"
@@ -37,21 +36,14 @@ class DAVerifier:
        # 1. Derive challenge
        h = derive_challenge(blob.row_commitments)
        # 2. Reconstruct combined commitment: combined_commitment = sum_{i=0..l-1} h^i * row_commitments[i]
        combined_commitment = bls.bytes48_to_G1(blob.row_commitments[0])
        power = int(h) % BLS_MODULUS
        for commitment in blob.row_commitments[1:]:
            commitment = bls.bytes48_to_G1(commitment)
            combined_commitment = bls.add(combined_commitment,bls.multiply(commitment, power))
            power = (power * int(h)) % BLS_MODULUS
        combined_commitment = bls.G1_to_bytes48(combined_commitment)
        combined_commitment = combine_commitments(blob.row_commitments, h)
        # 3. Compute combined evaluation v = sum_{i=0..l-1} (h^i * column_data[i])
        combined_eval_int = 0
        power_int = 1
        h_int = int(h) % BLS_MODULUS
        for chunk in blob.column:
            chunk_int = int.from_bytes(bytes(chunk), byteorder="big")
            combined_eval_int = (combined_eval_int + chunk_int * power_int) % BLS_MODULUS
            power_int = (power_int * h_int) % BLS_MODULUS
        combined_eval_point = BLSFieldElement(combined_eval_int)
        combined_eval_point = compute_combined_evaluation(blob.column, h)
        # 4. Verify the single KZG proof for evaluation at point w^{column_idx}
        return kzg.verify_element_proof(combined_eval_point,combined_commitment,blob.combined_column_proof,blob.column_idx,ROOTS_OF_UNITY)
        return kzg.verify_element_proof(
            combined_eval_point,
            combined_commitment,
            blob.combined_column_proof,
            blob.column_idx,
            ROOTS_OF_UNITY
        )
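Step 3 above folds the column's chunks exactly as compute_combined_evaluation does: each chunk is read as a big-endian integer and weighted by the next power of h. A tiny worked example with toy numbers follows; MOD and the chunk values are illustrative, not protocol constants.

MOD = 101                        # toy modulus standing in for BLS_MODULUS
chunks = [b"\x02", b"\x03", b"\x05"]
h = 10
v, power = 0, 1
for c in chunks:
    v = (v + int.from_bytes(c, "big") * power) % MOD
    power = (power * h) % MOD
# 2*1 + 3*10 + 5*100 = 532, and 532 mod 101 = 27
assert v == 27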