Compare commits

...

3 Commits

Author | SHA1 | Message | Date
danielsanchezq | ff2fa2958c | Fix size in verifier to fit tests | 2024-06-24 16:37:46 +02:00
danielsanchezq | 442cec3c89 | Fix domains in verifier, decoder and dispersal | 2024-06-24 15:50:06 +02:00
danielsanchezq | befc472768 | Fix domains in verifier, decoder and dispersal | 2024-06-24 15:37:07 +02:00
5 changed files with 106 additions and 73 deletions
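Across the three commits, the encoder and verifier stop rebuilding evaluation domains from whatever local slice length happens to be at hand and instead take the domain (or its size) from the caller: `compute_kzg_row_commitments`, `compute_kzg_column_commitments`, `compute_aggregated_column_commitment` and `rs_encode_rows` now receive a domain, and `DaVerifier::verify` takes a `rows_domain_size`. As background, a minimal stand-alone sketch of the kind of domain these APIs now expect, using the `ark-poly`/`ark-bls12-381` crates the code already depends on; the size 16 mirrors the tests' `DOMAIN_SIZE`:

```rust
use ark_bls12_381::Fr;
use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};

fn main() {
    // The domain size is now an explicit input rather than something derived
    // from a local slice length.
    let domain_size = 16usize;
    let domain = GeneralEvaluationDomain::<Fr>::new(domain_size)
        .expect("BLS12-381's scalar field supports radix-2 domains of this size");

    // `new` rounds the requested size up to the next power of two; 16 already is one.
    assert_eq!(domain.size(), 16);
    // The domain's elements are the 16th roots of unity: the evaluation points
    // that commitments, proofs and the Reed-Solomon extension all have to agree on.
    assert_eq!(domain.elements().count(), 16);
}
```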

View File

@ -250,6 +250,7 @@ mod tests {
verifiers: &[DaVerifier],
) -> Vec<Attestation> {
let mut attestations = Vec::new();
let domain_size = encoded_data.extended_data.0[0].len();
for (i, column) in encoded_data.extended_data.columns().enumerate() {
let verifier = &verifiers[i];
let da_blob = DaBlob {
@ -264,7 +265,7 @@ mod tests {
.map(|proofs| proofs.get(i).cloned().unwrap())
.collect(),
};
attestations.push(verifier.verify(da_blob).unwrap());
attestations.push(verifier.verify(da_blob, domain_size).unwrap());
}
attestations
}
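The helper above now measures the width of an extended row once (`encoded_data.extended_data.0[0].len()`) and hands it to every `verify` call. A stand-alone sketch, with plain ark-poly and made-up sizes, of why that width is the relevant domain size: the encoder interpolates each original row and re-evaluates it on a larger domain (twice as large in these tests), so element proofs over the extended rows only check out against the extended-width domain.

```rust
use ark_bls12_381::Fr;
use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};

fn main() {
    // An original row of 8 field elements, viewed as evaluations on an 8-point domain.
    let row: Vec<Fr> = (1u64..=8).map(Fr::from).collect();
    let original = GeneralEvaluationDomain::<Fr>::new(8).unwrap();
    let extended = GeneralEvaluationDomain::<Fr>::new(16).unwrap();

    // Interpolate on the original domain, then re-evaluate on the doubled domain
    // (a Reed-Solomon style extension).
    let coeffs = original.ifft(&row);
    let extended_row = extended.fft(&coeffs);

    // The extended row is twice as wide; this width is what the test helper
    // feeds to `verifier.verify(da_blob, domain_size)`.
    assert_eq!(extended_row.len(), 16);

    // In this sketch the original evaluations reappear at the even positions,
    // because the 8-point domain is exactly the set of squares of the 16-point
    // domain (the repo's encoder may lay its extended points out differently).
    for (orig, ext) in row.iter().zip(extended_row.iter().step_by(2)) {
        assert_eq!(orig, ext);
    }
}
```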

View File

@ -3,7 +3,7 @@ use std::ops::Div;
// crates
use ark_ff::{BigInteger, PrimeField};
use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
use ark_poly::EvaluationDomain;
use kzgrs::common::bytes_to_polynomial_unchecked;
use kzgrs::fk20::fk20_batch_generate_elements_proofs;
use kzgrs::{
@ -205,7 +205,6 @@ impl DaEncoder {
pub mod test {
use crate::encoder::{DaEncoder, DaEncoderParams};
use crate::global::GLOBAL_PARAMETERS;
use ark_bls12_381::Fr;
use ark_ff::PrimeField;
use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
use itertools::izip;
@ -215,10 +214,10 @@ pub mod test {
BYTES_PER_FIELD_ELEMENT,
};
use rand::RngCore;
use std::io::Read;
use std::ops::Div;
pub const PARAMS: DaEncoderParams = DaEncoderParams::default_with(16);
pub const DOMAIN_SIZE: usize = 16;
pub const PARAMS: DaEncoderParams = DaEncoderParams::default_with(DOMAIN_SIZE);
pub const ENCODER: DaEncoder = DaEncoder::new(PARAMS);
pub fn rand_data(elements_count: usize) -> Vec<u8> {
@ -244,8 +243,9 @@ pub mod test {
#[test]
fn test_compute_row_kzg_commitments() {
let data = rand_data(32);
let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
let matrix = ENCODER.chunkify(data.as_ref());
let commitments_data = DaEncoder::compute_kzg_row_commitments(&matrix).unwrap();
let commitments_data = DaEncoder::compute_kzg_row_commitments(&matrix, domain).unwrap();
assert_eq!(commitments_data.len(), matrix.len());
}
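`compute_kzg_row_commitments` now takes the evaluation domain as an argument instead of deriving one internally. Conceptually, the per-row step this test exercises is: interpret the row's bytes as evaluations over that domain, interpolate, and KZG-commit to the result. A rough sketch using the kzgrs helpers that appear elsewhere in this diff (`bytes_to_polynomial`, `commit_polynomial`); the `commit_row` function and its exact behavior are illustrative, not the crate's internals:

```rust
// Rough sketch only: `commit_row` and its arguments are illustrative, and the
// imports assume we are inside the same crate as the encoder tests.
use kzgrs::{
    bytes_to_polynomial, commit_polynomial, Commitment, PolynomialEvaluationDomain,
    BYTES_PER_FIELD_ELEMENT,
};

use crate::global::GLOBAL_PARAMETERS;

fn commit_row(row_bytes: &[u8], domain: PolynomialEvaluationDomain) -> Option<Commitment> {
    // Treat the row bytes as evaluations over the supplied domain and
    // interpolate them into a polynomial.
    let (_evals, polynomial) =
        bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(row_bytes, domain).ok()?;
    // Commit to that polynomial with the global KZG setup.
    commit_polynomial(&polynomial, &GLOBAL_PARAMETERS).ok()
}
```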
@ -253,7 +253,7 @@ pub mod test {
fn test_evals_to_chunk_matrix() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let domain = PolynomialEvaluationDomain::new(32).unwrap();
let domain = PolynomialEvaluationDomain::new(DOMAIN_SIZE).unwrap();
let (poly_data, _): (Vec<_>, Vec<_>) =
DaEncoder::compute_kzg_row_commitments(&matrix, domain)
.unwrap()
@ -272,14 +272,15 @@ pub mod test {
#[test]
fn test_rs_encode_rows() {
let data = rand_data(32);
let domain = GeneralEvaluationDomain::new(32).unwrap();
let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
let matrix = ENCODER.chunkify(data.as_ref());
let (poly_data, _): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_row_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
let (evals, _): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
let (poly_data, _): (Vec<_>, Vec<_>) =
DaEncoder::compute_kzg_row_commitments(&matrix, domain)
.unwrap()
.into_iter()
.unzip();
let (evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
let extended_rows = DaEncoder::rs_encode_rows(&polynomials, domain);
// check encoding went well, original evaluation points vs extended ones
for (e1, e2) in izip!(evals.iter(), extended_rows.iter()) {
for (c1, c2) in izip!(&e1.evals, &e2.evals) {
@ -305,33 +306,33 @@ pub mod test {
#[test]
fn test_compute_row_proofs() {
let data = rand_data(32);
let domain = GeneralEvaluationDomain::new(32).unwrap();
let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
let matrix = ENCODER.chunkify(data.as_ref());
let (poly_data, commitments): (Vec<_>, Vec<_>) =
DaEncoder::compute_kzg_row_commitments(&matrix)
DaEncoder::compute_kzg_row_commitments(&matrix, domain)
.unwrap()
.into_iter()
.unzip();
let extended_evaluations = DaEncoder::rs_encode_rows(&poly_data);
let (evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
let (_evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
let extended_evaluations = DaEncoder::rs_encode_rows(&polynomials, domain);
let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_evaluations);
let proofs = DaEncoder::compute_rows_proofs(&polynomials).unwrap();
// let checks = izip!(matrix.iter(), &commitments, &proofs);
// for (row, commitment, proofs) in checks {
// assert_eq!(proofs.len(), row.len() * 2);
// for (i, chunk) in row.iter().enumerate() {
// let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
// assert!(verify_element_proof(
// i,
// &element,
// &commitment,
// &proofs[i],
// domain,
// &GLOBAL_PARAMETERS
// ));
// }
// }
let checks = izip!(matrix.iter(), &commitments, &proofs);
for (row, commitment, proofs) in checks {
assert_eq!(proofs.len(), row.len() * 2);
for (i, chunk) in row.iter().enumerate() {
let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
assert!(verify_element_proof(
i,
&element,
&commitment,
&proofs[i],
domain,
&GLOBAL_PARAMETERS
));
}
}
let checks = izip!(extended_matrix.iter(), &commitments, &proofs);
for (row, commitment, proofs) in checks {
assert_eq!(proofs.len(), row.len());
@ -352,8 +353,9 @@ pub mod test {
#[test]
fn test_compute_column_kzg_commitments() {
let data = rand_data(32);
let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
let matrix = ENCODER.chunkify(data.as_ref());
let commitments_data = DaEncoder::compute_kzg_column_commitments(&matrix).unwrap();
let commitments_data = DaEncoder::compute_kzg_column_commitments(&matrix, domain).unwrap();
assert_eq!(commitments_data.len(), matrix.columns().count());
}
@ -361,38 +363,58 @@ pub mod test {
fn test_compute_aggregated_column_kzg_commitment() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (_, commitments): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_column_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let _ = DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments).unwrap();
let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
let (_, commitments): (Vec<_>, Vec<_>) =
DaEncoder::compute_kzg_column_commitments(&matrix, domain)
.unwrap()
.into_iter()
.unzip();
let _ =
DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments, domain).unwrap();
}
#[test]
fn test_compute_aggregated_column_kzg_proofs() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
let (_poly_data, commitments): (Vec<_>, Vec<_>) =
DaEncoder::compute_kzg_column_commitments(&matrix)
DaEncoder::compute_kzg_column_commitments(&matrix, domain)
.unwrap()
.into_iter()
.unzip();
let ((_evals, polynomial), _aggregated_commitment) =
DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments).unwrap();
DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments, domain).unwrap();
DaEncoder::compute_aggregated_column_proofs(&polynomial).unwrap();
}
#[test]
fn test_roots() {
let d1: Vec<_> = GeneralEvaluationDomain::<Fr>::new(16)
.unwrap()
.elements()
.collect();
let d2: Vec<_> = GeneralEvaluationDomain::<Fr>::new(32)
.unwrap()
.elements()
.take(16)
.collect();
assert_eq!(d1, d2);
fn test_full_encode_flow() {
let data = rand_data(32);
let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
let encoding_data = ENCODER.encode(&data).unwrap();
assert_eq!(encoding_data.data, data);
assert_eq!(encoding_data.row_commitments.len(), 4);
assert_eq!(encoding_data.column_commitments.len(), 16);
assert_eq!(encoding_data.rows_proofs.len(), 4);
assert_eq!(encoding_data.rows_proofs[0].len(), 16);
assert_eq!(encoding_data.aggregated_column_proofs.len(), 16);
for (row, proofs, commitment) in izip!(
encoding_data.extended_data.rows(),
encoding_data.rows_proofs,
encoding_data.row_commitments
) {
for (chunk_idx, chunk) in row.iter().enumerate() {
let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
assert!(verify_element_proof(
chunk_idx,
&element,
&commitment,
&proofs[chunk_idx],
domain,
&GLOBAL_PARAMETERS
));
}
}
}
}
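The removed `test_roots` asserted that the elements of a 16-point domain are a prefix of a 32-point domain's elements. For radix-2 domains that does not hold: the smaller domain is the set of squares (the even-indexed elements) of the larger one, which is presumably why the test went away and why a proof produced against one domain size will not verify against another. A small stand-alone check of that fact:

```rust
use ark_bls12_381::Fr;
use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};

fn main() {
    let d16: Vec<Fr> = GeneralEvaluationDomain::<Fr>::new(16).unwrap().elements().collect();
    let d32: Vec<Fr> = GeneralEvaluationDomain::<Fr>::new(32).unwrap().elements().collect();

    // The first 16 elements of the 32-point domain are NOT the 16-point domain...
    assert_ne!(d16, d32[..16].to_vec());

    // ...but its even-indexed elements are: the 16th roots of unity are the
    // squares of the 32nd roots of unity.
    let evens: Vec<Fr> = d32.iter().copied().step_by(2).collect();
    assert_eq!(d16, evens);
}
```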

View File

@ -1,5 +1,4 @@
use ark_poly::EvaluationDomain;
use kzgrs::{global_parameters_from_randomness, GlobalParameters, PolynomialEvaluationDomain};
use kzgrs::{global_parameters_from_randomness, GlobalParameters};
use once_cell::sync::Lazy;
pub static GLOBAL_PARAMETERS: Lazy<GlobalParameters> = Lazy::new(|| {

View File

@ -1,13 +1,13 @@
// std
use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
use ark_poly::EvaluationDomain;
// crates
use blst::min_sig::{PublicKey, SecretKey};
use itertools::{izip, Itertools};
use kzgrs::common::field_element_from_bytes_le;
use kzgrs::{
bytes_to_polynomial, commit_polynomial, verify_element_proof, Commitment, Proof,
BYTES_PER_FIELD_ELEMENT,
bytes_to_polynomial, commit_polynomial, verify_element_proof, Commitment,
PolynomialEvaluationDomain, Proof, BYTES_PER_FIELD_ELEMENT,
};
use crate::common::blob::DaBlob;
@ -44,13 +44,15 @@ impl DaVerifier {
aggregated_column_commitment: &Commitment,
aggregated_column_proof: &Proof,
index: usize,
rows_domain: PolynomialEvaluationDomain,
) -> bool {
let domain =
GeneralEvaluationDomain::new(column.len()).expect("Domain should be able to build");
let column_domain =
PolynomialEvaluationDomain::new(column.len()).expect("Domain should be able to build");
// 1. compute commitment for column
let Ok((_, polynomial)) =
bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), domain)
else {
let Ok((_, polynomial)) = bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(
column.as_bytes().as_slice(),
column_domain,
) else {
return false;
};
let Ok(computed_column_commitment) = commit_polynomial(&polynomial, &GLOBAL_PARAMETERS)
@ -72,7 +74,7 @@ impl DaVerifier {
&element,
aggregated_column_commitment,
aggregated_column_proof,
domain,
rows_domain,
&GLOBAL_PARAMETERS,
)
}
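`verify_column` now distinguishes two domains: a column-sized domain used only to interpolate the column bytes for the column-commitment check, and the caller-supplied `rows_domain` passed to `verify_element_proof` for the aggregated column proof. Previously the column-length domain was used for both, which is the mix-up these commits fix. A tiny sketch of the two constructions, with illustrative sizes only:

```rust
use ark_poly::EvaluationDomain;
use kzgrs::PolynomialEvaluationDomain;

// Illustrative sizes only, not taken from the repo.
fn build_domains(column_len: usize, rows_domain_size: usize) {
    // Sized to the column itself; used only to interpolate the column bytes into
    // the polynomial whose commitment is checked against `column_commitment`.
    let _column_domain = PolynomialEvaluationDomain::new(column_len)
        .expect("Domain should be able to build");
    // Supplied by the caller; used for `verify_element_proof` on the aggregated
    // column commitment, so it must match the domain the encoder committed over.
    let _rows_domain = PolynomialEvaluationDomain::new(rows_domain_size)
        .expect("Domain should be able to build");
}
```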
@ -82,10 +84,8 @@ impl DaVerifier {
commitment: &Commitment,
proof: &Proof,
index: usize,
domain_size: usize,
domain: PolynomialEvaluationDomain,
) -> bool {
let domain =
GeneralEvaluationDomain::new(domain_size).expect("Domain should be able to build");
let element = field_element_from_bytes_le(chunk.as_bytes().as_slice());
verify_element_proof(
index,
@ -102,6 +102,7 @@ impl DaVerifier {
commitments: &[Commitment],
proofs: &[Proof],
index: usize,
domain: PolynomialEvaluationDomain,
) -> bool {
if ![chunks.len(), commitments.len(), proofs.len()]
.iter()
@ -110,7 +111,7 @@ impl DaVerifier {
return false;
}
for (chunk, commitment, proof) in izip!(chunks, commitments, proofs) {
if !DaVerifier::verify_chunk(chunk, commitment, proof, index, chunk.len()) {
if !DaVerifier::verify_chunk(chunk, commitment, proof, index, domain) {
return false;
}
}
@ -133,13 +134,16 @@ impl DaVerifier {
}
}
pub fn verify(&self, blob: DaBlob) -> Option<Attestation> {
pub fn verify(&self, blob: DaBlob, rows_domain_size: usize) -> Option<Attestation> {
let rows_domain = PolynomialEvaluationDomain::new(rows_domain_size)
.expect("Domain should be able to build");
let is_column_verified = DaVerifier::verify_column(
&blob.column,
&blob.column_commitment,
&blob.aggregated_column_commitment,
&blob.aggregated_column_proof,
self.index,
rows_domain,
);
if !is_column_verified {
return None;
@ -150,6 +154,7 @@ impl DaVerifier {
&blob.rows_commitments,
&blob.rows_proofs,
self.index,
rows_domain,
);
if !are_chunks_verified {
return None;
@ -176,7 +181,7 @@ mod test {
#[test]
fn test_verify_column() {
let column: Column = (0..10).map(|i| Chunk(vec![i; 32])).collect();
let domain = GeneralEvaluationDomain::new(32).unwrap();
let domain = GeneralEvaluationDomain::new(10).unwrap();
let (_, column_poly) =
bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), domain)
.unwrap();
@ -207,14 +212,16 @@ mod test {
&column_commitment,
&aggregated_commitment,
&column_proof,
0
0,
domain
));
}
#[test]
fn test_verify() {
let encoder = &ENCODER;
let data = rand_data(8);
let data = rand_data(32);
let domain_size = 16usize;
let mut rng = thread_rng();
let sks: Vec<SecretKey> = (0..16)
.map(|_| {
@ -230,6 +237,7 @@ mod test {
.collect();
let encoded_data = encoder.encode(&data).unwrap();
for (i, column) in encoded_data.extended_data.columns().enumerate() {
println!("{i}");
let verifier = &verifiers[i];
let da_blob = DaBlob {
column,
@ -243,7 +251,7 @@ mod test {
.map(|proofs| proofs.get(i).cloned().unwrap())
.collect(),
};
assert!(verifier.verify(da_blob).is_some());
assert!(verifier.verify(da_blob, domain_size).is_some());
}
}
}
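`test_verify` now encodes 32 elements and pins the domain size to 16, matching `DaEncoderParams::default_with(16)`. The test helper in the first file of this comparison instead derives the same number from the encoded data, which keeps the value from drifting if the encoder parameters change:

```rust
// As in the first file's test helper: the rows' domain size is the width of an
// extended row of the encoded data.
let domain_size = encoded_data.extended_data.0[0].len();
```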

View File

@ -45,7 +45,10 @@ impl DaVerifier for KzgrsDaVerifier {
fn verify(&self, blob: &Self::DaBlob) -> Result<Self::Attestation, Self::Error> {
let blob = blob.clone();
match self.verifier.verify(blob) {
// TODO: Prepare the domain depending the size, if fixed, so fixed domain, if not it needs
// to come with some metadata.
let domain_size = 2usize;
match self.verifier.verify(blob, domain_size) {
Some(attestation) => Ok(attestation),
None => Err(KzgrsDaVerifierError::VerificationError),
}
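The hardcoded `domain_size = 2usize` is a placeholder; as the TODO says, the real value is either fixed by the protocol (and can then be configured once) or has to arrive as metadata with the blob. A hedged sketch of the configured option, assuming a hypothetical `rows_domain_size` field threaded in at construction time; neither the field nor `verify_blob` exist in the current code, and the surrounding types are the repo's:

```rust
// Hypothetical sketch only; not the current adapter.
pub struct KzgrsDaVerifier {
    verifier: DaVerifier,      // the kzgrs-backend verifier wrapped by this adapter
    rows_domain_size: usize,   // hypothetical: width of the encoder's extended rows
}

impl KzgrsDaVerifier {
    fn verify_blob(&self, blob: DaBlob) -> Option<Attestation> {
        // Same call as above, with a configured size instead of the literal `2`.
        self.verifier.verify(blob, self.rows_domain_size)
    }
}
```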