Compare commits

...

3 Commits

Author          SHA1        Message                                         Date
danielsanchezq  ff2fa2958c  Fix size in verifier to fit tests               2024-06-24 16:37:46 +02:00
danielsanchezq  442cec3c89  Fix domains in verifier, decoder and dispersal  2024-06-24 15:50:06 +02:00
danielsanchezq  befc472768  Fix domains in verifier, decoder and dispersal  2024-06-24 15:37:07 +02:00
5 changed files with 106 additions and 73 deletions
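
Note: the diffs below thread an explicit polynomial evaluation domain through the encoder and verifier APIs instead of rebuilding domains from ad-hoc sizes at each call site. As a rough sketch of the resulting call shape, using identifiers from the test code in this diff (the values are the tests' own, not protocol parameters):

// Sketch assembled from the tests in this diff; not a drop-in snippet.
let encoded_data = ENCODER.encode(&data).unwrap();
// DaVerifier::verify now takes the rows' domain size explicitly;
// the dispersal test recovers it from the width of the extended data.
let domain_size = encoded_data.extended_data.0[0].len();
assert!(verifier.verify(da_blob, domain_size).is_some());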

View File

@@ -250,6 +250,7 @@ mod tests {
         verifiers: &[DaVerifier],
     ) -> Vec<Attestation> {
         let mut attestations = Vec::new();
+        let domain_size = encoded_data.extended_data.0[0].len();
         for (i, column) in encoded_data.extended_data.columns().enumerate() {
             let verifier = &verifiers[i];
             let da_blob = DaBlob {
@@ -264,7 +265,7 @@ mod tests {
                     .map(|proofs| proofs.get(i).cloned().unwrap())
                     .collect(),
             };
-            attestations.push(verifier.verify(da_blob).unwrap());
+            attestations.push(verifier.verify(da_blob, domain_size).unwrap());
         }
         attestations
     }

View File

@@ -3,7 +3,7 @@ use std::ops::Div;
 // crates
 use ark_ff::{BigInteger, PrimeField};
-use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
+use ark_poly::EvaluationDomain;
 use kzgrs::common::bytes_to_polynomial_unchecked;
 use kzgrs::fk20::fk20_batch_generate_elements_proofs;
 use kzgrs::{
@@ -205,7 +205,6 @@ impl DaEncoder {
 pub mod test {
     use crate::encoder::{DaEncoder, DaEncoderParams};
     use crate::global::GLOBAL_PARAMETERS;
-    use ark_bls12_381::Fr;
     use ark_ff::PrimeField;
     use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
     use itertools::izip;
@@ -215,10 +214,10 @@ pub mod test {
         BYTES_PER_FIELD_ELEMENT,
     };
     use rand::RngCore;
-    use std::io::Read;
     use std::ops::Div;

-    pub const PARAMS: DaEncoderParams = DaEncoderParams::default_with(16);
+    pub const DOMAIN_SIZE: usize = 16;
+    pub const PARAMS: DaEncoderParams = DaEncoderParams::default_with(DOMAIN_SIZE);
     pub const ENCODER: DaEncoder = DaEncoder::new(PARAMS);

     pub fn rand_data(elements_count: usize) -> Vec<u8> {
@@ -244,8 +243,9 @@ pub mod test {
     #[test]
     fn test_compute_row_kzg_commitments() {
         let data = rand_data(32);
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let matrix = ENCODER.chunkify(data.as_ref());
-        let commitments_data = DaEncoder::compute_kzg_row_commitments(&matrix).unwrap();
+        let commitments_data = DaEncoder::compute_kzg_row_commitments(&matrix, domain).unwrap();
         assert_eq!(commitments_data.len(), matrix.len());
     }

@@ -253,7 +253,7 @@ pub mod test {
     fn test_evals_to_chunk_matrix() {
         let data = rand_data(32);
         let matrix = ENCODER.chunkify(data.as_ref());
-        let domain = PolynomialEvaluationDomain::new(32).unwrap();
+        let domain = PolynomialEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let (poly_data, _): (Vec<_>, Vec<_>) =
             DaEncoder::compute_kzg_row_commitments(&matrix, domain)
                 .unwrap()
@@ -272,14 +272,15 @@ pub mod test {
     #[test]
     fn test_rs_encode_rows() {
         let data = rand_data(32);
-        let domain = GeneralEvaluationDomain::new(32).unwrap();
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let matrix = ENCODER.chunkify(data.as_ref());
-        let (poly_data, _): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_row_commitments(&matrix)
-            .unwrap()
-            .into_iter()
-            .unzip();
-        let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
-        let (evals, _): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let (poly_data, _): (Vec<_>, Vec<_>) =
+            DaEncoder::compute_kzg_row_commitments(&matrix, domain)
+                .unwrap()
+                .into_iter()
+                .unzip();
+        let (evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let extended_rows = DaEncoder::rs_encode_rows(&polynomials, domain);
         // check encoding went well, original evaluation points vs extended ones
         for (e1, e2) in izip!(evals.iter(), extended_rows.iter()) {
             for (c1, c2) in izip!(&e1.evals, &e2.evals) {
@@ -305,33 +306,33 @@ pub mod test {
     #[test]
     fn test_compute_row_proofs() {
         let data = rand_data(32);
-        let domain = GeneralEvaluationDomain::new(32).unwrap();
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let matrix = ENCODER.chunkify(data.as_ref());
         let (poly_data, commitments): (Vec<_>, Vec<_>) =
-            DaEncoder::compute_kzg_row_commitments(&matrix)
+            DaEncoder::compute_kzg_row_commitments(&matrix, domain)
                 .unwrap()
                 .into_iter()
                 .unzip();
-        let extended_evaluations = DaEncoder::rs_encode_rows(&poly_data);
-        let (evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let (_evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let extended_evaluations = DaEncoder::rs_encode_rows(&polynomials, domain);
         let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_evaluations);
         let proofs = DaEncoder::compute_rows_proofs(&polynomials).unwrap();
-        // let checks = izip!(matrix.iter(), &commitments, &proofs);
-        // for (row, commitment, proofs) in checks {
-        //     assert_eq!(proofs.len(), row.len() * 2);
-        //     for (i, chunk) in row.iter().enumerate() {
-        //         let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
-        //         assert!(verify_element_proof(
-        //             i,
-        //             &element,
-        //             &commitment,
-        //             &proofs[i],
-        //             domain,
-        //             &GLOBAL_PARAMETERS
-        //         ));
-        //     }
-        // }
+        let checks = izip!(matrix.iter(), &commitments, &proofs);
+        for (row, commitment, proofs) in checks {
+            assert_eq!(proofs.len(), row.len() * 2);
+            for (i, chunk) in row.iter().enumerate() {
+                let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
+                assert!(verify_element_proof(
+                    i,
+                    &element,
+                    &commitment,
+                    &proofs[i],
+                    domain,
+                    &GLOBAL_PARAMETERS
+                ));
+            }
+        }
         let checks = izip!(extended_matrix.iter(), &commitments, &proofs);
         for (row, commitment, proofs) in checks {
             assert_eq!(proofs.len(), row.len());
@@ -352,8 +353,9 @@ pub mod test {
     #[test]
     fn test_compute_column_kzg_commitments() {
         let data = rand_data(32);
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let matrix = ENCODER.chunkify(data.as_ref());
-        let commitments_data = DaEncoder::compute_kzg_column_commitments(&matrix).unwrap();
+        let commitments_data = DaEncoder::compute_kzg_column_commitments(&matrix, domain).unwrap();
         assert_eq!(commitments_data.len(), matrix.columns().count());
     }

@@ -361,38 +363,58 @@ pub mod test {
     fn test_compute_aggregated_column_kzg_commitment() {
         let data = rand_data(32);
         let matrix = ENCODER.chunkify(data.as_ref());
-        let (_, commitments): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_column_commitments(&matrix)
-            .unwrap()
-            .into_iter()
-            .unzip();
-        let _ = DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments).unwrap();
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
+        let (_, commitments): (Vec<_>, Vec<_>) =
+            DaEncoder::compute_kzg_column_commitments(&matrix, domain)
+                .unwrap()
+                .into_iter()
+                .unzip();
+        let _ =
+            DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments, domain).unwrap();
     }

     #[test]
     fn test_compute_aggregated_column_kzg_proofs() {
         let data = rand_data(32);
         let matrix = ENCODER.chunkify(data.as_ref());
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let (_poly_data, commitments): (Vec<_>, Vec<_>) =
-            DaEncoder::compute_kzg_column_commitments(&matrix)
+            DaEncoder::compute_kzg_column_commitments(&matrix, domain)
                 .unwrap()
                 .into_iter()
                 .unzip();
         let ((_evals, polynomial), _aggregated_commitment) =
-            DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments).unwrap();
+            DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments, domain).unwrap();
         DaEncoder::compute_aggregated_column_proofs(&polynomial).unwrap();
     }

     #[test]
-    fn test_roots() {
-        let d1: Vec<_> = GeneralEvaluationDomain::<Fr>::new(16)
-            .unwrap()
-            .elements()
-            .collect();
-        let d2: Vec<_> = GeneralEvaluationDomain::<Fr>::new(32)
-            .unwrap()
-            .elements()
-            .take(16)
-            .collect();
-        assert_eq!(d1, d2);
+    fn test_full_encode_flow() {
+        let data = rand_data(32);
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
+        let encoding_data = ENCODER.encode(&data).unwrap();
+        assert_eq!(encoding_data.data, data);
+        assert_eq!(encoding_data.row_commitments.len(), 4);
+        assert_eq!(encoding_data.column_commitments.len(), 16);
+        assert_eq!(encoding_data.rows_proofs.len(), 4);
+        assert_eq!(encoding_data.rows_proofs[0].len(), 16);
+        assert_eq!(encoding_data.aggregated_column_proofs.len(), 16);
+        for (row, proofs, commitment) in izip!(
+            encoding_data.extended_data.rows(),
+            encoding_data.rows_proofs,
+            encoding_data.row_commitments
+        ) {
+            for (chunk_idx, chunk) in row.iter().enumerate() {
+                let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
+                assert!(verify_element_proof(
+                    chunk_idx,
+                    &element,
+                    &commitment,
+                    &proofs[chunk_idx],
+                    domain,
+                    &GLOBAL_PARAMETERS
+                ));
+            }
+        }
     }
 }

View File

@@ -1,5 +1,4 @@
-use ark_poly::EvaluationDomain;
-use kzgrs::{global_parameters_from_randomness, GlobalParameters, PolynomialEvaluationDomain};
+use kzgrs::{global_parameters_from_randomness, GlobalParameters};
 use once_cell::sync::Lazy;

 pub static GLOBAL_PARAMETERS: Lazy<GlobalParameters> = Lazy::new(|| {

View File

@@ -1,13 +1,13 @@
 // std
-use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
+use ark_poly::EvaluationDomain;
 // crates
 use blst::min_sig::{PublicKey, SecretKey};
 use itertools::{izip, Itertools};
 use kzgrs::common::field_element_from_bytes_le;
 use kzgrs::{
-    bytes_to_polynomial, commit_polynomial, verify_element_proof, Commitment, Proof,
-    BYTES_PER_FIELD_ELEMENT,
+    bytes_to_polynomial, commit_polynomial, verify_element_proof, Commitment,
+    PolynomialEvaluationDomain, Proof, BYTES_PER_FIELD_ELEMENT,
 };
 use crate::common::blob::DaBlob;
@@ -44,13 +44,15 @@ impl DaVerifier {
         aggregated_column_commitment: &Commitment,
         aggregated_column_proof: &Proof,
         index: usize,
+        rows_domain: PolynomialEvaluationDomain,
     ) -> bool {
-        let domain =
-            GeneralEvaluationDomain::new(column.len()).expect("Domain should be able to build");
+        let column_domain =
+            PolynomialEvaluationDomain::new(column.len()).expect("Domain should be able to build");
         // 1. compute commitment for column
-        let Ok((_, polynomial)) =
-            bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), domain)
-        else {
+        let Ok((_, polynomial)) = bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(
+            column.as_bytes().as_slice(),
+            column_domain,
+        ) else {
             return false;
         };
         let Ok(computed_column_commitment) = commit_polynomial(&polynomial, &GLOBAL_PARAMETERS)
@@ -72,7 +74,7 @@ impl DaVerifier {
             &element,
             aggregated_column_commitment,
             aggregated_column_proof,
-            domain,
+            rows_domain,
             &GLOBAL_PARAMETERS,
         )
     }
@@ -82,10 +84,8 @@ impl DaVerifier {
         commitment: &Commitment,
         proof: &Proof,
         index: usize,
-        domain_size: usize,
+        domain: PolynomialEvaluationDomain,
     ) -> bool {
-        let domain =
-            GeneralEvaluationDomain::new(domain_size).expect("Domain should be able to build");
         let element = field_element_from_bytes_le(chunk.as_bytes().as_slice());
         verify_element_proof(
             index,
@@ -102,6 +102,7 @@ impl DaVerifier {
         commitments: &[Commitment],
         proofs: &[Proof],
         index: usize,
+        domain: PolynomialEvaluationDomain,
     ) -> bool {
         if ![chunks.len(), commitments.len(), proofs.len()]
             .iter()
@@ -110,7 +111,7 @@ impl DaVerifier {
             return false;
         }
         for (chunk, commitment, proof) in izip!(chunks, commitments, proofs) {
-            if !DaVerifier::verify_chunk(chunk, commitment, proof, index, chunk.len()) {
+            if !DaVerifier::verify_chunk(chunk, commitment, proof, index, domain) {
                 return false;
             }
         }
@@ -133,13 +134,16 @@ impl DaVerifier {
         }
     }

-    pub fn verify(&self, blob: DaBlob) -> Option<Attestation> {
+    pub fn verify(&self, blob: DaBlob, rows_domain_size: usize) -> Option<Attestation> {
+        let rows_domain = PolynomialEvaluationDomain::new(rows_domain_size)
+            .expect("Domain should be able to build");
         let is_column_verified = DaVerifier::verify_column(
             &blob.column,
             &blob.column_commitment,
             &blob.aggregated_column_commitment,
             &blob.aggregated_column_proof,
             self.index,
+            rows_domain,
         );
         if !is_column_verified {
             return None;
@@ -150,6 +154,7 @@ impl DaVerifier {
             &blob.rows_commitments,
             &blob.rows_proofs,
             self.index,
+            rows_domain,
         );
         if !are_chunks_verified {
             return None;
@@ -176,7 +181,7 @@ mod test {
     #[test]
     fn test_verify_column() {
         let column: Column = (0..10).map(|i| Chunk(vec![i; 32])).collect();
-        let domain = GeneralEvaluationDomain::new(32).unwrap();
+        let domain = GeneralEvaluationDomain::new(10).unwrap();
         let (_, column_poly) =
             bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), domain)
                 .unwrap();
@@ -207,14 +212,16 @@ mod test {
             &column_commitment,
             &aggregated_commitment,
             &column_proof,
-            0
+            0,
+            domain
         ));
     }

     #[test]
     fn test_verify() {
         let encoder = &ENCODER;
-        let data = rand_data(8);
+        let data = rand_data(32);
+        let domain_size = 16usize;
         let mut rng = thread_rng();
         let sks: Vec<SecretKey> = (0..16)
             .map(|_| {
@@ -230,6 +237,7 @@ mod test {
             .collect();
         let encoded_data = encoder.encode(&data).unwrap();
         for (i, column) in encoded_data.extended_data.columns().enumerate() {
+            println!("{i}");
             let verifier = &verifiers[i];
             let da_blob = DaBlob {
                 column,
@@ -243,7 +251,7 @@ mod test {
                     .map(|proofs| proofs.get(i).cloned().unwrap())
                     .collect(),
             };
-            assert!(verifier.verify(da_blob).is_some());
+            assert!(verifier.verify(da_blob, domain_size).is_some());
         }
     }
 }

View File

@@ -45,7 +45,10 @@ impl DaVerifier for KzgrsDaVerifier {
     fn verify(&self, blob: &Self::DaBlob) -> Result<Self::Attestation, Self::Error> {
         let blob = blob.clone();
-        match self.verifier.verify(blob) {
+        // TODO: Prepare the domain depending the size, if fixed, so fixed domain, if not it needs
+        // to come with some metadata.
+        let domain_size = 2usize;
+        match self.verifier.verify(blob, domain_size) {
             Some(attestation) => Ok(attestation),
             None => Err(KzgrsDaVerifierError::VerificationError),
         }
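
Note: the hard-coded domain_size = 2usize above is a placeholder, as the TODO states; the rows' domain size either has to be fixed at the protocol level or has to arrive with the blob's metadata. Purely as an illustration of the first option, it could be read from the backend's settings instead of the call site. This is a hypothetical sketch: rows_domain_size is not an existing settings field.

// Hypothetical sketch only: `self.settings.rows_domain_size` does not exist in this diff.
let domain_size = self.settings.rows_domain_size;
match self.verifier.verify(blob, domain_size) {
    Some(attestation) => Ok(attestation),
    None => Err(KzgrsDaVerifierError::VerificationError),
}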