Implement row proof tests

Fix field element encoding/decoding bug
Daniel Sanchez Quiros 2024-04-16 16:11:15 +02:00
parent 76bc6a4a6c
commit fa61a80a1d
5 changed files with 115 additions and 20 deletions

View File

@@ -14,4 +14,7 @@ rand = "0.8.5"
once_cell = "1.19"
[dev-dependencies]
rand = "0.8"
rand = "0.8"
itertools = "0.12"
ark-ff = "0.4"
num-bigint = "0.4.4"

View File

@@ -3,7 +3,7 @@ use blake2::digest::{Update, VariableOutput};
use kzgrs::Commitment;
use std::io::Cursor;
#[derive(Clone)]
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct Chunk(pub Vec<u8>);
pub struct Row(pub Vec<Chunk>);
pub struct Column(pub Vec<Chunk>);
@@ -29,6 +29,9 @@ impl From<&[u8]> for Chunk {
}
impl Row {
pub fn iter(&self) -> impl Iterator<Item = &Chunk> {
self.0.iter()
}
pub fn len(&self) -> usize {
self.0.len()
}
@@ -38,6 +41,9 @@ impl Row {
}
impl Column {
pub fn iter(&self) -> impl Iterator<Item = &Chunk> {
self.0.iter()
}
pub fn len(&self) -> usize {
self.0.len()
}
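The new derives on Chunk exist to support the tests further down: assert_eq! needs PartialEq for the comparison and Debug for the failure output. A minimal standalone sketch of that requirement (the Chunk here is a stand-in, not the crate's type):

#[derive(Clone, Eq, PartialEq, Debug)]
pub struct Chunk(pub Vec<u8>);

fn main() {
    let a = Chunk(vec![1, 2, 3]);
    // assert_eq! requires PartialEq to compare and Debug to print a
    // failure message; without the derives this does not compile.
    assert_eq!(a, a.clone());
}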

View File

@@ -1,3 +1,9 @@
// std
use ark_ff::{BigInteger, PrimeField};
use std::ops::Div;
// crates
// internal
use crate::common::{hash_column_and_commitment, Chunk, ChunksMatrix, Row};
use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
use ark_poly::univariate::DensePolynomial;
@@ -42,7 +48,8 @@ impl DaEncoder {
fn chunkify(&self, data: &[u8]) -> ChunksMatrix {
let chunk_size =
self.params.column_count * DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE;
// column count is halved here because rows are later RS-encoded to twice their size
self.params.column_count.div(2) * DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE;
data.chunks(chunk_size)
.map(|d| {
d.chunks(DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE)
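A quick check of the new size arithmetic, as a sketch assuming the usual 31-byte safe chunk size for BLS12-381 scalars (the constant's value is not visible in this diff):

use std::ops::Div;

// Assumed value: 31 bytes keeps every chunk below the BLS12-381 scalar modulus.
const MAX_BLS12_381_ENCODING_CHUNK_SIZE: usize = 31;

fn main() {
    let column_count = 4usize;
    // Only half the columns carry original data; RS encoding later
    // stretches each row to the full column_count.
    let row_size = column_count.div(2) * MAX_BLS12_381_ENCODING_CHUNK_SIZE;
    assert_eq!(row_size, 62);
    // A 124-byte blob thus chunkifies into 2 rows of 2 chunks each,
    // which extend to 2 rows of 4 chunks after encoding.
    assert_eq!(124usize / row_size, 2);
}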
@@ -89,13 +96,13 @@ impl DaEncoder {
fn compute_rows_proofs(
polynomials: &[Polynomial],
commitments: &[Commitment],
size: usize,
proof_count: usize,
) -> Result<Vec<Vec<Proof>>, KzgRsError> {
polynomials
.iter()
.zip(commitments.iter())
.map(|(poly, commitment)| {
(0..size)
(0..proof_count)
.map(|i| generate_element_proof(i, poly, &GLOBAL_PARAMETERS, *DOMAIN))
.collect()
})
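The rename from size to proof_count makes the call sites below read correctly: one opening proof is generated per evaluation index, so callers pass column_count / 2 to cover just the original columns or column_count to cover the whole extended row.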
@@ -149,9 +156,7 @@ impl DaEncoder {
Row(eval
.evals
.iter()
.map(|point| {
Chunk(point.0 .0.iter().flat_map(|n| n.to_le_bytes()).collect())
})
.map(|point| Chunk(point.into_bigint().to_bytes_le()))
.collect())
})
.collect(),
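This is the encoding half of the bug: point.0 .0 reaches into the element's internal (Montgomery-form) limbs rather than the canonical integer, so the serialized bytes could not be decoded back consistently. into_bigint() converts out of the internal form first. A round-trip sketch against the ark-ff 0.4 API:

use ark_bls12_381::Fr;
use ark_ff::{BigInteger, PrimeField};

fn main() {
    let x = Fr::from(42u64);
    // Canonical little-endian bytes, as the fixed code now produces.
    let bytes = x.into_bigint().to_bytes_le();
    // Reading them back yields the same field element.
    assert_eq!(Fr::from_le_bytes_mod_order(&bytes), x);
}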
@@ -196,9 +201,12 @@ impl DaEncoder {
#[cfg(test)]
pub mod test {
use crate::encoder::{DaEncoder, DaEncoderParams};
use crate::global::DOMAIN;
use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
use ark_ff::{BigInt, BigInteger, PrimeField};
use itertools::izip;
use kzgrs::common::bytes_to_polynomial_unchecked;
use kzgrs::{decode, BYTES_PER_FIELD_ELEMENT};
use kzgrs::{decode, verify_element_proof, FieldElement, BYTES_PER_FIELD_ELEMENT};
use num_bigint::BigUint;
use rand::RngCore;
use std::ops::Div;
@@ -215,12 +223,12 @@ pub mod test {
fn test_chunkify() {
let params = DaEncoderParams::default_with(2);
let elements = 10usize;
let data = rand_data(10);
let data = rand_data(elements);
let encoder = DaEncoder::new(params);
let matrix = encoder.chunkify(&data);
assert_eq!(matrix.len(), elements.div(params.column_count));
assert_eq!(matrix.len(), elements.div(params.column_count.div(2)));
for row in matrix.rows() {
assert_eq!(row.len(), params.column_count);
assert_eq!(row.len(), params.column_count.div(2));
assert_eq!(row.0[0].len(), BYTES_PER_FIELD_ELEMENT);
}
}
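Worked numbers for the updated assertions, assuming rand_data(n) yields n chunks' worth of bytes (its body is outside this diff): with default_with(2), only column_count / 2 = 1 column carries data, so 10 elements fill 10 rows of 1 chunk each, padded to BYTES_PER_FIELD_ELEMENT bytes.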
@@ -234,7 +242,7 @@
}
#[test]
fn test_rs_encode_rows() {
fn test_evals_to_chunk_matrix() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (poly_data, _): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_row_commitments(&matrix)
@@ -243,8 +251,35 @@
.unzip();
let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_rows);
for ((r1, r2), evals) in matrix.iter().zip(extended_matrix.iter()).zip(extended_rows) {
for (r1, r2) in izip!(matrix.iter(), extended_matrix.iter()) {
for (c1, c2) in izip!(r1.iter(), r2.iter()) {
assert_eq!(c1, c2);
}
}
}
#[test]
fn test_rs_encode_rows() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (poly_data, _): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_row_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
let (evals, _): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
// check that encoding kept the original evaluation points: the first half of each extended row must match the original
for (e1, e2) in izip!(evals.iter(), extended_rows.iter()) {
for (c1, c2) in izip!(&e1.evals, &e2.evals) {
assert_eq!(c1, c2);
}
}
let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_rows);
for (r1, r2, evals) in izip!(matrix.iter(), extended_matrix.iter(), extended_rows) {
assert_eq!(r1.len(), r2.len().div(2));
for (c1, c2) in izip!(r1.iter(), r2.iter()) {
assert_eq!(c1, c2);
}
let points: Vec<_> = evals.evals.iter().cloned().map(Some).collect();
let poly_2 = decode(r1.len(), &points, *DOMAIN);
let (poly_1, _) = bytes_to_polynomial_unchecked::<BYTES_PER_FIELD_ELEMENT>(
@@ -254,4 +289,56 @@ pub mod test {
assert_eq!(poly_1, poly_2);
}
}
#[test]
fn test_compute_row_proofs() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (poly_data, commitments): (Vec<_>, Vec<_>) =
DaEncoder::compute_kzg_row_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
let (evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_rows);
let original_proofs =
DaEncoder::compute_rows_proofs(&polynomials, &commitments, PARAMS.column_count.div(2))
.unwrap();
let extended_proofs =
DaEncoder::compute_rows_proofs(&polynomials, &commitments, PARAMS.column_count)
.unwrap();
let checks = izip!(matrix.iter(), &commitments, &original_proofs);
for (row, commitment, proofs) in checks {
assert_eq!(proofs.len(), row.len());
for (i, chunk) in row.iter().enumerate() {
let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
assert!(verify_element_proof(
i,
&element,
&commitment,
&proofs[i],
*DOMAIN,
&GLOBAL_PARAMETERS
));
}
}
let checks = izip!(extended_matrix.iter(), &commitments, &extended_proofs);
for (row, commitment, proofs) in checks {
assert_eq!(proofs.len(), row.len());
for (i, chunk) in row.iter().enumerate() {
println!("{i}");
let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
assert!(verify_element_proof(
i,
&element,
&commitment,
&proofs[i],
*DOMAIN,
&GLOBAL_PARAMETERS
));
}
}
}
}
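Why the extended proofs verify against the same commitments: a KZG commitment binds the row polynomial itself, and RS extension only evaluates that polynomial at additional domain points, so openings at indices beyond column_count / 2 still check out against unchanged commitments. The same fact underlies the decode round-trip in test_rs_encode_rows: the extended evaluations interpolate back to the original polynomial.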

View File

@@ -6,6 +6,7 @@ use ark_ff::{BigInteger256, PrimeField, Zero};
use ark_poly::domain::general::GeneralEvaluationDomain;
use ark_poly::evaluations::univariate::Evaluations;
use ark_poly::univariate::DensePolynomial;
use num_bigint::BigUint;
use thiserror::Error;
// internal
@@ -34,10 +35,8 @@ pub fn bytes_to_evaluations<const CHUNK_SIZE: usize>(
.map(|e| {
// little endian is used for convenience: shortening a chunk by a byte
// (sizes < 32 are supported) does not change its value in this endianness
let bint: BigInteger256 = Fr::from_le_bytes_mod_order(e)
.try_into()
.expect("Bytes size should fit for an 256 bits integer");
Fr::new(bint)
let bui = BigUint::from_bytes_le(e);
Fr::from(bui)
})
.collect(),
domain,
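And this is the decoding half: the old detour through BigInteger256 and Fr::new appears to have mixed the canonical integer with the internal representation, which is what the commit title's bug refers to. Building the element from a plain little-endian integer sidesteps that, since Fr implements From<BigUint> with proper reduction. A sketch of the fixed behavior:

use ark_bls12_381::Fr;
use num_bigint::BigUint;

fn main() {
    // A short little-endian chunk, e.g. the start of a 31-byte chunk.
    let chunk = [0x2a, 0x00, 0x00];
    // BigUint reads the bytes as a plain integer; Fr::from then reduces
    // it into the field and handles the internal representation.
    let x = Fr::from(BigUint::from_bytes_le(&chunk));
    assert_eq!(x, Fr::from(42u64));
}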

View File

@@ -49,7 +49,7 @@ pub fn verify_element_proof(
element: &Fr,
commitment: &Commitment<Bls12_381>,
proof: &Proof<Bls12_381>,
domain: &GeneralEvaluationDomain<Fr>,
domain: GeneralEvaluationDomain<Fr>,
global_parameters: &UniversalParams<Bls12_381>,
) -> bool {
let u = domain.element(element_index);
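A note on the signature change: GeneralEvaluationDomain<Fr> is Copy in ark-poly 0.4, so taking the domain by value costs nothing and lets call sites pass *DOMAIN directly, as the new tests above do.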