Da: fk20 encoder (#664)
* Use fft for rs
* Specify evaluation domains
* Fix domains in verifier, decoder and dispersal
* Fix size in verifier to fit tests
parent 9adfbdddc6
commit 96a8ef6032
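Background sketch (illustrative, not code from this change): "Use fft for rs" replaces point-by-point polynomial evaluation with a single FFT over an ark-poly evaluation domain, so the Reed-Solomon extension of a row is just the row polynomial evaluated at every root of unity of a larger domain. A minimal standalone example of that idea using ark-poly and ark-bls12-381 follows; the helper name fft_encode and the sizes (8 coefficients, a domain of 16) are assumptions made for the example, not values from this diff.

    // Sketch: RS-style extension = evaluate the polynomial over a larger domain via FFT.
    use ark_bls12_381::Fr;
    use ark_poly::univariate::DensePolynomial;
    use ark_poly::{DenseUVPolynomial, EvaluationDomain, Evaluations, GeneralEvaluationDomain};

    // Hypothetical helper mirroring the shape of the new `encode` in the diff below.
    fn fft_encode(poly: &DensePolynomial<Fr>, domain: GeneralEvaluationDomain<Fr>) -> Evaluations<Fr> {
        // `fft` zero-pads the coefficient vector to the domain size and returns the
        // evaluations of `poly` at every root of unity in `domain`.
        Evaluations::from_vec_and_domain(domain.fft(&poly.coeffs), domain)
    }

    fn main() {
        // 8 data coefficients extended onto a domain of size 16: the extra
        // evaluations are the redundancy a decoder can re-interpolate from.
        let coeffs: Vec<Fr> = (1..=8u64).map(Fr::from).collect();
        let poly = DensePolynomial::from_coefficients_vec(coeffs);
        let domain = GeneralEvaluationDomain::<Fr>::new(16).unwrap();
        let extended = fft_encode(&poly, domain);
        assert_eq!(extended.evals.len(), 16);
    }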
@@ -20,7 +20,17 @@ rand = "0.8.5"
 once_cell = "1.19"
 sha3 = "0.10"
 serde = { version = "1.0", features = ["derive"] }
+rayon = { version = "1.10.0", optional = true }

 [dev-dependencies]
 rand = "0.8"
+ark-bls12-381 = "0.4.0"
+
+[features]
+default = []
+single = []
+parallel = [
+    "rayon",
+    "kzgrs/parallel"
+]

@@ -250,6 +250,7 @@ mod tests {
         verifiers: &[DaVerifier],
     ) -> Vec<Attestation> {
         let mut attestations = Vec::new();
+        let domain_size = encoded_data.extended_data.0[0].len();
         for (i, column) in encoded_data.extended_data.columns().enumerate() {
             let verifier = &verifiers[i];
             let da_blob = DaBlob {
@@ -264,7 +265,7 @@ mod tests {
                     .map(|proofs| proofs.get(i).cloned().unwrap())
                     .collect(),
             };
-            attestations.push(verifier.verify(da_blob).unwrap());
+            attestations.push(verifier.verify(da_blob, domain_size).unwrap());
         }
         attestations
     }
@@ -3,15 +3,17 @@ use std::ops::Div;

 // crates
 use ark_ff::{BigInteger, PrimeField};
+use ark_poly::EvaluationDomain;
 use kzgrs::common::bytes_to_polynomial_unchecked;
+use kzgrs::fk20::fk20_batch_generate_elements_proofs;
 use kzgrs::{
-    bytes_to_polynomial, commit_polynomial, encode, generate_element_proof, Commitment,
-    Evaluations, KzgRsError, Polynomial, Proof, BYTES_PER_FIELD_ELEMENT,
+    bytes_to_polynomial, commit_polynomial, encode, Commitment, Evaluations, KzgRsError,
+    Polynomial, PolynomialEvaluationDomain, Proof, BYTES_PER_FIELD_ELEMENT,
 };

 // internal
 use crate::common::{hash_column_and_commitment, Chunk, ChunksMatrix, Row};
-use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
+use crate::global::GLOBAL_PARAMETERS;

 #[derive(Copy, Clone)]
 pub struct DaEncoderParams {
@@ -67,6 +69,7 @@ impl DaEncoder {
     #[allow(clippy::type_complexity)]
     fn compute_kzg_row_commitments(
         matrix: &ChunksMatrix,
+        polynomial_evaluation_domain: PolynomialEvaluationDomain,
     ) -> Result<Vec<((Evaluations, Polynomial), Commitment)>, KzgRsError> {
         matrix
             .rows()
@@ -76,7 +79,7 @@ impl DaEncoder {
                 // Also, after rs encoding, we are sure all `Fr` elements already fits within modulus.
                 let (evals, poly) = bytes_to_polynomial_unchecked::<BYTES_PER_FIELD_ELEMENT>(
                     r.as_bytes().as_ref(),
-                    *DOMAIN,
+                    polynomial_evaluation_domain,
                 );
                 commit_polynomial(&poly, &GLOBAL_PARAMETERS)
                     .map(|commitment| ((evals, poly), commitment))
@@ -84,42 +87,41 @@ impl DaEncoder {
             .collect()
     }

-    fn rs_encode_row(evaluations: &Evaluations, row: &Polynomial) -> Evaluations {
-        encode(row, evaluations, 2, *DOMAIN)
+    fn rs_encode_row(
+        row: &Polynomial,
+        polynomial_evaluation_domain: PolynomialEvaluationDomain,
+    ) -> Evaluations {
+        encode(row, polynomial_evaluation_domain)
     }

-    fn rs_encode_rows(rows: &[(Evaluations, Polynomial)]) -> Vec<Evaluations> {
+    fn rs_encode_rows(
+        rows: &[Polynomial],
+        polynomial_evaluation_domain: PolynomialEvaluationDomain,
+    ) -> Vec<Evaluations> {
         rows.iter()
-            .map(|(eval, poly)| Self::rs_encode_row(eval, poly))
+            .map(|poly| Self::rs_encode_row(poly, polynomial_evaluation_domain))
            .collect()
     }

-    fn compute_rows_proofs(
-        polynomials: &[Polynomial],
-        evals: &[Evaluations],
-        proof_count: usize,
-    ) -> Result<Vec<Vec<Proof>>, KzgRsError> {
-        polynomials
+    fn compute_rows_proofs(polynomials: &[Polynomial]) -> Result<Vec<Vec<Proof>>, KzgRsError> {
+        Ok(polynomials
             .iter()
-            .zip(evals)
-            .map(|(poly, eval)| {
-                (0..proof_count)
-                    .map(|i| generate_element_proof(i, poly, eval, &GLOBAL_PARAMETERS, *DOMAIN))
-                    .collect()
-            })
-            .collect()
+            .map(|poly| fk20_batch_generate_elements_proofs(poly, &GLOBAL_PARAMETERS))
+            .collect())
     }

     #[allow(clippy::type_complexity)]
     fn compute_kzg_column_commitments(
         matrix: &ChunksMatrix,
+        polynomial_evaluation_domain: PolynomialEvaluationDomain,
     ) -> Result<Vec<((Evaluations, Polynomial), Commitment)>, KzgRsError> {
-        Self::compute_kzg_row_commitments(&matrix.transposed())
+        Self::compute_kzg_row_commitments(&matrix.transposed(), polynomial_evaluation_domain)
     }

     fn compute_aggregated_column_commitment(
         matrix: &ChunksMatrix,
         commitments: &[Commitment],
+        polynomial_evaluation_domain: PolynomialEvaluationDomain,
     ) -> Result<((Evaluations, Polynomial), Commitment), KzgRsError> {
         let hashes: Vec<u8> =
             matrix
@@ -133,19 +135,16 @@ impl DaEncoder {
                 .collect();
         let (evals, poly) = bytes_to_polynomial::<
             { DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE },
-        >(hashes.as_ref(), *DOMAIN)?;
+        >(hashes.as_ref(), polynomial_evaluation_domain)?;
         let commitment = commit_polynomial(&poly, &GLOBAL_PARAMETERS)?;
         Ok(((evals, poly), commitment))
     }

-    fn compute_aggregated_column_proofs(
-        polynomial: &Polynomial,
-        evals: &Evaluations,
-        proof_count: usize,
-    ) -> Result<Vec<Proof>, KzgRsError> {
-        (0..proof_count)
-            .map(|i| generate_element_proof(i, polynomial, evals, &GLOBAL_PARAMETERS, *DOMAIN))
-            .collect()
+    fn compute_aggregated_column_proofs(polynomial: &Polynomial) -> Result<Vec<Proof>, KzgRsError> {
+        Ok(fk20_batch_generate_elements_proofs(
+            polynomial,
+            &GLOBAL_PARAMETERS,
+        ))
     }

     fn evals_to_chunk_matrix(evals: &[Evaluations]) -> ChunksMatrix {
@@ -165,29 +164,30 @@ impl DaEncoder {

     pub fn encode(&self, data: &[u8]) -> Result<EncodedData, kzgrs::KzgRsError> {
         let chunked_data = self.chunkify(data);
+        let row_domain = PolynomialEvaluationDomain::new(self.params.column_count)
+            .expect("Domain should be able to build");
+        let column_domain = PolynomialEvaluationDomain::new(chunked_data.len())
+            .expect("Domain should be able to build");
         let (row_polynomials, row_commitments): (Vec<_>, Vec<_>) =
-            Self::compute_kzg_row_commitments(&chunked_data)?
+            Self::compute_kzg_row_commitments(&chunked_data, row_domain)?
                 .into_iter()
                 .unzip();
-        let encoded_evaluations = Self::rs_encode_rows(&row_polynomials);
+        let (_, row_polynomials): (Vec<_>, Vec<_>) = row_polynomials.into_iter().unzip();
+        let encoded_evaluations = Self::rs_encode_rows(&row_polynomials, row_domain);
         let extended_data = Self::evals_to_chunk_matrix(&encoded_evaluations);
-        let row_polynomials: Vec<_> = row_polynomials.into_iter().map(|(_, p)| p).collect();
-        let rows_proofs = Self::compute_rows_proofs(
-            &row_polynomials,
-            &encoded_evaluations,
-            self.params.column_count,
-        )?;
+        let rows_proofs = Self::compute_rows_proofs(&row_polynomials)?;
         let (_column_polynomials, column_commitments): (Vec<_>, Vec<_>) =
-            Self::compute_kzg_column_commitments(&extended_data)?
+            Self::compute_kzg_column_commitments(&extended_data, column_domain)?
                 .into_iter()
                 .unzip();
-        let ((aggregated_evals, aggregated_polynomial), aggregated_column_commitment) =
-            Self::compute_aggregated_column_commitment(&extended_data, &column_commitments)?;
-        let aggregated_column_proofs = Self::compute_aggregated_column_proofs(
-            &aggregated_polynomial,
-            &aggregated_evals,
-            column_commitments.len(),
-        )?;
+        let ((_aggregated_evals, aggregated_polynomial), aggregated_column_commitment) =
+            Self::compute_aggregated_column_commitment(
+                &extended_data,
+                &column_commitments,
+                row_domain,
+            )?;
+        let aggregated_column_proofs =
+            Self::compute_aggregated_column_proofs(&aggregated_polynomial)?;
         Ok(EncodedData {
             data: data.to_vec(),
             chunked_data,
@@ -204,15 +204,20 @@ impl DaEncoder {
 #[cfg(test)]
 pub mod test {
     use crate::encoder::{DaEncoder, DaEncoderParams};
-    use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
+    use crate::global::GLOBAL_PARAMETERS;
     use ark_ff::PrimeField;
+    use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
     use itertools::izip;
     use kzgrs::common::bytes_to_polynomial_unchecked;
-    use kzgrs::{decode, verify_element_proof, FieldElement, BYTES_PER_FIELD_ELEMENT};
+    use kzgrs::{
+        decode, verify_element_proof, FieldElement, PolynomialEvaluationDomain,
+        BYTES_PER_FIELD_ELEMENT,
+    };
     use rand::RngCore;
     use std::ops::Div;

-    pub const PARAMS: DaEncoderParams = DaEncoderParams::default_with(16);
+    pub const DOMAIN_SIZE: usize = 16;
+    pub const PARAMS: DaEncoderParams = DaEncoderParams::default_with(DOMAIN_SIZE);
     pub const ENCODER: DaEncoder = DaEncoder::new(PARAMS);

     pub fn rand_data(elements_count: usize) -> Vec<u8> {
@@ -238,8 +243,9 @@ pub mod test {
     #[test]
     fn test_compute_row_kzg_commitments() {
         let data = rand_data(32);
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let matrix = ENCODER.chunkify(data.as_ref());
-        let commitments_data = DaEncoder::compute_kzg_row_commitments(&matrix).unwrap();
+        let commitments_data = DaEncoder::compute_kzg_row_commitments(&matrix, domain).unwrap();
         assert_eq!(commitments_data.len(), matrix.len());
     }

@@ -247,11 +253,14 @@ pub mod test {
     fn test_evals_to_chunk_matrix() {
         let data = rand_data(32);
         let matrix = ENCODER.chunkify(data.as_ref());
-        let (poly_data, _): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_row_commitments(&matrix)
-            .unwrap()
-            .into_iter()
-            .unzip();
-        let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
+        let domain = PolynomialEvaluationDomain::new(DOMAIN_SIZE).unwrap();
+        let (poly_data, _): (Vec<_>, Vec<_>) =
+            DaEncoder::compute_kzg_row_commitments(&matrix, domain)
+                .unwrap()
+                .into_iter()
+                .unzip();
+        let (_, poly_data): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let extended_rows = DaEncoder::rs_encode_rows(&poly_data, domain);
         let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_rows);
         for (r1, r2) in izip!(matrix.iter(), extended_matrix.iter()) {
             for (c1, c2) in izip!(r1.iter(), r2.iter()) {
@@ -263,13 +272,15 @@ pub mod test {
     #[test]
     fn test_rs_encode_rows() {
         let data = rand_data(32);
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let matrix = ENCODER.chunkify(data.as_ref());
-        let (poly_data, _): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_row_commitments(&matrix)
-            .unwrap()
-            .into_iter()
-            .unzip();
-        let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
-        let (evals, _): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let (poly_data, _): (Vec<_>, Vec<_>) =
+            DaEncoder::compute_kzg_row_commitments(&matrix, domain)
+                .unwrap()
+                .into_iter()
+                .unzip();
+        let (evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let extended_rows = DaEncoder::rs_encode_rows(&polynomials, domain);
         // check encoding went well, original evaluation points vs extended ones
         for (e1, e2) in izip!(evals.iter(), extended_rows.iter()) {
             for (c1, c2) in izip!(&e1.evals, &e2.evals) {
@@ -283,10 +294,10 @@ pub mod test {
                 assert_eq!(c1, c2);
             }
             let points: Vec<_> = evals.evals.iter().cloned().map(Some).collect();
-            let poly_2 = decode(r1.len(), &points, *DOMAIN);
+            let poly_2 = decode(r1.len(), &points, domain);
             let (poly_1, _) = bytes_to_polynomial_unchecked::<BYTES_PER_FIELD_ELEMENT>(
                 r1.as_bytes().as_ref(),
-                *DOMAIN,
+                domain,
             );
             assert_eq!(poly_1, poly_2);
         }
@@ -295,28 +306,21 @@ pub mod test {
     #[test]
     fn test_compute_row_proofs() {
         let data = rand_data(32);
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let matrix = ENCODER.chunkify(data.as_ref());
         let (poly_data, commitments): (Vec<_>, Vec<_>) =
-            DaEncoder::compute_kzg_row_commitments(&matrix)
+            DaEncoder::compute_kzg_row_commitments(&matrix, domain)
                 .unwrap()
                 .into_iter()
                 .unzip();
-        let extended_evaluations = DaEncoder::rs_encode_rows(&poly_data);
-        let (evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let (_evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
+        let extended_evaluations = DaEncoder::rs_encode_rows(&polynomials, domain);
         let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_evaluations);
-        let original_proofs =
-            DaEncoder::compute_rows_proofs(&polynomials, &evals, PARAMS.column_count.div(2))
-                .unwrap();
-        let extended_proofs = DaEncoder::compute_rows_proofs(
-            &polynomials,
-            &extended_evaluations,
-            PARAMS.column_count,
-        )
-        .unwrap();
+        let proofs = DaEncoder::compute_rows_proofs(&polynomials).unwrap();

-        let checks = izip!(matrix.iter(), &commitments, &original_proofs);
+        let checks = izip!(matrix.iter(), &commitments, &proofs);
         for (row, commitment, proofs) in checks {
-            assert_eq!(proofs.len(), row.len());
+            assert_eq!(proofs.len(), row.len() * 2);
             for (i, chunk) in row.iter().enumerate() {
                 let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
                 assert!(verify_element_proof(
@@ -324,12 +328,12 @@ pub mod test {
                     &element,
                     &commitment,
                     &proofs[i],
-                    *DOMAIN,
+                    domain,
                     &GLOBAL_PARAMETERS
                 ));
             }
         }
-        let checks = izip!(extended_matrix.iter(), &commitments, &extended_proofs);
+        let checks = izip!(extended_matrix.iter(), &commitments, &proofs);
         for (row, commitment, proofs) in checks {
             assert_eq!(proofs.len(), row.len());
             for (i, chunk) in row.iter().enumerate() {
@@ -339,7 +343,7 @@ pub mod test {
                     &element,
                     &commitment,
                     &proofs[i],
-                    *DOMAIN,
+                    domain,
                     &GLOBAL_PARAMETERS
                 ));
             }
@@ -349,8 +353,9 @@ pub mod test {
     #[test]
     fn test_compute_column_kzg_commitments() {
         let data = rand_data(32);
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let matrix = ENCODER.chunkify(data.as_ref());
-        let commitments_data = DaEncoder::compute_kzg_column_commitments(&matrix).unwrap();
+        let commitments_data = DaEncoder::compute_kzg_column_commitments(&matrix, domain).unwrap();
         assert_eq!(commitments_data.len(), matrix.columns().count());
     }

@@ -358,25 +363,58 @@ pub mod test {
     fn test_compute_aggregated_column_kzg_commitment() {
         let data = rand_data(32);
         let matrix = ENCODER.chunkify(data.as_ref());
-        let (_, commitments): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_column_commitments(&matrix)
-            .unwrap()
-            .into_iter()
-            .unzip();
-        let _ = DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments).unwrap();
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
+        let (_, commitments): (Vec<_>, Vec<_>) =
+            DaEncoder::compute_kzg_column_commitments(&matrix, domain)
+                .unwrap()
+                .into_iter()
+                .unzip();
+        let _ =
+            DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments, domain).unwrap();
     }

     #[test]
     fn test_compute_aggregated_column_kzg_proofs() {
         let data = rand_data(32);
         let matrix = ENCODER.chunkify(data.as_ref());
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
         let (_poly_data, commitments): (Vec<_>, Vec<_>) =
-            DaEncoder::compute_kzg_column_commitments(&matrix)
+            DaEncoder::compute_kzg_column_commitments(&matrix, domain)
                 .unwrap()
                 .into_iter()
                 .unzip();
-        let ((evals, polynomial), _aggregated_commitment) =
-            DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments).unwrap();
-        DaEncoder::compute_aggregated_column_proofs(&polynomial, &evals, commitments.len())
-            .unwrap();
+        let ((_evals, polynomial), _aggregated_commitment) =
+            DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments, domain).unwrap();
+        DaEncoder::compute_aggregated_column_proofs(&polynomial).unwrap();
+    }
+
+    #[test]
+    fn test_full_encode_flow() {
+        let data = rand_data(32);
+        let domain = GeneralEvaluationDomain::new(DOMAIN_SIZE).unwrap();
+        let encoding_data = ENCODER.encode(&data).unwrap();
+        assert_eq!(encoding_data.data, data);
+        assert_eq!(encoding_data.row_commitments.len(), 4);
+        assert_eq!(encoding_data.column_commitments.len(), 16);
+        assert_eq!(encoding_data.rows_proofs.len(), 4);
+        assert_eq!(encoding_data.rows_proofs[0].len(), 16);
+        assert_eq!(encoding_data.aggregated_column_proofs.len(), 16);
+        for (row, proofs, commitment) in izip!(
+            encoding_data.extended_data.rows(),
+            encoding_data.rows_proofs,
+            encoding_data.row_commitments
+        ) {
+            for (chunk_idx, chunk) in row.iter().enumerate() {
+                let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
+                assert!(verify_element_proof(
+                    chunk_idx,
+                    &element,
+                    &commitment,
+                    &proofs[chunk_idx],
+                    domain,
+                    &GLOBAL_PARAMETERS
+                ));
+            }
+        }
     }
 }
@@ -1,11 +1,7 @@
-use ark_poly::EvaluationDomain;
-use kzgrs::{global_parameters_from_randomness, GlobalParameters, PolynomialEvaluationDomain};
+use kzgrs::{global_parameters_from_randomness, GlobalParameters};
 use once_cell::sync::Lazy;

 pub static GLOBAL_PARAMETERS: Lazy<GlobalParameters> = Lazy::new(|| {
     let mut rng = rand::thread_rng();
     global_parameters_from_randomness(&mut rng)
 });
-
-pub static DOMAIN: Lazy<PolynomialEvaluationDomain> =
-    Lazy::new(|| PolynomialEvaluationDomain::new(8192).unwrap());
@@ -1,12 +1,13 @@
 // std

+use ark_poly::EvaluationDomain;
 // crates
 use blst::min_sig::{PublicKey, SecretKey};
 use itertools::{izip, Itertools};
 use kzgrs::common::field_element_from_bytes_le;
 use kzgrs::{
-    bytes_to_polynomial, commit_polynomial, verify_element_proof, Commitment, Proof,
-    BYTES_PER_FIELD_ELEMENT,
+    bytes_to_polynomial, commit_polynomial, verify_element_proof, Commitment,
+    PolynomialEvaluationDomain, Proof, BYTES_PER_FIELD_ELEMENT,
 };

 use crate::common::blob::DaBlob;
@@ -16,7 +17,7 @@ use crate::common::{
     attestation::Attestation, build_attestation_message, hash_column_and_commitment, Chunk, Column,
 };
 use crate::encoder::DaEncoderParams;
-use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
+use crate::global::GLOBAL_PARAMETERS;

 pub struct DaVerifier {
     // TODO: substitute this for an abstraction to sign things over
@@ -43,11 +44,15 @@ impl DaVerifier {
         aggregated_column_commitment: &Commitment,
         aggregated_column_proof: &Proof,
         index: usize,
+        rows_domain: PolynomialEvaluationDomain,
     ) -> bool {
+        let column_domain =
+            PolynomialEvaluationDomain::new(column.len()).expect("Domain should be able to build");
         // 1. compute commitment for column
-        let Ok((_, polynomial)) =
-            bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), *DOMAIN)
-        else {
+        let Ok((_, polynomial)) = bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(
+            column.as_bytes().as_slice(),
+            column_domain,
+        ) else {
             return false;
         };
         let Ok(computed_column_commitment) = commit_polynomial(&polynomial, &GLOBAL_PARAMETERS)
@@ -69,19 +74,25 @@ impl DaVerifier {
             &element,
             aggregated_column_commitment,
             aggregated_column_proof,
-            *DOMAIN,
+            rows_domain,
             &GLOBAL_PARAMETERS,
         )
     }

-    fn verify_chunk(chunk: &Chunk, commitment: &Commitment, proof: &Proof, index: usize) -> bool {
+    fn verify_chunk(
+        chunk: &Chunk,
+        commitment: &Commitment,
+        proof: &Proof,
+        index: usize,
+        domain: PolynomialEvaluationDomain,
+    ) -> bool {
         let element = field_element_from_bytes_le(chunk.as_bytes().as_slice());
         verify_element_proof(
             index,
             &element,
             commitment,
             proof,
-            *DOMAIN,
+            domain,
             &GLOBAL_PARAMETERS,
         )
     }
@@ -91,6 +102,7 @@ impl DaVerifier {
         commitments: &[Commitment],
         proofs: &[Proof],
         index: usize,
+        domain: PolynomialEvaluationDomain,
     ) -> bool {
         if ![chunks.len(), commitments.len(), proofs.len()]
             .iter()
@@ -99,7 +111,7 @@ impl DaVerifier {
             return false;
         }
         for (chunk, commitment, proof) in izip!(chunks, commitments, proofs) {
-            if !DaVerifier::verify_chunk(chunk, commitment, proof, index) {
+            if !DaVerifier::verify_chunk(chunk, commitment, proof, index, domain) {
                 return false;
             }
         }
@@ -122,13 +134,16 @@ impl DaVerifier {
         }
     }

-    pub fn verify(&self, blob: DaBlob) -> Option<Attestation> {
+    pub fn verify(&self, blob: DaBlob, rows_domain_size: usize) -> Option<Attestation> {
+        let rows_domain = PolynomialEvaluationDomain::new(rows_domain_size)
+            .expect("Domain should be able to build");
         let is_column_verified = DaVerifier::verify_column(
             &blob.column,
             &blob.column_commitment,
             &blob.aggregated_column_commitment,
             &blob.aggregated_column_proof,
             self.index,
+            rows_domain,
         );
         if !is_column_verified {
             return None;
@@ -139,6 +154,7 @@ impl DaVerifier {
             &blob.rows_commitments,
             &blob.rows_proofs,
             self.index,
+            rows_domain,
         );
         if !are_chunks_verified {
             return None;
@@ -153,8 +169,9 @@ mod test {
     use crate::common::{hash_column_and_commitment, Chunk, Column};
     use crate::encoder::test::{rand_data, ENCODER};
     use crate::encoder::DaEncoderParams;
-    use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
+    use crate::global::GLOBAL_PARAMETERS;
     use crate::verifier::DaVerifier;
+    use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
     use blst::min_sig::SecretKey;
     use kzgrs::{
         bytes_to_polynomial, commit_polynomial, generate_element_proof, BYTES_PER_FIELD_ELEMENT,
@@ -164,8 +181,9 @@ mod test {
     #[test]
     fn test_verify_column() {
         let column: Column = (0..10).map(|i| Chunk(vec![i; 32])).collect();
+        let domain = GeneralEvaluationDomain::new(10).unwrap();
         let (_, column_poly) =
-            bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), *DOMAIN)
+            bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), domain)
                 .unwrap();
         let column_commitment = commit_polynomial(&column_poly, &GLOBAL_PARAMETERS).unwrap();
         let (aggregated_evals, aggregated_poly) = bytes_to_polynomial::<
@@ -176,7 +194,7 @@ mod test {
                 &column_commitment,
             )
             .as_slice(),
-            *DOMAIN,
+            domain,
         )
         .unwrap();
         let aggregated_commitment =
@@ -186,7 +204,7 @@ mod test {
            &aggregated_poly,
            &aggregated_evals,
            &GLOBAL_PARAMETERS,
-           *DOMAIN,
+           domain,
        )
        .unwrap();
        assert!(DaVerifier::verify_column(
@@ -194,14 +212,16 @@ mod test {
             &column_commitment,
             &aggregated_commitment,
             &column_proof,
-            0
+            0,
+            domain
         ));
     }

     #[test]
     fn test_verify() {
         let encoder = &ENCODER;
-        let data = rand_data(8);
+        let data = rand_data(32);
+        let domain_size = 16usize;
         let mut rng = thread_rng();
         let sks: Vec<SecretKey> = (0..16)
             .map(|_| {
@@ -217,6 +237,7 @@ mod test {
             .collect();
         let encoded_data = encoder.encode(&data).unwrap();
         for (i, column) in encoded_data.extended_data.columns().enumerate() {
+            println!("{i}");
             let verifier = &verifiers[i];
             let da_blob = DaBlob {
                 column,
@@ -230,7 +251,7 @@ mod test {
                     .map(|proofs| proofs.get(i).cloned().unwrap())
                     .collect(),
             };
-            assert!(verifier.verify(da_blob).is_some());
+            assert!(verifier.verify(da_blob, domain_size).is_some());
         }
     }
 }
@@ -1,9 +1,7 @@
 use ark_bls12_381::Fr;
 use ark_ff::{BigInteger, Field, PrimeField};
 use ark_poly::univariate::DensePolynomial;
-use ark_poly::{
-    DenseUVPolynomial, EvaluationDomain, Evaluations, GeneralEvaluationDomain, Polynomial,
-};
+use ark_poly::{DenseUVPolynomial, EvaluationDomain, Evaluations, GeneralEvaluationDomain};
 use num_traits::Zero;
 use std::ops::{Mul, Neg};

@@ -12,17 +10,9 @@ use std::ops::{Mul, Neg};
 /// `factor` need to be `>1`
 pub fn encode(
     polynomial: &DensePolynomial<Fr>,
-    evaluations: &Evaluations<Fr>,
-    factor: usize,
     domain: GeneralEvaluationDomain<Fr>,
 ) -> Evaluations<Fr> {
-    assert!(factor > 1);
-    Evaluations::from_vec_and_domain(
-        (0..evaluations.evals.len() * factor)
-            .map(|i| polynomial.evaluate(&domain.element(i)))
-            .collect(),
-        domain,
-    )
+    Evaluations::from_vec_and_domain(domain.fft(&polynomial.coeffs), domain)
 }

 /// Interpolate points into a polynomial, then evaluate the polynomial in the original evaluations
@@ -40,8 +30,10 @@ pub fn decode(
         .unzip();
     let coeffs = lagrange_interpolate(&points, &roots_of_unity);
     Evaluations::from_vec_and_domain(
-        (0..original_chunks_len)
-            .map(|i| coeffs.evaluate(&domain.element(i)))
+        domain
+            .fft(&coeffs)
+            .into_iter()
+            .take(original_chunks_len)
             .collect(),
         domain,
     )
|
@ -105,9 +97,9 @@ mod test {
|
||||||
let mut rng = thread_rng();
|
let mut rng = thread_rng();
|
||||||
bytes.try_fill(&mut rng).unwrap();
|
bytes.try_fill(&mut rng).unwrap();
|
||||||
|
|
||||||
let (evals, poly) = bytes_to_polynomial::<31>(&bytes, *DOMAIN).unwrap();
|
let (_evals, poly) = bytes_to_polynomial::<31>(&bytes, *DOMAIN).unwrap();
|
||||||
|
|
||||||
let encoded = encode(&poly, &evals, 2, *DOMAIN);
|
let encoded = encode(&poly, *DOMAIN);
|
||||||
let mut encoded: Vec<Option<Fr>> = encoded.evals.into_iter().map(Some).collect();
|
let mut encoded: Vec<Option<Fr>> = encoded.evals.into_iter().map(Some).collect();
|
||||||
|
|
||||||
let decoded = decode(10, &encoded, *DOMAIN);
|
let decoded = decode(10, &encoded, *DOMAIN);
|
||||||
|
|
|
@@ -45,7 +45,10 @@ impl DaVerifier for KzgrsDaVerifier {

     fn verify(&self, blob: &Self::DaBlob) -> Result<Self::Attestation, Self::Error> {
         let blob = blob.clone();
-        match self.verifier.verify(blob) {
+        // TODO: Prepare the domain depending the size, if fixed, so fixed domain, if not it needs
+        // to come with some metadata.
+        let domain_size = 2usize;
+        match self.verifier.verify(blob, domain_size) {
             Some(attestation) => Ok(attestation),
             None => Err(KzgrsDaVerifierError::VerificationError),
         }
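Decode-side sketch (illustrative, not code from this change): the new `decode` Lagrange-interpolates the surviving points into coefficients and then runs an FFT, truncating to the original length. Under the simplifying assumption that every extended evaluation is available, the round trip reduces to fft/ifft over the same domain, as below; the sizes used are assumptions for the example.

    use ark_bls12_381::Fr;
    use ark_ff::Zero;
    use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};

    fn main() {
        // 8 "data" coefficients extended to 16 evaluations over the domain.
        let coeffs: Vec<Fr> = (1..=8u64).map(Fr::from).collect();
        let domain = GeneralEvaluationDomain::<Fr>::new(16).unwrap();
        let extended = domain.fft(&coeffs);
        assert_eq!(extended.len(), 16);

        // With all evaluations present, the inverse FFT recovers the coefficient
        // vector zero-padded to the domain size, so the data round-trips exactly.
        let recovered = domain.ifft(&extended);
        assert_eq!(&recovered[..8], &coeffs[..]);
        assert!(recovered[8..].iter().all(|c| c.is_zero()));
    }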