Da: v1 encoder (#633)

* Added new kzgrs core module

* Implemented bytes_to_polynomial and tests

* Use coefficient form

* Refactor evaluations into method

* Use domain elements instead of roots of unity in tests

* Fix encoding and test

* Make Clippy happy

* Add comments

* Implement polynomial commitment

* Implement proof generation

* Sketch fn signature for verification

* implement proof verification

* Implemented verification and tests

* Return evaluations from bytes_to_polynomial as well

* Use modular le bytes

* Implement rs encode/decode

* Implement decoding tests

* Implement decode using lagrange

* Added ChunksMatrix

* Implement encoder with chunkify

* Added missing files

* Implement commit row commitments

* Implement compute elements (row) proofs

* Fix lib and types exposures

* Implement encoder main methods

* Implement encode method

* Implement chunkify test
Fix related bugs

* Implement compute row kzg commitments
Fix related bugs

* Implement rs encode rows test
Fix related bugs
Refactored API

* Implement row proofs tests
Fix FieldElement encoding/decoding bug

* Implement aggregated commitment test
Implement aggregated column proofs test

* Cleanup

* Fix deps

* Fix tests
Daniel Sanchez 2024-04-19 12:14:00 +02:00 committed by Gusto
parent ee85d8737d
commit 54a9a73e0d
12 changed files with 584 additions and 23 deletions

View File

@@ -20,6 +20,8 @@ members = [
"consensus/carnot-engine",
"consensus/cryptarchia-engine",
"ledger/cryptarchia-ledger",
-"tests", "nomos-da/kzgrs",
+"tests",
+"nomos-da/kzgrs",
+"nomos-da/kzgrs-backend",
]
resolver = "2"

View File

@@ -0,0 +1,20 @@
[package]
name = "kzgrs-backend"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ark-ff = "0.4"
blake2 = "0.10"
ark-serialize = "0.4.2"
ark-poly = "0.4.2"
kzgrs = { path = "../kzgrs" }
rand = "0.8.5"
once_cell = "1.19"
[dev-dependencies]
rand = "0.8"
itertools = "0.12"
num-bigint = "0.4.4"

View File

@@ -0,0 +1,118 @@
use blake2::digest::{Update, VariableOutput};
use kzgrs::Commitment;
use std::io::Cursor;
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct Chunk(pub Vec<u8>);
pub struct Row(pub Vec<Chunk>);
pub struct Column(pub Vec<Chunk>);
pub struct ChunksMatrix(pub Vec<Row>);
impl Chunk {
pub fn len(&self) -> usize {
self.0.len()
}
pub fn as_bytes(&self) -> Vec<u8> {
self.0.to_vec()
}
pub const fn empty() -> Self {
Self(vec![])
}
}
impl From<&[u8]> for Chunk {
fn from(value: &[u8]) -> Self {
Self(value.to_vec())
}
}
impl Row {
pub fn iter(&self) -> impl Iterator<Item = &Chunk> {
self.0.iter()
}
pub fn len(&self) -> usize {
self.0.len()
}
pub fn as_bytes(&self) -> Vec<u8> {
self.0.iter().flat_map(Chunk::as_bytes).collect()
}
}
impl Column {
#[allow(unused)]
pub fn iter(&self) -> impl Iterator<Item = &Chunk> {
self.0.iter()
}
#[allow(unused)]
pub fn len(&self) -> usize {
self.0.len()
}
pub fn as_bytes(&self) -> Vec<u8> {
self.0.iter().flat_map(Chunk::as_bytes).collect()
}
}
impl FromIterator<Chunk> for Row {
fn from_iter<T: IntoIterator<Item = Chunk>>(iter: T) -> Self {
Self(iter.into_iter().collect())
}
}
impl FromIterator<Chunk> for Column {
fn from_iter<T: IntoIterator<Item = Chunk>>(iter: T) -> Self {
Self(iter.into_iter().collect())
}
}
impl ChunksMatrix {
pub fn len(&self) -> usize {
self.0.len()
}
pub fn rows(&self) -> impl Iterator<Item = &Row> + '_ {
self.0.iter()
}
pub fn columns(&self) -> impl Iterator<Item = Column> + '_ {
let size = self.0.first().map(|r| r.0.len()).unwrap_or(0);
(0..size).map(|i| {
self.0
.iter()
.map(|row| row.0.get(i).cloned().unwrap_or_else(Chunk::empty))
.collect::<Column>()
})
}
pub fn transposed(&self) -> Self {
Self(self.columns().map(|c| Row(c.0)).collect())
}
pub fn iter(&self) -> impl Iterator<Item = &Row> {
self.0.iter()
}
}
impl FromIterator<Row> for ChunksMatrix {
fn from_iter<T: IntoIterator<Item = Row>>(iter: T) -> Self {
Self(iter.into_iter().collect())
}
}
pub fn hash_column_and_commitment<const HASH_SIZE: usize>(
column: &Column,
commitment: &Commitment,
) -> [u8; HASH_SIZE] {
use ark_serialize::CanonicalSerialize;
let mut hasher = blake2::Blake2bVar::new(HASH_SIZE)
.unwrap_or_else(|e| panic!("Blake2b should work for size {HASH_SIZE}, {e}"));
hasher.update(column.as_bytes().as_ref());
let mut buff = Cursor::new(vec![]);
commitment
.serialize_uncompressed(&mut buff)
.expect("Serialization of commitment should work");
hasher.update(buff.into_inner().as_ref());
hasher
.finalize_boxed()
.to_vec()
.try_into()
.unwrap_or_else(|_| panic!("Size is guaranteed by constant {HASH_SIZE:?}"))
}
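The matrix helpers above are easiest to see on a tiny example. A minimal sketch (hypothetical, not part of the diff), assuming the types in this file are in scope:

fn matrix_helpers_sketch() {
    // a 2x2 matrix of single-byte chunks
    let matrix = ChunksMatrix(vec![
        Row(vec![Chunk(vec![1]), Chunk(vec![2])]),
        Row(vec![Chunk(vec![3]), Chunk(vec![4])]),
    ]);
    // columns() builds column i from the i-th chunk of every row,
    // padding short rows with Chunk::empty()
    let first_column: Column = matrix.columns().next().unwrap();
    assert_eq!(first_column.as_bytes(), vec![1, 3]);
    // transposed() reuses columns(), so rows and columns swap places
    assert_eq!(matrix.transposed().rows().next().unwrap().as_bytes(), vec![1, 3]);
}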

View File

@@ -0,0 +1,365 @@
// std
use ark_ff::{BigInteger, PrimeField};
use std::ops::Div;
// crates
// internal
use crate::common::{hash_column_and_commitment, Chunk, ChunksMatrix, Row};
use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
use kzgrs::common::bytes_to_polynomial_unchecked;
use kzgrs::{
bytes_to_polynomial, commit_polynomial, encode, generate_element_proof, Commitment,
Evaluations, KzgRsError, Polynomial, Proof, BYTES_PER_FIELD_ELEMENT,
};
#[derive(Copy, Clone)]
pub struct DaEncoderParams {
column_count: usize,
}
impl DaEncoderParams {
const MAX_BLS12_381_ENCODING_CHUNK_SIZE: usize = 31;
const fn default_with(column_count: usize) -> Self {
Self { column_count }
}
}
pub struct EncodedData {
data: Vec<u8>,
chunked_data: ChunksMatrix,
extended_data: ChunksMatrix,
row_commitments: Vec<Commitment>,
rows_proofs: Vec<Vec<Proof>>,
column_commitments: Vec<Commitment>,
aggregated_column_commitment: Commitment,
aggregated_column_proofs: Vec<Proof>,
}
pub struct DaEncoder {
params: DaEncoderParams,
}
impl DaEncoder {
pub const fn new(settings: DaEncoderParams) -> Self {
Self { params: settings }
}
fn chunkify(&self, data: &[u8]) -> ChunksMatrix {
let chunk_size =
// the column count is halved, as rows are later RS-encoded to twice their size
self.params.column_count.div(2) * DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE;
data.chunks(chunk_size)
.map(|d| {
d.chunks(DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE)
.map(|chunk| {
let mut buff = [0u8; BYTES_PER_FIELD_ELEMENT];
buff[..DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE]
.copy_from_slice(chunk);
Chunk::from(buff.as_slice())
})
.collect()
})
.collect()
}
fn compute_kzg_row_commitments(
matrix: &ChunksMatrix,
) -> Result<Vec<((Evaluations, Polynomial), Commitment)>, KzgRsError> {
matrix
.rows()
.map(|r| {
// Using the unchecked version here, because during chunkification we
// already make sure the chunks are proper field elements.
// Also, after RS encoding, we are sure all `Fr` elements already fit within the modulus.
let (evals, poly) = bytes_to_polynomial_unchecked::<BYTES_PER_FIELD_ELEMENT>(
r.as_bytes().as_ref(),
*DOMAIN,
);
commit_polynomial(&poly, &GLOBAL_PARAMETERS)
.map(|commitment| ((evals, poly), commitment))
})
.collect()
}
fn rs_encode_row(evaluations: &Evaluations, row: &Polynomial) -> Evaluations {
encode(row, evaluations, 2, *DOMAIN)
}
fn rs_encode_rows(rows: &[(Evaluations, Polynomial)]) -> Vec<Evaluations> {
rows.iter()
.map(|(eval, poly)| Self::rs_encode_row(eval, poly))
.collect()
}
fn compute_rows_proofs(
polynomials: &[Polynomial],
proof_count: usize,
) -> Result<Vec<Vec<Proof>>, KzgRsError> {
polynomials
.iter()
.map(|poly| {
(0..proof_count)
.map(|i| generate_element_proof(i, poly, &GLOBAL_PARAMETERS, *DOMAIN))
.collect()
})
.collect()
}
fn compute_kzg_column_commitments(
matrix: &ChunksMatrix,
) -> Result<Vec<((Evaluations, Polynomial), Commitment)>, KzgRsError> {
Self::compute_kzg_row_commitments(&matrix.transposed())
}
fn compute_aggregated_column_commitment(
matrix: &ChunksMatrix,
commitments: &[Commitment],
) -> Result<((Evaluations, Polynomial), Commitment), KzgRsError> {
let hashes: Vec<u8> =
matrix
.columns()
.zip(commitments)
.flat_map(|(column, commitment)| {
hash_column_and_commitment::<
{ DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE },
>(&column, commitment)
})
.collect();
let (evals, poly) = bytes_to_polynomial::<
{ DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE },
>(hashes.as_ref(), *DOMAIN)?;
let commitment = commit_polynomial(&poly, &GLOBAL_PARAMETERS)?;
Ok(((evals, poly), commitment))
}
fn compute_aggregated_column_proofs(
polynomial: &Polynomial,
proof_count: usize,
) -> Result<Vec<Proof>, KzgRsError> {
(0..proof_count)
.map(|i| generate_element_proof(i, polynomial, &GLOBAL_PARAMETERS, *DOMAIN))
.collect()
}
fn evals_to_chunk_matrix(evals: &[Evaluations]) -> ChunksMatrix {
ChunksMatrix(
evals
.iter()
.map(|eval| {
Row(eval
.evals
.iter()
.map(|point| Chunk(point.into_bigint().to_bytes_le()))
.collect())
})
.collect(),
)
}
pub fn encode(&self, data: &[u8]) -> Result<EncodedData, kzgrs::KzgRsError> {
let chunked_data = self.chunkify(data);
let (row_polynomials, row_commitments): (Vec<_>, Vec<_>) =
Self::compute_kzg_row_commitments(&chunked_data)?
.into_iter()
.unzip();
let extended_data =
Self::evals_to_chunk_matrix(Self::rs_encode_rows(&row_polynomials).as_ref());
let row_polynomials: Vec<_> = row_polynomials.into_iter().map(|(_, p)| p).collect();
let rows_proofs = Self::compute_rows_proofs(&row_polynomials, self.params.column_count)?;
let (_column_polynomials, column_commitments): (Vec<_>, Vec<_>) =
Self::compute_kzg_column_commitments(&extended_data)?
.into_iter()
.unzip();
let ((_aggregated_evals, aggregated_polynomial), aggregated_column_commitment) =
Self::compute_aggregated_column_commitment(&extended_data, &column_commitments)?;
let aggregated_column_proofs = Self::compute_aggregated_column_proofs(
&aggregated_polynomial,
column_commitments.len(),
)?;
Ok(EncodedData {
data: data.to_vec(),
chunked_data,
extended_data,
row_commitments,
rows_proofs,
column_commitments,
aggregated_column_commitment,
aggregated_column_proofs,
})
}
}
#[cfg(test)]
pub mod test {
use crate::encoder::{DaEncoder, DaEncoderParams};
use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
use ark_ff::PrimeField;
use itertools::izip;
use kzgrs::common::bytes_to_polynomial_unchecked;
use kzgrs::{decode, verify_element_proof, FieldElement, BYTES_PER_FIELD_ELEMENT};
use rand::RngCore;
use std::ops::Div;
pub const PARAMS: DaEncoderParams = DaEncoderParams::default_with(16);
pub const ENCODER: DaEncoder = DaEncoder::new(PARAMS);
pub fn rand_data(elements_count: usize) -> Vec<u8> {
let mut buff = vec![0; elements_count * DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE];
rand::thread_rng().fill_bytes(&mut buff);
buff
}
#[test]
fn test_chunkify() {
let params = DaEncoderParams::default_with(2);
let elements = 10usize;
let data = rand_data(elements);
let encoder = DaEncoder::new(params);
let matrix = encoder.chunkify(&data);
assert_eq!(matrix.len(), elements.div(params.column_count.div(2)));
for row in matrix.rows() {
assert_eq!(row.len(), params.column_count.div(2));
assert_eq!(row.0[0].len(), BYTES_PER_FIELD_ELEMENT);
}
}
#[test]
fn test_compute_row_kzg_commitments() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let commitments_data = DaEncoder::compute_kzg_row_commitments(&matrix).unwrap();
assert_eq!(commitments_data.len(), matrix.len());
}
#[test]
fn test_evals_to_chunk_matrix() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (poly_data, _): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_row_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_rows);
for (r1, r2) in izip!(matrix.iter(), extended_matrix.iter()) {
for (c1, c2) in izip!(r1.iter(), r2.iter()) {
assert_eq!(c1, c2);
}
}
}
#[test]
fn test_rs_encode_rows() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (poly_data, _): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_row_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
let (evals, _): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
// check encoding went well, original evaluation points vs extended ones
for (e1, e2) in izip!(evals.iter(), extended_rows.iter()) {
for (c1, c2) in izip!(&e1.evals, &e2.evals) {
assert_eq!(c1, c2);
}
}
let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_rows);
for (r1, r2, evals) in izip!(matrix.iter(), extended_matrix.iter(), extended_rows) {
assert_eq!(r1.len(), r2.len().div(2));
for (c1, c2) in izip!(r1.iter(), r2.iter()) {
assert_eq!(c1, c2);
}
let points: Vec<_> = evals.evals.iter().cloned().map(Some).collect();
let poly_2 = decode(r1.len(), &points, *DOMAIN);
let (poly_1, _) = bytes_to_polynomial_unchecked::<BYTES_PER_FIELD_ELEMENT>(
r1.as_bytes().as_ref(),
*DOMAIN,
);
assert_eq!(poly_1, poly_2);
}
}
#[test]
fn test_compute_row_proofs() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (poly_data, commitments): (Vec<_>, Vec<_>) =
DaEncoder::compute_kzg_row_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let extended_rows = DaEncoder::rs_encode_rows(&poly_data);
let (_evals, polynomials): (Vec<_>, Vec<_>) = poly_data.into_iter().unzip();
let extended_matrix = DaEncoder::evals_to_chunk_matrix(&extended_rows);
let original_proofs =
DaEncoder::compute_rows_proofs(&polynomials, PARAMS.column_count.div(2)).unwrap();
let extended_proofs =
DaEncoder::compute_rows_proofs(&polynomials, PARAMS.column_count).unwrap();
let checks = izip!(matrix.iter(), &commitments, &original_proofs);
for (row, commitment, proofs) in checks {
assert_eq!(proofs.len(), row.len());
for (i, chunk) in row.iter().enumerate() {
let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
assert!(verify_element_proof(
i,
&element,
&commitment,
&proofs[i],
*DOMAIN,
&GLOBAL_PARAMETERS
));
}
}
let checks = izip!(extended_matrix.iter(), &commitments, &extended_proofs);
for (row, commitment, proofs) in checks {
assert_eq!(proofs.len(), row.len());
for (i, chunk) in row.iter().enumerate() {
let element = FieldElement::from_le_bytes_mod_order(chunk.as_bytes().as_ref());
assert!(verify_element_proof(
i,
&element,
&commitment,
&proofs[i],
*DOMAIN,
&GLOBAL_PARAMETERS
));
}
}
}
#[test]
fn test_compute_column_kzg_commitments() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let commitments_data = DaEncoder::compute_kzg_column_commitments(&matrix).unwrap();
assert_eq!(commitments_data.len(), matrix.columns().count());
}
#[test]
fn test_compute_aggregated_column_kzg_commitment() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (_, commitments): (Vec<_>, Vec<_>) = DaEncoder::compute_kzg_column_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let _ = DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments).unwrap();
}
#[test]
fn test_compute_aggregated_column_kzg_proofs() {
let data = rand_data(32);
let matrix = ENCODER.chunkify(data.as_ref());
let (_poly_data, commitments): (Vec<_>, Vec<_>) =
DaEncoder::compute_kzg_column_commitments(&matrix)
.unwrap()
.into_iter()
.unzip();
let ((_, polynomial), _aggregated_commitment) =
DaEncoder::compute_aggregated_column_commitment(&matrix, &commitments).unwrap();
DaEncoder::compute_aggregated_column_proofs(&polynomial, commitments.len()).unwrap();
}
}
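For completeness, the pieces above compose end to end. A hedged sketch of a shape test (hypothetical, not part of the diff), reusing PARAMS, ENCODER and rand_data from the module above; EncodedData fields are private but reachable from within the crate:

#[test]
fn test_encode_shape_sketch() {
    let data = rand_data(32);
    let encoded = ENCODER.encode(&data).unwrap();
    // one commitment and one proof set per chunked row
    assert_eq!(encoded.row_commitments.len(), encoded.chunked_data.len());
    assert_eq!(encoded.rows_proofs.len(), encoded.row_commitments.len());
    // RS extension doubles the row length, so there are column_count extended
    // columns, each with a column commitment and one aggregated proof
    assert_eq!(encoded.column_commitments.len(), PARAMS.column_count);
    assert_eq!(
        encoded.aggregated_column_proofs.len(),
        encoded.column_commitments.len()
    );
}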

View File

@@ -0,0 +1,11 @@
use ark_poly::EvaluationDomain;
use kzgrs::{global_parameters_from_randomness, GlobalParameters, PolynomialEvaluationDomain};
use once_cell::sync::Lazy;
pub static GLOBAL_PARAMETERS: Lazy<GlobalParameters> = Lazy::new(|| {
let mut rng = rand::thread_rng();
global_parameters_from_randomness(&mut rng)
});
pub static DOMAIN: Lazy<PolynomialEvaluationDomain> =
Lazy::new(|| PolynomialEvaluationDomain::new(8192).unwrap());
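GLOBAL_PARAMETERS above is sampled from thread_rng on first access, which is fine for tests but is not a real trusted setup. A sketch (assumption, not in the diff; SEEDED_GLOBAL_PARAMETERS is a hypothetical name) of a deterministic variant for reproducible runs:

use rand::{rngs::StdRng, SeedableRng};

pub static SEEDED_GLOBAL_PARAMETERS: Lazy<GlobalParameters> = Lazy::new(|| {
    // fixed seed so every run derives identical parameters; the value is arbitrary
    let mut rng = StdRng::seed_from_u64(1987);
    global_parameters_from_randomness(&mut rng)
});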

View File

@@ -0,0 +1,3 @@
mod common;
mod encoder;
mod global;

View File

@@ -17,6 +17,4 @@ ark-serialize = { version = "0.4" }
num-bigint = "0.4.4"
thiserror = "1.0.58"
num-traits = "0.2.18"
-[dev-dependencies]
-rand = "0.8.5"

View File

@@ -1,10 +1,12 @@
// std
// crates
+use crate::BYTES_PER_FIELD_ELEMENT;
use ark_bls12_381::fr::Fr;
-use ark_ff::{BigInteger256, PrimeField, Zero};
+use ark_ff::Zero;
use ark_poly::domain::general::GeneralEvaluationDomain;
use ark_poly::evaluations::univariate::Evaluations;
use ark_poly::univariate::DensePolynomial;
+use num_bigint::BigUint;
use thiserror::Error;
// internal
@@ -33,10 +35,8 @@ pub fn bytes_to_evaluations<const CHUNK_SIZE: usize>(
.map(|e| {
// use little endian for convenience; shortening by 1 byte (<32 supported)
// does not matter in this endianness
-let bint: BigInteger256 = Fr::from_le_bytes_mod_order(e)
-.try_into()
-.expect("Bytes size should fit for an 256 bits integer");
-Fr::new(bint)
+let bui = BigUint::from_bytes_le(e);
+Fr::from(bui)
})
.collect(),
domain,
@@ -52,7 +52,7 @@ pub fn bytes_to_polynomial<const CHUNK_SIZE: usize>(
data: &[u8],
domain: GeneralEvaluationDomain<Fr>,
) -> Result<(Evaluations<Fr>, DensePolynomial<Fr>), KzgRsError> {
-if CHUNK_SIZE >= 32 {
+if CHUNK_SIZE > BYTES_PER_FIELD_ELEMENT {
return Err(KzgRsError::ChunkSizeTooBig(CHUNK_SIZE));
}
if data.len() % CHUNK_SIZE != 0 {
@@ -61,9 +61,21 @@
current_size: data.len(),
});
}
+Ok(bytes_to_polynomial_unchecked::<CHUNK_SIZE>(data, domain))
+}
+/// Transform chunks of bytes (of size `CHUNK_SIZE`) into `Fr` elements, which are considered
+/// evaluations of a polynomial. Then use FFT to transform that polynomial into coefficient form.
+/// No extra checks are done for the caller.
+/// The caller needs to ensure that `CHUNK_SIZE` fits within the underlying `Fr` element.
+pub fn bytes_to_polynomial_unchecked<const CHUNK_SIZE: usize>(
+data: &[u8],
+domain: GeneralEvaluationDomain<Fr>,
+) -> (Evaluations<Fr>, DensePolynomial<Fr>) {
let evals = bytes_to_evaluations::<CHUNK_SIZE>(data, domain);
let coefficients = evals.interpolate_by_ref();
-Ok((evals, coefficients))
+(evals, coefficients)
}
#[cfg(test)]
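To make the checked/unchecked split concrete, a small sketch (hypothetical, not part of the diff), with a domain like the ones used in the tests:

fn bytes_to_polynomial_sketch(domain: GeneralEvaluationDomain<Fr>) {
    // 4 chunks of 31 bytes each; 31 < 32, so every chunk fits in an Fr
    let bytes = [0u8; 31 * 4];
    // checked: validates CHUNK_SIZE and the chunk alignment of the data
    let (evals, poly) = bytes_to_polynomial::<31>(&bytes, domain).unwrap();
    // a trailing partial chunk is rejected with a KzgRsError
    assert!(bytes_to_polynomial::<31>(&bytes[..30], domain).is_err());
    // unchecked: same output, validation is the caller's responsibility
    let (evals2, poly2) = bytes_to_polynomial_unchecked::<31>(&bytes, domain);
    assert_eq!(poly, poly2);
    assert_eq!(evals.evals, evals2.evals);
}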

View File

@@ -0,0 +1,9 @@
use super::GlobalParameters;
use ark_bls12_381::{fr::Fr, Bls12_381};
use ark_poly::polynomial::univariate::DensePolynomial;
use ark_poly_commit::kzg10::KZG10;
use rand::Rng;
pub fn global_parameters_from_randomness<R: Rng>(rng: &mut R) -> GlobalParameters {
KZG10::<Bls12_381, DensePolynomial<Fr>>::setup(8192, true, rng).unwrap()
}

View File

@@ -28,7 +28,7 @@ pub fn generate_element_proof(
element_index: usize,
polynomial: &DensePolynomial<Fr>,
global_parameters: &UniversalParams<Bls12_381>,
-domain: &GeneralEvaluationDomain<Fr>,
+domain: GeneralEvaluationDomain<Fr>,
) -> Result<Proof<Bls12_381>, KzgRsError> {
let u = domain.element(element_index);
let v = polynomial.evaluate(&u);
@@ -49,7 +49,7 @@ pub fn verify_element_proof(
element: &Fr,
commitment: &Commitment<Bls12_381>,
proof: &Proof<Bls12_381>,
-domain: &GeneralEvaluationDomain<Fr>,
+domain: GeneralEvaluationDomain<Fr>,
global_parameters: &UniversalParams<Bls12_381>,
) -> bool {
let u = domain.element(element_index);
@@ -103,7 +103,7 @@ mod test {
let (_, poly) = bytes_to_polynomial::<31>(&bytes, *DOMAIN).unwrap();
let commitment = commit_polynomial(&poly, &GLOBAL_PARAMETERS).unwrap();
let proofs: Vec<_> = (0..10)
-.map(|i| generate_element_proof(i, &poly, &GLOBAL_PARAMETERS, &DOMAIN).unwrap())
+.map(|i| generate_element_proof(i, &poly, &GLOBAL_PARAMETERS, *DOMAIN).unwrap())
.collect();
for (i, (element, proof)) in evaluations.iter().zip(proofs.iter()).enumerate() {
// verifying works
@@ -112,7 +112,7 @@
element,
&commitment,
proof,
-&DOMAIN,
+*DOMAIN,
&GLOBAL_PARAMETERS
));
// verification fails for other items
@@ -122,7 +122,7 @@
element,
&commitment,
proof,
-&DOMAIN,
+*DOMAIN,
&GLOBAL_PARAMETERS
));
}

View File

@@ -1,3 +1,27 @@
pub mod common;
+pub mod global_parameters;
pub mod kzg;
pub mod rs;
+use ark_bls12_381::{Bls12_381, Fr};
+use ark_poly::univariate::DensePolynomial;
+use ark_poly::GeneralEvaluationDomain;
+use ark_poly_commit::kzg10;
+use ark_poly_commit::sonic_pc::UniversalParams;
+use std::mem;
+pub use common::{bytes_to_evaluations, bytes_to_polynomial, KzgRsError};
+pub use global_parameters::global_parameters_from_randomness;
+pub use kzg::{commit_polynomial, generate_element_proof, verify_element_proof};
+pub use rs::{decode, encode};
+pub type Commitment = kzg10::Commitment<Bls12_381>;
+pub type Proof = kzg10::Proof<Bls12_381>;
+pub type FieldElement = ark_bls12_381::Fr;
+pub type Polynomial = DensePolynomial<Fr>;
+pub type Evaluations = ark_poly::Evaluations<Fr>;
+pub type PolynomialEvaluationDomain = GeneralEvaluationDomain<Fr>;
+pub type GlobalParameters = UniversalParams<Bls12_381>;
+pub const BYTES_PER_FIELD_ELEMENT: usize = mem::size_of::<Fr>();
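Taken together, the re-exports give a compact proving API. A hedged sketch of a commit/prove/verify round trip (hypothetical, not in the diff), assuming global parameters and a domain set up as in the tests:

fn kzg_roundtrip_sketch(params: &GlobalParameters, domain: PolynomialEvaluationDomain) {
    let data = [7u8; 31 * 8];
    let (evals, poly): (Evaluations, Polynomial) =
        bytes_to_polynomial::<31>(&data, domain).unwrap();
    let commitment: Commitment = commit_polynomial(&poly, params).unwrap();
    // prove the evaluation at the first domain element
    let proof: Proof = generate_element_proof(0, &poly, params, domain).unwrap();
    assert!(verify_element_proof(
        0,
        &evals.evals[0],
        &commitment,
        &proof,
        domain,
        params
    ));
}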

View File

@@ -14,14 +14,14 @@ pub fn encode(
polynomial: &DensePolynomial<Fr>,
evaluations: &Evaluations<Fr>,
factor: usize,
-domain: &GeneralEvaluationDomain<Fr>,
+domain: GeneralEvaluationDomain<Fr>,
) -> Evaluations<Fr> {
assert!(factor > 1);
Evaluations::from_vec_and_domain(
(0..evaluations.evals.len() * factor)
.map(|i| polynomial.evaluate(&domain.element(i)))
.collect(),
-*domain,
+domain,
)
}
@@ -31,7 +31,7 @@ pub fn encode(
pub fn decode(
original_chunks_len: usize,
points: &[Option<Fr>],
-domain: &GeneralEvaluationDomain<Fr>,
+domain: GeneralEvaluationDomain<Fr>,
) -> Evaluations<Fr> {
let (points, roots_of_unity): (Vec<Fr>, Vec<Fr>) = points
.iter()
@@ -49,7 +49,7 @@ pub fn decode(
(0..original_chunks_len)
.map(|i| coeffs.evaluate(&domain.element(i)))
.collect(),
-*domain,
+domain,
)
}
@@ -88,8 +88,7 @@ pub fn points_to_bytes<const CHUNK_SIZE: usize>(points: &[Fr]) -> Vec<u8> {
}
points
.iter()
-.map(point_to_buff::<CHUNK_SIZE>)
-.flatten()
+.flat_map(point_to_buff::<CHUNK_SIZE>)
.collect()
}
@@ -114,10 +113,10 @@ mod test {
let (evals, poly) = bytes_to_polynomial::<31>(&bytes, *DOMAIN).unwrap();
-let encoded = encode(&poly, &evals, 2, &DOMAIN);
+let encoded = encode(&poly, &evals, 2, *DOMAIN);
let mut encoded: Vec<Option<Fr>> = encoded.evals.into_iter().map(Some).collect();
-let decoded = decode(10, &encoded, &DOMAIN);
+let decoded = decode(10, &encoded, *DOMAIN);
let decoded_bytes = points_to_bytes::<31>(&decoded.evals);
assert_eq!(decoded_bytes, bytes);
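The Option wrapper in decode's input is what models erasures. A sketch (hypothetical, not part of the diff) mirroring the shapes of the test above, with the extension half erased; more generally, any original_chunks_len distinct surviving points suffice once the interpolated polynomial's degree is below that count:

fn erasure_sketch(domain: GeneralEvaluationDomain<Fr>) {
    let bytes = [42u8; 31 * 10];
    let (evals, poly) = bytes_to_polynomial::<31>(&bytes, domain).unwrap();
    let encoded = encode(&poly, &evals, 2, domain);
    // erase the extension half, keeping the 10 original evaluation points
    let received: Vec<Option<Fr>> = encoded
        .evals
        .into_iter()
        .enumerate()
        .map(|(i, e)| (i < 10).then_some(e))
        .collect();
    let decoded = decode(10, &received, domain);
    assert_eq!(points_to_bytes::<31>(&decoded.evals), bytes.to_vec());
}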