Implement encoder with chunkify

This commit is contained in:
Daniel Sanchez Quiros 2024-04-11 17:29:23 +03:00
parent 15f4f4f1b9
commit fe2c5534a3
3 changed files with 86 additions and 3 deletions

View File

@ -2,7 +2,7 @@
pub struct Chunk(Vec<u8>); pub struct Chunk(Vec<u8>);
pub struct Row(Vec<Chunk>); pub struct Row(Vec<Chunk>);
pub struct Column(Vec<Chunk>); pub struct Column(Vec<Chunk>);
struct ChunksMatrix(Vec<Row>); pub struct ChunksMatrix(Vec<Row>);
impl Chunk { impl Chunk {
pub fn as_bytes(&self) -> Vec<u8> { pub fn as_bytes(&self) -> Vec<u8> {
@ -14,15 +14,27 @@ impl Chunk {
} }
} }
impl From<&[u8]> for Chunk {
fn from(value: &[u8]) -> Self {
Self(value.to_vec())
}
}
impl Row { impl Row {
pub fn as_bytes(&self) -> Vec<u8> { pub fn as_bytes(&self) -> Vec<u8> {
self.0.iter().map(Chunk::as_bytes).flatten().collect() self.0.iter().flat_map(Chunk::as_bytes).collect()
} }
} }
impl Column { impl Column {
pub fn as_bytes(&self) -> Vec<u8> { pub fn as_bytes(&self) -> Vec<u8> {
self.0.iter().map(Chunk::as_bytes).flatten().collect() self.0.iter().flat_map(Chunk::as_bytes).collect()
}
}
impl FromIterator<Chunk> for Row {
fn from_iter<T: IntoIterator<Item = Chunk>>(iter: T) -> Self {
Self(iter.into_iter().collect())
} }
} }
@ -47,3 +59,9 @@ impl ChunksMatrix {
Self(self.columns().map(|c| Row(c.0)).collect()) Self(self.columns().map(|c| Row(c.0)).collect())
} }
} }
impl FromIterator<Row> for ChunksMatrix {
    /// Assembles a matrix from rows, preserving iteration order.
    fn from_iter<T: IntoIterator<Item = Row>>(iter: T) -> Self {
        let rows: Vec<Row> = iter.into_iter().collect();
        Self(rows)
    }
}

View File

@ -0,0 +1,51 @@
use crate::common::{Chunk, ChunksMatrix};
use kzgrs::{Commitment, Proof, BYTES_PER_FIELD_ELEMENT};
/// Configuration for the DA encoder: matrix width and chunk granularity.
pub struct DaEncoderParams {
    // Number of chunks per row of the chunk matrix.
    column_count: usize,
    // Size of each chunk in bytes; `DaEncoder::new` asserts this is strictly
    // below `BYTES_PER_FIELD_ELEMENT`.
    bytes_per_chunk: usize,
}
impl DaEncoderParams {
const MAX_BLS12_381_ENCODING_CHUNK_SIZE: usize = 31;
const fn default_with(column_count: usize) -> Self {
Self {
column_count,
bytes_per_chunk: Self::MAX_BLS12_381_ENCODING_CHUNK_SIZE,
}
}
}
/// Output of the DA encoding pipeline: the original payload, its chunk
/// matrices, and the KZG commitments/proofs over them.
pub struct EncodedData {
    // Original raw payload bytes.
    data: Vec<u8>,
    // `data` split into a matrix of chunks (see `DaEncoder::chunkify`).
    chunked_data: ChunksMatrix,
    // NOTE(review): presumably the erasure-extended version of `chunked_data`
    // (the crate exposes `rs::encode`) — confirm once the encoder fills this in.
    extended_data: ChunksMatrix,
    // One KZG commitment per row of the matrix.
    row_commitments: Vec<Commitment>,
    // Per-row element proofs: outer index is the row, inner the element.
    row_proofs: Vec<Vec<Proof>>,
    // One KZG commitment per column of the matrix.
    column_commitments: Vec<Commitment>,
    // Commitment aggregating the column commitments.
    aggregated_column_commitment: Commitment,
    // Proofs against the aggregated column commitment.
    aggregated_column_proofs: Vec<Proof>,
}
/// Data-availability encoder; construct via `DaEncoder::new`, which
/// validates the chunk size against the field element size.
struct DaEncoder {
    params: DaEncoderParams,
}
impl DaEncoder {
    /// Creates an encoder after validating that a chunk fits strictly inside
    /// a field element (a full-size chunk could exceed the BLS12-381 scalar
    /// modulus when interpreted as a field element).
    ///
    /// # Panics
    /// Panics if `settings.bytes_per_chunk >= BYTES_PER_FIELD_ELEMENT`.
    pub const fn new(settings: DaEncoderParams) -> Self {
        assert!(settings.bytes_per_chunk < BYTES_PER_FIELD_ELEMENT);
        Self { params: settings }
    }

    /// Splits `data` into a matrix of rows, each row holding up to
    /// `column_count` chunks of up to `bytes_per_chunk` bytes.
    ///
    /// Fix: the original used `slice::windows`, which yields *overlapping*
    /// sliding views (duplicating almost every input byte) and yields nothing
    /// at all when `data` is shorter than the window. `slice::chunks` yields
    /// disjoint partitions, which is what chunkification requires; the final
    /// row/chunk may be shorter when `data` is not an exact multiple.
    fn chunkify(&self, data: &[u8]) -> ChunksMatrix {
        let row_size = self.params.column_count * self.params.bytes_per_chunk;
        data.chunks(row_size)
            .map(|row| {
                row.chunks(self.params.bytes_per_chunk)
                    .map(Chunk::from)
                    .collect()
            })
            .collect()
    }
}

View File

@ -1,3 +1,17 @@
pub mod common; pub mod common;
pub mod kzg; pub mod kzg;
pub mod rs; pub mod rs;
use ark_bls12_381::{Bls12_381, Fr};
use ark_poly_commit::kzg10;
use std::mem;
pub use common::{bytes_to_evaluations, bytes_to_polynomial, KzgRsError};
pub use kzg::{commit_polynomial, generate_element_proof, verify_element_proof};
pub use rs::{decode, encode};
/// KZG10 commitment over the BLS12-381 curve.
pub type Commitment = kzg10::Commitment<Bls12_381>;
/// KZG10 opening proof over the BLS12-381 curve.
pub type Proof = kzg10::Proof<Bls12_381>;
/// Scalar field element of BLS12-381.
pub type FieldElement = ark_bls12_381::Fr;
/// In-memory size of a scalar field element, in bytes.
pub const BYTES_PER_FIELD_ELEMENT: usize = mem::size_of::<Fr>();