feature(encoder): Implement Iterator over DaBlob for EncodedData (#952)

This commit is contained in:
Álex 2024-12-19 18:40:18 +01:00 committed by GitHub
parent 8f79bafe55
commit 739df1737a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 87 additions and 2 deletions

View File

@ -1,6 +1,5 @@
// std
use std::ops::Div;
// crates
use ark_ff::{BigInteger, PrimeField};
use ark_poly::EvaluationDomain;
@ -10,10 +9,11 @@ use kzgrs::{
bytes_to_polynomial, commit_polynomial, encode, Commitment, Evaluations, GlobalParameters,
KzgRsError, Polynomial, PolynomialEvaluationDomain, Proof, BYTES_PER_FIELD_ELEMENT,
};
#[cfg(feature = "parallel")]
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
// internal
use crate::common::blob::DaBlob;
use crate::common::{hash_column_and_commitment, Chunk, ChunksMatrix, Row};
use crate::global::GLOBAL_PARAMETERS;
@ -57,6 +57,75 @@ pub struct EncodedData {
pub aggregated_column_proofs: Vec<Proof>,
}
impl EncodedData {
    /// Returns the [`DaBlob`] for the column at `index`.
    ///
    /// Returns `None` when `index` is out of bounds for the extended data —
    /// or for any of the per-column commitment/proof collections — instead
    /// of panicking, matching the documented contract.
    pub fn to_da_blob(&self, index: usize) -> Option<DaBlob> {
        let column = self.extended_data.columns().nth(index)?;
        // Gather this column's proof from every row. Collecting into
        // `Option<Vec<_>>` short-circuits to `None` if any row is too short,
        // instead of panicking on `unwrap` as the previous version did.
        let rows_proofs = self
            .rows_proofs
            .iter()
            .map(|proofs| proofs.get(index).cloned())
            .collect::<Option<Vec<_>>>()?;
        Some(DaBlob {
            column,
            // A column index that does not fit the target type is a
            // programming error, so the invariant is stated via `expect`.
            column_idx: index
                .try_into()
                .expect("column index must fit DaBlob::column_idx"),
            // Checked lookups so a length mismatch yields `None`, not a panic.
            column_commitment: *self.column_commitments.get(index)?,
            aggregated_column_commitment: self.aggregated_column_commitment,
            aggregated_column_proof: *self.aggregated_column_proofs.get(index)?,
            rows_commitments: self.row_commitments.clone(),
            rows_proofs,
        })
    }
}
impl<'a> IntoIterator for &'a EncodedData {
type Item = DaBlob;
type IntoIter = EncodedDataIterator<'a>;
fn into_iter(self) -> Self::IntoIter {
EncodedDataIterator::new(self)
}
}
/// Iterator that takes ownership of an [`EncodedData`] and yields one
/// [`DaBlob`] per column.
pub struct OwnedEncodedDataIterator {
    // The encoded data the blobs are built from.
    encoded_data: EncodedData,
    // Index of the next column to turn into a `DaBlob`.
    next_index: usize,
}
impl Iterator for OwnedEncodedDataIterator {
    type Item = DaBlob;

    /// Produces the blob for the current column and advances the cursor.
    /// Returns `None` once every column has been yielded.
    fn next(&mut self) -> Option<Self::Item> {
        self.encoded_data
            .to_da_blob(self.next_index)
            .map(|blob| {
                // Only advance on success, exactly like the `?`-based form.
                self.next_index += 1;
                blob
            })
    }
}
/// Iterator that borrows an [`EncodedData`] and yields one [`DaBlob`] per
/// column without consuming the underlying data.
pub struct EncodedDataIterator<'a> {
    // Borrowed encoded data the blobs are built from.
    encoded_data: &'a EncodedData,
    // Index of the next column to turn into a `DaBlob`.
    next_index: usize,
}
impl<'a> EncodedDataIterator<'a> {
pub fn new(encoded_data: &'a EncodedData) -> Self {
Self {
encoded_data,
next_index: 0,
}
}
}
impl Iterator for EncodedDataIterator<'_> {
    type Item = DaBlob;

    /// Produces the blob for the current column and advances the cursor.
    /// Returns `None` once every column has been yielded.
    fn next(&mut self) -> Option<Self::Item> {
        self.encoded_data
            .to_da_blob(self.next_index)
            .map(|blob| {
                // Only advance on success, exactly like the `?`-based form.
                self.next_index += 1;
                blob
            })
    }
}
/// Data-availability encoder; holds the configuration used by its encoding
/// routines.
pub struct DaEncoder {
    // Encoder configuration — presumably chunking/column parameters; see
    // `DaEncoderParams` for the exact fields.
    params: DaEncoderParams,
}
@ -507,4 +576,20 @@ pub mod test {
}
}
}
#[test]
fn test_encoded_data_iterator() {
    let encoder = &ENCODER;
    // 31-byte payload: byte 49 (ASCII '1') followed by thirty zero bytes.
    let mut data = vec![0u8; 31];
    data[0] = 49u8;
    let encoded_data = encoder.encode(&data).unwrap();

    // Borrowing iteration leaves `encoded_data` usable afterwards.
    let borrowed_blobs: Vec<_> = (&encoded_data).into_iter().collect();
    assert_eq!(borrowed_blobs.len(), 16);

    // Iterating the value itself yields the same number of blobs.
    let owned_blobs: Vec<_> = encoded_data.into_iter().collect();
    assert_eq!(owned_blobs.len(), 16);
}
}