
Add sample size test (ignored)

Daniel Sanchez Quiros 2024-12-24 12:52:23 +01:00
parent 1260230898
commit be45f05ef1
2 changed files with 116 additions and 2 deletions

@@ -9,13 +9,11 @@ edition = "2021"
ark-ff = "0.4"
ark-serialize = "0.4.2"
ark-poly = "0.4.2"
bitvec = { version = "1.0.1", features = ["serde"] }
blake2 = "0.10"
blst = { version = "0.3.11", features = ["serde"] }
itertools = "0.12"
kzgrs = { path = "../kzgrs" }
nomos-core = { path = "../../nomos-core/chain-defs" }
num-bigint = "0.4.4"
rand = "0.8.5"
once_cell = "1.19"
sha3 = "0.10"
@@ -26,6 +24,7 @@ rayon = { version = "1.10.0", optional = true }
rand = "0.8"
ark-bls12-381 = "0.4.0"
divan = "0.1"
bincode = "1.3"
[features]
default = []

@@ -72,3 +72,118 @@ impl blob::Blob for DaBlob {
        self.column_idx.to_be_bytes()
    }
}

#[cfg(test)]
mod tests {
    use crate::common::blob::DaBlob;
    use crate::common::{deserialize_canonical, serialize_canonical};
    use crate::common::{Chunk, Column};
    use crate::encoder::{DaEncoder, DaEncoderParams};
    use crate::global::GLOBAL_PARAMETERS;
    use kzgrs::{Commitment, Proof};
    use nomos_core::da::DaEncoder as _;
    use rand::{thread_rng, RngCore};
    use serde::{Deserialize, Serialize};
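
    /// Encodes `data` with the DA encoder and packs the first column of the
    /// extended data, together with its commitments and proofs, into a `DaBlob`.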
    pub fn get_da_blob(data: Vec<u8>) -> DaBlob {
        let encoder_params = DaEncoderParams::new(2048, true, GLOBAL_PARAMETERS.clone());
        let encoder = DaEncoder::new(encoder_params);
        let encoded_data = encoder.encode(&data).unwrap();
        let columns: Vec<_> = encoded_data.extended_data.columns().collect();
        let index = 0;
        let da_blob = DaBlob {
            column: columns[index].clone(),
            column_idx: index
                .try_into()
                .expect("Column index shouldn't overflow the target type"),
            column_commitment: encoded_data.column_commitments[index],
            aggregated_column_commitment: encoded_data.aggregated_column_commitment,
            aggregated_column_proof: encoded_data.aggregated_column_proofs[index],
            rows_commitments: encoded_data.row_commitments.clone(),
            rows_proofs: encoded_data
                .rows_proofs
                .iter()
                .map(|proofs| proofs.get(index).cloned().unwrap())
                .collect(),
        };
        da_blob
    }
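
    /// A single chunk of a column together with the commitments and proofs needed
    /// to verify it on its own; used here only to compare serialized sample sizes
    /// against a full `DaBlob`.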
    #[derive(Debug, Clone, Serialize, Deserialize)]
    struct DaChunk {
        chunk: Chunk,
        #[serde(
            serialize_with = "serialize_canonical",
            deserialize_with = "deserialize_canonical"
        )]
        chunk_proof: Proof,
        column_idx: usize,
        #[serde(
            serialize_with = "serialize_canonical",
            deserialize_with = "deserialize_canonical"
        )]
        column_commitment: Commitment,
        #[serde(
            serialize_with = "serialize_canonical",
            deserialize_with = "deserialize_canonical"
        )]
        aggregated_column_commitment: Commitment,
        #[serde(
            serialize_with = "serialize_canonical",
            deserialize_with = "deserialize_canonical"
        )]
        row_commitment: Commitment,
        #[serde(
            serialize_with = "serialize_canonical",
            deserialize_with = "deserialize_canonical"
        )]
        row_proofs: Proof,
    }
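
    /// Extracts the first chunk of the blob's column into a `DaChunk`, reusing the
    /// first row proof as both the chunk proof and the row proof.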
    fn da_blob_to_single_chunk(blob: &DaBlob) -> DaChunk {
        DaChunk {
            chunk: blob.column.0[0].clone(),
            chunk_proof: blob.rows_proofs[0].clone(),
            column_idx: 0,
            column_commitment: blob.column_commitment.clone(),
            aggregated_column_commitment: blob.aggregated_column_commitment.clone(),
            row_commitment: blob.rows_commitments[0].clone(),
            row_proofs: blob.rows_proofs[0].clone(),
        }
    }

    // This test is for informational purposes only; it doesn't assert anything,
    // hence the #[ignore].
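    // Run it manually with `cargo test test_sizes -- --ignored --nocapture`
    // to see the printed sizes.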
    #[ignore]
    #[test]
    fn test_sizes() {
        let sizes: &[usize] = &[
            4224,  // 128Kb / 31
            8456,  // 256Kb / 31
            16912, // 512Kb / 31
            33825, // 1024Kb / 31
        ];
        for size in sizes {
            let mut data = vec![0u8; 31 * size];
            thread_rng().fill_bytes(&mut data);
            println!("Data size: {}bytes", data.len());
            let blob = get_da_blob(data);
            let chunk = da_blob_to_single_chunk(&blob);
            println!("Column len: {}items", blob.column.len());
            println!("Column size: {}bytes", blob.column.as_bytes().len());
            let encoded = bincode::serialize(&blob).unwrap();
            println!(
                "Column:\n\tsample size: {}bytes, {}Kb",
                encoded.len(),
                encoded.len() / 1024
            );
            let encoded = bincode::serialize(&chunk).unwrap();
            println!(
                "Chunk:\n\tsample size: {}bytes, {}Kb",
                encoded.len(),
                encoded.len() / 1024
            );
        }
    }
}