add sample_cells bench

M Alghazwi 2024-10-18 12:01:01 +02:00
parent 9b1881f947
commit 3b80809fb4
5 changed files with 225 additions and 23 deletions

View File

@@ -29,3 +29,7 @@ harness = false
[[bench]]
name = "prove_cells"
harness = false
[[bench]]
name = "sample_cells"
harness = false

View File

@@ -0,0 +1,129 @@
use criterion::{criterion_group, criterion_main, Criterion};
use anyhow::Result;
use std::time::{Duration, Instant};
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig};
use plonky2::iop::witness::PartialWitness;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use codex_plonky2_circuits::circuits::params::TESTING_SLOT_INDEX;
use codex_plonky2_circuits::circuits::sample_cells::DatasetTreeCircuit;
macro_rules! pretty_print {
($($arg:tt)*) => {
print!("\x1b[0;36mINFO ===========>\x1b[0m ");
println!($($arg)*);
}
}
// Hash function used
type HF = PoseidonHash;
fn prepare_data<
F: RichField + Extendable<D> + Poseidon2,
C: GenericConfig<D, F = F>,
const D: usize,
H: Hasher<F> + AlgebraicHasher<F>,
>() -> Result<(
DatasetTreeCircuit<F, C, D, H>,
usize,
usize,
)> {
// Initialize the dataset tree with testing data
let mut dataset_t = DatasetTreeCircuit::<F,C,D,H>::new_for_testing();
let slot_index = TESTING_SLOT_INDEX;
let entropy = 123;
Ok((dataset_t, slot_index, entropy))
}
fn build_circuit<
F: RichField + Extendable<D> + Poseidon2,
C: GenericConfig<D, F = F>,
const D: usize,
H: Hasher<F> + AlgebraicHasher<F>,
>(
dataset_tree: &mut DatasetTreeCircuit<F, C, D, H>,
slot_index: usize,
entropy: usize,
// proofs: &[MerkleProof<F, H>],
) -> Result<(CircuitData<F, C, D>, PartialWitness<F>)>
{
// Create the circuit
let config = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(config);
let mut targets = dataset_tree.sample_slot_circuit(&mut builder);
// Create a PartialWitness
let mut pw = PartialWitness::new();
dataset_tree.sample_slot_assign_witness(&mut pw, &mut targets, slot_index, entropy);
// Build the circuit
let data = builder.build::<C>();
Ok((data, pw))
}
fn sampling_benchmark(c: &mut Criterion) {
let mut group = c.benchmark_group("Sampling Benchmark");
// Circuit parameters
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type H = PoseidonHash;
// Prepare the data that will be used in all steps
let (mut dataset_tree, slot_index, entropy) = prepare_data::<F, C, D, H>().unwrap();
// Benchmark the circuit building
group.bench_function("Single Cell Proof Build", |b| {
b.iter(|| {
build_circuit::<F, C, D, H>(&mut dataset_tree, slot_index, entropy).unwrap();
})
});
// Build the circuit
let (data, pw) = build_circuit::<F, C, D, H>(&mut dataset_tree, slot_index, entropy).unwrap();
pretty_print!(
"Circuit size: 2^{} gates",
data.common.degree_bits()
);
let start_time = Instant::now();
let _proof_with_pis = data.prove(pw.clone()).unwrap();
println!("prove_time = {:?}", start_time.elapsed());
// Benchmark the proving time
group.bench_function("Single Cell Proof Prove", |b| {
b.iter(|| {
let _proof_with_pis = data.prove(pw.clone()).unwrap();
})
});
// Generate the proof
let proof_with_pis = data.prove(pw.clone()).unwrap();
let verifier_data = data.verifier_data();
pretty_print!("Proof size: {} bytes", proof_with_pis.to_bytes().len());
// Benchmark the verification time
group.bench_function("Single Cell Proof Verify", |b| {
b.iter(|| {
verifier_data.verify(proof_with_pis.clone()).unwrap();
})
});
group.finish();
}
criterion_group!(name = benches;
config = Criterion::default().sample_size(10);
targets = sampling_benchmark);
criterion_main!(benches);
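With the matching [[bench]] entry added to Cargo.toml above (harness = false, so Criterion supplies the main function), the new benchmark should be runnable on its own with something like cargo bench --bench sample_cells, assuming the file lives under the crate's benches/ directory as sample_cells.rs.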

View File

@@ -4,15 +4,20 @@
use plonky2::hash::poseidon::PoseidonHash;
// constants and types used throughout the circuit
pub const N_FIELD_ELEMS_PER_CELL: usize = 4;
pub const N_FIELD_ELEMS_PER_CELL: usize = 256;
pub const BOT_DEPTH: usize = 5; // block depth - depth of the block merkle tree
pub const MAX_DEPTH: usize = 8; // depth of big tree (slot tree depth + block tree depth)
pub const MAX_DEPTH: usize = 16; // depth of big tree (slot tree depth + block tree depth)
pub const N_CELLS_IN_BLOCKS: usize = 1<<BOT_DEPTH; //2^BOT_DEPTH
pub const N_BLOCKS: usize = 1<<(MAX_DEPTH - BOT_DEPTH); // 2^(MAX_DEPTH - BOT_DEPTH)
pub const N_CELLS: usize = N_CELLS_IN_BLOCKS * N_BLOCKS;
pub const DATASET_DEPTH: usize = 2;
pub const N_SAMPLES: usize = 5;
// the index of the slot to be sampled;
// this is fixed to speed up creating the fake dataset,
// which would otherwise take a lot of time
pub const TESTING_SLOT_INDEX: usize = 2;
pub const DATASET_DEPTH: usize = 5;
pub const N_SAMPLES: usize = 10;
// hash function used. This is a hackish way of doing it because
// H::Hash is not consistent with HashOut<F>, which causes a lot of headaches
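With the updated constants, each block holds 2^BOT_DEPTH = 32 cells and a slot spans 2^(MAX_DEPTH - BOT_DEPTH) = 2048 blocks, so N_CELLS works out to 32 * 2048 = 65536 = 2^MAX_DEPTH cells per slot, each cell now carrying 256 field elements. A minimal sketch of these relationships as compile-time assertions, using only the names defined above (illustration only, not part of the commit):
// illustrative const assertions; assumes the constants above are in scope
const _: () = assert!(N_CELLS_IN_BLOCKS == 1 << BOT_DEPTH); // 2^5 = 32 cells per block
const _: () = assert!(N_BLOCKS == 1 << (MAX_DEPTH - BOT_DEPTH)); // 2^11 = 2048 blocks per slot
const _: () = assert!(N_CELLS == 1 << MAX_DEPTH); // 2^16 = 65536 cells per slot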

View File

@@ -91,23 +91,6 @@ impl<
.collect::<Vec<_>>();
// create slot tree
let slot_tree = MerkleTree::<F, H>::new(&block_roots, zero).unwrap();
// let mt =
// MerkleTree::<F,H>{
// tree: slot_tree,
// block_trees,
// cell_data,
// cell_hash: leaves,
// }
// create block circuits
// let block_circuits = block_trees.iter()
// .map(|b_tree| {
// // let start = i * N_CELLS_IN_BLOCKS;
// // let end = (i + 1) * N_CELLS_IN_BLOCKS;
// // Self::get_block_tree(&leaves[start..end].to_vec()) // use helper function
// MerkleTreeCircuit::<F,C,D,H>{ tree:b_tree.clone(), _phantom:Default::default()},
// })
// .collect::<Vec<_>>();
Self{
tree: MerkleTreeCircuit::<F,C,D,H>{ tree:slot_tree, _phantom:Default::default()},
@@ -124,7 +107,6 @@ impl<
const D: usize,
H: Hasher<F> + AlgebraicHasher<F>,
> SlotTreeCircuit<F,C,D, H> {
/// same as default but with supplied cell data
pub fn new(cell_data: Vec<Vec<F>>) -> Self{
let leaves: Vec<HashOut<F>> = cell_data

View File

@@ -28,7 +28,7 @@ use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::hash::hashing::PlonkyPermutation;
use crate::circuits::prove_single_cell::{SingleCellTargets, SlotTreeCircuit};
use crate::circuits::params::{MAX_DEPTH, BOT_DEPTH, N_FIELD_ELEMS_PER_CELL, N_CELLS_IN_BLOCKS, N_BLOCKS, N_CELLS, HF, DATASET_DEPTH, N_SAMPLES};
use crate::circuits::params::{MAX_DEPTH, BOT_DEPTH, N_FIELD_ELEMS_PER_CELL, N_CELLS_IN_BLOCKS, N_BLOCKS, N_CELLS, HF, DATASET_DEPTH, N_SAMPLES, TESTING_SLOT_INDEX};
use crate::circuits::safe_tree_circuit::{MerkleTreeCircuit, MerkleTreeTargets};
use crate::circuits::utils::{bits_le_padded_to_usize, calculate_cell_index_bits};
@@ -93,6 +93,45 @@ impl<
const D: usize,
H: Hasher<F> + AlgebraicHasher<F>,
> DatasetTreeCircuit<F,C,D,H> {
/// Dataset tree with fake data, for testing only.
/// Creates data only for the slot at TESTING_SLOT_INDEX (see the params file).
pub fn new_for_testing() -> Self {
let mut slot_trees = vec![];
let n_slots = 1<<DATASET_DEPTH;
// zero hash
let zero = HashOut {
elements: [F::ZERO; 4],
};
let zero_slot = SlotTreeCircuit::<F,C,D,H>{
tree: MerkleTreeCircuit {
tree: MerkleTree::<F,H>::new(&[zero.clone()], zero.clone()).unwrap(),
_phantom: Default::default(),
},
block_trees: vec![],
cell_data: vec![],
cell_hash: vec![],
};
for i in 0..n_slots {
if i == TESTING_SLOT_INDEX {
slot_trees.push(SlotTreeCircuit::<F, C, D, H>::default());
} else {
slot_trees.push(zero_slot.clone());
}
}
// get the roots of the slot trees
let slot_roots = slot_trees.iter()
.map(|t| {
t.tree.tree.root().unwrap()
})
.collect::<Vec<_>>();
let dataset_tree = MerkleTree::<F, H>::new(&slot_roots, zero).unwrap();
Self{
tree: MerkleTreeCircuit::<F,C,D,H>{ tree:dataset_tree, _phantom:Default::default()},
slot_trees,
}
}
/// same as default but with supplied slot trees
pub fn new(slot_trees: Vec<SlotTreeCircuit<F,C,D,H>>) -> Self{
// get the roots of the slot trees
@@ -297,4 +336,47 @@ mod tests {
Ok(())
}
#[test]
fn test_sample_cells_circuit_from_selected_slot() -> Result<()> {
let mut dataset_t = DatasetTreeCircuit::<F,C,D,H>::new_for_testing();
let slot_index = TESTING_SLOT_INDEX;
let entropy = 123;
// sanity check
let proof = dataset_t.sample_slot(slot_index,entropy);
let slot_root = dataset_t.slot_trees[slot_index].tree.tree.root().unwrap();
let res = dataset_t.verify_sampling(proof).unwrap();
assert_eq!(res, true);
// create the circuit
let config = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(config);
let mut targets = dataset_t.sample_slot_circuit(&mut builder);
// create a PartialWitness and assign
let mut pw = PartialWitness::new();
dataset_t.sample_slot_assign_witness(&mut pw, &mut targets, slot_index, entropy);
// build the circuit
let data = builder.build::<C>();
println!("circuit size = {:?}", data.common.degree_bits());
// Prove the circuit with the assigned witness
let start_time = Instant::now();
let proof_with_pis = data.prove(pw)?;
println!("prove_time = {:?}", start_time.elapsed());
// verify the proof
let verifier_data = data.verifier_data();
assert!(
verifier_data.verify(proof_with_pis).is_ok(),
"Merkle proof verification failed"
);
Ok(())
}
}
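As a usage note, the sanity check at the top of the test exercises sample_slot and verify_sampling outside the circuit before the in-circuit version is built and proved, so a failure there points at the tree construction rather than the circuit. The test should be runnable on its own with something like cargo test test_sample_cells_circuit_from_selected_slot -- --nocapture (the exact invocation depends on the crate layout); --nocapture shows the printed circuit size and prove time.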