add json support, proof-input, and refactor

parent cc14498c71
commit 67ff54c3f6
@@ -22,14 +22,3 @@ rand = "0.8.5"
 criterion = { version = "0.5.1", default-features = false }
 tynm = { version = "0.1.6", default-features = false }
 
-[[bench]]
-name = "safe_circuit"
-harness = false
-
-[[bench]]
-name = "prove_cells"
-harness = false
-
-[[bench]]
-name = "sample_cells"
-harness = false
@@ -1,152 +0,0 @@
-use criterion::{criterion_group, criterion_main, Criterion};
-use anyhow::Result;
-use std::time::{Duration, Instant};
-
-use codex_plonky2_circuits::{
-    merkle_tree::merkle_safe::MerkleProof,
-    circuits::merkle_circuit::MerkleTreeCircuit,
-};
-use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
-use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig};
-use plonky2::iop::witness::PartialWitness;
-use plonky2::hash::poseidon::PoseidonHash;
-use plonky2::field::extension::Extendable;
-use plonky2::hash::hash_types::RichField;
-use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
-use std::marker::PhantomData;
-use plonky2::plonk::circuit_builder::CircuitBuilder;
-use codex_plonky2_circuits::circuits::prove_single_cell::SlotTreeCircuit;
-
-macro_rules! pretty_print {
-    ($($arg:tt)*) => {
-        print!("\x1b[0;36mINFO ===========>\x1b[0m ");
-        println!($($arg)*);
-    }
-}
-
-// Hash function used
-type HF = PoseidonHash;
-
-fn prepare_data<
-    F: RichField + Extendable<D> + Poseidon2,
-    C: GenericConfig<D, F = F>,
-    const D: usize,
-    H: Hasher<F> + AlgebraicHasher<F>,
->(N: usize) -> Result<(
-    SlotTreeCircuit<F, C, D, H>,
-    Vec<usize>,
-    Vec<MerkleProof<F, H>>,
-)> {
-    // Initialize the slot tree with default data
-    let slot_tree = SlotTreeCircuit::<F, C, D, H>::default();
-
-    // Select N leaf indices to prove
-    let leaf_indices: Vec<usize> = (0..N).collect();
-
-    // Get the Merkle proofs for the selected leaves
-    let proofs: Vec<_> = leaf_indices
-        .iter()
-        .map(|&leaf_index| slot_tree.get_proof(leaf_index))
-        .collect();
-
-    Ok((slot_tree, leaf_indices, proofs))
-}
-
-fn build_circuit<
-    F: RichField + Extendable<D> + Poseidon2,
-    C: GenericConfig<D, F = F>,
-    const D: usize,
-    H: Hasher<F> + AlgebraicHasher<F>,
->(
-    slot_tree: &SlotTreeCircuit<F, C, D, H>,
-    leaf_indices: &[usize],
-    proofs: &[MerkleProof<F, H>],
-) -> Result<(CircuitData<F, C, D>, PartialWitness<F>)>
-{
-    // Create the circuit
-    let config = CircuitConfig::standard_recursion_config();
-    let mut builder = CircuitBuilder::<F, D>::new(config);
-
-    // Create a PartialWitness
-    let mut pw = PartialWitness::new();
-
-    // For each proof, create targets, add constraints, and assign witnesses
-    for (i, &leaf_index) in leaf_indices.iter().enumerate() {
-        // Build the circuit for each proof
-        let mut targets = SlotTreeCircuit::<F, C, D, H>::prove_single_cell(&mut builder);
-
-        // Assign witnesses for each proof
-        slot_tree.single_cell_assign_witness(
-            &mut pw,
-            &mut targets,
-            leaf_index,
-            &slot_tree.cell_data[leaf_index],
-            proofs[i].clone(),
-        )?;
-    }
-
-    // Build the circuit
-    let data = builder.build::<C>();
-
-    Ok((data, pw))
-}
-
-fn single_cell_proof_benchmark(c: &mut Criterion) {
-    let mut group = c.benchmark_group("Single Cell Proof Benchmark");
-
-    // Circuit parameters
-    const D: usize = 2;
-    type C = PoseidonGoldilocksConfig;
-    type F = <C as GenericConfig<D>>::F;
-    type H = PoseidonHash;
-
-    // Prepare the data that will be used in all steps
-    let N = 5; // Number of leaves to prove
-    let (slot_tree, leaf_indices, proofs) = prepare_data::<F, C, D, H>(N).unwrap();
-
-    // Benchmark the circuit building
-    group.bench_function("Single Cell Proof Build", |b| {
-        b.iter(|| {
-            build_circuit::<F, C, D, H>(&slot_tree, &leaf_indices, &proofs).unwrap();
-        })
-    });
-
-    // Build the circuit
-    let (data, pw) = build_circuit::<F, C, D, H>(&slot_tree, &leaf_indices, &proofs).unwrap();
-
-    pretty_print!(
-        "Circuit size: 2^{} gates",
-        data.common.degree_bits()
-    );
-
-    let start_time = Instant::now();
-    let proof_with_pis = data.prove(pw.clone()).unwrap();
-    println!("prove_time = {:?}", start_time.elapsed());
-
-    // Benchmark the proving time
-    group.bench_function("Single Cell Proof Prove", |b| {
-        b.iter(|| {
-            let _proof_with_pis = data.prove(pw.clone()).unwrap();
-        })
-    });
-
-    // Generate the proof
-    let proof_with_pis = data.prove(pw.clone()).unwrap();
-    let verifier_data = data.verifier_data();
-
-    pretty_print!("Proof size: {} bytes", proof_with_pis.to_bytes().len());
-
-    // Benchmark the verification time
-    group.bench_function("Single Cell Proof Verify", |b| {
-        b.iter(|| {
-            verifier_data.verify(proof_with_pis.clone()).unwrap();
-        })
-    });
-
-    group.finish();
-}
-
-criterion_group!(name = benches;
-    config = Criterion::default().sample_size(10);
-    targets = single_cell_proof_benchmark);
-criterion_main!(benches);
@@ -1,164 +0,0 @@
-use criterion::{criterion_group, criterion_main, Criterion};
-use anyhow::Result;
-
-use codex_plonky2_circuits::{merkle_tree::merkle_safe::MerkleTree, circuits::merkle_circuit::MerkleTreeCircuit};
-use plonky2::field::types::Field;
-use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
-use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig};
-use plonky2::iop::witness::PartialWitness;
-use plonky2::hash::hash_types::HashOut;
-use plonky2::hash::poseidon::PoseidonHash;
-use plonky2::field::extension::Extendable;
-use plonky2::hash::hash_types::RichField;
-use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
-use std::marker::PhantomData;
-use plonky2::plonk::circuit_builder::CircuitBuilder;
-use codex_plonky2_circuits::merkle_tree::merkle_safe::MerkleProof;
-
-macro_rules! pretty_print {
-    ($($arg:tt)*) => {
-        print!("\x1b[0;36mINFO ===========>\x1b[0m ");
-        println!($($arg)*);
-    }
-}
-
-fn prepare_data<F, H>(N: usize) -> Result<(
-    MerkleTree<F, H>,
-    Vec<HashOut<F>>,
-    Vec<usize>,
-    Vec<MerkleProof<F, H>>,
-    HashOut<F>,
-)>
-where
-    F: RichField + Extendable<2> + Poseidon2,
-    H: Hasher<F> + AlgebraicHasher<F> + Hasher<F>,
-{
-    // Total number of leaves in the Merkle tree
-    let nleaves = 1u64 << 16;
-
-    // Generate leaf data
-    let data = (0..nleaves)
-        .map(|i| F::from_canonical_u64(i as u64))
-        .collect::<Vec<_>>();
-
-    // Hash the data to obtain leaf hashes
-    let leaves: Vec<HashOut<F>> = data
-        .iter()
-        .map(|&element| {
-            PoseidonHash::hash_no_pad(&[element])
-        })
-        .collect();
-
-    let zero_hash = HashOut {
-        elements: [F::ZERO; 4],
-    };
-    let tree = MerkleTree::<F, H>::new(&leaves, zero_hash)?;
-
-    // Select N leaf indices to prove
-    let leaf_indices: Vec<usize> = (0..N).collect();
-
-    // Get the Merkle proofs for the selected leaves
-    let proofs: Vec<_> = leaf_indices
-        .iter()
-        .map(|&leaf_index| tree.get_proof(leaf_index))
-        .collect::<Result<Vec<_>, _>>()?;
-
-    // Expected Merkle root
-    let expected_root = tree.root()?;
-
-    Ok((tree, leaves, leaf_indices, proofs, expected_root))
-}
-
-fn build_circuit<F, C, const D: usize, H>(
-    tree: &MerkleTree<F, H>,
-    leaf_indices: &[usize],
-) -> Result<(CircuitData<F, C, D>, PartialWitness<F>)>
-where
-    F: RichField + Extendable<D> + Poseidon2,
-    C: GenericConfig<D, F = F>,
-    H: Hasher<F> + AlgebraicHasher<F> + Hasher<F>,
-{
-    // Create the circuit
-    let config = CircuitConfig::standard_recursion_config();
-    let mut builder = CircuitBuilder::<F, D>::new(config);
-
-    // Create a PartialWitness
-    let mut pw = PartialWitness::new();
-
-    // Initialize the circuit instance
-    let mut circuit_instance = MerkleTreeCircuit::<F, C, D, H> {
-        tree: tree.clone(),
-        _phantom: PhantomData,
-    };
-
-    // For each proof, create targets, add constraints, and assign witnesses
-    for &leaf_index in leaf_indices.iter() {
-        // Build the circuit for each proof
-        let (mut targets, _root) = circuit_instance.build_circuit(&mut builder);
-
-        // Assign witnesses for each proof
-        circuit_instance.assign_witness(&mut pw, &mut targets, leaf_index)?;
-    }
-
-    // Build the circuit
-    let data = builder.build::<C>();
-
-    Ok((data, pw))
-}
-
-fn merkle_proof_benchmark(c: &mut Criterion) {
-    let mut group = c.benchmark_group("Merkle Proof Benchmark");
-
-    // Circuit parameters
-    const D: usize = 2;
-    type C = PoseidonGoldilocksConfig;
-    type F = <C as GenericConfig<D>>::F;
-    type H = PoseidonHash;
-
-    // Prepare the data that will be used in all steps
-    let N = 5; // Number of leaves to prove
-    let (tree, _leaves, leaf_indices, _proofs, _expected_root) = prepare_data::<F, H>(N).unwrap();
-
-    // Benchmark the circuit building
-    group.bench_function("Merkle Proof Build", |b| {
-        b.iter(|| {
-            build_circuit::<F, C, D, H>(&tree, &leaf_indices).unwrap();
-        })
-    });
-
-    // Build the circuit once to get the data for the proving and verifying steps
-    let (data, pw) = build_circuit::<F, C, D, H>(&tree, &leaf_indices).unwrap();
-
-    pretty_print!(
-        "circuit size: 2^{} gates",
-        data.common.degree_bits()
-    );
-
-    // Benchmark the proving time
-    group.bench_function("Merkle Proof Prove", |b| {
-        b.iter(|| {
-            let _proof_with_pis = data.prove(pw.clone()).unwrap();
-        })
-    });
-
-    // Generate the proof once for verification
-    let proof_with_pis = data.prove(pw.clone()).unwrap();
-    let verifier_data = data.verifier_data();
-
-    pretty_print!("proof size: {}", proof_with_pis.to_bytes().len());
-
-    // Benchmark the verification time
-    group.bench_function("Merkle Proof Verify", |b| {
-        b.iter(|| {
-            verifier_data.verify(proof_with_pis.clone()).unwrap();
-        })
-    });
-
-    group.finish();
-}
-
-// criterion_group!(benches, merkle_proof_benchmark);
-criterion_group!(name = benches;
-    config = Criterion::default().sample_size(10);
-    targets = merkle_proof_benchmark);
-criterion_main!(benches);
@@ -1,129 +0,0 @@
-use criterion::{criterion_group, criterion_main, Criterion};
-use anyhow::Result;
-use std::time::{Duration, Instant};
-use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
-use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig};
-use plonky2::iop::witness::PartialWitness;
-use plonky2::hash::poseidon::PoseidonHash;
-use plonky2::field::extension::Extendable;
-use plonky2::hash::hash_types::RichField;
-use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
-use plonky2::plonk::circuit_builder::CircuitBuilder;
-use codex_plonky2_circuits::circuits::params::TESTING_SLOT_INDEX;
-use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
-
-macro_rules! pretty_print {
-    ($($arg:tt)*) => {
-        print!("\x1b[0;36mINFO ===========>\x1b[0m ");
-        println!($($arg)*);
-    }
-}
-
-// Hash function used
-type HF = PoseidonHash;
-
-fn prepare_data<
-    F: RichField + Extendable<D> + Poseidon2,
-    C: GenericConfig<D, F = F>,
-    const D: usize,
-    H: Hasher<F> + AlgebraicHasher<F>,
->() -> Result<(
-    SampleCircuit<F, C, D, H>,
-    usize,
-    usize,
-)> {
-    // Initialize the dataset tree with testing data
-    let mut dataset_t = SampleCircuit::<F, C, D, H>::new_for_testing();
-
-    let slot_index = TESTING_SLOT_INDEX;
-    let entropy = 123;
-
-    Ok((dataset_t, slot_index, entropy))
-}
-
-fn build_circuit<
-    F: RichField + Extendable<D> + Poseidon2,
-    C: GenericConfig<D, F = F>,
-    const D: usize,
-    H: Hasher<F> + AlgebraicHasher<F>,
->(
-    dataset_tree: &mut SampleCircuit<F, C, D, H>,
-    slot_index: usize,
-    entropy: usize,
-    // proofs: &[MerkleProof<F, H>],
-) -> Result<(CircuitData<F, C, D>, PartialWitness<F>)>
-{
-    // Create the circuit
-    let config = CircuitConfig::standard_recursion_config();
-    let mut builder = CircuitBuilder::<F, D>::new(config);
-
-    let mut targets = dataset_tree.sample_slot_circuit(&mut builder);
-
-    // Create a PartialWitness
-    let mut pw = PartialWitness::new();
-    dataset_tree.sample_slot_assign_witness(&mut pw, &mut targets, slot_index, entropy);
-
-    // Build the circuit
-    let data = builder.build::<C>();
-
-    Ok((data, pw))
-}
-
-fn sampling_benchmark(c: &mut Criterion) {
-    let mut group = c.benchmark_group("Sampling Benchmark");
-
-    // Circuit parameters
-    const D: usize = 2;
-    type C = PoseidonGoldilocksConfig;
-    type F = <C as GenericConfig<D>>::F;
-    type H = PoseidonHash;
-
-    // Prepare the data that will be used in all steps
-    let (mut dataset_tree, slot_index, entropy) = prepare_data::<F, C, D, H>().unwrap();
-
-    // Benchmark the circuit building
-    group.bench_function("Single Cell Proof Build", |b| {
-        b.iter(|| {
-            build_circuit::<F, C, D, H>(&mut dataset_tree, slot_index, entropy).unwrap();
-        })
-    });
-
-    // Build the circuit
-    let (data, pw) = build_circuit::<F, C, D, H>(&mut dataset_tree, slot_index, entropy).unwrap();
-
-    pretty_print!(
-        "Circuit size: 2^{} gates",
-        data.common.degree_bits()
-    );
-
-    let start_time = Instant::now();
-    let proof_with_pis = data.prove(pw.clone()).unwrap();
-    println!("prove_time = {:?}", start_time.elapsed());
-
-    // Benchmark the proving time
-    group.bench_function("Single Cell Proof Prove", |b| {
-        b.iter(|| {
-            let _proof_with_pis = data.prove(pw.clone()).unwrap();
-        })
-    });
-
-    // Generate the proof
-    let proof_with_pis = data.prove(pw.clone()).unwrap();
-    let verifier_data = data.verifier_data();
-
-    pretty_print!("Proof size: {} bytes", proof_with_pis.to_bytes().len());
-
-    // Benchmark the verification time
-    group.bench_function("Single Cell Proof Verify", |b| {
-        b.iter(|| {
-            verifier_data.verify(proof_with_pis.clone()).unwrap();
-        })
-    });
-
-    group.finish();
-}
-
-criterion_group!(name = benches;
-    config = Criterion::default().sample_size(10);
-    targets = sampling_benchmark);
-criterion_main!(benches);
@@ -7,10 +7,10 @@ use plonky2_field::extension::Extendable;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 
 /// Compression function which takes two 256 bit inputs (HashOut) and u64 key (which is converted to field element in the function)
-/// and returns a 256 bit output (HashOut).
+/// and returns a 256 bit output (HashOut / 4 Goldilocks field elems).
 pub fn key_compress<
-    F: RichField, //+ Extendable<D> + Poseidon2,
-    // const D: usize,
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
     H: Hasher<F>
 >(x: HashOut<F>, y: HashOut<F>, key: u64) -> HashOut<F> {
 
@@ -58,7 +58,7 @@ pub fn key_compress_circuit<
 
 #[cfg(test)]
 mod tests {
-    use plonky2::hash::poseidon::PoseidonHash;
+    // use plonky2::hash::poseidon::PoseidonHash;
     use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
     use plonky2_field::types::Field;
     use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
@@ -66,7 +66,7 @@ mod tests {
     // types
     pub const D: usize = 2;
    pub type C = PoseidonGoldilocksConfig;
-    pub type F = <C as GenericConfig<D>>::F; // this is the goldilocks field
+    pub type F = <C as GenericConfig<D>>::F;
     pub type H = Poseidon2Hash;
 
     /// tests the non-circuit key_compress with concrete cases
@@ -128,7 +128,7 @@ mod tests {
 
         // Iterate over each key and test key_compress output
         for (key, &expected) in expected_outputs.iter().enumerate() {
-            let output = key_compress::<F, H>(inp1, inp2, key as u64);
+            let output = key_compress::<F, D, H>(inp1, inp2, key as u64);
 
             // Assert that output matches the expected result
            assert_eq!(output.elements, expected, "Output mismatch for key: {}", key);
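
For reference, a minimal sketch of calling the refactored key_compress outside a circuit, using the test types above. This is not part of the commit; the leaf values and the key below are illustrative only.

use plonky2::hash::hash_types::HashOut;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2_field::types::Field;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
use codex_plonky2_circuits::circuits::keyed_compress::key_compress;

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;

fn main() {
    // Two 256-bit inputs, each four Goldilocks field elements.
    let x = HashOut { elements: [F::ONE; 4] };
    let y = HashOut { elements: [F::TWO; 4] };
    // The extension degree D is now an explicit const generic parameter.
    let digest = key_compress::<F, D, Poseidon2Hash>(x, y, 1); // key value is illustrative
    println!("{:?}", digest.elements);
}
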
@@ -1,7 +1,6 @@
-// pub mod capped_tree_circuit;
 pub mod merkle_circuit;
-// pub mod prove_single_cell;
 pub mod sample_cells;
 pub mod utils;
 pub mod params;
-pub mod keyed_compress;
+pub mod keyed_compress;
+pub mod sponge;
@@ -6,7 +6,7 @@ use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
 // hash function used. this is hackish way of doing it because
 // H::Hash is not consistent with HashOut<F> and causing a lot of headache
 // will look into this later.
-pub type HF = PoseidonHash;
+pub type HF = Poseidon2Hash;
 
 // params used for the circuits
 // should be defined prior to building the circuit
@@ -17,6 +17,7 @@ use plonky2::hash::hashing::PlonkyPermutation;
 use crate::circuits::params::{CircuitParams, HF};
 
 use crate::circuits::merkle_circuit::{MerkleProofTarget, MerkleTreeCircuit, MerkleTreeTargets};
+use crate::circuits::sponge::hash_n_with_padding;
 use crate::circuits::utils::assign_hash_out_targets;
 
 /// circuit for sampling a slot in a dataset merkle tree
@@ -61,7 +62,7 @@ pub struct SampleTargets {
 }
 
 /// circuit input as field elements
-#[derive(Debug, PartialEq)]
+#[derive(Clone, Debug, PartialEq)]
 pub struct SampleCircuitInput<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
@@ -189,7 +190,7 @@ impl<
         let slot_last_index = builder.sub(n_cells_per_slot, one);
 
         // create the mask bits
-        // TODO: reuse this for block and slot trees
+        // TODO: re-use this for block and slot trees
         let mask_bits = builder.split_le(slot_last_index, max_depth);
 
         // last and mask bits for block tree
@@ -210,7 +211,8 @@ impl<
             // hash the cell data
             let mut hash_inputs: Vec<Target> = Vec::new();
             hash_inputs.extend_from_slice(&data_i);
-            let data_i_hash = builder.hash_n_to_hash_no_pad::<HF>(hash_inputs);
+            // let data_i_hash = builder.hash_n_to_hash_no_pad::<HF>(hash_inputs);
+            let data_i_hash = hash_n_with_padding::<F, D, HF>(builder, hash_inputs);
             // make the counter into hash digest
             let ctr_target = builder.constant(F::from_canonical_u64((i + 1) as u64));
             let mut ctr = builder.add_virtual_hash();
@@ -293,7 +295,8 @@ impl<
         hash_inputs.extend_from_slice(&entropy.elements);
         hash_inputs.extend_from_slice(&slot_root.elements);
         hash_inputs.extend_from_slice(&ctr.elements);
-        let hash_out = builder.hash_n_to_hash_no_pad::<HF>(hash_inputs);
+        // let hash_out = builder.hash_n_to_hash_no_pad::<HF>(hash_inputs);
+        let hash_out = hash_n_with_padding::<F, D, HF>(builder, hash_inputs);
         let cell_index_bits = builder.low_bits(hash_out.elements[0], self.params.max_depth, 64);
 
         let mut masked_cell_index_bits = vec![];
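
The two replaced lines above derive the sampled cell index in-circuit from the padded sponge. As a sketch of what this computes (H is the padded Poseidon2 sponge, bit_j takes the j-th low bit of the digest's first limb, and mask_j are the mask bits built earlier):

    cell\_index = \sum_{j=0}^{max\_depth - 1} \mathrm{bit}_j\big(H(entropy \,\|\, slot\_root \,\|\, ctr)_0\big) \cdot mask_j \cdot 2^j
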
@@ -0,0 +1,103 @@
+use plonky2::hash::hash_types::{HashOutTarget, NUM_HASH_OUT_ELTS, RichField};
+use plonky2::hash::hashing::PlonkyPermutation;
+use plonky2::iop::target::Target;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use plonky2::plonk::config::AlgebraicHasher;
+use plonky2_field::extension::Extendable;
+use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
+
+/// hash n targets (field elements) into hash digest / HashOutTarget (4 Goldilocks field elements)
+/// this function uses the 10* padding
+pub fn hash_n_with_padding<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+    H: AlgebraicHasher<F>
+>(
+    builder: &mut CircuitBuilder<F, D>,
+    inputs: Vec<Target>,
+) -> HashOutTarget {
+    HashOutTarget::from_vec(hash_n_to_m_with_padding::<F, D, H>(builder, inputs, NUM_HASH_OUT_ELTS))
+}
+
+pub fn hash_n_to_m_with_padding<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+    H: AlgebraicHasher<F>
+>(
+    builder: &mut CircuitBuilder<F, D>,
+    inputs: Vec<Target>,
+    num_outputs: usize,
+) -> Vec<Target> {
+    let rate = H::AlgebraicPermutation::RATE;
+    let width = H::AlgebraicPermutation::WIDTH; // rate + capacity
+    let zero = builder.zero();
+    let one = builder.one();
+    let mut state = H::AlgebraicPermutation::new(core::iter::repeat(zero).take(width));
+
+    // Set the domain separator at index 8
+    let dom_sep_value = rate as u64 + 256 * 12 + 65536 * 63;
+    let dom_sep = builder.constant(F::from_canonical_u64(dom_sep_value));
+    state.set_elt(dom_sep, 8);
+
+    let n = inputs.len();
+    let num_chunks = (n + rate) / rate; // 10* padding
+    let mut input_iter = inputs.iter();
+
+    // Process the first (num_chunks - 1) chunks
+    for _ in 0..(num_chunks - 1) {
+        let mut chunk = Vec::with_capacity(rate);
+        for _ in 0..rate {
+            if let Some(&input) = input_iter.next() {
+                chunk.push(input);
+            } else {
+                chunk.push(zero); // Should not happen, but pad zeros if necessary
+            }
+        }
+        // Add the chunk to the state
+        for j in 0..rate {
+            state.set_elt(builder.add(state.as_ref()[j], chunk[j]), j);
+        }
+        // Apply permutation
+        state = builder.permute::<H>(state);
+    }
+
+    // Process the last chunk with 10* padding
+    let rem = num_chunks * rate - n; // 0 < rem <= rate
+    let ofs = rate - rem; // Offset where padding starts
+
+    let mut last_chunk = Vec::with_capacity(rate);
+    for _ in 0..ofs {
+        if let Some(&input) = input_iter.next() {
+            last_chunk.push(input);
+        } else {
+            last_chunk.push(zero); // Pad zeros if no more inputs
+        }
+    }
+
+    // Add the '1' padding bit
+    last_chunk.push(one);
+
+    // Pad zeros to reach the full rate
+    while last_chunk.len() < rate {
+        last_chunk.push(zero);
+    }
+
+    // Add the last chunk to the state
+    for j in 0..rate {
+        state.set_elt(builder.add(state.as_ref()[j], last_chunk[j]), j);
+    }
+    // Apply permutation
+    state = builder.permute::<H>(state);
+
+    // Squeeze until we have the desired number of outputs
+    let mut outputs = Vec::with_capacity(num_outputs);
+    loop {
+        for &s in state.squeeze() {
+            outputs.push(s);
+            if outputs.len() == num_outputs {
+                return outputs;
+            }
+        }
+        state = builder.permute::<H>(state);
+    }
+}
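
As a usage sketch (not part of the commit): driving the new sponge from a circuit, mirroring how sample_cells now hashes cell data. It assumes Poseidon2Hash implements AlgebraicHasher for the Goldilocks field, as the imports elsewhere in this diff suggest; the input values are illustrative.

use plonky2::field::types::Field;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
use codex_plonky2_circuits::circuits::sponge::hash_n_with_padding;

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;

fn main() -> anyhow::Result<()> {
    let config = CircuitConfig::standard_recursion_config();
    let mut builder = CircuitBuilder::<F, D>::new(config);

    // Virtual targets standing in for one cell's field elements.
    let inputs = builder.add_virtual_targets(5);
    // Absorb with 10* padding; yields a HashOutTarget (4 elements).
    let digest = hash_n_with_padding::<F, D, Poseidon2Hash>(&mut builder, inputs.clone());
    builder.register_public_inputs(&digest.elements);
    let data = builder.build::<C>();

    // Assign witness values and check the circuit end to end.
    let mut pw = PartialWitness::new();
    for (i, &t) in inputs.iter().enumerate() {
        pw.set_target(t, F::from_canonical_u64(i as u64));
    }
    let proof = data.prove(pw)?;
    data.verify(proof)
}
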
@@ -13,7 +13,7 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
 // --------- helper functions ---------
 
 /// Converts an index to a vector of bits (LSB first) with padding.
-pub(crate) fn usize_to_bits_le_padded(index: usize, bit_length: usize) -> Vec<bool> {
+pub fn usize_to_bits_le_padded(index: usize, bit_length: usize) -> Vec<bool> {
     let mut bits = Vec::with_capacity(bit_length);
     for i in 0..bit_length {
         bits.push(((index >> i) & 1) == 1);
@@ -24,81 +24,9 @@ pub(crate) fn usize_to_bits_le_padded(index: usize, bit_length: usize) -> Vec<bo
     }
     bits
 }
-/// calculate the sampled cell index from entropy, slot root, and counter
-/// this is the non-circuit version for testing
-pub(crate) fn calculate_cell_index_bits<F: RichField>(entropy: &Vec<F>, slot_root: HashOut<F>, ctr: usize, depth: usize, mask_bits: Vec<bool>) -> Vec<bool> {
-    let ctr_field = F::from_canonical_u64(ctr as u64);
-    let mut ctr_as_digest = HashOut::<F>::ZERO;
-    ctr_as_digest.elements[0] = ctr_field;
-    let mut hash_inputs = Vec::new();
-    hash_inputs.extend_from_slice(&entropy);
-    hash_inputs.extend_from_slice(&slot_root.elements);
-    hash_inputs.extend_from_slice(&ctr_as_digest.elements);
-    let hash_output = HF::hash_no_pad(&hash_inputs);
-    let cell_index_bytes = hash_output.elements[0].to_canonical_u64();
-
-    let cell_index_bits = usize_to_bits_le_padded(cell_index_bytes as usize, depth);
-
-    let mut masked_cell_index_bits = vec![];
-
-    for i in 0..depth {
-        masked_cell_index_bits.push(cell_index_bits[i] && mask_bits[i]);
-    }
-
-    masked_cell_index_bits
-}
-
-pub(crate) fn take_n_bits_from_bytes(bytes: &[u8], n: usize) -> Vec<bool> {
-    bytes.iter()
-        .flat_map(|byte| (0..8u8).map(move |i| (byte >> i) & 1 == 1))
-        .take(n)
-        .collect()
-}
-
-/// Converts a vector of bits (LSB first) into an index (usize).
-pub(crate) fn bits_le_padded_to_usize(bits: &[bool]) -> usize {
-    bits.iter().enumerate().fold(0usize, |acc, (i, &bit)| {
-        if bit {
-            acc | (1 << i)
-        } else {
-            acc
-        }
-    })
-}
-
-/// prove given the circuit data and partial witness
-pub fn prove<
-    F: RichField + Extendable<D> + Poseidon2,
-    C: GenericConfig<D, F = F>,
-    const D: usize,
-    H: Hasher<F> + AlgebraicHasher<F>,
->(
-    data: CircuitData<F, C, D>,
-    pw: PartialWitness<F>
-) -> Result<ProofWithPublicInputs<F, C, D>> {
-    let proof = data.prove(pw);
-    return proof
-}
-
-/// verify given verifier data, public input, and proof
-pub fn verify<
-    F: RichField + Extendable<D> + Poseidon2,
-    C: GenericConfig<D, F = F>,
-    const D: usize,
-    H: Hasher<F> + AlgebraicHasher<F>,
->(
-    verifier_data: &VerifierCircuitData<F, C, D>,
-    public_inputs: Vec<F>,
-    proof: Proof<F, C, D>
-) -> Result<()> {
-    verifier_data.verify(ProofWithPublicInputs {
-        proof,
-        public_inputs,
-    })
-}
-
 /// assign a vec of bool values to a vec of BoolTargets
-pub(crate) fn assign_bool_targets<
+pub fn assign_bool_targets<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
 >(
@@ -112,7 +40,7 @@ pub(crate) fn assign_bool_targets<
 }
 
 /// assign a vec of field elems to hash out target elements
-pub(crate) fn assign_hash_out_targets<
+pub fn assign_hash_out_targets<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
 >(
@@ -126,7 +54,7 @@ pub(crate) fn assign_hash_out_targets<
 }
 
 /// helper fn to multiply a HashOutTarget by a Target
-pub(crate) fn mul_hash_out_target<
+pub fn mul_hash_out_target<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
 >(builder: &mut CircuitBuilder<F, D>, t: &Target, hash_target: &mut HashOutTarget) -> HashOutTarget {
@@ -138,7 +66,7 @@ pub(crate) fn mul_hash_out_target<
 }
 
 /// helper fn to add AND assign a HashOutTarget (hot) to a mutable HashOutTarget (mut_hot)
-pub(crate) fn add_assign_hash_out_target<
+pub fn add_assign_hash_out_target<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
 >(builder: &mut CircuitBuilder<F, D>, mut_hot: &mut HashOutTarget, hot: &HashOutTarget) {
@@ -1,4 +1,2 @@
 pub mod circuits;
 pub mod merkle_tree;
-// pub mod proof_input;
-pub mod tests;
@@ -9,7 +9,9 @@ use plonky2::hash::hash_types::{HashOut, RichField};
 use plonky2::hash::poseidon::PoseidonHash;
 use plonky2::plonk::config::Hasher;
 use std::ops::Shr;
+use plonky2_field::extension::Extendable;
 use plonky2_field::types::Field;
+use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 use crate::circuits::keyed_compress::key_compress;
 use crate::circuits::params::HF;
 
@@ -21,18 +23,24 @@ pub const KEY_ODD_AND_BOTTOM_LAYER: u64 = 0x3;
 
 /// Merkle tree struct, containing the layers, compression function, and zero hash.
 #[derive(Clone)]
-pub struct MerkleTree<F: RichField> {
+pub struct MerkleTree<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+> {
     pub layers: Vec<Vec<HashOut<F>>>,
     pub zero: HashOut<F>,
 }
 
-impl<F: RichField> MerkleTree<F> {
+impl<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+> MerkleTree<F, D> {
     /// Constructs a new Merkle tree from the given leaves.
     pub fn new(
         leaves: &[HashOut<F>],
         zero: HashOut<F>,
     ) -> Result<Self> {
-        let layers = merkle_tree_worker::<F>(leaves, zero, true)?;
+        let layers = merkle_tree_worker::<F, D>(leaves, zero, true)?;
         Ok(Self {
             layers,
             zero,
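
A construction sketch with the new tree signature (not from the commit; it assumes the proof's verify helper keeps the (leaf, root) argument order used elsewhere in this diff, and the leaves are illustrative):

use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::HashOut;
use codex_plonky2_circuits::merkle_tree::merkle_safe::MerkleTree;

type F = GoldilocksField;
const D: usize = 2;

fn main() -> anyhow::Result<()> {
    let zero = HashOut { elements: [F::ZERO; 4] };
    // Illustrative leaves; in the crate these are Poseidon2 hashes of cell data.
    let leaves: Vec<HashOut<F>> = (0..8u64)
        .map(|i| HashOut { elements: [F::from_canonical_u64(i), F::ZERO, F::ZERO, F::ZERO] })
        .collect();
    // The extension degree D is now threaded through the tree and proof types.
    let tree = MerkleTree::<F, D>::new(&leaves, zero)?;
    let proof = tree.get_proof(3)?;
    assert!(proof.verify(leaves[3], tree.root()?)?);
    Ok(())
}
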
@@ -57,7 +65,7 @@ impl<F: RichField> MerkleTree<F> {
     }
 
     /// Generates a Merkle proof for a given leaf index.
-    pub fn get_proof(&self, index: usize) -> Result<MerkleProof<F>> {
+    pub fn get_proof(&self, index: usize) -> Result<MerkleProof<F, D>> {
         let depth = self.depth();
         let nleaves = self.leaves_count();
 
@@ -89,7 +97,10 @@ impl<F: RichField> MerkleTree<F> {
 }
 
 /// Build the Merkle tree layers.
-fn merkle_tree_worker<F: RichField>(
+fn merkle_tree_worker<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+>(
     xs: &[HashOut<F>],
     zero: HashOut<F>,
     is_bottom_layer: bool,
@@ -107,7 +118,7 @@ fn merkle_tree_worker<F: RichField>(
 
     for i in 0..halfn {
         let key = if is_bottom_layer { KEY_BOTTOM_LAYER } else { KEY_NONE };
-        let h = key_compress::<F, HF>(xs[2 * i], xs[2 * i + 1], key);
+        let h = key_compress::<F, D, HF>(xs[2 * i], xs[2 * i + 1], key);
         ys.push(h);
     }
 
@@ -117,12 +128,12 @@ fn merkle_tree_worker<F: RichField>(
         } else {
             KEY_ODD
         };
-        let h = key_compress::<F, HF>(xs[n], zero, key);
+        let h = key_compress::<F, D, HF>(xs[n], zero, key);
         ys.push(h);
     }
 
     let mut layers = vec![xs.to_vec()];
-    let mut upper_layers = merkle_tree_worker::<F>(&ys, zero, false)?;
+    let mut upper_layers = merkle_tree_worker::<F, D>(&ys, zero, false)?;
     layers.append(&mut upper_layers);
 
     Ok(layers)
@@ -130,14 +141,20 @@ fn merkle_tree_worker<F: RichField>(
 
 /// Merkle proof struct, containing the index, path, and other necessary data.
 #[derive(Clone)]
-pub struct MerkleProof<F: RichField> {
+pub struct MerkleProof<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+> {
     pub index: usize,          // Index of the leaf
     pub path: Vec<HashOut<F>>, // Sibling hashes from the leaf to the root
     pub nleaves: usize,        // Total number of leaves
     pub zero: HashOut<F>,
 }
 
-impl<F: RichField> MerkleProof<F> {
+impl<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+> MerkleProof<F, D> {
     /// Reconstructs the root hash from the proof and the given leaf.
     pub fn reconstruct_root(&self, leaf: HashOut<F>) -> Result<HashOut<F>> {
         let mut m = self.nleaves;
@@ -149,14 +166,14 @@ impl<F: RichField> MerkleProof<F> {
             let odd_index = (j & 1) != 0;
             if odd_index {
                 // The index of the child is odd
-                h = key_compress::<F, HF>(*p, h, bottom_flag);
+                h = key_compress::<F, D, HF>(*p, h, bottom_flag);
             } else {
                 if j == m - 1 {
                     // Single child -> so odd node
-                    h = key_compress::<F, HF>(h, *p, bottom_flag + 2);
+                    h = key_compress::<F, D, HF>(h, *p, bottom_flag + 2);
                 } else {
                     // Even node
-                    h = key_compress::<F, HF>(h, *p, bottom_flag);
+                    h = key_compress::<F, D, HF>(h, *p, bottom_flag);
                 }
             }
             bottom_flag = KEY_NONE;
@@ -169,10 +186,11 @@ impl<F: RichField> MerkleProof<F> {
 
     /// reconstruct the root using path_bits and last_bits in similar way as the circuit
     /// this is used for testing - sanity check
-    pub fn reconstruct_root2(leaf: HashOut<F>, path_bits: Vec<bool>, last_bits: Vec<bool>, path: Vec<HashOut<F>>) -> Result<HashOut<F>> {
+    pub fn reconstruct_root2(leaf: HashOut<F>, path_bits: Vec<bool>, last_bits: Vec<bool>, path: Vec<HashOut<F>>, mask_bits: Vec<bool>, depth: usize) -> Result<HashOut<F>> {
         let is_last = compute_is_last(path_bits.clone(), last_bits);
 
-        let mut h = leaf;
+        let mut h = vec![];
+        h.push(leaf);
         let mut i = 0;
 
         for p in &path {
@@ -187,14 +205,23 @@ impl<F: RichField> MerkleProof<F> {
             let key = bottom + (2 * (odd as u64));
             let odd_index = path_bits[i];
             if odd_index {
-                h = key_compress::<F, HF>(*p, h, key);
+                h.push(key_compress::<F, D, HF>(*p, h[i], key));
             } else {
-                h = key_compress::<F, HF>(h, *p, key);
+                h.push(key_compress::<F, D, HF>(h[i], *p, key));
             }
             i += 1;
         }
 
-        Ok(h)
+        let mut reconstructed_root = HashOut::<F>::ZERO;
+        for k in 0..depth {
+            let diff = (mask_bits[k] as u64) - (mask_bits[k + 1] as u64);
+            let mul_res: Vec<F> = h[k + 1].elements.iter().map(|e| e.mul(F::from_canonical_u64(diff))).collect();
+            reconstructed_root = HashOut::<F>::from_vec(
+                mul_res.iter().zip(reconstructed_root.elements).map(|(e1, e2)| e1.add(e2)).collect()
+            );
+        }
+
+        Ok(reconstructed_root)
     }
 
     /// Verifies the proof against a given root and leaf.
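
The replacement tail of reconstruct_root2 keeps every intermediate hash h[k] and then selects the one at the tree's true depth: for power-of-two-sized trees, mask_bits is all ones up to the true depth and zero above it, so exactly one difference term in the loop survives. In effect the loop evaluates:

    root = \sum_{k=0}^{depth - 1} (mask_k - mask_{k+1}) \cdot h_{k+1}
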
@@ -229,6 +256,7 @@ mod tests {
 
     // types used in all tests
     type F = GoldilocksField;
+    const D: usize = 2;
     type H = PoseidonHash;
 
     fn compress(
@@ -236,13 +264,13 @@ mod tests {
         y: HashOut<F>,
         key: u64,
     ) -> HashOut<F> {
-        key_compress::<F, HF>(x, y, key)
+        key_compress::<F, D, HF>(x, y, key)
     }
 
     fn make_tree(
         data: &[F],
         zero: HashOut<F>,
-    ) -> Result<MerkleTree<F>> {
+    ) -> Result<MerkleTree<F, D>> {
         // Hash the data to obtain leaf hashes
         let leaves: Vec<HashOut<GoldilocksField>> = data
             .iter()
@@ -252,7 +280,7 @@ mod tests {
             })
             .collect();
 
-        MerkleTree::<F>::new(&leaves, zero)
+        MerkleTree::<F, D>::new(&leaves, zero)
     }
 
     #[test]
@@ -275,7 +303,7 @@ mod tests {
         };
 
         // Build the Merkle tree
-        let tree = MerkleTree::<F>::new(&leaves, zero)?;
+        let tree = MerkleTree::<F, D>::new(&leaves, zero)?;
 
         // Get the root
         let root = tree.root()?;
@@ -507,7 +535,7 @@ mod tests {
         };
 
         // Build the tree
-        let tree = MerkleTree::<F>::new(&leaf_hashes, zero)?;
+        let tree = MerkleTree::<F, D>::new(&leaf_hashes, zero)?;
 
         // Get the root
         let expected_root = tree.root()?;
@@ -1,441 +0,0 @@
-use plonky2::hash::hash_types::{HashOut, RichField};
-use plonky2::plonk::config::{GenericConfig, Hasher};
-use plonky2_field::extension::Extendable;
-use plonky2_field::types::Field;
-use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
-use crate::circuits::params::HF;
-use crate::proof_input::test_params::Params;
-use crate::circuits::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le_padded};
-use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
-use crate::circuits::sample_cells::Cell;
-
-// #[derive(Clone)]
-// pub struct Cell<
-//     F: RichField + Extendable<D> + Poseidon2,
-//     const D: usize,
-// > {
-//     pub data: Vec<F>, // cell data as field elements
-// }
-
-// impl<
-//     F: RichField + Extendable<D> + Poseidon2,
-//     const D: usize,
-// > Cell<F, D> {
-/// Create a new cell with random data, using the parameters from `Params`
-pub fn new_random_cell<
-    F: RichField + Extendable<D> + Poseidon2,
-    const D: usize,
->(params: &Params) -> Cell<F, D> {
-    let data = (0..params.n_field_elems_per_cell())
-        .map(|_| F::rand())
-        .collect::<Vec<_>>();
-    Cell::<F, D> {
-        data,
-    }
-}
-// }
-
-#[derive(Clone)]
-pub struct SlotTree<
-    F: RichField + Extendable<D> + Poseidon2,
-    const D: usize,
-> {
-    pub tree: MerkleTree<F>,             // slot tree
-    pub block_trees: Vec<MerkleTree<F>>, // vec of block trees
-    pub cell_data: Vec<Cell<F, D>>,      // cell data as field elements
-    pub params: Params,                  // parameters
-}
-
-impl<
-    F: RichField + Extendable<D> + Poseidon2,
-    const D: usize,
-> SlotTree<F, D> {
-    /// Create a slot tree with fake data, for testing only
-    pub fn new_default(params: &Params) -> Self {
-        // generate fake cell data
-        let cell_data = (0..params.n_cells)
-            .map(|_| new_random_cell(params))
-            .collect::<Vec<_>>();
-        Self::new(cell_data, params.clone())
-    }
-
-    /// Create a new slot tree with the supplied cell data and parameters
-    pub fn new(cells: Vec<Cell<F, D>>, params: Params) -> Self {
-        let leaves: Vec<HashOut<F>> = cells
-            .iter()
-            .map(|element| HF::hash_no_pad(&element.data))
-            .collect();
-        let zero = HashOut {
-            elements: [F::ZERO; 4],
-        };
-        let n_blocks = params.n_blocks_test();
-        let n_cells_in_blocks = params.n_cells_in_blocks();
-
-        let block_trees = (0..n_blocks)
-            .map(|i| {
-                let start = i * n_cells_in_blocks;
-                let end = (i + 1) * n_cells_in_blocks;
-                Self::get_block_tree(&leaves[start..end].to_vec())
-            })
-            .collect::<Vec<_>>();
-        let block_roots = block_trees
-            .iter()
-            .map(|t| t.root().unwrap())
-            .collect::<Vec<_>>();
-        let slot_tree = MerkleTree::<F>::new(&block_roots, zero).unwrap();
-        Self {
-            tree: slot_tree,
-            block_trees,
-            cell_data: cells,
-            params,
-        }
-    }
-
-    /// Generates a proof for the given leaf index
-    /// The path in the proof is a combined block and slot path to make up the full path
-    pub fn get_proof(&self, index: usize) -> MerkleProof<F> {
-        let block_index = index / self.params.n_cells_in_blocks();
-        let leaf_index = index % self.params.n_cells_in_blocks();
-        let block_proof = self.block_trees[block_index].get_proof(leaf_index).unwrap();
-        let slot_proof = self.tree.get_proof(block_index).unwrap();
-
-        // Combine the paths from the block and slot proofs
-        let mut combined_path = block_proof.path.clone();
-        combined_path.extend(slot_proof.path.clone());
-
-        MerkleProof::<F> {
-            index,
-            path: combined_path,
-            nleaves: self.cell_data.len(),
-            zero: block_proof.zero.clone(),
-        }
-    }
-
-    /// Verify the given proof for slot tree, checks equality with the given root
-    pub fn verify_cell_proof(&self, proof: MerkleProof<F>, root: HashOut<F>) -> anyhow::Result<bool> {
-        let mut block_path_bits = usize_to_bits_le_padded(proof.index, self.params.max_depth);
-        let last_index = self.params.n_cells - 1;
-        let mut block_last_bits = usize_to_bits_le_padded(last_index, self.params.max_depth);
-
-        let split_point = self.params.bot_depth();
-
-        let slot_last_bits = block_last_bits.split_off(split_point);
-        let slot_path_bits = block_path_bits.split_off(split_point);
-
-        let leaf_hash = HF::hash_no_pad(&self.cell_data[proof.index].data);
-
-        let mut block_path = proof.path;
-        let slot_path = block_path.split_off(split_point);
-
-        let block_res = MerkleProof::<F>::reconstruct_root2(
-            leaf_hash,
-            block_path_bits.clone(),
-            block_last_bits.clone(),
-            block_path,
-        );
-        let reconstructed_root = MerkleProof::<F>::reconstruct_root2(
-            block_res.unwrap(),
-            slot_path_bits,
-            slot_last_bits,
-            slot_path,
-        );
-
-        Ok(reconstructed_root.unwrap() == root)
-    }
-
-    fn get_block_tree(leaves: &Vec<HashOut<F>>) -> MerkleTree<F> {
-        let zero = HashOut {
-            elements: [F::ZERO; 4],
-        };
-        // Build the Merkle tree
-        let block_tree = MerkleTree::<F>::new(leaves, zero).unwrap();
-        block_tree
-    }
-}
-
-// ------ Dataset Tree --------
-/// Dataset tree containing all slot trees
-#[derive(Clone)]
-pub struct DatasetTree<
-    F: RichField + Extendable<D> + Poseidon2,
-    const D: usize,
-> {
-    pub tree: MerkleTree<F>,             // dataset tree
-    pub slot_trees: Vec<SlotTree<F, D>>, // vec of slot trees
-    pub params: Params,                  // parameters
-}
-
-/// Dataset Merkle proof struct, containing the dataset proof and sampled proofs.
-#[derive(Clone)]
-pub struct DatasetProof<
-    F: RichField + Extendable<D> + Poseidon2,
-    const D: usize,
-> {
-    pub slot_index: F,
-    pub entropy: HashOut<F>,
-    pub dataset_proof: MerkleProof<F>,    // proof for dataset level tree
-    pub slot_proofs: Vec<MerkleProof<F>>, // proofs for sampled slot
-    pub cell_data: Vec<Cell<F, D>>,
-}
-
-impl<
-    F: RichField + Extendable<D> + Poseidon2,
-    const D: usize,
-> DatasetTree<F, D> {
-    /// Dataset tree with fake data, for testing only
-    pub fn new_default(params: &Params) -> Self {
-        let mut slot_trees = vec![];
-        let n_slots = 1 << params.dataset_depth_test();
-        for _ in 0..n_slots {
-            slot_trees.push(SlotTree::<F, D>::new_default(params));
-        }
-        Self::new(slot_trees, params.clone())
-    }
-
-    /// Create data for only the specified slot index in params
-    pub fn new_for_testing(params: &Params) -> Self {
-        let mut slot_trees = vec![];
-        // let n_slots = 1 << params.dataset_depth();
-        let n_slots = params.n_slots;
-        // zero hash
-        let zero = HashOut {
-            elements: [F::ZERO; 4],
-        };
-        let zero_slot = SlotTree::<F, D> {
-            tree: MerkleTree::<F>::new(&[zero.clone()], zero.clone()).unwrap(),
-            block_trees: vec![],
-            cell_data: vec![],
-            params: params.clone(),
-        };
-        for i in 0..n_slots {
-            if i == params.testing_slot_index {
-                slot_trees.push(SlotTree::<F, D>::new_default(params));
-            } else {
-                slot_trees.push(zero_slot.clone());
-            }
-        }
-        // get the roots of slot trees
-        let slot_roots = slot_trees
-            .iter()
-            .map(|t| t.tree.root().unwrap())
-            .collect::<Vec<_>>();
-        let dataset_tree = MerkleTree::<F>::new(&slot_roots, zero).unwrap();
-        Self {
-            tree: dataset_tree,
-            slot_trees,
-            params: params.clone(),
-        }
-    }
-
-    /// Same as default but with supplied slot trees
-    pub fn new(slot_trees: Vec<SlotTree<F, D>>, params: Params) -> Self {
-        // get the roots of slot trees
-        let slot_roots = slot_trees
-            .iter()
-            .map(|t| t.tree.root().unwrap())
-            .collect::<Vec<_>>();
-        // zero hash
-        let zero = HashOut {
-            elements: [F::ZERO; 4],
-        };
-        let dataset_tree = MerkleTree::<F>::new(&slot_roots, zero).unwrap();
-        Self {
-            tree: dataset_tree,
-            slot_trees,
-            params,
-        }
-    }
-
-    /// Generates a dataset level proof for the given slot index
-    /// Just a regular Merkle tree proof
-    pub fn get_proof(&self, index: usize) -> MerkleProof<F> {
-        let dataset_proof = self.tree.get_proof(index).unwrap();
-        dataset_proof
-    }
-
-    /// Generates a proof for the given slot index
-    /// Also takes entropy so it can use it to sample the slot
-    pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F, D> {
-        let mut dataset_proof = self.tree.get_proof(index).unwrap();
-        // println!("d proof len = {}", dataset_proof.path.len());
-        Self::pad_proof(&mut dataset_proof, self.params.dataset_depth());
-        // println!("d proof len = {}", dataset_proof.path.len());
-        let slot = &self.slot_trees[index];
-        let slot_root = slot.tree.root().unwrap();
-        let mut slot_proofs = vec![];
-        let mut cell_data = vec![];
-        let entropy_field = F::from_canonical_u64(entropy as u64);
-        let mut entropy_as_digest = HashOut::<F>::ZERO;
-        entropy_as_digest.elements[0] = entropy_field;
-        // get the index for cell from H(slot_root|counter|entropy)
-        let mask_bits = usize_to_bits_le_padded(self.params.n_cells - 1, self.params.max_depth + 1);
-        for i in 0..self.params.n_samples {
-            let cell_index_bits = calculate_cell_index_bits(
-                &entropy_as_digest.elements.to_vec(),
-                slot_root,
-                i + 1,
-                self.params.max_depth,
-                mask_bits.clone()
-            );
-            let cell_index = bits_le_padded_to_usize(&cell_index_bits);
-            let mut s_proof = slot.get_proof(cell_index);
-            Self::pad_proof(&mut s_proof, self.params.max_depth);
-            slot_proofs.push(s_proof);
-            let data_i = slot.cell_data[cell_index].data.clone();
-            let cell_i = Cell::<F, D> {
-                data: data_i
-            };
-            cell_data.push(cell_i);
-        }
-
-        DatasetProof {
-            slot_index: F::from_canonical_u64(index as u64),
-            entropy: entropy_as_digest,
-            dataset_proof,
-            slot_proofs,
-            cell_data,
-        }
-    }
-
-    pub fn pad_proof(merkle_proof: &mut MerkleProof<F>, max_depth: usize) {
-        for i in merkle_proof.path.len()..max_depth {
-            merkle_proof.path.push(HashOut::<F>::ZERO);
-        }
-    }
-
-    // Verify the sampling - non-circuit version
-    pub fn verify_sampling(&self, proof: DatasetProof<F, D>) -> bool {
-        let slot_index = proof.slot_index.to_canonical_u64() as usize;
-        let slot = &self.slot_trees[slot_index];
-        let slot_root = slot.tree.root().unwrap();
-        // check dataset level proof
-        let d_res = proof.dataset_proof.verify(slot_root, self.tree.root().unwrap());
-        if d_res.unwrap() == false {
-            return false;
-        }
-        // sanity check
-        assert_eq!(self.params.n_samples, proof.slot_proofs.len());
-        // get the index for cell from H(slot_root|counter|entropy)
-        let mask_bits = usize_to_bits_le_padded(self.params.n_cells - 1, self.params.max_depth);
-        for i in 0..self.params.n_samples {
-            let cell_index_bits = calculate_cell_index_bits(
-                &proof.entropy.elements.to_vec(),
-                slot_root,
-                i + 1,
-                self.params.max_depth,
-                mask_bits.clone(),
-            );
-            let cell_index = bits_le_padded_to_usize(&cell_index_bits);
-            // check the cell_index is the same as one in the proof
-            assert_eq!(cell_index, proof.slot_proofs[i].index);
-            let s_res = slot.verify_cell_proof(proof.slot_proofs[i].clone(), slot_root);
-            if s_res.unwrap() == false {
-                return false;
-            }
-        }
-        true
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use std::time::Instant;
-    use super::*;
-    use plonky2::plonk::circuit_data::CircuitConfig;
-    use plonky2::plonk::config::GenericConfig;
-    use plonky2::iop::witness::PartialWitness;
-    use plonky2::plonk::circuit_builder::CircuitBuilder;
-    use crate::circuits::params::CircuitParams;
-    use crate::circuits::sample_cells::{MerklePath, SampleCircuit, SampleCircuitInput};
-    use crate::proof_input::test_params::{C, D, F};
-
-    // Test sample cells (non-circuit)
-    #[test]
-    fn test_sample_cells() {
-        let params = Params::default();
-        let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);
-        let slot_index = params.testing_slot_index;
-        let entropy = params.entropy; // Use the entropy from Params if desired
-        let proof = dataset_t.sample_slot(slot_index, entropy);
-        let res = dataset_t.verify_sampling(proof);
-        assert_eq!(res, true);
-    }
-
-    // Test sample cells in-circuit for a selected slot
-    #[test]
-    fn test_sample_cells_circuit_from_selected_slot() -> anyhow::Result<()> {
-        let params = Params::default();
-        let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);
-
-        let slot_index = params.testing_slot_index;
-        let entropy = params.entropy; // Use the entropy from Params if desired
-
-        // Sanity check
-        let proof = dataset_t.sample_slot(slot_index, entropy);
-        let slot_root = dataset_t.slot_trees[slot_index].tree.root().unwrap();
-        // let res = dataset_t.verify_sampling(proof.clone());
-        // assert_eq!(res, true);
-
-        // Create the circuit
-        let config = CircuitConfig::standard_recursion_config();
-        let mut builder = CircuitBuilder::<F, D>::new(config);
-
-        let circuit_params = CircuitParams {
-            max_depth: params.max_depth,
-            max_log2_n_slots: params.dataset_depth(),
-            block_tree_depth: params.bot_depth(),
-            n_field_elems_per_cell: params.n_field_elems_per_cell(),
-            n_samples: params.n_samples,
-        };
-        let circ = SampleCircuit::new(circuit_params.clone());
-        let mut targets = circ.sample_slot_circuit(&mut builder);
-
-        // Create a PartialWitness and assign
-        let mut pw = PartialWitness::new();
-
-        let mut slot_paths = vec![];
-        for i in 0..params.n_samples {
-            let path = proof.slot_proofs[i].path.clone();
-            let mp = MerklePath::<F, D> {
-                path,
-            };
-            slot_paths.push(mp);
-        }
-        println!("circuit params = {:?}", circuit_params);
-
-        let witness = SampleCircuitInput::<F, D> {
-            entropy: proof.entropy.elements.clone().to_vec(),
-            dataset_root: dataset_t.tree.root().unwrap(),
-            slot_index: proof.slot_index.clone(),
-            slot_root,
-            n_cells_per_slot: F::from_canonical_usize(params.n_cells),
-            n_slots_per_dataset: F::from_canonical_usize(params.n_slots),
-            slot_proof: proof.dataset_proof.path.clone(),
-            cell_data: proof.cell_data.clone(),
-            merkle_paths: slot_paths,
-        };
-
-        println!("dataset = {:?}", witness.slot_proof.clone());
-        println!("n_slots_per_dataset = {:?}", witness.n_slots_per_dataset.clone());
-
-        circ.sample_slot_assign_witness(&mut pw, &mut targets, witness);
-
-        // Build the circuit
-        let data = builder.build::<C>();
-        println!("circuit size = {:?}", data.common.degree_bits());
-
-        // Prove the circuit with the assigned witness
-        let start_time = Instant::now();
-        let proof_with_pis = data.prove(pw)?;
-        println!("prove_time = {:?}", start_time.elapsed());
-
-        // Verify the proof
-        let verifier_data = data.verifier_data();
-        assert!(
-            verifier_data.verify(proof_with_pis).is_ok(),
-            "Merkle proof verification failed"
-        );
-
-        Ok(())
-    }
-}
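
The next hunk removes the old JSON witness export, which the commit message suggests was reworked rather than dropped. For orientation, a hypothetical driver for that exporter (the Params values and the file name are illustrative; the module paths are those used by the removed code):

use codex_plonky2_circuits::proof_input::gen_input::DatasetTree;
use codex_plonky2_circuits::proof_input::test_params::{Params, D, F};

fn main() -> anyhow::Result<()> {
    let params = Params::default();
    let dataset = DatasetTree::<F, D>::new_for_testing(&params);
    // Serializes the sampled witness to JSON: field elements become decimal
    // strings, and each hash digest is flattened into a run of 4 elements.
    dataset.export_witness_to_json(&params, "input.json")
}
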
@ -1,406 +0,0 @@
|
|||
// use std::fmt::Error;
|
||||
use anyhow::{anyhow, Result, Error};
|
||||
use std::num::ParseIntError;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use std::fs::File;
|
||||
use std::io::{BufReader, Write};
|
||||
use crate::proof_input::gen_input::DatasetTree;
|
||||
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||
use plonky2::plonk::config::{GenericConfig, Hasher};
|
||||
use plonky2_field::extension::Extendable;
|
||||
use plonky2_field::types::Field;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
use crate::circuits::sample_cells::{Cell, MerklePath, SampleCircuitInput};
|
||||
use crate::proof_input::test_params::Params;
|
||||
|
||||
// ... (Include necessary imports and your existing code)
|
||||
|
||||
impl<
|
||||
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||
const D: usize,
|
||||
> DatasetTree<F, D> {
|
||||
/// Function to generate witness and export to JSON
|
||||
pub fn export_witness_to_json(&self, params: &Params, filename: &str) -> anyhow::Result<()> {
|
||||
// Sample the slot
|
||||
let slot_index = params.testing_slot_index;
|
||||
let entropy = params.entropy;
|
||||
|
||||
let proof = self.sample_slot(slot_index, entropy);
|
||||
let slot_root = self.slot_trees[slot_index].tree.root().unwrap();
|
||||
|
||||
// Prepare the witness data
|
||||
let mut slot_paths = vec![];
|
||||
for i in 0..params.n_samples {
|
||||
let path = proof.slot_proofs[i].path.clone();
|
||||
let mp = MerklePath::<F,D>{
|
||||
path,
|
||||
};
|
||||
slot_paths.push(mp);
|
||||
}
|
||||
|
||||
// Create the witness
|
||||
let witness = SampleCircuitInput::<F, D> {
|
||||
entropy: proof.entropy.elements.clone().to_vec(),
|
||||
dataset_root: self.tree.root().unwrap(),
|
||||
slot_index: proof.slot_index.clone(),
|
||||
slot_root,
|
||||
n_cells_per_slot: F::from_canonical_usize(params.n_cells_per_slot()),
|
||||
n_slots_per_dataset: F::from_canonical_usize(params.n_slots_per_dataset()),
|
||||
slot_proof: proof.dataset_proof.path.clone(),
|
||||
cell_data: proof.cell_data.clone(),
|
||||
merkle_paths: slot_paths,
|
||||
};
|
||||
|
||||
// Convert the witness to a serializable format
|
||||
let serializable_witness = SerializableWitness::from_witness(&witness);
|
||||
|
||||
// Serialize to JSON
|
||||
let json_data = serde_json::to_string_pretty(&serializable_witness)?;
|
||||
|
||||
// Write to file
|
||||
let mut file = File::create(filename)?;
|
||||
file.write_all(json_data.as_bytes())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Serializable versions of your data structures
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SerializableWitness<
|
||||
// F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||
// const D: usize,
|
||||
> {
|
||||
dataSetRoot: Vec<String>,
|
||||
entropy: Vec<String>,
|
||||
nCellsPerSlot: usize,
|
||||
nSlotsPerDataSet: usize,
|
||||
slotIndex: u64,
|
||||
slotRoot: Vec<String>,
|
||||
slotProof: Vec<String>,
|
||||
cellData: Vec<Vec<String>>,
|
||||
merklePaths: Vec<Vec<String>>,
|
||||
}

impl SerializableWitness {
    /// Convert a circuit witness into its string-encoded, serializable form.
    pub fn from_witness<
        F: RichField + Extendable<D> + Poseidon2 + Serialize,
        const D: usize,
    >(witness: &SampleCircuitInput<F, D>) -> Self {
        SerializableWitness {
            dataSetRoot: witness
                .dataset_root
                .elements
                .iter()
                .map(|e| e.to_canonical_u64().to_string())
                .collect(),
            entropy: witness
                .entropy
                .iter()
                .map(|e| e.to_canonical_u64().to_string())
                .collect(),
            nCellsPerSlot: witness.n_cells_per_slot.to_canonical_u64() as usize,
            nSlotsPerDataSet: witness.n_slots_per_dataset.to_canonical_u64() as usize,
            slotIndex: witness.slot_index.to_canonical_u64(),
            slotRoot: witness
                .slot_root
                .elements
                .iter()
                .map(|e| e.to_canonical_u64().to_string())
                .collect(),
            slotProof: witness
                .slot_proof
                .iter()
                .flat_map(|hash| hash.elements.iter())
                .map(|e| e.to_canonical_u64().to_string())
                .collect(),
            cellData: witness
                .cell_data
                .iter()
                .map(|cell| {
                    cell.data
                        .iter()
                        .map(|e| e.to_canonical_u64().to_string())
                        .collect()
                })
                .collect(),
            merklePaths: witness
                .merkle_paths
                .iter()
                .map(|path| {
                    path.path
                        .iter()
                        .flat_map(|hash| hash.elements.iter())
                        .map(|e| e.to_canonical_u64().to_string())
                        .collect()
                })
                .collect(),
        }
    }
}

// The conversions in this file target `SampleCircuitInput` from
// `crate::circuits::sample_cells`; see that module for the field layout
// (entropy, dataset_root, slot_index, slot_root, n_cells_per_slot,
// n_slots_per_dataset, slot_proof, cell_data, merkle_paths).

impl SerializableWitness {
    /// Parse the string-encoded witness back into field elements.
    pub fn to_witness<
        F: RichField + Extendable<D> + Poseidon2, const D: usize,
    >(&self) -> Result<SampleCircuitInput<F, D>> {
        // Convert entropy
        let entropy = self
            .entropy
            .iter()
            .map(|s| -> Result<F, Error> {
                let n = s.parse::<u64>()?;
                Ok(F::from_canonical_u64(n))
            })
            .collect::<Result<Vec<F>, Error>>()?;

        // Convert dataset_root
        let dataset_root_elements = self
            .dataSetRoot
            .iter()
            .map(|s| -> Result<F, Error> {
                let n = s.parse::<u64>()?;
                Ok(F::from_canonical_u64(n))
            })
            .collect::<Result<Vec<F>, Error>>()?;
        let dataset_root = HashOut {
            elements: dataset_root_elements
                .try_into()
                .map_err(|_| anyhow!("Invalid dataset_root length"))?,
        };

        // slot_index
        let slot_index = F::from_canonical_u64(self.slotIndex);

        // slot_root
        let slot_root_elements = self
            .slotRoot
            .iter()
            .map(|s| -> Result<F, Error> {
                let n = s.parse::<u64>()?;
                Ok(F::from_canonical_u64(n))
            })
            .collect::<Result<Vec<F>, Error>>()?;
        let slot_root = HashOut {
            elements: slot_root_elements
                .try_into()
                .map_err(|_| anyhow!("Invalid slot_root length"))?,
        };

        // n_cells_per_slot
        let n_cells_per_slot = F::from_canonical_usize(self.nCellsPerSlot);

        // n_slots_per_dataset
        let n_slots_per_dataset = F::from_canonical_usize(self.nSlotsPerDataSet);

        // slot_proof: the flattened elements are regrouped into 4-element hashes
        let slot_proof_elements = self
            .slotProof
            .iter()
            .map(|s| -> Result<F, Error> {
                let n = s.parse::<u64>()?;
                Ok(F::from_canonical_u64(n))
            })
            .collect::<Result<Vec<F>, Error>>()?;
        if slot_proof_elements.len() % 4 != 0 {
            return Err(anyhow!("Invalid slot_proof length"));
        }
        let slot_proof = slot_proof_elements
            .chunks(4)
            .map(|chunk| -> Result<HashOut<F>, Error> {
                let elements: [F; 4] = chunk
                    .try_into()
                    .map_err(|_| anyhow!("Invalid chunk length"))?;
                Ok(HashOut { elements })
            })
            .collect::<Result<Vec<HashOut<F>>, Error>>()?;

        // cell_data: propagate parse errors instead of unwrapping
        let cell_data = self
            .cellData
            .iter()
            .map(|vec_of_strings| -> Result<Cell<F, D>, Error> {
                let data = vec_of_strings
                    .iter()
                    .map(|s| -> Result<F, Error> {
                        let n = s.parse::<u64>()?;
                        Ok(F::from_canonical_u64(n))
                    })
                    .collect::<Result<Vec<F>, Error>>()?;
                Ok(Cell::<F, D> { data })
            })
            .collect::<Result<Vec<Cell<F, D>>, Error>>()?;

        // merkle_paths
        let merkle_paths = self
            .merklePaths
            .iter()
            .map(|path_strings| -> Result<MerklePath<F, D>, Error> {
                let path_elements = path_strings
                    .iter()
                    .map(|s| -> Result<F, Error> {
                        let n = s.parse::<u64>()?;
                        Ok(F::from_canonical_u64(n))
                    })
                    .collect::<Result<Vec<F>, Error>>()?;

                if path_elements.len() % 4 != 0 {
                    return Err(anyhow!("Invalid merkle path length"));
                }

                let path = path_elements
                    .chunks(4)
                    .map(|chunk| -> Result<HashOut<F>, Error> {
                        let elements: [F; 4] = chunk
                            .try_into()
                            .map_err(|_| anyhow!("Invalid chunk length"))?;
                        Ok(HashOut { elements })
                    })
                    .collect::<Result<Vec<HashOut<F>>, Error>>()?;

                Ok(MerklePath::<F, D> { path })
            })
            .collect::<Result<Vec<MerklePath<F, D>>, Error>>()?;

        Ok(SampleCircuitInput {
            entropy,
            dataset_root,
            slot_index,
            slot_root,
            n_cells_per_slot,
            n_slots_per_dataset,
            slot_proof,
            cell_data,
            merkle_paths,
        })
    }
}
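
// The string-to-field parsing and the regrouping into 4-element hashes repeat
// for every field above; a helper along these lines (a sketch, not part of the
// source; NUM_HASH_OUT_ELTS is plonky2's hash width, 4, and would need importing
// from plonky2::hash::hash_types) could factor the pattern out:
//
// fn strings_to_hashouts<F: RichField>(strings: &[String]) -> Result<Vec<HashOut<F>>> {
//     let elements = strings
//         .iter()
//         .map(|s| Ok(F::from_canonical_u64(s.parse::<u64>()?)))
//         .collect::<Result<Vec<F>>>()?;
//     if elements.len() % NUM_HASH_OUT_ELTS != 0 {
//         return Err(anyhow!("length is not a multiple of {}", NUM_HASH_OUT_ELTS));
//     }
//     elements
//         .chunks(NUM_HASH_OUT_ELTS)
//         .map(|chunk| Ok(HashOut {
//             elements: chunk.try_into().map_err(|_| anyhow!("invalid chunk"))?,
//         }))
//         .collect()
// }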

/// Import a witness previously exported with export_witness_to_json.
pub fn import_witness_from_json<F: RichField + Extendable<D> + Poseidon2, const D: usize>(
    filename: &str,
) -> Result<SampleCircuitInput<F, D>> {
    let file = File::open(filename)?;
    let reader = BufReader::new(file);
    let serializable_witness: SerializableWitness = serde_json::from_reader(reader)?;

    let witness = serializable_witness.to_witness()?;
    Ok(witness)
}
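
// Usage sketch (filename illustrative): this pairs with export_witness_to_json
// for a full JSON round trip, as exercised in the tests below:
//
// let witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;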

#[cfg(test)]
mod tests {
    use super::*;
    use crate::proof_input::test_params::{F, D};
    use std::fs;

    // Generate the JSON input file
    #[test]
    fn test_export_witness_to_json() -> anyhow::Result<()> {
        // Create Params instance
        let params = Params::default();

        // Create the dataset tree
        let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);

        // Export the witness to JSON
        dataset_t.export_witness_to_json(&params, "input.json")?;

        println!("Witness exported to input.json");

        Ok(())
    }

    #[test]
    fn test_import_witness_from_json() -> anyhow::Result<()> {
        // This test expects input.json to exist; it can be generated
        // with test_export_witness_to_json above.

        // Import the witness from the JSON file
        let witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;

        // Spot-check that the data was imported: the entropy is one HashOut,
        // i.e. 4 field elements
        assert_eq!(witness.entropy.len(), 4);
        // Add more assertions as needed

        println!("Witness imported successfully");

        Ok(())
    }

    // Round-trip test: export a witness to JSON, import it back, and compare.
    #[test]
    fn test_export_import_witness() -> anyhow::Result<()> {
        // Create Params instance
        let params = Params::default();

        // Create the dataset tree
        let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);

        // Generate the witness data
        let slot_index = params.testing_slot_index;
        let entropy = params.entropy;

        let proof = dataset_t.sample_slot(slot_index, entropy);
        let slot_root = dataset_t.slot_trees[slot_index].tree.root().unwrap();

        let mut slot_paths = vec![];
        for i in 0..params.n_samples {
            let path = proof.slot_proofs[i].path.clone();
            let mp = MerklePath::<F, D> { path };
            slot_paths.push(mp);
        }

        let original_witness = SampleCircuitInput::<F, D> {
            entropy: proof.entropy.elements.to_vec(),
            dataset_root: dataset_t.tree.root().unwrap(),
            slot_index: proof.slot_index,
            slot_root,
            n_cells_per_slot: F::from_canonical_usize(params.n_cells_per_slot()),
            n_slots_per_dataset: F::from_canonical_usize(params.n_slots_per_dataset()),
            slot_proof: proof.dataset_proof.path.clone(),
            cell_data: proof.cell_data.clone(),
            merkle_paths: slot_paths,
        };

        // Export the witness to JSON
        dataset_t.export_witness_to_json(&params, "input.json")?;
        println!("Witness exported to input.json");

        // Import the witness from JSON
        let imported_witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
        println!("Witness imported from input.json");

        // Compare the original and imported witnesses
        assert_eq!(original_witness, imported_witness, "Witnesses are not equal");

        // Cleanup: remove the generated JSON file
        fs::remove_file("input.json")?;

        println!("Test passed: original and imported witnesses are equal.");

        Ok(())
    }
}

@ -1,4 +0,0 @@
pub mod gen_input;
pub mod test_params;
pub mod utils;
pub mod json;

@ -1,226 +0,0 @@
// config for generating input for the proof circuit

use plonky2::hash::poseidon::PoseidonHash;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use std::env;
use anyhow::{Result, Context};

// fake input params

// types
pub const D: usize = 2;
pub type C = PoseidonGoldilocksConfig;
pub type F = <C as GenericConfig<D>>::F; // this is the Goldilocks field
pub type H = PoseidonHash;

// hardcoded params for generating proof input
pub const MAX_DEPTH: usize = 32;          // depth of the big tree (slot tree depth, includes block tree depth)
pub const MAX_SLOTS: usize = 256;         // maximum number of slots
pub const CELL_SIZE: usize = 2048;        // cell size in bytes
pub const BLOCK_SIZE: usize = 65536;      // block size in bytes
pub const N_SAMPLES: usize = 5;           // number of samples to prove

pub const ENTROPY: usize = 1234567;       // external randomness
pub const SEED: usize = 12345;            // seed for creating fake data TODO: not used now

pub const N_SLOTS: usize = 16;            // number of slots in the dataset
pub const TESTING_SLOT_INDEX: usize = 2;  // the index of the slot to be sampled
pub const N_CELLS: usize = 512;           // number of cells in each slot

/// Params struct
#[derive(Clone)]
pub struct Params {
    pub max_depth: usize,
    pub max_slots: usize,
    pub cell_size: usize,
    pub block_size: usize,
    pub n_samples: usize,
    pub entropy: usize,
    pub seed: usize,
    pub n_slots: usize,
    pub testing_slot_index: usize,
    pub n_cells: usize,
}

/// Default Params use the hardcoded constants above
impl Default for Params {
    fn default() -> Self {
        Params {
            max_depth: MAX_DEPTH,
            max_slots: MAX_SLOTS,
            cell_size: CELL_SIZE,
            block_size: BLOCK_SIZE,
            n_samples: N_SAMPLES,
            entropy: ENTROPY,
            seed: SEED,
            n_slots: N_SLOTS,
            testing_slot_index: TESTING_SLOT_INDEX,
            n_cells: N_CELLS,
        }
    }
}

/// Constructor with custom values, plus the derived parameters
impl Params {
    pub fn new(
        max_depth: usize,
        max_slots: usize,
        cell_size: usize,
        block_size: usize,
        n_samples: usize,
        entropy: usize,
        seed: usize,
        n_slots: usize,
        testing_slot_index: usize,
        n_cells: usize,
    ) -> Self {
        Params {
            max_depth,
            max_slots,
            cell_size,
            block_size,
            n_samples,
            entropy,
            seed,
            n_slots,
            testing_slot_index,
            n_cells,
        }
    }

    // GOLDILOCKS_F_SIZE: bit size of a Goldilocks field element
    pub fn goldilocks_f_size(&self) -> usize {
        64
    }

    // N_FIELD_ELEMS_PER_CELL
    pub fn n_field_elems_per_cell(&self) -> usize {
        self.cell_size * 8 / self.goldilocks_f_size()
    }

    // BOT_DEPTH: block tree depth (assumes block_size / cell_size is a power of two)
    pub fn bot_depth(&self) -> usize {
        (self.block_size / self.cell_size).trailing_zeros() as usize
    }

    // N_CELLS_IN_BLOCKS (2^bot_depth)
    pub fn n_cells_in_blocks(&self) -> usize {
        1 << self.bot_depth()
    }

    // N_BLOCKS (2^(max_depth - bot_depth))
    pub fn n_blocks(&self) -> usize {
        1 << (self.max_depth - self.bot_depth())
    }

    // Depth of the test input
    pub fn depth_test(&self) -> usize {
        self.n_cells.trailing_zeros() as usize
    }

    // N_BLOCKS for the test input
    pub fn n_blocks_test(&self) -> usize {
        1 << (self.depth_test() - self.bot_depth())
    }

    // DATASET_DEPTH
    pub fn dataset_depth(&self) -> usize {
        self.max_slots.trailing_zeros() as usize
    }

    // DATASET_DEPTH for the test input
    pub fn dataset_depth_test(&self) -> usize {
        self.n_slots.trailing_zeros() as usize
    }

    // n_cells_per_slot (2^max_depth)
    pub fn n_cells_per_slot(&self) -> usize {
        1 << self.max_depth
    }

    // n_slots_per_dataset (2^dataset_depth)
    pub fn n_slots_per_dataset(&self) -> usize {
        1 << self.dataset_depth()
    }
}
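
// A quick worked check of the derived values under Params::default()
// (CELL_SIZE = 2048, BLOCK_SIZE = 65536, MAX_DEPTH = 32, MAX_SLOTS = 256,
// N_CELLS = 512, N_SLOTS = 16):
//
//   n_field_elems_per_cell = 2048 * 8 / 64      = 256
//   bot_depth              = log2(65536 / 2048) = 5
//   n_cells_in_blocks      = 2^5                = 32
//   depth_test             = log2(512)          = 9
//   n_blocks_test          = 2^(9 - 5)          = 16
//   dataset_depth          = log2(256)          = 8
//   dataset_depth_test     = log2(16)           = 4
//   n_cells_per_slot       = 2^32
//   n_slots_per_dataset    = 2^8                = 256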

// computed constants (compile-time versions of the derived parameters above)
pub const GOLDILOCKS_F_SIZE: usize = 64;
pub const N_FIELD_ELEMS_PER_CELL: usize = CELL_SIZE * 8 / GOLDILOCKS_F_SIZE;
pub const BOT_DEPTH: usize = (BLOCK_SIZE / CELL_SIZE).ilog2() as usize; // block tree depth

pub const N_CELLS_IN_BLOCKS: usize = 1 << BOT_DEPTH;       // 2^BOT_DEPTH
pub const N_BLOCKS: usize = 1 << (MAX_DEPTH - BOT_DEPTH);  // 2^(MAX_DEPTH - BOT_DEPTH)

pub const DATASET_DEPTH: usize = MAX_SLOTS.ilog2() as usize;

// load params from the environment
impl Params {
    pub fn from_env() -> Result<Self> {
        let max_depth = env::var("MAXDEPTH")
            .context("MAXDEPTH not set")?
            .parse::<usize>()
            .context("Invalid MAXDEPTH")?;

        let max_slots = env::var("MAXSLOTS")
            .context("MAXSLOTS not set")?
            .parse::<usize>()
            .context("Invalid MAXSLOTS")?;

        let cell_size = env::var("CELLSIZE")
            .context("CELLSIZE not set")?
            .parse::<usize>()
            .context("Invalid CELLSIZE")?;

        let block_size = env::var("BLOCKSIZE")
            .context("BLOCKSIZE not set")?
            .parse::<usize>()
            .context("Invalid BLOCKSIZE")?;

        let n_samples = env::var("NSAMPLES")
            .context("NSAMPLES not set")?
            .parse::<usize>()
            .context("Invalid NSAMPLES")?;

        let entropy = env::var("ENTROPY")
            .context("ENTROPY not set")?
            .parse::<usize>()
            .context("Invalid ENTROPY")?;

        let seed = env::var("SEED")
            .context("SEED not set")?
            .parse::<usize>()
            .context("Invalid SEED")?;

        let n_slots = env::var("NSLOTS")
            .context("NSLOTS not set")?
            .parse::<usize>()
            .context("Invalid NSLOTS")?;

        let testing_slot_index = env::var("SLOTINDEX")
            .context("SLOTINDEX not set")?
            .parse::<usize>()
            .context("Invalid SLOTINDEX")?;

        let n_cells = env::var("NCELLS")
            .context("NCELLS not set")?
            .parse::<usize>()
            .context("Invalid NCELLS")?;

        Ok(Params {
            max_depth,
            max_slots,
            cell_size,
            block_size,
            n_samples,
            entropy,
            seed,
            n_slots,
            testing_slot_index,
            n_cells,
        })
    }
}
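
// Usage sketch for from_env (values are illustrative, mirroring the defaults).
// Setting the variables in-process like this is convenient in tests:
//
// std::env::set_var("MAXDEPTH", "32");
// std::env::set_var("MAXSLOTS", "256");
// std::env::set_var("CELLSIZE", "2048");
// std::env::set_var("BLOCKSIZE", "65536");
// std::env::set_var("NSAMPLES", "5");
// std::env::set_var("ENTROPY", "1234567");
// std::env::set_var("SEED", "12345");
// std::env::set_var("NSLOTS", "16");
// std::env::set_var("SLOTINDEX", "2");
// std::env::set_var("NCELLS", "512");
// let params = Params::from_env()?;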

@ -1,297 +0,0 @@
use anyhow::Result;
use plonky2::field::extension::Extendable;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::{HashOut, HashOutTarget, NUM_HASH_OUT_ELTS, RichField};
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::iop::witness::{PartialWitness, Witness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, GenericHashOut, Hasher, PoseidonGoldilocksConfig};
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use serde::Serialize;
use crate::circuits::merkle_circuit::{MerkleProofTarget, MerkleTreeCircuit, MerkleTreeTargets};
use crate::circuits::utils::{assign_bool_targets, assign_hash_out_targets, usize_to_bits_le_padded};

use crate::merkle_tree::merkle_safe::MerkleTree;

/// the input to the Merkle tree circuit
#[derive(Clone)]
pub struct MerkleTreeCircuitInput<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
> {
    pub leaf: HashOut<F>,
    pub path_bits: Vec<bool>,
    pub last_bits: Vec<bool>,
    pub mask_bits: Vec<bool>,
    pub merkle_path: Vec<HashOut<F>>,
}
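
// Worked example of the bit inputs, assuming usize_to_bits_le_padded returns
// little-endian bits padded to the requested length. For a tree of depth 4
// with nleaves = 16 and leaf_index = 8 (as in the tests below):
//
//   path_bits = bits(8, 4)  = [false, false, false, true]
//   last_bits = bits(15, 4) = [true, true, true, true]
//   mask_bits = bits(15, 5) = [true, true, true, true, false]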

/// defines the computations inside the circuit and returns the targets used
/// NOTE: this helper is not used in the sampling circuit, which calls
/// reconstruct_merkle_root_circuit_with_mask directly
pub fn build_circuit<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    depth: usize,
) -> (MerkleTreeTargets, HashOutTarget) {
    // Create virtual targets
    let leaf = builder.add_virtual_hash();

    // path bits (binary decomposition of leaf_index)
    let path_bits = (0..depth).map(|_| builder.add_virtual_bool_target_safe()).collect::<Vec<_>>();

    // last bits (binary decomposition of last_index = nleaves - 1)
    let last_bits = (0..depth).map(|_| builder.add_virtual_bool_target_safe()).collect::<Vec<_>>();

    // mask bits (binary decomposition of last_index over depth + 1 bits)
    let mask_bits = (0..depth + 1).map(|_| builder.add_virtual_bool_target_safe()).collect::<Vec<_>>();

    // Merkle path (sibling hashes from leaf to root)
    let merkle_path = MerkleProofTarget {
        path: (0..depth).map(|_| builder.add_virtual_hash()).collect(),
    };

    // create MerkleTreeTargets struct
    let mut targets = MerkleTreeTargets {
        leaf,
        path_bits,
        last_bits,
        mask_bits,
        merkle_path,
    };

    // Add Merkle proof verification constraints to the circuit
    let reconstructed_root_target = MerkleTreeCircuit::reconstruct_merkle_root_circuit_with_mask(builder, &mut targets, depth);

    // Return the targets together with the reconstructed root
    (targets, reconstructed_root_target)
}

/// assign the witness values to the circuit targets
/// this takes a MerkleTreeCircuitInput and fills all required circuit targets
pub fn assign_witness<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
>(
    pw: &mut PartialWitness<F>,
    targets: &mut MerkleTreeTargets,
    witnesses: MerkleTreeCircuitInput<F, D>,
) -> Result<()> {
    // Assign the leaf hash to the leaf target
    pw.set_hash_target(targets.leaf, witnesses.leaf);

    // Assign path bits
    assign_bool_targets(pw, &targets.path_bits, witnesses.path_bits);

    // Assign last bits
    assign_bool_targets(pw, &targets.last_bits, witnesses.last_bits);

    // Assign mask bits
    assign_bool_targets(pw, &targets.mask_bits, witnesses.mask_bits);

    // assign the Merkle path (sibling hashes) to the targets
    for i in 0..targets.merkle_path.path.len() {
        if i >= witnesses.merkle_path.len() { // pad with zeros
            assign_hash_out_targets(pw, &targets.merkle_path.path[i].elements, &[F::ZERO; NUM_HASH_OUT_ELTS]);
            continue;
        }
        assign_hash_out_targets(pw, &targets.merkle_path.path[i].elements, &witnesses.merkle_path[i].elements);
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use plonky2::hash::hash_types::HashOut;
    use plonky2::hash::poseidon::PoseidonHash;
    use super::*;
    use plonky2::plonk::circuit_data::CircuitConfig;
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
    use plonky2::iop::witness::PartialWitness;
    use plonky2::plonk::circuit_builder::CircuitBuilder;
    use plonky2_field::goldilocks_field::GoldilocksField;
    use crate::circuits::utils::usize_to_bits_le_padded;
    use crate::merkle_tree::merkle_safe::MerkleTree;

    // NOTE: for now these tests don't check that the reconstructed root equals the
    // expected root; that will be fixed later. For that check, see the
    // prove_single_cell tests.
    #[test]
    fn test_build_circuit() -> anyhow::Result<()> {
        // circuit params
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type H = PoseidonHash;

        // Generate random leaf data
        let nleaves = 16; // number of leaves
        let max_depth = 4;
        let data = (0..nleaves)
            .map(|i| GoldilocksField::from_canonical_u64(i))
            .collect::<Vec<_>>();
        // Hash the data to obtain leaf hashes
        let leaves: Vec<HashOut<GoldilocksField>> = data
            .iter()
            .map(|&element| {
                // Hash each field element to get the leaf hash
                PoseidonHash::hash_no_pad(&[element])
            })
            .collect();

        // initialize the Merkle tree
        let zero_hash = HashOut {
            elements: [GoldilocksField::ZERO; 4],
        };
        let tree = MerkleTree::<F>::new(&leaves, zero_hash)?;

        // select the leaf index to prove
        let leaf_index: usize = 8;

        // get the Merkle proof for the selected leaf
        let proof = tree.get_proof(leaf_index)?;
        // sanity check:
        let check = proof.verify(tree.layers[0][leaf_index], tree.root().unwrap()).unwrap();
        assert!(check);

        // get the expected Merkle root
        let expected_root = tree.root()?;

        // create the circuit
        let config = CircuitConfig::standard_recursion_config();
        let mut builder = CircuitBuilder::<F, D>::new(config);
        let (mut targets, reconstructed_root_target) = build_circuit(&mut builder, max_depth);

        // expected Merkle root target
        let expected_root_target = builder.add_virtual_hash();

        // check equality with the expected root
        for i in 0..NUM_HASH_OUT_ELTS {
            builder.connect(expected_root_target.elements[i], reconstructed_root_target.elements[i]);
        }

        let path_bits = usize_to_bits_le_padded(leaf_index, max_depth);
        let last_index = (nleaves - 1) as usize;
        let last_bits = usize_to_bits_le_padded(last_index, max_depth);
        let mask_bits = usize_to_bits_le_padded(last_index, max_depth + 1);

        // circuit input
        let circuit_input = MerkleTreeCircuitInput::<F, D> {
            leaf: tree.layers[0][leaf_index],
            path_bits,
            last_bits,
            mask_bits,
            merkle_path: proof.path,
        };

        // create a PartialWitness and assign
        let mut pw = PartialWitness::new();
        assign_witness(&mut pw, &mut targets, circuit_input)?;
        pw.set_hash_target(expected_root_target, expected_root);

        // build the circuit
        let data = builder.build::<C>();

        // prove the circuit with the assigned witness
        let proof_with_pis = data.prove(pw)?;

        // verify the proof
        let verifier_data = data.verifier_data();
        assert!(
            verifier_data.verify(proof_with_pis).is_ok(),
            "Merkle proof verification failed"
        );

        Ok(())
    }

    // same as the test above, but for all leaves
    #[test]
    fn test_verify_all_leaves() -> anyhow::Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type H = PoseidonHash;

        let nleaves = 16; // number of leaves
        let max_depth = 4;
        let data = (0..nleaves)
            .map(|i| GoldilocksField::from_canonical_u64(i as u64))
            .collect::<Vec<_>>();
        // Hash the data to obtain leaf hashes
        let leaves: Vec<HashOut<GoldilocksField>> = data
            .iter()
            .map(|&element| {
                // Hash each field element to get the leaf hash
                PoseidonHash::hash_no_pad(&[element])
            })
            .collect();

        let zero_hash = HashOut {
            elements: [GoldilocksField::ZERO; 4],
        };
        let tree = MerkleTree::<F>::new(&leaves, zero_hash)?;

        let expected_root = tree.root()?;

        let config = CircuitConfig::standard_recursion_config();
        let mut builder = CircuitBuilder::<F, D>::new(config);
        let (mut targets, reconstructed_root_target) = build_circuit(&mut builder, max_depth);

        // expected Merkle root target
        let expected_root_target = builder.add_virtual_hash();

        // check equality with the expected root
        for i in 0..NUM_HASH_OUT_ELTS {
            builder.connect(expected_root_target.elements[i], reconstructed_root_target.elements[i]);
        }

        // build the circuit once, then prove each leaf against it
        let data = builder.build::<C>();

        for leaf_index in 0..nleaves {
            let proof = tree.get_proof(leaf_index)?;
            let check = proof.verify(tree.layers[0][leaf_index], expected_root)?;
            assert!(
                check,
                "Merkle proof verification failed for leaf index {}",
                leaf_index
            );

            let mut pw = PartialWitness::new();

            let path_bits = usize_to_bits_le_padded(leaf_index, max_depth);
            let last_index = (nleaves - 1) as usize;
            let last_bits = usize_to_bits_le_padded(last_index, max_depth);
            let mask_bits = usize_to_bits_le_padded(last_index, max_depth + 1);

            // circuit input
            let circuit_input = MerkleTreeCircuitInput::<F, D> {
                leaf: tree.layers[0][leaf_index],
                path_bits,
                last_bits,
                mask_bits,
                merkle_path: proof.path,
            };

            assign_witness(&mut pw, &mut targets, circuit_input)?;
            pw.set_hash_target(expected_root_target, expected_root);

            let proof_with_pis = data.prove(pw)?;

            let verifier_data = data.verifier_data();
            assert!(
                verifier_data.verify(proof_with_pis).is_ok(),
                "Merkle proof verification failed in circuit for leaf index {}",
                leaf_index
            );
        }

        Ok(())
    }
}

@ -1 +0,0 @@
pub mod merkle_circuit;