re-arrange code and refactor
This commit is contained in:
parent
67ff54c3f6
commit
54ffe0d3fd
|
@ -0,0 +1,28 @@
|
||||||
|
[package]
|
||||||
|
name = "proof-input"
|
||||||
|
description = "proof input generation library"
|
||||||
|
authors = ["Mohammed Alghazwi <m.ghazwi@gmail.com>"]
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
clap = { version = "4.0", features = ["derive"] }
|
||||||
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
serde_json = "1.0"
|
||||||
|
anyhow = "1.0"
|
||||||
|
plonky2 = { version = "0.2.2" }
|
||||||
|
plonky2_field = { version = "0.2.2", default-features = false }
|
||||||
|
plonky2_poseidon2 = { path = "../plonky2_poseidon2" }
|
||||||
|
codex-plonky2-circuits = { path = "../codex-plonky2-circuits" }
|
||||||
|
|
||||||
|
[[bench]]
|
||||||
|
name = "safe_circuit"
|
||||||
|
harness = false
|
||||||
|
|
||||||
|
[[bench]]
|
||||||
|
name = "prove_cells"
|
||||||
|
harness = false
|
||||||
|
|
||||||
|
[[bench]]
|
||||||
|
name = "sample_cells"
|
||||||
|
harness = false
|
|
@ -0,0 +1,152 @@
|
||||||
|
use criterion::{criterion_group, criterion_main, Criterion};
|
||||||
|
use anyhow::Result;
|
||||||
|
use std::time::{Duration, Instant};
|
||||||
|
|
||||||
|
use codex_plonky2_circuits::{
|
||||||
|
merkle_tree::merkle_safe::MerkleProof,
|
||||||
|
circuits::merkle_circuit::MerkleTreeCircuit,
|
||||||
|
};
|
||||||
|
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
|
||||||
|
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig};
|
||||||
|
use plonky2::iop::witness::PartialWitness;
|
||||||
|
use plonky2::hash::poseidon::PoseidonHash;
|
||||||
|
use plonky2::field::extension::Extendable;
|
||||||
|
use plonky2::hash::hash_types::RichField;
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||||
|
use codex_plonky2_circuits::circuits::prove_single_cell::SlotTreeCircuit;
|
||||||
|
|
||||||
|
macro_rules! pretty_print {
|
||||||
|
($($arg:tt)*) => {
|
||||||
|
print!("\x1b[0;36mINFO ===========>\x1b[0m ");
|
||||||
|
println!($($arg)*);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hash function used
|
||||||
|
type HF = PoseidonHash;
|
||||||
|
|
||||||
|
fn prepare_data<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
C: GenericConfig<D, F = F>,
|
||||||
|
const D: usize,
|
||||||
|
H: Hasher<F> + AlgebraicHasher<F>,
|
||||||
|
>(N: usize) -> Result<(
|
||||||
|
SlotTreeCircuit<F, C, D, H>,
|
||||||
|
Vec<usize>,
|
||||||
|
Vec<MerkleProof<F, H>>,
|
||||||
|
)> {
|
||||||
|
// Initialize the slot tree with default data
|
||||||
|
let slot_tree = SlotTreeCircuit::<F, C,D, H>::default();
|
||||||
|
|
||||||
|
// Select N leaf indices to prove
|
||||||
|
let leaf_indices: Vec<usize> = (0..N).collect();
|
||||||
|
|
||||||
|
// Get the Merkle proofs for the selected leaves
|
||||||
|
let proofs: Vec<_> = leaf_indices
|
||||||
|
.iter()
|
||||||
|
.map(|&leaf_index| slot_tree.get_proof(leaf_index))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok((slot_tree, leaf_indices, proofs))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_circuit<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
C: GenericConfig<D, F = F>,
|
||||||
|
const D: usize,
|
||||||
|
H: Hasher<F> + AlgebraicHasher<F>,
|
||||||
|
>(
|
||||||
|
slot_tree: &SlotTreeCircuit<F, C, D, H>,
|
||||||
|
leaf_indices: &[usize],
|
||||||
|
proofs: &[MerkleProof<F, H>],
|
||||||
|
) -> Result<(CircuitData<F, C, D>, PartialWitness<F>)>
|
||||||
|
{
|
||||||
|
// Create the circuit
|
||||||
|
let config = CircuitConfig::standard_recursion_config();
|
||||||
|
let mut builder = CircuitBuilder::<F, D>::new(config);
|
||||||
|
|
||||||
|
// Create a PartialWitness
|
||||||
|
let mut pw = PartialWitness::new();
|
||||||
|
|
||||||
|
// For each proof, create targets, add constraints, and assign witnesses
|
||||||
|
for (i, &leaf_index) in leaf_indices.iter().enumerate() {
|
||||||
|
// Build the circuit for each proof
|
||||||
|
let mut targets = SlotTreeCircuit::<F,C,D,H>::prove_single_cell(&mut builder);
|
||||||
|
|
||||||
|
// Assign witnesses for each proof
|
||||||
|
slot_tree.single_cell_assign_witness(
|
||||||
|
&mut pw,
|
||||||
|
&mut targets,
|
||||||
|
leaf_index,
|
||||||
|
&slot_tree.cell_data[leaf_index],
|
||||||
|
proofs[i].clone(),
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the circuit
|
||||||
|
let data = builder.build::<C>();
|
||||||
|
|
||||||
|
Ok((data, pw))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn single_cell_proof_benchmark(c: &mut Criterion) {
|
||||||
|
let mut group = c.benchmark_group("Single Cell Proof Benchmark");
|
||||||
|
|
||||||
|
// Circuit parameters
|
||||||
|
const D: usize = 2;
|
||||||
|
type C = PoseidonGoldilocksConfig;
|
||||||
|
type F = <C as GenericConfig<D>>::F;
|
||||||
|
type H = PoseidonHash;
|
||||||
|
|
||||||
|
// Prepare the data that will be used in all steps
|
||||||
|
let N = 5; // Number of leaves to prove
|
||||||
|
let (slot_tree, leaf_indices, proofs) = prepare_data::<F, C, D, H>(N).unwrap();
|
||||||
|
|
||||||
|
// Benchmark the circuit building
|
||||||
|
group.bench_function("Single Cell Proof Build", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
build_circuit::<F, C, D, H>(&slot_tree, &leaf_indices, &proofs).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
// Build the circuit
|
||||||
|
let (data, pw) = build_circuit::<F, C, D, H>(&slot_tree, &leaf_indices, &proofs).unwrap();
|
||||||
|
|
||||||
|
pretty_print!(
|
||||||
|
"Circuit size: 2^{} gates",
|
||||||
|
data.common.degree_bits()
|
||||||
|
);
|
||||||
|
|
||||||
|
let start_time = Instant::now();
|
||||||
|
let proof_with_pis = data.prove(pw.clone()).unwrap();
|
||||||
|
println!("prove_time = {:?}", start_time.elapsed());
|
||||||
|
|
||||||
|
// Benchmark the proving time
|
||||||
|
group.bench_function("Single Cell Proof Prove", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
let _proof_with_pis = data.prove(pw.clone()).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
// Generate the proof
|
||||||
|
let proof_with_pis = data.prove(pw.clone()).unwrap();
|
||||||
|
let verifier_data = data.verifier_data();
|
||||||
|
|
||||||
|
pretty_print!("Proof size: {} bytes", proof_with_pis.to_bytes().len());
|
||||||
|
|
||||||
|
// Benchmark the verification time
|
||||||
|
group.bench_function("Single Cell Proof Verify", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
verifier_data.verify(proof_with_pis.clone()).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
group.finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
criterion_group!(name = benches;
|
||||||
|
config = Criterion::default().sample_size(10);
|
||||||
|
targets = single_cell_proof_benchmark);
|
||||||
|
criterion_main!(benches);
|
|
@ -0,0 +1,164 @@
|
||||||
|
use criterion::{criterion_group, criterion_main, Criterion};
|
||||||
|
use anyhow::Result;
|
||||||
|
|
||||||
|
use codex_plonky2_circuits::{merkle_tree::merkle_safe::MerkleTree, circuits::merkle_circuit::MerkleTreeCircuit};
|
||||||
|
use plonky2::field::types::Field;
|
||||||
|
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
|
||||||
|
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig};
|
||||||
|
use plonky2::iop::witness::PartialWitness;
|
||||||
|
use plonky2::hash::hash_types::HashOut;
|
||||||
|
use plonky2::hash::poseidon::PoseidonHash;
|
||||||
|
use plonky2::field::extension::Extendable;
|
||||||
|
use plonky2::hash::hash_types::RichField;
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||||
|
use codex_plonky2_circuits::merkle_tree::merkle_safe::MerkleProof;
|
||||||
|
|
||||||
|
macro_rules! pretty_print {
|
||||||
|
($($arg:tt)*) => {
|
||||||
|
print!("\x1b[0;36mINFO ===========>\x1b[0m ");
|
||||||
|
println!($($arg)*);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn prepare_data<F, H>(N: usize) -> Result<(
|
||||||
|
MerkleTree<F, H>,
|
||||||
|
Vec<HashOut<F>>,
|
||||||
|
Vec<usize>,
|
||||||
|
Vec<MerkleProof<F, H>>,
|
||||||
|
HashOut<F>,
|
||||||
|
)>
|
||||||
|
where
|
||||||
|
F: RichField + Extendable<2> + Poseidon2,
|
||||||
|
H: Hasher<F> + AlgebraicHasher<F> + Hasher<F>,
|
||||||
|
{
|
||||||
|
// Total number of leaves in the Merkle tree
|
||||||
|
let nleaves = 1u64 << 16;
|
||||||
|
|
||||||
|
// Generate leaf data
|
||||||
|
let data = (0..nleaves)
|
||||||
|
.map(|i| F::from_canonical_u64(i as u64))
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
// Hash the data to obtain leaf hashes
|
||||||
|
let leaves: Vec<HashOut<F>> = data
|
||||||
|
.iter()
|
||||||
|
.map(|&element| {
|
||||||
|
PoseidonHash::hash_no_pad(&[element])
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let zero_hash = HashOut {
|
||||||
|
elements: [F::ZERO; 4],
|
||||||
|
};
|
||||||
|
let tree = MerkleTree::<F, H>::new(&leaves, zero_hash)?;
|
||||||
|
|
||||||
|
// Select N leaf indices to prove
|
||||||
|
let leaf_indices: Vec<usize> = (0..N).collect();
|
||||||
|
|
||||||
|
// Get the Merkle proofs for the selected leaves
|
||||||
|
let proofs: Vec<_> = leaf_indices
|
||||||
|
.iter()
|
||||||
|
.map(|&leaf_index| tree.get_proof(leaf_index))
|
||||||
|
.collect::<Result<Vec<_>, _>>()?;
|
||||||
|
|
||||||
|
// Expected Merkle root
|
||||||
|
let expected_root = tree.root()?;
|
||||||
|
|
||||||
|
Ok((tree, leaves, leaf_indices, proofs, expected_root))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_circuit<F, C, const D: usize, H>(
|
||||||
|
tree: &MerkleTree<F, H>,
|
||||||
|
leaf_indices: &[usize],
|
||||||
|
) -> Result<(CircuitData<F, C, D>, PartialWitness<F>)>
|
||||||
|
where
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
C: GenericConfig<D, F = F>,
|
||||||
|
H: Hasher<F> + AlgebraicHasher<F> + Hasher<F>,
|
||||||
|
{
|
||||||
|
// Create the circuit
|
||||||
|
let config = CircuitConfig::standard_recursion_config();
|
||||||
|
let mut builder = CircuitBuilder::<F, D>::new(config);
|
||||||
|
|
||||||
|
// Create a PartialWitness
|
||||||
|
let mut pw = PartialWitness::new();
|
||||||
|
|
||||||
|
// Initialize the circuit instance
|
||||||
|
let mut circuit_instance = MerkleTreeCircuit::<F, C, D, H> {
|
||||||
|
tree: tree.clone(),
|
||||||
|
_phantom: PhantomData,
|
||||||
|
};
|
||||||
|
|
||||||
|
// For each proof, create targets, add constraints, and assign witnesses
|
||||||
|
for &leaf_index in leaf_indices.iter() {
|
||||||
|
// Build the circuit for each proof
|
||||||
|
let (mut targets, _root) = circuit_instance.build_circuit(&mut builder);
|
||||||
|
|
||||||
|
// Assign witnesses for each proof
|
||||||
|
circuit_instance.assign_witness(&mut pw, &mut targets, leaf_index)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the circuit
|
||||||
|
let data = builder.build::<C>();
|
||||||
|
|
||||||
|
Ok((data, pw))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn merkle_proof_benchmark(c: &mut Criterion) {
|
||||||
|
let mut group = c.benchmark_group("Merkle Proof Benchmark");
|
||||||
|
|
||||||
|
// Circuit parameters
|
||||||
|
const D: usize = 2;
|
||||||
|
type C = PoseidonGoldilocksConfig;
|
||||||
|
type F = <C as GenericConfig<D>>::F;
|
||||||
|
type H = PoseidonHash;
|
||||||
|
|
||||||
|
// Prepare the data that will be used in all steps
|
||||||
|
let N = 5; // Number of leaves to prove
|
||||||
|
let (tree, _leaves, leaf_indices, _proofs, _expected_root) = prepare_data::<F, H>(N).unwrap();
|
||||||
|
|
||||||
|
// Benchmark the circuit building
|
||||||
|
group.bench_function("Merkle Proof Build", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
build_circuit::<F, C, D, H>(&tree, &leaf_indices).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
// Build the circuit once to get the data for the proving and verifying steps
|
||||||
|
let (data, pw) = build_circuit::<F, C, D, H>(&tree, &leaf_indices).unwrap();
|
||||||
|
|
||||||
|
pretty_print!(
|
||||||
|
"circuit size: 2^{} gates",
|
||||||
|
data.common.degree_bits()
|
||||||
|
);
|
||||||
|
|
||||||
|
// Benchmark the proving time
|
||||||
|
group.bench_function("Merkle Proof Prove", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
let _proof_with_pis = data.prove(pw.clone()).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
// Generate the proof once for verification
|
||||||
|
let proof_with_pis = data.prove(pw.clone()).unwrap();
|
||||||
|
let verifier_data = data.verifier_data();
|
||||||
|
|
||||||
|
pretty_print!("proof size: {}", proof_with_pis.to_bytes().len());
|
||||||
|
|
||||||
|
// Benchmark the verification time
|
||||||
|
group.bench_function("Merkle Proof Verify", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
verifier_data.verify(proof_with_pis.clone()).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
group.finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
// criterion_group!(benches, merkle_proof_benchmark);
|
||||||
|
criterion_group!(name = benches;
|
||||||
|
config = Criterion::default().sample_size(10);
|
||||||
|
targets = merkle_proof_benchmark);
|
||||||
|
criterion_main!(benches);
|
|
@ -0,0 +1,129 @@
|
||||||
|
use criterion::{criterion_group, criterion_main, Criterion};
|
||||||
|
use anyhow::Result;
|
||||||
|
use std::time::{Duration, Instant};
|
||||||
|
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
|
||||||
|
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig};
|
||||||
|
use plonky2::iop::witness::PartialWitness;
|
||||||
|
use plonky2::hash::poseidon::PoseidonHash;
|
||||||
|
use plonky2::field::extension::Extendable;
|
||||||
|
use plonky2::hash::hash_types::RichField;
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||||
|
use codex_plonky2_circuits::circuits::params::TESTING_SLOT_INDEX;
|
||||||
|
use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
|
||||||
|
|
||||||
|
macro_rules! pretty_print {
|
||||||
|
($($arg:tt)*) => {
|
||||||
|
print!("\x1b[0;36mINFO ===========>\x1b[0m ");
|
||||||
|
println!($($arg)*);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hash function used
|
||||||
|
type HF = PoseidonHash;
|
||||||
|
|
||||||
|
fn prepare_data<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
C: GenericConfig<D, F = F>,
|
||||||
|
const D: usize,
|
||||||
|
H: Hasher<F> + AlgebraicHasher<F>,
|
||||||
|
>() -> Result<(
|
||||||
|
SampleCircuit<F, C, D, H>,
|
||||||
|
usize,
|
||||||
|
usize,
|
||||||
|
)> {
|
||||||
|
// Initialize the dataset tree with testing data
|
||||||
|
let mut dataset_t = SampleCircuit::<F,C,D,H>::new_for_testing();
|
||||||
|
|
||||||
|
let slot_index = TESTING_SLOT_INDEX;
|
||||||
|
let entropy = 123;
|
||||||
|
|
||||||
|
Ok((dataset_t, slot_index, entropy))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_circuit<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
C: GenericConfig<D, F = F>,
|
||||||
|
const D: usize,
|
||||||
|
H: Hasher<F> + AlgebraicHasher<F>,
|
||||||
|
>(
|
||||||
|
dataset_tree: &mut SampleCircuit<F, C, D, H>,
|
||||||
|
slot_index: usize,
|
||||||
|
entropy: usize,
|
||||||
|
// proofs: &[MerkleProof<F, H>],
|
||||||
|
) -> Result<(CircuitData<F, C, D>, PartialWitness<F>)>
|
||||||
|
{
|
||||||
|
// Create the circuit
|
||||||
|
let config = CircuitConfig::standard_recursion_config();
|
||||||
|
let mut builder = CircuitBuilder::<F, D>::new(config);
|
||||||
|
|
||||||
|
let mut targets = dataset_tree.sample_slot_circuit(&mut builder);
|
||||||
|
|
||||||
|
// Create a PartialWitness
|
||||||
|
let mut pw = PartialWitness::new();
|
||||||
|
dataset_tree.sample_slot_assign_witness(&mut pw, &mut targets,slot_index,entropy);
|
||||||
|
|
||||||
|
// Build the circuit
|
||||||
|
let data = builder.build::<C>();
|
||||||
|
|
||||||
|
Ok((data, pw))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn sampling_benchmark(c: &mut Criterion) {
|
||||||
|
let mut group = c.benchmark_group("Sampling Benchmark");
|
||||||
|
|
||||||
|
// Circuit parameters
|
||||||
|
const D: usize = 2;
|
||||||
|
type C = PoseidonGoldilocksConfig;
|
||||||
|
type F = <C as GenericConfig<D>>::F;
|
||||||
|
type H = PoseidonHash;
|
||||||
|
|
||||||
|
// Prepare the data that will be used in all steps
|
||||||
|
let (mut dataset_tree, slot_index, entropy) = prepare_data::<F, C, D, H>().unwrap();
|
||||||
|
|
||||||
|
// Benchmark the circuit building
|
||||||
|
group.bench_function("Single Cell Proof Build", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
build_circuit::<F, C, D, H>(&mut dataset_tree, slot_index, entropy).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
// Build the circuit
|
||||||
|
let (data, pw) = build_circuit::<F, C, D, H>(&mut dataset_tree, slot_index, entropy).unwrap();
|
||||||
|
|
||||||
|
pretty_print!(
|
||||||
|
"Circuit size: 2^{} gates",
|
||||||
|
data.common.degree_bits()
|
||||||
|
);
|
||||||
|
|
||||||
|
let start_time = Instant::now();
|
||||||
|
let proof_with_pis = data.prove(pw.clone()).unwrap();
|
||||||
|
println!("prove_time = {:?}", start_time.elapsed());
|
||||||
|
|
||||||
|
// Benchmark the proving time
|
||||||
|
group.bench_function("Single Cell Proof Prove", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
let _proof_with_pis = data.prove(pw.clone()).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
// Generate the proof
|
||||||
|
let proof_with_pis = data.prove(pw.clone()).unwrap();
|
||||||
|
let verifier_data = data.verifier_data();
|
||||||
|
|
||||||
|
pretty_print!("Proof size: {} bytes", proof_with_pis.to_bytes().len());
|
||||||
|
|
||||||
|
// Benchmark the verification time
|
||||||
|
group.bench_function("Single Cell Proof Verify", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
verifier_data.verify(proof_with_pis.clone()).unwrap();
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
group.finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
criterion_group!(name = benches;
|
||||||
|
config = Criterion::default().sample_size(10);
|
||||||
|
targets = sampling_benchmark);
|
||||||
|
criterion_main!(benches);
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,455 @@
|
||||||
|
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||||
|
use plonky2::plonk::config::{GenericConfig, Hasher};
|
||||||
|
use plonky2_field::extension::Extendable;
|
||||||
|
use plonky2_field::types::Field;
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
use codex_plonky2_circuits::circuits::params::{CircuitParams, HF};
|
||||||
|
use crate::params::Params;
|
||||||
|
use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le_padded};
|
||||||
|
use codex_plonky2_circuits::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
|
||||||
|
use codex_plonky2_circuits::circuits::sample_cells::{Cell, MerklePath, SampleCircuit, SampleCircuitInput};
|
||||||
|
use plonky2::iop::witness::PartialWitness;
|
||||||
|
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||||
|
use plonky2::plonk::circuit_data::CircuitConfig;
|
||||||
|
use crate::sponge::hash_n_with_padding;
|
||||||
|
|
||||||
|
/// generates input witness (SampleCircuitInput) from fake data
|
||||||
|
/// which can be later stored into json see json.rs
|
||||||
|
pub fn gen_witness<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
>(params: &Params) -> SampleCircuitInput<F,D>{
|
||||||
|
let dataset_t = DatasetTree::<F, D>::new_for_testing(¶ms);
|
||||||
|
|
||||||
|
let slot_index = params.testing_slot_index; // samples the specified slot
|
||||||
|
let entropy = params.entropy; // Use the entropy from Params
|
||||||
|
|
||||||
|
let proof = dataset_t.sample_slot(slot_index, entropy);
|
||||||
|
let slot_root = dataset_t.slot_trees[slot_index].tree.root().unwrap();
|
||||||
|
|
||||||
|
let mut slot_paths = vec![];
|
||||||
|
for i in 0..params.n_samples {
|
||||||
|
let path = proof.slot_proofs[i].path.clone();
|
||||||
|
let mp = MerklePath::<F,D>{
|
||||||
|
path,
|
||||||
|
};
|
||||||
|
slot_paths.push(mp);
|
||||||
|
}
|
||||||
|
|
||||||
|
SampleCircuitInput::<F, D> {
|
||||||
|
entropy: proof.entropy.elements.clone().to_vec(),
|
||||||
|
dataset_root: dataset_t.tree.root().unwrap(),
|
||||||
|
slot_index: proof.slot_index.clone(),
|
||||||
|
slot_root,
|
||||||
|
n_cells_per_slot: F::from_canonical_usize(params.n_cells),
|
||||||
|
n_slots_per_dataset: F::from_canonical_usize(params.n_slots),
|
||||||
|
slot_proof: proof.dataset_proof.path.clone(),
|
||||||
|
cell_data: proof.cell_data.clone(),
|
||||||
|
merkle_paths: slot_paths,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// verifies the witness.
|
||||||
|
/// this is non circuit version for sanity check
|
||||||
|
pub fn verify_witness<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
>(witness: SampleCircuitInput<F,D>, params: &Params) -> bool{
|
||||||
|
let slot_index = witness.slot_index.to_canonical_u64();
|
||||||
|
let slot_root = witness.slot_root.clone();
|
||||||
|
// check dataset level proof
|
||||||
|
let slot_proof = witness.slot_proof.clone();
|
||||||
|
let dataset_path_bits = usize_to_bits_le_padded(slot_index as usize, params.dataset_depth());
|
||||||
|
let last_index = params.n_slots - 1;
|
||||||
|
let dataset_last_bits = usize_to_bits_le_padded(last_index, params.dataset_depth());
|
||||||
|
let dataset_mask_bits = usize_to_bits_le_padded(last_index, params.dataset_depth()+1);
|
||||||
|
let reconstructed_slot_root = MerkleProof::<F,D>::reconstruct_root2(
|
||||||
|
slot_root,
|
||||||
|
dataset_path_bits,
|
||||||
|
dataset_last_bits,
|
||||||
|
slot_proof,
|
||||||
|
dataset_mask_bits,
|
||||||
|
params.max_slots.trailing_zeros() as usize,
|
||||||
|
).unwrap();
|
||||||
|
// assert reconstructed equals dataset root
|
||||||
|
assert_eq!(reconstructed_slot_root, witness.dataset_root.clone());
|
||||||
|
|
||||||
|
// check each sampled cell
|
||||||
|
|
||||||
|
// get the index for cell from H(slot_root|counter|entropy)
|
||||||
|
let mask_bits = usize_to_bits_le_padded(params.n_cells -1, params.max_depth);
|
||||||
|
for i in 0..params.n_samples {
|
||||||
|
let cell_index_bits = calculate_cell_index_bits(
|
||||||
|
&witness.entropy,
|
||||||
|
slot_root,
|
||||||
|
i + 1,
|
||||||
|
params.max_depth,
|
||||||
|
mask_bits.clone(),
|
||||||
|
);
|
||||||
|
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
|
||||||
|
|
||||||
|
let s_res = verify_cell_proof(&witness, ¶ms, cell_index, i);
|
||||||
|
if s_res.unwrap() == false {
|
||||||
|
println!("call {} is false", i);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verify the given proof for slot tree, checks equality with the given root
|
||||||
|
pub fn verify_cell_proof<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
>(witness: &SampleCircuitInput<F,D>, params: &Params, cell_index: usize, ctr: usize) -> anyhow::Result<bool> {
|
||||||
|
let mut block_path_bits = usize_to_bits_le_padded(cell_index, params.max_depth);
|
||||||
|
let last_index = params.n_cells - 1;
|
||||||
|
let mut block_last_bits = usize_to_bits_le_padded(last_index, params.max_depth);
|
||||||
|
|
||||||
|
let split_point = params.bot_depth();
|
||||||
|
|
||||||
|
let slot_last_bits = block_last_bits.split_off(split_point);
|
||||||
|
let slot_path_bits = block_path_bits.split_off(split_point);
|
||||||
|
|
||||||
|
// pub type HP = <PoseidonHash as Hasher<F>>::Permutation;
|
||||||
|
let leaf_hash = hash_n_with_padding::<F,D,HF>(&witness.cell_data[ctr].data);
|
||||||
|
// HF::hash_no_pad()
|
||||||
|
|
||||||
|
let mut block_path = witness.merkle_paths[ctr].path.clone();
|
||||||
|
let slot_path = block_path.split_off(split_point);
|
||||||
|
|
||||||
|
let mask_bits = usize_to_bits_le_padded(last_index, params.max_depth+1);
|
||||||
|
|
||||||
|
let block_res = MerkleProof::<F,D>::reconstruct_root2(
|
||||||
|
leaf_hash,
|
||||||
|
block_path_bits.clone(),
|
||||||
|
block_last_bits.clone(),
|
||||||
|
block_path,
|
||||||
|
mask_bits.clone(),
|
||||||
|
params.bot_depth(),
|
||||||
|
);
|
||||||
|
let reconstructed_root = MerkleProof::<F,D>::reconstruct_root2(
|
||||||
|
block_res.unwrap(),
|
||||||
|
slot_path_bits,
|
||||||
|
slot_last_bits,
|
||||||
|
slot_path,
|
||||||
|
mask_bits.clone(),
|
||||||
|
params.max_depth - params.bot_depth(),
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(reconstructed_root.unwrap() == witness.slot_root)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Create a new cell with random data, using the parameters from `Params`
|
||||||
|
pub fn new_random_cell<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
>(params: &Params) -> Cell<F,D> {
|
||||||
|
let data = (0..params.n_field_elems_per_cell())
|
||||||
|
.map(|_| F::rand())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
Cell::<F,D> {
|
||||||
|
data,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct SlotTree<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
> {
|
||||||
|
pub tree: MerkleTree<F, D>, // slot tree
|
||||||
|
pub block_trees: Vec<MerkleTree<F,D>>, // vec of block trees
|
||||||
|
pub cell_data: Vec<Cell<F, D>>, // cell data as field elements
|
||||||
|
pub params: Params, // parameters
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
> SlotTree<F, D> {
|
||||||
|
/// Create a slot tree with fake data, for testing only
|
||||||
|
pub fn new_default(params: &Params) -> Self {
|
||||||
|
// generate fake cell data
|
||||||
|
let cell_data = (0..params.n_cells)
|
||||||
|
.map(|_| new_random_cell(params))
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
Self::new(cell_data, params.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new slot tree with the supplied cell data and parameters
|
||||||
|
pub fn new(cells: Vec<Cell<F, D>>, params: Params) -> Self {
|
||||||
|
let leaves: Vec<HashOut<F>> = cells
|
||||||
|
.iter()
|
||||||
|
.map(|element| hash_n_with_padding::<F,D,HF>(&element.data))
|
||||||
|
.collect();
|
||||||
|
let zero = HashOut {
|
||||||
|
elements: [F::ZERO; 4],
|
||||||
|
};
|
||||||
|
let n_blocks = params.n_blocks_test();
|
||||||
|
let n_cells_in_blocks = params.n_cells_in_blocks();
|
||||||
|
|
||||||
|
let block_trees = (0..n_blocks)
|
||||||
|
.map(|i| {
|
||||||
|
let start = i * n_cells_in_blocks;
|
||||||
|
let end = (i + 1) * n_cells_in_blocks;
|
||||||
|
Self::get_block_tree(&leaves[start..end].to_vec())
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
let block_roots = block_trees
|
||||||
|
.iter()
|
||||||
|
.map(|t| t.root().unwrap())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
let slot_tree = MerkleTree::<F,D>::new(&block_roots, zero).unwrap();
|
||||||
|
Self {
|
||||||
|
tree: slot_tree,
|
||||||
|
block_trees,
|
||||||
|
cell_data: cells,
|
||||||
|
params,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generates a proof for the given leaf index
|
||||||
|
/// The path in the proof is a combined block and slot path to make up the full path
|
||||||
|
pub fn get_proof(&self, index: usize) -> MerkleProof<F,D> {
|
||||||
|
let block_index = index / self.params.n_cells_in_blocks();
|
||||||
|
let leaf_index = index % self.params.n_cells_in_blocks();
|
||||||
|
let block_proof = self.block_trees[block_index].get_proof(leaf_index).unwrap();
|
||||||
|
let slot_proof = self.tree.get_proof(block_index).unwrap();
|
||||||
|
|
||||||
|
// Combine the paths from the block and slot proofs
|
||||||
|
let mut combined_path = block_proof.path.clone();
|
||||||
|
combined_path.extend(slot_proof.path.clone());
|
||||||
|
|
||||||
|
MerkleProof::<F,D> {
|
||||||
|
index,
|
||||||
|
path: combined_path,
|
||||||
|
nleaves: self.cell_data.len(),
|
||||||
|
zero: block_proof.zero.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_block_tree(leaves: &Vec<HashOut<F>>) -> MerkleTree<F,D> {
|
||||||
|
let zero = HashOut {
|
||||||
|
elements: [F::ZERO; 4],
|
||||||
|
};
|
||||||
|
// Build the Merkle tree
|
||||||
|
let block_tree = MerkleTree::<F,D>::new(leaves, zero).unwrap();
|
||||||
|
block_tree
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ------ Dataset Tree --------
|
||||||
|
/// Dataset tree containing all slot trees
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct DatasetTree<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
> {
|
||||||
|
pub tree: MerkleTree<F,D>, // dataset tree
|
||||||
|
pub slot_trees: Vec<SlotTree<F, D>>, // vec of slot trees
|
||||||
|
pub params: Params, // parameters
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Dataset Merkle proof struct, containing the dataset proof and sampled proofs.
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct DatasetProof<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
> {
|
||||||
|
pub slot_index: F,
|
||||||
|
pub entropy: HashOut<F>,
|
||||||
|
pub dataset_proof: MerkleProof<F,D>, // proof for dataset level tree
|
||||||
|
pub slot_proofs: Vec<MerkleProof<F,D>>, // proofs for sampled slot
|
||||||
|
pub cell_data: Vec<Cell<F,D>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
> DatasetTree<F, D> {
|
||||||
|
/// Dataset tree with fake data, for testing only
|
||||||
|
pub fn new_default(params: &Params) -> Self {
|
||||||
|
let mut slot_trees = vec![];
|
||||||
|
let n_slots = 1 << params.dataset_depth_test();
|
||||||
|
for _ in 0..n_slots {
|
||||||
|
slot_trees.push(SlotTree::<F, D>::new_default(params));
|
||||||
|
}
|
||||||
|
Self::new(slot_trees, params.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create data for only the specified slot index in params
|
||||||
|
pub fn new_for_testing(params: &Params) -> Self {
|
||||||
|
let mut slot_trees = vec![];
|
||||||
|
// let n_slots = 1 << params.dataset_depth();
|
||||||
|
let n_slots = params.n_slots;
|
||||||
|
// zero hash
|
||||||
|
let zero = HashOut {
|
||||||
|
elements: [F::ZERO; 4],
|
||||||
|
};
|
||||||
|
let zero_slot = SlotTree::<F, D> {
|
||||||
|
tree: MerkleTree::<F,D>::new(&[zero.clone()], zero.clone()).unwrap(),
|
||||||
|
block_trees: vec![],
|
||||||
|
cell_data: vec![],
|
||||||
|
params: params.clone(),
|
||||||
|
};
|
||||||
|
for i in 0..n_slots {
|
||||||
|
if i == params.testing_slot_index {
|
||||||
|
slot_trees.push(SlotTree::<F, D>::new_default(params));
|
||||||
|
} else {
|
||||||
|
slot_trees.push(zero_slot.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// get the roots of slot trees
|
||||||
|
let slot_roots = slot_trees
|
||||||
|
.iter()
|
||||||
|
.map(|t| t.tree.root().unwrap())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
let dataset_tree = MerkleTree::<F,D>::new(&slot_roots, zero).unwrap();
|
||||||
|
Self {
|
||||||
|
tree: dataset_tree,
|
||||||
|
slot_trees,
|
||||||
|
params: params.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Same as default but with supplied slot trees
|
||||||
|
pub fn new(slot_trees: Vec<SlotTree<F, D>>, params: Params) -> Self {
|
||||||
|
// get the roots of slot trees
|
||||||
|
let slot_roots = slot_trees
|
||||||
|
.iter()
|
||||||
|
.map(|t| t.tree.root().unwrap())
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
// zero hash
|
||||||
|
let zero = HashOut {
|
||||||
|
elements: [F::ZERO; 4],
|
||||||
|
};
|
||||||
|
let dataset_tree = MerkleTree::<F,D>::new(&slot_roots, zero).unwrap();
|
||||||
|
Self {
|
||||||
|
tree: dataset_tree,
|
||||||
|
slot_trees,
|
||||||
|
params,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generates a proof for the given slot index
|
||||||
|
/// Also takes entropy so it can use it to sample the slot
|
||||||
|
/// note: proofs are padded based on the params in self
|
||||||
|
pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F,D> {
|
||||||
|
let mut dataset_proof = self.tree.get_proof(index).unwrap();
|
||||||
|
Self::pad_proof(&mut dataset_proof, self.params.dataset_depth());
|
||||||
|
|
||||||
|
let slot = &self.slot_trees[index];
|
||||||
|
let slot_root = slot.tree.root().unwrap();
|
||||||
|
let mut slot_proofs = vec![];
|
||||||
|
let mut cell_data = vec![];
|
||||||
|
let entropy_field = F::from_canonical_u64(entropy as u64);
|
||||||
|
let mut entropy_as_digest = HashOut::<F>::ZERO;
|
||||||
|
entropy_as_digest.elements[0] = entropy_field;
|
||||||
|
|
||||||
|
// get the index for cell from H(slot_root|counter|entropy)
|
||||||
|
let mask_bits = usize_to_bits_le_padded(self.params.n_cells-1, self.params.max_depth+1);
|
||||||
|
for i in 0..self.params.n_samples {
|
||||||
|
let cell_index_bits = calculate_cell_index_bits(
|
||||||
|
&entropy_as_digest.elements.to_vec(),
|
||||||
|
slot_root,
|
||||||
|
i + 1,
|
||||||
|
self.params.max_depth,
|
||||||
|
mask_bits.clone()
|
||||||
|
);
|
||||||
|
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
|
||||||
|
let mut s_proof = slot.get_proof(cell_index);
|
||||||
|
Self::pad_proof(&mut s_proof, self.params.max_depth);
|
||||||
|
slot_proofs.push(s_proof);
|
||||||
|
let data_i = slot.cell_data[cell_index].data.clone();
|
||||||
|
let cell_i = Cell::<F,D>{
|
||||||
|
data: data_i
|
||||||
|
};
|
||||||
|
cell_data.push(cell_i);
|
||||||
|
}
|
||||||
|
|
||||||
|
DatasetProof {
|
||||||
|
slot_index: F::from_canonical_u64(index as u64),
|
||||||
|
entropy: entropy_as_digest,
|
||||||
|
dataset_proof,
|
||||||
|
slot_proofs,
|
||||||
|
cell_data,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/// pad the proof with 0s until max_depth
|
||||||
|
pub fn pad_proof(merkle_proof: &mut MerkleProof<F,D>, max_depth: usize){
|
||||||
|
for i in merkle_proof.path.len()..max_depth{
|
||||||
|
merkle_proof.path.push(HashOut::<F>::ZERO);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::time::Instant;
|
||||||
|
use super::*;
|
||||||
|
use plonky2::plonk::circuit_data::CircuitConfig;
|
||||||
|
use plonky2::plonk::config::GenericConfig;
|
||||||
|
use plonky2::iop::witness::PartialWitness;
|
||||||
|
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||||
|
use codex_plonky2_circuits::circuits::params::CircuitParams;
|
||||||
|
use codex_plonky2_circuits::circuits::sample_cells::{MerklePath, SampleCircuit, SampleCircuitInput};
|
||||||
|
use crate::params::{C, D, F};
|
||||||
|
|
||||||
|
// Test sample cells (non-circuit)
|
||||||
|
#[test]
|
||||||
|
fn test_gen_verify_proof(){
|
||||||
|
let params = Params::default();
|
||||||
|
let w = gen_witness::<F,D>(¶ms);
|
||||||
|
assert!(verify_witness::<F,D>(w,¶ms));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test sample cells in-circuit for a selected slot
|
||||||
|
#[test]
|
||||||
|
fn test_proof_in_circuit() -> anyhow::Result<()> {
|
||||||
|
// get witness
|
||||||
|
let params = Params::default();
|
||||||
|
let witness = gen_witness::<F,D>(¶ms);
|
||||||
|
|
||||||
|
// Create the circuit
|
||||||
|
let config = CircuitConfig::standard_recursion_config();
|
||||||
|
let mut builder = CircuitBuilder::<F, D>::new(config);
|
||||||
|
|
||||||
|
let circuit_params = CircuitParams {
|
||||||
|
max_depth: params.max_depth,
|
||||||
|
max_log2_n_slots: params.dataset_depth(),
|
||||||
|
block_tree_depth: params.bot_depth(),
|
||||||
|
n_field_elems_per_cell: params.n_field_elems_per_cell(),
|
||||||
|
n_samples: params.n_samples,
|
||||||
|
};
|
||||||
|
|
||||||
|
// build the circuit
|
||||||
|
let circ = SampleCircuit::new(circuit_params.clone());
|
||||||
|
let mut targets = circ.sample_slot_circuit(&mut builder);
|
||||||
|
|
||||||
|
// Create a PartialWitness and assign
|
||||||
|
let mut pw = PartialWitness::new();
|
||||||
|
|
||||||
|
// assign a witness
|
||||||
|
circ.sample_slot_assign_witness(&mut pw, &mut targets, witness);
|
||||||
|
|
||||||
|
// Build the circuit
|
||||||
|
let data = builder.build::<C>();
|
||||||
|
println!("circuit size = {:?}", data.common.degree_bits());
|
||||||
|
|
||||||
|
// Prove the circuit with the assigned witness
|
||||||
|
let start_time = Instant::now();
|
||||||
|
let proof_with_pis = data.prove(pw)?;
|
||||||
|
println!("prove_time = {:?}", start_time.elapsed());
|
||||||
|
|
||||||
|
// Verify the proof
|
||||||
|
let verifier_data = data.verifier_data();
|
||||||
|
assert!(
|
||||||
|
verifier_data.verify(proof_with_pis).is_ok(),
|
||||||
|
"Merkle proof verification failed"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,404 @@
|
||||||
|
use anyhow::{anyhow, Error, Result};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::fs::File;
|
||||||
|
use std::io::{BufReader, Write};
|
||||||
|
use crate::gen_input::{DatasetTree, gen_witness};
|
||||||
|
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||||
|
use plonky2::plonk::config::{GenericConfig, Hasher};
|
||||||
|
use plonky2_field::extension::Extendable;
|
||||||
|
use plonky2_field::types::Field;
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
use codex_plonky2_circuits::circuits::sample_cells::{Cell, MerklePath, SampleCircuitInput};
|
||||||
|
use crate::params::Params;
|
||||||
|
|
||||||
|
pub fn export_witness_to_json<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||||
|
const D: usize,
|
||||||
|
> (witness :SampleCircuitInput<F, D>, filename: &str) -> Result<()>{
|
||||||
|
// Convert the witness to a serializable format
|
||||||
|
let serializable_witness = SerializableWitness::from_witness(&witness);
|
||||||
|
|
||||||
|
// Serialize to JSON
|
||||||
|
let json_data = serde_json::to_string_pretty(&serializable_witness)?;
|
||||||
|
|
||||||
|
// Write to file
|
||||||
|
let mut file = File::create(filename)?;
|
||||||
|
file.write_all(json_data.as_bytes())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Function to generate witness and export to JSON
|
||||||
|
pub fn generate_and_export_witness_to_json<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||||
|
const D: usize,
|
||||||
|
>( params: &Params, filename: &str) -> anyhow::Result<()> {
|
||||||
|
|
||||||
|
let witness = gen_witness::<F,D>(params);
|
||||||
|
|
||||||
|
export_witness_to_json(witness, filename)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Serializable versions of the witness
|
||||||
|
#[derive(Serialize, Deserialize)]
|
||||||
|
struct SerializableWitness<
|
||||||
|
> {
|
||||||
|
dataSetRoot: Vec<String>,
|
||||||
|
entropy: Vec<String>,
|
||||||
|
nCellsPerSlot: usize,
|
||||||
|
nSlotsPerDataSet: usize,
|
||||||
|
slotIndex: u64,
|
||||||
|
slotRoot: Vec<String>,
|
||||||
|
slotProof: Vec<String>,
|
||||||
|
cellData: Vec<Vec<String>>,
|
||||||
|
merklePaths: Vec<Vec<String>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<
|
||||||
|
> SerializableWitness{
|
||||||
|
/// from the witness to serializable witness
|
||||||
|
pub fn from_witness<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||||
|
const D: usize,
|
||||||
|
>(witness: &SampleCircuitInput<F, D>) -> Self {
|
||||||
|
SerializableWitness {
|
||||||
|
dataSetRoot: witness
|
||||||
|
.dataset_root
|
||||||
|
.elements
|
||||||
|
.iter()
|
||||||
|
.map(|e| e.to_canonical_u64().to_string())
|
||||||
|
.collect(),
|
||||||
|
entropy: witness
|
||||||
|
.entropy
|
||||||
|
.iter()
|
||||||
|
.map(|e| e.to_canonical_u64().to_string())
|
||||||
|
.collect(),
|
||||||
|
nCellsPerSlot: witness.n_cells_per_slot.to_canonical_u64() as usize,
|
||||||
|
nSlotsPerDataSet: witness.n_slots_per_dataset.to_canonical_u64() as usize,
|
||||||
|
slotIndex: witness.slot_index.to_canonical_u64(),
|
||||||
|
slotRoot: witness
|
||||||
|
.slot_root
|
||||||
|
.elements
|
||||||
|
.iter()
|
||||||
|
.map(|e| e.to_canonical_u64().to_string())
|
||||||
|
.collect(),
|
||||||
|
slotProof: witness
|
||||||
|
.slot_proof
|
||||||
|
.iter()
|
||||||
|
.flat_map(|hash| hash.elements.iter())
|
||||||
|
.map(|e| e.to_canonical_u64().to_string())
|
||||||
|
.collect(),
|
||||||
|
cellData: witness
|
||||||
|
.cell_data
|
||||||
|
.iter()
|
||||||
|
.map(|data_vec| {
|
||||||
|
data_vec.data
|
||||||
|
.iter()
|
||||||
|
.map(|e| e.to_canonical_u64().to_string())
|
||||||
|
.collect()
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
merklePaths: witness
|
||||||
|
.merkle_paths
|
||||||
|
.iter()
|
||||||
|
.map(|path| {
|
||||||
|
path.path.iter()
|
||||||
|
.flat_map(|hash| hash.elements.iter())
|
||||||
|
.map(|e| e.to_canonical_u64().to_string())
|
||||||
|
.collect()
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<> SerializableWitness {
|
||||||
|
/// from serializable witness to witness
|
||||||
|
pub fn to_witness<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize
|
||||||
|
>(&self) -> Result<SampleCircuitInput<F, D>> {
|
||||||
|
// Convert entropy
|
||||||
|
let entropy = self
|
||||||
|
.entropy
|
||||||
|
.iter()
|
||||||
|
.map(|s| -> Result<F, Error> {
|
||||||
|
let n = s.parse::<u64>()?;
|
||||||
|
Ok(F::from_canonical_u64(n))
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<F>, Error>>()?;
|
||||||
|
|
||||||
|
// Convert dataset_root
|
||||||
|
let dataset_root_elements = self
|
||||||
|
.dataSetRoot
|
||||||
|
.iter()
|
||||||
|
.map(|s| -> Result<F, Error> {
|
||||||
|
let n = s.parse::<u64>()?;
|
||||||
|
Ok(F::from_canonical_u64(n))
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<F>, Error>>()?;
|
||||||
|
let dataset_root = HashOut {
|
||||||
|
elements: dataset_root_elements
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| anyhow!("Invalid dataset_root length"))?,
|
||||||
|
};
|
||||||
|
|
||||||
|
// slot_index
|
||||||
|
let slot_index = F::from_canonical_u64(self.slotIndex);
|
||||||
|
|
||||||
|
// slot_root
|
||||||
|
let slot_root_elements = self
|
||||||
|
.slotRoot
|
||||||
|
.iter()
|
||||||
|
.map(|s| -> Result<F, Error> {
|
||||||
|
let n = s.parse::<u64>()?;
|
||||||
|
Ok(F::from_canonical_u64(n))
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<F>, Error>>()?;
|
||||||
|
let slot_root = HashOut {
|
||||||
|
elements: slot_root_elements
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| anyhow!("Invalid slot_root length"))?,
|
||||||
|
};
|
||||||
|
|
||||||
|
// n_cells_per_slot
|
||||||
|
let n_cells_per_slot = F::from_canonical_usize(self.nCellsPerSlot);
|
||||||
|
|
||||||
|
// n_slots_per_dataset
|
||||||
|
let n_slots_per_dataset = F::from_canonical_usize(self.nSlotsPerDataSet);
|
||||||
|
|
||||||
|
// slot_proof
|
||||||
|
let slot_proof_elements = self
|
||||||
|
.slotProof
|
||||||
|
.iter()
|
||||||
|
.map(|s| -> Result<F, Error> {
|
||||||
|
let n = s.parse::<u64>()?;
|
||||||
|
Ok(F::from_canonical_u64(n))
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<F>, Error>>()?;
|
||||||
|
if slot_proof_elements.len() % 4 != 0 {
|
||||||
|
return Err(anyhow!("Invalid slot_proof length"));
|
||||||
|
}
|
||||||
|
let slot_proof = slot_proof_elements
|
||||||
|
.chunks(4)
|
||||||
|
.map(|chunk| -> Result<HashOut<F>, Error> {
|
||||||
|
let elements: [F; 4] = chunk
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| anyhow!("Invalid chunk length"))?;
|
||||||
|
Ok(HashOut { elements })
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<HashOut<F>>, Error>>()?;
|
||||||
|
|
||||||
|
// cell_data
|
||||||
|
let cell_data = self
|
||||||
|
.cellData
|
||||||
|
.iter()
|
||||||
|
.map(|vec_of_strings| -> Result<Cell<F,D>, Error> {
|
||||||
|
let cell = vec_of_strings
|
||||||
|
.iter()
|
||||||
|
.map(|s| -> Result<F, Error> {
|
||||||
|
let n = s.parse::<u64>()?;
|
||||||
|
Ok(F::from_canonical_u64(n))
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<F>, Error>>();
|
||||||
|
Ok(Cell::<F,D>{
|
||||||
|
data: cell.unwrap(),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<Cell<F,D>>, Error>>()?;
|
||||||
|
|
||||||
|
// merkle_paths
|
||||||
|
let merkle_paths = self
|
||||||
|
.merklePaths
|
||||||
|
.iter()
|
||||||
|
.map(|path_strings| -> Result<MerklePath<F,D>, Error> {
|
||||||
|
let path_elements = path_strings
|
||||||
|
.iter()
|
||||||
|
.map(|s| -> Result<F, Error> {
|
||||||
|
let n = s.parse::<u64>()?;
|
||||||
|
Ok(F::from_canonical_u64(n))
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<F>, Error>>()?;
|
||||||
|
|
||||||
|
if path_elements.len() % 4 != 0 {
|
||||||
|
return Err(anyhow!("Invalid merkle path length"));
|
||||||
|
}
|
||||||
|
|
||||||
|
let path = path_elements
|
||||||
|
.chunks(4)
|
||||||
|
.map(|chunk| -> Result<HashOut<F>, Error> {
|
||||||
|
let elements: [F; 4] = chunk
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| anyhow!("Invalid chunk length"))?;
|
||||||
|
Ok(HashOut { elements })
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<HashOut<F>>, Error>>()?;
|
||||||
|
|
||||||
|
let mp = MerklePath::<F,D>{
|
||||||
|
path,
|
||||||
|
};
|
||||||
|
Ok(mp)
|
||||||
|
})
|
||||||
|
.collect::<Result<Vec<MerklePath<F,D>>, Error>>()?;
|
||||||
|
|
||||||
|
Ok(SampleCircuitInput {
|
||||||
|
entropy,
|
||||||
|
dataset_root,
|
||||||
|
slot_index,
|
||||||
|
slot_root,
|
||||||
|
n_cells_per_slot,
|
||||||
|
n_slots_per_dataset,
|
||||||
|
slot_proof,
|
||||||
|
cell_data,
|
||||||
|
merkle_paths,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// reads the json file, converts it to witness (SampleCircuitInput) and returns it
|
||||||
|
pub fn import_witness_from_json<F: RichField + Extendable<D> + Poseidon2, const D: usize>(
|
||||||
|
filename: &str,
|
||||||
|
) -> Result<SampleCircuitInput<F, D>> {
|
||||||
|
let file = File::open(filename)?;
|
||||||
|
let reader = BufReader::new(file);
|
||||||
|
let serializable_witness: SerializableWitness = serde_json::from_reader(reader)?;
|
||||||
|
|
||||||
|
let witness = serializable_witness.to_witness()?;
|
||||||
|
Ok(witness)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::params::{BOT_DEPTH, C, D, F, MAX_DEPTH, N_CELLS};
|
||||||
|
use std::fs;
|
||||||
|
use std::time::Instant;
|
||||||
|
use codex_plonky2_circuits::circuits::params::{CircuitParams, HF};
|
||||||
|
use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
|
||||||
|
use codex_plonky2_circuits::merkle_tree::merkle_safe::MerkleProof;
|
||||||
|
use plonky2::iop::witness::PartialWitness;
|
||||||
|
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||||
|
use plonky2::plonk::circuit_data::CircuitConfig;
|
||||||
|
use crate::gen_input::verify_witness;
|
||||||
|
use crate::sponge::hash_n_with_padding;
|
||||||
|
use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le_padded};
|
||||||
|
|
||||||
|
// Test to generate the JSON file
|
||||||
|
#[test]
|
||||||
|
fn test_export_witness_to_json() -> anyhow::Result<()> {
|
||||||
|
// Create Params
|
||||||
|
let params = Params::default();
|
||||||
|
// Export the witness to JSON
|
||||||
|
generate_and_export_witness_to_json::<F,D>(¶ms, "input.json")?;
|
||||||
|
|
||||||
|
println!("Witness exported to input.json");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_import_witness_from_json() -> anyhow::Result<()> {
|
||||||
|
// Import the witness from the JSON file
|
||||||
|
// NOTE: MAKE SURE THE FILE EXISTS
|
||||||
|
let witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
|
||||||
|
println!("Witness imported successfully");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// export the witness and then import it and checks equality
|
||||||
|
#[test]
|
||||||
|
fn test_export_import_witness() -> anyhow::Result<()> {
|
||||||
|
// Create Params instance
|
||||||
|
let params = Params::default();
|
||||||
|
|
||||||
|
// Export the witness to JSON
|
||||||
|
let original_witness = gen_witness(¶ms);
|
||||||
|
export_witness_to_json(original_witness.clone(), "input.json")?;
|
||||||
|
println!("Witness exported to input.json");
|
||||||
|
|
||||||
|
// Import the witness from JSON
|
||||||
|
let imported_witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
|
||||||
|
println!("Witness imported from input.json");
|
||||||
|
|
||||||
|
// Compare the original and imported witnesses
|
||||||
|
assert_eq!(original_witness, imported_witness, "Witnesses are not equal");
|
||||||
|
|
||||||
|
// cleanup: Remove the generated JSON file
|
||||||
|
fs::remove_file("input.json")?;
|
||||||
|
|
||||||
|
println!("Test passed: Original and imported witnesses are equal.");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// reads the json input and runs the circuit
|
||||||
|
#[test]
|
||||||
|
fn test_json_witness_circuit() -> anyhow::Result<()> {
|
||||||
|
let params = Params::default();
|
||||||
|
|
||||||
|
// Create the circuit
|
||||||
|
let config = CircuitConfig::standard_recursion_config();
|
||||||
|
let mut builder = CircuitBuilder::<F, D>::new(config);
|
||||||
|
|
||||||
|
let circuit_params = CircuitParams {
|
||||||
|
max_depth: params.max_depth,
|
||||||
|
max_log2_n_slots: params.dataset_depth(),
|
||||||
|
block_tree_depth: params.bot_depth(),
|
||||||
|
n_field_elems_per_cell: params.n_field_elems_per_cell(),
|
||||||
|
n_samples: params.n_samples,
|
||||||
|
};
|
||||||
|
let circ = SampleCircuit::new(circuit_params.clone());
|
||||||
|
let mut targets = circ.sample_slot_circuit(&mut builder);
|
||||||
|
|
||||||
|
// Create a PartialWitness and assign
|
||||||
|
let mut pw = PartialWitness::new();
|
||||||
|
|
||||||
|
// Import the witness from JSON
|
||||||
|
let imported_witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
|
||||||
|
println!("Witness imported from input.json");
|
||||||
|
|
||||||
|
circ.sample_slot_assign_witness(&mut pw, &mut targets, imported_witness);
|
||||||
|
|
||||||
|
// Build the circuit
|
||||||
|
let data = builder.build::<C>();
|
||||||
|
println!("circuit size = {:?}", data.common.degree_bits());
|
||||||
|
|
||||||
|
// Prove the circuit with the assigned witness
|
||||||
|
let start_time = Instant::now();
|
||||||
|
let proof_with_pis = data.prove(pw)?;
|
||||||
|
println!("prove_time = {:?}", start_time.elapsed());
|
||||||
|
|
||||||
|
// Verify the proof
|
||||||
|
let verifier_data = data.verifier_data();
|
||||||
|
assert!(
|
||||||
|
verifier_data.verify(proof_with_pis).is_ok(),
|
||||||
|
"Merkle proof verification failed"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// reads the json input and verify (non-circuit)
|
||||||
|
// NOTE: expects the json input proof uses the default params
|
||||||
|
#[test]
|
||||||
|
fn test_json_witness() -> anyhow::Result<()> {
|
||||||
|
let params = Params::default();
|
||||||
|
|
||||||
|
// Import the witness from JSON
|
||||||
|
let imported_witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
|
||||||
|
println!("Witness imported from input.json");
|
||||||
|
|
||||||
|
// Verify the proof
|
||||||
|
let ver = verify_witness(imported_witness, ¶ms);
|
||||||
|
assert!(
|
||||||
|
ver,
|
||||||
|
"Merkle proof verification failed"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,7 @@
|
||||||
|
|
||||||
|
pub mod gen_input;
|
||||||
|
pub mod params;
|
||||||
|
pub mod utils;
|
||||||
|
pub mod json;
|
||||||
|
pub mod tests;
|
||||||
|
mod sponge;
|
|
@ -0,0 +1,226 @@
|
||||||
|
// params for generating input for proof circuit
|
||||||
|
|
||||||
|
use plonky2::hash::poseidon::PoseidonHash;
|
||||||
|
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
|
||||||
|
use std::env;
|
||||||
|
use anyhow::{Result, Context};
|
||||||
|
use plonky2_poseidon2::config::Poseidon2GoldilocksConfig;
|
||||||
|
|
||||||
|
// fake input params
|
||||||
|
|
||||||
|
// types
|
||||||
|
pub const D: usize = 2;
|
||||||
|
pub type C = Poseidon2GoldilocksConfig;
|
||||||
|
pub type F = <C as GenericConfig<D>>::F; // this is the goldilocks field
|
||||||
|
// pub type H = PoseidonHash;
|
||||||
|
// pub type HP = <PoseidonHash as plonky2::plonk::config::Hasher<F>>::Permutation;
|
||||||
|
|
||||||
|
// hardcoded params for generating proof input
|
||||||
|
pub const MAX_DEPTH: usize = 32; // depth of big tree (slot tree depth, includes block tree depth)
|
||||||
|
pub const MAX_SLOTS: usize = 256; // maximum number of slots
|
||||||
|
pub const CELL_SIZE: usize = 2048; // cell size in bytes
|
||||||
|
pub const BLOCK_SIZE: usize = 65536; // block size in bytes
|
||||||
|
pub const N_SAMPLES: usize = 5; // number of samples to prove
|
||||||
|
|
||||||
|
pub const ENTROPY: usize = 1234567; // external randomness
|
||||||
|
pub const SEED: usize = 12345; // seed for creating fake data TODO: not used now
|
||||||
|
|
||||||
|
pub const N_SLOTS: usize = 16; // number of slots in the dataset
|
||||||
|
pub const TESTING_SLOT_INDEX: usize = 3; // the index of the slot to be sampled
|
||||||
|
pub const N_CELLS: usize = 512; // number of cells in each slot
|
||||||
|
|
||||||
|
/// Params struct
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Params {
|
||||||
|
pub max_depth: usize,
|
||||||
|
pub max_slots: usize,
|
||||||
|
pub cell_size: usize,
|
||||||
|
pub block_size: usize,
|
||||||
|
pub n_samples: usize,
|
||||||
|
pub entropy: usize,
|
||||||
|
pub seed: usize,
|
||||||
|
pub n_slots: usize,
|
||||||
|
pub testing_slot_index: usize,
|
||||||
|
pub n_cells: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Implement the Default trait for Params using the hardcoded constants
|
||||||
|
impl Default for Params {
|
||||||
|
fn default() -> Self {
|
||||||
|
Params {
|
||||||
|
max_depth: MAX_DEPTH,
|
||||||
|
max_slots: MAX_SLOTS,
|
||||||
|
cell_size: CELL_SIZE,
|
||||||
|
block_size: BLOCK_SIZE,
|
||||||
|
n_samples: N_SAMPLES,
|
||||||
|
entropy: ENTROPY,
|
||||||
|
seed: SEED,
|
||||||
|
n_slots: N_SLOTS,
|
||||||
|
testing_slot_index: TESTING_SLOT_INDEX,
|
||||||
|
n_cells: N_CELLS,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Implement a new function to create Params with custom values
|
||||||
|
impl Params {
|
||||||
|
pub fn new(
|
||||||
|
max_depth: usize,
|
||||||
|
max_slots: usize,
|
||||||
|
cell_size: usize,
|
||||||
|
block_size: usize,
|
||||||
|
n_samples: usize,
|
||||||
|
entropy: usize,
|
||||||
|
seed: usize,
|
||||||
|
n_slots: usize,
|
||||||
|
testing_slot_index: usize,
|
||||||
|
n_cells: usize,
|
||||||
|
) -> Self {
|
||||||
|
Params {
|
||||||
|
max_depth,
|
||||||
|
max_slots,
|
||||||
|
cell_size,
|
||||||
|
block_size,
|
||||||
|
n_samples,
|
||||||
|
entropy,
|
||||||
|
seed,
|
||||||
|
n_slots,
|
||||||
|
testing_slot_index,
|
||||||
|
n_cells,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// GOLDILOCKS_F_SIZE
|
||||||
|
pub fn goldilocks_f_size(&self) -> usize {
|
||||||
|
64
|
||||||
|
}
|
||||||
|
|
||||||
|
// N_FIELD_ELEMS_PER_CELL
|
||||||
|
pub fn n_field_elems_per_cell(&self) -> usize {
|
||||||
|
self.cell_size * 8 / self.goldilocks_f_size()
|
||||||
|
}
|
||||||
|
|
||||||
|
// BOT_DEPTH
|
||||||
|
pub fn bot_depth(&self) -> usize {
|
||||||
|
(self.block_size / self.cell_size).trailing_zeros() as usize
|
||||||
|
}
|
||||||
|
|
||||||
|
// N_CELLS_IN_BLOCKS
|
||||||
|
pub fn n_cells_in_blocks(&self) -> usize {
|
||||||
|
1 << self.bot_depth()
|
||||||
|
}
|
||||||
|
|
||||||
|
// N_BLOCKS
|
||||||
|
pub fn n_blocks(&self) -> usize {
|
||||||
|
1 << (self.max_depth - self.bot_depth())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Depth of test input
|
||||||
|
pub fn depth_test(&self) -> usize {
|
||||||
|
self.n_cells.trailing_zeros() as usize
|
||||||
|
}
|
||||||
|
|
||||||
|
// N_BLOCKS for the test input
|
||||||
|
pub fn n_blocks_test(&self) -> usize {
|
||||||
|
1 << (self.depth_test() - self.bot_depth())
|
||||||
|
}
|
||||||
|
|
||||||
|
// DATASET_DEPTH
|
||||||
|
pub fn dataset_depth(&self) -> usize {
|
||||||
|
self.max_slots.trailing_zeros() as usize
|
||||||
|
}
|
||||||
|
|
||||||
|
// DATASET_DEPTH for test
|
||||||
|
pub fn dataset_depth_test(&self) -> usize {
|
||||||
|
self.n_slots.trailing_zeros() as usize
|
||||||
|
}
|
||||||
|
|
||||||
|
// n_cells_per_slot (2^max_depth)
|
||||||
|
pub fn n_cells_per_slot(&self) -> usize {
|
||||||
|
1 << self.max_depth
|
||||||
|
}
|
||||||
|
|
||||||
|
// n_slots_per_dataset (2^dataset_depth)
|
||||||
|
pub fn n_slots_per_dataset(&self) -> usize {
|
||||||
|
1 << self.dataset_depth()
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// computed constants
|
||||||
|
pub const GOLDILOCKS_F_SIZE: usize = 64;
|
||||||
|
pub const N_FIELD_ELEMS_PER_CELL: usize = CELL_SIZE * 8 / GOLDILOCKS_F_SIZE;
|
||||||
|
pub const BOT_DEPTH: usize = (BLOCK_SIZE/CELL_SIZE).ilog2() as usize; // block tree depth
|
||||||
|
|
||||||
|
pub const N_CELLS_IN_BLOCKS: usize = 1<< BOT_DEPTH; //2^BOT_DEPTH
|
||||||
|
pub const N_BLOCKS: usize = 1<<(MAX_DEPTH - BOT_DEPTH); // 2^(MAX_DEPTH - BOT_DEPTH)
|
||||||
|
|
||||||
|
pub const DATASET_DEPTH: usize = MAX_SLOTS.ilog2() as usize;
|
||||||
|
|
||||||
|
/// load params from env
|
||||||
|
impl Params {
|
||||||
|
pub fn from_env() -> Result<Self> {
|
||||||
|
let max_depth = env::var("MAXDEPTH")
|
||||||
|
.context("MAXDEPTH not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid MAXDEPTH")?;
|
||||||
|
|
||||||
|
let max_slots = env::var("MAXSLOTS")
|
||||||
|
.context("MAXSLOTS not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid MAXSLOTS")?;
|
||||||
|
|
||||||
|
let cell_size = env::var("CELLSIZE")
|
||||||
|
.context("CELLSIZE not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid CELLSIZE")?;
|
||||||
|
|
||||||
|
let block_size = env::var("BLOCKSIZE")
|
||||||
|
.context("BLOCKSIZE not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid BLOCKSIZE")?;
|
||||||
|
|
||||||
|
let n_samples = env::var("NSAMPLES")
|
||||||
|
.context("NSAMPLES not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid NSAMPLES")?;
|
||||||
|
|
||||||
|
let entropy = env::var("ENTROPY")
|
||||||
|
.context("ENTROPY not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid ENTROPY")?;
|
||||||
|
|
||||||
|
let seed = env::var("SEED")
|
||||||
|
.context("SEED not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid SEED")?;
|
||||||
|
|
||||||
|
let n_slots = env::var("NSLOTS")
|
||||||
|
.context("NSLOTS not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid NSLOTS")?;
|
||||||
|
|
||||||
|
let testing_slot_index = env::var("SLOTINDEX")
|
||||||
|
.context("SLOTINDEX not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid SLOTINDEX")?;
|
||||||
|
|
||||||
|
let n_cells = env::var("NCELLS")
|
||||||
|
.context("NCELLS not set")?
|
||||||
|
.parse::<usize>()
|
||||||
|
.context("Invalid NCELLS")?;
|
||||||
|
|
||||||
|
Ok(Params {
|
||||||
|
max_depth,
|
||||||
|
max_slots,
|
||||||
|
cell_size,
|
||||||
|
block_size,
|
||||||
|
n_samples,
|
||||||
|
entropy,
|
||||||
|
seed,
|
||||||
|
n_slots,
|
||||||
|
testing_slot_index,
|
||||||
|
n_cells,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,224 @@
|
||||||
|
use plonky2::hash::hash_types::{HashOut, NUM_HASH_OUT_ELTS, RichField};
|
||||||
|
use plonky2_field::extension::Extendable;
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
use plonky2::plonk::config::Hasher;
|
||||||
|
use plonky2::hash::hashing::PlonkyPermutation;
|
||||||
|
use plonky2_field::types::Field;
|
||||||
|
|
||||||
|
/// sponge function similar to the in-circuit one
|
||||||
|
/// used here for testing / sanity check
|
||||||
|
pub fn hash_n_with_padding<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
H: Hasher<F>
|
||||||
|
>(
|
||||||
|
inputs: &[F],
|
||||||
|
) -> HashOut<F>{
|
||||||
|
HashOut::<F>::from_vec(hash_n_to_m_with_padding::<F,D,H::Permutation>(inputs, NUM_HASH_OUT_ELTS))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn hash_n_to_m_with_padding<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
P: PlonkyPermutation<F>
|
||||||
|
>(
|
||||||
|
inputs: &[F],
|
||||||
|
num_outputs: usize,
|
||||||
|
) -> Vec<F> {
|
||||||
|
let rate = P::RATE;
|
||||||
|
let width = P::WIDTH; // rate + capacity
|
||||||
|
let zero = F::ZERO;
|
||||||
|
let one = F::ONE;
|
||||||
|
let mut perm = P::new(core::iter::repeat(zero).take(width));
|
||||||
|
|
||||||
|
// Set the domain separator at index 8
|
||||||
|
let domsep_value = F::from_canonical_u64(rate as u64 + 256 * 12 + 65536 * 63);
|
||||||
|
perm.set_elt(domsep_value, 8);
|
||||||
|
|
||||||
|
let N = inputs.len();
|
||||||
|
let num_chunks = (N + rate) / rate; // Calculate number of chunks with 10* padding
|
||||||
|
let mut input_iter = inputs.iter();
|
||||||
|
|
||||||
|
// Process all chunks except the last one
|
||||||
|
for _ in 0..(num_chunks - 1) {
|
||||||
|
let mut chunk = Vec::with_capacity(rate);
|
||||||
|
for _ in 0..rate {
|
||||||
|
if let Some(&input) = input_iter.next() {
|
||||||
|
chunk.push(input);
|
||||||
|
} else {
|
||||||
|
chunk.push(zero); // Pad with zeros if necessary (should not happen here)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Add the chunk to the state
|
||||||
|
for j in 0..rate {
|
||||||
|
perm.set_elt(perm.as_ref()[j] + chunk[j],j);
|
||||||
|
}
|
||||||
|
// Apply permutation
|
||||||
|
perm.permute();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process the last chunk with 10* padding
|
||||||
|
let rem = num_chunks * rate - N; // Number of padding elements (0 < rem <= rate)
|
||||||
|
let ofs = rate - rem; // Offset where padding starts
|
||||||
|
|
||||||
|
let mut last_chunk = Vec::with_capacity(rate);
|
||||||
|
// Absorb remaining inputs
|
||||||
|
for _ in 0..ofs {
|
||||||
|
if let Some(&input) = input_iter.next() {
|
||||||
|
last_chunk.push(input);
|
||||||
|
} else {
|
||||||
|
last_chunk.push(zero);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Add the '1' padding bit
|
||||||
|
last_chunk.push(one);
|
||||||
|
// Pad with zeros to reach the full rate
|
||||||
|
while last_chunk.len() < rate {
|
||||||
|
last_chunk.push(zero);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the last chunk to the state
|
||||||
|
for j in 0..rate {
|
||||||
|
perm.set_elt(perm.as_ref()[j] + last_chunk[j],j);
|
||||||
|
}
|
||||||
|
// Apply permutation
|
||||||
|
perm.permute();
|
||||||
|
|
||||||
|
// Squeeze outputs until we have the desired number
|
||||||
|
let mut outputs = Vec::with_capacity(num_outputs);
|
||||||
|
loop {
|
||||||
|
for &item in perm.squeeze() {
|
||||||
|
outputs.push(item);
|
||||||
|
if outputs.len() == num_outputs {
|
||||||
|
return outputs;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
perm.permute();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use plonky2::field::types::Field;
    use crate::sponge::hash_n_with_padding;
    use crate::params::{D, F};
    use codex_plonky2_circuits::circuits::params::HF;

    /// Known-answer test for the rate-8 sponge: for each input length `n`,
    /// hash the field elements 1..=n and compare the 4-limb digest against
    /// precomputed reference values.
    #[test]
    fn test_sponge_hash_rate_8() {
        // (input length, expected digest limbs)
        let test_cases: &[(usize, [u64; 4])] = &[
            (0, [0x509f3a747e4a6fca, 0xd6f21d91afb92eb3, 0xf65ef4075dcfb169, 0xbceaf22e0cd21b3d]),
            (1, [0xfa286adad207c7ea, 0x97d864ff2e89415e, 0xcf002b28585bd945, 0x95ec163fbdd0792e]),
            (2, [0xe4b779622cbb574f, 0x1fe4b1bc9a0c9fc7, 0x40051ada5252de9b, 0xb351345b1894a59f]),
            (3, [0x133a5a2fd0cae006, 0x072a7769ca9a550d, 0x92134dad95d394c6, 0x22234de7d7270aab]),
            (4, [0x78269e830f2a824a, 0x76f8b00469a8fa81, 0x6793369b1d75ebf5, 0xfba1a89dc21d9b30]),
            (5, [0x263994efd2cd5c57, 0x7c37a93fd48fc98b, 0xa081b26a68767d13, 0x16af92d6e1e4d7f8]),
            (6, [0x0b0b0f1d64f8d58c, 0x2946089b2eb949fc, 0xf68bcf08b69a95e7, 0x814d6eb4b2df848c]),
            (7, [0xae0c900a194ee051, 0x4555257fba7a500b, 0x1713fd448cc82c3a, 0xaf8f2e895e2136f3]),
            (8, [0x100351f04fc470b7, 0x79d3c3c416087158, 0x113bb1c70a6e84ee, 0x3eab2507cdc254d3]),
            (9, [0xbab284d7f11855d6, 0xe1b53d108f308a1c, 0x971fea7184337830, 0x6d674ae321cfb9ba]),
            (10, [0x68c00dbe0ed03a8f, 0xab5ba3617eb6f76b, 0x5d735bb89418cc0b, 0xff4101076f3f3c70]),
            (11, [0xaecce2fa7de4f97d, 0x07cee3dc720812e0, 0x4155bf667391a9e8, 0xbf8a49a12f40e746]),
            (12, [0xd3f43f06fc7affd2, 0xee9a8ac5ef44071a, 0xe00ec9e7f468d0e2, 0x944e34913a974233]),
            (13, [0xcd50fe6ab5e3de54, 0x9b2093adaeac949c, 0xa176a2a9e2c82787, 0xd35f0635a1ec333f]),
            (14, [0x8f5188d26ca0368c, 0x0116bf587e5cc970, 0x30654ee52a3c66d8, 0xe8ded60382c44b04]),
            (15, [0xc7f020f910327951, 0x13a468945463870d, 0xbcf8ca584edb30f3, 0x7e7234f0b8954e7e]),
            (16, [0xf8a9aef7392048e7, 0x6124715a2c5343eb, 0x1b7f17ebec4a5b13, 0xdf61d868051dad75]),
            (17, [0x44d1fb6822c7f3fa, 0x2623cc2240022e42, 0xc90ce9259c9e1160, 0x7a42bc611acacc12]),
            (18, [0x85dab5b06ef2d176, 0x24a587b13a4e3b30, 0xf547a00373299873, 0xb298a6ef846d64a1]),
            (19, [0x7cc060a3f2a74260, 0xa07dc76e73335eb0, 0xf8ed9acbcf8a242e, 0xd32eaf3150005e49]),
            (20, [0x3e961c84e53106f9, 0x63d9a807f9cfd88c, 0x7031e8834a17821a, 0xf2e1c79698798fa9]),
            (21, [0x8a0ab00081c9828f, 0xa5f7aadaf3af046e, 0xada8b4c6220b3420, 0x80ebc8c91a65518c]),
            (22, [0x39505fc00f052122, 0xb13edc24a35665c7, 0xa7b164fffe37ec64, 0x8f7eeb42c068e19f]),
            (23, [0x1f49d6f25f39522b, 0x879377d8df727784, 0x00f1461600d09cdd, 0xd2c7946a44e1aa66]),
            (24, [0x1c6f7a68537f7dc7, 0x64e6e09714dc0854, 0x9abfed111e51bd96, 0x65061b2bc484ed8b]),
            (25, [0x95fd5cc6bc02ab29, 0xe2e3c96d9b1b8b5d, 0xadcf491caa16549e, 0x97d91e370da3c0b4]),
            (26, [0x7599c5052ba67767, 0x3fe4a05f44e96ed6, 0xbbfe6874aa53808c, 0xd6771e162cc9f0ff]),
            (27, [0xdff28121d822093c, 0x7313ea03b57bb436, 0x10ed29b28a77d8c3, 0x6ee304be541fe36f]),
            (28, [0xce2b7f232b504b48, 0x02c638c398c12cb0, 0x4f1d416215377a86, 0x2d43ff6c5dd88f8c]),
            (29, [0xa60cb008de647e9a, 0x502e2e740f68e2d1, 0xe983eb54e4052013, 0xe76e59c5e5dbcca2]),
            (30, [0x7735e3ac5e08fa00, 0x211a86449207c30d, 0x9d80ddd40e7760b2, 0xe60f32f28597a188]),
            (31, [0x6fab3f12496f0691, 0x5116ad81bedd7d84, 0xaa8a7713a80b323b, 0xce6d94533fc40b88]),
            (32, [0xce51cdbd641d57c0, 0xf638202a88ee7f9c, 0x26c291ecc5162b45, 0x04a0a62b949c236f]),
            (33, [0x923391e4a4cde9e2, 0xdcb3acccba80597d, 0x247bb4b67044a0e1, 0x65bbac92e096d1ec]),
            (34, [0x1550d0234ae35f05, 0x16f4d1708923d4f1, 0x232319cb4090ea4e, 0x8354e1aed093070c]),
            (35, [0xc7dd24e6db4ea70f, 0x80bc3d2aac952cb1, 0xabbd1a878bc50565, 0xf1ebc3b8d513c591]),
            (36, [0xba9c4b1ce906efb1, 0xa332d0daccc62979, 0xfb658fcad0b5fbbd, 0x62d21407f34a35ee]),
            (37, [0xcb2973d44f2b589d, 0x01708b32c4556317, 0x3ad51597c12b8564, 0x28d3a5d7de72cfd5]),
            (38, [0x1dcf1f4ab7338296, 0xb88c661141b5aabb, 0x7e546b6e9b31bc90, 0xf26f7e6ffabb4e69]),
            (39, [0x2e139ff910c0f410, 0xba3d2c0a92ec3845, 0x2860e475933a7108, 0x8f2a6c6d13bedc7a]),
            (40, [0xc18a53c17c360ef4, 0x5e56ea9228988c68, 0xee0bd138436e996d, 0x06afd46a753f8257]),
            (41, [0x2c992403c5277dc5, 0xba8770bc3a54b043, 0x51b882882a7b7864, 0xf75e179a53e7948e]),
            (42, [0xde855183965741c3, 0x93520eac77a8f98d, 0x6412ae8cf0522d78, 0x9db49c6b455a83b4]),
            (43, [0x552e357ddb7e1ef6, 0x5fa779e9c7373b56, 0x18f7c445e27e5dcf, 0x2664ecee5e7bc6c2]),
            (44, [0x37b8a716c87e5489, 0x1201fcd31e407152, 0x0979d7887c42e1ca, 0x902e8b2bf748b356]),
            (45, [0xa48bdd1d464960ed, 0x8e92c1af0cf258bc, 0x7c5b447524b92ba9, 0xac63902e613e4ef0]),
            (46, [0x542e62f9317fe11d, 0xc23ba113a3f3c810, 0x2bda30c42a89cc7e, 0x35616e9f1a00aa8f]),
            (47, [0x1c9194a0acfa97d7, 0x60d536ac106dd774, 0x8855b4a40e110080, 0xc2c408114e8c20d6]),
            (48, [0x0e90b1cc3ac49e0c, 0x1b73aa8e0decbf88, 0x0ca9ef7070e0513f, 0x25cfb975571b6139]),
            (49, [0xba6d6f7aa664f2e7, 0x4b9af896093937b9, 0x115b9aeb6c5f563e, 0x41cb5f42c6d3b115]),
            (50, [0xdc3bdc491154caf6, 0xb95159bae61b2035, 0x98bd384fb3d0100b, 0xd70226f2b71ea465]),
            (51, [0x57f31da51bcd2eab, 0x4a3b3945a8662b5c, 0x44dffaa325530b19, 0x47f4e41c2c1474cf]),
            (52, [0xc3f518f6cf3b43bf, 0x1214790ff48554e4, 0x99c1eabc61b218fd, 0xf90b03954d7937f8]),
            (53, [0x6357b3cdcbc1283a, 0x6acc0c2d5aac9261, 0xdf11e7ad14d432d1, 0x2242b26bdcc8a380]),
            (54, [0x1946dc4471f8c502, 0x6be7d72499e0b4a5, 0x6e11de349239ff90, 0xfca78044256b8b54]),
            (55, [0x302b38fb3df623dd, 0x69b362f7932fd7af, 0x2b47156f9135508b, 0xfe6c574f0a102e92]),
            (56, [0xfdc9bd08a0416122, 0x063ebf4767fc7914, 0x330f36279d94050e, 0x79c61f80746893ec]),
            (57, [0x7b5d8384b67af5c0, 0xa705e0163fa4d839, 0x1e203432e872104e, 0xe0e7699f20a291f4]),
            (58, [0xb0aa74a52fe04366, 0x194b0d4afcdc03d9, 0x5134dc604b5d9f2a, 0x53c6bf9d5a1d502b]),
            (59, [0xd5c8258f6fc80e2b, 0x82bac373eb051b48, 0x5ef620241420462d, 0x58635db0134fb97a]),
            (60, [0x42ebb974ac5dd0ef, 0x676d0c6b3dde78c3, 0x14ed5eda2c9cb9de, 0x0f78a26badaa447c]),
            (61, [0x2b3ca7711db999d5, 0xb74bd29abcb6179a, 0x8ba196525e6adb25, 0x86cb9464ae269a43]),
            (62, [0x3d0e61a2ca7a65a2, 0x31f77852d41a6c8d, 0x2e4ceaa39763a53d, 0x5232ff5a3d78755e]),
            (63, [0xb2ed789e88c1f525, 0x1592c1a1eafd2a9b, 0x98700c512f8c9a5d, 0xf96837b5d99a4eb4]),
            (64, [0xe4b7d14e11de2fa9, 0xe81afff2cee68e14, 0xc58abb080bf37dd3, 0x36ae8b2196b5ae88]),
            (65, [0xa1df9ff199c41d63, 0xd02c067d3d12edc1, 0xc9b598130fa60794, 0x5afe82d34c3fc8fa]),
            (66, [0x0bc0094a1f07256d, 0x33c5b4c2a171d5bd, 0x1f38f1b1dc92aa54, 0x4610d21f276faa11]),
            (67, [0x8072f00df8f7e44f, 0x42f0c2b8fe81d8a0, 0x2b5caf9e7c0ff611, 0x92b0b3a4a4bebe1a]),
            (68, [0x6539f06fab064b57, 0xdb298b91f6c4f44f, 0x5d8f8eec6b7e8c86, 0x848a447123f39006]),
            (69, [0x87f32efc9eaa65f6, 0xc5699d4ab6443852, 0x61008286bc651f4a, 0xcbcf714354843da3]),
            (70, [0xffb8ad2258107315, 0xf7d6a58eb54f2745, 0xaecf888211821114, 0x7e0ea33b4d56976e]),
            (71, [0xa9e5b6d70f67db2b, 0x072fd05840040322, 0x40ffcc86e3909dec, 0x3d80f61616a9e6d7]),
            (72, [0xa77dd95d9ff4d7b8, 0x3a0e0502f74c091a, 0x1fa83de1e7dc716d, 0xe01ae447cc3a0e40]),
            (73, [0xc4a29dc875a308eb, 0xd2ed0da7aab24b0c, 0x4c2aaaed0bc4f059, 0xaea772c635ea901a]),
            (74, [0xaad59bf06c151ecf, 0x5e0f45e55df36692, 0x4798afb8b944a01e, 0xd7152cd819bbd7f8]),
            (75, [0x89ae5b2b35ba07c7, 0x129f4ff59afaa1a3, 0x4275f3f797112650, 0xea3b4baaf7190a19]),
            (76, [0xab068e43be297604, 0x17bd1c3cf4afec96, 0xaa84a8098dba4516, 0xa6e487ceafb02c49]),
            (77, [0x2c85080ef895bb4a, 0xbd280690a789c124, 0xca4f8423b50de8a5, 0xec809bb8c30de95b]),
            (78, [0x51c3d13543e4922b, 0xff9c11d5b93268db, 0xd9cf911cc5326948, 0x4b7bb11eafe7fd44]),
            (79, [0xb435274d75678586, 0x8600e7f2db687493, 0x282873a3600a38da, 0x727791507d1b600e]),
            (80, [0x23ae45602324f628, 0x0dc16b33f43209c5, 0x2455376f83b1aeff, 0xd5470f22ec2113bc]),
        ];

        for &(n, expected_digest) in test_cases {
            // Inputs are the field elements 1, 2, ..., n.
            let inputs: Vec<F> = (1..=n as u64).map(F::from_canonical_u64).collect();

            // Run the sponge.
            let output = hash_n_with_padding::<F, D, HF>(&inputs);

            // Compare limb by limb against the reference digest.
            for (i, &out_elem) in output.elements.iter().enumerate() {
                let expected_elem = F::from_canonical_u64(expected_digest[i]);
                assert_eq!(
                    out_elem,
                    expected_elem,
                    "Mismatch at test case n={}, output element {}",
                    n,
                    i
                );
            }
        }
    }
}
|
|
@ -0,0 +1,192 @@
|
||||||
|
use plonky2::field::goldilocks_field::GoldilocksField;
|
||||||
|
use plonky2::field::types::Field;
|
||||||
|
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||||
|
use plonky2_field::extension::Extendable;
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
|
||||||
|
fn digest_seq<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
>(n: usize) -> Vec<HashOut<F>> {
|
||||||
|
(0..n)
|
||||||
|
.map(|i| HashOut {
|
||||||
|
elements: [
|
||||||
|
F::from_canonical_u64((i + 1) as u64),
|
||||||
|
F::ZERO,
|
||||||
|
F::ZERO,
|
||||||
|
F::ZERO,
|
||||||
|
],
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;
    use codex_plonky2_circuits::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
    use plonky2::field::goldilocks_field::GoldilocksField;
    use plonky2::field::types::Field;
    use plonky2::hash::hash_types::HashOut;
    use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;

    type F = GoldilocksField;
    const D: usize = 2;

    /// Expected root digest for a tree built from `n` sequence leaves.
    struct TestCase {
        n: usize,
        digest: [u64; 4],
    }

    /// Builds a `HashOut` from four canonical u64 limbs.
    fn hash_out_from_u64s(limbs: [u64; 4]) -> HashOut<F> {
        HashOut {
            elements: limbs.map(F::from_canonical_u64),
        }
    }

    /// Checks the computed Merkle root against known reference digests
    /// for small trees of 1, 2 and 3 leaves.
    #[test]
    fn test_merkle_roots() -> Result<()> {
        let zero = HashOut {
            elements: [F::ZERO; 4],
        };

        let test_cases: Vec<TestCase> = vec![
            TestCase { n: 1, digest: [0x232f21acc9d346d8, 0x2eba96d3a73822c1, 0x4163308f6d0eff64, 0x5190c2b759734aff] },
            TestCase { n: 2, digest: [0x999dde2cb60b5bdb, 0xacb725a87250a306, 0x8eeb00a6fc173443, 0x5f510b7eeece33bb] },
            TestCase { n: 3, digest: [0x00b72dc0a592b9c0, 0x68575842dd1c6e27, 0x871d5146985881d6, 0xc945d7f3d5fdde00] },
        ];

        for test_case in test_cases {
            let n = test_case.n;

            // Generate the inputs and build the Merkle tree.
            let inputs = digest_seq::<F, D>(n);
            let tree = MerkleTree::<F, D>::new(&inputs, zero.clone())?;

            // Compare the computed root to the expected digest.
            let computed_root = tree.root()?;
            let expected_root = hash_out_from_u64s(test_case.digest);
            assert_eq!(
                computed_root, expected_root,
                "Mismatch at n = {}",
                n
            );
        }

        Ok(())
    }

    /// Given a fixed leaf, root, and sibling path (but no index), finds a
    /// leaf index for which the path reconstructs the root.
    /// Constants were previously decimal strings parsed at runtime; they are
    /// now plain u64 literals, removing the `parse().unwrap()` chains.
    #[test]
    fn test_merkle_proof_with_given_leaf_and_root() -> Result<()> {
        let root = hash_out_from_u64s([
            14459953088494886308,
            12400665201701660877,
            8918969394875474575,
            3734475392324688728,
        ]);

        let leaf = hash_out_from_u64s([
            6216356142838248961,
            7651361162368135479,
            8250178335123580371,
            3813462866599431579,
        ]);

        // Sibling hashes from leaf to root, 4 limbs per level.
        const PROOF_LIMBS: [[u64; 4]; 4] = [
            [1345604040032513712, 7222769029677219453, 4856886058017005512, 17218820401481758629],
            [6741690371018853470, 10000950172891759230, 1256624250298316158, 14572953286928282395],
            [11250861626949238654, 2066450512590186880, 4406339264013603126, 6649535526486987988],
            [14920223145083393283, 18017129979212138612, 1235310154294028825, 16382646529383194172],
        ];
        let path_hashes: Vec<HashOut<F>> = PROOF_LIMBS
            .iter()
            .map(|&limbs| hash_out_from_u64s(limbs))
            .collect();

        // The proof does not record the leaf index, so brute-force every
        // possible index until one reconstructs the expected root.
        let num_indices = 1 << path_hashes.len();
        let mut found = false;

        for index in 0..num_indices {
            let proof = MerkleProof::<F, D> {
                index,
                path: path_hashes.clone(),
                nleaves: num_indices,
                zero: HashOut {
                    elements: [F::ZERO; 4],
                },
            };

            // Reconstruct the root and compare with the given root.
            let reconstructed_root = proof.reconstruct_root(leaf.clone())?;
            if reconstructed_root == root {
                println!("Proof is valid for index {}", index);
                found = true;
                break;
            }
        }

        assert!(found, "No valid proof found for the given leaf and root");

        Ok(())
    }
}
|
|
@ -0,0 +1,298 @@
|
||||||
|
use anyhow::Result;
|
||||||
|
use plonky2::field::extension::Extendable;
|
||||||
|
use plonky2::field::goldilocks_field::GoldilocksField;
|
||||||
|
use plonky2::field::types::Field;
|
||||||
|
use plonky2::hash::hash_types::{HashOut, HashOutTarget, NUM_HASH_OUT_ELTS, RichField};
|
||||||
|
use plonky2::hash::hashing::PlonkyPermutation;
|
||||||
|
use plonky2::hash::poseidon::PoseidonHash;
|
||||||
|
use plonky2::iop::witness::{PartialWitness, Witness, WitnessWrite};
|
||||||
|
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||||
|
use plonky2::plonk::circuit_data::CircuitConfig;
|
||||||
|
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, GenericHashOut, Hasher, PoseidonGoldilocksConfig};
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
use serde::Serialize;
|
||||||
|
use codex_plonky2_circuits::circuits::merkle_circuit::{MerkleProofTarget, MerkleTreeCircuit, MerkleTreeTargets};
|
||||||
|
use codex_plonky2_circuits::circuits::utils::{assign_bool_targets, assign_hash_out_targets};
|
||||||
|
use crate::utils::usize_to_bits_le_padded;
|
||||||
|
|
||||||
|
use codex_plonky2_circuits::merkle_tree::merkle_safe::MerkleTree;
|
||||||
|
|
||||||
|
/// The input (witness values) to the Merkle tree circuit.
#[derive(Clone)]
pub struct MerkleTreeCircuitInput<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
>{
    /// Hash of the leaf whose membership is being proven.
    pub leaf: HashOut<F>,
    /// Binary decomposition (LSB first) of the leaf index.
    pub path_bits: Vec<bool>,
    /// Binary decomposition (LSB first) of last_index = nleaves - 1.
    pub last_bits: Vec<bool>,
    /// Mask bits; one longer than the tree depth (see `build_circuit`).
    /// NOTE(review): presumably masks out index bits beyond the actual
    /// number of leaves — confirm against the masked reconstruction circuit.
    pub mask_bits: Vec<bool>,
    /// Sibling hashes along the path from leaf to root.
    pub merkle_path: Vec<HashOut<F>>,
}
|
||||||
|
|
||||||
|
/// Defines the in-circuit Merkle-root reconstruction and returns the created
/// witness targets together with the reconstructed-root target.
/// NOTE: this is not used in the sampling circuit, see reconstruct_merkle_root_circuit_with_mask
pub fn build_circuit<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
>(
    builder: &mut CircuitBuilder::<F, D>,
    depth: usize,
) -> (MerkleTreeTargets, HashOutTarget) {

    // Create virtual targets
    let leaf = builder.add_virtual_hash();

    // path bits (binary decomposition of leaf_index)
    let path_bits = (0..depth).map(|_| builder.add_virtual_bool_target_safe()).collect::<Vec<_>>();

    // last bits (binary decomposition of last_index = nleaves - 1)
    let last_bits = (0..depth).map(|_| builder.add_virtual_bool_target_safe()).collect::<Vec<_>>();

    // mask bits — note there are depth+1 of them (the comment here used to be a
    // copy-paste of the last-bits one). NOTE(review): presumably these mask
    // index bits beyond the actual tree size — confirm against
    // reconstruct_merkle_root_circuit_with_mask.
    let mask_bits = (0..depth+1).map(|_| builder.add_virtual_bool_target_safe()).collect::<Vec<_>>();

    // Merkle path (sibling hashes from leaf to root)
    let merkle_path = MerkleProofTarget {
        path: (0..depth).map(|_| builder.add_virtual_hash()).collect(),
    };

    // create MerkleTreeTargets struct; mutable because the reconstruction
    // routine below takes it by &mut
    let mut targets = MerkleTreeTargets{
        leaf,
        path_bits,
        last_bits,
        mask_bits,
        merkle_path,
    };

    // Add Merkle proof verification constraints to the circuit
    let reconstructed_root_target = MerkleTreeCircuit::reconstruct_merkle_root_circuit_with_mask(builder, &mut targets, depth);

    // Return the targets along with the in-circuit reconstructed root
    (targets, reconstructed_root_target)
}
|
||||||
|
|
||||||
|
/// assign the witness values in the circuit targets
|
||||||
|
/// this takes MerkleTreeCircuitInput and fills all required circuit targets
|
||||||
|
pub fn assign_witness<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize,
|
||||||
|
>(
|
||||||
|
pw: &mut PartialWitness<F>,
|
||||||
|
targets: &mut MerkleTreeTargets,
|
||||||
|
witnesses: MerkleTreeCircuitInput<F, D>
|
||||||
|
)-> Result<()> {
|
||||||
|
// Assign the leaf hash to the leaf target
|
||||||
|
pw.set_hash_target(targets.leaf, witnesses.leaf);
|
||||||
|
|
||||||
|
// Assign path bits
|
||||||
|
assign_bool_targets(pw, &targets.path_bits, witnesses.path_bits);
|
||||||
|
|
||||||
|
// Assign last bits
|
||||||
|
assign_bool_targets(pw, &targets.last_bits, witnesses.last_bits);
|
||||||
|
|
||||||
|
// Assign mask bits
|
||||||
|
assign_bool_targets(pw, &targets.mask_bits, witnesses.mask_bits);
|
||||||
|
|
||||||
|
// assign the Merkle path (sibling hashes) to the targets
|
||||||
|
for i in 0..targets.merkle_path.path.len() {
|
||||||
|
if i>=witnesses.merkle_path.len() { // pad with zeros
|
||||||
|
assign_hash_out_targets(pw, &targets.merkle_path.path[i].elements, &[F::ZERO; NUM_HASH_OUT_ELTS]);
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
assign_hash_out_targets(pw, &targets.merkle_path.path[i].elements, &witnesses.merkle_path[i].elements)
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use plonky2::hash::hash_types::HashOut;
    use plonky2::hash::poseidon::PoseidonHash;
    use super::*;
    use plonky2::plonk::circuit_data::CircuitConfig;
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
    use plonky2::iop::witness::PartialWitness;
    use plonky2::plonk::circuit_builder::CircuitBuilder;
    use plonky2_field::goldilocks_field::GoldilocksField;

    // NOTE(review): both tests connect the in-circuit reconstructed root to a
    // virtual expected-root target witnessed with the real tree root, so a bad
    // reconstruction should make proving fail — confirm against the
    // prove_single_cell tests which exercise the same path.

    /// Builds a 16-leaf tree, proves membership of one leaf in-circuit,
    /// and verifies the resulting plonky2 proof.
    #[test]
    fn test_build_circuit() -> anyhow::Result<()> {
        // circuit params
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;

        // Generate leaf data: the field elements 0..nleaves.
        let nleaves = 16; // Number of leaves
        let max_depth = 4;
        let data = (0..nleaves)
            .map(|i| GoldilocksField::from_canonical_u64(i))
            .collect::<Vec<_>>();
        // Hash the data to obtain leaf hashes.
        let leaves: Vec<HashOut<GoldilocksField>> = data
            .iter()
            .map(|&element| PoseidonHash::hash_no_pad(&[element]))
            .collect();

        // Initialize the Merkle tree.
        let zero_hash = HashOut {
            elements: [GoldilocksField::ZERO; 4],
        };
        let tree = MerkleTree::<F, D>::new(&leaves, zero_hash)?;

        // Select a leaf index to prove and get its Merkle proof.
        let leaf_index: usize = 8;
        let proof = tree.get_proof(leaf_index)?;
        // Sanity check: the proof verifies outside the circuit.
        assert!(proof.verify(tree.layers[0][leaf_index], tree.root()?)?);

        // Create the circuit.
        let config = CircuitConfig::standard_recursion_config();
        let mut builder = CircuitBuilder::<F, D>::new(config);
        let (mut targets, reconstructed_root_target) = build_circuit(&mut builder, max_depth);

        // Expected-root target, constrained equal to the reconstructed root.
        // (The previous version also computed `tree.root()` here and then
        // immediately shadowed it — that dead binding is removed.)
        let expected_root = builder.add_virtual_hash();
        for i in 0..NUM_HASH_OUT_ELTS {
            builder.connect(expected_root.elements[i], reconstructed_root_target.elements[i]);
        }

        // Bit decompositions for the witness.
        let path_bits = usize_to_bits_le_padded(leaf_index, max_depth);
        let last_index = (nleaves - 1) as usize;
        let last_bits = usize_to_bits_le_padded(last_index, max_depth);
        let mask_bits = usize_to_bits_le_padded(last_index, max_depth + 1);

        // Circuit input.
        let circuit_input = MerkleTreeCircuitInput::<F, D> {
            leaf: tree.layers[0][leaf_index],
            path_bits,
            last_bits,
            mask_bits,
            merkle_path: proof.path,
        };

        // Create a PartialWitness and assign all values.
        let mut pw = PartialWitness::new();
        assign_witness(&mut pw, &mut targets, circuit_input)?;
        pw.set_hash_target(expected_root, tree.root()?);

        // Build the circuit and prove with the assigned witness.
        let data = builder.build::<C>();
        let proof_with_pis = data.prove(pw)?;

        // Verify the proof.
        let verifier_data = data.verifier_data();
        assert!(
            verifier_data.verify(proof_with_pis).is_ok(),
            "Merkle proof verification failed"
        );

        Ok(())
    }

    /// Same as the test above but proves and verifies every leaf,
    /// reusing one built circuit across all proofs.
    #[test]
    fn test_verify_all_leaves() -> anyhow::Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;

        let nleaves = 16; // Number of leaves
        let max_depth = 4;
        let data = (0..nleaves)
            .map(|i| GoldilocksField::from_canonical_u64(i as u64))
            .collect::<Vec<_>>();
        // Hash the data to obtain leaf hashes.
        let leaves: Vec<HashOut<GoldilocksField>> = data
            .iter()
            .map(|&element| PoseidonHash::hash_no_pad(&[element]))
            .collect();

        let zero_hash = HashOut {
            elements: [GoldilocksField::ZERO; 4],
        };
        let tree = MerkleTree::<F, D>::new(&leaves, zero_hash)?;
        let expected_root = tree.root()?;

        // Build the circuit once; it is reused for every leaf below.
        let config = CircuitConfig::standard_recursion_config();
        let mut builder = CircuitBuilder::<F, D>::new(config);
        let (mut targets, reconstructed_root_target) = build_circuit(&mut builder, max_depth);

        // Expected-root target, constrained equal to the reconstructed root.
        let expected_root_target = builder.add_virtual_hash();
        for i in 0..NUM_HASH_OUT_ELTS {
            builder.connect(expected_root_target.elements[i], reconstructed_root_target.elements[i]);
        }

        let data = builder.build::<C>();

        for leaf_index in 0..nleaves {
            let proof = tree.get_proof(leaf_index)?;
            // Out-of-circuit sanity check for this leaf.
            assert!(
                proof.verify(tree.layers[0][leaf_index], expected_root)?,
                "Merkle proof verification failed for leaf index {}",
                leaf_index
            );

            let mut pw = PartialWitness::new();

            let path_bits = usize_to_bits_le_padded(leaf_index, max_depth);
            let last_index = nleaves - 1;
            let last_bits = usize_to_bits_le_padded(last_index, max_depth);
            let mask_bits = usize_to_bits_le_padded(last_index, max_depth + 1);

            // Circuit input for this leaf.
            let circuit_input = MerkleTreeCircuitInput::<F, D> {
                leaf: tree.layers[0][leaf_index],
                path_bits,
                last_bits,
                mask_bits,
                merkle_path: proof.path,
            };

            assign_witness(&mut pw, &mut targets, circuit_input)?;
            pw.set_hash_target(expected_root_target, expected_root);

            let proof_with_pis = data.prove(pw)?;

            let verifier_data = data.verifier_data();
            assert!(
                verifier_data.verify(proof_with_pis).is_ok(),
                "Merkle proof verification failed in circuit for leaf index {}",
                leaf_index
            );
        }

        Ok(())
    }
}
|
|
@ -0,0 +1,2 @@
|
||||||
|
pub mod merkle_circuit;
|
||||||
|
pub mod merkle;
|
|
@ -0,0 +1,100 @@
|
||||||
|
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||||
|
use plonky2::iop::witness::PartialWitness;
|
||||||
|
use plonky2::plonk::circuit_data::{CircuitData, VerifierCircuitData};
|
||||||
|
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
|
||||||
|
use plonky2::plonk::proof::{Proof, ProofWithPublicInputs};
|
||||||
|
use plonky2_field::extension::Extendable;
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
|
use codex_plonky2_circuits::circuits::params::HF;
|
||||||
|
use anyhow::Result;
|
||||||
|
use plonky2::hash::hashing::PlonkyPermutation;
|
||||||
|
use crate::sponge::hash_n_with_padding;
|
||||||
|
// --------- helper functions ---------
|
||||||
|
|
||||||
|
/// Converts an index to a vector of bits (LSB first) with padding.
///
/// Always returns exactly `bit_length` bits; bits beyond the value's
/// magnitude come out as `false`. The previous implementation had a
/// trailing `while`-padding loop that could never run, since the main
/// loop already produced `bit_length` entries — it has been removed.
pub(crate) fn usize_to_bits_le_padded(index: usize, bit_length: usize) -> Vec<bool> {
    (0..bit_length).map(|i| (index >> i) & 1 == 1).collect()
}
|
||||||
|
/// calculate the sampled cell index from entropy, slot root, and counter
|
||||||
|
/// this is the non-circuit version for testing
|
||||||
|
pub(crate) fn calculate_cell_index_bits<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
const D: usize
|
||||||
|
>(entropy: &Vec<F>, slot_root: HashOut<F>, ctr: usize, depth: usize, mask_bits: Vec<bool>) -> Vec<bool> {
|
||||||
|
let ctr_field = F::from_canonical_u64(ctr as u64);
|
||||||
|
let mut ctr_as_digest = HashOut::<F>::ZERO;
|
||||||
|
ctr_as_digest.elements[0] = ctr_field;
|
||||||
|
let mut hash_inputs = Vec::new();
|
||||||
|
hash_inputs.extend_from_slice(&entropy);
|
||||||
|
hash_inputs.extend_from_slice(&slot_root.elements);
|
||||||
|
hash_inputs.extend_from_slice(&ctr_as_digest.elements);
|
||||||
|
let hash_output = hash_n_with_padding::<F,D,HF>(&hash_inputs);
|
||||||
|
let cell_index_bytes = hash_output.elements[0].to_canonical_u64();
|
||||||
|
|
||||||
|
let cell_index_bits = usize_to_bits_le_padded(cell_index_bytes as usize, depth);
|
||||||
|
|
||||||
|
let mut masked_cell_index_bits = vec![];
|
||||||
|
|
||||||
|
for i in 0..depth{
|
||||||
|
masked_cell_index_bits.push(cell_index_bits[i] && mask_bits[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
masked_cell_index_bits
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Expands `bytes` into bits (LSB first within each byte) and returns the
/// first `n` of them; returns fewer if the input runs out of bits.
pub(crate) fn take_n_bits_from_bytes(bytes: &[u8], n: usize) -> Vec<bool> {
    let mut bits = Vec::with_capacity(n);
    'outer: for &byte in bytes {
        for i in 0..8u8 {
            if bits.len() == n {
                break 'outer;
            }
            bits.push((byte >> i) & 1 == 1);
        }
    }
    bits
}
|
||||||
|
|
||||||
|
/// Converts a vector of bits (LSB first) into an index (usize).
///
/// Trailing `false` padding has no effect on the result.
pub(crate) fn bits_le_padded_to_usize(bits: &[bool]) -> usize {
    let mut index = 0usize;
    for (pos, &bit) in bits.iter().enumerate() {
        if bit {
            index |= 1 << pos;
        }
    }
    index
}
|
||||||
|
|
||||||
|
/// prove given the circuit data and partial witness
|
||||||
|
pub fn prove<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
C: GenericConfig<D, F = F>,
|
||||||
|
const D: usize,
|
||||||
|
H: Hasher<F> + AlgebraicHasher<F>,
|
||||||
|
>(
|
||||||
|
data: CircuitData<F, C, D>,
|
||||||
|
pw: PartialWitness<F>
|
||||||
|
) -> Result<ProofWithPublicInputs<F, C, D>>{
|
||||||
|
let proof = data.prove(pw);
|
||||||
|
return proof
|
||||||
|
}
|
||||||
|
|
||||||
|
/// verify given verifier data, public input, and proof
|
||||||
|
pub fn verify<
|
||||||
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
|
C: GenericConfig<D, F = F>,
|
||||||
|
const D: usize,
|
||||||
|
H: Hasher<F> + AlgebraicHasher<F>,
|
||||||
|
>(
|
||||||
|
verifier_data: &VerifierCircuitData<F, C, D>,
|
||||||
|
public_inputs: Vec<F>,
|
||||||
|
proof: Proof<F, C, D>
|
||||||
|
)-> Result<()> {
|
||||||
|
verifier_data.verify(ProofWithPublicInputs {
|
||||||
|
proof,
|
||||||
|
public_inputs,
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc_fingerprint":3013295485060983078,"outputs":{"15729799797837862367":{"success":true,"status":"","code":0,"stdout":"___\nlib___.rlib\nlib___.dylib\nlib___.dylib\nlib___.a\nlib___.dylib\n/Users/mohammedalghazwi/.rustup/toolchains/nightly-aarch64-apple-darwin\noff\npacked\nunpacked\n___\ndebug_assertions\noverflow_checks\npanic=\"unwind\"\nproc_macro\nrelocation_model=\"pic\"\ntarget_abi=\"\"\ntarget_arch=\"aarch64\"\ntarget_endian=\"little\"\ntarget_env=\"\"\ntarget_family=\"unix\"\ntarget_feature=\"aes\"\ntarget_feature=\"crc\"\ntarget_feature=\"dit\"\ntarget_feature=\"dotprod\"\ntarget_feature=\"dpb\"\ntarget_feature=\"dpb2\"\ntarget_feature=\"fcma\"\ntarget_feature=\"fhm\"\ntarget_feature=\"flagm\"\ntarget_feature=\"fp16\"\ntarget_feature=\"frintts\"\ntarget_feature=\"jsconv\"\ntarget_feature=\"lor\"\ntarget_feature=\"lse\"\ntarget_feature=\"neon\"\ntarget_feature=\"paca\"\ntarget_feature=\"pacg\"\ntarget_feature=\"pan\"\ntarget_feature=\"pmuv3\"\ntarget_feature=\"ras\"\ntarget_feature=\"rcpc\"\ntarget_feature=\"rcpc2\"\ntarget_feature=\"rdm\"\ntarget_feature=\"sb\"\ntarget_feature=\"sha2\"\ntarget_feature=\"sha3\"\ntarget_feature=\"ssbs\"\ntarget_feature=\"v8.1a\"\ntarget_feature=\"v8.2a\"\ntarget_feature=\"v8.3a\"\ntarget_feature=\"v8.4a\"\ntarget_feature=\"vh\"\ntarget_has_atomic\ntarget_has_atomic=\"128\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_has_atomic_equal_alignment=\"128\"\ntarget_has_atomic_equal_alignment=\"16\"\ntarget_has_atomic_equal_alignment=\"32\"\ntarget_has_atomic_equal_alignment=\"64\"\ntarget_has_atomic_equal_alignment=\"8\"\ntarget_has_atomic_equal_alignment=\"ptr\"\ntarget_has_atomic_load_store\ntarget_has_atomic_load_store=\"128\"\ntarget_has_atomic_load_store=\"16\"\ntarget_has_atomic_load_store=\"32\"\ntarget_has_atomic_load_store=\"64\"\ntarget_has_atomic_load_store=\"8\"\ntarget_has_atomic_load_store=\"ptr\"\ntarget_os=\"macos\"\ntar
get_pointer_width=\"64\"\ntarget_thread_local\ntarget_vendor=\"apple\"\nub_checks\nunix\n","stderr":""},"4614504638168534921":{"success":true,"status":"","code":0,"stdout":"rustc 1.79.0-nightly (0bf471f33 2024-04-13)\nbinary: rustc\ncommit-hash: 0bf471f339837af930ec90ef5e1e9cb232e99f29\ncommit-date: 2024-04-13\nhost: aarch64-apple-darwin\nrelease: 1.79.0-nightly\nLLVM version: 18.1.3\n","stderr":""},"15481046163696847946":{"success":true,"status":"","code":0,"stdout":"___\nlib___.rlib\nlib___.dylib\nlib___.dylib\nlib___.a\nlib___.dylib\n/Users/mohammedalghazwi/.rustup/toolchains/nightly-aarch64-apple-darwin\noff\npacked\nunpacked\n___\ndebug_assertions\noverflow_checks\npanic=\"unwind\"\nproc_macro\nrelocation_model=\"pic\"\ntarget_abi=\"\"\ntarget_arch=\"aarch64\"\ntarget_endian=\"little\"\ntarget_env=\"\"\ntarget_family=\"unix\"\ntarget_feature=\"aes\"\ntarget_feature=\"crc\"\ntarget_feature=\"dit\"\ntarget_feature=\"dotprod\"\ntarget_feature=\"dpb\"\ntarget_feature=\"dpb2\"\ntarget_feature=\"fcma\"\ntarget_feature=\"fhm\"\ntarget_feature=\"flagm\"\ntarget_feature=\"fp16\"\ntarget_feature=\"frintts\"\ntarget_feature=\"jsconv\"\ntarget_feature=\"lor\"\ntarget_feature=\"lse\"\ntarget_feature=\"neon\"\ntarget_feature=\"paca\"\ntarget_feature=\"pacg\"\ntarget_feature=\"pan\"\ntarget_feature=\"pmuv3\"\ntarget_feature=\"ras\"\ntarget_feature=\"rcpc\"\ntarget_feature=\"rcpc2\"\ntarget_feature=\"rdm\"\ntarget_feature=\"sb\"\ntarget_feature=\"sha2\"\ntarget_feature=\"sha3\"\ntarget_feature=\"ssbs\"\ntarget_feature=\"v8.1a\"\ntarget_feature=\"v8.2a\"\ntarget_feature=\"v8.3a\"\ntarget_feature=\"v8.4a\"\ntarget_feature=\"vh\"\ntarget_has_atomic\ntarget_has_atomic=\"128\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_has_atomic_equal_alignment=\"128\"\ntarget_has_atomic_equal_alignment=\"16\"\ntarget_has_atomic_equal_alignment=\"32\"\ntarget_has_atomic_equal_alignment=\"64\"\ntarget_has
_atomic_equal_alignment=\"8\"\ntarget_has_atomic_equal_alignment=\"ptr\"\ntarget_has_atomic_load_store\ntarget_has_atomic_load_store=\"128\"\ntarget_has_atomic_load_store=\"16\"\ntarget_has_atomic_load_store=\"32\"\ntarget_has_atomic_load_store=\"64\"\ntarget_has_atomic_load_store=\"8\"\ntarget_has_atomic_load_store=\"ptr\"\ntarget_os=\"macos\"\ntarget_pointer_width=\"64\"\ntarget_thread_local\ntarget_vendor=\"apple\"\nub_checks\nunix\n","stderr":""}},"successes":{}}
|
|
@ -0,0 +1,3 @@
|
||||||
|
Signature: 8a477f597d28d172789f06886806bc55
|
||||||
|
# This file is a cache directory tag created by cargo.
|
||||||
|
# For information about cache directory tags see https://bford.info/cachedir/
|
|
@ -0,0 +1 @@
|
||||||
|
7edef65a904f3945
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"compile-time-rng\", \"const-random\"]","declared_features":"","target":13708040221295731214,"profile":1200860260873630964,"path":6231922836321726678,"deps":[[4366825111050392739,"version_check",false,15786420151621770117]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/ahash-3f99d641ca1955a0/dep-build-script-build-script-build"}}],"rustflags":[],"metadata":6548036084630991988,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
9a431b64231aea8b
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"compile-time-rng\", \"const-random\"]","declared_features":"","target":15946166061513530080,"profile":3797293754785534760,"path":1528684582876422223,"deps":[[2452538001284770427,"cfg_if",false,3203166170258471341],[2751633865096478575,"once_cell",false,12231094743857206959],[8776983334904785487,"zerocopy",false,294149281937114246],[15443876827423482409,"build_script_build",false,15852390295855028257],[17919418334554113058,"const_random",false,9073307235475755424]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/ahash-af178abcd080e94b/dep-lib-ahash"}}],"rustflags":[],"metadata":6548036084630991988,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
f9291312615f8d4f
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"compile-time-rng\", \"const-random\"]","declared_features":"","target":15946166061513530080,"profile":4374887572363265115,"path":1528684582876422223,"deps":[[2452538001284770427,"cfg_if",false,2486202953659128152],[2751633865096478575,"once_cell",false,748832215468416173],[8776983334904785487,"zerocopy",false,10403420805085834480],[15443876827423482409,"build_script_build",false,15852390295855028257],[17919418334554113058,"const_random",false,6749215817535640163]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/ahash-ccfff8bdf1ae54cf/dep-lib-ahash"}}],"rustflags":[],"metadata":6548036084630991988,"config":2202906307356721367,"compile_kind":0}
|
|
@ -0,0 +1 @@
|
||||||
|
21d84909fc00ffdb
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"","declared_features":"","target":0,"profile":0,"path":0,"deps":[[15443876827423482409,"build_script_build",false,4988105543709548158]],"local":[{"RerunIfChanged":{"output":"debug/build/ahash-d536f066018566fc/output","paths":["build.rs"]}}],"rustflags":[],"metadata":0,"config":0,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
3ee71801ba984748
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"auto\", \"default\", \"wincon\"]","declared_features":"","target":1736373845211751465,"profile":11497727726920792826,"path":1673770740720347937,"deps":[[821897733253474908,"anstyle",false,243185765304966503],[6726333832837302156,"anstyle_query",false,2231456570620701234],[8720183142424604966,"utf8parse",false,8354262512456219188],[9119385831240683871,"is_terminal_polyfill",false,15402287928372065015],[16168342247272166835,"anstyle_parse",false,14722721328824445292],[17599588001959536047,"colorchoice",false,15523229461858673341]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anstream-aae918ed3d8ce677/dep-lib-anstream"}}],"rustflags":[],"metadata":7500874485387469444,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
76d7ee69fdc09fc4
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"auto\", \"default\", \"wincon\"]","declared_features":"","target":1736373845211751465,"profile":13313845465037890836,"path":1673770740720347937,"deps":[[821897733253474908,"anstyle",false,18385214188734042437],[6726333832837302156,"anstyle_query",false,8569742987658344335],[8720183142424604966,"utf8parse",false,6519909320626031671],[9119385831240683871,"is_terminal_polyfill",false,9700208384370940077],[16168342247272166835,"anstyle_parse",false,5608909534202308829],[17599588001959536047,"colorchoice",false,9595913824721439055]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anstream-c8dec6fe6431527b/dep-lib-anstream"}}],"rustflags":[],"metadata":7500874485387469444,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
671d6f432af85f03
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"default\", \"std\"]","declared_features":"","target":4691279112367741833,"profile":11497727726920792826,"path":2699772770093274216,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anstyle-41ddda993b26af3d/dep-lib-anstyle"}}],"rustflags":[],"metadata":14064844656010464607,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
45d1f646e56625ff
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"default\", \"std\"]","declared_features":"","target":4691279112367741833,"profile":13313845465037890836,"path":2699772770093274216,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anstyle-b4deb9e2034496cd/dep-lib-anstyle"}}],"rustflags":[],"metadata":14064844656010464607,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
6ce9a5530b9d51cc
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"default\", \"utf8\"]","declared_features":"","target":985948777999996156,"profile":11497727726920792826,"path":5083605088593959059,"deps":[[8720183142424604966,"utf8parse",false,8354262512456219188]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anstyle-parse-8a410b4d96bb52aa/dep-lib-anstyle_parse"}}],"rustflags":[],"metadata":9799137552285937175,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
dd286e0b81d9d64d
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"default\", \"utf8\"]","declared_features":"","target":985948777999996156,"profile":13313845465037890836,"path":5083605088593959059,"deps":[[8720183142424604966,"utf8parse",false,6519909320626031671]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anstyle-parse-db32c049e7b9b4e4/dep-lib-anstyle_parse"}}],"rustflags":[],"metadata":9799137552285937175,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
8f37c5c3d7d7ed76
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[]","declared_features":"","target":2663518930196293257,"profile":13313845465037890836,"path":15561040612267538653,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anstyle-query-04566787c005bd5e/dep-lib-anstyle_query"}}],"rustflags":[],"metadata":12668695791606146315,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
32ea8e9fedb9f71e
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[]","declared_features":"","target":2663518930196293257,"profile":11497727726920792826,"path":15561040612267538653,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anstyle-query-5bff6f8628184b67/dep-lib-anstyle_query"}}],"rustflags":[],"metadata":12668695791606146315,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
e49e6ee44b67f578
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"default\", \"std\"]","declared_features":"","target":863081735331048100,"profile":4374887572363265115,"path":957478014399389327,"deps":[[6711756778572459952,"build_script_build",false,4337685822958982743]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anyhow-497bc5b2b743e05a/dep-lib-anyhow"}}],"rustflags":[],"metadata":17154292783084528516,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
27885d35815db9ec
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"default\", \"std\"]","declared_features":"","target":863081735331048100,"profile":3797293754785534760,"path":957478014399389327,"deps":[[6711756778572459952,"build_script_build",false,4337685822958982743]],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anyhow-902a3396c007ebda/dep-lib-anyhow"}}],"rustflags":[],"metadata":17154292783084528516,"config":2202906307356721367,"compile_kind":0}
|
|
@ -0,0 +1 @@
|
||||||
|
00af9e9ffc62fa70
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[\"default\", \"std\"]","declared_features":"","target":13708040221295731214,"profile":1200860260873630964,"path":3166392776792684279,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/anyhow-a81f6b8b8a068cca/dep-build-script-build-script-build"}}],"rustflags":[],"metadata":17154292783084528516,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
57d6b720468e323c
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"","declared_features":"","target":0,"profile":0,"path":0,"deps":[[6711756778572459952,"build_script_build",false,8140928113574850304]],"local":[{"RerunIfChanged":{"output":"debug/build/anyhow-ca4752970a69a287/output","paths":["build/probe.rs"]}}],"rustflags":[],"metadata":0,"config":0,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
70cb76aef77f1080
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[]","declared_features":"","target":4416014774196737203,"profile":1200860260873630964,"path":7251434446046100793,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/autocfg-e452ec8700d8a67a/dep-lib-autocfg"}}],"rustflags":[],"metadata":13102859075309379048,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
383844a9149b8a91
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[]","declared_features":"","target":16903832911151110546,"profile":4374887572363265115,"path":35383766418190931,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/byteorder-63450e0ccd45d0f8/dep-lib-byteorder"}}],"rustflags":[],"metadata":5398730104718078656,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
03f444f324a9ae0c
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[]","declared_features":"","target":16903832911151110546,"profile":3797293754785534760,"path":35383766418190931,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/byteorder-8bd58823369234ed/dep-lib-byteorder"}}],"rustflags":[],"metadata":5398730104718078656,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
adf9cd26a9ee732c
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[]","declared_features":"","target":11601024444410784892,"profile":3797293754785534760,"path":3141140904230004506,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/cfg-if-0b311d2c89ea812e/dep-lib-cfg_if"}}],"rustflags":[],"metadata":8462187951337715540,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
fcf7b61b76651e69
|
|
@ -0,0 +1 @@
|
||||||
|
{"rustc":13835057395185014303,"features":"[]","declared_features":"","target":11601024444410784892,"profile":1200860260873630964,"path":3141140904230004506,"deps":[],"local":[{"CheckDepInfo":{"dep_info":"debug/.fingerprint/cfg-if-4b4372228eb23d63/dep-lib-cfg_if"}}],"rustflags":[],"metadata":8462187951337715540,"config":2202906307356721367,"compile_kind":0}
|
Binary file not shown.
|
@ -0,0 +1 @@
|
||||||
|
This file has an mtime of when this was started.
|
|
@ -0,0 +1 @@
|
||||||
|
581961f469c48022
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue