Mirror of https://github.com/logos-storage/proof-aggregation.git (synced 2026-01-09 09:13:09 +00:00)

improve circuit input generation and refactor

This commit is contained in commit 26a0a6b675 (parent d29c19b221).
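The central change in this commit is that the Merkle tree, Merkle proof, and input-generation types now take the hash function as a generic parameter H: AlgebraicHasher<F>, and the explicit zero-hash argument/field is dropped in favour of a shared utils::zero() helper. Below is a minimal, hedged sketch of the constructor change, assuming it lives inside the proof_input crate and uses the test aliases (F = GoldilocksField, D = 2, H = Poseidon2Hash) that appear later in this diff:

use plonky2::hash::hash_types::HashOut;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
use crate::merkle_tree::merkle_safe::MerkleTree;

type F = GoldilocksField;
const D: usize = 2;
type H = Poseidon2Hash;

fn build(leaves: &[HashOut<F>]) -> anyhow::Result<MerkleTree<F, D, H>> {
    // Old API (removed in this commit): MerkleTree::<F, D>::new(leaves, zero) with an explicit zero hash.
    // New API: the hasher H is a type parameter; odd-node padding now comes from utils::zero().
    MerkleTree::<F, D, H>::new(leaves)
}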
@@ -1,6 +1,7 @@
 // Data structure used to generate the proof input

 use plonky2::hash::hash_types::{HashOut, RichField};
+use plonky2::plonk::config::AlgebraicHasher;
 use plonky2_field::extension::Extendable;
 use codex_plonky2_circuits::circuits::sample_cells::Cell;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
@@ -14,9 +15,10 @@ use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_
 pub struct SlotTree<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
+    H: AlgebraicHasher<F>,
 > {
-    pub tree: MerkleTree<F, D>, // slot tree
-    pub block_trees: Vec<MerkleTree<F,D>>, // vec of block trees
+    pub tree: MerkleTree<F, D, H>, // slot tree
+    pub block_trees: Vec<MerkleTree<F,D, H>>, // vec of block trees
     pub cell_data: Vec<Cell<F, D>>, // cell data as field elements
     pub params: InputParams, // parameters
 }
@@ -24,7 +26,8 @@ pub struct SlotTree<
 impl<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
-> SlotTree<F, D> {
+    H: AlgebraicHasher<F>,
+> SlotTree<F, D, H> {
     /// Create a slot tree with fake data, for testing only
     pub fn new_default(params: &InputParams) -> Self {
         // generate fake cell data
@@ -40,9 +43,7 @@ impl<
             .iter()
             .map(|element| hash_bytes_no_padding::<F,D,HF>(&element.data))
             .collect();
-        let zero = HashOut {
-            elements: [F::ZERO; 4],
-        };
         let n_blocks = params.n_blocks_test();
         let n_cells_in_blocks = params.n_cells_in_blocks();

@@ -57,7 +58,7 @@ impl<
             .iter()
             .map(|t| t.root().unwrap())
             .collect::<Vec<_>>();
-        let slot_tree = MerkleTree::<F,D>::new(&block_roots, zero).unwrap();
+        let slot_tree = MerkleTree::<F,D, H>::new(&block_roots).unwrap();
         Self {
             tree: slot_tree,
             block_trees,
@@ -68,7 +69,7 @@ impl<

     /// Generates a proof for the given leaf index
     /// The path in the proof is a combined block and slot path to make up the full path
-    pub fn get_proof(&self, index: usize) -> MerkleProof<F,D> {
+    pub fn get_proof(&self, index: usize) -> MerkleProof<F,D, H> {
         let block_index = index / self.params.n_cells_in_blocks();
         let leaf_index = index % self.params.n_cells_in_blocks();
         let block_proof = self.block_trees[block_index].get_proof(leaf_index).unwrap();
@@ -78,20 +79,16 @@ impl<
         let mut combined_path = block_proof.path.clone();
         combined_path.extend(slot_proof.path.clone());

-        MerkleProof::<F,D> {
+        MerkleProof::<F,D, H>::new(
             index,
-            path: combined_path,
-            nleaves: self.cell_data.len(),
-            zero: block_proof.zero.clone(),
-        }
+            combined_path,
+            self.cell_data.len(),
+        )
     }

-    fn get_block_tree(leaves: &Vec<HashOut<F>>) -> MerkleTree<F,D> {
-        let zero = HashOut {
-            elements: [F::ZERO; 4],
-        };
+    fn get_block_tree(leaves: &Vec<HashOut<F>>) -> MerkleTree<F,D,H> {
         // Build the Merkle tree
-        let block_tree = MerkleTree::<F,D>::new(leaves, zero).unwrap();
+        let block_tree = MerkleTree::<F,D,H>::new(leaves).unwrap();
         block_tree
     }
 }
@@ -102,9 +99,10 @@ impl<
 pub struct DatasetTree<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
+    H: AlgebraicHasher<F>,
 > {
-    pub tree: MerkleTree<F,D>, // dataset tree
-    pub slot_trees: Vec<SlotTree<F, D>>, // vec of slot trees
+    pub tree: MerkleTree<F,D,H>, // dataset tree
+    pub slot_trees: Vec<SlotTree<F, D, H>>, // vec of slot trees
     pub params: InputParams, // parameters
 }

@@ -113,24 +111,26 @@ pub struct DatasetTree<
 pub struct DatasetProof<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
+    H: AlgebraicHasher<F>,
 > {
     pub slot_index: F,
     pub entropy: HashOut<F>,
-    pub dataset_proof: MerkleProof<F,D>, // proof for dataset level tree
-    pub slot_proofs: Vec<MerkleProof<F,D>>, // proofs for sampled slot
+    pub dataset_proof: MerkleProof<F,D,H>, // proof for dataset level tree
+    pub slot_proofs: Vec<MerkleProof<F,D,H>>, // proofs for sampled slot
     pub cell_data: Vec<Cell<F,D>>,
 }

 impl<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
-> DatasetTree<F, D> {
+    H: AlgebraicHasher<F>,
+> DatasetTree<F, D, H> {
     /// Dataset tree with fake data, for testing only
     pub fn new_default(params: &InputParams) -> Self {
         let mut slot_trees = vec![];
         let n_slots = 1 << params.dataset_depth_test();
         for _ in 0..n_slots {
-            slot_trees.push(SlotTree::<F, D>::new_default(params));
+            slot_trees.push(SlotTree::<F, D, H>::new_default(params));
         }
         Self::new(slot_trees, params.clone())
     }
@@ -144,15 +144,15 @@ impl<
         let zero = HashOut {
             elements: [F::ZERO; 4],
         };
-        let zero_slot = SlotTree::<F, D> {
-            tree: MerkleTree::<F,D>::new(&[zero.clone()], zero.clone()).unwrap(),
+        let zero_slot = SlotTree::<F, D, H> {
+            tree: MerkleTree::<F,D,H>::new(&[zero.clone()]).unwrap(),
             block_trees: vec![],
             cell_data: vec![],
             params: params.clone(),
         };
         for i in 0..n_slots {
             if i == params.testing_slot_index {
-                slot_trees.push(SlotTree::<F, D>::new_default(params));
+                slot_trees.push(SlotTree::<F, D, H>::new_default(params));
             } else {
                 slot_trees.push(zero_slot.clone());
             }
@@ -162,7 +162,7 @@ impl<
             .iter()
             .map(|t| t.tree.root().unwrap())
             .collect::<Vec<_>>();
-        let dataset_tree = MerkleTree::<F,D>::new(&slot_roots, zero).unwrap();
+        let dataset_tree = MerkleTree::<F,D,H>::new(&slot_roots).unwrap();
         Self {
             tree: dataset_tree,
             slot_trees,
@@ -171,17 +171,14 @@ impl<
     }

     /// Same as default but with supplied slot trees
-    pub fn new(slot_trees: Vec<SlotTree<F, D>>, params: InputParams) -> Self {
+    pub fn new(slot_trees: Vec<SlotTree<F, D, H>>, params: InputParams) -> Self {
         // get the roots of slot trees
         let slot_roots = slot_trees
             .iter()
             .map(|t| t.tree.root().unwrap())
             .collect::<Vec<_>>();
-        // zero hash
-        let zero = HashOut {
-            elements: [F::ZERO; 4],
-        };
-        let dataset_tree = MerkleTree::<F,D>::new(&slot_roots, zero).unwrap();
+        let dataset_tree = MerkleTree::<F,D,H>::new(&slot_roots).unwrap();
         Self {
             tree: dataset_tree,
             slot_trees,
@@ -192,7 +189,7 @@ impl<
     /// Generates a proof for the given slot index
     /// Also takes entropy so it can use it to sample the slot
     /// note: proofs are padded based on the params in self
-    pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F,D> {
+    pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F,D,H> {
         let mut dataset_proof = self.tree.get_proof(index).unwrap();
         Self::pad_proof(&mut dataset_proof, self.params.dataset_max_depth());

@@ -234,7 +231,7 @@ impl<
         }
     }
     /// pad the proof with 0s until max_depth
-    pub fn pad_proof(merkle_proof: &mut MerkleProof<F,D>, max_depth: usize){
+    pub fn pad_proof(merkle_proof: &mut MerkleProof<F,D,H>, max_depth: usize){
         for _i in merkle_proof.path.len()..max_depth{
             merkle_proof.path.push(HashOut::<F>::ZERO);
         }
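For orientation, here is a minimal usage sketch of the refactored slot/dataset types, assuming it sits inside the proof_input crate; field access on InputParams mirrors what the generator code in this diff does, and F/D/H are the test aliases used in the tests below.

use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
use crate::data_structs::DatasetTree;
use crate::params::InputParams;

type F = GoldilocksField;
const D: usize = 2;
type H = Poseidon2Hash;

fn sample_example(params: &InputParams) {
    // The hasher is now a type parameter of the tree types; no zero hash is passed around.
    let dataset = DatasetTree::<F, D, H>::new_default(params);
    // Sample the configured test slot; proofs are padded up to the dataset max depth.
    let proof = dataset.sample_slot(params.testing_slot_index, params.entropy);
    assert!(proof.slot_proofs.len() >= params.n_samples);
}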
|
|||||||
@ -1,230 +1,224 @@
|
|||||||
|
use std::marker::PhantomData;
|
||||||
|
use std::path::Path;
|
||||||
use plonky2::hash::hash_types::RichField;
|
use plonky2::hash::hash_types::RichField;
|
||||||
use plonky2_field::extension::Extendable;
|
use plonky2_field::extension::Extendable;
|
||||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||||
use crate::params::{Params,InputParams};
|
use crate::params::{Params,InputParams};
|
||||||
use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, ceiling_log2, usize_to_bits_le};
|
use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, ceiling_log2, usize_to_bits_le};
|
||||||
use crate::merkle_tree::merkle_safe::MerkleProof;
|
use crate::merkle_tree::merkle_safe::MerkleProof;
|
||||||
use codex_plonky2_circuits::circuits::sample_cells::{MerklePath, SampleCircuit, SampleCircuitInput, SampleTargets};
|
use codex_plonky2_circuits::circuits::sample_cells::{MerklePath, SampleCircuitInput};
|
||||||
use plonky2::iop::witness::PartialWitness;
|
use plonky2::plonk::config::AlgebraicHasher;
|
||||||
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
|
||||||
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
|
|
||||||
use plonky2::plonk::proof::ProofWithPublicInputs;
|
|
||||||
use crate::data_structs::DatasetTree;
|
use crate::data_structs::DatasetTree;
|
||||||
|
use crate::serialization::circuit_input::export_circ_input_to_json;
|
||||||
use crate::sponge::hash_bytes_no_padding;
|
use crate::sponge::hash_bytes_no_padding;
|
||||||
use crate::params::{C, D, F, HF};
|
|
||||||
|
|
||||||
/// generates circuit input (SampleCircuitInput) from fake data for testing
|
/// Input Generator to generates circuit input (SampleCircuitInput)
|
||||||
/// which can be later stored into json see json.rs
|
/// which can be later stored into json see json.rs
|
||||||
pub fn gen_testing_circuit_input<
|
/// For now it generates input from fake data for testing
|
||||||
|
pub struct InputGenerator<
|
||||||
F: RichField + Extendable<D> + Poseidon2,
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
const D: usize,
|
const D: usize,
|
||||||
>(params: &InputParams) -> SampleCircuitInput<F,D>{
|
H: AlgebraicHasher<F>,
|
||||||
let dataset_t = DatasetTree::<F, D>::new_for_testing(¶ms);
|
>{
|
||||||
|
pub input_params: InputParams,
|
||||||
let slot_index = params.testing_slot_index; // samples the specified slot
|
phantom_data: PhantomData<(F,H)>
|
||||||
let entropy = params.entropy; // Use the entropy from Params
|
|
||||||
|
|
||||||
let proof = dataset_t.sample_slot(slot_index, entropy);
|
|
||||||
let slot_root = dataset_t.slot_trees[slot_index].tree.root().unwrap();
|
|
||||||
|
|
||||||
let mut slot_paths = vec![];
|
|
||||||
for i in 0..params.n_samples {
|
|
||||||
let path = proof.slot_proofs[i].path.clone();
|
|
||||||
let mp = MerklePath::<F,D>{
|
|
||||||
path,
|
|
||||||
};
|
|
||||||
slot_paths.push(mp);
|
|
||||||
}
|
|
||||||
|
|
||||||
SampleCircuitInput::<F, D> {
|
|
||||||
entropy: proof.entropy,
|
|
||||||
dataset_root: dataset_t.tree.root().unwrap(),
|
|
||||||
slot_index: proof.slot_index.clone(),
|
|
||||||
slot_root,
|
|
||||||
n_cells_per_slot: F::from_canonical_usize(params.n_cells),
|
|
||||||
n_slots_per_dataset: F::from_canonical_usize(params.n_slots),
|
|
||||||
slot_proof: proof.dataset_proof.path.clone(),
|
|
||||||
cell_data: proof.cell_data.clone(),
|
|
||||||
merkle_paths: slot_paths,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// verifies the given circuit input.
|
impl<
|
||||||
/// this is non circuit version for sanity check
|
|
||||||
pub fn verify_circuit_input<
|
|
||||||
F: RichField + Extendable<D> + Poseidon2,
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
const D: usize,
|
const D: usize,
|
||||||
>(circ_input: SampleCircuitInput<F,D>, params: &InputParams) -> bool{
|
H: AlgebraicHasher<F>,
|
||||||
let slot_index = circ_input.slot_index.to_canonical_u64();
|
> InputGenerator<F, D, H> {
|
||||||
let slot_root = circ_input.slot_root.clone();
|
|
||||||
// check dataset level proof
|
|
||||||
let slot_proof = circ_input.slot_proof.clone();
|
|
||||||
let dataset_path_bits = usize_to_bits_le(slot_index as usize, params.dataset_max_depth());
|
|
||||||
let (dataset_last_bits, dataset_mask_bits) = ceiling_log2(params.n_slots, params.dataset_max_depth());
|
|
||||||
let reconstructed_slot_root = MerkleProof::<F,D>::reconstruct_root2(
|
|
||||||
slot_root,
|
|
||||||
dataset_path_bits,
|
|
||||||
dataset_last_bits,
|
|
||||||
slot_proof,
|
|
||||||
dataset_mask_bits,
|
|
||||||
params.max_slots.trailing_zeros() as usize,
|
|
||||||
).unwrap();
|
|
||||||
// assert reconstructed equals dataset root
|
|
||||||
assert_eq!(reconstructed_slot_root, circ_input.dataset_root.clone());
|
|
||||||
|
|
||||||
// check each sampled cell
|
pub fn new(input_params: InputParams) -> Self{
|
||||||
// get the index for cell from H(slot_root|counter|entropy)
|
Self{
|
||||||
let mask_bits = usize_to_bits_le(params.n_cells -1, params.max_depth);
|
input_params,
|
||||||
for i in 0..params.n_samples {
|
phantom_data: PhantomData::default(),
|
||||||
let cell_index_bits = calculate_cell_index_bits(
|
|
||||||
&circ_input.entropy.elements.to_vec(),
|
|
||||||
slot_root,
|
|
||||||
i + 1,
|
|
||||||
params.max_depth,
|
|
||||||
mask_bits.clone(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
|
|
||||||
|
|
||||||
let s_res = verify_cell_proof(&circ_input, ¶ms, cell_index, i);
|
|
||||||
if s_res.unwrap() == false {
|
|
||||||
println!("call {} is false", i);
|
|
||||||
return false;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Verify the given proof for slot tree, checks equality with the given root
|
pub fn default() -> Self{
|
||||||
pub fn verify_cell_proof<
|
Self{
|
||||||
F: RichField + Extendable<D> + Poseidon2,
|
input_params: Params::default().input_params,
|
||||||
const D: usize,
|
phantom_data: PhantomData::default(),
|
||||||
>(circ_input: &SampleCircuitInput<F,D>, params: &InputParams, cell_index: usize, ctr: usize) -> anyhow::Result<bool> {
|
}
|
||||||
let mut block_path_bits = usize_to_bits_le(cell_index, params.max_depth);
|
}
|
||||||
let last_index = params.n_cells - 1;
|
|
||||||
let mut block_last_bits = usize_to_bits_le(last_index, params.max_depth);
|
|
||||||
|
|
||||||
let split_point = params.bot_depth();
|
/// Generate circuit input and export to JSON
|
||||||
|
pub fn generate_and_export_circ_input_to_json<
|
||||||
|
P: AsRef<Path>,
|
||||||
|
>(
|
||||||
|
&self,
|
||||||
|
base_path: P,
|
||||||
|
) -> anyhow::Result<()> {
|
||||||
|
let circ_input = self.gen_testing_circuit_input();
|
||||||
|
export_circ_input_to_json(circ_input, base_path)?;
|
||||||
|
|
||||||
let slot_last_bits = block_last_bits.split_off(split_point);
|
Ok(())
|
||||||
let slot_path_bits = block_path_bits.split_off(split_point);
|
}
|
||||||
|
|
||||||
// pub type HP = <PoseidonHash as Hasher<F>>::Permutation;
|
/// returns exactly M default circuit input of all same circuit input
|
||||||
let leaf_hash = hash_bytes_no_padding::<F,D,HF>(&circ_input.cell_data[ctr].data);
|
pub fn get_m_testing_circ_input<const M: usize>(&self) -> [SampleCircuitInput<F,D>; M]{
|
||||||
|
let one_circ_input = self.gen_testing_circuit_input();
|
||||||
|
let circ_input: [SampleCircuitInput<F,D>; M] = (0..M)
|
||||||
|
.map(|_| one_circ_input.clone())
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.try_into().unwrap();
|
||||||
|
circ_input
|
||||||
|
}
|
||||||
|
|
||||||
let mut block_path = circ_input.merkle_paths[ctr].path.clone();
|
/// returns exactly M default circuit input of different circuit input
|
||||||
let slot_path = block_path.split_off(split_point);
|
pub fn get_m_unique_testing_circ_input<const M: usize>(&self) -> [SampleCircuitInput<F,D>; M]{
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
|
||||||
let mut block_mask_bits = usize_to_bits_le(last_index, params.max_depth+1);
|
/// generates circuit input (SampleCircuitInput) from fake data for testing
|
||||||
let mut slot_mask_bits = block_mask_bits.split_off(split_point);
|
pub fn gen_testing_circuit_input(&self) -> SampleCircuitInput<F,D>{
|
||||||
|
let params = &self.input_params;
|
||||||
|
let dataset_t = DatasetTree::<F, D, H>::new_for_testing(params);
|
||||||
|
|
||||||
block_mask_bits.push(false);
|
let slot_index = params.testing_slot_index; // samples the specified slot
|
||||||
slot_mask_bits.push(false);
|
let entropy = params.entropy; // Use the entropy from Params
|
||||||
|
|
||||||
let block_res = MerkleProof::<F,D>::reconstruct_root2(
|
let proof = dataset_t.sample_slot(slot_index, entropy);
|
||||||
leaf_hash,
|
let slot_root = dataset_t.slot_trees[slot_index].tree.root().unwrap();
|
||||||
block_path_bits.clone(),
|
|
||||||
block_last_bits.clone(),
|
|
||||||
block_path,
|
|
||||||
block_mask_bits,
|
|
||||||
params.bot_depth(),
|
|
||||||
);
|
|
||||||
let reconstructed_root = MerkleProof::<F,D>::reconstruct_root2(
|
|
||||||
block_res.unwrap(),
|
|
||||||
slot_path_bits,
|
|
||||||
slot_last_bits,
|
|
||||||
slot_path,
|
|
||||||
slot_mask_bits,
|
|
||||||
params.max_depth - params.bot_depth(),
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(reconstructed_root.unwrap() == circ_input.slot_root)
|
let mut slot_paths = vec![];
|
||||||
}
|
for i in 0..params.n_samples {
|
||||||
|
let path = proof.slot_proofs[i].path.clone();
|
||||||
|
let mp = MerklePath::<F,D>{
|
||||||
|
path,
|
||||||
|
};
|
||||||
|
slot_paths.push(mp);
|
||||||
|
}
|
||||||
|
|
||||||
/// build the sampling circuit
|
SampleCircuitInput::<F, D> {
|
||||||
/// returns the proof and circuit data
|
entropy: proof.entropy,
|
||||||
pub fn build_circuit(n_samples: usize, slot_index: usize) -> anyhow::Result<(CircuitData<F, C, D>, PartialWitness<F>)>{
|
dataset_root: dataset_t.tree.root().unwrap(),
|
||||||
let (data, pw, _) = build_circuit_with_targets(n_samples, slot_index).unwrap();
|
slot_index: proof.slot_index.clone(),
|
||||||
|
slot_root,
|
||||||
|
n_cells_per_slot: F::from_canonical_usize(params.n_cells),
|
||||||
|
n_slots_per_dataset: F::from_canonical_usize(params.n_slots),
|
||||||
|
slot_proof: proof.dataset_proof.path.clone(),
|
||||||
|
cell_data: proof.cell_data.clone(),
|
||||||
|
merkle_paths: slot_paths,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok((data, pw))
|
/// verifies the given circuit input.
|
||||||
}
|
/// this is non circuit version for sanity check
|
||||||
|
pub fn verify_circuit_input<
|
||||||
|
>(&self, circ_input: SampleCircuitInput<F,D>) -> bool{
|
||||||
|
let params = self.input_params.clone();
|
||||||
|
let slot_index = circ_input.slot_index.to_canonical_u64();
|
||||||
|
let slot_root = circ_input.slot_root.clone();
|
||||||
|
// check dataset level proof
|
||||||
|
let slot_proof = circ_input.slot_proof.clone();
|
||||||
|
let dataset_path_bits = usize_to_bits_le(slot_index as usize, params.dataset_max_depth());
|
||||||
|
let (dataset_last_bits, dataset_mask_bits) = ceiling_log2(params.n_slots, params.dataset_max_depth());
|
||||||
|
let reconstructed_slot_root = MerkleProof::<F,D,H>::reconstruct_root2(
|
||||||
|
slot_root,
|
||||||
|
dataset_path_bits,
|
||||||
|
dataset_last_bits,
|
||||||
|
slot_proof,
|
||||||
|
dataset_mask_bits,
|
||||||
|
params.max_slots.trailing_zeros() as usize,
|
||||||
|
).unwrap();
|
||||||
|
// assert reconstructed equals dataset root
|
||||||
|
assert_eq!(reconstructed_slot_root, circ_input.dataset_root.clone());
|
||||||
|
|
||||||
/// build the sampling circuit ,
|
// check each sampled cell
|
||||||
/// returns the proof, circuit data, and targets
|
// get the index for cell from H(slot_root|counter|entropy)
|
||||||
pub fn build_circuit_with_targets(n_samples: usize, slot_index: usize) -> anyhow::Result<(CircuitData<F, C, D>, PartialWitness<F>, SampleTargets)>{
|
let mask_bits = usize_to_bits_le(params.n_cells -1, params.max_depth);
|
||||||
// get input
|
for i in 0..params.n_samples {
|
||||||
let mut params = Params::default();
|
let cell_index_bits = calculate_cell_index_bits(
|
||||||
params.set_n_samples(n_samples);
|
&circ_input.entropy.elements.to_vec(),
|
||||||
let mut input_params = params.input_params;
|
slot_root,
|
||||||
input_params.testing_slot_index = slot_index;
|
i + 1,
|
||||||
let circ_input = gen_testing_circuit_input::<F,D>(&input_params);
|
params.max_depth,
|
||||||
|
mask_bits.clone(),
|
||||||
|
);
|
||||||
|
|
||||||
// Create the circuit
|
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
|
||||||
let config = CircuitConfig::standard_recursion_config();
|
|
||||||
let mut builder = CircuitBuilder::<F, D>::new(config);
|
|
||||||
|
|
||||||
let circuit_params = params.circuit_params;
|
let s_res = self.verify_cell_proof(&circ_input, cell_index, i);
|
||||||
|
if s_res.unwrap() == false {
|
||||||
|
println!("call {} is false", i);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
// build the circuit
|
/// Verify the given proof for slot tree, checks equality with the given root
|
||||||
let circ = SampleCircuit::<F,D,HF>::new(circuit_params.clone());
|
fn verify_cell_proof<
|
||||||
let targets = circ.sample_slot_circuit_with_public_input(&mut builder)?;
|
>(&self, circ_input: &SampleCircuitInput<F,D>, cell_index: usize, ctr: usize) -> anyhow::Result<bool> {
|
||||||
|
let params = self.input_params.clone();
|
||||||
|
let mut block_path_bits = usize_to_bits_le(cell_index, params.max_depth);
|
||||||
|
let last_index = params.n_cells - 1;
|
||||||
|
let mut block_last_bits = usize_to_bits_le(last_index, params.max_depth);
|
||||||
|
|
||||||
// Create a PartialWitness and assign
|
let split_point = params.bot_depth();
|
||||||
let mut pw = PartialWitness::new();
|
|
||||||
|
|
||||||
// assign a witness
|
let slot_last_bits = block_last_bits.split_off(split_point);
|
||||||
circ.sample_slot_assign_witness(&mut pw, &targets, &circ_input)?;
|
let slot_path_bits = block_path_bits.split_off(split_point);
|
||||||
|
|
||||||
// Build the circuit
|
// pub type HP = <PoseidonHash as Hasher<F>>::Permutation;
|
||||||
let data = builder.build::<C>();
|
let leaf_hash = hash_bytes_no_padding::<F,D,H>(&circ_input.cell_data[ctr].data);
|
||||||
|
|
||||||
Ok((data, pw, targets))
|
let mut block_path = circ_input.merkle_paths[ctr].path.clone();
|
||||||
}
|
let slot_path = block_path.split_off(split_point);
|
||||||
|
|
||||||
/// prove the circuit
|
let mut block_mask_bits = usize_to_bits_le(last_index, params.max_depth+1);
|
||||||
pub fn prove_circuit(data: &CircuitData<F, C, D>, pw: &PartialWitness<F>) -> anyhow::Result<ProofWithPublicInputs<F, C, D>>{
|
let mut slot_mask_bits = block_mask_bits.split_off(split_point);
|
||||||
// Prove the circuit with the assigned witness
|
|
||||||
let proof_with_pis = data.prove(pw.clone())?;
|
|
||||||
|
|
||||||
Ok(proof_with_pis)
|
block_mask_bits.push(false);
|
||||||
}
|
slot_mask_bits.push(false);
|
||||||
|
|
||||||
/// returns exactly M default circuit input
|
let block_res = MerkleProof::<F,D,H>::reconstruct_root2(
|
||||||
pub fn get_m_default_circ_input<const M: usize>() -> [SampleCircuitInput<F,D>; M]{
|
leaf_hash,
|
||||||
let params = Params::default().input_params;
|
block_path_bits.clone(),
|
||||||
// let one_circ_input = gen_testing_circuit_input::<F,D>(¶ms);
|
block_last_bits.clone(),
|
||||||
// let circ_input: [SampleCircuitInput<F,D>; M] = (0..M)
|
block_path,
|
||||||
// .map(|_| one_circ_input.clone())
|
block_mask_bits,
|
||||||
// .collect::<Vec<_>>()
|
params.bot_depth(),
|
||||||
// .try_into().unwrap();
|
);
|
||||||
// circ_input
|
let reconstructed_root = MerkleProof::<F,D,H>::reconstruct_root2(
|
||||||
get_m_circ_input::<M>(params)
|
block_res.unwrap(),
|
||||||
}
|
slot_path_bits,
|
||||||
|
slot_last_bits,
|
||||||
|
slot_path,
|
||||||
|
slot_mask_bits,
|
||||||
|
params.max_depth - params.bot_depth(),
|
||||||
|
);
|
||||||
|
|
||||||
/// returns exactly M default circuit input
|
Ok(reconstructed_root.unwrap() == circ_input.slot_root)
|
||||||
pub fn get_m_circ_input<const M: usize>(params: InputParams) -> [SampleCircuitInput<F,D>; M]{
|
}
|
||||||
// let params = Params::default().input_params;
|
|
||||||
let one_circ_input = gen_testing_circuit_input::<F,D>(¶ms);
|
|
||||||
let circ_input: [SampleCircuitInput<F,D>; M] = (0..M)
|
|
||||||
.map(|_| one_circ_input.clone())
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.try_into().unwrap();
|
|
||||||
circ_input
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
|
use plonky2::hash::poseidon::PoseidonHash;
|
||||||
|
use plonky2::plonk::config::PoseidonGoldilocksConfig;
|
||||||
|
use plonky2::plonk::proof::ProofWithPublicInputs;
|
||||||
|
use plonky2_field::goldilocks_field::GoldilocksField;
|
||||||
use super::*;
|
use super::*;
|
||||||
use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
|
use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
|
||||||
use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
|
use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
|
||||||
|
|
||||||
|
// types used in all tests
|
||||||
|
type F = GoldilocksField;
|
||||||
|
const D: usize = 2;
|
||||||
|
type H = PoseidonHash;
|
||||||
|
type C = PoseidonGoldilocksConfig;
|
||||||
|
|
||||||
// Test sample cells (non-circuit)
|
// Test sample cells (non-circuit)
|
||||||
#[test]
|
#[test]
|
||||||
fn test_gen_verify_proof(){
|
fn test_gen_verify_proof(){
|
||||||
let params = Params::default().input_params;
|
let input_gen = InputGenerator::<F,D,H>::default();
|
||||||
let w = gen_testing_circuit_input::<F,D>(¶ms);
|
let w = input_gen.gen_testing_circuit_input();
|
||||||
assert!(verify_circuit_input::<F,D>(w, ¶ms));
|
assert!(input_gen.verify_circuit_input(w));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Test sample cells in-circuit for a selected slot
|
// Test sample cells in-circuit for a selected slot
|
||||||
@ -235,10 +229,11 @@ mod tests {
|
|||||||
params.set_n_samples(10);
|
params.set_n_samples(10);
|
||||||
let input_params = params.input_params;
|
let input_params = params.input_params;
|
||||||
let circuit_params = params.circuit_params;
|
let circuit_params = params.circuit_params;
|
||||||
let circ_input = gen_testing_circuit_input::<F,D>(&input_params);
|
let input_gen = InputGenerator::<F,D,H>::new(input_params);
|
||||||
|
let circ_input = input_gen.gen_testing_circuit_input();
|
||||||
|
|
||||||
// build the circuit
|
// build the circuit
|
||||||
let circ = SampleCircuit::<F,D,HF>::new(circuit_params.clone());
|
let circ = SampleCircuit::<F,D,H>::new(circuit_params.clone());
|
||||||
let (targets, data) = circ.build_with_standard_config()?;
|
let (targets, data) = circ.build_with_standard_config()?;
|
||||||
println!("circuit size = {:?}", data.common.degree_bits());
|
println!("circuit size = {:?}", data.common.degree_bits());
|
||||||
|
|
||||||
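The new InputGenerator introduced above bundles the input parameters and the hasher choice behind one object. A minimal usage sketch, with crate paths and type aliases taken from elsewhere in this diff (the output directory is an assumption):

use proof_input::gen_input::InputGenerator;
use proof_input::params::{D, F, HF};

fn demo() -> anyhow::Result<()> {
    // Default parameters with the crate's default hash function HF.
    let input_gen = InputGenerator::<F, D, HF>::default();

    // Generate a testing SampleCircuitInput and sanity-check it outside the circuit.
    let circ_input = input_gen.gen_testing_circuit_input();
    assert!(input_gen.verify_circuit_input(circ_input));

    // Or generate and write the circuit input straight to JSON for the prover.
    input_gen.generate_and_export_circ_input_to_json("../output/test/")?;
    Ok(())
}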
|
|||||||
@ -28,7 +28,6 @@ pub fn key_compress<
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
// use plonky2::hash::poseidon::PoseidonHash;
|
|
||||||
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
|
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
|
||||||
use plonky2_field::types::Field;
|
use plonky2_field::types::Field;
|
||||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
|
||||||
|
|||||||
@ -129,7 +129,7 @@ mod tests {
|
|||||||
use plonky2::field::types::Field;
|
use plonky2::field::types::Field;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_build_circuit() -> anyhow::Result<()> {
|
fn test_mt_build_circuit() -> anyhow::Result<()> {
|
||||||
// circuit params
|
// circuit params
|
||||||
const D: usize = 2;
|
const D: usize = 2;
|
||||||
type C = PoseidonGoldilocksConfig;
|
type C = PoseidonGoldilocksConfig;
|
||||||
@ -152,10 +152,7 @@ mod tests {
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
//initialize the Merkle tree
|
//initialize the Merkle tree
|
||||||
let zero_hash = HashOut {
|
let tree = MerkleTree::<F, D, H>::new(&leaves)?;
|
||||||
elements: [GoldilocksField::ZERO; 4],
|
|
||||||
};
|
|
||||||
let tree = MerkleTree::<F, D>::new(&leaves, zero_hash)?;
|
|
||||||
|
|
||||||
// select leaf index to prove
|
// select leaf index to prove
|
||||||
let leaf_index: usize = 8;
|
let leaf_index: usize = 8;
|
||||||
@ -236,10 +233,7 @@ mod tests {
|
|||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
let zero_hash = HashOut {
|
let tree = MerkleTree::<F, D, H>::new(&leaves)?;
|
||||||
elements: [GoldilocksField::ZERO; 4],
|
|
||||||
};
|
|
||||||
let tree = MerkleTree::<F, D>::new(&leaves, zero_hash)?;
|
|
||||||
|
|
||||||
let expected_root = tree.root()?;
|
let expected_root = tree.root()?;
|
||||||
|
|
||||||
|
|||||||
@@ -1,14 +1,16 @@
-// Implementation of "safe" merkle tree
+// Implementation of Codex specific "safe" merkle tree
 // consistent with the one in codex:
 // https://github.com/codex-storage/nim-codex/blob/master/codex/merkletree/merkletree.nim

+use std::marker::PhantomData;
 use anyhow::{ensure, Result};
 use plonky2::hash::hash_types::{HashOut, RichField};
 use std::ops::Shr;
+use plonky2::plonk::config::AlgebraicHasher;
 use plonky2_field::extension::Extendable;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 use crate::merkle_tree::key_compress::key_compress;
-use crate::params::HF;
+use crate::utils::zero;

 // Constants for the keys used in compression
 pub const KEY_NONE: u64 = 0x0;
@@ -21,24 +23,25 @@ pub const KEY_ODD_AND_BOTTOM_LAYER: u64 = 0x3;
 pub struct MerkleTree<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
+    H: AlgebraicHasher<F>,
 > {
     pub layers: Vec<Vec<HashOut<F>>>,
-    pub zero: HashOut<F>,
+    phantom_data: PhantomData<H>
 }

 impl<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
-> MerkleTree<F, D> {
+    H: AlgebraicHasher<F>,
+> MerkleTree<F, D, H> {
     /// Constructs a new Merkle tree from the given leaves.
     pub fn new(
         leaves: &[HashOut<F>],
-        zero: HashOut<F>,
     ) -> Result<Self> {
-        let layers = merkle_tree_worker::<F, D>(leaves, zero, true)?;
+        let layers = merkle_tree_worker::<F, D, H>(leaves, true)?;
         Ok(Self {
             layers,
-            zero,
+            phantom_data: PhantomData::default(),
         })
     }

@@ -60,7 +63,7 @@ impl<
     }

     /// Generates a Merkle proof for a given leaf index.
-    pub fn get_proof(&self, index: usize) -> Result<MerkleProof<F, D>> {
+    pub fn get_proof(&self, index: usize) -> Result<MerkleProof<F, D, H>> {
         let depth = self.depth();
         let nleaves = self.leaves_count();

@@ -75,19 +78,18 @@ impl<
             let sibling = if j < m {
                 self.layers[i][j]
             } else {
-                self.zero
+                zero::<F,D>()
             };
             path.push(sibling);
             k = k >> 1;
             m = (m + 1) >> 1;
         }

-        Ok(MerkleProof {
+        Ok(MerkleProof::new(
             index,
             path,
             nleaves,
-            zero: self.zero,
-        })
+        ))
     }
 }

@@ -95,9 +97,9 @@ impl<
 fn merkle_tree_worker<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
+    H: AlgebraicHasher<F>,
 >(
     xs: &[HashOut<F>],
-    zero: HashOut<F>,
     is_bottom_layer: bool,
 ) -> Result<Vec<Vec<HashOut<F>>>> {
     let m = xs.len();
@@ -113,7 +115,7 @@ fn merkle_tree_worker<

     for i in 0..halfn {
         let key = if is_bottom_layer { KEY_BOTTOM_LAYER } else { KEY_NONE };
-        let h = key_compress::<F, D, HF>(xs[2 * i], xs[2 * i + 1], key);
+        let h = key_compress::<F, D, H>(xs[2 * i], xs[2 * i + 1], key);
         ys.push(h);
     }

@@ -123,12 +125,12 @@ fn merkle_tree_worker<
         } else {
             KEY_ODD
         };
-        let h = key_compress::<F, D, HF>(xs[n], zero, key);
+        let h = key_compress::<F, D, H>(xs[n], zero::<F,D>(), key);
         ys.push(h);
     }

     let mut layers = vec![xs.to_vec()];
-    let mut upper_layers = merkle_tree_worker::<F, D>(&ys, zero, false)?;
+    let mut upper_layers = merkle_tree_worker::<F, D, H>(&ys, false)?;
     layers.append(&mut upper_layers);

     Ok(layers)
@@ -139,17 +141,31 @@ fn merkle_tree_worker<
 pub struct MerkleProof<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
+    H: AlgebraicHasher<F>,
 > {
     pub index: usize, // Index of the leaf
     pub path: Vec<HashOut<F>>, // Sibling hashes from the leaf to the root
     pub nleaves: usize, // Total number of leaves
-    pub zero: HashOut<F>,
+    phantom_data: PhantomData<H>
 }

 impl<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
-> MerkleProof<F, D> {
+    H: AlgebraicHasher<F>,
+> MerkleProof<F, D, H> {
+    pub fn new(
+        index: usize,
+        path: Vec<HashOut<F>>,
+        nleaves: usize,
+    ) -> Self{
+        Self{
+            index,
+            path,
+            nleaves,
+            phantom_data: PhantomData::default(),
+        }
+    }
     /// Reconstructs the root hash from the proof and the given leaf.
     pub fn reconstruct_root(&self, leaf: HashOut<F>) -> Result<HashOut<F>> {
         let mut m = self.nleaves;
@@ -161,14 +177,14 @@ impl<
             let odd_index = (j & 1) != 0;
             if odd_index {
                 // The index of the child is odd
-                h = key_compress::<F, D, HF>(*p, h, bottom_flag);
+                h = key_compress::<F, D, H>(*p, h, bottom_flag);
             } else {
                 if j == m - 1 {
                     // Single child -> so odd node
-                    h = key_compress::<F, D, HF>(h, *p, bottom_flag + 2);
+                    h = key_compress::<F, D, H>(h, *p, bottom_flag + 2);
                 } else {
                     // Even node
-                    h = key_compress::<F, D, HF>(h, *p, bottom_flag);
+                    h = key_compress::<F, D, H>(h, *p, bottom_flag);
                 }
             }
             bottom_flag = KEY_NONE;
@@ -200,9 +216,9 @@ impl<
             let key = bottom + (2 * (odd as u64));
             let odd_index = path_bits[i];
             if odd_index {
-                h.push(key_compress::<F, D, HF>(*p, h[i], key));
+                h.push(key_compress::<F, D, H>(*p, h[i], key));
             } else {
-                h.push(key_compress::<F,D,HF>(h[i], *p, key));
+                h.push(key_compress::<F,D,H>(h[i], *p, key));
             }
             i += 1;
         }
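Since MerkleProof no longer stores a zero hash, proofs are built through MerkleProof::new and padding siblings come from utils::zero(). A small verification sketch with the refactored API, assuming it lives inside the crate and uses the test aliases from this diff:

use plonky2::hash::hash_types::HashOut;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};

type F = GoldilocksField;
const D: usize = 2;
type H = Poseidon2Hash;

fn check_leaf(tree: &MerkleTree<F, D, H>, leaves: &[HashOut<F>], i: usize) -> anyhow::Result<bool> {
    // Generate a proof for leaf i; the hasher is carried in the type, not in the data.
    let proof: MerkleProof<F, D, H> = tree.get_proof(i)?;
    // Rebuild the root from the leaf and sibling path and compare with the tree root.
    Ok(proof.reconstruct_root(leaves[i])? == tree.root()?)
}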
@ -262,13 +278,12 @@ mod tests {
|
|||||||
y: HashOut<F>,
|
y: HashOut<F>,
|
||||||
key: u64,
|
key: u64,
|
||||||
) -> HashOut<F> {
|
) -> HashOut<F> {
|
||||||
key_compress::<F,D,HF>(x,y,key)
|
key_compress::<F,D,H>(x,y,key)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_tree(
|
fn make_tree(
|
||||||
data: &[F],
|
data: &[F],
|
||||||
zero: HashOut<F>,
|
) -> Result<MerkleTree<F,D, H>> {
|
||||||
) -> Result<MerkleTree<F,D>> {
|
|
||||||
// Hash the data to obtain leaf hashes
|
// Hash the data to obtain leaf hashes
|
||||||
let leaves: Vec<HashOut<GoldilocksField>> = data
|
let leaves: Vec<HashOut<GoldilocksField>> = data
|
||||||
.iter()
|
.iter()
|
||||||
@ -278,7 +293,7 @@ mod tests {
|
|||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
MerkleTree::<F, D>::new(&leaves, zero)
|
MerkleTree::<F, D, H>::new(&leaves)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -296,12 +311,8 @@ mod tests {
|
|||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
let zero = HashOut {
|
|
||||||
elements: [F::ZERO; 4],
|
|
||||||
};
|
|
||||||
|
|
||||||
// Build the Merkle tree
|
// Build the Merkle tree
|
||||||
let tree = MerkleTree::<F, D>::new(&leaves, zero)?;
|
let tree = MerkleTree::<F, D, H>::new(&leaves)?;
|
||||||
|
|
||||||
// Get the root
|
// Get the root
|
||||||
let root = tree.root()?;
|
let root = tree.root()?;
|
||||||
@ -329,11 +340,6 @@ mod tests {
|
|||||||
.map(|&element| H::hash_no_pad(&[element]))
|
.map(|&element| H::hash_no_pad(&[element]))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// zero hash
|
|
||||||
let zero = HashOut {
|
|
||||||
elements: [F::ZERO; 4],
|
|
||||||
};
|
|
||||||
|
|
||||||
let expected_root =
|
let expected_root =
|
||||||
compress(
|
compress(
|
||||||
compress(
|
compress(
|
||||||
@ -366,7 +372,7 @@ mod tests {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Build the tree
|
// Build the tree
|
||||||
let tree = make_tree(&data, zero)?;
|
let tree = make_tree(&data)?;
|
||||||
|
|
||||||
// Get the computed root
|
// Get the computed root
|
||||||
let computed_root = tree.root()?;
|
let computed_root = tree.root()?;
|
||||||
@ -390,11 +396,6 @@ mod tests {
|
|||||||
.map(|&element| H::hash_no_pad(&[element]))
|
.map(|&element| H::hash_no_pad(&[element]))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// zero hash
|
|
||||||
let zero = HashOut {
|
|
||||||
elements: [F::ZERO; 4],
|
|
||||||
};
|
|
||||||
|
|
||||||
let expected_root =
|
let expected_root =
|
||||||
compress(
|
compress(
|
||||||
compress(
|
compress(
|
||||||
@ -418,7 +419,7 @@ mod tests {
|
|||||||
),
|
),
|
||||||
compress(
|
compress(
|
||||||
leaf_hashes[6],
|
leaf_hashes[6],
|
||||||
zero,
|
zero::<F,D>(),
|
||||||
KEY_ODD_AND_BOTTOM_LAYER,
|
KEY_ODD_AND_BOTTOM_LAYER,
|
||||||
),
|
),
|
||||||
KEY_NONE,
|
KEY_NONE,
|
||||||
@ -427,7 +428,7 @@ mod tests {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Build the tree
|
// Build the tree
|
||||||
let tree = make_tree(&data, zero)?;
|
let tree = make_tree(&data)?;
|
||||||
|
|
||||||
// Get the computed root
|
// Get the computed root
|
||||||
let computed_root = tree.root()?;
|
let computed_root = tree.root()?;
|
||||||
@ -451,11 +452,6 @@ mod tests {
|
|||||||
.map(|&element| H::hash_no_pad(&[element]))
|
.map(|&element| H::hash_no_pad(&[element]))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// zero hash
|
|
||||||
let zero = HashOut {
|
|
||||||
elements: [F::ZERO; 4],
|
|
||||||
};
|
|
||||||
|
|
||||||
let expected_root = compress(
|
let expected_root = compress(
|
||||||
compress(
|
compress(
|
||||||
compress(
|
compress(
|
||||||
@ -493,17 +489,17 @@ mod tests {
|
|||||||
leaf_hashes[9],
|
leaf_hashes[9],
|
||||||
KEY_BOTTOM_LAYER,
|
KEY_BOTTOM_LAYER,
|
||||||
),
|
),
|
||||||
zero,
|
zero::<F,D>(),
|
||||||
KEY_ODD,
|
KEY_ODD,
|
||||||
),
|
),
|
||||||
zero,
|
zero::<F,D>(),
|
||||||
KEY_ODD,
|
KEY_ODD,
|
||||||
),
|
),
|
||||||
KEY_NONE,
|
KEY_NONE,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Build the tree
|
// Build the tree
|
||||||
let tree = make_tree(&data, zero)?;
|
let tree = make_tree(&data)?;
|
||||||
|
|
||||||
// Get the computed root
|
// Get the computed root
|
||||||
let computed_root = tree.root()?;
|
let computed_root = tree.root()?;
|
||||||
@ -527,13 +523,8 @@ mod tests {
|
|||||||
.map(|&element| H::hash_no_pad(&[element]))
|
.map(|&element| H::hash_no_pad(&[element]))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// zero hash
|
|
||||||
let zero = HashOut {
|
|
||||||
elements: [F::ZERO; 4],
|
|
||||||
};
|
|
||||||
|
|
||||||
// Build the tree
|
// Build the tree
|
||||||
let tree = MerkleTree::<F, D>::new(&leaf_hashes, zero)?;
|
let tree = MerkleTree::<F, D, H>::new(&leaf_hashes)?;
|
||||||
|
|
||||||
// Get the root
|
// Get the root
|
||||||
let expected_root = tree.root()?;
|
let expected_root = tree.root()?;
|
||||||
|
|||||||
@ -3,13 +3,14 @@
|
|||||||
mod tests {
|
mod tests {
|
||||||
use plonky2::hash::hash_types::{HashOut, RichField};
|
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||||
use plonky2_field::extension::Extendable;
|
use plonky2_field::extension::Extendable;
|
||||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::{Poseidon2, Poseidon2Hash};
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
|
use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
|
||||||
use plonky2::field::goldilocks_field::GoldilocksField;
|
use plonky2::field::goldilocks_field::GoldilocksField;
|
||||||
use plonky2::field::types::Field;
|
use plonky2::field::types::Field;
|
||||||
|
|
||||||
type F = GoldilocksField;
|
type F = GoldilocksField;
|
||||||
|
type H = Poseidon2Hash;
|
||||||
const D: usize = 2;
|
const D: usize = 2;
|
||||||
|
|
||||||
struct TestCase {
|
struct TestCase {
|
||||||
@ -35,9 +36,6 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_merkle_roots() -> Result<()> {
|
fn test_merkle_roots() -> Result<()> {
|
||||||
let zero = HashOut {
|
|
||||||
elements: [F::ZERO; 4],
|
|
||||||
};
|
|
||||||
|
|
||||||
let test_cases: Vec<TestCase> = vec![
|
let test_cases: Vec<TestCase> = vec![
|
||||||
TestCase { n: 1, digest: [0x232f21acc9d346d8, 0x2eba96d3a73822c1, 0x4163308f6d0eff64, 0x5190c2b759734aff] },
|
TestCase { n: 1, digest: [0x232f21acc9d346d8, 0x2eba96d3a73822c1, 0x4163308f6d0eff64, 0x5190c2b759734aff] },
|
||||||
@ -53,7 +51,7 @@ mod tests {
|
|||||||
let inputs = digest_seq::<F,D>(n);
|
let inputs = digest_seq::<F,D>(n);
|
||||||
|
|
||||||
// Build the Merkle tree
|
// Build the Merkle tree
|
||||||
let tree = MerkleTree::<F, D>::new(&inputs, zero.clone())?;
|
let tree = MerkleTree::<F, D, H>::new(&inputs)?;
|
||||||
|
|
||||||
// Get the computed root
|
// Get the computed root
|
||||||
let computed_root = tree.root()?;
|
let computed_root = tree.root()?;
|
||||||
@ -160,14 +158,11 @@ mod tests {
|
|||||||
let mut found = false;
|
let mut found = false;
|
||||||
|
|
||||||
for index in 0..num_indices {
|
for index in 0..num_indices {
|
||||||
let proof = MerkleProof::<F,D> {
|
let proof = MerkleProof::<F,D,H>::new(
|
||||||
index,
|
index,
|
||||||
path: path_hashes.clone(),
|
path_hashes.clone(),
|
||||||
nleaves: num_indices,
|
num_indices,
|
||||||
zero: HashOut {
|
);
|
||||||
elements: [F::ZERO; 4],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
// Reconstruct the root
|
// Reconstruct the root
|
||||||
let reconstructed_root = proof.reconstruct_root(leaf.clone())?;
|
let reconstructed_root = proof.reconstruct_root(leaf.clone())?;
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ use plonky2::plonk::circuit_data::{ProverCircuitData, VerifierCircuitData};
 use plonky2::plonk::proof::ProofWithPublicInputs;
 use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
 use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
-use crate::gen_input::gen_testing_circuit_input;
+use crate::gen_input::InputGenerator;
 use crate::params::{C, D, F, HF, Params};

 pub mod tree_test;
@@ -16,7 +16,8 @@ pub fn run_sampling_circ() -> anyhow::Result<(ProofWithPublicInputs<F, C, D>, Pr
     // Circuit that does the sampling - 100 samples
     let mut params = Params::default();
     params.set_n_samples(100);
-    let one_circ_input = gen_testing_circuit_input::<F, D>(&params.input_params);
+    let input_gen = InputGenerator::<F,D,HF>::new(params.input_params.clone());
+    let one_circ_input = input_gen.gen_testing_circuit_input();
     let samp_circ = SampleCircuit::<F,D,HF>::new(params.circuit_params);
     let (inner_tar, inner_data) = samp_circ.build_with_standard_config()?;

|
|||||||
@ -10,7 +10,7 @@ mod tests {
|
|||||||
use crate::recursion::leaf_test::tests::run_leaf_circ;
|
use crate::recursion::leaf_test::tests::run_leaf_circ;
|
||||||
use crate::recursion::run_sampling_circ;
|
use crate::recursion::run_sampling_circ;
|
||||||
|
|
||||||
fn run_node_circ<const N: usize, const T: usize>(leaf_proofs: Vec<ProofWithPublicInputs<F, C, D>>, leaf_verifier_data: VerifierCircuitData<F, C, D>, flag: bool, index: usize) -> anyhow::Result<()> {
|
fn run_node_circ<const N: usize, const T: usize>(leaf_proofs: Vec<ProofWithPublicInputs<F, C, D>>, leaf_verifier_data: VerifierCircuitData<F, C, D>, _flag: bool, index: usize) -> anyhow::Result<()> {
|
||||||
|
|
||||||
// ------------------- Node --------------------
|
// ------------------- Node --------------------
|
||||||
// N leaf proofs
|
// N leaf proofs
|
||||||
@ -53,10 +53,10 @@ mod tests {
|
|||||||
fn test_real_node_circ() -> anyhow::Result<()> {
|
fn test_real_node_circ() -> anyhow::Result<()> {
|
||||||
let (inner_proof, _, inner_verifier) = run_sampling_circ()?;
|
let (inner_proof, _, inner_verifier) = run_sampling_circ()?;
|
||||||
// this is a bit wasteful to build leaf twice, TODO: fix this
|
// this is a bit wasteful to build leaf twice, TODO: fix this
|
||||||
let (leaf_proof_1, _, leaf_verifier) = run_leaf_circ::<128>(inner_proof.clone(), inner_verifier.clone(), true, 0)?;
|
let (leaf_proof_1, _, _leaf_verifier_1) = run_leaf_circ::<128>(inner_proof.clone(), inner_verifier.clone(), true, 0)?;
|
||||||
let (leaf_proof_2, _, leaf_verifier) = run_leaf_circ::<128>(inner_proof, inner_verifier, true, 1)?;
|
let (leaf_proof_2, _, leaf_verifier_2) = run_leaf_circ::<128>(inner_proof, inner_verifier, true, 1)?;
|
||||||
let leaf_proofs = vec![leaf_proof_1,leaf_proof_2];
|
let leaf_proofs = vec![leaf_proof_1,leaf_proof_2];
|
||||||
run_node_circ::<2,128>(leaf_proofs, leaf_verifier, true, 0)
|
run_node_circ::<2,128>(leaf_proofs, leaf_verifier_2, true, 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
@ -3,7 +3,6 @@
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use plonky2::plonk::proof::{ProofWithPublicInputs};
|
use plonky2::plonk::proof::{ProofWithPublicInputs};
|
||||||
use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
|
|
||||||
use crate::params::{F, D, C, HF};
|
use crate::params::{F, D, C, HF};
|
||||||
use codex_plonky2_circuits::recursion::{tree::TreeRecursion};
|
use codex_plonky2_circuits::recursion::{tree::TreeRecursion};
|
||||||
use crate::recursion::run_sampling_circ;
|
use crate::recursion::run_sampling_circ;
|
||||||
@ -12,7 +11,7 @@ mod tests {
|
|||||||
|
|
||||||
//------------ sampling inner circuit ----------------------
|
//------------ sampling inner circuit ----------------------
|
||||||
// Circuit that does the sampling - 100 samples
|
// Circuit that does the sampling - 100 samples
|
||||||
let (inner_proof, inner_prover_data, inner_verifier_data) = run_sampling_circ()?;
|
let (inner_proof, _inner_prover_data, inner_verifier_data) = run_sampling_circ()?;
|
||||||
|
|
||||||
let proofs: Vec<ProofWithPublicInputs<F, C, D>> = (0..T).map(|_i| inner_proof.clone()).collect();
|
let proofs: Vec<ProofWithPublicInputs<F, C, D>> = (0..T).map(|_i| inner_proof.clone()).collect();
|
||||||
|
|
||||||
|
|||||||
@ -69,7 +69,7 @@ mod tests {
|
|||||||
|
|
||||||
//------------ sampling inner circuit ----------------------
|
//------------ sampling inner circuit ----------------------
|
||||||
// Circuit that does the sampling - 100 samples
|
// Circuit that does the sampling - 100 samples
|
||||||
let (inner_proof, inner_prover_data, inner_verifier_data) = run_sampling_circ()?;
|
let (inner_proof, _inner_prover_data, inner_verifier_data) = run_sampling_circ()?;
|
||||||
|
|
||||||
let proofs: Vec<ProofWithPublicInputs<F, C, D>> = (0..T).map(|_i| inner_proof.clone()).collect();
|
let proofs: Vec<ProofWithPublicInputs<F, C, D>> = (0..T).map(|_i| inner_proof.clone()).collect();
|
||||||
|
|
||||||
|
|||||||
@ -7,8 +7,6 @@ use codex_plonky2_circuits::circuits::sample_cells::{Cell, MerklePath, SampleCir
|
|||||||
use std::fs::File;
|
use std::fs::File;
|
||||||
use std::io::{BufReader, Write};
|
use std::io::{BufReader, Write};
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use crate::gen_input::gen_testing_circuit_input;
|
|
||||||
use crate::params::InputParams;
|
|
||||||
use codex_plonky2_circuits::serialization::ensure_parent_directory_exists;
|
use codex_plonky2_circuits::serialization::ensure_parent_directory_exists;
|
||||||
|
|
||||||
pub const CIRC_INPUT_JSON: &str = "prover_data/input.json";
|
pub const CIRC_INPUT_JSON: &str = "prover_data/input.json";
|
||||||
@ -261,24 +259,6 @@ pub fn export_circ_input_to_json<
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/// Function to generate circuit input and export to JSON
|
|
||||||
pub fn generate_and_export_circ_input_to_json<
|
|
||||||
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
|
||||||
const D: usize,
|
|
||||||
P: AsRef<Path>,
|
|
||||||
>(
|
|
||||||
params: &InputParams,
|
|
||||||
base_path: P,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
|
|
||||||
let circ_input = gen_testing_circuit_input::<F,D>(params);
|
|
||||||
|
|
||||||
export_circ_input_to_json(circ_input, base_path)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// reads the json file, converts it to circuit input (SampleCircuitInput) and returns it
|
/// reads the json file, converts it to circuit input (SampleCircuitInput) and returns it
|
||||||
pub fn import_circ_input_from_json<
|
pub fn import_circ_input_from_json<
|
||||||
F: RichField + Extendable<D> + Poseidon2,
|
F: RichField + Extendable<D> + Poseidon2,
|
||||||
@ -303,16 +283,16 @@ mod tests {
|
|||||||
use codex_plonky2_circuits::circuits::sample_cells::{SampleCircuit, SampleCircuitInput};
|
use codex_plonky2_circuits::circuits::sample_cells::{SampleCircuit, SampleCircuitInput};
|
||||||
use plonky2::plonk::circuit_data::{ProverCircuitData, VerifierCircuitData};
|
use plonky2::plonk::circuit_data::{ProverCircuitData, VerifierCircuitData};
|
||||||
use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
|
use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
|
||||||
use crate::gen_input::{gen_testing_circuit_input, verify_circuit_input};
|
use crate::gen_input::InputGenerator;
|
||||||
use crate::serialization::circuit_input::{export_circ_input_to_json, generate_and_export_circ_input_to_json, import_circ_input_from_json};
|
use crate::serialization::circuit_input::{export_circ_input_to_json, import_circ_input_from_json};
|
||||||
|
|
||||||
// Test to generate the JSON file
|
// Test to generate the JSON file
|
||||||
#[test]
|
#[test]
|
||||||
fn test_export_circ_input_to_json() -> anyhow::Result<()> {
|
fn test_export_circ_input_to_json() -> anyhow::Result<()> {
|
||||||
// Create Params
|
// Create InputGenerator
|
||||||
let params = Params::default().input_params;
|
let input_gen = InputGenerator::<F,D,HF>::default();
|
||||||
// Export the circuit input to JSON
|
// Export the circuit input to JSON
|
||||||
generate_and_export_circ_input_to_json::<F, D,_>(¶ms, "../output/test/")?;
|
input_gen.generate_and_export_circ_input_to_json( "../output/test/")?;
|
||||||
|
|
||||||
println!("Circuit input exported to input.json");
|
println!("Circuit input exported to input.json");
|
||||||
|
|
||||||
@ -332,11 +312,11 @@ mod tests {
|
|||||||
// export the circuit input and then import it and checks equality
|
// export the circuit input and then import it and checks equality
|
||||||
#[test]
|
#[test]
|
||||||
fn test_export_import_circ_input() -> anyhow::Result<()> {
|
fn test_export_import_circ_input() -> anyhow::Result<()> {
|
||||||
// Create Params instance
|
// Create InputGenerator
|
||||||
let params = Params::default().input_params;
|
let input_gen = InputGenerator::<F,D,HF>::default();
|
||||||
|
|
||||||
// Export the circuit input to JSON
|
// Export the circuit input to JSON
|
||||||
let original_circ_input = gen_testing_circuit_input(¶ms);
|
let original_circ_input = input_gen.gen_testing_circuit_input();
|
||||||
export_circ_input_to_json(original_circ_input.clone(), "../output/test/")?;
|
export_circ_input_to_json(original_circ_input.clone(), "../output/test/")?;
|
||||||
println!("circuit input exported to input.json");
|
println!("circuit input exported to input.json");
|
||||||
|
|
||||||
@ -387,14 +367,15 @@ mod tests {
|
|||||||
// NOTE: expects that the json input proof uses the default params
|
// NOTE: expects that the json input proof uses the default params
|
||||||
#[test]
|
#[test]
|
||||||
fn test_read_json_and_verify() -> anyhow::Result<()> {
|
fn test_read_json_and_verify() -> anyhow::Result<()> {
|
||||||
let params = Params::default().input_params;
|
// Create InputGenerator
|
||||||
|
let input_gen = InputGenerator::<F,D,HF>::default();
|
||||||
|
|
||||||
// Import the circuit input from JSON
|
// Import the circuit input from JSON
|
||||||
let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("../output/test/")?;
|
let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("../output/test/")?;
|
||||||
println!("circuit input imported from input.json");
|
println!("circuit input imported from input.json");
|
||||||
|
|
||||||
// Verify the proof
|
// Verify the proof
|
||||||
let ver = verify_circuit_input(imported_circ_input, ¶ms);
|
let ver = input_gen.verify_circuit_input(imported_circ_input);
|
||||||
assert!(
|
assert!(
|
||||||
ver,
|
ver,
|
||||||
"Merkle proof verification failed"
|
"Merkle proof verification failed"
|
||||||
|
|||||||
@ -168,8 +168,15 @@ pub fn hash_bytes_to_m_no_padding<
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use plonky2::field::types::Field;
|
use plonky2::field::types::Field;
|
||||||
|
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
|
||||||
|
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
|
||||||
use crate::sponge::hash_n_with_padding;
|
use crate::sponge::hash_n_with_padding;
|
||||||
use crate::params::{D, F, HF};
|
|
||||||
|
// test types
|
||||||
|
pub const D: usize = 2;
|
||||||
|
pub type C = PoseidonGoldilocksConfig;
|
||||||
|
pub type F = <C as GenericConfig<D>>::F;
|
||||||
|
pub type H = Poseidon2Hash;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_sponge_hash_rate_8() {
|
fn test_sponge_hash_rate_8() {
|
||||||
@ -273,7 +280,7 @@ mod tests {
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// Call the sponge function
|
// Call the sponge function
|
||||||
let output = hash_n_with_padding::<F,D,HF>(&inputs);
|
let output = hash_n_with_padding::<F,D,H>(&inputs);
|
||||||
|
|
||||||
// Compare the outputs
|
// Compare the outputs
|
||||||
for (i, &out_elem) in output.elements.iter().enumerate() {
|
for (i, &out_elem) in output.elements.iter().enumerate() {
|
||||||
|
|||||||
@@ -100,3 +100,7 @@ pub fn ceiling_log2(

     (last_bits, mask)
 }

+pub fn zero<F: RichField + Extendable<D> + Poseidon2, const D: usize>() -> HashOut<F>{
+    HashOut { elements: [F::ZERO; 4],}
+}
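This shared zero() helper replaces the zero hash that used to be stored on every tree and proof. A tiny illustrative check, with test aliases assumed:

use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use crate::utils::zero;

type F = GoldilocksField;
const D: usize = 2;

fn zero_is_all_zero_limbs() {
    // zero::<F,D>() is what get_proof and merkle_tree_worker now use for missing
    // (odd) siblings instead of a zero hash stored on the tree.
    let z = zero::<F, D>();
    assert!(z.elements.iter().all(|e| *e == F::ZERO));
}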
|
|||||||
@@ -1,9 +1,9 @@
 use std::time::Instant;
 use anyhow::Result;
 use proof_input::serialization::circuit_input::export_circ_input_to_json;
-use proof_input::gen_input::gen_testing_circuit_input;
+use proof_input::gen_input::InputGenerator;
 use proof_input::params::Params;
-use proof_input::params::{D, F};
+use proof_input::params::{D, F, HF};
 use crate::file_paths::SAMPLING_CIRC_BASE_PATH;

 pub fn run() -> Result<()> {
@@ -12,7 +12,8 @@ pub fn run() -> Result<()> {

     // generate circuit input with given parameters
     let start_time = Instant::now();
-    let circ_input = gen_testing_circuit_input::<F,D>(&params.input_params);
+    let input_gen = InputGenerator::<F,D,HF>::new(params.input_params);
+    let circ_input = input_gen.gen_testing_circuit_input();
     println!("Generating input time: {:?}", start_time.elapsed());

     // export circuit parameters to json file
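Downstream, the exported JSON can be read back and re-verified. A round-trip sketch based on the serialization tests in this diff; the output directory and the public visibility of import_circ_input_from_json outside the crate are assumptions:

use codex_plonky2_circuits::circuits::sample_cells::SampleCircuitInput;
use proof_input::gen_input::InputGenerator;
use proof_input::params::{D, F, HF};
use proof_input::serialization::circuit_input::{export_circ_input_to_json, import_circ_input_from_json};

fn round_trip() -> anyhow::Result<()> {
    let input_gen = InputGenerator::<F, D, HF>::default();
    let original = input_gen.gen_testing_circuit_input();

    // Write the circuit input to JSON and read it back.
    export_circ_input_to_json(original.clone(), "../output/test/")?;
    let imported: SampleCircuitInput<F, D> = import_circ_input_from_json("../output/test/")?;

    // The imported input should still pass the non-circuit sanity check.
    assert!(input_gen.verify_circuit_input(imported));
    Ok(())
}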