mirror of
https://github.com/logos-storage/proof-aggregation.git
synced 2026-01-02 13:53:13 +00:00
refactor and improve code readability
This commit is contained in:
parent
a2113ac44b
commit
72cc4699f3
259
proof-input/src/data_structs.rs
Normal file
259
proof-input/src/data_structs.rs
Normal file
@ -0,0 +1,259 @@
|
||||
// Data structure used to generate the proof input
|
||||
|
||||
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||
use plonky2_field::extension::Extendable;
|
||||
use codex_plonky2_circuits::circuits::params::HF;
|
||||
use codex_plonky2_circuits::circuits::sample_cells::Cell;
|
||||
use plonky2_field::types::Sample;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
|
||||
use crate::params::TestParams;
|
||||
use crate::sponge::hash_bytes_no_padding;
|
||||
use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le};
|
||||
|
||||
// ----------------- slot tree -----------------
|
||||
#[derive(Clone)]
|
||||
pub struct SlotTree<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> {
|
||||
pub tree: MerkleTree<F, D>, // slot tree
|
||||
pub block_trees: Vec<MerkleTree<F,D>>, // vec of block trees
|
||||
pub cell_data: Vec<Cell<F, D>>, // cell data as field elements
|
||||
pub params: TestParams, // parameters
|
||||
}
|
||||
|
||||
impl<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> SlotTree<F, D> {
|
||||
/// Create a slot tree with fake data, for testing only
|
||||
pub fn new_default(params: &TestParams) -> Self {
|
||||
// generate fake cell data
|
||||
let cell_data = (0..params.n_cells)
|
||||
.map(|_| new_random_cell(params))
|
||||
.collect::<Vec<_>>();
|
||||
Self::new(cell_data, params.clone())
|
||||
}
|
||||
|
||||
/// Create a new slot tree with the supplied cell data and parameters
|
||||
pub fn new(cells: Vec<Cell<F, D>>, params: TestParams) -> Self {
|
||||
let leaves: Vec<HashOut<F>> = cells
|
||||
.iter()
|
||||
.map(|element| hash_bytes_no_padding::<F,D,HF>(&element.data))
|
||||
.collect();
|
||||
let zero = HashOut {
|
||||
elements: [F::ZERO; 4],
|
||||
};
|
||||
let n_blocks = params.n_blocks_test();
|
||||
let n_cells_in_blocks = params.n_cells_in_blocks();
|
||||
|
||||
let block_trees = (0..n_blocks)
|
||||
.map(|i| {
|
||||
let start = i * n_cells_in_blocks;
|
||||
let end = (i + 1) * n_cells_in_blocks;
|
||||
Self::get_block_tree(&leaves[start..end].to_vec())
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let block_roots = block_trees
|
||||
.iter()
|
||||
.map(|t| t.root().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
let slot_tree = MerkleTree::<F,D>::new(&block_roots, zero).unwrap();
|
||||
Self {
|
||||
tree: slot_tree,
|
||||
block_trees,
|
||||
cell_data: cells,
|
||||
params,
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates a proof for the given leaf index
|
||||
/// The path in the proof is a combined block and slot path to make up the full path
|
||||
pub fn get_proof(&self, index: usize) -> MerkleProof<F,D> {
|
||||
let block_index = index / self.params.n_cells_in_blocks();
|
||||
let leaf_index = index % self.params.n_cells_in_blocks();
|
||||
let block_proof = self.block_trees[block_index].get_proof(leaf_index).unwrap();
|
||||
let slot_proof = self.tree.get_proof(block_index).unwrap();
|
||||
|
||||
// Combine the paths from the block and slot proofs
|
||||
let mut combined_path = block_proof.path.clone();
|
||||
combined_path.extend(slot_proof.path.clone());
|
||||
|
||||
MerkleProof::<F,D> {
|
||||
index,
|
||||
path: combined_path,
|
||||
nleaves: self.cell_data.len(),
|
||||
zero: block_proof.zero.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_block_tree(leaves: &Vec<HashOut<F>>) -> MerkleTree<F,D> {
|
||||
let zero = HashOut {
|
||||
elements: [F::ZERO; 4],
|
||||
};
|
||||
// Build the Merkle tree
|
||||
let block_tree = MerkleTree::<F,D>::new(leaves, zero).unwrap();
|
||||
block_tree
|
||||
}
|
||||
}
|
||||
|
||||
// -------------- Dataset Tree -------------
|
||||
/// Dataset tree containing all slot trees
|
||||
#[derive(Clone)]
|
||||
pub struct DatasetTree<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> {
|
||||
pub tree: MerkleTree<F,D>, // dataset tree
|
||||
pub slot_trees: Vec<SlotTree<F, D>>, // vec of slot trees
|
||||
pub params: TestParams, // parameters
|
||||
}
|
||||
|
||||
/// Dataset Merkle proof struct, containing the dataset proof and sampled proofs.
|
||||
#[derive(Clone)]
|
||||
pub struct DatasetProof<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> {
|
||||
pub slot_index: F,
|
||||
pub entropy: HashOut<F>,
|
||||
pub dataset_proof: MerkleProof<F,D>, // proof for dataset level tree
|
||||
pub slot_proofs: Vec<MerkleProof<F,D>>, // proofs for sampled slot
|
||||
pub cell_data: Vec<Cell<F,D>>,
|
||||
}
|
||||
|
||||
impl<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> DatasetTree<F, D> {
|
||||
/// Dataset tree with fake data, for testing only
|
||||
pub fn new_default(params: &TestParams) -> Self {
|
||||
let mut slot_trees = vec![];
|
||||
let n_slots = 1 << params.dataset_depth_test();
|
||||
for _ in 0..n_slots {
|
||||
slot_trees.push(SlotTree::<F, D>::new_default(params));
|
||||
}
|
||||
Self::new(slot_trees, params.clone())
|
||||
}
|
||||
|
||||
/// Create data for only the specified slot index in params
|
||||
pub fn new_for_testing(params: &TestParams) -> Self {
|
||||
let mut slot_trees = vec![];
|
||||
// let n_slots = 1 << params.dataset_depth();
|
||||
let n_slots = params.n_slots;
|
||||
// zero hash
|
||||
let zero = HashOut {
|
||||
elements: [F::ZERO; 4],
|
||||
};
|
||||
let zero_slot = SlotTree::<F, D> {
|
||||
tree: MerkleTree::<F,D>::new(&[zero.clone()], zero.clone()).unwrap(),
|
||||
block_trees: vec![],
|
||||
cell_data: vec![],
|
||||
params: params.clone(),
|
||||
};
|
||||
for i in 0..n_slots {
|
||||
if i == params.testing_slot_index {
|
||||
slot_trees.push(SlotTree::<F, D>::new_default(params));
|
||||
} else {
|
||||
slot_trees.push(zero_slot.clone());
|
||||
}
|
||||
}
|
||||
// get the roots of slot trees
|
||||
let slot_roots = slot_trees
|
||||
.iter()
|
||||
.map(|t| t.tree.root().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
let dataset_tree = MerkleTree::<F,D>::new(&slot_roots, zero).unwrap();
|
||||
Self {
|
||||
tree: dataset_tree,
|
||||
slot_trees,
|
||||
params: params.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Same as default but with supplied slot trees
|
||||
pub fn new(slot_trees: Vec<SlotTree<F, D>>, params: TestParams) -> Self {
|
||||
// get the roots of slot trees
|
||||
let slot_roots = slot_trees
|
||||
.iter()
|
||||
.map(|t| t.tree.root().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
// zero hash
|
||||
let zero = HashOut {
|
||||
elements: [F::ZERO; 4],
|
||||
};
|
||||
let dataset_tree = MerkleTree::<F,D>::new(&slot_roots, zero).unwrap();
|
||||
Self {
|
||||
tree: dataset_tree,
|
||||
slot_trees,
|
||||
params,
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates a proof for the given slot index
|
||||
/// Also takes entropy so it can use it to sample the slot
|
||||
/// note: proofs are padded based on the params in self
|
||||
pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F,D> {
|
||||
let mut dataset_proof = self.tree.get_proof(index).unwrap();
|
||||
Self::pad_proof(&mut dataset_proof, self.params.dataset_max_depth());
|
||||
|
||||
let slot = &self.slot_trees[index];
|
||||
let slot_root = slot.tree.root().unwrap();
|
||||
let mut slot_proofs = vec![];
|
||||
let mut cell_data = vec![];
|
||||
let entropy_field = F::from_canonical_u64(entropy as u64);
|
||||
let mut entropy_as_digest = HashOut::<F>::ZERO;
|
||||
entropy_as_digest.elements[0] = entropy_field;
|
||||
|
||||
// get the index for cell from H(slot_root|counter|entropy)
|
||||
let mask_bits = usize_to_bits_le(self.params.n_cells-1, self.params.max_depth+1);
|
||||
for i in 0..self.params.n_samples {
|
||||
let cell_index_bits = calculate_cell_index_bits(
|
||||
&entropy_as_digest.elements.to_vec(),
|
||||
slot_root,
|
||||
i + 1,
|
||||
self.params.max_depth,
|
||||
mask_bits.clone()
|
||||
);
|
||||
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
|
||||
let mut s_proof = slot.get_proof(cell_index);
|
||||
Self::pad_proof(&mut s_proof, self.params.max_depth);
|
||||
slot_proofs.push(s_proof);
|
||||
let data_i = slot.cell_data[cell_index].data.clone();
|
||||
let cell_i = Cell::<F,D>{
|
||||
data: data_i
|
||||
};
|
||||
cell_data.push(cell_i);
|
||||
}
|
||||
|
||||
DatasetProof {
|
||||
slot_index: F::from_canonical_u64(index as u64),
|
||||
entropy: entropy_as_digest,
|
||||
dataset_proof,
|
||||
slot_proofs,
|
||||
cell_data,
|
||||
}
|
||||
}
|
||||
/// pad the proof with 0s until max_depth
|
||||
pub fn pad_proof(merkle_proof: &mut MerkleProof<F,D>, max_depth: usize){
|
||||
for i in merkle_proof.path.len()..max_depth{
|
||||
merkle_proof.path.push(HashOut::<F>::ZERO);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ------------ helper functions -------------
|
||||
|
||||
/// Create a new cell with random data, using the parameters from `Params`
|
||||
pub fn new_random_cell<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
>(params: &TestParams) -> Cell<F,D> {
|
||||
let data = (0..params.n_field_elems_per_cell())
|
||||
.map(|_| F::rand())
|
||||
.collect::<Vec<_>>();
|
||||
Cell::<F,D> {
|
||||
data,
|
||||
}
|
||||
}
|
||||
@ -1,4 +1,4 @@
|
||||
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||
use plonky2::hash::hash_types::RichField;
|
||||
use plonky2::plonk::config::{GenericConfig, Hasher};
|
||||
use plonky2_field::extension::Extendable;
|
||||
use plonky2_field::types::Field;
|
||||
@ -6,12 +6,13 @@ use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
use codex_plonky2_circuits::circuits::params::{CircuitParams, HF};
|
||||
use crate::params::TestParams;
|
||||
use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, ceiling_log2, usize_to_bits_le};
|
||||
use codex_plonky2_circuits::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
|
||||
use codex_plonky2_circuits::circuits::sample_cells::{Cell, MerklePath, SampleCircuit, SampleCircuitInput, SampleTargets};
|
||||
use crate::merkle_tree::merkle_safe::MerkleProof;
|
||||
use codex_plonky2_circuits::circuits::sample_cells::{MerklePath, SampleCircuit, SampleCircuitInput, SampleTargets};
|
||||
use plonky2::iop::witness::PartialWitness;
|
||||
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
|
||||
use plonky2::plonk::proof::ProofWithPublicInputs;
|
||||
use crate::data_structs::DatasetTree;
|
||||
use crate::sponge::hash_bytes_no_padding;
|
||||
use crate::params::{C, D, F};
|
||||
|
||||
@ -143,251 +144,6 @@ pub fn verify_cell_proof<
|
||||
Ok(reconstructed_root.unwrap() == circ_input.slot_root)
|
||||
}
|
||||
|
||||
|
||||
/// Create a new cell with random data, using the parameters from `Params`
|
||||
pub fn new_random_cell<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
>(params: &TestParams) -> Cell<F,D> {
|
||||
let data = (0..params.n_field_elems_per_cell())
|
||||
.map(|_| F::rand())
|
||||
.collect::<Vec<_>>();
|
||||
Cell::<F,D> {
|
||||
data,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SlotTree<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> {
|
||||
pub tree: MerkleTree<F, D>, // slot tree
|
||||
pub block_trees: Vec<MerkleTree<F,D>>, // vec of block trees
|
||||
pub cell_data: Vec<Cell<F, D>>, // cell data as field elements
|
||||
pub params: TestParams, // parameters
|
||||
}
|
||||
|
||||
impl<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> SlotTree<F, D> {
|
||||
/// Create a slot tree with fake data, for testing only
|
||||
pub fn new_default(params: &TestParams) -> Self {
|
||||
// generate fake cell data
|
||||
let cell_data = (0..params.n_cells)
|
||||
.map(|_| new_random_cell(params))
|
||||
.collect::<Vec<_>>();
|
||||
Self::new(cell_data, params.clone())
|
||||
}
|
||||
|
||||
/// Create a new slot tree with the supplied cell data and parameters
|
||||
pub fn new(cells: Vec<Cell<F, D>>, params: TestParams) -> Self {
|
||||
let leaves: Vec<HashOut<F>> = cells
|
||||
.iter()
|
||||
.map(|element| hash_bytes_no_padding::<F,D,HF>(&element.data))
|
||||
.collect();
|
||||
let zero = HashOut {
|
||||
elements: [F::ZERO; 4],
|
||||
};
|
||||
let n_blocks = params.n_blocks_test();
|
||||
let n_cells_in_blocks = params.n_cells_in_blocks();
|
||||
|
||||
let block_trees = (0..n_blocks)
|
||||
.map(|i| {
|
||||
let start = i * n_cells_in_blocks;
|
||||
let end = (i + 1) * n_cells_in_blocks;
|
||||
Self::get_block_tree(&leaves[start..end].to_vec())
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let block_roots = block_trees
|
||||
.iter()
|
||||
.map(|t| t.root().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
let slot_tree = MerkleTree::<F,D>::new(&block_roots, zero).unwrap();
|
||||
Self {
|
||||
tree: slot_tree,
|
||||
block_trees,
|
||||
cell_data: cells,
|
||||
params,
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates a proof for the given leaf index
|
||||
/// The path in the proof is a combined block and slot path to make up the full path
|
||||
pub fn get_proof(&self, index: usize) -> MerkleProof<F,D> {
|
||||
let block_index = index / self.params.n_cells_in_blocks();
|
||||
let leaf_index = index % self.params.n_cells_in_blocks();
|
||||
let block_proof = self.block_trees[block_index].get_proof(leaf_index).unwrap();
|
||||
let slot_proof = self.tree.get_proof(block_index).unwrap();
|
||||
|
||||
// Combine the paths from the block and slot proofs
|
||||
let mut combined_path = block_proof.path.clone();
|
||||
combined_path.extend(slot_proof.path.clone());
|
||||
|
||||
MerkleProof::<F,D> {
|
||||
index,
|
||||
path: combined_path,
|
||||
nleaves: self.cell_data.len(),
|
||||
zero: block_proof.zero.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_block_tree(leaves: &Vec<HashOut<F>>) -> MerkleTree<F,D> {
|
||||
let zero = HashOut {
|
||||
elements: [F::ZERO; 4],
|
||||
};
|
||||
// Build the Merkle tree
|
||||
let block_tree = MerkleTree::<F,D>::new(leaves, zero).unwrap();
|
||||
block_tree
|
||||
}
|
||||
}
|
||||
|
||||
// ------ Dataset Tree --------
|
||||
/// Dataset tree containing all slot trees
|
||||
#[derive(Clone)]
|
||||
pub struct DatasetTree<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> {
|
||||
pub tree: MerkleTree<F,D>, // dataset tree
|
||||
pub slot_trees: Vec<SlotTree<F, D>>, // vec of slot trees
|
||||
pub params: TestParams, // parameters
|
||||
}
|
||||
|
||||
/// Dataset Merkle proof struct, containing the dataset proof and sampled proofs.
|
||||
#[derive(Clone)]
|
||||
pub struct DatasetProof<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> {
|
||||
pub slot_index: F,
|
||||
pub entropy: HashOut<F>,
|
||||
pub dataset_proof: MerkleProof<F,D>, // proof for dataset level tree
|
||||
pub slot_proofs: Vec<MerkleProof<F,D>>, // proofs for sampled slot
|
||||
pub cell_data: Vec<Cell<F,D>>,
|
||||
}
|
||||
|
||||
impl<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> DatasetTree<F, D> {
|
||||
/// Dataset tree with fake data, for testing only
|
||||
pub fn new_default(params: &TestParams) -> Self {
|
||||
let mut slot_trees = vec![];
|
||||
let n_slots = 1 << params.dataset_depth_test();
|
||||
for _ in 0..n_slots {
|
||||
slot_trees.push(SlotTree::<F, D>::new_default(params));
|
||||
}
|
||||
Self::new(slot_trees, params.clone())
|
||||
}
|
||||
|
||||
/// Create data for only the specified slot index in params
|
||||
pub fn new_for_testing(params: &TestParams) -> Self {
|
||||
let mut slot_trees = vec![];
|
||||
// let n_slots = 1 << params.dataset_depth();
|
||||
let n_slots = params.n_slots;
|
||||
// zero hash
|
||||
let zero = HashOut {
|
||||
elements: [F::ZERO; 4],
|
||||
};
|
||||
let zero_slot = SlotTree::<F, D> {
|
||||
tree: MerkleTree::<F,D>::new(&[zero.clone()], zero.clone()).unwrap(),
|
||||
block_trees: vec![],
|
||||
cell_data: vec![],
|
||||
params: params.clone(),
|
||||
};
|
||||
for i in 0..n_slots {
|
||||
if i == params.testing_slot_index {
|
||||
slot_trees.push(SlotTree::<F, D>::new_default(params));
|
||||
} else {
|
||||
slot_trees.push(zero_slot.clone());
|
||||
}
|
||||
}
|
||||
// get the roots of slot trees
|
||||
let slot_roots = slot_trees
|
||||
.iter()
|
||||
.map(|t| t.tree.root().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
let dataset_tree = MerkleTree::<F,D>::new(&slot_roots, zero).unwrap();
|
||||
Self {
|
||||
tree: dataset_tree,
|
||||
slot_trees,
|
||||
params: params.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Same as default but with supplied slot trees
|
||||
pub fn new(slot_trees: Vec<SlotTree<F, D>>, params: TestParams) -> Self {
|
||||
// get the roots of slot trees
|
||||
let slot_roots = slot_trees
|
||||
.iter()
|
||||
.map(|t| t.tree.root().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
// zero hash
|
||||
let zero = HashOut {
|
||||
elements: [F::ZERO; 4],
|
||||
};
|
||||
let dataset_tree = MerkleTree::<F,D>::new(&slot_roots, zero).unwrap();
|
||||
Self {
|
||||
tree: dataset_tree,
|
||||
slot_trees,
|
||||
params,
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates a proof for the given slot index
|
||||
/// Also takes entropy so it can use it to sample the slot
|
||||
/// note: proofs are padded based on the params in self
|
||||
pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F,D> {
|
||||
let mut dataset_proof = self.tree.get_proof(index).unwrap();
|
||||
Self::pad_proof(&mut dataset_proof, self.params.dataset_max_depth());
|
||||
|
||||
let slot = &self.slot_trees[index];
|
||||
let slot_root = slot.tree.root().unwrap();
|
||||
let mut slot_proofs = vec![];
|
||||
let mut cell_data = vec![];
|
||||
let entropy_field = F::from_canonical_u64(entropy as u64);
|
||||
let mut entropy_as_digest = HashOut::<F>::ZERO;
|
||||
entropy_as_digest.elements[0] = entropy_field;
|
||||
|
||||
// get the index for cell from H(slot_root|counter|entropy)
|
||||
let mask_bits = usize_to_bits_le(self.params.n_cells-1, self.params.max_depth+1);
|
||||
for i in 0..self.params.n_samples {
|
||||
let cell_index_bits = calculate_cell_index_bits(
|
||||
&entropy_as_digest.elements.to_vec(),
|
||||
slot_root,
|
||||
i + 1,
|
||||
self.params.max_depth,
|
||||
mask_bits.clone()
|
||||
);
|
||||
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
|
||||
let mut s_proof = slot.get_proof(cell_index);
|
||||
Self::pad_proof(&mut s_proof, self.params.max_depth);
|
||||
slot_proofs.push(s_proof);
|
||||
let data_i = slot.cell_data[cell_index].data.clone();
|
||||
let cell_i = Cell::<F,D>{
|
||||
data: data_i
|
||||
};
|
||||
cell_data.push(cell_i);
|
||||
}
|
||||
|
||||
DatasetProof {
|
||||
slot_index: F::from_canonical_u64(index as u64),
|
||||
entropy: entropy_as_digest,
|
||||
dataset_proof,
|
||||
slot_proofs,
|
||||
cell_data,
|
||||
}
|
||||
}
|
||||
/// pad the proof with 0s until max_depth
|
||||
pub fn pad_proof(merkle_proof: &mut MerkleProof<F,D>, max_depth: usize){
|
||||
for i in merkle_proof.path.len()..max_depth{
|
||||
merkle_proof.path.push(HashOut::<F>::ZERO);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// build the sampling circuit
|
||||
/// returns the proof and circuit data
|
||||
pub fn build_circuit(n_samples: usize, slot_index: usize) -> anyhow::Result<(CircuitData<F, C, D>, PartialWitness<F>)>{
|
||||
@ -414,13 +170,13 @@ pub fn build_circuit_with_targets(n_samples: usize, slot_index: usize) -> anyhow
|
||||
|
||||
// build the circuit
|
||||
let circ = SampleCircuit::new(circuit_params.clone());
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder);
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder)?;
|
||||
|
||||
// Create a PartialWitness and assign
|
||||
let mut pw = PartialWitness::new();
|
||||
|
||||
// assign a witness
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &circ_input);
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &circ_input)?;
|
||||
|
||||
// Build the circuit
|
||||
let data = builder.build::<C>();
|
||||
@ -437,10 +193,10 @@ pub fn prove_circuit(data: &CircuitData<F, C, D>, pw: &PartialWitness<F>) -> any
|
||||
}
|
||||
|
||||
/// returns exactly M default circuit input
|
||||
pub fn get_m_default_circ_input<const M: usize>() -> [SampleCircuitInput<codex_plonky2_circuits::recursion::params::F,D>; M]{
|
||||
pub fn get_m_default_circ_input<const M: usize>() -> [SampleCircuitInput<codex_plonky2_circuits::params::F,D>; M]{
|
||||
let params = TestParams::default();
|
||||
let one_circ_input = gen_testing_circuit_input::<codex_plonky2_circuits::recursion::params::F,D>(¶ms);
|
||||
let circ_input: [SampleCircuitInput<codex_plonky2_circuits::recursion::params::F,D>; M] = (0..M)
|
||||
let one_circ_input = gen_testing_circuit_input::<codex_plonky2_circuits::params::F,D>(¶ms);
|
||||
let circ_input: [SampleCircuitInput<codex_plonky2_circuits::params::F,D>; M] = (0..M)
|
||||
.map(|_| one_circ_input.clone())
|
||||
.collect::<Vec<_>>()
|
||||
.try_into().unwrap();
|
||||
@ -483,13 +239,13 @@ mod tests {
|
||||
|
||||
// build the circuit
|
||||
let circ = SampleCircuit::new(circuit_params.clone());
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder);
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder)?;
|
||||
|
||||
// Create a PartialWitness and assign
|
||||
let mut pw = PartialWitness::new();
|
||||
|
||||
// assign a witness
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &circ_input);
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &circ_input)?;
|
||||
|
||||
// Build the circuit
|
||||
let data = builder.build::<C>();
|
||||
|
||||
@ -2,6 +2,8 @@
|
||||
pub mod gen_input;
|
||||
pub mod params;
|
||||
pub mod utils;
|
||||
pub mod json;
|
||||
pub mod tests;
|
||||
mod sponge;
|
||||
// pub mod recursion;
|
||||
pub mod sponge;
|
||||
pub mod merkle_tree;
|
||||
pub mod data_structs;
|
||||
pub mod serialization;
|
||||
|
||||
110
proof-input/src/merkle_tree/key_compress.rs
Normal file
110
proof-input/src/merkle_tree/key_compress.rs
Normal file
@ -0,0 +1,110 @@
|
||||
use plonky2::hash::hash_types::{HashOut, NUM_HASH_OUT_ELTS, RichField};
|
||||
use plonky2::hash::hashing::PlonkyPermutation;
|
||||
use plonky2::plonk::config::Hasher;
|
||||
use plonky2_field::extension::Extendable;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
|
||||
/// Compression function which takes two 256 bit inputs (HashOut) and u64 key (which is converted to field element in the function)
|
||||
/// and returns a 256 bit output (HashOut / 4 Goldilocks field elems).
|
||||
pub fn key_compress<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
H:Hasher<F>
|
||||
>(x: HashOut<F>, y: HashOut<F>, key: u64) -> HashOut<F> {
|
||||
|
||||
let key_field = F::from_canonical_u64(key);
|
||||
|
||||
let mut perm = H::Permutation::new(core::iter::repeat(F::ZERO));
|
||||
perm.set_from_slice(&x.elements, 0);
|
||||
perm.set_from_slice(&y.elements, NUM_HASH_OUT_ELTS);
|
||||
perm.set_elt(key_field,NUM_HASH_OUT_ELTS*2);
|
||||
|
||||
perm.permute();
|
||||
|
||||
HashOut {
|
||||
elements: perm.squeeze()[..NUM_HASH_OUT_ELTS].try_into().unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
// use plonky2::hash::poseidon::PoseidonHash;
|
||||
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
|
||||
use plonky2_field::types::Field;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
|
||||
use super::*;
|
||||
// test types
|
||||
pub const D: usize = 2;
|
||||
pub type C = PoseidonGoldilocksConfig;
|
||||
pub type F = <C as GenericConfig<D>>::F;
|
||||
pub type H = Poseidon2Hash;
|
||||
|
||||
/// tests the non-circuit key_compress with concrete cases
|
||||
#[test]
|
||||
pub fn test_key_compress(){
|
||||
let ref_inp_1: [F; 4] = [
|
||||
F::from_canonical_u64(0x0000000000000001),
|
||||
F::from_canonical_u64(0x0000000000000002),
|
||||
F::from_canonical_u64(0x0000000000000003),
|
||||
F::from_canonical_u64(0x0000000000000004),
|
||||
];
|
||||
|
||||
let ref_inp_2: [F; 4] = [
|
||||
F::from_canonical_u64(0x0000000000000005),
|
||||
F::from_canonical_u64(0x0000000000000006),
|
||||
F::from_canonical_u64(0x0000000000000007),
|
||||
F::from_canonical_u64(0x0000000000000008),
|
||||
];
|
||||
|
||||
let ref_out_key_0: [F; 4] = [
|
||||
F::from_canonical_u64(0xc4a4082f411ba790),
|
||||
F::from_canonical_u64(0x98c2ed7546c44cce),
|
||||
F::from_canonical_u64(0xc9404f373b78c979),
|
||||
F::from_canonical_u64(0x65d6b3c998920f59),
|
||||
];
|
||||
|
||||
let ref_out_key_1: [F; 4] = [
|
||||
F::from_canonical_u64(0xca47449a05283778),
|
||||
F::from_canonical_u64(0x08d3ced2020391ac),
|
||||
F::from_canonical_u64(0xda461ea45670fb12),
|
||||
F::from_canonical_u64(0x57f2c0b6c98a05c5),
|
||||
];
|
||||
|
||||
let ref_out_key_2: [F; 4] = [
|
||||
F::from_canonical_u64(0xe6fcec96a7a7f4b0),
|
||||
F::from_canonical_u64(0x3002a22356daa551),
|
||||
F::from_canonical_u64(0x899e2c1075a45f3f),
|
||||
F::from_canonical_u64(0xf07e38ccb3ade312),
|
||||
];
|
||||
|
||||
let ref_out_key_3: [F; 4] = [
|
||||
F::from_canonical_u64(0x9930cff752b046fb),
|
||||
F::from_canonical_u64(0x41570687cadcea0b),
|
||||
F::from_canonical_u64(0x3ac093a5a92066c7),
|
||||
F::from_canonical_u64(0xc45c75a3911cde87),
|
||||
];
|
||||
|
||||
// `HashOut` for inputs
|
||||
let inp1 = HashOut { elements: ref_inp_1 };
|
||||
let inp2 = HashOut { elements: ref_inp_2 };
|
||||
|
||||
// Expected outputs
|
||||
let expected_outputs = [
|
||||
ref_out_key_0,
|
||||
ref_out_key_1,
|
||||
ref_out_key_2,
|
||||
ref_out_key_3,
|
||||
];
|
||||
|
||||
// Iterate over each key and test key_compress output
|
||||
for (key, &expected) in expected_outputs.iter().enumerate() {
|
||||
let output = key_compress::<F, D, H>(inp1, inp2, key as u64);
|
||||
|
||||
// Assert that output matches the expected result
|
||||
assert_eq!(output.elements, expected, "Output mismatch for key: {}", key);
|
||||
|
||||
println!("Test passed for key {}", key);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
551
proof-input/src/merkle_tree/merkle_safe.rs
Normal file
551
proof-input/src/merkle_tree/merkle_safe.rs
Normal file
@ -0,0 +1,551 @@
|
||||
// Implementation of "safe" merkle tree
|
||||
// consistent with the one in codex:
|
||||
// https://github.com/codex-storage/nim-codex/blob/master/codex/merkletree/merkletree.nim
|
||||
|
||||
use anyhow::{ensure, Result};
|
||||
use plonky2::field::goldilocks_field::GoldilocksField;
|
||||
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||
use plonky2::hash::poseidon::PoseidonHash;
|
||||
use plonky2::plonk::config::Hasher;
|
||||
use std::ops::Shr;
|
||||
use plonky2_field::extension::Extendable;
|
||||
use plonky2_field::types::Field;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
use crate::merkle_tree::key_compress::key_compress;
|
||||
use crate::params::HF;
|
||||
|
||||
// Constants for the keys used in compression
|
||||
pub const KEY_NONE: u64 = 0x0;
|
||||
pub const KEY_BOTTOM_LAYER: u64 = 0x1;
|
||||
pub const KEY_ODD: u64 = 0x2;
|
||||
pub const KEY_ODD_AND_BOTTOM_LAYER: u64 = 0x3;
|
||||
|
||||
/// Merkle tree struct, containing the layers, compression function, and zero hash.
|
||||
#[derive(Clone)]
|
||||
pub struct MerkleTree<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> {
|
||||
pub layers: Vec<Vec<HashOut<F>>>,
|
||||
pub zero: HashOut<F>,
|
||||
}
|
||||
|
||||
impl<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> MerkleTree<F, D> {
|
||||
/// Constructs a new Merkle tree from the given leaves.
|
||||
pub fn new(
|
||||
leaves: &[HashOut<F>],
|
||||
zero: HashOut<F>,
|
||||
) -> Result<Self> {
|
||||
let layers = merkle_tree_worker::<F, D>(leaves, zero, true)?;
|
||||
Ok(Self {
|
||||
layers,
|
||||
zero,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the depth of the Merkle tree.
|
||||
pub fn depth(&self) -> usize {
|
||||
self.layers.len() - 1
|
||||
}
|
||||
|
||||
/// Returns the number of leaves in the Merkle tree.
|
||||
pub fn leaves_count(&self) -> usize {
|
||||
self.layers[0].len()
|
||||
}
|
||||
|
||||
/// Returns the root hash of the Merkle tree.
|
||||
pub fn root(&self) -> Result<HashOut<F>> {
|
||||
let last_layer = self.layers.last().ok_or_else(|| anyhow::anyhow!("Empty tree"))?;
|
||||
ensure!(last_layer.len() == 1, "Invalid Merkle tree");
|
||||
Ok(last_layer[0])
|
||||
}
|
||||
|
||||
/// Generates a Merkle proof for a given leaf index.
|
||||
pub fn get_proof(&self, index: usize) -> Result<MerkleProof<F, D>> {
|
||||
let depth = self.depth();
|
||||
let nleaves = self.leaves_count();
|
||||
|
||||
ensure!(index < nleaves, "Index out of bounds");
|
||||
|
||||
let mut path = Vec::with_capacity(depth);
|
||||
let mut k = index;
|
||||
let mut m = nleaves;
|
||||
|
||||
for i in 0..depth {
|
||||
let j = k ^ 1;
|
||||
let sibling = if j < m {
|
||||
self.layers[i][j]
|
||||
} else {
|
||||
self.zero
|
||||
};
|
||||
path.push(sibling);
|
||||
k = k >> 1;
|
||||
m = (m + 1) >> 1;
|
||||
}
|
||||
|
||||
Ok(MerkleProof {
|
||||
index,
|
||||
path,
|
||||
nleaves,
|
||||
zero: self.zero,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Build the Merkle tree layers.
|
||||
fn merkle_tree_worker<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
>(
|
||||
xs: &[HashOut<F>],
|
||||
zero: HashOut<F>,
|
||||
is_bottom_layer: bool,
|
||||
) -> Result<Vec<Vec<HashOut<F>>>> {
|
||||
let m = xs.len();
|
||||
if !is_bottom_layer && m == 1 {
|
||||
return Ok(vec![xs.to_vec()]);
|
||||
}
|
||||
|
||||
let halfn = m / 2;
|
||||
let n = 2 * halfn;
|
||||
let is_odd = n != m;
|
||||
|
||||
let mut ys = Vec::with_capacity(halfn + if is_odd { 1 } else { 0 });
|
||||
|
||||
for i in 0..halfn {
|
||||
let key = if is_bottom_layer { KEY_BOTTOM_LAYER } else { KEY_NONE };
|
||||
let h = key_compress::<F, D, HF>(xs[2 * i], xs[2 * i + 1], key);
|
||||
ys.push(h);
|
||||
}
|
||||
|
||||
if is_odd {
|
||||
let key = if is_bottom_layer {
|
||||
KEY_ODD_AND_BOTTOM_LAYER
|
||||
} else {
|
||||
KEY_ODD
|
||||
};
|
||||
let h = key_compress::<F, D, HF>(xs[n], zero, key);
|
||||
ys.push(h);
|
||||
}
|
||||
|
||||
let mut layers = vec![xs.to_vec()];
|
||||
let mut upper_layers = merkle_tree_worker::<F, D>(&ys, zero, false)?;
|
||||
layers.append(&mut upper_layers);
|
||||
|
||||
Ok(layers)
|
||||
}
|
||||
|
||||
/// Merkle proof struct, containing the index, path, and other necessary data.
|
||||
#[derive(Clone)]
|
||||
pub struct MerkleProof<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize,
|
||||
> {
|
||||
pub index: usize, // Index of the leaf
|
||||
pub path: Vec<HashOut<F>>, // Sibling hashes from the leaf to the root
|
||||
pub nleaves: usize, // Total number of leaves
|
||||
pub zero: HashOut<F>,
|
||||
}
|
||||
|
||||
impl<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
> MerkleProof<F, D> {
    /// Reconstructs the root hash from the proof and the given leaf.
    ///
    /// Walks the sibling `path` bottom-up, re-deriving the parent hash at
    /// each level. The compression key encodes whether the node is on the
    /// bottom layer and whether it is an "odd" node (last node of an
    /// odd-sized level).
    pub fn reconstruct_root(&self, leaf: HashOut<F>) -> Result<HashOut<F>> {
        let mut m = self.nleaves;             // number of nodes at the current level
        let mut j = self.index;               // index of the current node within its level
        let mut h = leaf;                     // running hash, starts at the leaf
        let mut bottom_flag = KEY_BOTTOM_LAYER;

        for p in &self.path {
            let odd_index = (j & 1) != 0;
            if odd_index {
                // The index of the child is odd: current node is the right child
                h = key_compress::<F, D, HF>(*p, h, bottom_flag);
            } else {
                if j == m - 1 {
                    // Single child -> so odd node
                    // (the +2 offset selects the "odd" variant of the key —
                    // matches the KEY_ODD / KEY_ODD_AND_BOTTOM_LAYER constants)
                    h = key_compress::<F, D, HF>(h, *p, bottom_flag + 2);
                } else {
                    // Even node
                    h = key_compress::<F, D, HF>(h, *p, bottom_flag);
                }
            }
            bottom_flag = KEY_NONE;  // only the first iteration uses the bottom-layer key
            j = j.shr(1);            // parent's index within the next level
            m = (m + 1).shr(1);      // level size halves, rounding up
        }

        Ok(h)
    }

    /// reconstruct the root using path_bits and last_bits in similar way as the circuit
    /// this is used for testing - sanity check
    pub fn reconstruct_root2(leaf: HashOut<F>, path_bits: Vec<bool>, last_bits:Vec<bool>, path: Vec<HashOut<F>>, mask_bits:Vec<bool>, depth: usize) -> Result<HashOut<F>> {
        // is_last[i] is true iff the node at level i is the last node of its level
        let is_last = compute_is_last(path_bits.clone(),last_bits);

        // h[i] holds the running hash after i compression steps (h[0] = leaf)
        let mut h = vec![];
        h.push(leaf);
        let mut i = 0;

        for p in &path {
            // only the first compression uses the bottom-layer key
            let bottom = if(i==0){
                KEY_BOTTOM_LAYER
            }else{
                KEY_NONE
            };

            // odd == 1 when the node is the last of its level AND sits at an even index
            let odd = (is_last[i] as usize) * (1-(path_bits[i] as usize));

            let key = bottom + (2 * (odd as u64));
            let odd_index = path_bits[i];
            if odd_index {
                h.push(key_compress::<F, D, HF>(*p, h[i], key));
            } else {
                h.push(key_compress::<F,D,HF>(h[i], *p, key));
            }
            i += 1;
        }

        // Select h[k+1] at the level where mask_bits transitions 1 -> 0,
        // mimicking the circuit's multiplexer: diff is 1 at exactly one k
        // and 0 elsewhere, so the sum picks out a single hash.
        // NOTE(review): assumes mask_bits is non-increasing; otherwise the u64
        // subtraction below underflows (panics in debug builds) — confirm with callers.
        let mut reconstructed_root = HashOut::<F>::ZERO;
        for k in 0..depth{
            let diff = (mask_bits[k] as u64) - (mask_bits[k+1] as u64);
            let mul_res: Vec<F> = h[k+1].elements.iter().map(|e| e.mul(F::from_canonical_u64(diff))).collect();
            reconstructed_root = HashOut::<F>::from_vec(
                mul_res.iter().zip(reconstructed_root.elements).map(|(e1,e2)| e1.add(e2)).collect()
            );
        }

        Ok(reconstructed_root)
    }

    /// Verifies the proof against a given root and leaf.
    ///
    /// Returns `Ok(true)` iff the root reconstructed from `leaf` and the
    /// stored path equals `root`.
    pub fn verify(&self, leaf: HashOut<F>, root: HashOut<F>) -> Result<bool> {
        let reconstructed_root = self.reconstruct_root(leaf)?;
        Ok(reconstructed_root == root)
    }
}
|
||||
|
||||
/// Helper mirroring the circuit's `isLast` computation.
///
/// Returns `path_bits.len() + 1` flags where entry `i` is true iff
/// `path_bits` and `last_bits` agree on every position `>= i`; the topmost
/// entry is always true.
fn compute_is_last(path_bits: Vec<bool>, last_bits: Vec<bool>) -> Vec<bool> {
    let depth = path_bits.len();

    // Build the flags from the top of the tree downwards, carrying the
    // conjunction along, then flip the order so callers index by level.
    let mut flags = Vec::with_capacity(depth + 1);
    flags.push(true); // the topmost flag is always set
    for i in (0..depth).rev() {
        let carried = *flags.last().expect("flags is never empty");
        flags.push(carried && path_bits[i] == last_bits[i]);
    }
    flags.reverse();
    flags
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use plonky2::field::types::Field;
    use crate::merkle_tree::key_compress::key_compress;

    // types used in all tests
    type F = GoldilocksField;
    const D: usize = 2;
    type H = PoseidonHash;

    /// Compresses two digests with the test field/hasher fixed — a thin
    /// wrapper around `key_compress` to keep the expected-root expressions
    /// below readable.
    fn compress(
        x: HashOut<F>,
        y: HashOut<F>,
        key: u64,
    ) -> HashOut<F> {
        key_compress::<F,D,HF>(x,y,key)
    }

    /// Hashes each data element into a leaf and builds a Merkle tree over them.
    fn make_tree(
        data: &[F],
        zero: HashOut<F>,
    ) -> Result<MerkleTree<F,D>> {
        // Hash the data to obtain leaf hashes
        let leaves: Vec<HashOut<GoldilocksField>> = data
            .iter()
            .map(|&element| {
                // Hash each field element to get the leaf hash
                H::hash_no_pad(&[element])
            })
            .collect();

        MerkleTree::<F, D>::new(&leaves, zero)
    }

    /// Builds a tree over 8 leaves and verifies a proof for the first leaf.
    #[test]
    fn single_proof_test() -> Result<()> {
        let data = (1u64..=8)
            .map(|i| F::from_canonical_u64(i))
            .collect::<Vec<_>>();

        // Hash the data to obtain leaf hashes
        let leaves: Vec<HashOut<F>> = data
            .iter()
            .map(|&element| {
                // Hash each field element to get the leaf hash
                H::hash_no_pad(&[element])
            })
            .collect();

        let zero = HashOut {
            elements: [F::ZERO; 4],
        };

        // Build the Merkle tree
        let tree = MerkleTree::<F, D>::new(&leaves, zero)?;

        // Get the root
        let root = tree.root()?;

        // Get a proof for the first leaf
        let proof = tree.get_proof(0)?;

        // Verify the proof
        let is_valid = proof.verify(leaves[0], root)?;
        assert!(is_valid, "Merkle proof verification failed");

        Ok(())
    }

    /// 8 leaves (fully even tree): the computed root must equal a hand-built
    /// compression tree — pairs on the bottom layer use KEY_BOTTOM_LAYER,
    /// every upper node uses KEY_NONE.
    #[test]
    fn test_correctness_even_bottom_layer() -> Result<()> {
        // Data for the test (field elements)
        let data = (1u64..=8)
            .map(|i| F::from_canonical_u64(i))
            .collect::<Vec<_>>();

        // Hash the data to get leaf hashes
        let leaf_hashes: Vec<HashOut<F>> = data
            .iter()
            .map(|&element| H::hash_no_pad(&[element]))
            .collect();

        // zero hash
        let zero = HashOut {
            elements: [F::ZERO; 4],
        };

        let expected_root =
            compress(
                compress(
                    compress(leaf_hashes[0], leaf_hashes[1], KEY_BOTTOM_LAYER),
                    compress(leaf_hashes[2], leaf_hashes[3], KEY_BOTTOM_LAYER),
                    KEY_NONE,
                ),
                compress(
                    compress(leaf_hashes[4], leaf_hashes[5], KEY_BOTTOM_LAYER),
                    compress(leaf_hashes[6], leaf_hashes[7], KEY_BOTTOM_LAYER),
                    KEY_NONE,
                ),
                KEY_NONE,
            );

        // Build the tree
        let tree = make_tree(&data, zero)?;

        // Get the computed root
        let computed_root = tree.root()?;

        // Check that the computed root matches the expected root
        assert_eq!(computed_root, expected_root);

        Ok(())
    }

    /// 7 leaves: the lone last leaf is paired with `zero` and compressed with
    /// KEY_ODD_AND_BOTTOM_LAYER.
    #[test]
    fn test_correctness_odd_bottom_layer() -> Result<()> {
        // Data for the test (field elements)
        let data = (1u64..=7)
            .map(|i| F::from_canonical_u64(i))
            .collect::<Vec<_>>();

        // Hash the data to get leaf hashes
        let leaf_hashes: Vec<HashOut<F>> = data
            .iter()
            .map(|&element| H::hash_no_pad(&[element]))
            .collect();

        // zero hash
        let zero = HashOut {
            elements: [F::ZERO; 4],
        };

        let expected_root =
            compress(
                compress(
                    compress(leaf_hashes[0], leaf_hashes[1], KEY_BOTTOM_LAYER),
                    compress(leaf_hashes[2], leaf_hashes[3], KEY_BOTTOM_LAYER),
                    KEY_NONE,
                ),
                compress(
                    compress(leaf_hashes[4], leaf_hashes[5], KEY_BOTTOM_LAYER),
                    // odd leaf: padded with zero on the bottom layer
                    compress(leaf_hashes[6], zero, KEY_ODD_AND_BOTTOM_LAYER),
                    KEY_NONE,
                ),
                KEY_NONE,
            );

        // Build the tree
        let tree = make_tree(&data, zero)?;

        // Get the computed root
        let computed_root = tree.root()?;

        // Check that the computed root matches the expected root
        assert_eq!(computed_root, expected_root);

        Ok(())
    }

    /// 10 leaves: the bottom layer is even but two upper levels are odd, so
    /// the right spine pads with `zero` using KEY_ODD twice.
    #[test]
    fn test_correctness_even_bottom_odd_upper_layers() -> Result<()> {
        // Data for the test (field elements)
        let data = (1u64..=10)
            .map(|i| F::from_canonical_u64(i))
            .collect::<Vec<_>>();

        // Hash the data to get leaf hashes
        let leaf_hashes: Vec<HashOut<F>> = data
            .iter()
            .map(|&element| H::hash_no_pad(&[element]))
            .collect();

        // zero hash
        let zero = HashOut {
            elements: [F::ZERO; 4],
        };

        let expected_root = compress(
            compress(
                compress(
                    compress(leaf_hashes[0], leaf_hashes[1], KEY_BOTTOM_LAYER),
                    compress(leaf_hashes[2], leaf_hashes[3], KEY_BOTTOM_LAYER),
                    KEY_NONE,
                ),
                compress(
                    compress(leaf_hashes[4], leaf_hashes[5], KEY_BOTTOM_LAYER),
                    compress(leaf_hashes[6], leaf_hashes[7], KEY_BOTTOM_LAYER),
                    KEY_NONE,
                ),
                KEY_NONE,
            ),
            compress(
                compress(
                    compress(leaf_hashes[8], leaf_hashes[9], KEY_BOTTOM_LAYER),
                    // odd node on the second level
                    zero,
                    KEY_ODD,
                ),
                // odd node on the third level
                zero,
                KEY_ODD,
            ),
            KEY_NONE,
        );

        // Build the tree
        let tree = make_tree(&data, zero)?;

        // Get the computed root
        let computed_root = tree.root()?;

        // Check that the computed root matches the expected root
        assert_eq!(computed_root, expected_root);

        Ok(())
    }

    /// Generates and verifies a proof for every one of 10 leaves.
    #[test]
    fn test_proofs() -> Result<()> {
        // Data for the test (field elements)
        let data = (1u64..=10)
            .map(|i| F::from_canonical_u64(i))
            .collect::<Vec<_>>();

        // Hash the data to get leaf hashes
        let leaf_hashes: Vec<HashOut<F>> = data
            .iter()
            .map(|&element| H::hash_no_pad(&[element]))
            .collect();

        // zero hash
        let zero = HashOut {
            elements: [F::ZERO; 4],
        };

        // Build the tree
        let tree = MerkleTree::<F, D>::new(&leaf_hashes, zero)?;

        // Get the root
        let expected_root = tree.root()?;

        // Verify proofs for all leaves
        for (i, &leaf_hash) in leaf_hashes.iter().enumerate() {
            let proof = tree.get_proof(i)?;
            let is_valid = proof.verify(leaf_hash, expected_root)?;
            assert!(is_valid, "Proof verification failed for leaf {}", i);
        }

        Ok(())
    }
}
|
||||
4
proof-input/src/merkle_tree/mod.rs
Normal file
4
proof-input/src/merkle_tree/mod.rs
Normal file
@ -0,0 +1,4 @@
|
||||
pub mod merkle_safe;
|
||||
pub mod key_compress;
|
||||
pub mod merkle_circuit;
|
||||
pub mod test;
|
||||
@ -24,7 +24,7 @@ fn digest_seq<
|
||||
mod tests {
|
||||
use super::*;
|
||||
use anyhow::Result;
|
||||
use codex_plonky2_circuits::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
|
||||
use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
|
||||
use plonky2::field::goldilocks_field::GoldilocksField;
|
||||
use plonky2::field::types::Field;
|
||||
use plonky2::hash::hash_types::HashOut;
|
||||
@ -12,7 +12,7 @@ use plonky2_poseidon2::config::Poseidon2GoldilocksConfig;
|
||||
pub const D: usize = 2;
|
||||
pub type C = PoseidonGoldilocksConfig;
|
||||
pub type F = <C as GenericConfig<D>>::F; // this is the goldilocks field
|
||||
// pub type H = PoseidonHash;
|
||||
pub type HF = PoseidonHash;
|
||||
// pub type HP = <PoseidonHash as plonky2::plonk::config::Hasher<F>>::Permutation;
|
||||
|
||||
// hardcoded default params for generating proof input
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
pub mod merkle_circuit;
|
||||
pub mod merkle;
|
||||
pub mod simple_recursion;
|
||||
pub mod cyclic_recursion;
|
||||
pub mod tree_recursion1;
|
||||
@ -10,9 +10,9 @@ mod tests {
|
||||
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, GenericHashOut, Hasher, PoseidonGoldilocksConfig};
|
||||
use plonky2_field::types::Field;
|
||||
use codex_plonky2_circuits::circuits::sample_cells::{SampleCircuit, SampleCircuitInput};
|
||||
use codex_plonky2_circuits::recursion::params::{F, D, C, Plonky2Proof};
|
||||
use codex_plonky2_circuits::recursion::sampling_inner_circuit::SamplingRecursion;
|
||||
use codex_plonky2_circuits::recursion::inner_circuit::InnerCircuit;
|
||||
use codex_plonky2_circuits::params::{F, D, C, Plonky2Proof};
|
||||
use codex_plonky2_circuits::recursion::circuits::sampling_inner_circuit::SamplingRecursion;
|
||||
use codex_plonky2_circuits::recursion::circuits::inner_circuit::InnerCircuit;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::{Poseidon2, Poseidon2Hash};
|
||||
use crate::gen_input::get_m_default_circ_input;
|
||||
use codex_plonky2_circuits::recursion::tree_recursion::{NodeCircuit, TreeRecursion};
|
||||
@ -11,15 +11,15 @@ mod tests {
|
||||
use plonky2::plonk::proof::ProofWithPublicInputs;
|
||||
use codex_plonky2_circuits::circuits::params::CircuitParams;
|
||||
use codex_plonky2_circuits::circuits::sample_cells::{SampleCircuit, SampleCircuitInput};
|
||||
use codex_plonky2_circuits::recursion::params::{F, D, C, Plonky2Proof};
|
||||
use codex_plonky2_circuits::recursion::sampling_inner_circuit::SamplingRecursion;
|
||||
use codex_plonky2_circuits::recursion::inner_circuit::InnerCircuit;
|
||||
use codex_plonky2_circuits::params::{F, D, C, Plonky2Proof};
|
||||
use codex_plonky2_circuits::recursion::circuits::sampling_inner_circuit::SamplingRecursion;
|
||||
use codex_plonky2_circuits::recursion::circuits::inner_circuit::InnerCircuit;
|
||||
use codex_plonky2_circuits::recursion::leaf_circuit::{LeafCircuit, LeafInput};
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::{Poseidon2, Poseidon2Hash};
|
||||
use crate::gen_input::gen_testing_circuit_input;
|
||||
use crate::params::TestParams;
|
||||
use codex_plonky2_circuits::recursion::tree_recursion2::{NodeCircuit as nodeC, TreeRecursion as TR};
|
||||
use codex_plonky2_circuits::recursion::utils::{get_dummy_leaf_proof, get_dummy_node_proof};
|
||||
use codex_plonky2_circuits::recursion::tree2::utils::{get_dummy_leaf_proof, get_dummy_node_proof};
|
||||
use crate::gen_input::get_m_default_circ_input;
|
||||
|
||||
/// Uses node recursion to sample the dataset
|
||||
278
proof-input/src/serialization/circuit_input.rs
Normal file
278
proof-input/src/serialization/circuit_input.rs
Normal file
@ -0,0 +1,278 @@
|
||||
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||
use plonky2_field::extension::Extendable;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use anyhow::{anyhow, Error};
|
||||
use codex_plonky2_circuits::circuits::sample_cells::{Cell, MerklePath, SampleCircuitInput};
|
||||
use std::fs::File;
|
||||
use std::io::{BufReader, Write};
|
||||
use plonky2_field::types::{Field, PrimeField64};
|
||||
use crate::gen_input::gen_testing_circuit_input;
|
||||
use crate::params::TestParams;
|
||||
|
||||
/// export circuit input to json file
|
||||
pub fn export_circ_input_to_json<
|
||||
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||
const D: usize,
|
||||
> (circ_input:SampleCircuitInput<F, D>, filename: &str) -> anyhow::Result<()>{
|
||||
// Convert the circuit input to a serializable format
|
||||
let serializable_circ_input = SerializableCircuitInput::from_circ_input(&circ_input);
|
||||
|
||||
// Serialize to JSON
|
||||
let json_data = serde_json::to_string_pretty(&serializable_circ_input)?;
|
||||
|
||||
// Write to file
|
||||
let mut file = File::create(filename)?;
|
||||
file.write_all(json_data.as_bytes())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
/// Function to generate circuit input and export to JSON
|
||||
pub fn generate_and_export_circ_input_to_json<
|
||||
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||
const D: usize,
|
||||
>(params: &TestParams, filename: &str) -> anyhow::Result<()> {
|
||||
|
||||
let circ_input = gen_testing_circuit_input::<F,D>(params);
|
||||
|
||||
export_circ_input_to_json(circ_input, filename)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
// Serializable versions of the circuit input
// naming here is not Rust friendly but only so that its compatible with Nim code.
#[derive(Serialize, Deserialize)]
struct SerializableCircuitInput<
> {
    dataSetRoot: Vec<String>,      // dataset root digest, one decimal string per field element
    entropy: Vec<String>,          // sampling entropy digest, one decimal string per field element
    nCellsPerSlot: usize,
    nSlotsPerDataSet: usize,
    slotIndex: u64,
    slotRoot: Vec<String>,         // slot root digest
    slotProof: Vec<String>,        // slot proof digests flattened: 4 strings per hash
    cellData: Vec<Vec<String>>,    // per-sampled-cell data, each element as a decimal string
    merklePaths: Vec<Vec<String>>, // per-cell Merkle path, flattened digests (4 strings per hash)
}
|
||||
|
||||
impl<
> SerializableCircuitInput {
    /// from the circuit input to serializable circuit input
    ///
    /// Every field element is rendered as its canonical u64 in decimal-string
    /// form; digests (4 elements each) in proofs/paths are flattened
    /// element-by-element.
    pub fn from_circ_input<
        F: RichField + Extendable<D> + Poseidon2 + Serialize,
        const D: usize,
    >(circ_input: &SampleCircuitInput<F, D>) -> Self {
        SerializableCircuitInput {
            dataSetRoot: circ_input
                .dataset_root
                .elements
                .iter()
                .map(|e| e.to_canonical_u64().to_string())
                .collect(),
            entropy: circ_input
                .entropy
                .elements
                .iter()
                .map(|e| e.to_canonical_u64().to_string())
                .collect(),
            nCellsPerSlot: circ_input.n_cells_per_slot.to_canonical_u64() as usize,
            nSlotsPerDataSet: circ_input.n_slots_per_dataset.to_canonical_u64() as usize,
            slotIndex: circ_input.slot_index.to_canonical_u64(),
            slotRoot: circ_input
                .slot_root
                .elements
                .iter()
                .map(|e| e.to_canonical_u64().to_string())
                .collect(),
            // digests are flattened: 4 strings per hash
            slotProof: circ_input
                .slot_proof
                .iter()
                .flat_map(|hash| hash.elements.iter())
                .map(|e| e.to_canonical_u64().to_string())
                .collect(),
            cellData: circ_input
                .cell_data
                .iter()
                .map(|data_vec| {
                    data_vec.data
                        .iter()
                        .map(|e| e.to_canonical_u64().to_string())
                        .collect()
                })
                .collect(),
            // each path is flattened the same way as slotProof: 4 strings per hash
            merklePaths: circ_input
                .merkle_paths
                .iter()
                .map(|path| {
                    path.path.iter()
                        .flat_map(|hash| hash.elements.iter())
                        .map(|e| e.to_canonical_u64().to_string())
                        .collect()
                })
                .collect(),
        }
    }
}
|
||||
|
||||
impl<> SerializableCircuitInput {
|
||||
/// from serializable circuit input to circuit input
|
||||
pub fn to_circ_input<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize
|
||||
>(&self) -> anyhow::Result<SampleCircuitInput<F, D>> {
|
||||
// Convert entropy
|
||||
let entropy_elements = self
|
||||
.entropy
|
||||
.iter()
|
||||
.map(|s| -> anyhow::Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<F>, Error>>()?;
|
||||
let entropy = HashOut {
|
||||
elements: entropy_elements
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid entropy length"))?,
|
||||
};
|
||||
|
||||
// Convert dataset_root
|
||||
let dataset_root_elements = self
|
||||
.dataSetRoot
|
||||
.iter()
|
||||
.map(|s| -> anyhow::Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<F>, Error>>()?;
|
||||
let dataset_root = HashOut {
|
||||
elements: dataset_root_elements
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid dataset_root length"))?,
|
||||
};
|
||||
|
||||
// slot_index
|
||||
let slot_index = F::from_canonical_u64(self.slotIndex);
|
||||
|
||||
// slot_root
|
||||
let slot_root_elements = self
|
||||
.slotRoot
|
||||
.iter()
|
||||
.map(|s| -> anyhow::Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<F>, Error>>()?;
|
||||
let slot_root = HashOut {
|
||||
elements: slot_root_elements
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid slot_root length"))?,
|
||||
};
|
||||
|
||||
// n_cells_per_slot
|
||||
let n_cells_per_slot = F::from_canonical_usize(self.nCellsPerSlot);
|
||||
|
||||
// n_slots_per_dataset
|
||||
let n_slots_per_dataset = F::from_canonical_usize(self.nSlotsPerDataSet);
|
||||
|
||||
// slot_proof
|
||||
let slot_proof_elements = self
|
||||
.slotProof
|
||||
.iter()
|
||||
.map(|s| -> anyhow::Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<F>, Error>>()?;
|
||||
if slot_proof_elements.len() % 4 != 0 {
|
||||
return Err(anyhow!("Invalid slot_proof length"));
|
||||
}
|
||||
let slot_proof = slot_proof_elements
|
||||
.chunks(4)
|
||||
.map(|chunk| -> anyhow::Result<HashOut<F>, Error> {
|
||||
let elements: [F; 4] = chunk
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid chunk length"))?;
|
||||
Ok(HashOut { elements })
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<HashOut<F>>, Error>>()?;
|
||||
|
||||
// cell_data
|
||||
let cell_data = self
|
||||
.cellData
|
||||
.iter()
|
||||
.map(|vec_of_strings| -> anyhow::Result<Cell<F,D>, Error> {
|
||||
let cell = vec_of_strings
|
||||
.iter()
|
||||
.map(|s| -> anyhow::Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<F>, Error>>();
|
||||
Ok(Cell::<F,D>{
|
||||
data: cell.unwrap(),
|
||||
})
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<Cell<F,D>>, Error>>()?;
|
||||
|
||||
// merkle_paths
|
||||
let merkle_paths = self
|
||||
.merklePaths
|
||||
.iter()
|
||||
.map(|path_strings| -> anyhow::Result<MerklePath<F,D>, Error> {
|
||||
let path_elements = path_strings
|
||||
.iter()
|
||||
.map(|s| -> anyhow::Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<F>, Error>>()?;
|
||||
|
||||
if path_elements.len() % 4 != 0 {
|
||||
return Err(anyhow!("Invalid merkle path length"));
|
||||
}
|
||||
|
||||
let path = path_elements
|
||||
.chunks(4)
|
||||
.map(|chunk| -> anyhow::Result<HashOut<F>, Error> {
|
||||
let elements: [F; 4] = chunk
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid chunk length"))?;
|
||||
Ok(HashOut { elements })
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<HashOut<F>>, Error>>()?;
|
||||
|
||||
let mp = MerklePath::<F,D>{
|
||||
path,
|
||||
};
|
||||
Ok(mp)
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<MerklePath<F,D>>, Error>>()?;
|
||||
|
||||
Ok(SampleCircuitInput {
|
||||
entropy,
|
||||
dataset_root,
|
||||
slot_index,
|
||||
slot_root,
|
||||
n_cells_per_slot,
|
||||
n_slots_per_dataset,
|
||||
slot_proof,
|
||||
cell_data,
|
||||
merkle_paths,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// reads the json file, converts it to circuit input (SampleCircuitInput) and returns it
|
||||
pub fn import_circ_input_from_json<F: RichField + Extendable<D> + Poseidon2, const D: usize>(
|
||||
filename: &str,
|
||||
) -> anyhow::Result<SampleCircuitInput<F, D>> {
|
||||
let file = File::open(filename)?;
|
||||
let reader = BufReader::new(file);
|
||||
let serializable_circ_input: SerializableCircuitInput = serde_json::from_reader(reader)?;
|
||||
|
||||
let circ_input = serializable_circ_input.to_circ_input()?;
|
||||
Ok(circ_input)
|
||||
}
|
||||
@ -1,37 +1,19 @@
|
||||
use anyhow::{anyhow, Error, Result};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs::File;
|
||||
use std::{fs, io};
|
||||
use std::io::{BufReader, BufWriter, Write};
|
||||
use std::io::{BufWriter, Write};
|
||||
use std::path::Path;
|
||||
use crate::gen_input::{DatasetTree, gen_testing_circuit_input};
|
||||
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||
use crate::gen_input::gen_testing_circuit_input;
|
||||
use plonky2::hash::hash_types::RichField;
|
||||
use plonky2::plonk::config::{GenericConfig, Hasher};
|
||||
use plonky2_field::extension::Extendable;
|
||||
use plonky2_field::types::Field;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
use codex_plonky2_circuits::circuits::sample_cells::{Cell, MerklePath, SampleCircuitInput};
|
||||
use codex_plonky2_circuits::circuits::sample_cells::SampleCircuitInput;
|
||||
use plonky2::plonk::proof::CompressedProofWithPublicInputs;
|
||||
use serde_json::to_writer_pretty;
|
||||
use crate::params::TestParams;
|
||||
|
||||
/// export circuit input to json file
|
||||
pub fn export_circ_input_to_json<
|
||||
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||
const D: usize,
|
||||
> (circ_input:SampleCircuitInput<F, D>, filename: &str) -> Result<()>{
|
||||
// Convert the circuit input to a serializable format
|
||||
let serializable_circ_input = SerializableCircuitInput::from_circ_input(&circ_input);
|
||||
|
||||
// Serialize to JSON
|
||||
let json_data = serde_json::to_string_pretty(&serializable_circ_input)?;
|
||||
|
||||
// Write to file
|
||||
let mut file = File::create(filename)?;
|
||||
file.write_all(json_data.as_bytes())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Function to export proof with public input to json file
|
||||
fn export_proof_with_pi_to_json<F, C, const D: usize>(
|
||||
instance: &CompressedProofWithPublicInputs<F, C, D>,
|
||||
@ -51,255 +33,6 @@ fn export_proof_with_pi_to_json<F, C, const D: usize>(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
/// Function to generate circuit input and export to JSON
|
||||
pub fn generate_and_export_circ_input_to_json<
|
||||
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||
const D: usize,
|
||||
>(params: &TestParams, filename: &str) -> Result<()> {
|
||||
|
||||
let circ_input = gen_testing_circuit_input::<F,D>(params);
|
||||
|
||||
export_circ_input_to_json(circ_input, filename)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
// Serializable versions of the circuit input
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SerializableCircuitInput<
|
||||
> {
|
||||
dataSetRoot: Vec<String>,
|
||||
entropy: Vec<String>,
|
||||
nCellsPerSlot: usize,
|
||||
nSlotsPerDataSet: usize,
|
||||
slotIndex: u64,
|
||||
slotRoot: Vec<String>,
|
||||
slotProof: Vec<String>,
|
||||
cellData: Vec<Vec<String>>,
|
||||
merklePaths: Vec<Vec<String>>,
|
||||
}
|
||||
|
||||
impl<
|
||||
> SerializableCircuitInput {
|
||||
/// from the circuit input to serializable circuit input
|
||||
pub fn from_circ_input<
|
||||
F: RichField + Extendable<D> + Poseidon2 + Serialize,
|
||||
const D: usize,
|
||||
>(circ_input: &SampleCircuitInput<F, D>) -> Self {
|
||||
SerializableCircuitInput {
|
||||
dataSetRoot: circ_input
|
||||
.dataset_root
|
||||
.elements
|
||||
.iter()
|
||||
.map(|e| e.to_canonical_u64().to_string())
|
||||
.collect(),
|
||||
entropy: circ_input
|
||||
.entropy
|
||||
.elements
|
||||
.iter()
|
||||
.map(|e| e.to_canonical_u64().to_string())
|
||||
.collect(),
|
||||
nCellsPerSlot: circ_input.n_cells_per_slot.to_canonical_u64() as usize,
|
||||
nSlotsPerDataSet: circ_input.n_slots_per_dataset.to_canonical_u64() as usize,
|
||||
slotIndex: circ_input.slot_index.to_canonical_u64(),
|
||||
slotRoot: circ_input
|
||||
.slot_root
|
||||
.elements
|
||||
.iter()
|
||||
.map(|e| e.to_canonical_u64().to_string())
|
||||
.collect(),
|
||||
slotProof: circ_input
|
||||
.slot_proof
|
||||
.iter()
|
||||
.flat_map(|hash| hash.elements.iter())
|
||||
.map(|e| e.to_canonical_u64().to_string())
|
||||
.collect(),
|
||||
cellData: circ_input
|
||||
.cell_data
|
||||
.iter()
|
||||
.map(|data_vec| {
|
||||
data_vec.data
|
||||
.iter()
|
||||
.map(|e| e.to_canonical_u64().to_string())
|
||||
.collect()
|
||||
})
|
||||
.collect(),
|
||||
merklePaths: circ_input
|
||||
.merkle_paths
|
||||
.iter()
|
||||
.map(|path| {
|
||||
path.path.iter()
|
||||
.flat_map(|hash| hash.elements.iter())
|
||||
.map(|e| e.to_canonical_u64().to_string())
|
||||
.collect()
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<> SerializableCircuitInput {
|
||||
/// from serializable circuit input to circuit input
|
||||
pub fn to_circ_input<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
const D: usize
|
||||
>(&self) -> Result<SampleCircuitInput<F, D>> {
|
||||
// Convert entropy
|
||||
let entropy_elements = self
|
||||
.entropy
|
||||
.iter()
|
||||
.map(|s| -> Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<Result<Vec<F>, Error>>()?;
|
||||
let entropy = HashOut {
|
||||
elements: entropy_elements
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid entropy length"))?,
|
||||
};
|
||||
|
||||
// Convert dataset_root
|
||||
let dataset_root_elements = self
|
||||
.dataSetRoot
|
||||
.iter()
|
||||
.map(|s| -> Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<Result<Vec<F>, Error>>()?;
|
||||
let dataset_root = HashOut {
|
||||
elements: dataset_root_elements
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid dataset_root length"))?,
|
||||
};
|
||||
|
||||
// slot_index
|
||||
let slot_index = F::from_canonical_u64(self.slotIndex);
|
||||
|
||||
// slot_root
|
||||
let slot_root_elements = self
|
||||
.slotRoot
|
||||
.iter()
|
||||
.map(|s| -> Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<Result<Vec<F>, Error>>()?;
|
||||
let slot_root = HashOut {
|
||||
elements: slot_root_elements
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid slot_root length"))?,
|
||||
};
|
||||
|
||||
// n_cells_per_slot
|
||||
let n_cells_per_slot = F::from_canonical_usize(self.nCellsPerSlot);
|
||||
|
||||
// n_slots_per_dataset
|
||||
let n_slots_per_dataset = F::from_canonical_usize(self.nSlotsPerDataSet);
|
||||
|
||||
// slot_proof
|
||||
let slot_proof_elements = self
|
||||
.slotProof
|
||||
.iter()
|
||||
.map(|s| -> Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<Result<Vec<F>, Error>>()?;
|
||||
if slot_proof_elements.len() % 4 != 0 {
|
||||
return Err(anyhow!("Invalid slot_proof length"));
|
||||
}
|
||||
let slot_proof = slot_proof_elements
|
||||
.chunks(4)
|
||||
.map(|chunk| -> Result<HashOut<F>, Error> {
|
||||
let elements: [F; 4] = chunk
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid chunk length"))?;
|
||||
Ok(HashOut { elements })
|
||||
})
|
||||
.collect::<Result<Vec<HashOut<F>>, Error>>()?;
|
||||
|
||||
// cell_data
|
||||
let cell_data = self
|
||||
.cellData
|
||||
.iter()
|
||||
.map(|vec_of_strings| -> Result<Cell<F,D>, Error> {
|
||||
let cell = vec_of_strings
|
||||
.iter()
|
||||
.map(|s| -> Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<Result<Vec<F>, Error>>();
|
||||
Ok(Cell::<F,D>{
|
||||
data: cell.unwrap(),
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<Cell<F,D>>, Error>>()?;
|
||||
|
||||
// merkle_paths
|
||||
let merkle_paths = self
|
||||
.merklePaths
|
||||
.iter()
|
||||
.map(|path_strings| -> Result<MerklePath<F,D>, Error> {
|
||||
let path_elements = path_strings
|
||||
.iter()
|
||||
.map(|s| -> Result<F, Error> {
|
||||
let n = s.parse::<u64>()?;
|
||||
Ok(F::from_canonical_u64(n))
|
||||
})
|
||||
.collect::<Result<Vec<F>, Error>>()?;
|
||||
|
||||
if path_elements.len() % 4 != 0 {
|
||||
return Err(anyhow!("Invalid merkle path length"));
|
||||
}
|
||||
|
||||
let path = path_elements
|
||||
.chunks(4)
|
||||
.map(|chunk| -> Result<HashOut<F>, Error> {
|
||||
let elements: [F; 4] = chunk
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Invalid chunk length"))?;
|
||||
Ok(HashOut { elements })
|
||||
})
|
||||
.collect::<Result<Vec<HashOut<F>>, Error>>()?;
|
||||
|
||||
let mp = MerklePath::<F,D>{
|
||||
path,
|
||||
};
|
||||
Ok(mp)
|
||||
})
|
||||
.collect::<Result<Vec<MerklePath<F,D>>, Error>>()?;
|
||||
|
||||
Ok(SampleCircuitInput {
|
||||
entropy,
|
||||
dataset_root,
|
||||
slot_index,
|
||||
slot_root,
|
||||
n_cells_per_slot,
|
||||
n_slots_per_dataset,
|
||||
slot_proof,
|
||||
cell_data,
|
||||
merkle_paths,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// reads the json file, converts it to circuit input (SampleCircuitInput) and returns it
|
||||
pub fn import_circ_input_from_json<F: RichField + Extendable<D> + Poseidon2, const D: usize>(
|
||||
filename: &str,
|
||||
) -> Result<SampleCircuitInput<F, D>> {
|
||||
let file = File::open(filename)?;
|
||||
let reader = BufReader::new(file);
|
||||
let serializable_circ_input: SerializableCircuitInput = serde_json::from_reader(reader)?;
|
||||
|
||||
let circ_input = serializable_circ_input.to_circ_input()?;
|
||||
Ok(circ_input)
|
||||
}
|
||||
|
||||
/// Writes the provided bytes to the specified file path using `std::fs::write`.
|
||||
pub fn write_bytes_to_file<P: AsRef<Path>>(data: Vec<u8>, path: P) -> io::Result<()> {
|
||||
fs::write(path, data)
|
||||
@ -322,10 +55,11 @@ mod tests {
|
||||
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
|
||||
use plonky2_poseidon2::serialization::{DefaultGateSerializer, DefaultGeneratorSerializer};
|
||||
use crate::gen_input::verify_circuit_input;
|
||||
use crate::serialization::circuit_input::{export_circ_input_to_json, generate_and_export_circ_input_to_json, import_circ_input_from_json};
|
||||
|
||||
// Test to generate the JSON file
|
||||
#[test]
|
||||
fn test_export_circ_input_to_json() -> Result<()> {
|
||||
fn test_export_circ_input_to_json() -> anyhow::Result<()> {
|
||||
// Create Params
|
||||
let params = TestParams::default();
|
||||
// Export the circuit input to JSON
|
||||
@ -384,7 +118,7 @@ mod tests {
|
||||
let circuit_params = CircuitParams::default();
|
||||
|
||||
let circ = SampleCircuit::new(circuit_params.clone());
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder);
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder)?;
|
||||
|
||||
// Create a PartialWitness and assign
|
||||
let mut pw = PartialWitness::new();
|
||||
@ -393,7 +127,7 @@ mod tests {
|
||||
let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("input.json")?;
|
||||
println!("circuit input imported from input.json");
|
||||
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &imported_circ_input);
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &imported_circ_input)?;
|
||||
|
||||
// Build the circuit
|
||||
let data = builder.build::<C>();
|
||||
@ -417,7 +151,7 @@ mod tests {
|
||||
// reads the json input and verify (non-circuit)
|
||||
// NOTE: expects that the json input proof uses the default params
|
||||
#[test]
|
||||
fn test_read_json_and_verify() -> Result<()> {
|
||||
fn test_read_json_and_verify() -> anyhow::Result<()> {
|
||||
let params = TestParams::default();
|
||||
|
||||
// Import the circuit input from JSON
|
||||
@ -445,14 +179,14 @@ mod tests {
|
||||
|
||||
let circuit_params = CircuitParams::default();
|
||||
let circ = SampleCircuit::new(circuit_params.clone());
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder);
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder)?;
|
||||
|
||||
// Create a PartialWitness and assign
|
||||
let mut pw = PartialWitness::new();
|
||||
|
||||
// gen circ input
|
||||
let imported_circ_input: SampleCircuitInput<F, D> = gen_testing_circuit_input::<F,D>(¶ms);
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &imported_circ_input);
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &imported_circ_input)?;
|
||||
|
||||
// Build the circuit
|
||||
let data = builder.build::<C>();
|
||||
@ -497,14 +231,14 @@ mod tests {
|
||||
|
||||
let circuit_params = CircuitParams::default();
|
||||
let circ = SampleCircuit::new(circuit_params.clone());
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder);
|
||||
let mut targets = circ.sample_slot_circuit_with_public_input(&mut builder)?;
|
||||
|
||||
// Create a PartialWitness and assign
|
||||
let mut pw = PartialWitness::new();
|
||||
|
||||
// gen circ input
|
||||
let imported_circ_input: SampleCircuitInput<F, D> = gen_testing_circuit_input::<F,D>(¶ms);
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &imported_circ_input);
|
||||
circ.sample_slot_assign_witness(&mut pw, &targets, &imported_circ_input)?;
|
||||
|
||||
// Build the circuit
|
||||
let data = builder.build::<C>();
|
||||
2
proof-input/src/serialization/mod.rs
Normal file
2
proof-input/src/serialization/mod.rs
Normal file
@ -0,0 +1,2 @@
|
||||
pub mod circuit_input;
|
||||
pub mod json;
|
||||
@ -1,8 +1,4 @@
|
||||
use plonky2::hash::hash_types::{HashOut, RichField};
|
||||
use plonky2::iop::witness::PartialWitness;
|
||||
use plonky2::plonk::circuit_data::{CircuitData, VerifierCircuitData};
|
||||
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
|
||||
use plonky2::plonk::proof::{Proof, ProofWithPublicInputs};
|
||||
use plonky2_field::extension::Extendable;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
use codex_plonky2_circuits::circuits::params::HF;
|
||||
@ -106,34 +102,3 @@ pub fn ceiling_log2(
|
||||
|
||||
(last_bits, mask)
|
||||
}
|
||||
|
||||
/// prove given the circuit data and partial witness
|
||||
pub fn prove<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
C: GenericConfig<D, F = F>,
|
||||
const D: usize,
|
||||
H: Hasher<F> + AlgebraicHasher<F>,
|
||||
>(
|
||||
data: CircuitData<F, C, D>,
|
||||
pw: PartialWitness<F>
|
||||
) -> Result<ProofWithPublicInputs<F, C, D>>{
|
||||
let proof = data.prove(pw);
|
||||
return proof
|
||||
}
|
||||
|
||||
/// verify given verifier data, public input, and proof
|
||||
pub fn verify<
|
||||
F: RichField + Extendable<D> + Poseidon2,
|
||||
C: GenericConfig<D, F = F>,
|
||||
const D: usize,
|
||||
H: Hasher<F> + AlgebraicHasher<F>,
|
||||
>(
|
||||
verifier_data: &VerifierCircuitData<F, C, D>,
|
||||
public_inputs: Vec<F>,
|
||||
proof: Proof<F, C, D>
|
||||
)-> Result<()> {
|
||||
verifier_data.verify(ProofWithPublicInputs {
|
||||
proof,
|
||||
public_inputs,
|
||||
})
|
||||
}
|
||||
Loading…
x
Reference in New Issue
Block a user