move proof-input and refactor

M Alghazwi 2024-11-07 09:32:29 +01:00
parent 7a24f7d081
commit cc14498c71
12 changed files with 899 additions and 286 deletions

View File

@ -10,7 +10,7 @@ use plonky2::hash::hash_types::RichField;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use codex_plonky2_circuits::circuits::params::TESTING_SLOT_INDEX;
use codex_plonky2_circuits::circuits::sample_cells::DatasetTreeCircuit;
use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
macro_rules! pretty_print {
($($arg:tt)*) => {
@ -28,12 +28,12 @@ fn prepare_data<
const D: usize,
H: Hasher<F> + AlgebraicHasher<F>,
>() -> Result<(
DatasetTreeCircuit<F, C, D, H>,
SampleCircuit<F, C, D, H>,
usize,
usize,
)> {
// Initialize the dataset tree with testing data
let mut dataset_t = DatasetTreeCircuit::<F,C,D,H>::new_for_testing();
let mut dataset_t = SampleCircuit::<F,C,D,H>::new_for_testing();
let slot_index = TESTING_SLOT_INDEX;
let entropy = 123;
@ -47,7 +47,7 @@ fn build_circuit<
const D: usize,
H: Hasher<F> + AlgebraicHasher<F>,
>(
dataset_tree: &mut DatasetTreeCircuit<F, C, D, H>,
dataset_tree: &mut SampleCircuit<F, C, D, H>,
slot_index: usize,
entropy: usize,
// proofs: &[MerkleProof<F, H>],
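A minimal sketch of how these helpers fit together end to end (illustrative: build_circuit's return type is assumed to be the built CircuitData plus a filled PartialWitness; the type aliases F, C, D, H are as configured in this file):

    // Hypothetical driver for the renamed SampleCircuit API.
    fn prove_and_verify() -> Result<()> {
        let (mut circ, slot_index, entropy) = prepare_data::<F, C, D, H>()?;
        let (data, pw) = build_circuit(&mut circ, slot_index, entropy)?;
        let proof = data.prove(pw)?;
        data.verify(proof)
    }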

View File

@ -9,7 +9,8 @@ use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
/// Compression function that takes two 256-bit inputs (HashOut) and a u64 key (converted to a field element inside the function),
/// and returns a 256-bit output (HashOut).
pub fn key_compress<
F: RichField,
F: RichField, //+ Extendable<D> + Poseidon2,
// const D: usize,
H:Hasher<F>
>(x: HashOut<F>, y: HashOut<F>, key: u64) -> HashOut<F> {
@ -55,5 +56,86 @@ pub fn key_compress_circuit<
}
}
#[cfg(test)]
mod tests {
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2_field::types::Field;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
use super::*;
// types
pub const D: usize = 2;
pub type C = PoseidonGoldilocksConfig;
pub type F = <C as GenericConfig<D>>::F; // this is the goldilocks field
pub type H = Poseidon2Hash;
/// tests the non-circuit key_compress with concrete cases
#[test]
pub fn test_key_compress(){
let ref_inp_1: [F; 4] = [
F::from_canonical_u64(0x0000000000000001),
F::from_canonical_u64(0x0000000000000002),
F::from_canonical_u64(0x0000000000000003),
F::from_canonical_u64(0x0000000000000004),
];
let ref_inp_2: [F; 4] = [
F::from_canonical_u64(0x0000000000000005),
F::from_canonical_u64(0x0000000000000006),
F::from_canonical_u64(0x0000000000000007),
F::from_canonical_u64(0x0000000000000008),
];
let ref_out_key_0: [F; 4] = [
F::from_canonical_u64(0xc4a4082f411ba790),
F::from_canonical_u64(0x98c2ed7546c44cce),
F::from_canonical_u64(0xc9404f373b78c979),
F::from_canonical_u64(0x65d6b3c998920f59),
];
let ref_out_key_1: [F; 4] = [
F::from_canonical_u64(0xca47449a05283778),
F::from_canonical_u64(0x08d3ced2020391ac),
F::from_canonical_u64(0xda461ea45670fb12),
F::from_canonical_u64(0x57f2c0b6c98a05c5),
];
let ref_out_key_2: [F; 4] = [
F::from_canonical_u64(0xe6fcec96a7a7f4b0),
F::from_canonical_u64(0x3002a22356daa551),
F::from_canonical_u64(0x899e2c1075a45f3f),
F::from_canonical_u64(0xf07e38ccb3ade312),
];
let ref_out_key_3: [F; 4] = [
F::from_canonical_u64(0x9930cff752b046fb),
F::from_canonical_u64(0x41570687cadcea0b),
F::from_canonical_u64(0x3ac093a5a92066c7),
F::from_canonical_u64(0xc45c75a3911cde87),
];
// `HashOut` for inputs
let inp1 = HashOut { elements: ref_inp_1 };
let inp2 = HashOut { elements: ref_inp_2 };
// Expected outputs
let expected_outputs = [
ref_out_key_0,
ref_out_key_1,
ref_out_key_2,
ref_out_key_3,
];
// Iterate over each key and test key_compress output
for (key, &expected) in expected_outputs.iter().enumerate() {
let output = key_compress::<F, H>(inp1, inp2, key as u64);
// Assert that output matches the expected result
assert_eq!(output.elements, expected, "Output mismatch for key: {}", key);
println!("Test passed for key {}", key);
}
}
}
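A quick non-circuit usage sketch (same type aliases as the test module above; the meaning of the key values is assumed to match the Merkle key domains used elsewhere in this crate):

    let x = HashOut { elements: [F::ONE; 4] };
    let y = HashOut { elements: [F::TWO; 4] };
    // key 0 here; in the tree the key distinguishes bottom-layer and odd-node cases (assumed).
    let z: HashOut<F> = key_compress::<F, H>(x, y, 0);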

View File

@ -46,7 +46,8 @@ impl<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> MerkleTreeCircuit<F, D> {
pub fn new() -> Self{
Self{
phantom_data: Default::default(),
@ -109,7 +110,7 @@ impl<
/// takes the params from the targets struct
/// outputs the reconstructed merkle root
/// this one uses the mask bits
/// this one uses the mask bits to select the right layer
pub fn reconstruct_merkle_root_circuit_with_mask(
builder: &mut CircuitBuilder<F, D>,
targets: &mut MerkleTreeTargets,
@ -160,6 +161,7 @@ impl<
i += 1;
}
// select the right layer using the mask bits
// another way to do this is to use builder.select
// but that might be less efficient & more constraints
let mut reconstructed_root = HashOutTarget::from_vec([builder.zero();4].to_vec());
@ -169,8 +171,7 @@ impl<
add_assign_hash_out_target(builder,&mut reconstructed_root, &mul_result);
}
// reconstructed_root
state[max_depth]
reconstructed_root
}
}
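The selection above is a multiply-and-accumulate: each layer's reconstructed state is weighted by a selector derived from the mask bits, and only the selected layer survives the sum. A plain-field sketch of the same idea (names hypothetical, assuming a one-hot selector):

    // Non-circuit analogue of the in-circuit mask selection.
    fn select_layer<F: RichField>(states: &[HashOut<F>], selector: &[F]) -> HashOut<F> {
        let mut out = HashOut { elements: [F::ZERO; 4] };
        for (s, w) in states.iter().zip(selector) {
            for i in 0..4 {
                // with a one-hot selector, exactly one state contributes to the sum
                out.elements[i] = out.elements[i] + s.elements[i] * *w;
            }
        }
        out
    }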

View File

@ -8,3 +8,14 @@ use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
// will look into this later.
pub type HF = PoseidonHash;
// params used for the circuits
// should be defined prior to building the circuit
#[derive(Clone, Debug)]
pub struct CircuitParams{
pub max_depth: usize,
pub max_log2_n_slots: usize,
pub block_tree_depth: usize,
pub n_field_elems_per_cell: usize,
pub n_samples: usize,
}
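For illustration, an instance consistent with the test parameters used elsewhere in this commit (n_field_elems_per_cell is a placeholder; in practice it would come from Params::n_field_elems_per_cell()):

    let circuit_params = CircuitParams {
        max_depth: 32,               // slot tree depth (MAX_DEPTH)
        max_log2_n_slots: 8,         // log2(MAX_SLOTS = 256)
        block_tree_depth: 5,         // log2(BLOCK_SIZE / CELL_SIZE) = log2(65536 / 2048)
        n_field_elems_per_cell: 256, // placeholder value
        n_samples: 5,                // N_SAMPLES
    };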

View File

@ -5,25 +5,23 @@
// - reconstruct the dataset merkle root using the slot root as leaf
// - samples multiple cells by calling the sample_cells
use anyhow::Result;
use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::{HashOut, HashOutTarget, NUM_HASH_OUT_ELTS, RichField};
use plonky2::iop::target::{BoolTarget, Target};
use plonky2::iop::witness::{PartialWitness, WitnessWrite, Witness};
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, GenericHashOut};
use plonky2::plonk::config::GenericConfig;
use std::marker::PhantomData;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use plonky2::hash::hashing::PlonkyPermutation;
use crate::circuits::params::HF;
use crate::circuits::params::{CircuitParams, HF};
use crate::circuits::merkle_circuit::{MerkleTreeCircuit, MerkleTreeTargets, MerkleProofTarget};
use crate::circuits::utils::{assign_hash_out_targets, bits_le_padded_to_usize, calculate_cell_index_bits};
use crate::circuits::merkle_circuit::{MerkleProofTarget, MerkleTreeCircuit, MerkleTreeTargets};
use crate::circuits::utils::assign_hash_out_targets;
// ------ Dataset Tree --------
///dataset tree containing all slot trees
/// circuit for sampling a slot in a dataset merkle tree
#[derive(Clone)]
pub struct DatasetTreeCircuit<
pub struct SampleCircuit<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> {
@ -34,7 +32,7 @@ pub struct DatasetTreeCircuit<
impl<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> DatasetTreeCircuit<F, D> {
> SampleCircuit<F, D> {
pub fn new(params: CircuitParams) -> Self{
Self{
params,
@ -43,17 +41,8 @@ impl<
}
}
// params used for the circuits
// should be defined prior to building the circuit
#[derive(Clone)]
pub struct CircuitParams{
pub max_depth: usize,
pub max_log2_n_slots: usize,
pub block_tree_depth: usize,
pub n_field_elems_per_cell: usize,
pub n_samples: usize,
}
/// struct of input to the circuit as targets
/// used to build the circuit and can be assigned after building
#[derive(Clone)]
pub struct SampleTargets {
@ -65,13 +54,14 @@ pub struct SampleTargets {
pub n_cells_per_slot: Target,
pub n_slots_per_dataset: Target,
pub slot_proof: MerkleProofTarget, // proof that slot_root in dataset tree
pub slot_proof: MerkleProofTarget,
pub cell_data: Vec<Vec<Target>>,
pub cell_data: Vec<CellTarget>,
pub merkle_paths: Vec<MerkleProofTarget>,
}
#[derive(Clone)]
/// circuit input as field elements
#[derive(Debug, PartialEq)]
pub struct SampleCircuitInput<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
@ -84,31 +74,42 @@ pub struct SampleCircuitInput<
pub n_cells_per_slot: F,
pub n_slots_per_dataset: F,
pub slot_proof: Vec<HashOut<F>>, // proof that slot_root in dataset tree
pub slot_proof: Vec<HashOut<F>>,
pub cell_data: Vec<Vec<F>>,
pub merkle_paths: Vec<Vec<HashOut<F>>>,
pub cell_data: Vec<Cell<F,D>>,
pub merkle_paths: Vec<MerklePath<F,D>>,
}
#[derive(Clone)]
/// merkle path from leaf to root as vec of HashOut (4 Goldilocks field elems)
#[derive(Clone, Debug, PartialEq)]
pub struct MerklePath<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> {
path: Vec<HashOut<F>>
pub path: Vec<HashOut<F>>
}
#[derive(Clone)]
/// a vec of cell targets
#[derive(Clone, Debug, PartialEq)]
pub struct CellTarget {
pub data: Vec<Target>
}
/// cell data as field elements
#[derive(Clone, Debug, PartialEq)]
pub struct Cell<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> {
pub data: Vec<F>,
}
//------- circuit impl --------
impl<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> DatasetTreeCircuit<F, D> {
> SampleCircuit<F, D> {
// in-circuit sampling
// TODO: make it more modular
@ -145,7 +146,9 @@ impl<
// dataset last bits (binary decomposition of last_index = nleaves - 1)
let dataset_last_index = builder.sub(n_slots_per_dataset, one);
let d_last_bits = builder.split_le(dataset_last_index,max_log2_n_slots);
let d_mask_bits = builder.split_le(dataset_last_index,max_log2_n_slots+1);
// dataset mask bits
let mut d_mask_bits = builder.split_le(dataset_last_index,max_log2_n_slots+1);
// dataset Merkle path (sibling hashes from leaf to root)
let d_merkle_path = MerkleProofTarget {
@ -182,25 +185,33 @@ impl<
// virtual target for n_cells_per_slot
let n_cells_per_slot = builder.add_virtual_target();
// calculate last index = n_cells_per_slot-1
let slot_last_index = builder.sub(n_cells_per_slot, one);
// create the mask bits
// TODO: reuse this for block and slot trees
let mask_bits = builder.split_le(slot_last_index,max_depth);
// last and mask bits for block tree
let mut b_last_bits = builder.split_le(slot_last_index,max_depth);
let mut b_mask_bits = builder.split_le(slot_last_index,max_depth);
// last and mask bits for the slot tree
let mut s_last_bits = b_last_bits.split_off(block_tree_depth);
let mut s_mask_bits = b_mask_bits.split_off(block_tree_depth);
// pad mask bits with 0
b_mask_bits.push(BoolTarget::new_unsafe(zero.clone()));
s_mask_bits.push(BoolTarget::new_unsafe(zero.clone()));
for i in 0..n_samples{
// cell data targets
let mut data_i = (0..n_field_elems_per_cell).map(|_| builder.add_virtual_target()).collect::<Vec<_>>();
// hash the cell data
let mut hash_inputs:Vec<Target>= Vec::new();
hash_inputs.extend_from_slice(&data_i);
let data_i_hash = builder.hash_n_to_hash_no_pad::<HF>(hash_inputs);
// counter constant
// make the counter into hash digest
let ctr_target = builder.constant(F::from_canonical_u64((i+1) as u64));
let mut ctr = builder.add_virtual_hash();
for i in 0..ctr.elements.len() {
@ -210,8 +221,8 @@ impl<
ctr.elements[i] = zero.clone();
}
}
// paths
let mut b_path_bits = self.calculate_cell_index_bits(builder, &entropy_target, &d_targets.leaf, &ctr);
// paths for block and slot
let mut b_path_bits = self.calculate_cell_index_bits(builder, &entropy_target, &d_targets.leaf, &ctr, mask_bits.clone());
let mut s_path_bits = b_path_bits.split_off(block_tree_depth);
let mut b_merkle_path = MerkleProofTarget {
@ -255,7 +266,10 @@ impl<
};
slot_sample_proof_target.path.extend_from_slice(&slot_targets.merkle_path.path);
data_targets.push(data_i);
let cell_i = CellTarget{
data: data_i
};
data_targets.push(cell_i);
slot_sample_proofs.push(slot_sample_proof_target);
}
@ -273,7 +287,8 @@ impl<
}
}
pub fn calculate_cell_index_bits(&self, builder: &mut CircuitBuilder::<F, D>, entropy: &HashOutTarget, slot_root: &HashOutTarget, ctr: &HashOutTarget) -> Vec<BoolTarget> {
/// calculate the cell index = H( entropy | slotRoot | counter ) `mod` nCells
pub fn calculate_cell_index_bits(&self, builder: &mut CircuitBuilder::<F, D>, entropy: &HashOutTarget, slot_root: &HashOutTarget, ctr: &HashOutTarget, mask_bits: Vec<BoolTarget>) -> Vec<BoolTarget> {
let mut hash_inputs:Vec<Target>= Vec::new();
hash_inputs.extend_from_slice(&entropy.elements);
hash_inputs.extend_from_slice(&slot_root.elements);
@ -281,9 +296,17 @@ impl<
let hash_out = builder.hash_n_to_hash_no_pad::<HF>(hash_inputs);
let cell_index_bits = builder.low_bits(hash_out.elements[0], self.params.max_depth, 64);
cell_index_bits
let mut masked_cell_index_bits = vec![];
// apply the bit mask: keep only the index bits selected by mask_bits (up to max_depth)
for i in 0..self.params.max_depth{
masked_cell_index_bits.push(BoolTarget::new_unsafe(builder.mul(mask_bits[i].target, cell_index_bits[i].target)));
}
masked_cell_index_bits
}
/// helper method to assign the targets in the circuit to actual field elems
pub fn sample_slot_assign_witness(
&self,
pw: &mut PartialWitness<F>,
@ -323,16 +346,13 @@ impl<
// do the sample N times
for i in 0..n_samples {
let cell_index_bits = calculate_cell_index_bits(&witnesses.entropy,witnesses.slot_root,i+1,max_depth);
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
// assign cell data
let leaf = witnesses.cell_data[i].clone();
let leaf = witnesses.cell_data[i].data.clone();
for j in 0..n_field_elems_per_cell{
pw.set_target(targets.cell_data[i][j], leaf[j]);
pw.set_target(targets.cell_data[i].data[j], leaf[j]);
}
// assign proof for that cell
let cell_proof = witnesses.merkle_paths[i].clone();
let cell_proof = witnesses.merkle_paths[i].path.clone();
for k in 0..max_depth {
pw.set_hash_target(targets.merkle_paths[i].path[k], cell_proof[k])
}
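A note on the b/s bit split used above: Vec::split_off(block_tree_depth) keeps the low (block tree) bits in place and returns the high (slot tree) bits. A small sketch, assuming max_depth = 9 and block_tree_depth = 5:

    let mut b_bits: Vec<bool> = vec![true; 9]; // all 9 index bits, little-endian
    let s_bits = b_bits.split_off(5);          // returns the 4 high (slot tree) bits
    assert_eq!(b_bits.len(), 5);               // the 5 low (block tree) bits remain
    assert_eq!(s_bits.len(), 4);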

View File

@ -25,7 +25,8 @@ pub(crate) fn usize_to_bits_le_padded(index: usize, bit_length: usize) -> Vec<bo
bits
}
/// calculate the sampled cell index from entropy, slot root, and counter
pub(crate) fn calculate_cell_index_bits<F: RichField>(entropy: &Vec<F>, slot_root: HashOut<F>, ctr: usize, depth: usize) -> Vec<bool> {
/// this is the non-circuit version for testing
pub(crate) fn calculate_cell_index_bits<F: RichField>(entropy: &Vec<F>, slot_root: HashOut<F>, ctr: usize, depth: usize, mask_bits: Vec<bool>) -> Vec<bool> {
let ctr_field = F::from_canonical_u64(ctr as u64);
let mut ctr_as_digest = HashOut::<F>::ZERO;
ctr_as_digest.elements[0] = ctr_field;
@ -37,7 +38,14 @@ pub(crate) fn calculate_cell_index_bits<F: RichField>(entropy: &Vec<F>, slot_roo
let cell_index_bytes = hash_output.elements[0].to_canonical_u64();
let cell_index_bits = usize_to_bits_le_padded(cell_index_bytes as usize, depth);
cell_index_bits
let mut masked_cell_index_bits = vec![];
for i in 0..depth{
masked_cell_index_bits.push(cell_index_bits[i] && mask_bits[i]);
}
masked_cell_index_bits
}
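A worked example of the masking: with n_cells = 512 and depth = 9, last_index = 511 has all nine low bits set, so the index passes through unchanged; with a non-power-of-two cell count the top mask bits are zero and the high index bits are dropped:

    // Illustrative only; both helpers are defined in this module.
    let mask_bits = usize_to_bits_le_padded(511, 9);  // all true
    let index_bits = usize_to_bits_le_padded(300, 9);
    let masked: Vec<bool> = index_bits.iter().zip(&mask_bits)
        .map(|(b, m)| *b && *m)
        .collect();
    assert_eq!(bits_le_padded_to_usize(&masked), 300);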
pub(crate) fn take_n_bits_from_bytes(bytes: &[u8], n: usize) -> Vec<bool> {

View File

@ -1,4 +1,4 @@
pub mod circuits;
pub mod merkle_tree;
pub mod proof_input;
// pub mod proof_input;
pub mod tests;

View File

@ -1,144 +1,101 @@
use anyhow::Result;
use plonky2::hash::hash_types::{HashOut, RichField};
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2_field::extension::Extendable;
use plonky2_field::types::Field;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use crate::circuits::params::HF;
use crate::proof_input::test_params::{BOT_DEPTH, DATASET_DEPTH, MAX_DEPTH, N_BLOCKS, N_CELLS, N_CELLS_IN_BLOCKS, N_FIELD_ELEMS_PER_CELL, N_SAMPLES, TESTING_SLOT_INDEX};
use crate::proof_input::test_params::Params;
use crate::circuits::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le_padded};
use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
use crate::circuits::sample_cells::Cell;
// #[derive(Clone)]
// pub struct Cell<
// F: RichField + Extendable<D> + Poseidon2,
// const D: usize,
// > {
// pub data: Vec<F>, // cell data as field elements
// }
// impl<
// F: RichField + Extendable<D> + Poseidon2,
// const D: usize,
// > Cell<F, D> {
/// Create a new cell with random data, using the parameters from `Params`
pub fn new_random_cell<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
>(params: &Params) -> Cell<F,D> {
let data = (0..params.n_field_elems_per_cell())
.map(|_| F::rand())
.collect::<Vec<_>>();
Cell::<F,D> {
data,
}
}
// }
#[derive(Clone)]
pub struct SlotTree<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> {
pub tree: MerkleTree<F>, // slot tree
pub tree: MerkleTree<F>, // slot tree
pub block_trees: Vec<MerkleTree<F>>, // vec of block trees
pub cell_data: Vec<Cell<F,D>>, // cell data as field elements
}
#[derive(Clone)]
pub struct Cell<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> {
pub data: Vec<F>, // cell data as field elements
}
impl<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> Default for Cell<F, D> {
/// default cell with random data
fn default() -> Self {
let data = (0..N_FIELD_ELEMS_PER_CELL)
.map(|j| F::rand())
.collect::<Vec<_>>();
Self{
data,
}
}
}
impl<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> Default for SlotTree<F, D> {
/// slot tree with fake data, for testing only
fn default() -> Self {
// generate fake cell data
let mut cell_data = (0..N_CELLS)
.map(|i|{
Cell::<F,D>::default()
})
.collect::<Vec<_>>();
Self::new(cell_data)
}
pub cell_data: Vec<Cell<F, D>>, // cell data as field elements
pub params: Params, // parameters
}
impl<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> SlotTree<F, D> {
/// Slot tree with fake data, for testing only
pub fn new_for_testing(cells: Vec<Cell<F, D>>) -> Self {
// Hash the cell data block to create leaves for one block
let leaves_block: Vec<HashOut<F>> = cells
.iter()
.map(|element| {
HF::hash_no_pad(&element.data)
})
.collect();
/// Create a slot tree with fake data, for testing only
pub fn new_default(params: &Params) -> Self {
// generate fake cell data
let cell_data = (0..params.n_cells)
.map(|_| new_random_cell(params))
.collect::<Vec<_>>();
Self::new(cell_data, params.clone())
}
// Zero hash
/// Create a new slot tree with the supplied cell data and parameters
pub fn new(cells: Vec<Cell<F, D>>, params: Params) -> Self {
let leaves: Vec<HashOut<F>> = cells
.iter()
.map(|element| HF::hash_no_pad(&element.data))
.collect();
let zero = HashOut {
elements: [F::ZERO; 4],
};
let n_blocks = params.n_blocks_test();
let n_cells_in_blocks = params.n_cells_in_blocks();
// Create a block tree from the leaves of one block
let b_tree = Self::get_block_tree(&leaves_block);
// Now replicate this block tree for all N_BLOCKS blocks
let block_trees = vec![b_tree; N_BLOCKS];
// Get the roots of block trees
let block_trees = (0..n_blocks)
.map(|i| {
let start = i * n_cells_in_blocks;
let end = (i + 1) * n_cells_in_blocks;
Self::get_block_tree(&leaves[start..end].to_vec())
})
.collect::<Vec<_>>();
let block_roots = block_trees
.iter()
.map(|t| t.root().unwrap())
.collect::<Vec<_>>();
// Create the slot tree from block roots
let slot_tree = MerkleTree::<F>::new(&block_roots, zero).unwrap();
// Create the full cell data and cell hash by repeating the block data
let cell_data = vec![cells.clone(); N_BLOCKS].concat();
// Return the constructed Self
Self {
tree: slot_tree,
block_trees,
cell_data,
}
}
/// same as default but with supplied cell data
pub fn new(cells: Vec<Cell<F, D>>) -> Self {
let leaves: Vec<HashOut<F>> = cells
.iter()
.map(|element| {
HF::hash_no_pad(&element.data)
})
.collect();
let zero = HashOut {
elements: [F::ZERO; 4],
};
let block_trees = (0..N_BLOCKS as usize)
.map(|i| {
let start = i * N_CELLS_IN_BLOCKS;
let end = (i + 1) * N_CELLS_IN_BLOCKS;
Self::get_block_tree(&leaves[start..end].to_vec())
// MerkleTree::<F> { tree: b_tree }
})
.collect::<Vec<_>>();
let block_roots = block_trees.iter()
.map(|t| {
t.root().unwrap()
})
.collect::<Vec<_>>();
let slot_tree = MerkleTree::<F>::new(&block_roots, zero).unwrap();
Self {
tree: slot_tree,
block_trees,
cell_data: cells,
params,
}
}
/// generates a proof for given leaf index
/// the path in the proof is a combined block and slot path to make up the full path
/// Generates a proof for the given leaf index
/// The path in the proof is a combined block and slot path to make up the full path
pub fn get_proof(&self, index: usize) -> MerkleProof<F> {
let block_index = index / N_CELLS_IN_BLOCKS;
let leaf_index = index % N_CELLS_IN_BLOCKS;
let block_index = index / self.params.n_cells_in_blocks();
let leaf_index = index % self.params.n_cells_in_blocks();
let block_proof = self.block_trees[block_index].get_proof(leaf_index).unwrap();
let slot_proof = self.tree.get_proof(block_index).unwrap();
@ -147,20 +104,20 @@ impl<
combined_path.extend(slot_proof.path.clone());
MerkleProof::<F> {
index: index,
index,
path: combined_path,
nleaves: self.cell_data.len(),
zero: block_proof.zero.clone(),
}
}
/// verify the given proof for slot tree, checks equality with given root
/// Verify the given proof for slot tree, checks equality with the given root
pub fn verify_cell_proof(&self, proof: MerkleProof<F>, root: HashOut<F>) -> anyhow::Result<bool> {
let mut block_path_bits = usize_to_bits_le_padded(proof.index, MAX_DEPTH);
let last_index = N_CELLS - 1;
let mut block_last_bits = usize_to_bits_le_padded(last_index, MAX_DEPTH);
let mut block_path_bits = usize_to_bits_le_padded(proof.index, self.params.max_depth);
let last_index = self.params.n_cells - 1;
let mut block_last_bits = usize_to_bits_le_padded(last_index, self.params.max_depth);
let split_point = BOT_DEPTH;
let split_point = self.params.bot_depth();
let slot_last_bits = block_last_bits.split_off(split_point);
let slot_path_bits = block_path_bits.split_off(split_point);
@ -170,8 +127,18 @@ impl<
let mut block_path = proof.path;
let slot_path = block_path.split_off(split_point);
let block_res = MerkleProof::<F>::reconstruct_root2(leaf_hash, block_path_bits.clone(), block_last_bits.clone(), block_path);
let reconstructed_root = MerkleProof::<F>::reconstruct_root2(block_res.unwrap(), slot_path_bits, slot_last_bits, slot_path);
let block_res = MerkleProof::<F>::reconstruct_root2(
leaf_hash,
block_path_bits.clone(),
block_last_bits.clone(),
block_path,
);
let reconstructed_root = MerkleProof::<F>::reconstruct_root2(
block_res.unwrap(),
slot_path_bits,
slot_last_bits,
slot_path,
);
Ok(reconstructed_root.unwrap() == root)
}
@ -187,50 +154,49 @@ impl<
}
// ------ Dataset Tree --------
///dataset tree containing all slot trees
/// Dataset tree containing all slot trees
#[derive(Clone)]
pub struct DatasetTree<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> {
pub tree: MerkleTree<F>, // dataset tree
pub tree: MerkleTree<F>, // dataset tree
pub slot_trees: Vec<SlotTree<F, D>>, // vec of slot trees
pub params: Params, // parameters
}
/// Dataset Merkle proof struct, containing the dataset proof and N_SAMPLES proofs.
/// Dataset Merkle proof struct, containing the dataset proof and sampled proofs.
#[derive(Clone)]
pub struct DatasetProof<F: RichField> {
pub slot_index: F,
pub entropy: HashOut<F>,
pub dataset_proof: MerkleProof<F>, // proof for dataset level tree
pub slot_proofs: Vec<MerkleProof<F>>, // proofs for sampled slot, contains N_SAMPLES proofs
pub cell_data: Vec<Vec<F>>,
}
impl<
pub struct DatasetProof<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> Default for DatasetTree<F, D> {
/// dataset tree with fake data, for testing only
fn default() -> Self {
let mut slot_trees = vec![];
let n_slots = 1 << DATASET_DEPTH;
for i in 0..n_slots {
slot_trees.push(SlotTree::<F, D>::default());
}
Self::new(slot_trees)
}
> {
pub slot_index: F,
pub entropy: HashOut<F>,
pub dataset_proof: MerkleProof<F>, // proof for dataset level tree
pub slot_proofs: Vec<MerkleProof<F>>, // proofs for sampled slot
pub cell_data: Vec<Cell<F,D>>,
}
impl<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
> DatasetTree<F, D> {
/// dataset tree with fake data, for testing only
/// create data for only the TESTING_SLOT_INDEX in params file
pub fn new_for_testing() -> Self {
/// Dataset tree with fake data, for testing only
pub fn new_default(params: &Params) -> Self {
let mut slot_trees = vec![];
let n_slots = 1 << DATASET_DEPTH;
let n_slots = 1 << params.dataset_depth_test();
for _ in 0..n_slots {
slot_trees.push(SlotTree::<F, D>::new_default(params));
}
Self::new(slot_trees, params.clone())
}
/// Create data for only the specified slot index in params
pub fn new_for_testing(params: &Params) -> Self {
let mut slot_trees = vec![];
// let n_slots = 1 << params.dataset_depth();
let n_slots = params.n_slots;
// zero hash
let zero = HashOut {
elements: [F::ZERO; 4],
@ -239,34 +205,34 @@ impl<
tree: MerkleTree::<F>::new(&[zero.clone()], zero.clone()).unwrap(),
block_trees: vec![],
cell_data: vec![],
params: params.clone(),
};
for i in 0..n_slots {
if (i == TESTING_SLOT_INDEX) {
slot_trees.push(SlotTree::<F, D>::default());
if i == params.testing_slot_index {
slot_trees.push(SlotTree::<F, D>::new_default(params));
} else {
slot_trees.push(zero_slot.clone());
}
}
// get the roots or slot trees
let slot_roots = slot_trees.iter()
.map(|t| {
t.tree.root().unwrap()
})
// get the roots of slot trees
let slot_roots = slot_trees
.iter()
.map(|t| t.tree.root().unwrap())
.collect::<Vec<_>>();
let dataset_tree = MerkleTree::<F>::new(&slot_roots, zero).unwrap();
Self {
tree: dataset_tree,
slot_trees,
params: params.clone(),
}
}
/// same as default but with supplied slot trees
pub fn new(slot_trees: Vec<SlotTree<F, D>>) -> Self {
// get the roots or slot trees
let slot_roots = slot_trees.iter()
.map(|t| {
t.tree.root().unwrap()
})
/// Same as default but with supplied slot trees
pub fn new(slot_trees: Vec<SlotTree<F, D>>, params: Params) -> Self {
// get the roots of slot trees
let slot_roots = slot_trees
.iter()
.map(|t| t.tree.root().unwrap())
.collect::<Vec<_>>();
// zero hash
let zero = HashOut {
@ -276,20 +242,24 @@ impl<
Self {
tree: dataset_tree,
slot_trees,
params,
}
}
/// generates a dataset level proof for given slot index
/// just a regular merkle tree proof
/// Generates a dataset level proof for the given slot index
/// Just a regular Merkle tree proof
pub fn get_proof(&self, index: usize) -> MerkleProof<F> {
let dataset_proof = self.tree.get_proof(index).unwrap();
dataset_proof
}
/// generates a proof for given slot index
/// also takes entropy so it can use it sample the slot
pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F> {
let dataset_proof = self.tree.get_proof(index).unwrap();
/// Generates a proof for the given slot index
/// Also takes entropy so it can use it to sample the slot
pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F,D> {
let mut dataset_proof = self.tree.get_proof(index).unwrap();
// println!("d proof len = {}", dataset_proof.path.len());
Self::pad_proof(&mut dataset_proof, self.params.dataset_depth());
// println!("d proof len = {}", dataset_proof.path.len());
let slot = &self.slot_trees[index];
let slot_root = slot.tree.root().unwrap();
let mut slot_proofs = vec![];
@ -298,12 +268,24 @@ impl<
let mut entropy_as_digest = HashOut::<F>::ZERO;
entropy_as_digest.elements[0] = entropy_field;
// derive the cell index from H(entropy | slot_root | counter)
for i in 0..N_SAMPLES {
let cell_index_bits = calculate_cell_index_bits(&entropy_as_digest.elements.to_vec(), slot_root, i+1, MAX_DEPTH);
let mask_bits = usize_to_bits_le_padded(self.params.n_cells-1, self.params.max_depth+1);
for i in 0..self.params.n_samples {
let cell_index_bits = calculate_cell_index_bits(
&entropy_as_digest.elements.to_vec(),
slot_root,
i + 1,
self.params.max_depth,
mask_bits.clone()
);
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
let s_proof = slot.get_proof(cell_index);
let mut s_proof = slot.get_proof(cell_index);
Self::pad_proof(&mut s_proof, self.params.max_depth);
slot_proofs.push(s_proof);
cell_data.push(slot.cell_data[cell_index].data.clone());
let data_i = slot.cell_data[cell_index].data.clone();
let cell_i = Cell::<F,D>{
data: data_i
};
cell_data.push(cell_i);
}
DatasetProof {
@ -315,28 +297,39 @@ impl<
}
}
// verify the sampling - non-circuit version
pub fn verify_sampling(&self, proof: DatasetProof<F>) -> bool {
let slot = &self.slot_trees[proof.slot_index.to_canonical_u64() as usize];
pub fn pad_proof(merkle_proof: &mut MerkleProof<F>, max_depth: usize){
for i in merkle_proof.path.len()..max_depth{
merkle_proof.path.push(HashOut::<F>::ZERO);
}
}
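Design note: pad_proof fixes every path to exactly max_depth siblings by appending zero digests, so the circuit shape is independent of the actual tree depth; the padded entries are effectively ignored during masked root reconstruction. The invariant it establishes:

    // After padding, every proof path has exactly max_depth siblings.
    let mut p = slot.get_proof(cell_index);
    Self::pad_proof(&mut p, self.params.max_depth);
    assert_eq!(p.path.len(), self.params.max_depth);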
// Verify the sampling - non-circuit version
pub fn verify_sampling(&self, proof: DatasetProof<F,D>) -> bool {
let slot_index = proof.slot_index.to_canonical_u64() as usize;
let slot = &self.slot_trees[slot_index];
let slot_root = slot.tree.root().unwrap();
// check dataset level proof
let d_res = proof.dataset_proof.verify(slot_root, self.tree.root().unwrap());
if (d_res.unwrap() == false) {
if d_res.unwrap() == false {
return false;
}
// sanity check
assert_eq!(N_SAMPLES, proof.slot_proofs.len());
assert_eq!(self.params.n_samples, proof.slot_proofs.len());
// derive the cell index from H(entropy | slot_root | counter)
for i in 0..N_SAMPLES {
// let entropy_field = F::from_canonical_u64(proof.entropy as u64);
// let mut entropy_as_digest = HashOut::<F>::ZERO;
// entropy_as_digest.elements[0] = entropy_field;
let cell_index_bits = calculate_cell_index_bits(&proof.entropy.elements.to_vec(), slot_root, i+1, MAX_DEPTH);
let mask_bits = usize_to_bits_le_padded(self.params.n_cells -1, self.params.max_depth);
for i in 0..self.params.n_samples {
let cell_index_bits = calculate_cell_index_bits(
&proof.entropy.elements.to_vec(),
slot_root,
i + 1,
self.params.max_depth,
mask_bits.clone(),
);
let cell_index = bits_le_padded_to_usize(&cell_index_bits);
//check the cell_index is the same as one in the proof
// check the cell_index is the same as one in the proof
assert_eq!(cell_index, proof.slot_proofs[i].index);
let s_res = slot.verify_cell_proof(proof.slot_proofs[i].clone(), slot_root);
if (s_res.unwrap() == false) {
if s_res.unwrap() == false {
return false;
}
}
@ -352,76 +345,82 @@ mod tests {
use plonky2::plonk::config::GenericConfig;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::circuits::sample_cells::{CircuitParams, DatasetTreeCircuit, SampleCircuitInput};
use crate::proof_input::test_params::{D, C, F, H, N_SLOTS};
use crate::circuits::params::CircuitParams;
use crate::circuits::sample_cells::{MerklePath, SampleCircuit, SampleCircuitInput};
use crate::proof_input::test_params::{C, D, F};
// test sample cells (non-circuit)
// Test sample cells (non-circuit)
#[test]
fn test_sample_cells() {
let dataset_t = DatasetTree::<F, D>::new_for_testing();
let slot_index = 2;
let entropy = 2;
let proof = dataset_t.sample_slot(slot_index,entropy);
let params = Params::default();
let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);
let slot_index = params.testing_slot_index;
let entropy = params.entropy; // Use the entropy from Params if desired
let proof = dataset_t.sample_slot(slot_index, entropy);
let res = dataset_t.verify_sampling(proof);
assert_eq!(res, true);
}
// test sample cells in-circuit for a selected slot
// Test sample cells in-circuit for a selected slot
#[test]
fn test_sample_cells_circuit_from_selected_slot() -> anyhow::Result<()> {
let params = Params::default();
let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);
let mut dataset_t = DatasetTree::<F, D>::new_for_testing();
let slot_index = params.testing_slot_index;
let entropy = params.entropy; // Use the entropy from Params if desired
let slot_index = TESTING_SLOT_INDEX;
let entropy = 123;
// sanity check
let proof = dataset_t.sample_slot(slot_index,entropy);
// Sanity check
let proof = dataset_t.sample_slot(slot_index, entropy);
let slot_root = dataset_t.slot_trees[slot_index].tree.root().unwrap();
let res = dataset_t.verify_sampling(proof.clone());
assert_eq!(res, true);
// let res = dataset_t.verify_sampling(proof.clone());
// assert_eq!(res, true);
// create the circuit
// Create the circuit
let config = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(config);
let circuit_params = CircuitParams{
max_depth: MAX_DEPTH,
max_log2_n_slots: DATASET_DEPTH,
block_tree_depth: BOT_DEPTH,
n_field_elems_per_cell: N_FIELD_ELEMS_PER_CELL,
n_samples: N_SAMPLES,
let circuit_params = CircuitParams {
max_depth: params.max_depth,
max_log2_n_slots: params.dataset_depth(),
block_tree_depth: params.bot_depth(),
n_field_elems_per_cell: params.n_field_elems_per_cell(),
n_samples: params.n_samples,
};
let circ = DatasetTreeCircuit::new(circuit_params);
let circ = SampleCircuit::new(circuit_params.clone());
let mut targets = circ.sample_slot_circuit(&mut builder);
// create a PartialWitness and assign
// Create a PartialWitness and assign
let mut pw = PartialWitness::new();
let mut slot_paths = vec![];
for i in 0..N_SAMPLES{
for i in 0..params.n_samples {
let path = proof.slot_proofs[i].path.clone();
slot_paths.push(path);
//TODO: need to be padded
let mp = MerklePath::<F,D>{
path,
};
slot_paths.push(mp);
}
println!("circuit params = {:?}", circuit_params);
let witness = SampleCircuitInput::<F,D>{
let witness = SampleCircuitInput::<F, D> {
entropy: proof.entropy.elements.clone().to_vec(),
dataset_root: dataset_t.tree.root().unwrap(),
slot_index: proof.slot_index.clone(),
slot_root,
n_cells_per_slot: F::from_canonical_u64((2_u32.pow(MAX_DEPTH as u32)) as u64),
n_slots_per_dataset: F::from_canonical_u64((2_u32.pow(DATASET_DEPTH as u32)) as u64),
n_cells_per_slot: F::from_canonical_usize(params.n_cells),
n_slots_per_dataset: F::from_canonical_usize(params.n_slots),
slot_proof: proof.dataset_proof.path.clone(),
cell_data: proof.cell_data.clone(),
merkle_paths: slot_paths,
};
println!("dataset ={:?}",dataset_t.slot_trees[0].tree.layers);
println!("dataset = {:?}", witness.slot_proof.clone());
println!("n_slots_per_dataset = {:?}", witness.n_slots_per_dataset.clone());
circ.sample_slot_assign_witness(&mut pw, &mut targets,witness);
circ.sample_slot_assign_witness(&mut pw, &mut targets, witness);
// build the circuit
// Build the circuit
let data = builder.build::<C>();
println!("circuit size = {:?}", data.common.degree_bits());
@ -430,7 +429,7 @@ mod tests {
let proof_with_pis = data.prove(pw)?;
println!("prove_time = {:?}", start_time.elapsed());
// verify the proof
// Verify the proof
let verifier_data = data.verifier_data();
assert!(
verifier_data.verify(proof_with_pis).is_ok(),
@ -439,4 +438,4 @@ mod tests {
Ok(())
}
}
}

View File

@ -0,0 +1,406 @@
// use std::fmt::Error;
use anyhow::{anyhow, Result, Error};
use std::num::ParseIntError;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::fs::File;
use std::io::{BufReader, Write};
use crate::proof_input::gen_input::DatasetTree;
use plonky2::hash::hash_types::{HashOut, RichField};
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2_field::extension::Extendable;
use plonky2_field::types::Field;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use crate::circuits::sample_cells::{Cell, MerklePath, SampleCircuitInput};
use crate::proof_input::test_params::Params;
impl<
F: RichField + Extendable<D> + Poseidon2 + Serialize,
const D: usize,
> DatasetTree<F, D> {
/// Function to generate witness and export to JSON
pub fn export_witness_to_json(&self, params: &Params, filename: &str) -> anyhow::Result<()> {
// Sample the slot
let slot_index = params.testing_slot_index;
let entropy = params.entropy;
let proof = self.sample_slot(slot_index, entropy);
let slot_root = self.slot_trees[slot_index].tree.root().unwrap();
// Prepare the witness data
let mut slot_paths = vec![];
for i in 0..params.n_samples {
let path = proof.slot_proofs[i].path.clone();
let mp = MerklePath::<F,D>{
path,
};
slot_paths.push(mp);
}
// Create the witness
let witness = SampleCircuitInput::<F, D> {
entropy: proof.entropy.elements.clone().to_vec(),
dataset_root: self.tree.root().unwrap(),
slot_index: proof.slot_index.clone(),
slot_root,
n_cells_per_slot: F::from_canonical_usize(params.n_cells_per_slot()),
n_slots_per_dataset: F::from_canonical_usize(params.n_slots_per_dataset()),
slot_proof: proof.dataset_proof.path.clone(),
cell_data: proof.cell_data.clone(),
merkle_paths: slot_paths,
};
// Convert the witness to a serializable format
let serializable_witness = SerializableWitness::from_witness(&witness);
// Serialize to JSON
let json_data = serde_json::to_string_pretty(&serializable_witness)?;
// Write to file
let mut file = File::create(filename)?;
file.write_all(json_data.as_bytes())?;
Ok(())
}
}
// Serializable versions of the witness data structures
#[derive(Serialize, Deserialize)]
struct SerializableWitness<
// F: RichField + Extendable<D> + Poseidon2 + Serialize,
// const D: usize,
> {
dataSetRoot: Vec<String>,
entropy: Vec<String>,
nCellsPerSlot: usize,
nSlotsPerDataSet: usize,
slotIndex: u64,
slotRoot: Vec<String>,
slotProof: Vec<String>,
cellData: Vec<Vec<String>>,
merklePaths: Vec<Vec<String>>,
}
impl<
// F: RichField + Extendable<D> + Poseidon2 + Serialize,
// const D: usize,
> SerializableWitness{
pub fn from_witness<
F: RichField + Extendable<D> + Poseidon2 + Serialize,
const D: usize,
>(witness: &SampleCircuitInput<F, D>) -> Self {
SerializableWitness {
dataSetRoot: witness
.dataset_root
.elements
.iter()
.map(|e| e.to_canonical_u64().to_string())
.collect(),
entropy: witness
.entropy
.iter()
.map(|e| e.to_canonical_u64().to_string())
.collect(),
nCellsPerSlot: witness.n_cells_per_slot.to_canonical_u64() as usize,
nSlotsPerDataSet: witness.n_slots_per_dataset.to_canonical_u64() as usize,
slotIndex: witness.slot_index.to_canonical_u64(),
slotRoot: witness
.slot_root
.elements
.iter()
.map(|e| e.to_canonical_u64().to_string())
.collect(),
slotProof: witness
.slot_proof
.iter()
.flat_map(|hash| hash.elements.iter())
.map(|e| e.to_canonical_u64().to_string())
.collect(),
cellData: witness
.cell_data
.iter()
.map(|data_vec| {
data_vec.data
.iter()
.map(|e| e.to_canonical_u64().to_string())
.collect()
})
.collect(),
merklePaths: witness
.merkle_paths
.iter()
.map(|path| {
path.path.iter()
.flat_map(|hash| hash.elements.iter())
.map(|e| e.to_canonical_u64().to_string())
.collect()
})
.collect(),
}
}
}
// pub struct SampleCircuitInput<
// F: RichField + Extendable<D> + Poseidon2,
// const D: usize,
// > {
// pub entropy: Vec<F>,
// pub dataset_root: HashOut<F>,
// pub slot_index: F,
// pub slot_root: HashOut<F>,
// pub n_cells_per_slot: F,
// pub n_slots_per_dataset: F,
// pub slot_proof: Vec<HashOut<F>>,
// pub cell_data: Vec<Vec<F>>,
// pub merkle_paths: Vec<Vec<HashOut<F>>>,
// }
impl<> SerializableWitness {
pub fn to_witness<
F: RichField + Extendable<D> + Poseidon2, const D: usize
>(&self) -> Result<SampleCircuitInput<F, D>> {
// Convert entropy
let entropy = self
.entropy
.iter()
.map(|s| -> Result<F, Error> {
let n = s.parse::<u64>()?;
Ok(F::from_canonical_u64(n))
})
.collect::<Result<Vec<F>, Error>>()?;
// Convert dataset_root
let dataset_root_elements = self
.dataSetRoot
.iter()
.map(|s| -> Result<F, Error> {
let n = s.parse::<u64>()?;
Ok(F::from_canonical_u64(n))
})
.collect::<Result<Vec<F>, Error>>()?;
let dataset_root = HashOut {
elements: dataset_root_elements
.try_into()
.map_err(|_| anyhow!("Invalid dataset_root length"))?,
};
// slot_index
let slot_index = F::from_canonical_u64(self.slotIndex);
// slot_root
let slot_root_elements = self
.slotRoot
.iter()
.map(|s| -> Result<F, Error> {
let n = s.parse::<u64>()?;
Ok(F::from_canonical_u64(n))
})
.collect::<Result<Vec<F>, Error>>()?;
let slot_root = HashOut {
elements: slot_root_elements
.try_into()
.map_err(|_| anyhow!("Invalid slot_root length"))?,
};
// n_cells_per_slot
let n_cells_per_slot = F::from_canonical_usize(self.nCellsPerSlot);
// n_slots_per_dataset
let n_slots_per_dataset = F::from_canonical_usize(self.nSlotsPerDataSet);
// slot_proof
let slot_proof_elements = self
.slotProof
.iter()
.map(|s| -> Result<F, Error> {
let n = s.parse::<u64>()?;
Ok(F::from_canonical_u64(n))
})
.collect::<Result<Vec<F>, Error>>()?;
if slot_proof_elements.len() % 4 != 0 {
return Err(anyhow!("Invalid slot_proof length"));
}
let slot_proof = slot_proof_elements
.chunks(4)
.map(|chunk| -> Result<HashOut<F>, Error> {
let elements: [F; 4] = chunk
.try_into()
.map_err(|_| anyhow!("Invalid chunk length"))?;
Ok(HashOut { elements })
})
.collect::<Result<Vec<HashOut<F>>, Error>>()?;
// cell_data
let cell_data = self
.cellData
.iter()
.map(|vec_of_strings| -> Result<Cell<F,D>, Error> {
let cell = vec_of_strings
.iter()
.map(|s| -> Result<F, Error> {
let n = s.parse::<u64>()?;
Ok(F::from_canonical_u64(n))
})
.collect::<Result<Vec<F>, Error>>();
Ok(Cell::<F,D>{
data: cell.unwrap(),
})
})
.collect::<Result<Vec<Cell<F,D>>, Error>>()?;
// merkle_paths
let merkle_paths = self
.merklePaths
.iter()
.map(|path_strings| -> Result<MerklePath<F,D>, Error> {
let path_elements = path_strings
.iter()
.map(|s| -> Result<F, Error> {
let n = s.parse::<u64>()?;
Ok(F::from_canonical_u64(n))
})
.collect::<Result<Vec<F>, Error>>()?;
if path_elements.len() % 4 != 0 {
return Err(anyhow!("Invalid merkle path length"));
}
let path = path_elements
.chunks(4)
.map(|chunk| -> Result<HashOut<F>, Error> {
let elements: [F; 4] = chunk
.try_into()
.map_err(|_| anyhow!("Invalid chunk length"))?;
Ok(HashOut { elements })
})
.collect::<Result<Vec<HashOut<F>>, Error>>()?;
let mp = MerklePath::<F,D>{
path,
};
Ok(mp)
})
.collect::<Result<Vec<MerklePath<F,D>>, Error>>()?;
Ok(SampleCircuitInput {
entropy,
dataset_root,
slot_index,
slot_root,
n_cells_per_slot,
n_slots_per_dataset,
slot_proof,
cell_data,
merkle_paths,
})
}
}
pub fn import_witness_from_json<F: RichField + Extendable<D> + Poseidon2, const D: usize>(
filename: &str,
) -> Result<SampleCircuitInput<F, D>> {
let file = File::open(filename)?;
let reader = BufReader::new(file);
let serializable_witness: SerializableWitness = serde_json::from_reader(reader)?;
let witness = serializable_witness.to_witness()?;
Ok(witness)
}
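For reference, the JSON produced and consumed here has the following shape (illustrative values; every field element is the decimal string of its canonical u64, and each hash is flattened to 4 such strings):

    {
      "dataSetRoot": ["123", "456", "789", "1011"],
      "entropy": ["1234567", "0", "0", "0"],
      "nCellsPerSlot": 512,
      "nSlotsPerDataSet": 16,
      "slotIndex": 2,
      "slotRoot": ["1", "2", "3", "4"],
      "slotProof": ["...flattened sibling hashes, 4 strings each..."],
      "cellData": [["...one inner array of field-element strings per sample..."]],
      "merklePaths": [["...one flattened path of 4-string hashes per sample..."]]
    }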
#[cfg(test)]
mod tests {
use super::*;
use crate::proof_input::test_params::{F,D};
use std::fs;
// Test to generate the JSON file
#[test]
fn test_export_witness_to_json() -> anyhow::Result<()> {
// Create Params instance
let params = Params::default();
// Create the dataset tree
let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);
// Export the witness to JSON
dataset_t.export_witness_to_json(&params, "input.json")?;
println!("Witness exported to input.json");
Ok(())
}
#[test]
fn test_import_witness_from_json() -> anyhow::Result<()> {
// First, ensure that the JSON file exists
// You can generate it using the export function if needed
// Import the witness from the JSON file
let witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
// Perform some checks to verify that the data was imported correctly
assert_eq!(witness.entropy.len(), 4); // Example check
// Add more assertions as needed
println!("Witness imported successfully");
Ok(())
}
#[test]
fn test_export_import_witness() -> anyhow::Result<()> {
// Create Params instance
let params = Params::default();
// Create the dataset tree
let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);
// Generate the witness data
let slot_index = params.testing_slot_index;
let entropy = params.entropy;
let proof = dataset_t.sample_slot(slot_index, entropy);
let slot_root = dataset_t.slot_trees[slot_index].tree.root().unwrap();
let mut slot_paths = vec![];
for i in 0..params.n_samples {
let path = proof.slot_proofs[i].path.clone();
let mp = MerklePath::<F,D>{
path,
};
slot_paths.push(mp);
}
let original_witness = SampleCircuitInput::<F, D> {
entropy: proof.entropy.elements.clone().to_vec(),
dataset_root: dataset_t.tree.root().unwrap(),
slot_index: proof.slot_index.clone(),
slot_root,
n_cells_per_slot: F::from_canonical_usize(params.n_cells_per_slot()),
n_slots_per_dataset: F::from_canonical_usize(params.n_slots_per_dataset()),
slot_proof: proof.dataset_proof.path.clone(),
cell_data: proof.cell_data.clone(),
merkle_paths: slot_paths,
};
// Export the witness to JSON
dataset_t.export_witness_to_json(&params, "input.json")?;
println!("Witness exported to input.json");
// Import the witness from JSON
let imported_witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
println!("Witness imported from input.json");
// Compare the original and imported witnesses
assert_eq!(original_witness, imported_witness, "Witnesses are not equal");
// Cleanup: Remove the generated JSON file
fs::remove_file("input.json")?;
println!("Test passed: Original and imported witnesses are equal.");
Ok(())
}
}

View File

@ -1,3 +1,4 @@
pub mod gen_input;
pub mod test_params;
pub mod utils;
pub mod utils;
pub mod json;

View File

@ -2,6 +2,8 @@
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use std::env;
use anyhow::{Result, Context};
// fake input params
@ -13,7 +15,7 @@ pub type H = PoseidonHash;
// hardcoded params for generating proof input
pub const MAX_DEPTH: usize = 8; // depth of big tree (slot tree depth, includes block tree depth)
pub const MAX_DEPTH: usize = 32; // depth of big tree (slot tree depth, includes block tree depth)
pub const MAX_SLOTS: usize = 256; // maximum number of slots
pub const CELL_SIZE: usize = 2048; // cell size in bytes
pub const BLOCK_SIZE: usize = 65536; // block size in bytes
@ -22,11 +24,12 @@ pub const N_SAMPLES: usize = 5; // number of samples to prove
pub const ENTROPY: usize = 1234567; // external randomness
pub const SEED: usize = 12345; // seed for creating fake data TODO: not used now
pub const N_SLOTS: usize = 8; // number of slots in the dataset
pub const N_SLOTS: usize = 16; // number of slots in the dataset
pub const TESTING_SLOT_INDEX: usize = 2; // the index of the slot to be sampled
pub const N_CELLS: usize = 512; // number of cells in each slot
/// Params struct holding the proof-input generation parameters
#[derive(Clone)]
pub struct Params {
pub max_depth: usize,
pub max_slots: usize,
@ -97,7 +100,7 @@ impl Params {
// BOT_DEPTH
pub fn bot_depth(&self) -> usize {
(self.block_size / self.cell_size).ilog2() as usize
(self.block_size / self.cell_size).trailing_zeros() as usize
}
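// e.g. with the defaults above: BLOCK_SIZE / CELL_SIZE = 65536 / 2048 = 32,
// and 32 has 5 trailing zeros, so bot_depth() = 5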
// N_CELLS_IN_BLOCKS
@ -110,10 +113,36 @@ impl Params {
1 << (self.max_depth - self.bot_depth())
}
// Depth of test input
pub fn depth_test(&self) -> usize {
self.n_cells.trailing_zeros() as usize
}
// N_BLOCKS for the test input
pub fn n_blocks_test(&self) -> usize {
1 << (self.depth_test() - self.bot_depth())
}
// DATASET_DEPTH
pub fn dataset_depth(&self) -> usize {
self.max_slots.ilog2() as usize
self.max_slots.trailing_zeros() as usize
}
// DATASET_DEPTH for test
pub fn dataset_depth_test(&self) -> usize {
self.n_slots.trailing_zeros() as usize
}
// n_cells_per_slot (2^max_depth)
pub fn n_cells_per_slot(&self) -> usize {
1 << self.max_depth
}
// n_slots_per_dataset (2^dataset_depth)
pub fn n_slots_per_dataset(&self) -> usize {
1 << self.dataset_depth()
}
}
@ -127,4 +156,71 @@ pub const N_BLOCKS: usize = 1<<(MAX_DEPTH - BOT_DEPTH); // 2^(MAX_DEPTH - BOT_DE
pub const DATASET_DEPTH: usize = MAX_SLOTS.ilog2() as usize;
// TODO: load params
// load params
impl Params {
pub fn from_env() -> Result<Self> {
let max_depth = env::var("MAXDEPTH")
.context("MAXDEPTH not set")?
.parse::<usize>()
.context("Invalid MAXDEPTH")?;
let max_slots = env::var("MAXSLOTS")
.context("MAXSLOTS not set")?
.parse::<usize>()
.context("Invalid MAXSLOTS")?;
let cell_size = env::var("CELLSIZE")
.context("CELLSIZE not set")?
.parse::<usize>()
.context("Invalid CELLSIZE")?;
let block_size = env::var("BLOCKSIZE")
.context("BLOCKSIZE not set")?
.parse::<usize>()
.context("Invalid BLOCKSIZE")?;
let n_samples = env::var("NSAMPLES")
.context("NSAMPLES not set")?
.parse::<usize>()
.context("Invalid NSAMPLES")?;
let entropy = env::var("ENTROPY")
.context("ENTROPY not set")?
.parse::<usize>()
.context("Invalid ENTROPY")?;
let seed = env::var("SEED")
.context("SEED not set")?
.parse::<usize>()
.context("Invalid SEED")?;
let n_slots = env::var("NSLOTS")
.context("NSLOTS not set")?
.parse::<usize>()
.context("Invalid NSLOTS")?;
let testing_slot_index = env::var("SLOTINDEX")
.context("SLOTINDEX not set")?
.parse::<usize>()
.context("Invalid SLOTINDEX")?;
let n_cells = env::var("NCELLS")
.context("NCELLS not set")?
.parse::<usize>()
.context("Invalid NCELLS")?;
Ok(Params {
max_depth,
max_slots,
cell_size,
block_size,
n_samples,
entropy,
seed,
n_slots,
testing_slot_index,
n_cells,
})
}
}
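A minimal sketch of driving from_env (illustrative; std::env::set_var is process-global, so real tests should isolate or serialize it):

    std::env::set_var("MAXDEPTH", "32");
    std::env::set_var("MAXSLOTS", "256");
    // set CELLSIZE, BLOCKSIZE, NSAMPLES, ENTROPY, SEED, NSLOTS, SLOTINDEX, NCELLS likewise
    let params = Params::from_env()?;
    assert_eq!(params.max_depth, 32);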

View File

@ -2,26 +2,19 @@ use anyhow::Result;
use plonky2::field::extension::Extendable;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::{HashOut, HashOutTarget, RichField, NUM_HASH_OUT_ELTS};
use plonky2::hash::hash_types::{HashOut, HashOutTarget, NUM_HASH_OUT_ELTS, RichField};
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::iop::target::{BoolTarget, Target};
use plonky2::iop::witness::{PartialWitness, Witness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData, VerifierCircuitData};
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, GenericHashOut, Hasher, PoseidonGoldilocksConfig};
use plonky2::plonk::proof::{Proof, ProofWithPublicInputs};
use std::marker::PhantomData;
use std::os::macos::raw::stat;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use serde::Serialize;
use crate::circuits::keyed_compress::key_compress_circuit;
use crate::circuits::params::HF;
use crate::circuits::merkle_circuit::{MerkleProofTarget, MerkleTreeCircuit, MerkleTreeTargets};
use crate::circuits::utils::{add_assign_hash_out_target, assign_bool_targets, assign_hash_out_targets, mul_hash_out_target, usize_to_bits_le_padded};
use crate::circuits::utils::{assign_bool_targets, assign_hash_out_targets, usize_to_bits_le_padded};
use crate::merkle_tree::merkle_safe::MerkleTree;
use crate::merkle_tree::merkle_safe::{KEY_NONE,KEY_BOTTOM_LAYER};
/// the input to the merkle tree circuit
#[derive(Clone)]
@ -115,7 +108,6 @@ pub fn assign_witness<
#[cfg(test)]
mod tests {
use std::time::Instant;
use plonky2::hash::hash_types::HashOut;
use plonky2::hash::poseidon::PoseidonHash;
use super::*;
@ -124,11 +116,8 @@ mod tests {
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2_field::goldilocks_field::GoldilocksField;
use crate::circuits::merkle_circuit::{MerkleTreeCircuit, };
use crate::circuits::sample_cells::{CircuitParams, DatasetTreeCircuit, SampleCircuitInput};
use crate::circuits::utils::usize_to_bits_le_padded;
use crate::merkle_tree::merkle_safe::MerkleTree;
use crate::proof_input::test_params::{D, C, F, H, N_SLOTS, MAX_DEPTH};
// NOTE: for now these tests don't check that the reconstructed root equals the expected root
// this will be fixed later; for that check, see the prove_single_cell tests