add no-padding hash and refactor
Commit 2b2aaab749 (parent a6bfc1ae92)
@@ -13,7 +13,7 @@ use std::marker::PhantomData;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 use crate::circuits::keyed_compress::key_compress_circuit;
 use crate::circuits::params::HF;
-use crate::circuits::utils::{add_assign_hash_out_target, assign_bool_targets, assign_hash_out_targets, mul_hash_out_target, usize_to_bits_le_padded};
+use crate::circuits::utils::{add_assign_hash_out_target, assign_bool_targets, assign_hash_out_targets, mul_hash_out_target};
 use crate::merkle_tree::merkle_safe::{KEY_NONE,KEY_BOTTOM_LAYER};
 
 /// Merkle tree targets representing the input to the circuit
@@ -1,5 +1,7 @@
 // global params for the circuits
 
+use anyhow::{Result, Context};
+use std::env;
 use plonky2::hash::poseidon::PoseidonHash;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
 
@@ -8,8 +10,8 @@ use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
 // will look into this later.
 pub type HF = Poseidon2Hash;
 
-// params used for the circuits
-// should be defined prior to building the circuit
+/// params used for the circuits
+/// should be defined prior to building the circuit
 #[derive(Clone, Debug)]
 pub struct CircuitParams{
     pub max_depth: usize,

@@ -19,3 +21,48 @@ pub struct CircuitParams{
     pub n_samples: usize,
 }
 
+impl CircuitParams {
+    /// Creates a new `CircuitParams` struct from environment.
+    ///
+    /// - `MAX_DEPTH`: The maximum slot depth
+    /// - `MAX_LOG2_N_SLOTS`: The maximum log2 number of slots
+    /// - `BLOCK_TREE_DEPTH`: The block tree depth
+    /// - `N_FIELD_ELEMS_PER_CELL`: The number of field elements per cell
+    /// - `N_SAMPLES`: The number of samples
+    ///
+    /// Returns an error if any environment variable is missing or fails to parse.
+    pub fn from_env() -> Result<Self> {
+        let max_depth = env::var("MAX_DEPTH")
+            .context("MAX_DEPTH is not set")?
+            .parse::<usize>()
+            .context("MAX_DEPTH must be a valid usize")?;
+
+        let max_log2_n_slots = env::var("MAX_LOG2_N_SLOTS")
+            .context("MAX_LOG2_N_SLOTS is not set")?
+            .parse::<usize>()
+            .context("MAX_LOG2_N_SLOTS must be a valid usize")?;
+
+        let block_tree_depth = env::var("BLOCK_TREE_DEPTH")
+            .context("BLOCK_TREE_DEPTH is not set")?
+            .parse::<usize>()
+            .context("BLOCK_TREE_DEPTH must be a valid usize")?;
+
+        let n_field_elems_per_cell = env::var("N_FIELD_ELEMS_PER_CELL")
+            .context("N_FIELD_ELEMS_PER_CELL is not set")?
+            .parse::<usize>()
+            .context("N_FIELD_ELEMS_PER_CELL must be a valid usize")?;
+
+        let n_samples = env::var("N_SAMPLES")
+            .context("N_SAMPLES is not set")?
+            .parse::<usize>()
+            .context("N_SAMPLES must be a valid usize")?;
+
+        Ok(CircuitParams {
+            max_depth,
+            max_log2_n_slots,
+            block_tree_depth,
+            n_field_elems_per_cell,
+            n_samples,
+        })
+    }
+}
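A minimal usage sketch for the loader above (the values are illustrative only; the function merely requires each variable to parse as a usize):

    // assuming the variables are exported in the environment, e.g.
    //   MAX_DEPTH=32 MAX_LOG2_N_SLOTS=8 BLOCK_TREE_DEPTH=5 N_FIELD_ELEMS_PER_CELL=272 N_SAMPLES=5
    use codex_plonky2_circuits::circuits::params::CircuitParams;

    fn load_circuit_params() -> anyhow::Result<CircuitParams> {
        // returns a descriptive error if any variable is missing or is not a valid usize
        let params = CircuitParams::from_env()?;
        Ok(params)
    }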
@@ -17,7 +17,7 @@ use plonky2::hash::hashing::PlonkyPermutation;
 use crate::circuits::params::{CircuitParams, HF};
 
 use crate::circuits::merkle_circuit::{MerkleProofTarget, MerkleTreeCircuit, MerkleTreeTargets};
-use crate::circuits::sponge::hash_n_with_padding;
+use crate::circuits::sponge::{hash_n_no_padding, hash_n_with_padding};
 use crate::circuits::utils::assign_hash_out_targets;
 
 /// circuit for sampling a slot in a dataset merkle tree

@@ -212,7 +212,7 @@ impl<
         let mut hash_inputs:Vec<Target>= Vec::new();
         hash_inputs.extend_from_slice(&data_i);
         // let data_i_hash = builder.hash_n_to_hash_no_pad::<HF>(hash_inputs);
-        let data_i_hash = hash_n_with_padding::<F,D,HF>(builder, hash_inputs);
+        let data_i_hash = hash_n_no_padding::<F,D,HF>(builder, hash_inputs);
         // make the counter into hash digest
         let ctr_target = builder.constant(F::from_canonical_u64((i+1) as u64));
         let mut ctr = builder.add_virtual_hash();
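The swap from the padded to the no-padding sponge for cell data relies on each cell contributing a whole number of rate-sized chunks. A hedged sketch of the kind of guard a caller might add (the field name n_field_elems_per_cell comes from CircuitParams in this commit; the rate value 8 is the one the sponge fixes):

    use codex_plonky2_circuits::circuits::params::CircuitParams;

    fn check_cell_len(circuit_params: &CircuitParams) {
        let rate = 8; // fixed sponge rate assumed by hash_n_no_padding
        assert_eq!(
            circuit_params.n_field_elems_per_cell % rate, 0,
            "cell data length must be a multiple of the sponge rate for the no-padding hash"
        );
    }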
@@ -50,7 +50,8 @@ pub fn hash_n_to_m_with_padding<
             if let Some(&input) = input_iter.next() {
                 chunk.push(input);
             } else {
-                chunk.push(zero); // Should not happen, but pad zeros if necessary
+                // should not happen here
+                panic!("Insufficient input elements for chunk; expected more elements.");
             }
         }
         // Add the chunk to the state

@@ -101,3 +102,73 @@ pub fn hash_n_to_m_with_padding<
         state = builder.permute::<H>(state);
     }
 }
 
+/// hash n targets (field elements) into hash digest / HashOutTarget (4 Goldilocks field elements)
+/// this function doesn't pad and expects the input length to be divisible by the rate
+/// rate is fixed at 8 for now.
+pub fn hash_n_no_padding<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+    H: AlgebraicHasher<F>
+>(
+    builder: &mut CircuitBuilder<F, D>,
+    inputs: Vec<Target>,
+) -> HashOutTarget {
+    HashOutTarget::from_vec(hash_n_to_m_no_padding::<F, D, H>(builder, inputs, NUM_HASH_OUT_ELTS))
+}
+
+pub fn hash_n_to_m_no_padding<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+    H: AlgebraicHasher<F>
+>(
+    builder: &mut CircuitBuilder<F, D>,
+    inputs: Vec<Target>,
+    num_outputs: usize,
+) -> Vec<Target> {
+    let rate = H::AlgebraicPermutation::RATE;
+    let width = H::AlgebraicPermutation::WIDTH; // rate + capacity
+    let zero = builder.zero();
+    let one = builder.one();
+    let mut state = H::AlgebraicPermutation::new(core::iter::repeat(zero).take(width));
+
+    // Set the domain separator at index 8
+    let dom_sep_value = rate as u64 + 256 * 12 + 65536 * 8;
+    let dom_sep = builder.constant(F::from_canonical_u64(dom_sep_value));
+    state.set_elt(dom_sep, 8);
+
+    let n = inputs.len();
+    assert_eq!(n % rate, 0, "Input length ({}) must be divisible by rate ({})", n, rate);
+    let num_chunks = n / rate; // number of full chunks; no 10* padding in this variant
+    let mut input_iter = inputs.iter();
+
+    // Process all chunks
+    for _ in 0..num_chunks {
+        let mut chunk = Vec::with_capacity(rate);
+        for _ in 0..rate {
+            if let Some(&input) = input_iter.next() {
+                chunk.push(input);
+            } else {
+                // should not happen here
+                panic!("Insufficient input elements for chunk; expected more elements.");
+            }
+        }
+        // Add the chunk to the state
+        for j in 0..rate {
+            state.set_elt(builder.add(state.as_ref()[j], chunk[j]), j);
+        }
+        // Apply permutation
+        state = builder.permute::<H>(state);
+    }
+    // Squeeze until we have the desired number of outputs
+    let mut outputs = Vec::with_capacity(num_outputs);
+    loop {
+        for &s in state.squeeze() {
+            outputs.push(s);
+            if outputs.len() == num_outputs {
+                return outputs;
+            }
+        }
+        state = builder.permute::<H>(state);
+    }
+}
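With rate = 8, the domain separator above evaluates to 8 + 256*12 + 65536*8 = 527368. A hedged usage sketch of hash_n_no_padding when building a circuit (module paths are assumed from the crate layout visible in this diff; the 16-element input is illustrative, any multiple of 8 passes the assert):

    use plonky2::iop::target::Target;
    use plonky2::plonk::circuit_builder::CircuitBuilder;
    use plonky2::plonk::circuit_data::CircuitConfig;
    use plonky2::plonk::config::GenericConfig;
    use plonky2_poseidon2::config::Poseidon2GoldilocksConfig;
    use codex_plonky2_circuits::circuits::params::HF;
    use codex_plonky2_circuits::circuits::sponge::hash_n_no_padding;

    const D: usize = 2;
    type C = Poseidon2GoldilocksConfig;
    type F = <C as GenericConfig<D>>::F;

    fn build_digest_of_16_targets() {
        let mut builder = CircuitBuilder::<F, D>::new(CircuitConfig::standard_recursion_config());
        // 16 virtual targets: a multiple of the rate (8), as the assert requires
        let inputs: Vec<Target> = builder.add_virtual_targets(16);
        let digest = hash_n_no_padding::<F, D, HF>(&mut builder, inputs);
        builder.register_public_inputs(&digest.elements);
    }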
@@ -12,19 +12,6 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
 
 // --------- helper functions ---------
 
-/// Converts an index to a vector of bits (LSB first) with padding.
-pub fn usize_to_bits_le_padded(index: usize, bit_length: usize) -> Vec<bool> {
-    let mut bits = Vec::with_capacity(bit_length);
-    for i in 0..bit_length {
-        bits.push(((index >> i) & 1) == 1);
-    }
-    // If index requires fewer bits, pad with `false`
-    while bits.len() < bit_length {
-        bits.push(false);
-    }
-    bits
-}
-
 /// assign a vec of bool values to a vec of BoolTargets
 pub fn assign_bool_targets<
     F: RichField + Extendable<D> + Poseidon2,
[File diff suppressed because it is too large]
@@ -4,21 +4,21 @@ use plonky2_field::extension::Extendable;
 use plonky2_field::types::Field;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 use codex_plonky2_circuits::circuits::params::{CircuitParams, HF};
-use crate::params::Params;
-use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le_padded};
+use crate::params::TestParams;
+use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le};
 use codex_plonky2_circuits::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
 use codex_plonky2_circuits::circuits::sample_cells::{Cell, MerklePath, SampleCircuit, SampleCircuitInput};
 use plonky2::iop::witness::PartialWitness;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
 use plonky2::plonk::circuit_data::CircuitConfig;
-use crate::sponge::hash_n_with_padding;
+use crate::sponge::{hash_bytes_no_padding, hash_n_with_padding};
 
-/// generates input witness (SampleCircuitInput) from fake data
-pub fn gen_witness<
+/// generates circuit input (SampleCircuitInput) from fake data for testing,
+/// which can later be stored into json (see json.rs)
+pub fn gen_testing_circuit_input<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
->(params: &Params) -> SampleCircuitInput<F,D>{
+>(params: &TestParams) -> SampleCircuitInput<F,D>{
     let dataset_t = DatasetTree::<F, D>::new_for_testing(&params);
 
     let slot_index = params.testing_slot_index; // samples the specified slot
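A sketch of the intended flow around these helpers, tying generation, the non-circuit check, and the JSON export together (the json module name is assumed from the json.rs file referenced above; everything else is named in this diff):

    use crate::gen_input::{gen_testing_circuit_input, verify_circuit_input};
    use crate::json::export_circ_input_to_json;
    use crate::params::{TestParams, D, F};

    fn make_and_store_input() -> anyhow::Result<()> {
        let params = TestParams::default();
        let circ_input = gen_testing_circuit_input::<F, D>(&params);
        // non-circuit sanity check before writing anything out
        assert!(verify_circuit_input::<F, D>(circ_input.clone(), &params));
        export_circ_input_to_json(circ_input, "input.json")?;
        Ok(())
    }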
@@ -49,20 +49,20 @@ pub fn gen_witness<
     }
 }
 
-/// verifies the witness.
+/// verifies the given circuit input.
 /// this is non circuit version for sanity check
-pub fn verify_witness<
+pub fn verify_circuit_input<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
->(witness: SampleCircuitInput<F,D>, params: &Params) -> bool{
-    let slot_index = witness.slot_index.to_canonical_u64();
-    let slot_root = witness.slot_root.clone();
+>(circ_input: SampleCircuitInput<F,D>, params: &TestParams) -> bool{
+    let slot_index = circ_input.slot_index.to_canonical_u64();
+    let slot_root = circ_input.slot_root.clone();
     // check dataset level proof
-    let slot_proof = witness.slot_proof.clone();
-    let dataset_path_bits = usize_to_bits_le_padded(slot_index as usize, params.dataset_depth());
+    let slot_proof = circ_input.slot_proof.clone();
+    let dataset_path_bits = usize_to_bits_le(slot_index as usize, params.dataset_max_depth());
     let last_index = params.n_slots - 1;
-    let dataset_last_bits = usize_to_bits_le_padded(last_index, params.dataset_depth());
-    let dataset_mask_bits = usize_to_bits_le_padded(last_index, params.dataset_depth()+1);
+    let dataset_last_bits = usize_to_bits_le(last_index, params.dataset_max_depth());
+    let dataset_mask_bits = usize_to_bits_le(last_index, params.dataset_max_depth()+1);
     let reconstructed_slot_root = MerkleProof::<F,D>::reconstruct_root2(
         slot_root,
         dataset_path_bits,
@@ -72,15 +72,15 @@ pub fn verify_witness<
         params.max_slots.trailing_zeros() as usize,
     ).unwrap();
     // assert reconstructed equals dataset root
-    assert_eq!(reconstructed_slot_root, witness.dataset_root.clone());
+    assert_eq!(reconstructed_slot_root, circ_input.dataset_root.clone());
 
     // check each sampled cell
 
     // get the index for cell from H(slot_root|counter|entropy)
-    let mask_bits = usize_to_bits_le_padded(params.n_cells -1, params.max_depth);
+    let mask_bits = usize_to_bits_le(params.n_cells -1, params.max_depth);
     for i in 0..params.n_samples {
         let cell_index_bits = calculate_cell_index_bits(
-            &witness.entropy,
+            &circ_input.entropy,
             slot_root,
             i + 1,
             params.max_depth,
@@ -88,9 +88,8 @@ pub fn verify_witness<
         );
 
         let cell_index = bits_le_padded_to_usize(&cell_index_bits);
-        println!("cell index ={}", cell_index);
 
-        let s_res = verify_cell_proof(&witness, &params, cell_index, i);
+        let s_res = verify_cell_proof(&circ_input, &params, cell_index, i);
         if s_res.unwrap() == false {
             println!("call {} is false", i);
             return false;
@@ -103,10 +102,10 @@ pub fn verify_witness<
 pub fn verify_cell_proof<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
->(witness: &SampleCircuitInput<F,D>, params: &Params, cell_index: usize, ctr: usize) -> anyhow::Result<bool> {
-    let mut block_path_bits = usize_to_bits_le_padded(cell_index, params.max_depth);
+>(circ_input: &SampleCircuitInput<F,D>, params: &TestParams, cell_index: usize, ctr: usize) -> anyhow::Result<bool> {
+    let mut block_path_bits = usize_to_bits_le(cell_index, params.max_depth);
     let last_index = params.n_cells - 1;
-    let mut block_last_bits = usize_to_bits_le_padded(last_index, params.max_depth);
+    let mut block_last_bits = usize_to_bits_le(last_index, params.max_depth);
 
     let split_point = params.bot_depth();
 
@@ -114,13 +113,12 @@ pub fn verify_cell_proof<
     let slot_path_bits = block_path_bits.split_off(split_point);
 
-    // pub type HP = <PoseidonHash as Hasher<F>>::Permutation;
-    let leaf_hash = hash_n_with_padding::<F,D,HF>(&witness.cell_data[ctr].data);
+    // HF::hash_no_pad()
+    let leaf_hash = hash_bytes_no_padding::<F,D,HF>(&circ_input.cell_data[ctr].data);
 
-    let mut block_path = witness.merkle_paths[ctr].path.clone();
+    let mut block_path = circ_input.merkle_paths[ctr].path.clone();
     let slot_path = block_path.split_off(split_point);
 
-    let mut block_mask_bits = usize_to_bits_le_padded(last_index, params.max_depth+1);
+    let mut block_mask_bits = usize_to_bits_le(last_index, params.max_depth+1);
     let mut slot_mask_bits = block_mask_bits.split_off(split_point);
 
     block_mask_bits.push(false);
@@ -143,7 +141,7 @@ pub fn verify_cell_proof<
         params.max_depth - params.bot_depth(),
     );
 
-    Ok(reconstructed_root.unwrap() == witness.slot_root)
+    Ok(reconstructed_root.unwrap() == circ_input.slot_root)
 }
 
 
@@ -151,7 +149,7 @@ pub fn verify_cell_proof<
 pub fn new_random_cell<
     F: RichField + Extendable<D> + Poseidon2,
     const D: usize,
->(params: &Params) -> Cell<F,D> {
+>(params: &TestParams) -> Cell<F,D> {
     let data = (0..params.n_field_elems_per_cell())
         .map(|_| F::rand())
         .collect::<Vec<_>>();
@@ -168,7 +166,7 @@ pub struct SlotTree<
     pub tree: MerkleTree<F, D>, // slot tree
     pub block_trees: Vec<MerkleTree<F,D>>, // vec of block trees
     pub cell_data: Vec<Cell<F, D>>, // cell data as field elements
-    pub params: Params, // parameters
+    pub params: TestParams, // parameters
 }
 
 impl<

@@ -176,7 +174,7 @@ impl<
     const D: usize,
 > SlotTree<F, D> {
     /// Create a slot tree with fake data, for testing only
-    pub fn new_default(params: &Params) -> Self {
+    pub fn new_default(params: &TestParams) -> Self {
         // generate fake cell data
         let cell_data = (0..params.n_cells)
             .map(|_| new_random_cell(params))
@@ -185,10 +183,10 @@ impl<
     }
 
     /// Create a new slot tree with the supplied cell data and parameters
-    pub fn new(cells: Vec<Cell<F, D>>, params: Params) -> Self {
+    pub fn new(cells: Vec<Cell<F, D>>, params: TestParams) -> Self {
         let leaves: Vec<HashOut<F>> = cells
             .iter()
-            .map(|element| hash_n_with_padding::<F,D,HF>(&element.data))
+            .map(|element| hash_bytes_no_padding::<F,D,HF>(&element.data))
             .collect();
         let zero = HashOut {
             elements: [F::ZERO; 4],

@@ -255,7 +253,7 @@ pub struct DatasetTree<
 > {
     pub tree: MerkleTree<F,D>, // dataset tree
     pub slot_trees: Vec<SlotTree<F, D>>, // vec of slot trees
-    pub params: Params, // parameters
+    pub params: TestParams, // parameters
 }
 
 /// Dataset Merkle proof struct, containing the dataset proof and sampled proofs.
@@ -276,7 +274,7 @@ impl<
     const D: usize,
 > DatasetTree<F, D> {
     /// Dataset tree with fake data, for testing only
-    pub fn new_default(params: &Params) -> Self {
+    pub fn new_default(params: &TestParams) -> Self {
         let mut slot_trees = vec![];
         let n_slots = 1 << params.dataset_depth_test();
         for _ in 0..n_slots {

@@ -286,7 +284,7 @@ impl<
     }
 
     /// Create data for only the specified slot index in params
-    pub fn new_for_testing(params: &Params) -> Self {
+    pub fn new_for_testing(params: &TestParams) -> Self {
         let mut slot_trees = vec![];
         // let n_slots = 1 << params.dataset_depth();
         let n_slots = params.n_slots;
@@ -321,7 +319,7 @@ impl<
     }
 
     /// Same as default but with supplied slot trees
-    pub fn new(slot_trees: Vec<SlotTree<F, D>>, params: Params) -> Self {
+    pub fn new(slot_trees: Vec<SlotTree<F, D>>, params: TestParams) -> Self {
         // get the roots of slot trees
         let slot_roots = slot_trees
             .iter()

@@ -344,7 +342,7 @@ impl<
     /// note: proofs are padded based on the params in self
     pub fn sample_slot(&self, index: usize, entropy: usize) -> DatasetProof<F,D> {
         let mut dataset_proof = self.tree.get_proof(index).unwrap();
-        Self::pad_proof(&mut dataset_proof, self.params.dataset_depth());
+        Self::pad_proof(&mut dataset_proof, self.params.dataset_max_depth());
 
         let slot = &self.slot_trees[index];
         let slot_root = slot.tree.root().unwrap();
@@ -355,7 +353,7 @@ impl<
         entropy_as_digest.elements[0] = entropy_field;
 
         // get the index for cell from H(slot_root|counter|entropy)
-        let mask_bits = usize_to_bits_le_padded(self.params.n_cells-1, self.params.max_depth+1);
+        let mask_bits = usize_to_bits_le(self.params.n_cells-1, self.params.max_depth+1);
         for i in 0..self.params.n_samples {
             let cell_index_bits = calculate_cell_index_bits(
                 &entropy_as_digest.elements.to_vec(),
@@ -407,17 +405,17 @@ mod tests {
     // Test sample cells (non-circuit)
    #[test]
     fn test_gen_verify_proof(){
-        let params = Params::default();
-        let w = gen_witness::<F,D>(&params);
-        assert!(verify_witness::<F,D>(w,&params));
+        let params = TestParams::default();
+        let w = gen_testing_circuit_input::<F,D>(&params);
+        assert!(verify_circuit_input::<F,D>(w, &params));
     }
 
     // Test sample cells in-circuit for a selected slot
     #[test]
     fn test_proof_in_circuit() -> anyhow::Result<()> {
-        // get witness
-        let params = Params::default();
-        let witness = gen_witness::<F,D>(&params);
+        // get input
+        let params = TestParams::default();
+        let circ_input = gen_testing_circuit_input::<F,D>(&params);
 
         // Create the circuit
         let config = CircuitConfig::standard_recursion_config();
@@ -425,7 +423,7 @@ mod tests {
 
         let circuit_params = CircuitParams {
             max_depth: params.max_depth,
-            max_log2_n_slots: params.dataset_depth(),
+            max_log2_n_slots: params.dataset_max_depth(),
             block_tree_depth: params.bot_depth(),
             n_field_elems_per_cell: params.n_field_elems_per_cell(),
             n_samples: params.n_samples,

@@ -439,7 +437,7 @@ mod tests {
         let mut pw = PartialWitness::new();
 
         // assign a witness
-        circ.sample_slot_assign_witness(&mut pw, &mut targets, witness);
+        circ.sample_slot_assign_witness(&mut pw, &mut targets, circ_input);
 
         // Build the circuit
         let data = builder.build::<C>();
@@ -2,24 +2,25 @@ use anyhow::{anyhow, Error, Result};
 use serde::{Deserialize, Serialize};
 use std::fs::File;
 use std::io::{BufReader, Write};
-use crate::gen_input::{DatasetTree, gen_witness};
+use crate::gen_input::{DatasetTree, gen_testing_circuit_input};
 use plonky2::hash::hash_types::{HashOut, RichField};
 use plonky2::plonk::config::{GenericConfig, Hasher};
 use plonky2_field::extension::Extendable;
 use plonky2_field::types::Field;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 use codex_plonky2_circuits::circuits::sample_cells::{Cell, MerklePath, SampleCircuitInput};
-use crate::params::Params;
+use crate::params::TestParams;
 
-pub fn export_witness_to_json<
+/// export circuit input to json file
+pub fn export_circ_input_to_json<
     F: RichField + Extendable<D> + Poseidon2 + Serialize,
     const D: usize,
-> (witness: SampleCircuitInput<F, D>, filename: &str) -> Result<()>{
-    // Convert the witness to a serializable format
-    let serializable_witness = SerializableWitness::from_witness(&witness);
+> (circ_input: SampleCircuitInput<F, D>, filename: &str) -> Result<()>{
+    // Convert the circuit input to a serializable format
+    let serializable_circ_input = SerializableCircuitInput::from_circ_input(&circ_input);
 
     // Serialize to JSON
-    let json_data = serde_json::to_string_pretty(&serializable_witness)?;
+    let json_data = serde_json::to_string_pretty(&serializable_circ_input)?;
 
     // Write to file
     let mut file = File::create(filename)?;
@@ -28,23 +29,23 @@ pub fn export_witness_to_json<
 }
 
 
-/// Function to generate witness and export to JSON
-pub fn generate_and_export_witness_to_json<
+/// Function to generate circuit input and export to JSON
+pub fn generate_and_export_circ_input_to_json<
     F: RichField + Extendable<D> + Poseidon2 + Serialize,
     const D: usize,
->( params: &Params, filename: &str) -> anyhow::Result<()> {
+>(params: &TestParams, filename: &str) -> Result<()> {
 
-    let witness = gen_witness::<F,D>(params);
+    let circ_input = gen_testing_circuit_input::<F,D>(params);
 
-    export_witness_to_json(witness, filename)?;
+    export_circ_input_to_json(circ_input, filename)?;
 
     Ok(())
 }
 
 
-// Serializable versions of the witness
+// Serializable versions of the circuit input
 #[derive(Serialize, Deserialize)]
-struct SerializableWitness<
+struct SerializableCircuitInput<
 > {
     dataSetRoot: Vec<String>,
     entropy: Vec<String>,
@@ -58,40 +59,40 @@ struct SerializableWitness<
 }
 
 impl<
-> SerializableWitness{
-    /// from the witness to serializable witness
-    pub fn from_witness<
+> SerializableCircuitInput {
+    /// from the circuit input to serializable circuit input
+    pub fn from_circ_input<
         F: RichField + Extendable<D> + Poseidon2 + Serialize,
         const D: usize,
-    >(witness: &SampleCircuitInput<F, D>) -> Self {
-        SerializableWitness {
-            dataSetRoot: witness
+    >(circ_input: &SampleCircuitInput<F, D>) -> Self {
+        SerializableCircuitInput {
+            dataSetRoot: circ_input
                 .dataset_root
                 .elements
                 .iter()
                 .map(|e| e.to_canonical_u64().to_string())
                 .collect(),
-            entropy: witness
+            entropy: circ_input
                 .entropy
                 .iter()
                 .map(|e| e.to_canonical_u64().to_string())
                 .collect(),
-            nCellsPerSlot: witness.n_cells_per_slot.to_canonical_u64() as usize,
-            nSlotsPerDataSet: witness.n_slots_per_dataset.to_canonical_u64() as usize,
-            slotIndex: witness.slot_index.to_canonical_u64(),
-            slotRoot: witness
+            nCellsPerSlot: circ_input.n_cells_per_slot.to_canonical_u64() as usize,
+            nSlotsPerDataSet: circ_input.n_slots_per_dataset.to_canonical_u64() as usize,
+            slotIndex: circ_input.slot_index.to_canonical_u64(),
+            slotRoot: circ_input
                 .slot_root
                 .elements
                 .iter()
                 .map(|e| e.to_canonical_u64().to_string())
                 .collect(),
-            slotProof: witness
+            slotProof: circ_input
                 .slot_proof
                 .iter()
                 .flat_map(|hash| hash.elements.iter())
                 .map(|e| e.to_canonical_u64().to_string())
                 .collect(),
-            cellData: witness
+            cellData: circ_input
                 .cell_data
                 .iter()
                 .map(|data_vec| {

@@ -101,7 +102,7 @@ impl<
                     .collect()
                 })
                 .collect(),
-            merklePaths: witness
+            merklePaths: circ_input
                 .merkle_paths
                 .iter()
                 .map(|path| {

@@ -115,9 +116,9 @@ impl<
     }
 }
 
-impl<> SerializableWitness {
-    /// from serializable witness to witness
-    pub fn to_witness<
+impl<> SerializableCircuitInput {
+    /// from serializable circuit input to circuit input
+    pub fn to_circ_input<
         F: RichField + Extendable<D> + Poseidon2,
         const D: usize
     >(&self) -> Result<SampleCircuitInput<F, D>> {
@@ -258,87 +259,84 @@ impl<> SerializableWitness {
     }
 }
 
-/// reads the json file, converts it to witness (SampleCircuitInput) and returns it
-pub fn import_witness_from_json<F: RichField + Extendable<D> + Poseidon2, const D: usize>(
+/// reads the json file, converts it to circuit input (SampleCircuitInput) and returns it
+pub fn import_circ_input_from_json<F: RichField + Extendable<D> + Poseidon2, const D: usize>(
     filename: &str,
 ) -> Result<SampleCircuitInput<F, D>> {
     let file = File::open(filename)?;
     let reader = BufReader::new(file);
-    let serializable_witness: SerializableWitness = serde_json::from_reader(reader)?;
+    let serializable_circ_input: SerializableCircuitInput = serde_json::from_reader(reader)?;
 
-    let witness = serializable_witness.to_witness()?;
-    Ok(witness)
+    let circ_input = serializable_circ_input.to_circ_input()?;
+    Ok(circ_input)
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::params::{BOT_DEPTH, C, D, F, MAX_DEPTH, N_CELLS};
+    use crate::params::{C, D, F};
     use std::fs;
     use std::time::Instant;
-    use codex_plonky2_circuits::circuits::params::{CircuitParams, HF};
+    use codex_plonky2_circuits::circuits::params::CircuitParams;
     use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
     use codex_plonky2_circuits::merkle_tree::merkle_safe::MerkleProof;
     use plonky2::iop::witness::PartialWitness;
     use plonky2::plonk::circuit_builder::CircuitBuilder;
     use plonky2::plonk::circuit_data::CircuitConfig;
-    use crate::gen_input::verify_witness;
-    use crate::sponge::hash_n_with_padding;
-    use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le_padded};
+    use crate::gen_input::verify_circuit_input;
 
     // Test to generate the JSON file
     #[test]
-    fn test_export_witness_to_json() -> anyhow::Result<()> {
+    fn test_export_circ_input_to_json() -> Result<()> {
         // Create Params
-        let params = Params::default();
-        // Export the witness to JSON
-        generate_and_export_witness_to_json::<F,D>(&params, "input.json")?;
+        let params = TestParams::default();
+        // Export the circuit input to JSON
+        generate_and_export_circ_input_to_json::<F,D>(&params, "input.json")?;
 
-        println!("Witness exported to input.json");
+        println!("Circuit input exported to input.json");
 
         Ok(())
     }
 
     #[test]
-    fn test_import_witness_from_json() -> anyhow::Result<()> {
-        // Import the witness from the JSON file
+    fn test_import_circ_input_from_json() -> anyhow::Result<()> {
+        // Import the circuit input from the JSON file
         // NOTE: MAKE SURE THE FILE EXISTS
-        let witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
-        println!("Witness imported successfully");
+        let circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("input.json")?;
+        println!("circuit input imported successfully");
 
         Ok(())
     }
 
-    // export the witness and then import it and checks equality
+    // export the circuit input and then import it and checks equality
     #[test]
-    fn test_export_import_witness() -> anyhow::Result<()> {
+    fn test_export_import_circ_input() -> anyhow::Result<()> {
         // Create Params instance
-        let params = Params::default();
+        let params = TestParams::default();
 
-        // Export the witness to JSON
-        let original_witness = gen_witness(&params);
-        export_witness_to_json(original_witness.clone(), "input.json")?;
-        println!("Witness exported to input.json");
+        // Export the circuit input to JSON
+        let original_circ_input = gen_testing_circuit_input(&params);
+        export_circ_input_to_json(original_circ_input.clone(), "input.json")?;
+        println!("circuit input exported to input.json");
 
-        // Import the witness from JSON
-        let imported_witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
-        println!("Witness imported from input.json");
+        // Import the circuit input from JSON
+        let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("input.json")?;
+        println!("circuit input imported from input.json");
 
-        // Compare the original and imported witnesses
-        assert_eq!(original_witness, imported_witness, "Witnesses are not equal");
+        // Compare the original and imported circuit input
+        assert_eq!(original_circ_input, imported_circ_input, "circuit input are not equal");
 
         // cleanup: Remove the generated JSON file
         fs::remove_file("input.json")?;
 
-        println!("Test passed: Original and imported witnesses are equal.");
+        println!("Test passed: Original and imported circuit input are equal.");
 
         Ok(())
     }
 
-    // reads the json input and runs the circuit
+    // reads the json input from file and runs the circuit
    #[test]
-    fn test_json_witness_circuit() -> anyhow::Result<()> {
-        let params = Params::default();
+    fn test_read_json_and_run_circuit() -> anyhow::Result<()> {
+        let params = TestParams::default();
 
         // Create the circuit
         let config = CircuitConfig::standard_recursion_config();

@@ -346,7 +344,7 @@ mod tests {
 
         let circuit_params = CircuitParams {
             max_depth: params.max_depth,
-            max_log2_n_slots: params.dataset_depth(),
+            max_log2_n_slots: params.dataset_max_depth(),
             block_tree_depth: params.bot_depth(),
             n_field_elems_per_cell: params.n_field_elems_per_cell(),
             n_samples: params.n_samples,

@@ -357,11 +355,11 @@ mod tests {
         // Create a PartialWitness and assign
         let mut pw = PartialWitness::new();
 
-        // Import the witness from JSON
-        let imported_witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
-        println!("Witness imported from input.json");
+        // Import the circuit input from JSON
+        let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("input.json")?;
+        println!("circuit input imported from input.json");
 
-        circ.sample_slot_assign_witness(&mut pw, &mut targets, imported_witness);
+        circ.sample_slot_assign_witness(&mut pw, &mut targets, imported_circ_input);
 
         // Build the circuit
         let data = builder.build::<C>();

@@ -383,17 +381,17 @@ mod tests {
     }
 
     // reads the json input and verify (non-circuit)
-    // NOTE: expects the json input proof uses the default params
+    // NOTE: expects that the json input proof uses the default params
     #[test]
-    fn test_json_witness() -> anyhow::Result<()> {
-        let params = Params::default();
+    fn test_read_json_and_verify() -> anyhow::Result<()> {
+        let params = TestParams::default();
 
-        // Import the witness from JSON
-        let imported_witness: SampleCircuitInput<F, D> = import_witness_from_json("input.json")?;
-        println!("Witness imported from input.json");
+        // Import the circuit input from JSON
+        let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("input.json")?;
+        println!("circuit input imported from input.json");
 
         // Verify the proof
-        let ver = verify_witness(imported_witness, &params);
+        let ver = verify_circuit_input(imported_circ_input, &params);
         assert!(
             ver,
             "Merkle proof verification failed"
@@ -4,34 +4,40 @@ use plonky2::hash::poseidon::PoseidonHash;
 use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
 use std::env;
 use anyhow::{Result, Context};
+use codex_plonky2_circuits::circuits::params::CircuitParams;
 use plonky2_poseidon2::config::Poseidon2GoldilocksConfig;
 
 // fake input params
 
-// types
+// test types
 pub const D: usize = 2;
 pub type C = Poseidon2GoldilocksConfig;
 pub type F = <C as GenericConfig<D>>::F; // this is the goldilocks field
 // pub type H = PoseidonHash;
 // pub type HP = <PoseidonHash as plonky2::plonk::config::Hasher<F>>::Permutation;
 
-// hardcoded params for generating proof input
-pub const MAX_DEPTH: usize = 32; // depth of big tree (slot tree depth, includes block tree depth)
-pub const MAX_SLOTS: usize = 256; // maximum number of slots
-pub const CELL_SIZE: usize = 2048; // cell size in bytes
-pub const BLOCK_SIZE: usize = 65536; // block size in bytes
-pub const N_SAMPLES: usize = 5; // number of samples to prove
+// hardcoded default params for generating proof input
+const DEFAULT_MAX_DEPTH: usize = 32; // depth of big tree (slot tree depth, includes block tree depth)
+const DEFAULT_MAX_SLOTS: usize = 256; // maximum number of slots
+const DEFAULT_CELL_SIZE: usize = 2048; // cell size in bytes
+const DEFAULT_BLOCK_SIZE: usize = 65536; // block size in bytes
+const DEFAULT_N_SAMPLES: usize = 5; // number of samples to prove
 
-pub const ENTROPY: usize = 1234567; // external randomness
-pub const SEED: usize = 12345; // seed for creating fake data TODO: not used now
+const DEFAULT_ENTROPY: usize = 1234567; // external randomness
+const DEFAULT_SEED: usize = 12345; // seed for creating fake data TODO: not used now
 
-pub const N_SLOTS: usize = 16; // number of slots in the dataset
-pub const TESTING_SLOT_INDEX: usize = 3; // the index of the slot to be sampled
-pub const N_CELLS: usize = 512; // number of cells in each slot
+const DEFAULT_N_SLOTS: usize = 16; // number of slots in the dataset
+const DEFAULT_SLOT_INDEX: usize = 3; // the index of the slot to be sampled
+const DEFAULT_N_CELLS: usize = 512; // number of cells in each slot
 
 /// Params struct
 #[derive(Clone)]
 pub struct Params {
+    pub circuit_params: CircuitParams,
+    pub test: TestParams,
+}
+
+/// test params
+#[derive(Clone)]
+pub struct TestParams{
     pub max_depth: usize,
     pub max_slots: usize,
     pub cell_size: usize,
@@ -45,25 +51,25 @@ pub struct Params {
 }
 
 /// Implement the Default trait for Params using the hardcoded constants
-impl Default for Params {
+impl Default for TestParams {
     fn default() -> Self {
-        Params {
-            max_depth: MAX_DEPTH,
-            max_slots: MAX_SLOTS,
-            cell_size: CELL_SIZE,
-            block_size: BLOCK_SIZE,
-            n_samples: N_SAMPLES,
-            entropy: ENTROPY,
-            seed: SEED,
-            n_slots: N_SLOTS,
-            testing_slot_index: TESTING_SLOT_INDEX,
-            n_cells: N_CELLS,
+        TestParams {
+            max_depth: DEFAULT_MAX_DEPTH,
+            max_slots: DEFAULT_MAX_SLOTS,
+            cell_size: DEFAULT_CELL_SIZE,
+            block_size: DEFAULT_BLOCK_SIZE,
+            n_samples: DEFAULT_N_SAMPLES,
+            entropy: DEFAULT_ENTROPY,
+            seed: DEFAULT_SEED,
+            n_slots: DEFAULT_N_SLOTS,
+            testing_slot_index: DEFAULT_SLOT_INDEX,
+            n_cells: DEFAULT_N_CELLS,
         }
     }
 }
 
 /// Implement a new function to create Params with custom values
-impl Params {
+impl TestParams {
     pub fn new(
         max_depth: usize,
         max_slots: usize,
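A small sketch of how these defaults are typically used by the tests in this diff, with one field overridden for a specific run (the override value is illustrative and assumes the field is public like the ones shown above):

    use crate::params::TestParams;

    fn params_for_slot_seven() -> TestParams {
        let mut params = TestParams::default();
        params.testing_slot_index = 7; // DEFAULT_SLOT_INDEX is 3
        params
    }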
@@ -76,7 +82,7 @@ impl Params {
         testing_slot_index: usize,
         n_cells: usize,
     ) -> Self {
-        Params {
+        TestParams {
             max_depth,
             max_slots,
             cell_size,

@@ -96,12 +102,12 @@ impl Params {
 
     // N_FIELD_ELEMS_PER_CELL
     pub fn n_field_elems_per_cell(&self) -> usize {
-        self.cell_size * 8 / self.goldilocks_f_size()
+        (self.cell_size + 62) / 62 * 8
     }
 
     // BOT_DEPTH
     pub fn bot_depth(&self) -> usize {
-        (self.block_size / self.cell_size).trailing_zeros() as usize
+        log2(self.block_size / self.cell_size)
     }
 
     // N_CELLS_IN_BLOCKS
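Plugging in the defaults (cell_size = 2048, block_size = 65536), the two derived quantities above work out as below; a self-contained sketch, not code from this commit:

    fn main() {
        let cell_size = 2048usize;   // DEFAULT_CELL_SIZE, bytes
        let block_size = 65536usize; // DEFAULT_BLOCK_SIZE, bytes

        // n_field_elems_per_cell(): (cell_size + 62) / 62 * 8
        let n_field_elems = (cell_size + 62) / 62 * 8;
        assert_eq!(n_field_elems, 272);
        assert_eq!(n_field_elems % 8, 0); // a multiple of the sponge rate, so the no-padding hash applies

        // bot_depth(): log2(block_size / cell_size)
        assert_eq!(block_size / cell_size, 32);
        assert_eq!((block_size / cell_size).trailing_zeros(), 5); // block tree depth = 5
    }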
@@ -125,40 +131,33 @@ impl Params {
     }
 
     // DATASET_DEPTH
-    pub fn dataset_depth(&self) -> usize {
-        self.max_slots.trailing_zeros() as usize
+    pub fn dataset_max_depth(&self) -> usize {
+        // self.max_slots.trailing_zeros() as usize
+        ceiling_log2(self.max_slots)
     }
 
     // DATASET_DEPTH for test
     pub fn dataset_depth_test(&self) -> usize {
-        self.n_slots.trailing_zeros() as usize
-    }
-
-    // n_cells_per_slot (2^max_depth)
-    pub fn n_cells_per_slot(&self) -> usize {
-        1 << self.max_depth
-    }
-
-    // n_slots_per_dataset (2^dataset_depth)
-    pub fn n_slots_per_dataset(&self) -> usize {
-        1 << self.dataset_depth()
+        ceiling_log2(self.n_slots)
+        // self.n_slots.trailing_zeros() as usize
     }
 
 }
 
+pub fn log2(x: usize) -> usize {
+    assert!(x.is_power_of_two(), "Input must be a power of 2.");
+    x.trailing_zeros() as usize
+}
 
-// computed constants
-pub const GOLDILOCKS_F_SIZE: usize = 64;
-pub const N_FIELD_ELEMS_PER_CELL: usize = CELL_SIZE * 8 / GOLDILOCKS_F_SIZE;
-pub const BOT_DEPTH: usize = (BLOCK_SIZE/CELL_SIZE).ilog2() as usize; // block tree depth
+pub fn ceiling_log2(x: usize) -> usize {
+    if x <= 1 {
+        return 0;
+    }
+    usize::BITS as usize - x.saturating_sub(1).leading_zeros() as usize
+}
 
-pub const N_CELLS_IN_BLOCKS: usize = 1<< BOT_DEPTH; //2^BOT_DEPTH
-pub const N_BLOCKS: usize = 1<<(MAX_DEPTH - BOT_DEPTH); // 2^(MAX_DEPTH - BOT_DEPTH)
-
-pub const DATASET_DEPTH: usize = MAX_SLOTS.ilog2() as usize;
-
-/// load params from env
-impl Params {
+/// load test params from env
+impl TestParams {
     pub fn from_env() -> Result<Self> {
         let max_depth = env::var("MAXDEPTH")
             .context("MAXDEPTH not set")?
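A quick check of ceiling_log2 against the old trailing_zeros-based code (a sketch; the function body is copied from the hunk above): both agree on powers of two, and ceiling_log2 rounds up otherwise, which is the reason for the switch to dataset_max_depth.

    fn ceiling_log2(x: usize) -> usize {
        if x <= 1 {
            return 0;
        }
        usize::BITS as usize - x.saturating_sub(1).leading_zeros() as usize
    }

    fn main() {
        assert_eq!(ceiling_log2(256), 8); // power of two: same as 256usize.trailing_zeros()
        assert_eq!(ceiling_log2(16), 4);
        assert_eq!(ceiling_log2(17), 5);  // rounds up; 17usize.trailing_zeros() would give 0
        assert_eq!(ceiling_log2(1), 0);
    }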
@@ -210,7 +209,7 @@ impl Params {
             .parse::<usize>()
             .context("Invalid NCELLS")?;
 
-        Ok(Params {
+        Ok(TestParams {
             max_depth,
             max_slots,
             cell_size,

@@ -224,3 +223,22 @@ impl Params {
         })
     }
 }
 
+/// load params from env
+impl Params {
+    pub fn from_env() -> Result<Self> {
+        let test_params = TestParams::from_env()?;
+        let circuit_params = CircuitParams{
+            max_depth: test_params.max_depth,
+            max_log2_n_slots: test_params.dataset_max_depth(),
+            block_tree_depth: test_params.bot_depth(),
+            n_field_elems_per_cell: test_params.n_field_elems_per_cell(),
+            n_samples: test_params.n_samples,
+        };
+
+        Ok(Params{
+            circuit_params,
+            test: test_params,
+        })
+    }
+}
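A short sketch of loading everything in one call via the new Params::from_env; it requires all the environment variables read by TestParams::from_env, only some of which (MAXDEPTH, NCELLS) appear in the hunks above:

    use crate::params::Params;

    fn load_all() -> anyhow::Result<()> {
        let params = Params::from_env()?;
        // the circuit params are derived from the test params
        assert_eq!(params.circuit_params.max_depth, params.test.max_depth);
        assert_eq!(params.circuit_params.n_samples, params.test.n_samples);
        Ok(())
    }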
@@ -46,7 +46,8 @@ pub fn hash_n_to_m_with_padding<
             if let Some(&input) = input_iter.next() {
                 chunk.push(input);
             } else {
-                chunk.push(zero); // Pad with zeros if necessary (should not happen here)
+                // should not happen here
+                panic!("Insufficient input elements for chunk; expected more elements.");
             }
         }
         // Add the chunk to the state

@@ -97,6 +98,75 @@ pub fn hash_n_to_m_with_padding<
     }
 }
 
+/// sponge function for bytes with no padding
+/// expects the input to be divisible by rate
+/// note: rate is fixed at 8 for now
+/// used here for testing / sanity check
+pub fn hash_bytes_no_padding<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+    H: Hasher<F>
+>(
+    inputs: &[F],
+) -> HashOut<F>{
+    HashOut::<F>::from_vec(hash_bytes_to_m_no_padding::<F, D, H::Permutation>(inputs, NUM_HASH_OUT_ELTS))
+}
+
+pub fn hash_bytes_to_m_no_padding<
+    F: RichField + Extendable<D> + Poseidon2,
+    const D: usize,
+    P: PlonkyPermutation<F>
+>(
+    inputs: &[F],
+    num_outputs: usize,
+) -> Vec<F> {
+    let rate = P::RATE;
+    let width = P::WIDTH; // rate + capacity
+    let zero = F::ZERO;
+    let one = F::ONE;
+    let mut perm = P::new(core::iter::repeat(zero).take(width));
+
+    // Set the domain separator at index 8
+    let domsep_value = F::from_canonical_u64(rate as u64 + 256 * 12 + 65536 * 8);
+    perm.set_elt(domsep_value, 8);
+
+    let n = inputs.len();
+    assert_eq!(n % rate, 0, "Input length ({}) must be divisible by rate ({})", n, rate);
+    let num_chunks = n / rate; // Calculate number of chunks
+    let mut input_iter = inputs.iter();
+
+    // Process all chunks
+    for _ in 0..num_chunks {
+        let mut chunk = Vec::with_capacity(rate);
+        for _ in 0..rate {
+            if let Some(&input) = input_iter.next() {
+                chunk.push(input);
+            } else {
+                // should not happen here
+                panic!("Insufficient input elements for chunk; expected more elements.");
+            }
+        }
+        // Add the chunk to the state
+        for j in 0..rate {
+            perm.set_elt(perm.as_ref()[j] + chunk[j], j);
+        }
+        // Apply permutation
+        perm.permute();
+    }
+
+    // Squeeze outputs until we have the desired number
+    let mut outputs = Vec::with_capacity(num_outputs);
+    loop {
+        for &item in perm.squeeze() {
+            outputs.push(item);
+            if outputs.len() == num_outputs {
+                return outputs;
+            }
+        }
+        perm.permute();
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use plonky2::field::types::Field;
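A hedged sketch of calling the plain (non-circuit) no-padding sponge on one cell's worth of field elements, mirroring how SlotTree::new now hashes leaves; 272 is the element count implied by the default 2048-byte cell and is a multiple of the rate 8:

    use plonky2_field::types::Field;
    use codex_plonky2_circuits::circuits::params::HF;
    use crate::params::{D, F};
    use crate::sponge::hash_bytes_no_padding;

    fn leaf_digest_example() {
        // deterministic stand-in for real cell data
        let cell_data: Vec<F> = (0..272).map(|i| F::from_canonical_u64(i as u64)).collect();
        let digest = hash_bytes_no_padding::<F, D, HF>(&cell_data);
        println!("{:?}", digest.elements);
    }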
@@ -13,7 +13,7 @@ use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 use serde::Serialize;
 use codex_plonky2_circuits::circuits::merkle_circuit::{MerkleProofTarget, MerkleTreeCircuit, MerkleTreeTargets};
 use codex_plonky2_circuits::circuits::utils::{assign_bool_targets, assign_hash_out_targets};
-use crate::utils::usize_to_bits_le_padded;
+use crate::utils::usize_to_bits_le;
 
 use codex_plonky2_circuits::merkle_tree::merkle_safe::MerkleTree;
@@ -117,11 +117,10 @@ mod tests {
     use plonky2::iop::witness::PartialWitness;
     use plonky2::plonk::circuit_builder::CircuitBuilder;
     use plonky2_field::goldilocks_field::GoldilocksField;
-    // use crate::circuits::utils::usize_to_bits_le_padded;
     // use crate::merkle_tree::merkle_safe::MerkleTree;
 
     // NOTE: for now these tests don't check the reconstructed root is equal to expected_root
-    // will be fixed later, but for that test check the prove_single_cell tests
+    // will be fixed later, but for that test check the other tests in this crate
     #[test]
     fn test_build_circuit() -> anyhow::Result<()> {
         // circuit params
@@ -176,10 +175,10 @@ mod tests {
             builder.connect(expected_root.elements[i], reconstructed_root_target.elements[i]);
         }
 
-        let path_bits = usize_to_bits_le_padded(leaf_index, max_depth);
+        let path_bits = usize_to_bits_le(leaf_index, max_depth);
         let last_index = (nleaves - 1) as usize;
-        let last_bits = usize_to_bits_le_padded(last_index, max_depth);
-        let mask_bits = usize_to_bits_le_padded(last_index, max_depth+1);
+        let last_bits = usize_to_bits_le(last_index, max_depth);
+        let mask_bits = usize_to_bits_le(last_index, max_depth+1);
 
         // circuit input
         let circuit_input = MerkleTreeCircuitInput::<F, D>{

@@ -265,10 +264,10 @@ mod tests {
 
         let mut pw = PartialWitness::new();
 
-        let path_bits = usize_to_bits_le_padded(leaf_index, max_depth);
+        let path_bits = usize_to_bits_le(leaf_index, max_depth);
         let last_index = (nleaves - 1) as usize;
-        let last_bits = usize_to_bits_le_padded(last_index, max_depth);
-        let mask_bits = usize_to_bits_le_padded(last_index, max_depth+1);
+        let last_bits = usize_to_bits_le(last_index, max_depth);
+        let mask_bits = usize_to_bits_le(last_index, max_depth+1);
 
         // circuit input
         let circuit_input = MerkleTreeCircuitInput::<F, D>{
@@ -9,20 +9,41 @@ use codex_plonky2_circuits::circuits::params::HF;
 use anyhow::Result;
 use plonky2::hash::hashing::PlonkyPermutation;
 use crate::sponge::hash_n_with_padding;
 
 // --------- helper functions ---------
 
-/// Converts an index to a vector of bits (LSB first) with padding.
-pub(crate) fn usize_to_bits_le_padded(index: usize, bit_length: usize) -> Vec<bool> {
+/// Converts an index to a vector of bits (LSB first) no padding.
+pub(crate) fn usize_to_bits_le(index: usize, bit_length: usize) -> Vec<bool> {
+    // Assert that the index can fit within the given bit length.
+    assert!(
+        index < (1 << bit_length),
+        "Index ({}) does not fit in {} bits",
+        index,
+        bit_length
+    );
+
     let mut bits = Vec::with_capacity(bit_length);
     for i in 0..bit_length {
         bits.push(((index >> i) & 1) == 1);
     }
-    // If index requires fewer bits, pad with `false`
-    while bits.len() < bit_length {
-        bits.push(false);
-    }
+
+    // No padding
     bits
 }
 
+/// returns the first bit_length bits of index
+pub(crate) fn low_bits(index: usize, bit_length: usize) -> Vec<bool> {
+
+    let mut bits = Vec::with_capacity(bit_length);
+
+    for i in 0..bit_length {
+        // get the i-th bit and push its bool value
+        bits.push(((index >> i) & 1) == 1);
+    }
+
+    bits
+}
+
 /// calculate the sampled cell index from entropy, slot root, and counter
 /// this is the non-circuit version for testing
 pub(crate) fn calculate_cell_index_bits<

@@ -39,7 +60,7 @@ pub(crate) fn calculate_cell_index_bits<
     let hash_output = hash_n_with_padding::<F,D,HF>(&hash_inputs);
     let cell_index_bytes = hash_output.elements[0].to_canonical_u64();
 
-    let cell_index_bits = usize_to_bits_le_padded(cell_index_bytes as usize, depth);
+    let cell_index_bits = low_bits(cell_index_bytes as usize, depth);
 
     let mut masked_cell_index_bits = vec![];
 
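To make the difference between the two helpers concrete, a sketch of a unit test one might add in this module (not part of the commit):

    #[cfg(test)]
    mod bit_helper_tests {
        use super::{low_bits, usize_to_bits_le};

        #[test]
        fn bits_examples() {
            // 6 = 0b110, LSB first over 4 bits -> [0, 1, 1, 0]
            assert_eq!(usize_to_bits_le(6, 4), vec![false, true, true, false]);
            // low_bits never checks the range: it simply keeps the low bits
            assert_eq!(low_bits(6, 2), vec![false, true]);
            // usize_to_bits_le(6, 2) would panic, since 6 does not fit in 2 bits
        }
    }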
workflow/input.json (2523 lines): file diff suppressed because it is too large.