organize and refactor proof-input
This commit is contained in:
parent bb2f44a914
commit 08c8c7fea8
27 proof-input/src/hash/key_compress.rs Executable file
@@ -0,0 +1,27 @@
use plonky2::hash::hash_types::{HashOut, NUM_HASH_OUT_ELTS, RichField};
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::plonk::config::Hasher;
use plonky2_field::extension::Extendable;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;

/// Compression function which takes two 256-bit inputs (HashOut) and a u64 key (converted to a field element inside the function)
/// and returns a 256-bit output (HashOut / 4 Goldilocks field elements).
pub fn key_compress<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
    H: Hasher<F>
>(x: HashOut<F>, y: HashOut<F>, key: u64) -> HashOut<F> {
    let key_field = F::from_canonical_u64(key);

    // state layout: [x (4 elts) | y (4 elts) | key | zeros]
    let mut perm = H::Permutation::new(core::iter::repeat(F::ZERO));
    perm.set_from_slice(&x.elements, 0);
    perm.set_from_slice(&y.elements, NUM_HASH_OUT_ELTS);
    perm.set_elt(key_field, NUM_HASH_OUT_ELTS * 2);

    perm.permute();

    HashOut {
        elements: perm.squeeze()[..NUM_HASH_OUT_ELTS].try_into().unwrap(),
    }
}
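// For orientation, a minimal call site might look like the sketch below
// (hypothetical usage, not part of this commit; assumes the Goldilocks field and
// the Poseidon2Hash hasher used in this repo's tests, with key_compress in scope):
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::HashOut;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;

fn demo() {
    type F = GoldilocksField;
    const D: usize = 2;
    // two arbitrary child digests
    let x = HashOut { elements: [F::ONE; 4] };
    let y = HashOut { elements: [F::TWO; 4] };
    // key 0 corresponds to KEY_NONE in this crate's Merkle-tree code
    let parent = key_compress::<F, D, Poseidon2Hash>(x, y, 0);
    assert_eq!(parent.elements.len(), 4);
}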
2 proof-input/src/hash/mod.rs Normal file
@@ -0,0 +1,2 @@
pub mod sponge;
pub mod key_compress;
292 proof-input/src/hash/sponge.rs Executable file
@@ -0,0 +1,292 @@
use plonky2::hash::hash_types::{HashOut, NUM_HASH_OUT_ELTS, RichField};
use plonky2_field::extension::Extendable;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use plonky2::plonk::config::Hasher;
use plonky2::hash::hashing::PlonkyPermutation;

/// sponge function similar to the in-circuit one
/// used here for testing / sanity check
pub fn hash_n_with_padding<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
    H: Hasher<F>
>(
    inputs: &[F],
) -> HashOut<F> {
    HashOut::<F>::from_vec(hash_n_to_m_with_padding::<F, D, H::Permutation>(inputs, NUM_HASH_OUT_ELTS))
}

pub fn hash_n_to_m_with_padding<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
    P: PlonkyPermutation<F>
>(
    inputs: &[F],
    num_outputs: usize,
) -> Vec<F> {
    let rate = P::RATE;
    let width = P::WIDTH; // rate + capacity
    let zero = F::ZERO;
    let one = F::ONE;
    let mut perm = P::new(core::iter::repeat(zero).take(width));

    // Set the domain separator at index 8
    let domsep_value = F::from_canonical_u64(rate as u64 + 256 * 12 + 65536 * 63);
    perm.set_elt(domsep_value, 8);

    let input_n = inputs.len();
    let num_chunks = (input_n + rate) / rate; // Calculate number of chunks with 10* padding
    let mut input_iter = inputs.iter();

    // Process all chunks except the last one
    for _ in 0..(num_chunks - 1) {
        let mut chunk = Vec::with_capacity(rate);
        for _ in 0..rate {
            if let Some(&input) = input_iter.next() {
                chunk.push(input);
            } else {
                // should not happen here
                panic!("Insufficient input elements for chunk; expected more elements.");
            }
        }
        // Add the chunk to the state
        for j in 0..rate {
            perm.set_elt(perm.as_ref()[j] + chunk[j], j);
        }
        // Apply permutation
        perm.permute();
    }

    // Process the last chunk with 10* padding
    let rem = num_chunks * rate - input_n; // Number of padding elements (0 < rem <= rate)
    let ofs = rate - rem; // Offset where padding starts

    let mut last_chunk = Vec::with_capacity(rate);
    // Absorb remaining inputs
    for _ in 0..ofs {
        if let Some(&input) = input_iter.next() {
            last_chunk.push(input);
        } else {
            last_chunk.push(zero);
        }
    }
    // Add the '1' padding element
    last_chunk.push(one);
    // Pad with zeros to reach the full rate
    while last_chunk.len() < rate {
        last_chunk.push(zero);
    }

    // Add the last chunk to the state
    for j in 0..rate {
        perm.set_elt(perm.as_ref()[j] + last_chunk[j], j);
    }
    // Apply permutation
    perm.permute();

    // Squeeze outputs until we have the desired number
    let mut outputs = Vec::with_capacity(num_outputs);
    loop {
        for &item in perm.squeeze() {
            outputs.push(item);
            if outputs.len() == num_outputs {
                return outputs;
            }
        }
        perm.permute();
    }
}
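// A quick worked example of the padding arithmetic above (with the fixed rate of 8):
// for 13 inputs, num_chunks = (13 + 8) / 8 = 2, rem = 2 * 8 - 13 = 3 and ofs = 5,
// so the last chunk absorbs 5 inputs followed by the one element and two zeros.
// When the input length is an exact multiple of the rate, a whole extra chunk
// (one followed by seven zeros) is absorbed, which is what keeps the 10* padding injective.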
/// sponge function for hashing without padding
/// expects the input to be divisible by rate
/// note: rate is fixed at 8 for now
/// used here for input generation and testing / sanity check
pub fn hash_n_no_padding<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
    H: Hasher<F>
>(
    inputs: &[F],
) -> HashOut<F> {
    HashOut::<F>::from_vec(hash_n_to_m_no_padding::<F, D, H::Permutation>(inputs, NUM_HASH_OUT_ELTS))
}

pub fn hash_n_to_m_no_padding<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
    P: PlonkyPermutation<F>
>(
    inputs: &[F],
    num_outputs: usize,
) -> Vec<F> {
    let rate = P::RATE;
    let width = P::WIDTH; // rate + capacity
    let zero = F::ZERO;
    let mut perm = P::new(core::iter::repeat(zero).take(width));

    // Set the domain separator at index 8
    let domsep_value = F::from_canonical_u64(rate as u64 + 256 * 12 + 65536 * 8);
    perm.set_elt(domsep_value, 8);

    let n = inputs.len();
    assert_eq!(n % rate, 0, "Input length ({}) must be divisible by rate ({})", n, rate);
    let num_chunks = n / rate; // Calculate number of chunks
    let mut input_iter = inputs.iter();

    // Process all chunks
    for _ in 0..num_chunks {
        let mut chunk = Vec::with_capacity(rate);
        for _ in 0..rate {
            if let Some(&input) = input_iter.next() {
                chunk.push(input);
            } else {
                // should not happen here
                panic!("Insufficient input elements for chunk; expected more elements.");
            }
        }
        // Add the chunk to the state
        for j in 0..rate {
            perm.set_elt(perm.as_ref()[j] + chunk[j], j);
        }
        // Apply permutation
        perm.permute();
    }

    // Squeeze outputs until we have the desired number
    let mut outputs = Vec::with_capacity(num_outputs);
    loop {
        for &item in perm.squeeze() {
            outputs.push(item);
            if outputs.len() == num_outputs {
                return outputs;
            }
        }
        perm.permute();
    }
}
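// Note that the no-padding variant uses a different domain-separator constant
// (65536 * 8 where the padded variant uses 65536 * 63), so the padded and
// unpadded modes are domain-separated and cannot silently stand in for each other.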
/// sponge function for bytes
/// note: rate is fixed at 8 for now
/// used here for testing / sanity check
pub fn hash_bytes<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
    H: Hasher<F>
>(
    inputs: &[u8],
) -> HashOut<F> {
    HashOut::<F>::from_vec(hash_bytes_with_padding::<F, D, H::Permutation>(inputs, NUM_HASH_OUT_ELTS))
}

pub fn hash_bytes_with_padding<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
    P: PlonkyPermutation<F>
>(
    inputs: &[u8],
    num_outputs: usize,
) -> Vec<F> {
    let rate = P::RATE;
    let width = P::WIDTH; // rate + capacity
    let zero = F::ZERO;
    let mut perm = P::new(core::iter::repeat(zero).take(width));

    // Set the domain separator at index 8
    let domsep_value = F::from_canonical_u64(rate as u64 + 256 * 12 + 65536 * 63);
    perm.set_elt(domsep_value, 8);

    let byte_rate = 62;
    let input_n = inputs.len();
    let num_chunks = (input_n + byte_rate) / byte_rate; // Calculate number of chunks with 10* padding
    let mut input_iter = inputs.iter();

    // Process all chunks except the last one
    for _ in 0..(num_chunks - 1) {
        let mut chunk = Vec::with_capacity(byte_rate);
        for _ in 0..byte_rate {
            if let Some(&input) = input_iter.next() {
                chunk.push(input);
            } else {
                panic!("Insufficient input elements for chunk; expected more elements.");
            }
        }
        let chunk_felts = convert_bytes_to_field_rate8(&chunk);
        // Add the chunk to the state
        for j in 0..rate {
            perm.set_elt(perm.as_ref()[j] + chunk_felts[j], j);
        }
        // Apply permutation
        perm.permute();
    }

    // Process the last chunk with 10* padding
    let rem = num_chunks * byte_rate - input_n; // Number of padding bytes (0 < rem <= byte_rate)
    let ofs = byte_rate - rem; // Offset where padding starts

    let mut last_chunk = Vec::with_capacity(byte_rate);
    // Absorb remaining inputs
    for _ in 0..ofs {
        if let Some(&input) = input_iter.next() {
            last_chunk.push(input);
        } else {
            panic!("Insufficient input elements for last chunk; expected more elements!");
        }
    }
    // Add the 0x01 padding byte
    last_chunk.push(1u8);
    // Pad with zeros to reach the full byte rate
    while last_chunk.len() < byte_rate {
        last_chunk.push(0u8);
    }

    let last_chunk_felts = convert_bytes_to_field_rate8(&last_chunk);

    // Add the last chunk to the state
    for j in 0..rate {
        perm.set_elt(perm.as_ref()[j] + last_chunk_felts[j], j);
    }
    // Apply permutation
    perm.permute();

    // Squeeze outputs until we have the desired number
    let mut outputs = Vec::with_capacity(num_outputs);
    loop {
        for &item in perm.squeeze() {
            outputs.push(item);
            if outputs.len() == num_outputs {
                return outputs;
            }
        }
        perm.permute();
    }
}
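// The byte rate of 62 is chosen to match the field rate of 8: each 31-byte half
// is packed into 4 limbs of 62 bits, so one 62-byte chunk yields exactly the
// 8 field elements (8 * 62 = 496 bits = 62 bytes) absorbed per permutation call.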
/// Convert 31 little-endian bytes into 4 field element limbs (62 bits each).
fn convert_31_bytes_to_4_felts(ptr: &[u8]) -> [u64; 4] {
    assert!(ptr.len() >= 31, "Need at least 31 bytes, got {}", ptr.len());
    // Read (overlapping) 8-byte chunks as little-endian words
    let q0 = u64::from_le_bytes(ptr[0..8].try_into().unwrap());    // bits 0..63
    let q7 = u64::from_le_bytes(ptr[7..15].try_into().unwrap());   // bits 56..119
    let q15 = u64::from_le_bytes(ptr[15..23].try_into().unwrap()); // bits 120..183
    let q23 = u64::from_le_bytes(ptr[23..31].try_into().unwrap()); // bits 184..247
    const MASK: u64 = 0x3fffffffffffffff; // low 62 bits
    let mut felts = [0u64; 4];
    felts[0] = q0 & MASK;                                   // bits 0..61
    felts[1] = (q7 >> 6) | ((ptr[15] as u64 & 0x0f) << 58); // bits 62..123
    felts[2] = (q15 >> 4) | ((ptr[23] as u64 & 0x03) << 60); // bits 124..185
    felts[3] = q23 >> 2;                                    // bits 186..247
    felts
}

/// Convert 62 bytes (rate 8) into 8 field element limbs by two 31-byte conversions.
pub fn convert_bytes_to_field_rate8<F: RichField + Extendable<D> + Poseidon2, const D: usize>(ptr: &[u8]) -> [F; 8] {
    assert!(ptr.len() >= 62, "Need at least 62 bytes for rate 8, got {}", ptr.len());
    let mut felts = [F::ZERO; 8];
    let a = convert_31_bytes_to_4_felts(&ptr[0..31]);
    let a_felts = a.map(|x| F::from_canonical_u64(x));
    let b = convert_31_bytes_to_4_felts(&ptr[31..62]);
    let b_felts = b.map(|x| F::from_canonical_u64(x));
    felts[0..4].copy_from_slice(&a_felts);
    felts[4..8].copy_from_slice(&b_felts);
    felts
}
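// A sanity check one could add here (a sketch, not part of this commit): every
// limb above is at most 62 bits, hence strictly below the Goldilocks modulus,
// so from_canonical_u64 never wraps.
#[test]
fn limbs_fit_in_62_bits() {
    // worst case: all 248 input bits set
    let bytes = [0xffu8; 31];
    for &limb in convert_31_bytes_to_4_felts(&bytes).iter() {
        assert!(limb < (1u64 << 62));
    }
}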
@@ -7,8 +7,8 @@ use codex_plonky2_circuits::circuits::sample_cells::Cell;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
 use crate::params::{InputParams, HF};
-use crate::sponge::hash_bytes_no_padding;
-use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le};
+use crate::hash::sponge::hash_n_no_padding;
+use crate::input_generator::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, usize_to_bits_le};

 // ----------------- slot tree -----------------
 #[derive(Clone)]
@@ -41,7 +41,7 @@ impl<
     pub fn new(cells: Vec<Cell<F, D>>, params: InputParams) -> Self {
         let leaves: Vec<HashOut<F>> = cells
             .iter()
-            .map(|element| hash_bytes_no_padding::<F,D,HF>(&element.data))
+            .map(|element| hash_n_no_padding::<F,D,HF>(&element.data))
             .collect();

         let n_blocks = params.n_blocks_test();
@ -4,13 +4,13 @@ use plonky2::hash::hash_types::RichField;
|
||||
use plonky2_field::extension::Extendable;
|
||||
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
|
||||
use crate::params::{Params,InputParams};
|
||||
use crate::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, ceiling_log2, usize_to_bits_le};
|
||||
use crate::input_generator::utils::{bits_le_padded_to_usize, calculate_cell_index_bits, ceiling_log2, usize_to_bits_le};
|
||||
use crate::merkle_tree::merkle_safe::MerkleProof;
|
||||
use codex_plonky2_circuits::circuits::sample_cells::{MerklePath, SampleCircuitInput};
|
||||
use plonky2::plonk::config::AlgebraicHasher;
|
||||
use crate::data_structs::DatasetTree;
|
||||
use crate::serialization::circuit_input::export_circ_input_to_json;
|
||||
use crate::sponge::hash_bytes_no_padding;
|
||||
use crate::input_generator::data_structs::DatasetTree;
|
||||
use crate::input_generator::serialization::export_circ_input_to_json;
|
||||
use crate::hash::sponge::hash_n_no_padding;
|
||||
|
||||
/// Input Generator to generates circuit input (SampleCircuitInput)
|
||||
/// which can be later stored into json see json.rs
|
||||
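// The tests removed further down in this commit illustrate the intended use of
// this generator; roughly (a sketch, with the types those tests used):
//
//     type F = GoldilocksField;
//     const D: usize = 2;
//     type H = PoseidonHash;
//
//     let input_gen = InputGenerator::<F, D, H>::default();
//     let w = input_gen.gen_testing_circuit_input();
//     assert!(input_gen.verify_circuit_input(w));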
@@ -164,7 +164,7 @@ impl<
         let slot_path_bits = block_path_bits.split_off(split_point);

         // pub type HP = <PoseidonHash as Hasher<F>>::Permutation;
-        let leaf_hash = hash_bytes_no_padding::<F,D,H>(&circ_input.cell_data[ctr].data);
+        let leaf_hash = hash_n_no_padding::<F,D,H>(&circ_input.cell_data[ctr].data);

         let mut block_path = circ_input.merkle_paths[ctr].path.clone();
         let slot_path = block_path.split_off(split_point);
@@ -195,64 +195,3 @@ impl<
         Ok(reconstructed_root.unwrap() == circ_input.slot_root)
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use std::time::Instant;
-    use plonky2::hash::poseidon::PoseidonHash;
-    use plonky2::plonk::config::PoseidonGoldilocksConfig;
-    use plonky2::plonk::proof::ProofWithPublicInputs;
-    use plonky2_field::goldilocks_field::GoldilocksField;
-    use super::*;
-    use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
-    use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
-
-    // types used in all tests
-    type F = GoldilocksField;
-    const D: usize = 2;
-    type H = PoseidonHash;
-    type C = PoseidonGoldilocksConfig;
-
-    // Test sample cells (non-circuit)
-    #[test]
-    fn test_gen_verify_proof() {
-        let input_gen = InputGenerator::<F,D,H>::default();
-        let w = input_gen.gen_testing_circuit_input();
-        assert!(input_gen.verify_circuit_input(w));
-    }
-
-    // Test sample cells in-circuit for a selected slot
-    #[test]
-    fn test_proof_in_circuit() -> anyhow::Result<()> {
-        // get input
-        let mut params = Params::default();
-        params.set_n_samples(10);
-        let input_params = params.input_params;
-        let circuit_params = params.circuit_params;
-        let input_gen = InputGenerator::<F,D,H>::new(input_params);
-        let circ_input = input_gen.gen_testing_circuit_input();
-
-        // build the circuit
-        let circ = SampleCircuit::<F,D,H>::new(circuit_params.clone());
-        let (targets, data) = circ.build_with_standard_config()?;
-        println!("circuit size = {:?}", data.common.degree_bits());
-
-        // separate the prover and verifier
-        let verifier_data = data.verifier_data();
-        let prover_data = data.prover_data();
-
-        // Prove the circuit using the circuit input
-        let start_time = Instant::now();
-        let proof_with_pis: ProofWithPublicInputs<F, C, D> = circ.prove(&targets, &circ_input, &prover_data)?;
-        println!("prove_time = {:?}", start_time.elapsed());
-
-        // Verify the proof
-        assert!(
-            verifier_data.verify(proof_with_pis).is_ok(),
-            "Merkle proof verification failed"
-        );
-
-        Ok(())
-    }
-
-}
6 proof-input/src/input_generator/mod.rs Normal file
@@ -0,0 +1,6 @@
pub mod gen_input;
pub mod utils;
pub mod data_structs;
pub mod serialization;

pub use gen_input::InputGenerator;
108 proof-input/src/serialization/circuit_input.rs → proof-input/src/input_generator/serialization.rs Executable file → Normal file
@@ -276,111 +276,3 @@ pub fn import_circ_input_from_json<
     let circ_input = serializable_circ_input.to_circ_input()?;
     Ok(circ_input)
 }
-
-#[cfg(test)]
-mod tests {
-    use crate::params::{C, D, F, HF, Params};
-    use codex_plonky2_circuits::circuits::sample_cells::{SampleCircuit, SampleCircuitInput};
-    use plonky2::plonk::circuit_data::{ProverCircuitData, VerifierCircuitData};
-    use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
-    use crate::gen_input::InputGenerator;
-    use crate::serialization::circuit_input::{export_circ_input_to_json, import_circ_input_from_json};
-
-    // Test to generate the JSON file
-    #[test]
-    fn test_export_circ_input_to_json() -> anyhow::Result<()> {
-        // Create InputGenerator
-        let input_gen = InputGenerator::<F,D,HF>::default();
-        // Export the circuit input to JSON
-        input_gen.generate_and_export_circ_input_to_json("../output/test/")?;
-
-        println!("Circuit input exported to input.json");
-
-        Ok(())
-    }
-
-    #[test]
-    fn test_import_circ_input_from_json() -> anyhow::Result<()> {
-        // Import the circuit input from the JSON file
-        // NOTE: MAKE SURE THE FILE EXISTS
-        let _circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("../output/test/")?;
-        println!("circuit input imported successfully");
-
-        Ok(())
-    }
-
-    // export the circuit input, then import it and check equality
-    #[test]
-    fn test_export_import_circ_input() -> anyhow::Result<()> {
-        // Create InputGenerator
-        let input_gen = InputGenerator::<F,D,HF>::default();
-
-        // Export the circuit input to JSON
-        let original_circ_input = input_gen.gen_testing_circuit_input();
-        export_circ_input_to_json(original_circ_input.clone(), "../output/test/")?;
-        println!("circuit input exported to input.json");
-
-        // Import the circuit input from JSON
-        let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("../output/test/")?;
-        println!("circuit input imported from input.json");
-
-        // Compare the original and imported circuit input
-        assert_eq!(original_circ_input, imported_circ_input, "circuit inputs are not equal");
-
-        // cleanup: Remove the generated JSON file
-        // fs::remove_file("input.json")?;
-
-        println!("Test passed: Original and imported circuit input are equal.");
-
-        Ok(())
-    }
-
-    // reads the json input from file and runs the circuit
-    #[test]
-    fn test_read_json_and_run_circuit() -> anyhow::Result<()> {
-        // Create the circuit
-        let circuit_params = Params::default().circuit_params;
-
-        let circ = SampleCircuit::<F, D, HF>::new(circuit_params.clone());
-        let (targets, data) = circ.build_with_standard_config()?;
-
-        let verifier_data: VerifierCircuitData<F, C, D> = data.verifier_data();
-        let prover_data: ProverCircuitData<F, C, D> = data.prover_data();
-        println!("circuit size = {:?}", verifier_data.common.degree_bits());
-
-        // Import the circuit input from JSON
-        let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("../output/test/")?;
-        println!("circuit input imported from input.json");
-
-        let proof = circ.prove(&targets, &imported_circ_input, &prover_data)?;
-
-        // Verify the proof
-        assert!(
-            verifier_data.verify(proof).is_ok(),
-            "Merkle proof verification failed"
-        );
-
-        Ok(())
-    }
-
-    // reads the json input and verifies (non-circuit)
-    // NOTE: expects that the json input proof uses the default params
-    #[test]
-    fn test_read_json_and_verify() -> anyhow::Result<()> {
-        // Create InputGenerator
-        let input_gen = InputGenerator::<F,D,HF>::default();
-
-        // Import the circuit input from JSON
-        let imported_circ_input: SampleCircuitInput<F, D> = import_circ_input_from_json("../output/test/")?;
-        println!("circuit input imported from input.json");
-
-        // Verify the proof
-        let ver = input_gen.verify_circuit_input(imported_circ_input);
-        assert!(
-            ver,
-            "Merkle proof verification failed"
-        );
-
-        Ok(())
-    }
-}
@@ -2,7 +2,7 @@ use plonky2::hash::hash_types::{HashOut, RichField};
 use plonky2_field::extension::Extendable;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
 use crate::params::HF;
-use crate::sponge::hash_n_with_padding;
+use crate::hash::sponge::hash_n_with_padding;

 // --------- helper functions ---------

@@ -100,7 +100,3 @@ pub fn ceiling_log2(

     (last_bits, mask)
 }
-
-pub fn zero<F: RichField + Extendable<D> + Poseidon2, const D: usize>() -> HashOut<F> {
-    HashOut { elements: [F::ZERO; 4] }
-}
@@ -1,9 +1,4 @@
-
-pub mod gen_input;
 pub mod params;
-pub mod utils;
 pub mod recursion;
-pub mod sponge;
 pub mod merkle_tree;
-pub mod data_structs;
-pub mod serialization;
+pub mod input_generator;
+pub mod hash;

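Net effect of the commit on the crate layout: sponge and key-compress hashing now live under hash::, while gen_input, utils, data_structs, and serialization are grouped under input_generator:: (which re-exports InputGenerator from its mod.rs).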
@@ -1,109 +0,0 @@
use plonky2::hash::hash_types::{HashOut, NUM_HASH_OUT_ELTS, RichField};
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::plonk::config::Hasher;
use plonky2_field::extension::Extendable;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;

/// Compression function which takes two 256-bit inputs (HashOut) and a u64 key (converted to a field element inside the function)
/// and returns a 256-bit output (HashOut / 4 Goldilocks field elements).
pub fn key_compress<
    F: RichField + Extendable<D> + Poseidon2,
    const D: usize,
    H: Hasher<F>
>(x: HashOut<F>, y: HashOut<F>, key: u64) -> HashOut<F> {
    let key_field = F::from_canonical_u64(key);

    let mut perm = H::Permutation::new(core::iter::repeat(F::ZERO));
    perm.set_from_slice(&x.elements, 0);
    perm.set_from_slice(&y.elements, NUM_HASH_OUT_ELTS);
    perm.set_elt(key_field, NUM_HASH_OUT_ELTS * 2);

    perm.permute();

    HashOut {
        elements: perm.squeeze()[..NUM_HASH_OUT_ELTS].try_into().unwrap(),
    }
}

#[cfg(test)]
mod tests {
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
    use plonky2_field::types::Field;
    use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
    use super::*;
    // test types
    pub const D: usize = 2;
    pub type C = PoseidonGoldilocksConfig;
    pub type F = <C as GenericConfig<D>>::F;
    pub type H = Poseidon2Hash;

    /// tests the non-circuit key_compress with concrete cases
    #[test]
    pub fn test_key_compress() {
        let ref_inp_1: [F; 4] = [
            F::from_canonical_u64(0x0000000000000001),
            F::from_canonical_u64(0x0000000000000002),
            F::from_canonical_u64(0x0000000000000003),
            F::from_canonical_u64(0x0000000000000004),
        ];

        let ref_inp_2: [F; 4] = [
            F::from_canonical_u64(0x0000000000000005),
            F::from_canonical_u64(0x0000000000000006),
            F::from_canonical_u64(0x0000000000000007),
            F::from_canonical_u64(0x0000000000000008),
        ];

        let ref_out_key_0: [F; 4] = [
            F::from_canonical_u64(0xc4a4082f411ba790),
            F::from_canonical_u64(0x98c2ed7546c44cce),
            F::from_canonical_u64(0xc9404f373b78c979),
            F::from_canonical_u64(0x65d6b3c998920f59),
        ];

        let ref_out_key_1: [F; 4] = [
            F::from_canonical_u64(0xca47449a05283778),
            F::from_canonical_u64(0x08d3ced2020391ac),
            F::from_canonical_u64(0xda461ea45670fb12),
            F::from_canonical_u64(0x57f2c0b6c98a05c5),
        ];

        let ref_out_key_2: [F; 4] = [
            F::from_canonical_u64(0xe6fcec96a7a7f4b0),
            F::from_canonical_u64(0x3002a22356daa551),
            F::from_canonical_u64(0x899e2c1075a45f3f),
            F::from_canonical_u64(0xf07e38ccb3ade312),
        ];

        let ref_out_key_3: [F; 4] = [
            F::from_canonical_u64(0x9930cff752b046fb),
            F::from_canonical_u64(0x41570687cadcea0b),
            F::from_canonical_u64(0x3ac093a5a92066c7),
            F::from_canonical_u64(0xc45c75a3911cde87),
        ];

        // `HashOut` for inputs
        let inp1 = HashOut { elements: ref_inp_1 };
        let inp2 = HashOut { elements: ref_inp_2 };

        // Expected outputs
        let expected_outputs = [
            ref_out_key_0,
            ref_out_key_1,
            ref_out_key_2,
            ref_out_key_3,
        ];

        // Iterate over each key and test key_compress output
        for (key, &expected) in expected_outputs.iter().enumerate() {
            let output = key_compress::<F, D, H>(inp1, inp2, key as u64);

            // Assert that output matches the expected result
            assert_eq!(output.elements, expected, "Output mismatch for key: {}", key);

            println!("Test passed for key {}", key);
        }
    }
}
@@ -110,186 +110,4 @@ pub fn assign_witness<
         assign_hash_out_targets(pw, &targets.merkle_path.path[i].0, &witnesses.merkle_path[i])?;
     }
     Ok(())
 }
-
-
-#[cfg(test)]
-mod tests {
-    use plonky2::hash::hash_types::HashOut;
-    use plonky2::hash::poseidon::PoseidonHash;
-    use super::*;
-    use crate::merkle_tree::merkle_safe::MerkleTree;
-    use plonky2::plonk::config::Hasher;
-    use crate::utils::usize_to_bits_le;
-    use plonky2::plonk::circuit_data::CircuitConfig;
-    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
-    use plonky2::iop::witness::PartialWitness;
-    use plonky2::plonk::circuit_builder::CircuitBuilder;
-    use plonky2_field::goldilocks_field::GoldilocksField;
-    use plonky2::field::types::Field;
-
-    #[test]
-    fn test_mt_build_circuit() -> anyhow::Result<()> {
-        // circuit params
-        const D: usize = 2;
-        type C = PoseidonGoldilocksConfig;
-        type F = <C as GenericConfig<D>>::F;
-        type H = PoseidonHash;
-
-        // Generate random leaf data
-        let nleaves = 16; // Number of leaves
-        let max_depth = 4;
-        let data = (0..nleaves)
-            .map(|i| GoldilocksField::from_canonical_u64(i))
-            .collect::<Vec<_>>();
-        // Hash the data to obtain leaf hashes
-        let leaves: Vec<HashOut<GoldilocksField>> = data
-            .iter()
-            .map(|&element| {
-                // Hash each field element to get the leaf hash
-                PoseidonHash::hash_no_pad(&[element])
-            })
-            .collect();
-
-        // initialize the Merkle tree
-        let tree = MerkleTree::<F, D, H>::new(&leaves)?;
-
-        // select leaf index to prove
-        let leaf_index: usize = 8;
-
-        // get the Merkle proof for the selected leaf
-        let proof = tree.get_proof(leaf_index)?;
-        // sanity check:
-        let check = proof.verify(tree.layers[0][leaf_index], tree.root().unwrap()).unwrap();
-        assert_eq!(check, true);
-
-        // create the circuit
-        let config = CircuitConfig::standard_recursion_config();
-        let mut builder = CircuitBuilder::<F, D>::new(config);
-        let (mut targets, reconstructed_root_target) = build_circuit::<F,D,H>(&mut builder, max_depth);
-
-        // expected Merkle root
-        let expected_root = builder.add_virtual_hash();
-
-        // check equality with expected root
-        for i in 0..NUM_HASH_OUT_ELTS {
-            builder.connect(expected_root.elements[i], reconstructed_root_target.elements[i]);
-        }
-
-        let path_bits = usize_to_bits_le(leaf_index, max_depth);
-        let last_index = (nleaves - 1) as usize;
-        let last_bits = usize_to_bits_le(last_index, max_depth);
-        let mask_bits = usize_to_bits_le(last_index, max_depth+1);
-
-        // circuit input
-        let circuit_input = MerkleTreeCircuitInput::<F, D> {
-            leaf: tree.layers[0][leaf_index],
-            path_bits,
-            last_bits,
-            mask_bits,
-            merkle_path: proof.path,
-        };
-
-        // create a PartialWitness and assign
-        let mut pw = PartialWitness::new();
-        assign_witness(&mut pw, &mut targets, circuit_input)?;
-        pw.set_hash_target(expected_root, tree.root().unwrap())?;
-
-        // build the circuit
-        let data = builder.build::<C>();
-
-        // Prove the circuit with the assigned witness
-        let proof_with_pis = data.prove(pw)?;
-
-        // verify the proof
-        let verifier_data = data.verifier_data();
-        assert!(
-            verifier_data.verify(proof_with_pis).is_ok(),
-            "Merkle proof verification failed"
-        );
-
-        Ok(())
-    }
-
-    // same as test above but for all leaves
-    #[test]
-    fn test_verify_all_leaves() -> anyhow::Result<()> {
-        const D: usize = 2;
-        type C = PoseidonGoldilocksConfig;
-        type F = <C as GenericConfig<D>>::F;
-        type H = PoseidonHash;
-
-        let nleaves = 16; // Number of leaves
-        let max_depth = 4;
-        let data = (0..nleaves)
-            .map(|i| GoldilocksField::from_canonical_u64(i as u64))
-            .collect::<Vec<_>>();
-        // Hash the data to obtain leaf hashes
-        let leaves: Vec<HashOut<GoldilocksField>> = data
-            .iter()
-            .map(|&element| {
-                // Hash each field element to get the leaf hash
-                PoseidonHash::hash_no_pad(&[element])
-            })
-            .collect();
-
-        let tree = MerkleTree::<F, D, H>::new(&leaves)?;
-
-        let expected_root = tree.root()?;
-
-        let config = CircuitConfig::standard_recursion_config();
-        let mut builder = CircuitBuilder::<F, D>::new(config);
-        let (mut targets, reconstructed_root_target) = build_circuit::<F,D,H>(&mut builder, max_depth);
-
-        // expected Merkle root
-        let expected_root_target = builder.add_virtual_hash();
-
-        // check equality with expected root
-        for i in 0..NUM_HASH_OUT_ELTS {
-            builder.connect(expected_root_target.elements[i], reconstructed_root_target.elements[i]);
-        }
-
-        let data = builder.build::<C>();
-
-        for leaf_index in 0..nleaves {
-            let proof = tree.get_proof(leaf_index)?;
-            let check = proof.verify(tree.layers[0][leaf_index], expected_root)?;
-            assert!(
-                check,
-                "Merkle proof verification failed for leaf index {}",
-                leaf_index
-            );
-
-            let mut pw = PartialWitness::new();
-
-            let path_bits = usize_to_bits_le(leaf_index, max_depth);
-            let last_index = nleaves - 1;
-            let last_bits = usize_to_bits_le(last_index, max_depth);
-            let mask_bits = usize_to_bits_le(last_index, max_depth+1);
-
-            // circuit input
-            let circuit_input = MerkleTreeCircuitInput::<F, D> {
-                leaf: tree.layers[0][leaf_index],
-                path_bits,
-                last_bits,
-                mask_bits,
-                merkle_path: proof.path,
-            };
-
-            assign_witness(&mut pw, &mut targets, circuit_input)?;
-            pw.set_hash_target(expected_root_target, expected_root)?;
-
-            let proof_with_pis = data.prove(pw)?;
-
-            let verifier_data = data.verifier_data();
-            assert!(
-                verifier_data.verify(proof_with_pis).is_ok(),
-                "Merkle proof verification failed in circuit for leaf index {}",
-                leaf_index
-            );
-        }
-
-        Ok(())
-    }
-
-}
@@ -9,8 +9,7 @@ use std::ops::Shr;
 use plonky2::plonk::config::AlgebraicHasher;
 use plonky2_field::extension::Extendable;
 use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
-use crate::merkle_tree::key_compress::key_compress;
-use crate::utils::zero;
+use crate::hash::key_compress::key_compress;

 // Constants for the keys used in compression
 pub const KEY_NONE: u64 = 0x0;
@@ -107,13 +106,13 @@ fn merkle_tree_worker<
         return Ok(vec![xs.to_vec()]);
     }

-    let halfn = m / 2;
-    let n = 2 * halfn;
+    let half_n = m / 2;
+    let n = 2 * half_n;
     let is_odd = n != m;

-    let mut ys = Vec::with_capacity(halfn + if is_odd { 1 } else { 0 });
+    let mut ys = Vec::with_capacity(half_n + if is_odd { 1 } else { 0 });

-    for i in 0..halfn {
+    for i in 0..half_n {
         let key = if is_bottom_layer { KEY_BOTTOM_LAYER } else { KEY_NONE };
         let h = key_compress::<F, D, H>(xs[2 * i], xs[2 * i + 1], key);
         ys.push(h);
@@ -145,7 +144,7 @@ pub struct MerkleProof<
 > {
     pub index: usize, // Index of the leaf
     pub path: Vec<HashOut<F>>, // Sibling hashes from the leaf to the root
-    pub nleaves: usize, // Total number of leaves
+    pub n_leaves: usize, // Total number of leaves
     phantom_data: PhantomData<H>
 }

@@ -157,18 +156,18 @@ impl<
     pub fn new(
         index: usize,
         path: Vec<HashOut<F>>,
-        nleaves: usize,
+        n_leaves: usize,
     ) -> Self {
         Self {
             index,
             path,
-            nleaves,
+            n_leaves,
             phantom_data: PhantomData::default(),
         }
     }
     /// Reconstructs the root hash from the proof and the given leaf.
     pub fn reconstruct_root(&self, leaf: HashOut<F>) -> Result<HashOut<F>> {
-        let mut m = self.nleaves;
+        let mut m = self.n_leaves;
         let mut j = self.index;
         let mut h = leaf;
         let mut bottom_flag = KEY_BOTTOM_LAYER;
@@ -197,6 +196,11 @@ impl<

     /// reconstruct the root using path_bits and last_bits in a similar way to the circuit
     /// this is used for testing - sanity check
+    /// * `leaf`: the leaf hash
+    /// * `path_bits`: the linear index of the leaf, in binary decomposition (least significant bit first)
+    /// * `last_bits`: the index of the last leaf (= nLeaves - 1), in binary decomposition
+    /// * `mask_bits`: the bits of the mask `2^ceilingLog2(size) - 1`
+    /// * `merkle_path`: the Merkle inclusion proof (required hashes, starting from the leaf and ending near the root)
     pub fn reconstruct_root2(leaf: HashOut<F>, path_bits: Vec<bool>, last_bits: Vec<bool>, path: Vec<HashOut<F>>, mask_bits: Vec<bool>, depth: usize) -> Result<HashOut<F>> {
         let is_last = compute_is_last(path_bits.clone(), last_bits);

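For example, following the doc comment above with size = 10 leaves: ceilingLog2(10) = 4, so the mask is 2^4 - 1 = 15 and mask_bits in least-significant-bit-first order is [1, 1, 1, 1, 0], while last_bits encodes index 9 as [1, 0, 0, 1].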
@@ -223,9 +227,11 @@ impl<
             i += 1;
         }

+        let mut mask_bits_corrected = mask_bits.clone();
+        mask_bits_corrected[0] = true;
         let mut reconstructed_root = HashOut::<F>::ZERO;
         for k in 0..depth {
-            let diff = (mask_bits[k] as u64) - (mask_bits[k+1] as u64);
+            let diff = (mask_bits_corrected[k] as u64) - (mask_bits_corrected[k+1] as u64);
             let mul_res: Vec<F> = h[k+1].elements.iter().map(|e| e.mul(F::from_canonical_u64(diff))).collect();
             reconstructed_root = HashOut::<F>::from_vec(
                 mul_res.iter().zip(reconstructed_root.elements).map(|(e1,e2)| e1.add(e2)).collect()
@@ -259,283 +265,6 @@ fn compute_is_last(path_bits: Vec<bool>, last_bits: Vec<bool>) -> Vec<bool> {
     is_last
 }

-#[cfg(test)]
-mod tests {
-    use super::*;
-    use plonky2::field::types::Field;
-    use crate::merkle_tree::key_compress::key_compress;
-    use plonky2::plonk::config::Hasher;
-    use plonky2::hash::poseidon::PoseidonHash;
-    use plonky2::field::goldilocks_field::GoldilocksField;
-
-    // types used in all tests
-    type F = GoldilocksField;
-    const D: usize = 2;
-    type H = PoseidonHash;
-
-    fn compress(
-        x: HashOut<F>,
-        y: HashOut<F>,
-        key: u64,
-    ) -> HashOut<F> {
-        key_compress::<F,D,H>(x, y, key)
-    }
-
-    fn make_tree(
-        data: &[F],
-    ) -> Result<MerkleTree<F, D, H>> {
-        // Hash the data to obtain leaf hashes
-        let leaves: Vec<HashOut<GoldilocksField>> = data
-            .iter()
-            .map(|&element| {
-                // Hash each field element to get the leaf hash
-                H::hash_no_pad(&[element])
-            })
-            .collect();
-
-        MerkleTree::<F, D, H>::new(&leaves)
-    }
-
-    #[test]
-    fn single_proof_test() -> Result<()> {
-        let data = (1u64..=8)
-            .map(|i| F::from_canonical_u64(i))
-            .collect::<Vec<_>>();
-
-        // Hash the data to obtain leaf hashes
-        let leaves: Vec<HashOut<F>> = data
-            .iter()
-            .map(|&element| {
-                // Hash each field element to get the leaf hash
-                H::hash_no_pad(&[element])
-            })
-            .collect();
-
-        // Build the Merkle tree
-        let tree = MerkleTree::<F, D, H>::new(&leaves)?;
-
-        // Get the root
-        let root = tree.root()?;
-
-        // Get a proof for the first leaf
-        let proof = tree.get_proof(0)?;
-
-        // Verify the proof
-        let is_valid = proof.verify(leaves[0], root)?;
-        assert!(is_valid, "Merkle proof verification failed");
-
-        Ok(())
-    }
-
-    #[test]
-    fn test_correctness_even_bottom_layer() -> Result<()> {
-        // Data for the test (field elements)
-        let data = (1u64..=8)
-            .map(|i| F::from_canonical_u64(i))
-            .collect::<Vec<_>>();
-
-        // Hash the data to get leaf hashes
-        let leaf_hashes: Vec<HashOut<F>> = data
-            .iter()
-            .map(|&element| H::hash_no_pad(&[element]))
-            .collect();
-
-        let expected_root =
-            compress(
-                compress(
-                    compress(
-                        leaf_hashes[0],
-                        leaf_hashes[1],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    compress(
-                        leaf_hashes[2],
-                        leaf_hashes[3],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    KEY_NONE,
-                ),
-                compress(
-                    compress(
-                        leaf_hashes[4],
-                        leaf_hashes[5],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    compress(
-                        leaf_hashes[6],
-                        leaf_hashes[7],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    KEY_NONE,
-                ),
-                KEY_NONE,
-            );
-
-        // Build the tree
-        let tree = make_tree(&data)?;
-
-        // Get the computed root
-        let computed_root = tree.root()?;
-
-        // Check that the computed root matches the expected root
-        assert_eq!(computed_root, expected_root);
-
-        Ok(())
-    }
-
-    #[test]
-    fn test_correctness_odd_bottom_layer() -> Result<()> {
-        // Data for the test (field elements)
-        let data = (1u64..=7)
-            .map(|i| F::from_canonical_u64(i))
-            .collect::<Vec<_>>();
-
-        // Hash the data to get leaf hashes
-        let leaf_hashes: Vec<HashOut<F>> = data
-            .iter()
-            .map(|&element| H::hash_no_pad(&[element]))
-            .collect();
-
-        let expected_root =
-            compress(
-                compress(
-                    compress(
-                        leaf_hashes[0],
-                        leaf_hashes[1],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    compress(
-                        leaf_hashes[2],
-                        leaf_hashes[3],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    KEY_NONE,
-                ),
-                compress(
-                    compress(
-                        leaf_hashes[4],
-                        leaf_hashes[5],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    compress(
-                        leaf_hashes[6],
-                        zero::<F,D>(),
-                        KEY_ODD_AND_BOTTOM_LAYER,
-                    ),
-                    KEY_NONE,
-                ),
-                KEY_NONE,
-            );
-
-        // Build the tree
-        let tree = make_tree(&data)?;
-
-        // Get the computed root
-        let computed_root = tree.root()?;
-
-        // Check that the computed root matches the expected root
-        assert_eq!(computed_root, expected_root);
-
-        Ok(())
-    }
-
-    #[test]
-    fn test_correctness_even_bottom_odd_upper_layers() -> Result<()> {
-        // Data for the test (field elements)
-        let data = (1u64..=10)
-            .map(|i| F::from_canonical_u64(i))
-            .collect::<Vec<_>>();
-
-        // Hash the data to get leaf hashes
-        let leaf_hashes: Vec<HashOut<F>> = data
-            .iter()
-            .map(|&element| H::hash_no_pad(&[element]))
-            .collect();
-
-        let expected_root = compress(
-            compress(
-                compress(
-                    compress(
-                        leaf_hashes[0],
-                        leaf_hashes[1],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    compress(
-                        leaf_hashes[2],
-                        leaf_hashes[3],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    KEY_NONE,
-                ),
-                compress(
-                    compress(
-                        leaf_hashes[4],
-                        leaf_hashes[5],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    compress(
-                        leaf_hashes[6],
-                        leaf_hashes[7],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    KEY_NONE,
-                ),
-                KEY_NONE,
-            ),
-            compress(
-                compress(
-                    compress(
-                        leaf_hashes[8],
-                        leaf_hashes[9],
-                        KEY_BOTTOM_LAYER,
-                    ),
-                    zero::<F,D>(),
-                    KEY_ODD,
-                ),
-                zero::<F,D>(),
-                KEY_ODD,
-            ),
-            KEY_NONE,
-        );
-
-        // Build the tree
-        let tree = make_tree(&data)?;
-
-        // Get the computed root
-        let computed_root = tree.root()?;
-
-        // Check that the computed root matches the expected root
-        assert_eq!(computed_root, expected_root);
-
-        Ok(())
-    }
-
-    #[test]
-    fn test_proofs() -> Result<()> {
-        // Data for the test (field elements)
-        let data = (1u64..=10)
-            .map(|i| F::from_canonical_u64(i))
-            .collect::<Vec<_>>();
-
-        // Hash the data to get leaf hashes
-        let leaf_hashes: Vec<HashOut<F>> = data
-            .iter()
-            .map(|&element| H::hash_no_pad(&[element]))
-            .collect();
-
-        // Build the tree
-        let tree = MerkleTree::<F, D, H>::new(&leaf_hashes)?;
-
-        // Get the root
-        let expected_root = tree.root()?;
-
-        // Verify proofs for all leaves
-        for (i, &leaf_hash) in leaf_hashes.iter().enumerate() {
-            let proof = tree.get_proof(i)?;
-            let is_valid = proof.verify(leaf_hash, expected_root)?;
-            assert!(is_valid, "Proof verification failed for leaf {}", i);
-        }
-
-        Ok(())
-    }
-
-    pub fn zero<F: RichField + Extendable<D> + Poseidon2, const D: usize>() -> HashOut<F> {
-        HashOut { elements: [F::ZERO; 4] }
-    }
-}
@@ -1,4 +1,2 @@
 pub mod merkle_safe;
-pub mod key_compress;
 pub mod merkle_circuit;
-pub mod test;

@@ -1,182 +0,0 @@

#[cfg(test)]
mod tests {
    use plonky2::hash::hash_types::{HashOut, RichField};
    use plonky2_field::extension::Extendable;
    use plonky2_poseidon2::poseidon2_hash::poseidon2::{Poseidon2, Poseidon2Hash};
    use anyhow::Result;
    use crate::merkle_tree::merkle_safe::{MerkleProof, MerkleTree};
    use plonky2::field::goldilocks_field::GoldilocksField;
    use plonky2::field::types::Field;

    type F = GoldilocksField;
    type H = Poseidon2Hash;
    const D: usize = 2;

    struct TestCase {
        n: usize,
        digest: [u64; 4],
    }

    fn digest_seq<
        F: RichField + Extendable<D> + Poseidon2,
        const D: usize,
    >(n: usize) -> Vec<HashOut<F>> {
        (0..n)
            .map(|i| HashOut {
                elements: [
                    F::from_canonical_u64((i + 1) as u64),
                    F::ZERO,
                    F::ZERO,
                    F::ZERO,
                ],
            })
            .collect()
    }

    #[test]
    fn test_merkle_roots() -> Result<()> {
        let test_cases: Vec<TestCase> = vec![
            TestCase { n: 1, digest: [0x232f21acc9d346d8, 0x2eba96d3a73822c1, 0x4163308f6d0eff64, 0x5190c2b759734aff] },
            TestCase { n: 2, digest: [0x999dde2cb60b5bdb, 0xacb725a87250a306, 0x8eeb00a6fc173443, 0x5f510b7eeece33bb] },
            TestCase { n: 3, digest: [0x00b72dc0a592b9c0, 0x68575842dd1c6e27, 0x871d5146985881d6, 0xc945d7f3d5fdde00] },
        ];

        for test_case in test_cases {
            let n = test_case.n;
            let expected_digest = test_case.digest;

            // Generate the inputs
            let inputs = digest_seq::<F,D>(n);

            // Build the Merkle tree
            let tree = MerkleTree::<F, D, H>::new(&inputs)?;

            // Get the computed root
            let computed_root = tree.root()?;

            // Construct the expected root hash
            let expected_root = HashOut {
                elements: [
                    F::from_canonical_u64(expected_digest[0]),
                    F::from_canonical_u64(expected_digest[1]),
                    F::from_canonical_u64(expected_digest[2]),
                    F::from_canonical_u64(expected_digest[3]),
                ],
            };

            // Compare computed root to expected digest
            assert_eq!(
                computed_root, expected_root,
                "Mismatch at n = {}",
                n
            );
        }

        Ok(())
    }

    #[test]
    fn test_merkle_proof_with_given_leaf_and_root() -> Result<()> {
        // Parse the root
        let root_elements = vec![
            "14459953088494886308",
            "12400665201701660877",
            "8918969394875474575",
            "3734475392324688728",
        ];
        let root = HashOut {
            elements: root_elements
                .iter()
                .map(|s| {
                    let num = s.parse::<u64>().unwrap();
                    F::from_canonical_u64(num)
                })
                .collect::<Vec<_>>()
                .try_into()
                .unwrap(),
        };

        // Parse the leaf
        let leaf_elements = vec![
            "6216356142838248961",
            "7651361162368135479",
            "8250178335123580371",
            "3813462866599431579",
        ];
        let leaf = HashOut {
            elements: leaf_elements
                .iter()
                .map(|s| {
                    let num = s.parse::<u64>().unwrap();
                    F::from_canonical_u64(num)
                })
                .collect::<Vec<_>>()
                .try_into()
                .unwrap(),
        };

        // Parse the proof
        let proof_strings = vec![
            "1345604040032513712",
            "7222769029677219453",
            "4856886058017005512",
            "17218820401481758629",
            "6741690371018853470",
            "10000950172891759230",
            "1256624250298316158",
            "14572953286928282395",
            "11250861626949238654",
            "2066450512590186880",
            "4406339264013603126",
            "6649535526486987988",
            "14920223145083393283",
            "18017129979212138612",
            "1235310154294028825",
            "16382646529383194172",
        ];

        let proof_numbers: Vec<u64> = proof_strings
            .iter()
            .map(|s| s.parse::<u64>().unwrap())
            .collect();

        let proof_elements: Vec<F> = proof_numbers
            .iter()
            .map(|&num| F::from_canonical_u64(num))
            .collect();

        let path_hashes: Vec<HashOut<F>> = proof_elements
            .chunks(4)
            .map(|chunk| HashOut {
                elements: chunk.try_into().unwrap(),
            })
            .collect();

        let num_indices = 1 << path_hashes.len();
        let mut found = false;

        for index in 0..num_indices {
            let proof = MerkleProof::<F,D,H>::new(
                index,
                path_hashes.clone(),
                num_indices,
            );

            // Reconstruct the root
            let reconstructed_root = proof.reconstruct_root(leaf.clone())?;

            // Compare with the given root
            if reconstructed_root == root {
                println!("Proof is valid for index {}", index);
                found = true;
                break;
            }
        }

        assert!(found, "No valid proof found for the given leaf and root");

        Ok(())
    }
}
@@ -1,85 +0,0 @@
// some tests for the leaf in tree recursion

#[cfg(test)]
pub mod tests {
    use plonky2::plonk::circuit_data::{ProverCircuitData, VerifierCircuitData};
    use plonky2::plonk::proof::ProofWithPublicInputs;
    use plonky2_field::types::{Field, PrimeField64};
    use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
    use codex_plonky2_circuits::recursion::leaf::{LeafCircuit, LeafInput};
    use codex_plonky2_circuits::recursion::dummy_gen::DummyProofGen;
    use crate::params::{F, D, C, HF};
    use crate::recursion::run_sampling_circ;

    pub fn run_leaf_circ<const T: usize>(inner_proof: ProofWithPublicInputs<F, C, D>, inner_verifier_data: VerifierCircuitData<F, C, D>, flag: bool, index: usize) -> anyhow::Result<(ProofWithPublicInputs<F, C, D>, ProverCircuitData<F, C, D>, VerifierCircuitData<F, C, D>)> {

        // ------------------- leaf --------------------
        let leaf = LeafCircuit::<F,D,C,HF,T>::new(inner_verifier_data.clone());

        // build
        let (targets, data) = leaf.build_with_standard_config()?;
        let verifier_data: VerifierCircuitData<F,C,D> = data.verifier_data();
        let prover_data = data.prover_data();
        println!("leaf circuit degree bits = {:?}", prover_data.common.degree_bits());

        // prove
        let input = LeafInput {
            inner_proof,
            flag,
            index,
        };
        let proof = leaf.prove(&targets, &input, &prover_data)?;
        println!("pub input size = {}", proof.public_inputs.len());
        println!("proof size = {:?} bytes", proof.to_bytes().len());
        println!("pub input = {:?}", proof.public_inputs);

        // verify
        assert!(
            verifier_data.verify(proof.clone()).is_ok(),
            "proof verification failed"
        );

        let flag_buckets: Vec<F> = proof.public_inputs[9..13].to_vec();
        if flag {
            check_flag_buckets(index, flag_buckets);
        } else {
            for i in 0..flag_buckets.len() {
                assert_eq!(flag_buckets[i], F::ZERO, "bucket not valid");
            }
        }

        Ok((proof, prover_data, verifier_data))
    }

    fn check_flag_buckets(index: usize, flag_buckets: Vec<F>) {
        // Compute the bucket and bit position from the input index.
        let bucket = index / 32;
        let bit = index % 32;
        // For each flag target (bucket), assign the appropriate 32-bit one-hot value.
        for (i, &flag_bucket) in flag_buckets.iter().enumerate() {
            let value: u64 = if i == bucket {
                1 << bit
            } else {
                0
            };
            assert_eq!(value, flag_bucket.to_canonical_u64(), "bucket value mismatch");
        }
    }

    #[test]
    fn test_real_leaf_circ() -> anyhow::Result<()> {
        let (inner_proof, _, inner_verifier) = run_sampling_circ()?;

        run_leaf_circ::<128>(inner_proof, inner_verifier, true, 1)?;
        Ok(())
    }

    #[test]
    fn test_dummy_leaf_circ() -> anyhow::Result<()> {
        let (_, _, inner_verifier) = run_sampling_circ()?;
        let (dummy_proof, dummy_vd) = DummyProofGen::gen_dummy_proof_and_vd_zero_pi(&inner_verifier.common)?;
        run_leaf_circ::<128>(dummy_proof, dummy_vd, false, 0)?;
        Ok(())
    }

}
@@ -1,31 +0,0 @@
use plonky2::plonk::circuit_data::{ProverCircuitData, VerifierCircuitData};
use plonky2::plonk::proof::ProofWithPublicInputs;
use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
use codex_plonky2_circuits::circuits::sample_cells::SampleCircuit;
use crate::gen_input::InputGenerator;
use crate::params::{C, D, F, HF, Params};

pub mod tree_test;
pub mod leaf_test;
pub mod node_test;
pub mod wrap_test;


pub fn run_sampling_circ() -> anyhow::Result<(ProofWithPublicInputs<F, C, D>, ProverCircuitData<F, C, D>, VerifierCircuitData<F, C, D>)> {
    //------------ sampling inner circuit ----------------------
    // Circuit that does the sampling - 100 samples
    let mut params = Params::default();
    params.set_n_samples(100);
    let input_gen = InputGenerator::<F,D,HF>::new(params.input_params.clone());
    let one_circ_input = input_gen.gen_testing_circuit_input();
    let samp_circ = SampleCircuit::<F,D,HF>::new(params.circuit_params);
    let (inner_tar, inner_data) = samp_circ.build_with_standard_config()?;

    let inner_verifier_data = inner_data.verifier_data();
    let inner_prover_data = inner_data.prover_data();

    println!("sampling circuit degree bits = {:?}", inner_verifier_data.common.degree_bits());
    let inner_proof = samp_circ.prove(&inner_tar, &one_circ_input, &inner_prover_data)?;

    Ok((inner_proof, inner_prover_data, inner_verifier_data))
}
@@ -1,62 +0,0 @@
// some tests for the node in tree recursion

#[cfg(test)]
mod tests {
    use plonky2::plonk::circuit_data::VerifierCircuitData;
    use plonky2::plonk::proof::ProofWithPublicInputs;
    use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
    use codex_plonky2_circuits::recursion::node::{NodeCircuit, NodeInput};
    use crate::params::{F, D, C, HF};
    use crate::recursion::leaf_test::tests::run_leaf_circ;
    use crate::recursion::run_sampling_circ;

    fn run_node_circ<const N: usize, const T: usize>(leaf_proofs: Vec<ProofWithPublicInputs<F, C, D>>, leaf_verifier_data: VerifierCircuitData<F, C, D>, _flag: bool, index: usize) -> anyhow::Result<()> {

        // ------------------- Node --------------------
        // N leaf proofs
        assert_eq!(leaf_proofs.len(), N);
        let node = NodeCircuit::<F,D,C,HF, N, T>::new(leaf_verifier_data.clone());

        // build
        let (targets, data) = node.build_with_standard_config()?;
        let verifier_data: VerifierCircuitData<F,C,D> = data.verifier_data();
        let prover_data = data.prover_data();
        println!("node circuit degree bits = {:?}", prover_data.common.degree_bits());

        // prove
        let input = NodeInput {
            inner_proofs: leaf_proofs,
            verifier_only_data: leaf_verifier_data.verifier_only,
            condition: false,
            flags: [true; N].to_vec(),
            index,
        };

        let proof = node.prove(&targets, &input, &prover_data)?;
        println!("pub input size = {}", proof.public_inputs.len());
        println!("proof size = {:?} bytes", proof.to_bytes().len());
        println!("pub input = {:?}", proof.public_inputs);

        // verify
        assert!(
            verifier_data.verify(proof.clone()).is_ok(),
            "proof verification failed"
        );

        // TODO: check flags

        Ok(())
    }


    #[test]
    fn test_real_node_circ() -> anyhow::Result<()> {
        let (inner_proof, _, inner_verifier) = run_sampling_circ()?;
        // this is a bit wasteful since it builds the leaf twice, TODO: fix this
        let (leaf_proof_1, _, _leaf_verifier_1) = run_leaf_circ::<128>(inner_proof.clone(), inner_verifier.clone(), true, 0)?;
        let (leaf_proof_2, _, leaf_verifier_2) = run_leaf_circ::<128>(inner_proof, inner_verifier, true, 1)?;
        let leaf_proofs = vec![leaf_proof_1, leaf_proof_2];
        run_node_circ::<2,128>(leaf_proofs, leaf_verifier_2, true, 0)
    }

}
@ -1,58 +0,0 @@
// some tests for the tree recursion

#[cfg(test)]
mod tests {
use plonky2::plonk::proof::ProofWithPublicInputs;
use crate::params::{F, D, C, HF};
use codex_plonky2_circuits::recursion::tree::TreeRecursion;
use crate::recursion::run_sampling_circ;

fn run_tree_recursion<const N: usize, const T: usize>(compress: bool) -> anyhow::Result<()> {

//------------ sampling inner circuit ----------------------
// Circuit that does the sampling - 100 samples
let (inner_proof, _inner_prover_data, inner_verifier_data) = run_sampling_circ()?;

let proofs: Vec<ProofWithPublicInputs<F, C, D>> = (0..T).map(|_i| inner_proof.clone()).collect();

// ------------------- tree --------------------
// N-to-1 tree aggregation
let mut tree = TreeRecursion::<F, D, C, HF, N, T>::build_with_standard_config(inner_verifier_data.clone())?;

// aggregate
let root = if !compress {
tree.prove_tree(&proofs)?
} else {
println!("Mode: tree with compression");
tree.prove_tree_and_compress(&proofs)?
};
println!("pub input size = {}", root.public_inputs.len());
println!("pub input = {:?}", root.public_inputs);
println!("proof size = {:?} bytes", root.to_bytes().len());

let inner_pi: Vec<Vec<F>> = proofs.iter().map(|p| p.public_inputs.clone()).collect();

assert!(
tree.verify_proof_and_public_input(root, inner_pi.clone(), compress).is_ok(),
"proof verification failed"
);

Ok(())
}

#[test]
fn test_tree_recursion() -> anyhow::Result<()> {
// total number of proofs to aggregate
const T: usize = 4;
run_tree_recursion::<2, T>(false)
}

#[test]
fn test_tree_recursion_with_compression() -> anyhow::Result<()> {
// total number of proofs to aggregate
const T: usize = 4;
run_tree_recursion::<2, T>(true)
}

}
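// Note: with arity N = 2 and T = 4 input proofs, the tree aggregates in two
// node levels (4 -> 2 -> 1). A larger run would look like the sketch below,
// assuming T must be a power of N (not confirmed here):
//
//   run_tree_recursion::<4, 16>(false)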
@ -1,117 +0,0 @@
#[cfg(test)]
mod tests {
use plonky2::field::types::Field;
use plonky2::gates::noop::NoopGate;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, VerifierCircuitData};
use plonky2::plonk::proof::ProofWithPublicInputs;
use codex_plonky2_circuits::bn254_wrapper::config::PoseidonBN254GoldilocksConfig;
use codex_plonky2_circuits::bn254_wrapper::wrap::{WrapCircuit, WrapInput};
use codex_plonky2_circuits::circuit_helper::Plonky2Circuit;
use codex_plonky2_circuits::recursion::tree::TreeRecursion;
use crate::params::{D, C, F, HF};
use crate::recursion::run_sampling_circ;

type OuterParameters = PoseidonBN254GoldilocksConfig;

fn bn254_wrap(proof: ProofWithPublicInputs<F, C, D>, vd: VerifierCircuitData<F, C, D>) -> anyhow::Result<()> {
// wrap this in the outer circuit.
let wrapper = WrapCircuit::<F, D, C, OuterParameters>::new(vd);
let (targ, data) = wrapper.build_with_standard_config().unwrap();
println!(
"wrapper circuit degree: {}",
data.common.degree_bits()
);
let verifier_data = data.verifier_data();
let prover_data = data.prover_data();
let wrap_input = WrapInput {
inner_proof: proof,
};
let proof = wrapper.prove(&targ, &wrap_input, &prover_data).unwrap();

assert!(verifier_data.verify(proof).is_ok());

Ok(())
}

#[test]
fn test_dummy_wrap() -> anyhow::Result<()> {

let conf = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(conf);

// pad with no-op gates, presumably to force a realistic circuit size (just over 2^12 gates)
for _ in 0..(4096 + 10) {
builder.add_gate(NoopGate, vec![]);
}
// Add one virtual public input so that the circuit has minimal structure.
let t = builder.add_virtual_public_input();

// Set up the dummy circuit and wrapper.
let dummy_circuit = builder.build::<C>();
let mut pw = PartialWitness::new();
pw.set_target(t, F::ZERO).expect("failed to assign target");
println!(
"dummy circuit degree: {}",
dummy_circuit.common.degree_bits()
);
let dummy_inner_proof = dummy_circuit.prove(pw).unwrap();
assert!(dummy_circuit.verify(dummy_inner_proof.clone()).is_ok());
println!("Verified dummy_circuit");

// wrap this in the outer circuit.
bn254_wrap(dummy_inner_proof, dummy_circuit.verifier_data())?;
Ok(())
}

fn run_tree_recursion<const N: usize, const T: usize>(compress: bool) -> anyhow::Result<()> {

//------------ sampling inner circuit ----------------------
// Circuit that does the sampling - 100 samples
let (inner_proof, _inner_prover_data, inner_verifier_data) = run_sampling_circ()?;

let proofs: Vec<ProofWithPublicInputs<F, C, D>> = (0..T).map(|_i| inner_proof.clone()).collect();

// ------------------- tree --------------------
// N-to-1 tree aggregation
let mut tree = TreeRecursion::<F, D, C, HF, N, T>::build_with_standard_config(inner_verifier_data.clone())?;

// aggregate
let root = if !compress {
tree.prove_tree(&proofs)?
} else {
println!("Mode: tree with compression");
tree.prove_tree_and_compress(&proofs)?
};
println!("pub input size = {}", root.public_inputs.len());
println!("pub input = {:?}", root.public_inputs);
println!("proof size = {:?} bytes", root.to_bytes().len());

// sanity check
let vd = if !compress {
tree.get_node_verifier_data()
} else {
tree.get_compression_verifier_data()
};
assert!(vd.verify(root.clone()).is_ok());

bn254_wrap(root, vd)?;

Ok(())
}

#[test]
fn test_wrap_tree_recursion() -> anyhow::Result<()> {
// total number of proofs to aggregate
const T: usize = 4;
run_tree_recursion::<2, T>(false)
}

#[test]
fn test_wrap_tree_recursion_with_compression() -> anyhow::Result<()> {
// total number of proofs to aggregate
const T: usize = 4;
run_tree_recursion::<2, T>(true)
}

}
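// A sketch (not in the original file): the sampling proof can also be wrapped
// directly, without tree aggregation, by reusing the helpers above:
//
//   let (inner_proof, _prover_data, inner_vd) = run_sampling_circ()?;
//   bn254_wrap(inner_proof, inner_vd)?;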
@ -1 +0,0 @@
pub mod circuit_input;
@ -1,298 +0,0 @@
use plonky2::hash::hash_types::{HashOut, NUM_HASH_OUT_ELTS, RichField};
use plonky2_field::extension::Extendable;
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2;
use plonky2::plonk::config::Hasher;
use plonky2::hash::hashing::PlonkyPermutation;

/// Sponge function similar to the in-circuit one;
/// used here for testing / sanity checks.
pub fn hash_n_with_padding<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
H: Hasher<F>
>(
inputs: &[F],
) -> HashOut<F> {
HashOut::<F>::from_vec(hash_n_to_m_with_padding::<F, D, H::Permutation>(inputs, NUM_HASH_OUT_ELTS))
}
pub fn hash_n_to_m_with_padding<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
P: PlonkyPermutation<F>
>(
inputs: &[F],
num_outputs: usize,
) -> Vec<F> {
let rate = P::RATE;
let width = P::WIDTH; // rate + capacity
let zero = F::ZERO;
let one = F::ONE;
let mut perm = P::new(core::iter::repeat(zero).take(width));

// Set the domain separator at index 8 (the first capacity element, assuming rate = 8)
let domsep_value = F::from_canonical_u64(rate as u64 + 256 * 12 + 65536 * 63);
perm.set_elt(domsep_value, 8);
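// For the Poseidon2 instance used in the tests below (rate 8, width 12), the
// domain separator works out to 8 + 256 * 12 + 65536 * 63 = 4,131,848; the
// constants appear to encode the rate, the width, and a mode tag.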
let input_n = inputs.len();
let num_chunks = (input_n + rate) / rate; // Calculate number of chunks with 10* padding
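// Worked examples of the chunk count (rate = 8):
//   input_n = 5 -> (5 + 8) / 8 = 1 chunk  (5 inputs, then the 1 marker, then zeros)
//   input_n = 8 -> (8 + 8) / 8 = 2 chunks (one full input chunk plus a full padding chunk)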
let mut input_iter = inputs.iter();

// Process all chunks except the last one
for _ in 0..(num_chunks - 1) {
let mut chunk = Vec::with_capacity(rate);
for _ in 0..rate {
if let Some(&input) = input_iter.next() {
chunk.push(input);
} else {
// should not happen here
panic!("Insufficient input elements for chunk; expected more elements.");
}
}
// Add the chunk to the state
for j in 0..rate {
perm.set_elt(perm.as_ref()[j] + chunk[j], j);
}
// Apply permutation
perm.permute();
}
// Process the last chunk with 10* padding
let rem = num_chunks * rate - input_n; // Number of padding elements (0 < rem <= rate)
let ofs = rate - rem; // Offset where padding starts

let mut last_chunk = Vec::with_capacity(rate);
// Absorb remaining inputs
for _ in 0..ofs {
if let Some(&input) = input_iter.next() {
last_chunk.push(input);
} else {
last_chunk.push(zero);
}
}
// Add the '1' padding bit
last_chunk.push(one);
// Pad with zeros to reach the full rate
while last_chunk.len() < rate {
last_chunk.push(zero);
}

// Add the last chunk to the state
for j in 0..rate {
perm.set_elt(perm.as_ref()[j] + last_chunk[j], j);
}
// Apply permutation
perm.permute();
// Squeeze outputs until we have the desired number
let mut outputs = Vec::with_capacity(num_outputs);
loop {
for &item in perm.squeeze() {
outputs.push(item);
if outputs.len() == num_outputs {
return outputs;
}
}
perm.permute();
}
}
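// A sketch (not in the original file) of squeezing more than one digest worth
// of output, with F, D, and Poseidon2Hash as in the tests below:
//
//   use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
//   let inputs: Vec<F> = (1..=10).map(F::from_canonical_u64).collect();
//   let out = hash_n_to_m_with_padding::<F, D, <Poseidon2Hash as Hasher<F>>::Permutation>(&inputs, 8);
//   assert_eq!(out.len(), 8);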
/// Sponge function for bytes with no padding;
/// expects the input length to be divisible by the rate.
/// note: rate is fixed at 8 for now;
/// used here for testing / sanity checks.
pub fn hash_bytes_no_padding<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
H: Hasher<F>
>(
inputs: &[F],
) -> HashOut<F> {
HashOut::<F>::from_vec(hash_bytes_to_m_no_padding::<F, D, H::Permutation>(inputs, NUM_HASH_OUT_ELTS))
}
pub fn hash_bytes_to_m_no_padding<
F: RichField + Extendable<D> + Poseidon2,
const D: usize,
P: PlonkyPermutation<F>
>(
inputs: &[F],
num_outputs: usize,
) -> Vec<F> {
let rate = P::RATE;
let width = P::WIDTH; // rate + capacity
let zero = F::ZERO;
let mut perm = P::new(core::iter::repeat(zero).take(width));

// Set the domain separator at index 8
let domsep_value = F::from_canonical_u64(rate as u64 + 256 * 12 + 65536 * 8);
perm.set_elt(domsep_value, 8);

let n = inputs.len();
assert_eq!(n % rate, 0, "Input length ({}) must be divisible by rate ({})", n, rate);
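// Worked examples of the divisibility requirement (rate = 8):
//   n = 16 -> accepted, absorbed as 2 chunks
//   n = 12 -> panics, since 12 % 8 != 0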
let num_chunks = n / rate; // Calculate number of chunks
let mut input_iter = inputs.iter();

// Process all chunks
for _ in 0..num_chunks {
let mut chunk = Vec::with_capacity(rate);
for _ in 0..rate {
if let Some(&input) = input_iter.next() {
chunk.push(input);
} else {
// should not happen here
panic!("Insufficient input elements for chunk; expected more elements.");
}
}
// Add the chunk to the state
for j in 0..rate {
perm.set_elt(perm.as_ref()[j] + chunk[j], j);
}
// Apply permutation
perm.permute();
}

// Squeeze outputs until we have the desired number
let mut outputs = Vec::with_capacity(num_outputs);
loop {
for &item in perm.squeeze() {
outputs.push(item);
if outputs.len() == num_outputs {
return outputs;
}
}
perm.permute();
}
}
#[cfg(test)]
mod tests {
use plonky2::field::types::Field;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2_poseidon2::poseidon2_hash::poseidon2::Poseidon2Hash;
use crate::sponge::hash_n_with_padding;

// test types
pub const D: usize = 2;
pub type C = PoseidonGoldilocksConfig;
pub type F = <C as GenericConfig<D>>::F;
pub type H = Poseidon2Hash;
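// Note: H = Poseidon2Hash appears to give a rate-8, width-12 permutation,
// matching the constants hard-coded in the domain separator above.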
#[test]
fn test_sponge_hash_rate_8() {

struct TestCase {
n: usize,
digest: [u64; 4],
}

let test_cases: Vec<TestCase> = vec![
TestCase { n: 0, digest: [0x509f3a747e4a6fca, 0xd6f21d91afb92eb3, 0xf65ef4075dcfb169, 0xbceaf22e0cd21b3d] },
TestCase { n: 1, digest: [0xfa286adad207c7ea, 0x97d864ff2e89415e, 0xcf002b28585bd945, 0x95ec163fbdd0792e] },
TestCase { n: 2, digest: [0xe4b779622cbb574f, 0x1fe4b1bc9a0c9fc7, 0x40051ada5252de9b, 0xb351345b1894a59f] },
TestCase { n: 3, digest: [0x133a5a2fd0cae006, 0x072a7769ca9a550d, 0x92134dad95d394c6, 0x22234de7d7270aab] },
TestCase { n: 4, digest: [0x78269e830f2a824a, 0x76f8b00469a8fa81, 0x6793369b1d75ebf5, 0xfba1a89dc21d9b30] },
TestCase { n: 5, digest: [0x263994efd2cd5c57, 0x7c37a93fd48fc98b, 0xa081b26a68767d13, 0x16af92d6e1e4d7f8] },
TestCase { n: 6, digest: [0x0b0b0f1d64f8d58c, 0x2946089b2eb949fc, 0xf68bcf08b69a95e7, 0x814d6eb4b2df848c] },
TestCase { n: 7, digest: [0xae0c900a194ee051, 0x4555257fba7a500b, 0x1713fd448cc82c3a, 0xaf8f2e895e2136f3] },
TestCase { n: 8, digest: [0x100351f04fc470b7, 0x79d3c3c416087158, 0x113bb1c70a6e84ee, 0x3eab2507cdc254d3] },
TestCase { n: 9, digest: [0xbab284d7f11855d6, 0xe1b53d108f308a1c, 0x971fea7184337830, 0x6d674ae321cfb9ba] },
TestCase { n: 10, digest: [0x68c00dbe0ed03a8f, 0xab5ba3617eb6f76b, 0x5d735bb89418cc0b, 0xff4101076f3f3c70] },
TestCase { n: 11, digest: [0xaecce2fa7de4f97d, 0x07cee3dc720812e0, 0x4155bf667391a9e8, 0xbf8a49a12f40e746] },
TestCase { n: 12, digest: [0xd3f43f06fc7affd2, 0xee9a8ac5ef44071a, 0xe00ec9e7f468d0e2, 0x944e34913a974233] },
TestCase { n: 13, digest: [0xcd50fe6ab5e3de54, 0x9b2093adaeac949c, 0xa176a2a9e2c82787, 0xd35f0635a1ec333f] },
TestCase { n: 14, digest: [0x8f5188d26ca0368c, 0x0116bf587e5cc970, 0x30654ee52a3c66d8, 0xe8ded60382c44b04] },
TestCase { n: 15, digest: [0xc7f020f910327951, 0x13a468945463870d, 0xbcf8ca584edb30f3, 0x7e7234f0b8954e7e] },
TestCase { n: 16, digest: [0xf8a9aef7392048e7, 0x6124715a2c5343eb, 0x1b7f17ebec4a5b13, 0xdf61d868051dad75] },
TestCase { n: 17, digest: [0x44d1fb6822c7f3fa, 0x2623cc2240022e42, 0xc90ce9259c9e1160, 0x7a42bc611acacc12] },
TestCase { n: 18, digest: [0x85dab5b06ef2d176, 0x24a587b13a4e3b30, 0xf547a00373299873, 0xb298a6ef846d64a1] },
TestCase { n: 19, digest: [0x7cc060a3f2a74260, 0xa07dc76e73335eb0, 0xf8ed9acbcf8a242e, 0xd32eaf3150005e49] },
TestCase { n: 20, digest: [0x3e961c84e53106f9, 0x63d9a807f9cfd88c, 0x7031e8834a17821a, 0xf2e1c79698798fa9] },
TestCase { n: 21, digest: [0x8a0ab00081c9828f, 0xa5f7aadaf3af046e, 0xada8b4c6220b3420, 0x80ebc8c91a65518c] },
TestCase { n: 22, digest: [0x39505fc00f052122, 0xb13edc24a35665c7, 0xa7b164fffe37ec64, 0x8f7eeb42c068e19f] },
TestCase { n: 23, digest: [0x1f49d6f25f39522b, 0x879377d8df727784, 0x00f1461600d09cdd, 0xd2c7946a44e1aa66] },
TestCase { n: 24, digest: [0x1c6f7a68537f7dc7, 0x64e6e09714dc0854, 0x9abfed111e51bd96, 0x65061b2bc484ed8b] },
TestCase { n: 25, digest: [0x95fd5cc6bc02ab29, 0xe2e3c96d9b1b8b5d, 0xadcf491caa16549e, 0x97d91e370da3c0b4] },
TestCase { n: 26, digest: [0x7599c5052ba67767, 0x3fe4a05f44e96ed6, 0xbbfe6874aa53808c, 0xd6771e162cc9f0ff] },
TestCase { n: 27, digest: [0xdff28121d822093c, 0x7313ea03b57bb436, 0x10ed29b28a77d8c3, 0x6ee304be541fe36f] },
TestCase { n: 28, digest: [0xce2b7f232b504b48, 0x02c638c398c12cb0, 0x4f1d416215377a86, 0x2d43ff6c5dd88f8c] },
TestCase { n: 29, digest: [0xa60cb008de647e9a, 0x502e2e740f68e2d1, 0xe983eb54e4052013, 0xe76e59c5e5dbcca2] },
TestCase { n: 30, digest: [0x7735e3ac5e08fa00, 0x211a86449207c30d, 0x9d80ddd40e7760b2, 0xe60f32f28597a188] },
TestCase { n: 31, digest: [0x6fab3f12496f0691, 0x5116ad81bedd7d84, 0xaa8a7713a80b323b, 0xce6d94533fc40b88] },
TestCase { n: 32, digest: [0xce51cdbd641d57c0, 0xf638202a88ee7f9c, 0x26c291ecc5162b45, 0x04a0a62b949c236f] },
TestCase { n: 33, digest: [0x923391e4a4cde9e2, 0xdcb3acccba80597d, 0x247bb4b67044a0e1, 0x65bbac92e096d1ec] },
TestCase { n: 34, digest: [0x1550d0234ae35f05, 0x16f4d1708923d4f1, 0x232319cb4090ea4e, 0x8354e1aed093070c] },
TestCase { n: 35, digest: [0xc7dd24e6db4ea70f, 0x80bc3d2aac952cb1, 0xabbd1a878bc50565, 0xf1ebc3b8d513c591] },
TestCase { n: 36, digest: [0xba9c4b1ce906efb1, 0xa332d0daccc62979, 0xfb658fcad0b5fbbd, 0x62d21407f34a35ee] },
TestCase { n: 37, digest: [0xcb2973d44f2b589d, 0x01708b32c4556317, 0x3ad51597c12b8564, 0x28d3a5d7de72cfd5] },
TestCase { n: 38, digest: [0x1dcf1f4ab7338296, 0xb88c661141b5aabb, 0x7e546b6e9b31bc90, 0xf26f7e6ffabb4e69] },
TestCase { n: 39, digest: [0x2e139ff910c0f410, 0xba3d2c0a92ec3845, 0x2860e475933a7108, 0x8f2a6c6d13bedc7a] },
TestCase { n: 40, digest: [0xc18a53c17c360ef4, 0x5e56ea9228988c68, 0xee0bd138436e996d, 0x06afd46a753f8257] },
TestCase { n: 41, digest: [0x2c992403c5277dc5, 0xba8770bc3a54b043, 0x51b882882a7b7864, 0xf75e179a53e7948e] },
TestCase { n: 42, digest: [0xde855183965741c3, 0x93520eac77a8f98d, 0x6412ae8cf0522d78, 0x9db49c6b455a83b4] },
TestCase { n: 43, digest: [0x552e357ddb7e1ef6, 0x5fa779e9c7373b56, 0x18f7c445e27e5dcf, 0x2664ecee5e7bc6c2] },
TestCase { n: 44, digest: [0x37b8a716c87e5489, 0x1201fcd31e407152, 0x0979d7887c42e1ca, 0x902e8b2bf748b356] },
TestCase { n: 45, digest: [0xa48bdd1d464960ed, 0x8e92c1af0cf258bc, 0x7c5b447524b92ba9, 0xac63902e613e4ef0] },
TestCase { n: 46, digest: [0x542e62f9317fe11d, 0xc23ba113a3f3c810, 0x2bda30c42a89cc7e, 0x35616e9f1a00aa8f] },
TestCase { n: 47, digest: [0x1c9194a0acfa97d7, 0x60d536ac106dd774, 0x8855b4a40e110080, 0xc2c408114e8c20d6] },
TestCase { n: 48, digest: [0x0e90b1cc3ac49e0c, 0x1b73aa8e0decbf88, 0x0ca9ef7070e0513f, 0x25cfb975571b6139] },
TestCase { n: 49, digest: [0xba6d6f7aa664f2e7, 0x4b9af896093937b9, 0x115b9aeb6c5f563e, 0x41cb5f42c6d3b115] },
TestCase { n: 50, digest: [0xdc3bdc491154caf6, 0xb95159bae61b2035, 0x98bd384fb3d0100b, 0xd70226f2b71ea465] },
TestCase { n: 51, digest: [0x57f31da51bcd2eab, 0x4a3b3945a8662b5c, 0x44dffaa325530b19, 0x47f4e41c2c1474cf] },
TestCase { n: 52, digest: [0xc3f518f6cf3b43bf, 0x1214790ff48554e4, 0x99c1eabc61b218fd, 0xf90b03954d7937f8] },
TestCase { n: 53, digest: [0x6357b3cdcbc1283a, 0x6acc0c2d5aac9261, 0xdf11e7ad14d432d1, 0x2242b26bdcc8a380] },
TestCase { n: 54, digest: [0x1946dc4471f8c502, 0x6be7d72499e0b4a5, 0x6e11de349239ff90, 0xfca78044256b8b54] },
TestCase { n: 55, digest: [0x302b38fb3df623dd, 0x69b362f7932fd7af, 0x2b47156f9135508b, 0xfe6c574f0a102e92] },
TestCase { n: 56, digest: [0xfdc9bd08a0416122, 0x063ebf4767fc7914, 0x330f36279d94050e, 0x79c61f80746893ec] },
TestCase { n: 57, digest: [0x7b5d8384b67af5c0, 0xa705e0163fa4d839, 0x1e203432e872104e, 0xe0e7699f20a291f4] },
TestCase { n: 58, digest: [0xb0aa74a52fe04366, 0x194b0d4afcdc03d9, 0x5134dc604b5d9f2a, 0x53c6bf9d5a1d502b] },
TestCase { n: 59, digest: [0xd5c8258f6fc80e2b, 0x82bac373eb051b48, 0x5ef620241420462d, 0x58635db0134fb97a] },
TestCase { n: 60, digest: [0x42ebb974ac5dd0ef, 0x676d0c6b3dde78c3, 0x14ed5eda2c9cb9de, 0x0f78a26badaa447c] },
TestCase { n: 61, digest: [0x2b3ca7711db999d5, 0xb74bd29abcb6179a, 0x8ba196525e6adb25, 0x86cb9464ae269a43] },
TestCase { n: 62, digest: [0x3d0e61a2ca7a65a2, 0x31f77852d41a6c8d, 0x2e4ceaa39763a53d, 0x5232ff5a3d78755e] },
TestCase { n: 63, digest: [0xb2ed789e88c1f525, 0x1592c1a1eafd2a9b, 0x98700c512f8c9a5d, 0xf96837b5d99a4eb4] },
TestCase { n: 64, digest: [0xe4b7d14e11de2fa9, 0xe81afff2cee68e14, 0xc58abb080bf37dd3, 0x36ae8b2196b5ae88] },
TestCase { n: 65, digest: [0xa1df9ff199c41d63, 0xd02c067d3d12edc1, 0xc9b598130fa60794, 0x5afe82d34c3fc8fa] },
TestCase { n: 66, digest: [0x0bc0094a1f07256d, 0x33c5b4c2a171d5bd, 0x1f38f1b1dc92aa54, 0x4610d21f276faa11] },
TestCase { n: 67, digest: [0x8072f00df8f7e44f, 0x42f0c2b8fe81d8a0, 0x2b5caf9e7c0ff611, 0x92b0b3a4a4bebe1a] },
TestCase { n: 68, digest: [0x6539f06fab064b57, 0xdb298b91f6c4f44f, 0x5d8f8eec6b7e8c86, 0x848a447123f39006] },
TestCase { n: 69, digest: [0x87f32efc9eaa65f6, 0xc5699d4ab6443852, 0x61008286bc651f4a, 0xcbcf714354843da3] },
TestCase { n: 70, digest: [0xffb8ad2258107315, 0xf7d6a58eb54f2745, 0xaecf888211821114, 0x7e0ea33b4d56976e] },
TestCase { n: 71, digest: [0xa9e5b6d70f67db2b, 0x072fd05840040322, 0x40ffcc86e3909dec, 0x3d80f61616a9e6d7] },
TestCase { n: 72, digest: [0xa77dd95d9ff4d7b8, 0x3a0e0502f74c091a, 0x1fa83de1e7dc716d, 0xe01ae447cc3a0e40] },
TestCase { n: 73, digest: [0xc4a29dc875a308eb, 0xd2ed0da7aab24b0c, 0x4c2aaaed0bc4f059, 0xaea772c635ea901a] },
TestCase { n: 74, digest: [0xaad59bf06c151ecf, 0x5e0f45e55df36692, 0x4798afb8b944a01e, 0xd7152cd819bbd7f8] },
TestCase { n: 75, digest: [0x89ae5b2b35ba07c7, 0x129f4ff59afaa1a3, 0x4275f3f797112650, 0xea3b4baaf7190a19] },
TestCase { n: 76, digest: [0xab068e43be297604, 0x17bd1c3cf4afec96, 0xaa84a8098dba4516, 0xa6e487ceafb02c49] },
TestCase { n: 77, digest: [0x2c85080ef895bb4a, 0xbd280690a789c124, 0xca4f8423b50de8a5, 0xec809bb8c30de95b] },
TestCase { n: 78, digest: [0x51c3d13543e4922b, 0xff9c11d5b93268db, 0xd9cf911cc5326948, 0x4b7bb11eafe7fd44] },
TestCase { n: 79, digest: [0xb435274d75678586, 0x8600e7f2db687493, 0x282873a3600a38da, 0x727791507d1b600e] },
TestCase { n: 80, digest: [0x23ae45602324f628, 0x0dc16b33f43209c5, 0x2455376f83b1aeff, 0xd5470f22ec2113bc] },
];

for test_case in test_cases {
let n = test_case.n;
let expected_digest = test_case.digest;

// Generate inputs
let inputs: Vec<F> = (0..n)
.map(|i| F::from_canonical_u64(i as u64 + 1))
.collect();

// Call the sponge function
let output = hash_n_with_padding::<F, D, H>(&inputs);

// Compare the outputs
for (i, &out_elem) in output.elements.iter().enumerate() {
let expected_elem = F::from_canonical_u64(expected_digest[i]);
assert_eq!(
out_elem,
expected_elem,
"Mismatch at test case n={}, output element {}",
n,
i
);
}
}
}
}