From fcf216012cf30a6bc2a41c7dbfe25f2a5e779219 Mon Sep 17 00:00:00 2001
From: Nicholas Ward
Date: Fri, 17 Jun 2022 16:28:23 -0700
Subject: [PATCH] memory row generation

---
 evm/src/all_stark.rs             |   4 +
 evm/src/memory/memory_stark.rs   |  89 ++++++++++-
 plonky2/src/gadgets/curve_msm.rs |  71 +++++++++
 starky/src/factorial_stark.rs    | 244 +++++++++++++++++++++++++++++
 starky/src/permutation_stark.rs  | 253 +++++++++++++++++++++++++++++
 5 files changed, 660 insertions(+), 1 deletion(-)
 create mode 100644 plonky2/src/gadgets/curve_msm.rs
 create mode 100644 starky/src/factorial_stark.rs
 create mode 100644 starky/src/permutation_stark.rs

diff --git a/evm/src/all_stark.rs b/evm/src/all_stark.rs
index a3c78b88..acb6c935 100644
--- a/evm/src/all_stark.rs
+++ b/evm/src/all_stark.rs
@@ -162,6 +162,10 @@ mod tests {
                 .unwrap()
             })
             .collect();
+        let memory_trace = memory_stark.generate_trace(keccak_inputs);
+        let column_to_copy: Vec<_> = keccak_trace[keccak_looked_col].values[..].into();
+
+        let default = vec![F::ONE; 1];
 
         let mut cpu_trace_rows = vec![];
         for i in 0..num_keccak_perms {
diff --git a/evm/src/memory/memory_stark.rs b/evm/src/memory/memory_stark.rs
index b1ba0f1b..a92d3bd1 100644
--- a/evm/src/memory/memory_stark.rs
+++ b/evm/src/memory/memory_stark.rs
@@ -152,7 +152,34 @@ pub fn generate_range_check_value(
 }
 
 impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
-    pub(crate) fn generate_memory(trace_cols: &mut [Vec<F>]) {
+    fn generate_trace_rows(
+        &self,
+        memory_ops: Vec<(F, F, F, [F; 8], F, F)>,
+    ) -> Vec<[F; NUM_REGISTERS]> {
+        let num_ops = memory_ops.len();
+
+        let mut trace_cols: [Vec<F>; NUM_REGISTERS] =
+            vec![vec![F::ZERO; num_ops]; NUM_REGISTERS].try_into().unwrap();
+        for i in 0..num_ops {
+            let (context, segment, virt, values, is_read, timestamp) = memory_ops[i];
+            trace_cols[MEMORY_ADDR_CONTEXT][i] = context;
+            trace_cols[MEMORY_ADDR_SEGMENT][i] = segment;
+            trace_cols[MEMORY_ADDR_VIRTUAL][i] = virt;
+            for j in 0..8 {
+                trace_cols[memory_value_limb(j)][i] = values[j];
+            }
+            trace_cols[MEMORY_IS_READ][i] = is_read;
+            trace_cols[MEMORY_TIMESTAMP][i] = timestamp;
+        }
+
+        self.generate_memory(&mut trace_cols);
+
+        // Transpose the columns into rows, one row per memory operation.
+        let mut trace_rows = vec![[F::ZERO; NUM_REGISTERS]; num_ops];
+        for (i, col) in trace_cols.iter().enumerate() {
+            for (j, &val) in col.iter().enumerate() {
+                trace_rows[j][i] = val;
+            }
+        }
+        trace_rows
+    }
+
+    fn generate_memory(&self, trace_cols: &mut [Vec<F>]) {
         let context = &trace_cols[MEMORY_ADDR_CONTEXT];
         let segment = &trace_cols[MEMORY_ADDR_SEGMENT];
         let virtuals = &trace_cols[MEMORY_ADDR_VIRTUAL];
@@ -441,8 +468,13 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<
         test_stark_low_degree(stark)
     }
+
+    #[test]
+    fn test_memory_stark() -> Result<()> {
+        use std::collections::HashMap;
+
+        use rand::{thread_rng, Rng};
+
+        const D: usize = 2;
+        type C = PoseidonGoldilocksConfig;
+        type F = <C as GenericConfig<D>>::F;
+        type S = MemoryStark<F, D>;
+
+        let stark = S {
+            f: Default::default(),
+        };
+
+        const MAX_CONTEXT: usize = 256;
+        const MAX_SEGMENT: usize = 8;
+        const MAX_VIRTUAL: usize = 1 << 12;
+
+        let mut rng = thread_rng();
+
+        let num_ops = 20;
+        let mut memory_ops = Vec::new();
+        let mut current_memory_values: HashMap<(F, F, F), [F; 8]> = HashMap::new();
+        let mut cur_timestamp = 0;
+        for i in 0..num_ops {
+            // The first operation must be a write, since there is nothing to read yet.
+            let is_read = if i == 0 { false } else { rng.gen() };
+            let is_read_field = F::from_bool(is_read);
+
+            let (context, segment, virt, vals) = if is_read {
+                // Read an address that was previously written.
+                let written: Vec<_> = current_memory_values.keys().collect();
+                let &(context, segment, virt) = written[rng.gen_range(0..written.len())];
+                let &vals = current_memory_values.get(&(context, segment, virt)).unwrap();
+
+                (context, segment, virt, vals)
+            } else {
+                // Write a random value to a random address. The small virtual-address
+                // range makes repeated accesses to the same address likely.
+                let context = F::from_canonical_usize(rng.gen_range(0..MAX_CONTEXT));
+                let segment = F::from_canonical_usize(rng.gen_range(0..MAX_SEGMENT));
+                let virt = F::from_canonical_usize(rng.gen_range(0..20));
+
+                let val: [u32; 8] = rng.gen();
+                let vals: [F; 8] = val
+                    .iter()
+                    .map(|&x| F::from_canonical_u32(x))
+                    .collect::<Vec<_>>()
+                    .try_into()
+                    .unwrap();
+
+                current_memory_values.insert((context, segment, virt), vals);
+
+                (context, segment, virt, vals)
+            };
+
+            let timestamp = F::from_canonical_usize(cur_timestamp);
+            cur_timestamp += 1;
+
+            memory_ops.push((context, segment, virt, vals, is_read_field, timestamp));
+        }
+
+        let rows = stark.generate_trace_rows(memory_ops);
+        assert_eq!(rows.len(), num_ops);
+
+        Ok(())
+    }
 }
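A note on the memory trace (illustration, not part of the patch): `generate_trace_rows` fills one column per register and hands the columns to `generate_memory`, whose unchanged body consumes the address and timestamp columns. Memory STARKs of this shape typically rely on the operations being ordered by address `(context, segment, virtual)` and then by `timestamp`, so that consecutive-row differences can be range-checked. A minimal sketch of that ordering over plain `u64` tuples (a hypothetical helper, not from the patch):

    // Sort memory operations by address, breaking ties by timestamp.
    fn sort_memory_ops(ops: &mut [(u64, u64, u64, u64)]) {
        ops.sort_unstable_by_key(|&(context, segment, virt, timestamp)| {
            (context, segment, virt, timestamp)
        });
    }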
diff --git a/plonky2/src/gadgets/curve_msm.rs b/plonky2/src/gadgets/curve_msm.rs
new file mode 100644
index 00000000..60d1ddc4
--- /dev/null
+++ b/plonky2/src/gadgets/curve_msm.rs
@@ -0,0 +1,71 @@
+use plonky2_field::extension_field::Extendable;
+use plonky2_field::field_types::Field;
+
+use crate::curve::curve_types::{AffinePoint, Curve, CurveScalar};
+use crate::gadgets::curve::AffinePointTarget;
+use crate::gadgets::nonnative::NonNativeTarget;
+use crate::hash::hash_types::RichField;
+use crate::plonk::circuit_builder::CircuitBuilder;
+
+const DIGITS_PER_CHUNK: usize = 80;
+
+const WINDOW_SIZE: usize = 4;
+
+pub struct MsmPrecomputationTarget<C: Curve> {
+    /// For each generator (in the order they were passed to `msm_precompute`), contains a vector
+    /// of powers, i.e. `(2^w)^i * g` for `i < digits`, where `w = WINDOW_SIZE`.
+    powers_per_generator: Vec<Vec<AffinePointTarget<C>>>,
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
+    pub fn precompute_single_generator<C: Curve>(
+        &mut self,
+        g: AffinePointTarget<C>,
+    ) -> Vec<AffinePointTarget<C>> {
+        let digits = (C::ScalarField::BITS + WINDOW_SIZE - 1) / WINDOW_SIZE;
+        let mut powers: Vec<AffinePointTarget<C>> = Vec::with_capacity(digits);
+        powers.push(g);
+        for i in 1..digits {
+            // Each entry is the previous one doubled WINDOW_SIZE times.
+            let mut power_i_proj = powers[i - 1].clone();
+            for _j in 0..WINDOW_SIZE {
+                power_i_proj = self.curve_double(&power_i_proj);
+            }
+            powers.push(power_i_proj);
+        }
+        powers
+    }
+
+    pub fn msm_precompute<C: Curve>(
+        &mut self,
+        generators: &[AffinePointTarget<C>],
+    ) -> MsmPrecomputationTarget<C> {
+        MsmPrecomputationTarget {
+            powers_per_generator: generators
+                .iter()
+                .map(|g| self.precompute_single_generator(g.clone()))
+                .collect(),
+        }
+    }
+
+    pub fn msm_execute<C: Curve>(
+        &mut self,
+        precomputation: &MsmPrecomputationTarget<C>,
+        scalars: &[NonNativeTarget<C::ScalarField>],
+    ) -> AffinePointTarget<C> {
+        debug_assert_eq!(precomputation.powers_per_generator.len(), scalars.len());
+
+        let _num_digits = (C::ScalarField::BITS + WINDOW_SIZE - 1) / WINDOW_SIZE;
+        let base = 1 << WINDOW_SIZE;
+
+        // Decompose each scalar into 4-bit windowed digits.
+        for (_i, scalar) in scalars.iter().enumerate() {
+            let _digits = self.split_nonnative_to_4_bit_limbs(scalar);
+        }
+
+        let _digit_constants: Vec<_> = (0..base)
+            .map(|i| self.constant(F::from_canonical_usize(i)))
+            .collect();
+        let mut _digit_acc: Vec<Vec<AffinePointTarget<C>>> = Vec::new();
+        for _i in 0..base {
+            // TODO: accumulate, for each digit value, the sum of the powers selected by
+            // that digit.
+        }
+        todo!("combine the per-digit sums into the final MSM result")
+    }
+}
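Background on the windowed MSM above (illustration, not part of the patch): with `WINDOW_SIZE = 4`, a scalar `s` is split into base-16 digits `d_i`, so `s * g = sum_i d_i * ((2^4)^i * g)`. `precompute_single_generator` builds the `(2^4)^i * g` table by repeated doubling, and `msm_execute` is meant to select from that table digit by digit; it is left unfinished in this patch, which the `todo!` marks. A plain-Rust sketch of the digit decomposition:

    // Decompose a scalar into 4-bit digits, least significant first.
    fn base16_digits(mut s: u64) -> Vec<u8> {
        let mut digits = Vec::new();
        while s != 0 {
            digits.push((s & 0xF) as u8);
            s >>= 4;
        }
        digits
    }

For example, `base16_digits(0xBEEF)` returns `[0xF, 0xE, 0xE, 0xB]`.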
diff --git a/starky/src/factorial_stark.rs b/starky/src/factorial_stark.rs
new file mode 100644
index 00000000..6650de25
--- /dev/null
+++ b/starky/src/factorial_stark.rs
@@ -0,0 +1,244 @@
+use std::marker::PhantomData;
+
+use plonky2::field::extension_field::{Extendable, FieldExtension};
+use plonky2::field::packed_field::PackedField;
+use plonky2::field::polynomial::PolynomialValues;
+use plonky2::hash::hash_types::RichField;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+
+use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use crate::stark::Stark;
+use crate::util::trace_rows_to_poly_values;
+use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
+
+/// Toy STARK system used for testing.
+/// Computes a factorial sequence with state `[fact, n]` using the state transition
+/// `fact' <- fact * (n + 1), n' <- n + 1`.
+#[derive(Copy, Clone)]
+struct FactorialStark<F: RichField + Extendable<D>, const D: usize> {
+    num_rows: usize,
+    _phantom: PhantomData<F>,
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> FactorialStark<F, D> {
+    // The first public input is `x0`.
+    const PI_INDEX_X0: usize = 0;
+    // The second public input is the first element of the last row, which should be equal to
+    // `num_rows` factorial (for `x0 = 1`).
+    const PI_INDEX_RES: usize = 1;
+
+    fn new(num_rows: usize) -> Self {
+        Self {
+            num_rows,
+            _phantom: PhantomData,
+        }
+    }
+
+    /// Generate the trace using `x0, 1` as initial state values.
+    fn generate_trace(&self, x0: F) -> Vec<PolynomialValues<F>> {
+        let trace_rows = (0..self.num_rows)
+            .scan([x0, F::ONE], |acc, _| {
+                let tmp = *acc;
+                acc[0] = tmp[0] * (tmp[1] + F::ONE);
+                acc[1] = tmp[1] + F::ONE;
+                Some(tmp)
+            })
+            .collect::<Vec<_>>();
+        trace_rows_to_poly_values(trace_rows)
+    }
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FactorialStark<F, D> {
+    const COLUMNS: usize = 2;
+    const PUBLIC_INPUTS: usize = 2;
+
+    fn eval_packed_generic<FE, P, const D2: usize>(
+        &self,
+        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
+        yield_constr: &mut ConstraintConsumer<P>,
+    ) where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>,
+    {
+        // Check public inputs.
+        yield_constr
+            .constraint_first_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_X0]);
+        yield_constr
+            .constraint_last_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_RES]);
+
+        // x0' <- x0 * (x1 + 1)
+        yield_constr.constraint_transition(
+            vars.next_values[0] - vars.local_values[0] * (vars.local_values[1] + FE::ONE),
+        );
+        // x1' <- x1 + 1
+        yield_constr.constraint_transition(vars.next_values[1] - vars.local_values[1] - FE::ONE);
+    }
+
+    fn eval_ext_recursively(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
+        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
+    ) {
+        // Check public inputs.
+        let pis_constraints = [
+            builder.sub_extension(vars.local_values[0], vars.public_inputs[Self::PI_INDEX_X0]),
+            builder.sub_extension(vars.local_values[0], vars.public_inputs[Self::PI_INDEX_RES]),
+        ];
+        yield_constr.constraint_first_row(builder, pis_constraints[0]);
+        yield_constr.constraint_last_row(builder, pis_constraints[1]);
+
+        let one = builder.one_extension();
+        // x0' <- x0 * (x1 + 1)
+        let first_col_constraint = {
+            let tmp1 = builder.add_extension(vars.local_values[1], one);
+            let tmp2 = builder.mul_extension(vars.local_values[0], tmp1);
+            builder.sub_extension(vars.next_values[0], tmp2)
+        };
+        yield_constr.constraint_transition(builder, first_col_constraint);
+        // x1' <- x1 + 1
+        let second_col_constraint = {
+            let tmp = builder.add_extension(vars.local_values[1], one);
+            builder.sub_extension(vars.next_values[1], tmp)
+        };
+        yield_constr.constraint_transition(builder, second_col_constraint);
+    }
+
+    fn constraint_degree(&self) -> usize {
+        2
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use anyhow::Result;
+    use plonky2::field::extension_field::Extendable;
+    use plonky2::field::field_types::Field;
+    use plonky2::hash::hash_types::RichField;
+    use plonky2::iop::witness::PartialWitness;
+    use plonky2::plonk::circuit_builder::CircuitBuilder;
+    use plonky2::plonk::circuit_data::CircuitConfig;
+    use plonky2::plonk::config::{
+        AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig,
+    };
+    use plonky2::util::timing::TimingTree;
+
+    use crate::config::StarkConfig;
+    use crate::factorial_stark::FactorialStark;
+    use crate::proof::StarkProofWithPublicInputs;
+    use crate::prover::prove;
+    use crate::recursive_verifier::{
+        add_virtual_stark_proof_with_pis, recursively_verify_stark_proof,
+        set_stark_proof_with_pis_target,
+    };
+    use crate::stark::Stark;
+    use crate::stark_testing::test_stark_low_degree;
+    use crate::verifier::verify_stark_proof;
+
+    fn factorial<F: Field>(n: usize, x0: F) -> F {
+        (0..n)
+            .fold((x0, F::ONE), |x, _| (x.0 * (x.1 + F::ONE), x.1 + F::ONE))
+            .0
+    }
+
+    #[test]
+    fn test_factorial_stark() -> Result<()> {
+        const D: usize = 2;
+        type C = PoseidonGoldilocksConfig;
+        type F = <C as GenericConfig<D>>::F;
+        type S = FactorialStark<F, D>;
+
+        let config = StarkConfig::standard_fast_config();
+        let num_rows = 1 << 3;
+        let public_inputs = [F::ONE, factorial(num_rows - 1, F::ONE)];
+        let stark = S::new(num_rows);
+        let trace = stark.generate_trace(public_inputs[0]);
+        let proof = prove::<F, C, S, D>(
+            stark,
+            &config,
+            trace,
+            public_inputs,
+            &mut TimingTree::default(),
+        )?;
+
+        verify_stark_proof(stark, proof, &config)
+    }
+
+    #[test]
+    fn test_factorial_stark_degree() -> Result<()> {
+        const D: usize = 2;
+        type C = PoseidonGoldilocksConfig;
+        type F = <C as GenericConfig<D>>::F;
+        type S = FactorialStark<F, D>;
+
+        let num_rows = 1 << 3;
+        let stark = S::new(num_rows);
+        test_stark_low_degree(stark)
+    }
+
+    #[test]
+    fn test_recursive_stark_verifier() -> Result<()> {
+        init_logger();
+        const D: usize = 2;
+        type C = PoseidonGoldilocksConfig;
+        type F = <C as GenericConfig<D>>::F;
+        type S = FactorialStark<F, D>;
+
+        let config = StarkConfig::standard_fast_config();
+        let num_rows = 1 << 5;
+        let public_inputs = [F::ONE, factorial(num_rows - 1, F::ONE)];
+        let stark = S::new(num_rows);
+        let trace = stark.generate_trace(public_inputs[0]);
+        let proof = prove::<F, C, S, D>(
+            stark,
+            &config,
+            trace,
+            public_inputs,
+            &mut TimingTree::default(),
+        )?;
+        verify_stark_proof(stark, proof.clone(), &config)?;
+
+        recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
+    }
+
+    fn recursive_proof<
+        F: RichField + Extendable<D>,
+        C: GenericConfig<D, F = F>,
+        S: Stark<F, D> + Copy,
+        InnerC: GenericConfig<D, F = F>,
+        const D: usize,
+    >(
+        stark: S,
+        inner_proof: StarkProofWithPublicInputs<F, InnerC, D>,
+        inner_config: &StarkConfig,
+        print_gate_counts: bool,
+    ) -> Result<()>
+    where
+        InnerC::Hasher: AlgebraicHasher<F>,
+        [(); S::COLUMNS]:,
+        [(); S::PUBLIC_INPUTS]:,
+        [(); C::Hasher::HASH_SIZE]:,
+    {
+        let circuit_config = CircuitConfig::standard_recursion_config();
+        let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
+        let mut pw = PartialWitness::new();
+        let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
+        let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits);
+        set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof);
+
+        recursively_verify_stark_proof::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);
+
+        if print_gate_counts {
+            builder.print_gate_counts(0);
+        }
+
+        let data = builder.build::<C>();
+        let proof = data.prove(pw)?;
+        data.verify(proof)
+    }
+
+    fn init_logger() {
+        let _ = env_logger::builder().format_timestamp(None).try_init();
+    }
+}
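A worked example of the factorial trace above (illustration, not part of the patch): with `x0 = 1` the rows are `[1, 1], [2, 2], [6, 3], [24, 4], ...`, i.e. row `i` holds `[(i + 1)!, i + 1]`, so the first element of the last of `num_rows` rows is `num_rows!`, matching the `PI_INDEX_RES` comment. A plain-Rust sketch of the same recurrence:

    // Reproduce the FactorialStark trace over u64s: row i = [(i + 1)!, i + 1].
    fn factorial_rows(num_rows: usize) -> Vec<[u64; 2]> {
        let mut state = [1u64, 1u64];
        (0..num_rows)
            .map(|_| {
                let row = state;
                state = [state[0] * (state[1] + 1), state[1] + 1];
                row
            })
            .collect()
    }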
diff --git a/starky/src/permutation_stark.rs b/starky/src/permutation_stark.rs
new file mode 100644
index 00000000..b227847d
--- /dev/null
+++ b/starky/src/permutation_stark.rs
@@ -0,0 +1,253 @@
+use std::marker::PhantomData;
+
+use plonky2::field::extension_field::{Extendable, FieldExtension};
+use plonky2::field::packed_field::PackedField;
+use plonky2::field::polynomial::PolynomialValues;
+use plonky2::hash::hash_types::RichField;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+
+use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use crate::permutation::PermutationPair;
+use crate::stark::Stark;
+use crate::util::trace_rows_to_poly_values;
+use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
+
+/// Toy STARK system used for testing the permutation argument.
+/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition
+/// `x0' <- x1, x1' <- x0 + x1, i' <- i + 1, j' <- j + 1`.
+/// Note: The `i, j` columns are only used to test the permutation argument.
+#[derive(Copy, Clone)]
+struct SortingStark<F: RichField + Extendable<D>, const D: usize> {
+    num_rows: usize,
+    _phantom: PhantomData<F>,
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> SortingStark<F, D> {
+    // The first public input is `x0`.
+    const PI_INDEX_X0: usize = 0;
+    // The second public input is `x1`.
+    const PI_INDEX_X1: usize = 1;
+    // The third public input is the second element of the last row, which should be equal to
+    // the `num_rows`-th Fibonacci number.
+    const PI_INDEX_RES: usize = 2;
+
+    fn new(num_rows: usize) -> Self {
+        Self {
+            num_rows,
+            _phantom: PhantomData,
+        }
+    }
+
+    /// Generate the trace using `x0, x1, 0, 1` as initial state values.
+    fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
+        let mut trace_rows = (0..self.num_rows)
+            .scan([x0, x1, F::ZERO, F::ONE], |acc, _| {
+                let tmp = *acc;
+                acc[0] = tmp[1];
+                acc[1] = tmp[0] + tmp[1];
+                acc[2] = tmp[2] + F::ONE;
+                acc[3] = tmp[3] + F::ONE;
+                Some(tmp)
+            })
+            .collect::<Vec<_>>();
+        trace_rows[self.num_rows - 1][3] = F::ZERO; // So that columns 2 and 3 are permutations of one another.
+        trace_rows_to_poly_values(trace_rows)
+    }
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for SortingStark<F, D> {
+    const COLUMNS: usize = 4;
+    const PUBLIC_INPUTS: usize = 3;
+
+    fn eval_packed_generic<FE, P, const D2: usize>(
+        &self,
+        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
+        yield_constr: &mut ConstraintConsumer<P>,
+    ) where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>,
+    {
+        // Check public inputs.
+        yield_constr
+            .constraint_first_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_X0]);
+        yield_constr
+            .constraint_first_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_X1]);
+        yield_constr
+            .constraint_last_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_RES]);
+
+        // x0' <- x1
+        yield_constr.constraint_transition(vars.next_values[0] - vars.local_values[1]);
+        // x1' <- x0 + x1
+        yield_constr.constraint_transition(
+            vars.next_values[1] - vars.local_values[0] - vars.local_values[1],
+        );
+    }
+
+    fn eval_ext_recursively(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
+        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
+    ) {
+        // Check public inputs.
+        let pis_constraints = [
+            builder.sub_extension(vars.local_values[0], vars.public_inputs[Self::PI_INDEX_X0]),
+            builder.sub_extension(vars.local_values[1], vars.public_inputs[Self::PI_INDEX_X1]),
+            builder.sub_extension(vars.local_values[1], vars.public_inputs[Self::PI_INDEX_RES]),
+        ];
+        yield_constr.constraint_first_row(builder, pis_constraints[0]);
+        yield_constr.constraint_first_row(builder, pis_constraints[1]);
+        yield_constr.constraint_last_row(builder, pis_constraints[2]);
+
+        // x0' <- x1
+        let first_col_constraint = builder.sub_extension(vars.next_values[0], vars.local_values[1]);
+        yield_constr.constraint_transition(builder, first_col_constraint);
+        // x1' <- x0 + x1
+        let second_col_constraint = {
+            let tmp = builder.sub_extension(vars.next_values[1], vars.local_values[0]);
+            builder.sub_extension(tmp, vars.local_values[1])
+        };
+        yield_constr.constraint_transition(builder, second_col_constraint);
+    }
+
+    fn constraint_degree(&self) -> usize {
+        2
+    }
+
+    fn permutation_pairs(&self) -> Vec<PermutationPair> {
+        vec![PermutationPair::singletons(2, 3)]
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use anyhow::Result;
+    use plonky2::field::extension_field::Extendable;
+    use plonky2::field::field_types::Field;
+    use plonky2::hash::hash_types::RichField;
+    use plonky2::iop::witness::PartialWitness;
+    use plonky2::plonk::circuit_builder::CircuitBuilder;
+    use plonky2::plonk::circuit_data::CircuitConfig;
+    use plonky2::plonk::config::{
+        AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig,
+    };
+    use plonky2::util::timing::TimingTree;
+
+    use crate::config::StarkConfig;
+    use crate::permutation_stark::SortingStark;
+    use crate::proof::StarkProofWithPublicInputs;
+    use crate::prover::prove;
+    use crate::recursive_verifier::{
+        add_virtual_stark_proof_with_pis, recursively_verify_stark_proof,
+        set_stark_proof_with_pis_target,
+    };
+    use crate::stark::Stark;
+    use crate::stark_testing::test_stark_low_degree;
+    use crate::verifier::verify_stark_proof;
+
+    fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
+        (0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
+    }
+
+    #[test]
+    fn test_sorting_stark() -> Result<()> {
+        const D: usize = 2;
+        type C = PoseidonGoldilocksConfig;
+        type F = <C as GenericConfig<D>>::F;
+        type S = SortingStark<F, D>;
+
+        let config = StarkConfig::standard_fast_config();
+        let num_rows = 1 << 5;
+        let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
+        let stark = S::new(num_rows);
+        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
+        let proof = prove::<F, C, S, D>(
+            stark,
+            &config,
+            trace,
+            public_inputs,
+            &mut TimingTree::default(),
+        )?;
+
+        verify_stark_proof(stark, proof, &config)
+    }
+
+    #[test]
+    fn test_sorting_stark_degree() -> Result<()> {
+        const D: usize = 2;
+        type C = PoseidonGoldilocksConfig;
+        type F = <C as GenericConfig<D>>::F;
+        type S = SortingStark<F, D>;
+
+        let num_rows = 1 << 5;
+        let stark = S::new(num_rows);
+        test_stark_low_degree(stark)
+    }
+
+    #[test]
+    fn test_recursive_stark_verifier() -> Result<()> {
+        init_logger();
+        const D: usize = 2;
+        type C = PoseidonGoldilocksConfig;
+        type F = <C as GenericConfig<D>>::F;
+        type S = SortingStark<F, D>;
+
+        let config = StarkConfig::standard_fast_config();
+        let num_rows = 1 << 5;
+        let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
+        let stark = S::new(num_rows);
+        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
+        let proof = prove::<F, C, S, D>(
+            stark,
+            &config,
+            trace,
+            public_inputs,
+            &mut TimingTree::default(),
+        )?;
+        verify_stark_proof(stark, proof.clone(), &config)?;
+
+        recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
+    }
+
+    fn recursive_proof<
+        F: RichField + Extendable<D>,
+        C: GenericConfig<D, F = F>,
+        S: Stark<F, D> + Copy,
+        InnerC: GenericConfig<D, F = F>,
+        const D: usize,
+    >(
+        stark: S,
+        inner_proof: StarkProofWithPublicInputs<F, InnerC, D>,
+        inner_config: &StarkConfig,
+        print_gate_counts: bool,
+    ) -> Result<()>
+    where
+        InnerC::Hasher: AlgebraicHasher<F>,
+        [(); S::COLUMNS]:,
+        [(); S::PUBLIC_INPUTS]:,
+        [(); C::Hasher::HASH_SIZE]:,
+    {
+        let circuit_config = CircuitConfig::standard_recursion_config();
+        let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
+        let mut pw = PartialWitness::new();
+        let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
+        let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits);
+        set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof);
+
+        recursively_verify_stark_proof::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);
+
+        if print_gate_counts {
+            builder.print_gate_counts(0);
+        }
+
+        let data = builder.build::<C>();
+        let proof = data.prove(pw)?;
+        data.verify(proof)
+    }
+
+    fn init_logger() {
+        let _ = env_logger::builder().format_timestamp(None).try_init();
+    }
+}
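Why `PermutationPair::singletons(2, 3)` holds for SortingStark's trace (illustration, not part of the patch): column 2 runs `0, 1, ..., n-1` while column 3 runs `1, 2, ..., n-1, 0` after `generate_trace` zeroes the last entry, so the two columns are multiset-equal, which is exactly what the permutation argument enforces. A plain-Rust sanity sketch:

    // Check that columns 2 and 3 of an n-row SortingStark trace are permutations
    // of one another.
    fn columns_are_permutations(n: u64) -> bool {
        let mut col2: Vec<u64> = (0..n).collect();
        let mut col3: Vec<u64> = (1..n).chain(std::iter::once(0)).collect();
        col2.sort_unstable();
        col3.sort_unstable();
        col2 == col3
    }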