diff --git a/.github/workflows/continuous-integration-workflow.yml b/.github/workflows/continuous-integration-workflow.yml
index 0e3e5977..03924ba6 100644
--- a/.github/workflows/continuous-integration-workflow.yml
+++ b/.github/workflows/continuous-integration-workflow.yml
@@ -24,7 +24,7 @@ jobs:
         uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: nightly-2022-11-23
+          toolchain: nightly
           override: true
       - name: rust-cache
@@ -61,7 +61,7 @@ jobs:
         uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: nightly-2022-11-23
+          toolchain: nightly
           override: true
           components: rustfmt, clippy
diff --git a/evm/src/cross_table_lookup.rs b/evm/src/cross_table_lookup.rs
index 1b184a50..15e186aa 100644
--- a/evm/src/cross_table_lookup.rs
+++ b/evm/src/cross_table_lookup.rs
@@ -217,7 +217,7 @@ impl CtlData {
     }
 }
 
-pub(crate) fn cross_table_lookup_data, const D: usize>(
+pub(crate) fn cross_table_lookup_data(
     trace_poly_values: &[Vec>; NUM_TABLES],
     cross_table_lookups: &[CrossTableLookup],
     ctl_challenges: &GrandProductChallengeSet,
@@ -371,7 +371,7 @@ impl<'a, F: RichField + Extendable, const D: usize>
     }
 }
 
-pub(crate) fn eval_cross_table_lookup_checks(
+pub(crate) fn eval_cross_table_lookup_checks(
     vars: StarkEvaluationVars,
     ctl_vars: &[CtlCheckVars],
     consumer: &mut ConstraintConsumer,
@@ -379,7 +379,6 @@ pub(crate) fn eval_cross_table_lookup_checks
     FE: FieldExtension,
     P: PackedField,
-    C: GenericConfig,
     S: Stark,
 {
     for lookup_vars in ctl_vars {
@@ -540,11 +539,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
     }
 }
 
-pub(crate) fn verify_cross_table_lookups<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    const D: usize,
->(
+pub(crate) fn verify_cross_table_lookups, const D: usize>(
     cross_table_lookups: &[CrossTableLookup],
     ctl_zs_lasts: [Vec; NUM_TABLES],
     config: &StarkConfig,
@@ -573,11 +568,7 @@ pub(crate) fn verify_cross_table_lookups<
     Ok(())
 }
 
-pub(crate) fn verify_cross_table_lookups_circuit<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    const D: usize,
->(
+pub(crate) fn verify_cross_table_lookups_circuit, const D: usize>(
     builder: &mut CircuitBuilder,
     cross_table_lookups: Vec>,
     ctl_zs_lasts: [Vec; NUM_TABLES],
diff --git a/evm/src/fixed_recursive_verifier.rs b/evm/src/fixed_recursive_verifier.rs
index 344b6d1c..e662874f 100644
--- a/evm/src/fixed_recursive_verifier.rs
+++ b/evm/src/fixed_recursive_verifier.rs
@@ -228,7 +228,7 @@ where
         }
 
         // Verify the CTL checks.
-        verify_cross_table_lookups_circuit::(
+        verify_cross_table_lookups_circuit::(
            &mut builder,
            all_cross_table_lookups(),
            pis.map(|p| p.ctl_zs_last),
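Nearly every Rust hunk in this diff applies the same cleanup: a `C: GenericConfig<D>` type parameter that is not used by a function's arguments, return type, or body is removed from the signature, so callers no longer have to name a config type just to satisfy the compiler. Below is a minimal, self-contained sketch of the pattern under made-up names (`Config`, `DummyConfig`, `sum_before`, `sum_after` are illustrative, not plonky2 APIs):

```rust
use std::ops::Add;

// A stand-in for a config trait such as plonky2's `GenericConfig`.
trait Config {}
struct DummyConfig;
impl Config for DummyConfig {}

// Before: `C` is declared and bounded but never used by the arguments, the
// return type, or the body, so it only burdens the callers.
fn sum_before<F: Copy + Add<Output = F>, C: Config>(values: &[F], zero: F) -> F {
    values.iter().copied().fold(zero, |acc, x| acc + x)
}

// After: the unused parameter is gone; the behaviour is identical.
fn sum_after<F: Copy + Add<Output = F>>(values: &[F], zero: F) -> F {
    values.iter().copied().fold(zero, |acc, x| acc + x)
}

fn main() {
    let xs = [1u64, 2, 3];
    // `C` cannot be inferred from the arguments, so the old signature forces a turbofish...
    let a = sum_before::<u64, DummyConfig>(&xs, 0);
    // ...while the new one needs no type annotations at all.
    let b = sum_after(&xs, 0);
    assert_eq!(a, b);
}
```

Once the unused parameter is gone, the remaining parameters can usually be inferred from the argument types, which is why call sites such as `add_virtual_proof_with_pis`, `compress`, and `decompress` lose their turbofish entirely later in this diff.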
diff --git a/evm/src/permutation.rs b/evm/src/permutation.rs
index 64223ad7..a92774cb 100644
--- a/evm/src/permutation.rs
+++ b/evm/src/permutation.rs
@@ -13,7 +13,7 @@ use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::iop::target::Target;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
-use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
+use plonky2::plonk::config::{AlgebraicHasher, Hasher};
 use plonky2::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit};
 use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget};
 use plonky2_maybe_rayon::*;
@@ -89,7 +89,7 @@ pub(crate) struct GrandProductChallengeSet {
 }
 
 /// Compute all Z polynomials (for permutation arguments).
-pub(crate) fn compute_permutation_z_polys(
+pub(crate) fn compute_permutation_z_polys(
     stark: &S,
     config: &StarkConfig,
     trace_poly_values: &[PolynomialValues],
@@ -97,7 +97,6 @@ pub(crate) fn compute_permutation_z_polys(
 ) -> Vec>
 where
     F: RichField + Extendable,
-    C: GenericConfig,
     S: Stark,
 {
     let permutation_pairs = stark.permutation_pairs();
@@ -286,7 +285,7 @@ where
     pub(crate) permutation_challenge_sets: Vec>,
 }
 
-pub(crate) fn eval_permutation_checks(
+pub(crate) fn eval_permutation_checks(
     stark: &S,
     config: &StarkConfig,
     vars: StarkEvaluationVars,
@@ -296,7 +295,6 @@ pub(crate) fn eval_permutation_checks
     FE: FieldExtension,
     P: PackedField,
-    C: GenericConfig,
     S: Stark,
 {
     let PermutationCheckVars {
diff --git a/evm/src/prover.rs b/evm/src/prover.rs
index b4ea0d90..c801950a 100644
--- a/evm/src/prover.rs
+++ b/evm/src/prover.rs
@@ -124,7 +124,7 @@ where
     let ctl_data_per_table = timed!(
         timing,
         "compute CTL data",
-        cross_table_lookup_data::(
+        cross_table_lookup_data::(
             &trace_poly_values,
             &all_stark.cross_table_lookups,
             &ctl_challenges,
@@ -286,7 +286,7 @@ where
         timed!(
             timing,
             "compute permutation Z(x) polys",
-            compute_permutation_z_polys::(stark, config, trace_poly_values, challenges)
+            compute_permutation_z_polys::(stark, config, trace_poly_values, challenges)
         )
     });
     let num_permutation_zs = permutation_zs.as_ref().map(|v| v.len()).unwrap_or(0);
@@ -533,7 +533,7 @@ where
             filter_column: &zs_columns.filter_column,
         })
         .collect::>();
-    eval_vanishing_poly::(
+    eval_vanishing_poly::(
         stark,
         config,
         vars,
@@ -550,7 +550,7 @@ where
 
     let num_challenges = alphas.len();
 
-    (0..P::WIDTH).into_iter().map(move |i| {
+    (0..P::WIDTH).map(move |i| {
         (0..num_challenges)
             .map(|j| constraints_evals[j].as_slice()[i])
             .collect()
@@ -651,7 +651,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
             filter_column: &zs_columns.filter_column,
         })
         .collect::>();
-        eval_vanishing_poly::(
+        eval_vanishing_poly::(
             stark,
             config,
             vars,
diff --git a/evm/src/recursive_verifier.rs b/evm/src/recursive_verifier.rs
index 1fba88e3..d3018ba4 100644
--- a/evm/src/recursive_verifier.rs
+++ b/evm/src/recursive_verifier.rs
@@ -132,7 +132,7 @@ impl, C: GenericConfig, const D: usize>
     }
 
     // Verify the CTL checks.
-    verify_cross_table_lookups::(
+    verify_cross_table_lookups::(
         &cross_table_lookups,
         pis.map(|p| p.ctl_zs_last),
         inner_config,
@@ -393,7 +393,7 @@ fn verify_stark_proof_with_challenges_circuit<
     with_context!(
         builder,
         "evaluate vanishing polynomial",
-        eval_vanishing_poly_circuit::(
+        eval_vanishing_poly_circuit::(
             builder,
             stark,
             inner_config,
diff --git a/evm/src/vanishing_poly.rs b/evm/src/vanishing_poly.rs
index e776fa5c..3a2da78c 100644
--- a/evm/src/vanishing_poly.rs
+++ b/evm/src/vanishing_poly.rs
@@ -2,7 +2,6 @@ use plonky2::field::extension::{Extendable, FieldExtension};
 use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
-use plonky2::plonk::config::GenericConfig;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
@@ -17,7 +16,7 @@ use crate::permutation::{
 use crate::stark::Stark;
 use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
 
-pub(crate) fn eval_vanishing_poly(
+pub(crate) fn eval_vanishing_poly(
     stark: &S,
     config: &StarkConfig,
     vars: StarkEvaluationVars,
@@ -28,12 +27,11 @@ pub(crate) fn eval_vanishing_poly
     FE: FieldExtension,
     P: PackedField,
-    C: GenericConfig,
     S: Stark,
 {
     stark.eval_packed_generic(vars, consumer);
     if let Some(permutation_vars) = permutation_vars {
-        eval_permutation_checks::(
+        eval_permutation_checks::(
             stark,
             config,
             vars,
@@ -41,10 +39,10 @@ pub(crate) fn eval_vanishing_poly
-    eval_cross_table_lookup_checks::(vars, ctl_vars, consumer);
+    eval_cross_table_lookup_checks::(vars, ctl_vars, consumer);
 }
 
-pub(crate) fn eval_vanishing_poly_circuit(
+pub(crate) fn eval_vanishing_poly_circuit(
     builder: &mut CircuitBuilder,
     stark: &S,
     config: &StarkConfig,
@@ -54,7 +52,6 @@ pub(crate) fn eval_vanishing_poly_circuit(
     consumer: &mut RecursiveConstraintConsumer,
 ) where
     F: RichField + Extendable,
-    C: GenericConfig,
     S: Stark,
     [(); S::COLUMNS]:,
 {
diff --git a/evm/src/verifier.rs b/evm/src/verifier.rs
index c6d0373e..b4b2f6bd 100644
--- a/evm/src/verifier.rs
+++ b/evm/src/verifier.rs
@@ -97,7 +97,7 @@ where
         config,
     )?;
 
-    verify_cross_table_lookups::(
+    verify_cross_table_lookups::(
         cross_table_lookups,
         all_proof.stark_proofs.map(|p| p.proof.openings.ctl_zs_last),
         config,
@@ -155,7 +155,7 @@ where
         next_zs: permutation_ctl_zs_next[..num_permutation_zs].to_vec(),
         permutation_challenge_sets: challenges.permutation_challenge_sets.clone().unwrap(),
     });
-    eval_vanishing_poly::(
+    eval_vanishing_poly::(
         stark,
         config,
         vars,
diff --git a/plonky2/benches/field_arithmetic.rs b/plonky2/benches/field_arithmetic.rs
index a3b66fa1..9db45a50 100644
--- a/plonky2/benches/field_arithmetic.rs
+++ b/plonky2/benches/field_arithmetic.rs
@@ -119,7 +119,7 @@ pub(crate) fn bench_field(c: &mut Criterion) {
         &format!("batch_multiplicative_inverse-tiny<{}>", type_name::()),
         |b| {
             b.iter_batched(
-                || (0..2).into_iter().map(|_| F::rand()).collect::>(),
+                || (0..2).map(|_| F::rand()).collect::>(),
                 |x| F::batch_multiplicative_inverse(&x),
                 BatchSize::SmallInput,
             )
@@ -130,7 +130,7 @@ pub(crate) fn bench_field(c: &mut Criterion) {
         &format!("batch_multiplicative_inverse-small<{}>", type_name::()),
         |b| {
             b.iter_batched(
-                || (0..4).into_iter().map(|_| F::rand()).collect::>(),
+                || (0..4).map(|_| F::rand()).collect::>(),
                 |x| F::batch_multiplicative_inverse(&x),
                 BatchSize::SmallInput,
             )
@@ -141,7 +141,7 @@ pub(crate) fn bench_field(c: &mut Criterion) {
         &format!("batch_multiplicative_inverse-medium<{}>", type_name::()),
         |b| {
             b.iter_batched(
-                || (0..16).into_iter().map(|_| F::rand()).collect::>(),
+                || (0..16).map(|_| F::rand()).collect::>(),
                 |x| F::batch_multiplicative_inverse(&x),
                 BatchSize::SmallInput,
             )
@@ -152,7 +152,7 @@ pub(crate) fn bench_field(c: &mut Criterion) {
         &format!("batch_multiplicative_inverse-large<{}>", type_name::()),
         |b| {
             b.iter_batched(
-                || (0..256).into_iter().map(|_| F::rand()).collect::>(),
+                || (0..256).map(|_| F::rand()).collect::>(),
                 |x| F::batch_multiplicative_inverse(&x),
                 BatchSize::LargeInput,
             )
@@ -163,12 +163,7 @@ pub(crate) fn bench_field(c: &mut Criterion) {
         &format!("batch_multiplicative_inverse-huge<{}>", type_name::()),
         |b| {
             b.iter_batched(
-                || {
-                    (0..65536)
-                        .into_iter()
-                        .map(|_| F::rand())
-                        .collect::>()
-                },
+                || (0..65536).map(|_| F::rand()).collect::>(),
                 |x| F::batch_multiplicative_inverse(&x),
                 BatchSize::LargeInput,
             )
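The benchmark, prover, and Merkle-tree hunks are a second, independent cleanup: a `Range` is already an `Iterator`, so calling `.into_iter()` before `.map(...)` is a no-op (clippy's `useless_conversion` lint flags this on recent toolchains). A small standalone sketch; `pseudo_rand` is a made-up stand-in for `F::rand()`, which would require the plonky2 crates:

```rust
// Deterministic stand-in for a random field element, to keep the example self-contained.
fn pseudo_rand(i: usize) -> u64 {
    (i as u64)
        .wrapping_mul(0x9E37_79B9_7F4A_7C15)
        .wrapping_add(1)
}

fn main() {
    // `0..4` is already an iterator, so both expressions produce exactly the same values;
    // the `.into_iter()` call just converts the range into itself.
    let redundant: Vec<u64> = (0..4).into_iter().map(pseudo_rand).collect();
    let direct: Vec<u64> = (0..4).map(pseudo_rand).collect();
    assert_eq!(redundant, direct);
}
```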
diff --git a/plonky2/examples/bench_recursion.rs b/plonky2/examples/bench_recursion.rs
index 5a1196e1..9b06b435 100644
--- a/plonky2/examples/bench_recursion.rs
+++ b/plonky2/examples/bench_recursion.rs
@@ -103,7 +103,7 @@ where
 {
     let (inner_proof, inner_vd, inner_cd) = inner;
     let mut builder = CircuitBuilder::::new(config.clone());
-    let pt = builder.add_virtual_proof_with_pis::(inner_cd);
+    let pt = builder.add_virtual_proof_with_pis(inner_cd);
     let inner_data = builder.add_virtual_verifier_data(inner_cd.config.fri_config.cap_height);
 
diff --git a/plonky2/src/fri/challenges.rs b/plonky2/src/fri/challenges.rs
index 7a184504..41b00a7e 100644
--- a/plonky2/src/fri/challenges.rs
+++ b/plonky2/src/fri/challenges.rs
@@ -73,7 +73,7 @@ impl, H: AlgebraicHasher, const D: usize>
         }
     }
 
-    pub fn fri_challenges>(
+    pub fn fri_challenges(
         &mut self,
         builder: &mut CircuitBuilder,
         commit_phase_merkle_caps: &[MerkleCapTarget],
diff --git a/plonky2/src/fri/proof.rs b/plonky2/src/fri/proof.rs
index f841b274..0e0d7df3 100644
--- a/plonky2/src/fri/proof.rs
+++ b/plonky2/src/fri/proof.rs
@@ -15,7 +15,7 @@ use crate::hash::merkle_tree::MerkleCap;
 use crate::hash::path_compression::{compress_merkle_proofs, decompress_merkle_proofs};
 use crate::iop::ext_target::ExtensionTarget;
 use crate::iop::target::Target;
-use crate::plonk::config::{GenericConfig, Hasher};
+use crate::plonk::config::Hasher;
 use crate::plonk::plonk_common::salt_size;
 use crate::plonk::proof::{FriInferredElements, ProofChallenges};
 
@@ -135,11 +135,7 @@ pub struct CompressedFriProof, H: Hasher, const
 impl, H: Hasher, const D: usize> FriProof {
     /// Compress all the Merkle paths in the FRI proof and remove duplicate indices.
-    pub fn compress>(
-        self,
-        indices: &[usize],
-        params: &FriParams,
-    ) -> CompressedFriProof {
+    pub fn compress(self, indices: &[usize], params: &FriParams) -> CompressedFriProof {
         let FriProof {
             commit_phase_merkle_caps,
             query_round_proofs,
@@ -241,7 +237,7 @@ impl, H: Hasher, const D: usize> FriProof
 impl, H: Hasher, const D: usize> CompressedFriProof {
     /// Decompress all the Merkle paths in the FRI proof and reinsert duplicate indices.
-    pub(crate) fn decompress>(
+    pub(crate) fn decompress(
         self,
         challenges: &ProofChallenges,
         fri_inferred_elements: FriInferredElements,
diff --git a/plonky2/src/fri/recursive_verifier.rs b/plonky2/src/fri/recursive_verifier.rs
index ac74f50f..8e9329d5 100644
--- a/plonky2/src/fri/recursive_verifier.rs
+++ b/plonky2/src/fri/recursive_verifier.rs
@@ -25,7 +25,7 @@ use crate::with_context;
 impl, const D: usize> CircuitBuilder {
     /// Computes P'(x^arity) from {P(x*g^i)}_(i=0..arity), where g is a `arity`-th root of unity
     /// and P' is the FRI reduced polynomial.
-    fn compute_evaluation>(
+    fn compute_evaluation(
         &mut self,
         x: Target,
         x_index_within_coset_bits: &[BoolTarget],
@@ -58,7 +58,7 @@ impl, const D: usize> CircuitBuilder {
     /// Make sure we have enough wires and routed wires to do the FRI checks efficiently. This check
     /// isn't required -- without it we'd get errors elsewhere in the stack -- but just gives more
     /// helpful errors.
-    fn check_recursion_config>(&self, max_fri_arity_bits: usize) {
+    fn check_recursion_config(&self, max_fri_arity_bits: usize) {
         let random_access = RandomAccessGate::::new_from_config(
             &self.config,
             max_fri_arity_bits.max(self.config.fri_config.cap_height),
@@ -91,11 +91,7 @@ impl, const D: usize> CircuitBuilder {
         );
     }
 
-    fn fri_verify_proof_of_work>(
-        &mut self,
-        fri_pow_response: Target,
-        config: &FriConfig,
-    ) {
+    fn fri_verify_proof_of_work(&mut self, fri_pow_response: Target, config: &FriConfig) {
         self.assert_leading_zeros(
             fri_pow_response,
             config.proof_of_work_bits + (64 - F::order().bits()) as u32,
@@ -114,7 +110,7 @@ impl, const D: usize> CircuitBuilder {
         C::Hasher: AlgebraicHasher,
     {
         if let Some(max_arity_bits) = params.max_arity_bits() {
-            self.check_recursion_config::(max_arity_bits);
+            self.check_recursion_config(max_arity_bits);
         }
 
         debug_assert_eq!(
@@ -129,7 +125,7 @@ impl, const D: usize> CircuitBuilder {
         with_context!(
             self,
             "check PoW",
-            self.fri_verify_proof_of_work::(challenges.fri_pow_response, &params.config)
+            self.fri_verify_proof_of_work(challenges.fri_pow_response, &params.config)
         );
 
         // Check that parameters are coherent.
@@ -206,7 +202,7 @@ impl, const D: usize> CircuitBuilder {
         }
     }
 
-    fn fri_combine_initial>(
+    fn fri_combine_initial(
         &mut self,
         instance: &FriInstanceInfoTarget,
         proof: &FriInitialTreeProofTarget,
@@ -298,7 +294,7 @@ impl, const D: usize> CircuitBuilder {
         let mut old_eval = with_context!(
             self,
             "combine initial oracles",
-            self.fri_combine_initial::(
+            self.fri_combine_initial(
                 instance,
                 &round_proof.initial_trees_proof,
                 challenges.fri_alpha,
@@ -324,7 +320,7 @@ impl, const D: usize> CircuitBuilder {
         old_eval = with_context!(
             self,
             "infer evaluation using interpolation",
-            self.compute_evaluation::(
+            self.compute_evaluation(
                 subgroup_x,
                 x_index_within_coset_bits,
                 arity_bits,
diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs
index e0c8e79d..e884554f 100644
--- a/plonky2/src/hash/merkle_tree.rs
+++ b/plonky2/src/hash/merkle_tree.rs
@@ -184,7 +184,6 @@ impl> MerkleTree {
         // Mask out high bits to get the index within the sub-tree.
         let mut pair_index = leaf_index & ((1 << num_layers) - 1);
         let siblings = (0..num_layers)
-            .into_iter()
             .map(|i| {
                 let parity = pair_index & 1;
                 pair_index >>= 1;
diff --git a/plonky2/src/iop/ext_target.rs b/plonky2/src/iop/ext_target.rs
index c9929b21..08bdcba0 100644
--- a/plonky2/src/iop/ext_target.rs
+++ b/plonky2/src/iop/ext_target.rs
@@ -139,9 +139,7 @@ pub fn flatten_target(l: &[ExtensionTarget]) -> Vec {
 }
 
 /// Batch every D-sized chunks into extension targets.
-pub fn unflatten_target, const D: usize>(
-    l: &[Target],
-) -> Vec> {
+pub fn unflatten_target(l: &[Target]) -> Vec> {
     debug_assert_eq!(l.len() % D, 0);
     l.chunks_exact(D)
         .map(|c| c.to_vec().try_into().unwrap())
diff --git a/plonky2/src/plonk/get_challenges.rs b/plonky2/src/plonk/get_challenges.rs
index 240d9047..1f6bd7ae 100644
--- a/plonky2/src/plonk/get_challenges.rs
+++ b/plonky2/src/plonk/get_challenges.rs
@@ -277,7 +277,7 @@ impl, const D: usize> CircuitBuilder {
             plonk_gammas,
             plonk_alphas,
             plonk_zeta,
-            fri_challenges: challenger.fri_challenges::(
+            fri_challenges: challenger.fri_challenges(
                 self,
                 commit_phase_merkle_caps,
                 final_poly,
diff --git a/plonky2/src/plonk/proof.rs b/plonky2/src/plonk/proof.rs
index ea616e8f..67884e01 100644
--- a/plonky2/src/plonk/proof.rs
+++ b/plonky2/src/plonk/proof.rs
@@ -65,7 +65,7 @@ impl, C: GenericConfig, const D: usize> P
             plonk_zs_partial_products_cap,
             quotient_polys_cap,
             openings,
-            opening_proof: opening_proof.compress::(indices, params),
+            opening_proof: opening_proof.compress(indices, params),
         }
     }
 }
@@ -163,7 +163,7 @@ impl, C: GenericConfig, const D: usize>
             plonk_zs_partial_products_cap,
             quotient_polys_cap,
             openings,
-            opening_proof: opening_proof.decompress::(challenges, fri_inferred_elements, params),
+            opening_proof: opening_proof.decompress(challenges, fri_inferred_elements, params),
         }
     }
 }
diff --git a/plonky2/src/plonk/prover.rs b/plonky2/src/plonk/prover.rs
index f8f66ea1..f2b61226 100644
--- a/plonky2/src/plonk/prover.rs
+++ b/plonky2/src/plonk/prover.rs
@@ -420,7 +420,7 @@ fn compute_quotient_polys<
             public_inputs_hash,
         );
 
-        let mut quotient_values_batch = eval_vanishing_poly_base_batch::(
+        let mut quotient_values_batch = eval_vanishing_poly_base_batch::(
             common_data,
             &indices_batch,
             &shifted_xs_batch,
diff --git a/plonky2/src/plonk/vanishing_poly.rs b/plonky2/src/plonk/vanishing_poly.rs
index 30473650..d1d06403 100644
--- a/plonky2/src/plonk/vanishing_poly.rs
+++ b/plonky2/src/plonk/vanishing_poly.rs
@@ -10,7 +10,6 @@ use crate::iop::ext_target::ExtensionTarget;
 use crate::iop::target::Target;
 use crate::plonk::circuit_builder::CircuitBuilder;
 use crate::plonk::circuit_data::CommonCircuitData;
-use crate::plonk::config::GenericConfig;
 use crate::plonk::plonk_common;
 use crate::plonk::plonk_common::eval_l_0_circuit;
 use crate::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBaseBatch};
@@ -22,11 +21,7 @@ use crate::with_context;
 /// Evaluate the vanishing polynomial at `x`. In this context, the vanishing polynomial is a random
 /// linear combination of gate constraints, plus some other terms relating to the permutation
 /// argument. All such terms should vanish on `H`.
-pub(crate) fn eval_vanishing_poly<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    const D: usize,
->(
+pub(crate) fn eval_vanishing_poly, const D: usize>(
     common_data: &CommonCircuitData,
     x: F::Extension,
     vars: EvaluationVars,
@@ -41,7 +36,7 @@ pub(crate) fn eval_vanishing_poly<
     let max_degree = common_data.quotient_degree_factor;
     let num_prods = common_data.num_partial_products;
 
-    let constraint_terms = evaluate_gate_constraints::(common_data, vars);
+    let constraint_terms = evaluate_gate_constraints::(common_data, vars);
 
     // The L_0(x) (Z(x) - 1) vanishing terms.
     let mut vanishing_z_1_terms = Vec::new();
@@ -97,11 +92,7 @@ pub(crate) fn eval_vanishing_poly<
 }
 
 /// Like `eval_vanishing_poly`, but specialized for base field points. Batched.
-pub(crate) fn eval_vanishing_poly_base_batch<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    const D: usize,
->(
+pub(crate) fn eval_vanishing_poly_base_batch, const D: usize>(
     common_data: &CommonCircuitData,
     indices_batch: &[usize],
     xs_batch: &[F],
@@ -129,7 +120,7 @@ pub(crate) fn eval_vanishing_poly_base_batch<
     let num_gate_constraints = common_data.num_gate_constraints;
 
     let constraint_terms_batch =
-        evaluate_gate_constraints_base_batch::(common_data, vars_batch);
+        evaluate_gate_constraints_base_batch::(common_data, vars_batch);
     debug_assert!(constraint_terms_batch.len() == n * num_gate_constraints);
 
     let num_challenges = common_data.config.num_challenges;
@@ -208,11 +199,7 @@ pub(crate) fn eval_vanishing_poly_base_batch<
 /// `num_gate_constraints` is the largest number of constraints imposed by any gate. It is not
 /// strictly necessary, but it helps performance by ensuring that we allocate a vector with exactly
 /// the capacity that we need.
-pub fn evaluate_gate_constraints<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    const D: usize,
->(
+pub fn evaluate_gate_constraints, const D: usize>(
     common_data: &CommonCircuitData,
     vars: EvaluationVars,
 ) -> Vec {
@@ -242,11 +229,7 @@ pub fn evaluate_gate_constraints<
 /// Returns a vector of `num_gate_constraints * vars_batch.len()` field elements. The constraints
 /// corresponding to `vars_batch[i]` are found in `result[i], result[vars_batch.len() + i],
 /// result[2 * vars_batch.len() + i], ...`.
-pub fn evaluate_gate_constraints_base_batch<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    const D: usize,
->(
+pub fn evaluate_gate_constraints_base_batch, const D: usize>(
     common_data: &CommonCircuitData,
     vars_batch: EvaluationVarsBaseBatch,
 ) -> Vec {
@@ -273,11 +256,7 @@ pub fn evaluate_gate_constraints_base_batch<
     constraints_batch
 }
 
-pub fn evaluate_gate_constraints_circuit<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    const D: usize,
->(
+pub fn evaluate_gate_constraints_circuit, const D: usize>(
     builder: &mut CircuitBuilder,
     common_data: &CommonCircuitData,
     vars: EvaluationTargets,
@@ -308,11 +287,7 @@ pub fn evaluate_gate_constraints_circuit<
 ///
 /// Assumes `x != 1`; if `x` could be 1 then this is unsound. This is fine if `x` is a random
 /// variable drawn from a sufficiently large domain.
-pub(crate) fn eval_vanishing_poly_circuit<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    const D: usize,
->(
+pub(crate) fn eval_vanishing_poly_circuit, const D: usize>(
     builder: &mut CircuitBuilder,
     common_data: &CommonCircuitData,
     x: ExtensionTarget,
@@ -332,7 +307,7 @@ pub(crate) fn eval_vanishing_poly_circuit<
     let constraint_terms = with_context!(
         builder,
         "evaluate gate constraints",
-        evaluate_gate_constraints_circuit::(builder, common_data, vars,)
+        evaluate_gate_constraints_circuit::(builder, common_data, vars,)
     );
 
     // The L_0(x) (Z(x) - 1) vanishing terms.
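One reason parameters like `C: GenericConfig<D>` linger in signatures such as `eval_vanishing_poly` above is that Rust only rejects unused type parameters on type definitions, not on functions, so they disappear only when someone removes them by hand. A standalone illustration of that asymmetry (not plonky2 code; `Tagged` and `ignores_its_parameter` are made-up names):

```rust
use std::marker::PhantomData;

// On a type definition, an unused parameter is a hard error (E0392) unless it is
// anchored with `PhantomData`, so it cannot go unnoticed:
struct Tagged<T> {
    value: u64,
    _marker: PhantomData<T>,
}

// On a function, an unused type parameter compiles without so much as a warning,
// which is how bounds like `C: GenericConfig<D>` can outlive the code that needed them:
fn ignores_its_parameter<T>(x: u64) -> u64 {
    x + 1
}

fn main() {
    let t = Tagged::<&str> {
        value: 7,
        _marker: PhantomData,
    };
    // The cost surfaces only at call sites, which must spell out a type nobody uses:
    let y = ignores_its_parameter::<String>(t.value);
    assert_eq!(y, 8);
}
```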
diff --git a/plonky2/src/plonk/verifier.rs b/plonky2/src/plonk/verifier.rs
index 893720c6..7e68e59e 100644
--- a/plonky2/src/plonk/verifier.rs
+++ b/plonky2/src/plonk/verifier.rs
@@ -59,7 +59,7 @@ pub(crate) fn verify_with_challenges<
     let partial_products = &proof.openings.partial_products;
 
     // Evaluate the vanishing polynomial at our challenge point, zeta.
-    let vanishing_polys_zeta = eval_vanishing_poly::(
+    let vanishing_polys_zeta = eval_vanishing_poly::(
         common_data,
         challenges.plonk_zeta,
         vars,
diff --git a/plonky2/src/recursion/conditional_recursive_verifier.rs b/plonky2/src/recursion/conditional_recursive_verifier.rs
index 2596e1f0..6331118b 100644
--- a/plonky2/src/recursion/conditional_recursive_verifier.rs
+++ b/plonky2/src/recursion/conditional_recursive_verifier.rs
@@ -374,9 +374,9 @@ mod tests {
         // Conditionally verify the two proofs.
         let mut builder = CircuitBuilder::::new(config);
         let mut pw = PartialWitness::new();
-        let pt = builder.add_virtual_proof_with_pis::(&data.common);
+        let pt = builder.add_virtual_proof_with_pis(&data.common);
         pw.set_proof_with_pis_target(&pt, &proof);
-        let dummy_pt = builder.add_virtual_proof_with_pis::(&data.common);
+        let dummy_pt = builder.add_virtual_proof_with_pis(&data.common);
         pw.set_proof_with_pis_target::(&dummy_pt, &dummy_proof);
         let inner_data =
             builder.add_virtual_verifier_data(data.common.config.fri_config.cap_height);
diff --git a/plonky2/src/recursion/cyclic_recursion.rs b/plonky2/src/recursion/cyclic_recursion.rs
index 656ff3b9..9b305626 100644
--- a/plonky2/src/recursion/cyclic_recursion.rs
+++ b/plonky2/src/recursion/cyclic_recursion.rs
@@ -40,7 +40,7 @@ impl, const D: usize> VerifierOnlyCircuitData {
 }
 
 impl VerifierCircuitTarget {
-    fn from_slice, C: GenericConfig, const D: usize>(
+    fn from_slice, const D: usize>(
         slice: &[Target],
         common_data: &CommonCircuitData,
     ) -> Result {
@@ -101,7 +101,7 @@ impl, const D: usize> CircuitBuilder {
             self.goal_common_data = Some(common_data.clone());
         }
 
-        let inner_cyclic_pis = VerifierCircuitTarget::from_slice::(
+        let inner_cyclic_pis = VerifierCircuitTarget::from_slice::(
             &cyclic_proof_with_pis.public_inputs,
             common_data,
         )?;
@@ -207,7 +207,7 @@ mod tests {
         let data = builder.build::();
 
         let config = CircuitConfig::standard_recursion_config();
         let mut builder = CircuitBuilder::::new(config);
-        let proof = builder.add_virtual_proof_with_pis::(&data.common);
+        let proof = builder.add_virtual_proof_with_pis(&data.common);
         let verifier_data =
             builder.add_virtual_verifier_data(data.common.config.fri_config.cap_height);
         builder.verify_proof::(&proof, &verifier_data, &data.common);
@@ -215,7 +215,7 @@ mod tests {
         let config = CircuitConfig::standard_recursion_config();
         let mut builder = CircuitBuilder::::new(config);
-        let proof = builder.add_virtual_proof_with_pis::(&data.common);
+        let proof = builder.add_virtual_proof_with_pis(&data.common);
         let verifier_data =
             builder.add_virtual_verifier_data(data.common.config.fri_config.cap_height);
         builder.verify_proof::(&proof, &verifier_data, &data.common);
@@ -257,7 +257,7 @@ mod tests {
         let condition = builder.add_virtual_bool_target_safe();
 
         // Unpack inner proof's public inputs.
-        let inner_cyclic_proof_with_pis = builder.add_virtual_proof_with_pis::(&common_data);
+        let inner_cyclic_proof_with_pis = builder.add_virtual_proof_with_pis(&common_data);
         let inner_cyclic_pis = &inner_cyclic_proof_with_pis.public_inputs;
         let inner_cyclic_initial_hash = HashOutTarget::try_from(&inner_cyclic_pis[0..4]).unwrap();
         let inner_cyclic_latest_hash = HashOutTarget::try_from(&inner_cyclic_pis[4..8]).unwrap();
diff --git a/plonky2/src/recursion/dummy_circuit.rs b/plonky2/src/recursion/dummy_circuit.rs
index c8b98a96..53fa0bb0 100644
--- a/plonky2/src/recursion/dummy_circuit.rs
+++ b/plonky2/src/recursion/dummy_circuit.rs
@@ -113,7 +113,7 @@ impl, const D: usize> CircuitBuilder {
     {
         let dummy_circuit = dummy_circuit::(common_data);
         let dummy_proof_with_pis = dummy_proof(&dummy_circuit, HashMap::new())?;
-        let dummy_proof_with_pis_target = self.add_virtual_proof_with_pis::(common_data);
+        let dummy_proof_with_pis_target = self.add_virtual_proof_with_pis(common_data);
         let dummy_verifier_data_target =
             self.add_virtual_verifier_data(self.config.fri_config.cap_height);
 
diff --git a/plonky2/src/recursion/recursive_verifier.rs b/plonky2/src/recursion/recursive_verifier.rs
index 9aafb1f5..91beb049 100644
--- a/plonky2/src/recursion/recursive_verifier.rs
+++ b/plonky2/src/recursion/recursive_verifier.rs
@@ -74,7 +74,7 @@ impl, const D: usize> CircuitBuilder {
         let vanishing_polys_zeta = with_context!(
             self,
             "evaluate the vanishing polynomial at our challenge point, zeta.",
-            eval_vanishing_poly_circuit::(
+            eval_vanishing_poly_circuit::(
                 self,
                 inner_common_data,
                 challenges.plonk_zeta,
@@ -126,11 +126,11 @@ impl, const D: usize> CircuitBuilder {
         );
     }
 
-    pub fn add_virtual_proof_with_pis>(
+    pub fn add_virtual_proof_with_pis(
         &mut self,
         common_data: &CommonCircuitData,
     ) -> ProofWithPublicInputsTarget {
-        let proof = self.add_virtual_proof::(common_data);
+        let proof = self.add_virtual_proof(common_data);
         let public_inputs = self.add_virtual_targets(common_data.num_public_inputs);
         ProofWithPublicInputsTarget {
             proof,
@@ -138,10 +138,7 @@ impl, const D: usize> CircuitBuilder {
         }
     }
 
-    fn add_virtual_proof>(
-        &mut self,
-        common_data: &CommonCircuitData,
-    ) -> ProofTarget {
+    fn add_virtual_proof(&mut self, common_data: &CommonCircuitData) -> ProofTarget {
         let config = &common_data.config;
         let fri_params = &common_data.fri_params;
         let cap_height = fri_params.config.cap_height;
@@ -158,15 +155,12 @@ impl, const D: usize> CircuitBuilder {
             wires_cap: self.add_virtual_cap(cap_height),
             plonk_zs_partial_products_cap: self.add_virtual_cap(cap_height),
             quotient_polys_cap: self.add_virtual_cap(cap_height),
-            openings: self.add_opening_set::(common_data),
+            openings: self.add_opening_set(common_data),
             opening_proof: self.add_virtual_fri_proof(num_leaves_per_oracle, fri_params),
         }
     }
 
-    fn add_opening_set>(
-        &mut self,
-        common_data: &CommonCircuitData,
-    ) -> OpeningSetTarget {
+    fn add_opening_set(&mut self, common_data: &CommonCircuitData) -> OpeningSetTarget {
         let config = &common_data.config;
         let num_challenges = config.num_challenges;
         let total_partial_products = num_challenges * common_data.num_partial_products;
@@ -363,7 +357,7 @@ mod tests {
     {
         let mut builder = CircuitBuilder::::new(config.clone());
         let mut pw = PartialWitness::new();
-        let pt = builder.add_virtual_proof_with_pis::(&inner_cd);
+        let pt = builder.add_virtual_proof_with_pis(&inner_cd);
         pw.set_proof_with_pis_target(&pt, &inner_proof);
 
         let inner_data = builder.add_virtual_verifier_data(inner_cd.config.fri_config.cap_height);
diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs
index bba42712..4f5d2921 100644
--- a/starky/src/permutation.rs
+++ b/starky/src/permutation.rs
@@ -14,7 +14,7 @@ use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::iop::target::Target;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
-use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
+use plonky2::plonk::config::{AlgebraicHasher, Hasher};
 use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget};
 use plonky2_maybe_rayon::*;
 
@@ -63,7 +63,7 @@ pub(crate) struct PermutationChallengeSet {
 }
 
 /// Compute all Z polynomials (for permutation arguments).
-pub(crate) fn compute_permutation_z_polys(
+pub(crate) fn compute_permutation_z_polys(
     stark: &S,
     config: &StarkConfig,
     trace_poly_values: &[PolynomialValues],
@@ -71,7 +71,6 @@ pub(crate) fn compute_permutation_z_polys(
 ) -> Vec>
 where
     F: RichField + Extendable,
-    C: GenericConfig,
     S: Stark,
 {
     let permutation_pairs = stark.permutation_pairs();
@@ -260,7 +259,7 @@ where
     pub(crate) permutation_challenge_sets: Vec>,
 }
 
-pub(crate) fn eval_permutation_checks(
+pub(crate) fn eval_permutation_checks(
     stark: &S,
     config: &StarkConfig,
     vars: StarkEvaluationVars,
@@ -270,7 +269,6 @@ pub(crate) fn eval_permutation_checks
     FE: FieldExtension,
     P: PackedField,
-    C: GenericConfig,
     S: Stark,
     [(); S::COLUMNS]:,
     [(); S::PUBLIC_INPUTS]:,
diff --git a/starky/src/prover.rs b/starky/src/prover.rs
index 2350830c..2f541d56 100644
--- a/starky/src/prover.rs
+++ b/starky/src/prover.rs
@@ -80,7 +80,7 @@ where
         config.num_challenges,
         stark.permutation_batch_size(),
     );
-    let permutation_z_polys = compute_permutation_z_polys::(
+    let permutation_z_polys = compute_permutation_z_polys::(
         &stark,
         config,
         &trace_poly_values,
@@ -285,7 +285,7 @@ where
             permutation_challenge_sets: permutation_challenge_sets.to_vec(),
         },
     );
-    eval_vanishing_poly::(
+    eval_vanishing_poly::(
         stark,
         config,
         vars,
@@ -303,7 +303,7 @@ where
 
     let num_challenges = alphas.len();
 
-    (0..P::WIDTH).into_iter().map(move |i| {
+    (0..P::WIDTH).map(move |i| {
         (0..num_challenges)
             .map(|j| constraints_evals[j].as_slice()[i])
             .collect()
diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs
index d080f80f..11d83479 100644
--- a/starky/src/recursive_verifier.rs
+++ b/starky/src/recursive_verifier.rs
@@ -128,7 +128,7 @@ fn verify_stark_proof_with_challenges_circuit<
     with_context!(
         builder,
         "evaluate vanishing polynomial",
-        eval_vanishing_poly_circuit::(
+        eval_vanishing_poly_circuit::(
             builder,
             &stark,
             inner_config,
diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs
index 906b8980..a3fe753e 100644
--- a/starky/src/vanishing_poly.rs
+++ b/starky/src/vanishing_poly.rs
@@ -2,7 +2,6 @@ use plonky2::field::extension::{Extendable, FieldExtension};
 use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
-use plonky2::plonk::config::GenericConfig;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
@@ -13,7 +12,7 @@ use crate::permutation::{
 use crate::stark::Stark;
 use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
 
-pub(crate) fn eval_vanishing_poly(
+pub(crate) fn eval_vanishing_poly(
     stark: &S,
     config: &StarkConfig,
     vars: StarkEvaluationVars,
@@ -23,14 +22,13 @@ pub(crate) fn eval_vanishing_poly
     FE: FieldExtension,
     P: PackedField,
-    C: GenericConfig,
     S: Stark,
     [(); S::COLUMNS]:,
     [(); S::PUBLIC_INPUTS]:,
 {
     stark.eval_packed_generic(vars, consumer);
     if let Some(permutation_data) = permutation_data {
-        eval_permutation_checks::(
+        eval_permutation_checks::(
             stark,
             config,
             vars,
@@ -40,7 +38,7 @@ pub(crate) fn eval_vanishing_poly(
     }
 }
 
-pub(crate) fn eval_vanishing_poly_circuit(
+pub(crate) fn eval_vanishing_poly_circuit(
     builder: &mut CircuitBuilder,
     stark: &S,
     config: &StarkConfig,
@@ -49,7 +47,6 @@ pub(crate) fn eval_vanishing_poly_circuit(
     consumer: &mut RecursiveConstraintConsumer,
 ) where
     F: RichField + Extendable,
-    C: GenericConfig,
     S: Stark,
     [(); S::COLUMNS]:,
     [(); S::PUBLIC_INPUTS]:,
diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs
index 443fcb31..6930c7cb 100644
--- a/starky/src/verifier.rs
+++ b/starky/src/verifier.rs
@@ -98,7 +98,7 @@ where
         next_zs: permutation_zs_next.as_ref().unwrap().clone(),
         permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(),
     });
-    eval_vanishing_poly::(
+    eval_vanishing_poly::(
         &stark,
         config,
         vars,