diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs
index 9306d0b2..8a33eb41 100644
--- a/starky/src/permutation.rs
+++ b/starky/src/permutation.rs
@@ -2,7 +2,7 @@
 
 use itertools::Itertools;
 use plonky2::field::batch_util::batch_multiply_inplace;
-use plonky2::field::extension_field::Extendable;
+use plonky2::field::extension_field::{Extendable, FieldExtension};
 use plonky2::field::field_types::Field;
 use plonky2::field::polynomial::PolynomialValues;
 use plonky2::hash::hash_types::RichField;
@@ -11,7 +11,9 @@ use plonky2::plonk::config::{GenericConfig, Hasher};
 use rayon::prelude::*;
 
 use crate::config::StarkConfig;
+use crate::constraint_consumer::ConstraintConsumer;
 use crate::stark::Stark;
+use crate::vars::StarkEvaluationVars;
 
 /// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another.
 /// In particular, there should exist some permutation `pi` such that for any `i`,
@@ -39,6 +41,7 @@ pub(crate) struct PermutationChallenge<T: Copy> {
 }
 
 /// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
+#[derive(Clone)]
 pub(crate) struct PermutationChallengeSet<T: Copy> {
     pub(crate) challenges: Vec<PermutationChallenge<T>>,
 }
@@ -49,6 +52,7 @@ pub(crate) fn compute_permutation_z_polys<F, C, S, const D: usize>(
     config: &StarkConfig,
     challenger: &mut Challenger<F, C::Hasher>,
     trace_poly_values: &[PolynomialValues<F>],
+    permutation_challenge_sets: &[PermutationChallengeSet<F>],
 ) -> Vec<PolynomialValues<F>>
 where
     F: RichField + Extendable<D>,
@@ -56,12 +60,6 @@ where
     S: Stark<F, D>,
 {
     let permutation_pairs = stark.permutation_pairs();
-    let permutation_challenge_sets = get_n_permutation_challenge_sets(
-        challenger,
-        config.num_challenges,
-        stark.permutation_batch_size(),
-    );
-
     let permutation_batches = get_permutation_batches(
         &permutation_pairs,
         &permutation_challenge_sets,
@@ -192,3 +190,63 @@ pub(crate) fn get_permutation_batches<'a, F: Field>(
         })
         .collect()
 }
+
+// TODO: Use slices.
+pub struct PermutationCheckData<F: Field, FE: FieldExtension<D2, BaseField = F>, const D2: usize> {
+    pub(crate) local_zs: Vec<FE>,
+    pub(crate) next_zs: Vec<FE>,
+    pub(crate) permutation_challenge_sets: Vec<PermutationChallengeSet<F>>,
+}
+
+pub(crate) fn eval_permutation_checks<F, FE, C, S, const D: usize, const D2: usize>(
+    stark: &S,
+    config: &StarkConfig,
+    vars: StarkEvaluationVars<FE, FE, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
+    local_zs: &[FE],
+    next_zs: &[FE],
+    consumer: &mut ConstraintConsumer<FE>,
+    permutation_challenge_sets: &[PermutationChallengeSet<F>],
+) where
+    F: RichField + Extendable<D>,
+    FE: FieldExtension<D2, BaseField = F>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+    [(); S::COLUMNS]:,
+    [(); S::PUBLIC_INPUTS]:,
+{
+    // TODO: Z_1 check.
+    let permutation_pairs = stark.permutation_pairs();
+
+    let permutation_batches = get_permutation_batches(
+        &permutation_pairs,
+        permutation_challenge_sets,
+        config.num_challenges,
+        stark.permutation_batch_size(),
+    );
+
+    // Each zs value corresponds to a permutation batch.
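+    // For every batch, each (lhs, rhs) column pair is reduced per row with the Horner-style fold
+    // acc -> acc * beta + value, starting from gamma; the constraint below then enforces
+    // Z(g x) * prod(reduced_rhs) = Z(x) * prod(reduced_lhs) for that batch's Z polynomial.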
+    for (i, instances) in permutation_batches.iter().enumerate() {
+        // Z(gx) * down = Z x * up
+        let (reduced_lhs, reduced_rhs): (Vec<FE>, Vec<FE>) = instances
+            .iter()
+            .map(|instance| {
+                let PermutationInstance {
+                    pair: PermutationPair { column_pairs },
+                    challenge: PermutationChallenge { beta, gamma },
+                } = instance;
+                column_pairs.iter().rev().fold(
+                    (FE::from_basefield(*gamma), FE::from_basefield(*gamma)),
+                    |(lhs, rhs), &(i, j)| {
+                        (
+                            lhs.scalar_mul(*beta) + vars.local_values[i],
+                            rhs.scalar_mul(*beta) + vars.local_values[j],
+                        )
+                    },
+                )
+            })
+            .unzip();
+        let constraint = next_zs[i] * reduced_rhs.into_iter().product()
+            - local_zs[i] * reduced_lhs.into_iter().product();
+        consumer.constraint(constraint);
+    }
+}
diff --git a/starky/src/prover.rs b/starky/src/prover.rs
index be1f198b..0206cb95 100644
--- a/starky/src/prover.rs
+++ b/starky/src/prover.rs
@@ -18,9 +18,13 @@ use rayon::prelude::*;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::ConstraintConsumer;
-use crate::permutation::compute_permutation_z_polys;
+use crate::permutation::PermutationCheckData;
+use crate::permutation::{
+    compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet,
+};
 use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
 use crate::stark::Stark;
+use crate::vanishing_poly::eval_vanishing_poly;
 use crate::vars::StarkEvaluationVars;
 
 pub fn prove<F, C, S, const D: usize>(
@@ -80,28 +84,41 @@ where
     challenger.observe_cap(&trace_cap);
 
     // Permutation arguments.
-    let permutation_zs_commitment = if stark.uses_permutation_args() {
+    let permutation_zs_commitment_challenges = if stark.uses_permutation_args() {
+        let permutation_challenge_sets = get_n_permutation_challenge_sets(
+            &mut challenger,
+            config.num_challenges,
+            stark.permutation_batch_size(),
+        );
         let permutation_z_polys = compute_permutation_z_polys::<F, C, S, D>(
             &stark,
             config,
             &mut challenger,
             &trace_poly_values,
+            &permutation_challenge_sets,
         );
+
         timed!(
             timing,
             "compute permutation Z commitments",
-            Some(PolynomialBatch::from_values(
-                permutation_z_polys,
-                rate_bits,
-                false,
-                config.fri_config.cap_height,
-                timing,
-                None,
+            Some((
+                PolynomialBatch::from_values(
+                    permutation_z_polys,
+                    rate_bits,
+                    false,
+                    config.fri_config.cap_height,
+                    timing,
+                    None,
+                ),
+                permutation_challenge_sets
             ))
         )
     } else {
         None
     };
+    let permutation_zs_commitment = permutation_zs_commitment_challenges
+        .as_ref()
+        .map(|(comm, _)| comm);
     let permutation_zs_cap = permutation_zs_commitment
         .as_ref()
         .map(|commit| commit.merkle_tree.cap.clone());
@@ -113,10 +130,11 @@ where
     let quotient_polys = compute_quotient_polys::<F, C, S, D>(
         &stark,
         &trace_commitment,
+        &permutation_zs_commitment_challenges,
         public_inputs,
         alphas,
         degree_bits,
-        rate_bits,
+        config,
     );
     let all_quotient_chunks = quotient_polys
         .into_par_iter()
@@ -156,13 +174,13 @@ where
         zeta,
         g,
         &trace_commitment,
-        permutation_zs_commitment.as_ref(),
+        permutation_zs_commitment,
         &quotient_commitment,
     );
     challenger.observe_openings(&openings.to_fri_openings());
 
     let initial_merkle_trees = once(&trace_commitment)
-        .chain(permutation_zs_commitment.as_ref())
+        .chain(permutation_zs_commitment)
         .chain(once(&quotient_commitment))
         .collect_vec();
 
@@ -196,10 +214,14 @@ where
 fn compute_quotient_polys<F, C, S, const D: usize>(
     stark: &S,
     trace_commitment: &PolynomialBatch<F, C, D>,
+    permutation_zs_commitment_challenges: &Option<(
+        PolynomialBatch<F, C, D>,
+        Vec<PermutationChallengeSet<F>>,
+    )>,
     public_inputs: [F; S::PUBLIC_INPUTS],
     alphas: Vec<F>,
     degree_bits: usize,
-    rate_bits: usize,
+    config: &StarkConfig,
 ) -> Vec<PolynomialValues<F>>
 where
     F: RichField + Extendable<D>,
@@ -209,6 +231,7 @@ where
     [(); S::PUBLIC_INPUTS]:,
 {
     let degree = 1 << degree_bits;
+    let rate_bits = config.fri_config.rate_bits;
 
     let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor());
     assert!(
@@ -255,7 +278,22 @@ where
                 next_values: &get_at_index(trace_commitment, (i + next_step) % size),
                 public_inputs: &public_inputs,
             };
-            stark.eval_packed_base(vars, &mut consumer);
+            let permutation_check_data = permutation_zs_commitment_challenges.as_ref().map(
+                |(permutation_zs_commitment, permutation_challenge_sets)| PermutationCheckData {
+                    local_zs: get_at_index(&permutation_zs_commitment, i).to_vec(),
+                    next_zs: get_at_index(&permutation_zs_commitment, (i + next_step) % size)
+                        .to_vec(),
+                    permutation_challenge_sets: permutation_challenge_sets.to_vec(),
+                },
+            );
+            eval_vanishing_poly::<F, F, C, S, D, 1>(
+                stark,
+                config,
+                vars,
+                permutation_check_data,
+                &mut consumer,
+            );
+            // stark.eval_packed_base(vars, &mut consumer);
             // TODO: Add in constraints for permutation arguments.
             // TODO: Fix this once we use a genuine `PackedField`.
             let mut constraints_evals = consumer.accumulators();
diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs
index a3323796..dc598167 100644
--- a/starky/src/vanishing_poly.rs
+++ b/starky/src/vanishing_poly.rs
@@ -4,82 +4,39 @@ use plonky2::plonk::config::GenericConfig;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::ConstraintConsumer;
-use crate::permutation::{
-    get_permutation_batches, PermutationChallenge, PermutationChallengeSet, PermutationInstance,
-    PermutationPair,
-};
+use crate::permutation::{eval_permutation_checks, PermutationCheckData};
 use crate::stark::Stark;
 use crate::vars::StarkEvaluationVars;
 
-pub(crate) fn eval_vanishing_poly<F, C, S, const D: usize>(
-    stark: S,
+pub(crate) fn eval_vanishing_poly<F, FE, C, S, const D: usize, const D2: usize>(
+    stark: &S,
     config: &StarkConfig,
-    vars: StarkEvaluationVars<F::Extension, F::Extension, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
-    local_zs: &[F::Extension],
-    next_zs: &[F::Extension],
-    mut consumer: ConstraintConsumer<F::Extension>,
-    permutation_challenge_sets: &[PermutationChallengeSet<F>],
+    vars: StarkEvaluationVars<FE, FE, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
+    permutation_data: Option<PermutationCheckData<F, FE, D2>>,
+    consumer: &mut ConstraintConsumer<FE>,
 ) where
     F: RichField + Extendable<D>,
+    FE: FieldExtension<D2, BaseField = F>,
     C: GenericConfig<D, F = F>,
     S: Stark<F, D>,
     [(); S::COLUMNS]:,
     [(); S::PUBLIC_INPUTS]:,
 {
-    stark.eval_packed_generic(vars, &mut consumer);
-}
-
-fn eval_permutation_checks<F, C, S, const D: usize>(
-    stark: S,
-    config: &StarkConfig,
-    vars: StarkEvaluationVars<F::Extension, F::Extension, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
-    local_zs: &[F::Extension],
-    next_zs: &[F::Extension],
-    consumer: &mut ConstraintConsumer<F::Extension>,
-    permutation_challenge_sets: &[PermutationChallengeSet<F>],
-) where
-    F: RichField + Extendable<D>,
-    C: GenericConfig<D, F = F>,
-    S: Stark<F, D>,
-    [(); S::COLUMNS]:,
-    [(); S::PUBLIC_INPUTS]:,
-{
-    // TODO: Z_1 check.
-    let permutation_pairs = stark.permutation_pairs();
-
-    let permutation_batches = get_permutation_batches(
-        &permutation_pairs,
+    stark.eval_packed_generic(vars, consumer);
+    if let Some(PermutationCheckData {
+        local_zs,
+        next_zs,
         permutation_challenge_sets,
-        config.num_challenges,
-        stark.permutation_batch_size(),
-    );
-
-    // Each zs value corresponds to a permutation batch.
-    for (i, instances) in permutation_batches.iter().enumerate() {
-        // Z(gx) * down = Z x * up
-        let (reduced_lhs, reduced_rhs): (Vec<F::Extension>, Vec<F::Extension>) = instances
-            .iter()
-            .map(|instance| {
-                let PermutationInstance {
-                    pair: PermutationPair { column_pairs },
-                    challenge: PermutationChallenge { beta, gamma },
-                } = instance;
-                column_pairs.iter().rev().fold(
-                    (
-                        F::Extension::from_basefield(*gamma),
-                        F::Extension::from_basefield(*gamma),
-                    ),
-                    |(lhs, rhs), &(i, j)| {
-                        (
-                            lhs.scalar_mul(*beta) + vars.local_values[i],
-                            rhs.scalar_mul(*beta) + vars.local_values[j],
-                        )
-                    },
-                )
-            })
-            .unzip();
-        let constraint = next_zs[i] * reduced_rhs.into_iter().product()
-            - local_zs[i] * reduced_lhs.into_iter().product();
-        consumer.constraint(constraint);
+    }) = permutation_data
+    {
+        eval_permutation_checks::<F, FE, C, S, D, D2>(
+            stark,
+            config,
+            vars,
+            &local_zs,
+            &next_zs,
+            consumer,
+            &permutation_challenge_sets,
+        );
     }
 }
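For reference, the relocated `eval_permutation_checks` enforces the standard grand-product permutation argument: each (lhs, rhs) column pair in a batch is reduced per row to `gamma * beta^k + sum_j value_j * beta^j` via a Horner fold over the challenges `beta` and `gamma`, and the running-product polynomial `Z` built by `compute_permutation_z_polys` must satisfy `Z(g x) * prod(reduced_rhs) = Z(x) * prod(reduced_lhs)` on every row. The standalone sketch below is not part of the patch and does not touch the plonky2 API; it replays that recurrence with `u128` arithmetic modulo the Goldilocks prime (all names such as `reduce_row` and the sample columns are illustrative) to show why the per-row constraint, wrapped around the trace, holds exactly when the right column is a permutation of the left one.

// Illustrative sketch only: mirrors the recurrence checked by `eval_permutation_checks`,
// with u128 arithmetic modulo the Goldilocks prime standing in for plonky2 field types.
const P: u128 = 0xFFFF_FFFF_0000_0001; // Goldilocks prime, slightly below 2^64

fn add(a: u128, b: u128) -> u128 {
    (a + b) % P
}

fn mul(a: u128, b: u128) -> u128 {
    (a * b) % P // a, b < 2^64, so the product fits in u128
}

fn pow(mut base: u128, mut exp: u128) -> u128 {
    let mut acc = 1;
    while exp > 0 {
        if exp & 1 == 1 {
            acc = mul(acc, base);
        }
        base = mul(base, base);
        exp >>= 1;
    }
    acc
}

fn inv(a: u128) -> u128 {
    pow(a, P - 2) // Fermat inverse; the real prover batch-inverts instead
}

/// Same shape as the fold over `column_pairs` in the patch:
/// start from gamma, then acc -> acc * beta + value for each column value.
fn reduce_row(values: &[u128], beta: u128, gamma: u128) -> u128 {
    values.iter().rev().fold(gamma, |acc, &v| add(mul(acc, beta), v))
}

fn main() {
    // Single-column toy trace; `rhs` is a permutation of `lhs`.
    let lhs: Vec<u128> = vec![3, 1, 4, 1, 5, 9, 2, 6];
    let rhs: Vec<u128> = vec![9, 1, 6, 4, 2, 5, 1, 3];
    let n = lhs.len();

    // In the real protocol these come from the Fiat-Shamir challenger.
    let (beta, gamma) = (0x1234_5678_9abc_def0 % P, 0x0fed_cba9_8765_4321 % P);

    // Per-row reductions; with one column pair this is just gamma * beta + value.
    let reduced_lhs: Vec<u128> = lhs.iter().map(|&v| reduce_row(&[v], beta, gamma)).collect();
    let reduced_rhs: Vec<u128> = rhs.iter().map(|&v| reduce_row(&[v], beta, gamma)).collect();

    // Running product Z, as `compute_permutation_z_polys` would build it:
    // Z[0] = 1, Z[r + 1] = Z[r] * reduced_lhs[r] / reduced_rhs[r].
    let mut z = vec![1u128; n];
    for r in 0..n - 1 {
        z[r + 1] = mul(z[r], mul(reduced_lhs[r], inv(reduced_rhs[r])));
    }

    // The constraint from the patch, checked on every row (indices wrap around):
    // Z(g x) * reduced_rhs(x) - Z(x) * reduced_lhs(x) == 0.
    // The wrap-around row works because the two reduced products agree when the
    // columns are permutations of one another, so the product telescopes back to 1.
    for r in 0..n {
        let constraint = (P + mul(z[(r + 1) % n], reduced_rhs[r]) - mul(z[r], reduced_lhs[r])) % P;
        assert_eq!(constraint, 0, "row {} fails", r);
    }
    println!("permutation argument holds on all {} rows", n);
}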