From d52fabaf26975be5d00ea520282b9dd23bd80045 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Mon, 21 Feb 2022 10:18:05 +0100 Subject: [PATCH 01/14] First pass --- starky/src/lib.rs | 1 + starky/src/permutation.rs | 54 ++++++++++++++++++++------------ starky/src/vanishing_poly.rs | 60 ++++++++++++++++++++++++++++++++++++ 3 files changed, 95 insertions(+), 20 deletions(-) create mode 100644 starky/src/vanishing_poly.rs diff --git a/starky/src/lib.rs b/starky/src/lib.rs index 1df9629e..51a73479 100644 --- a/starky/src/lib.rs +++ b/starky/src/lib.rs @@ -14,6 +14,7 @@ pub mod prover; pub mod recursive_verifier; pub mod stark; pub mod stark_testing; +pub mod vanishing_poly; pub mod vars; pub mod verifier; diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs index 01cfa8bf..9306d0b2 100644 --- a/starky/src/permutation.rs +++ b/starky/src/permutation.rs @@ -62,26 +62,12 @@ where stark.permutation_batch_size(), ); - // Get a list of instances of our batch-permutation argument. These are permutation arguments - // where the same `Z(x)` polynomial is used to check more than one permutation. - // Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we - // start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we - // chunk these arguments based on our batch size. - let permutation_batches = permutation_pairs - .iter() - .cartesian_product(0..config.num_challenges) - .chunks(stark.permutation_batch_size()) - .into_iter() - .map(|batch| { - batch - .enumerate() - .map(|(i, (pair, chal))| { - let challenge = permutation_challenge_sets[i].challenges[chal]; - PermutationInstance { pair, challenge } - }) - .collect_vec() - }) - .collect_vec(); + let permutation_batches = get_permutation_batches( + &permutation_pairs, + &permutation_challenge_sets, + config.num_challenges, + stark.permutation_batch_size(), + ); permutation_batches .into_par_iter() @@ -178,3 +164,31 @@ pub(crate) fn get_n_permutation_challenge_sets>( .map(|_| get_permutation_challenge_set(challenger, num_challenges)) .collect() } + +/// Get a list of instances of our batch-permutation argument. These are permutation arguments +/// where the same `Z(x)` polynomial is used to check more than one permutation. +/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we +/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we +/// chunk these arguments based on our batch size. 
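+/// [Editor's note, not part of the original patch] For example, with 2 permutation pairs,
+/// `num_challenges = 2`, and a batch size of 2, the cartesian product yields 4 instances,
+/// chunked into 2 batches of 2; each batch is later checked with a single `Z(x)` polynomial.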
+pub(crate) fn get_permutation_batches<'a, F: Field>( + permutation_pairs: &'a [PermutationPair], + permutation_challenge_sets: &[PermutationChallengeSet], + num_challenges: usize, + batch_size: usize, +) -> Vec>> { + permutation_pairs + .iter() + .cartesian_product(0..num_challenges) + .chunks(batch_size) + .into_iter() + .map(|batch| { + batch + .enumerate() + .map(|(i, (pair, chal))| { + let challenge = permutation_challenge_sets[i].challenges[chal]; + PermutationInstance { pair, challenge } + }) + .collect_vec() + }) + .collect() +} diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs new file mode 100644 index 00000000..6f7225b5 --- /dev/null +++ b/starky/src/vanishing_poly.rs @@ -0,0 +1,60 @@ +use plonky2::field::extension_field::Extendable; +use plonky2::field::packed_field::PackedField; +use plonky2::hash::hash_types::RichField; +use plonky2::plonk::config::GenericConfig; +use rayon::prelude::*; + +use crate::config::StarkConfig; +use crate::constraint_consumer::ConstraintConsumer; +use crate::permutation::{get_permutation_batches, PermutationChallenge}; +use crate::stark::Stark; +use crate::vars::StarkEvaluationVars; + +pub(crate) fn eval_vanishing_poly( + stark: S, + config: &StarkConfig, + vars: StarkEvaluationVars, + local_zs: &[F::Extension], + next_zs: &[F::Extension], + mut consumer: ConstraintConsumer, + permutation_challenge_sets: &[PermutationChallenge], +) where + F: RichField + Extendable, + C: GenericConfig, + S: Stark, + [(); S::COLUMNS]:, + [(); S::PUBLIC_INPUTS]:, +{ + stark.eval_packed_base(vars, &mut consumer); +} + +fn eval_permutation_checks( + stark: S, + config: &StarkConfig, + vars: StarkEvaluationVars, + local_zs: &[F::Extension], + next_zs: &[F::Extension], + mut consumer: ConstraintConsumer, + permutation_challenge_sets: &[PermutationChallenge], +) where + F: RichField + Extendable, + C: GenericConfig, + S: Stark, + [(); S::COLUMNS]:, + [(); S::PUBLIC_INPUTS]:, +{ + let permutation_pairs = stark.permutation_pairs(); + + let permutation_batches = get_permutation_batches( + &permutation_pairs, + &permutation_challenge_sets, + config.num_challenges, + stark.permutation_batch_size(), + ); + + // Each zs value corresponds to a permutation batch. 
+ permutation_batches + .into_par_iter() + .map(|instances| compute_permutation_z_poly(&instances, trace_poly_values)) + .collect() +} From 79ba85eb088a41cda96b76ccf1ca5b50646a2597 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Mon, 21 Feb 2022 10:52:04 +0100 Subject: [PATCH 02/14] Compiles --- starky/src/vanishing_poly.rs | 57 ++++++++++++++++++++++++++---------- 1 file changed, 41 insertions(+), 16 deletions(-) diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs index 6f7225b5..a3323796 100644 --- a/starky/src/vanishing_poly.rs +++ b/starky/src/vanishing_poly.rs @@ -1,23 +1,24 @@ -use plonky2::field::extension_field::Extendable; -use plonky2::field::packed_field::PackedField; +use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::hash::hash_types::RichField; use plonky2::plonk::config::GenericConfig; -use rayon::prelude::*; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; -use crate::permutation::{get_permutation_batches, PermutationChallenge}; +use crate::permutation::{ + get_permutation_batches, PermutationChallenge, PermutationChallengeSet, PermutationInstance, + PermutationPair, +}; use crate::stark::Stark; use crate::vars::StarkEvaluationVars; pub(crate) fn eval_vanishing_poly( stark: S, config: &StarkConfig, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, local_zs: &[F::Extension], next_zs: &[F::Extension], - mut consumer: ConstraintConsumer, - permutation_challenge_sets: &[PermutationChallenge], + mut consumer: ConstraintConsumer, + permutation_challenge_sets: &[PermutationChallengeSet], ) where F: RichField + Extendable, C: GenericConfig, @@ -25,17 +26,17 @@ pub(crate) fn eval_vanishing_poly( [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, { - stark.eval_packed_base(vars, &mut consumer); + stark.eval_packed_generic(vars, &mut consumer); } fn eval_permutation_checks( stark: S, config: &StarkConfig, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, local_zs: &[F::Extension], next_zs: &[F::Extension], - mut consumer: ConstraintConsumer, - permutation_challenge_sets: &[PermutationChallenge], + consumer: &mut ConstraintConsumer, + permutation_challenge_sets: &[PermutationChallengeSet], ) where F: RichField + Extendable, C: GenericConfig, @@ -43,18 +44,42 @@ fn eval_permutation_checks( [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, { + // TODO: Z_1 check. let permutation_pairs = stark.permutation_pairs(); let permutation_batches = get_permutation_batches( &permutation_pairs, - &permutation_challenge_sets, + permutation_challenge_sets, config.num_challenges, stark.permutation_batch_size(), ); // Each zs value corresponds to a permutation batch. 
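+    // [Editor's note, not part of the original patch] Each batch `i` is checked with one
+    // running-product step per row:
+    //     Z_i(g * x) * prod_j rhs_j(x) = Z_i(x) * prod_j lhs_j(x),
+    // where `lhs_j` / `rhs_j` fold instance `j`'s column pairs together with powers of
+    // `beta`, seeded with `gamma`.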
- permutation_batches - .into_par_iter() - .map(|instances| compute_permutation_z_poly(&instances, trace_poly_values)) - .collect() + for (i, instances) in permutation_batches.iter().enumerate() { + // Z(gx) * down = Z x * up + let (reduced_lhs, reduced_rhs): (Vec, Vec) = instances + .iter() + .map(|instance| { + let PermutationInstance { + pair: PermutationPair { column_pairs }, + challenge: PermutationChallenge { beta, gamma }, + } = instance; + column_pairs.iter().rev().fold( + ( + F::Extension::from_basefield(*gamma), + F::Extension::from_basefield(*gamma), + ), + |(lhs, rhs), &(i, j)| { + ( + lhs.scalar_mul(*beta) + vars.local_values[i], + rhs.scalar_mul(*beta) + vars.local_values[j], + ) + }, + ) + }) + .unzip(); + let constraint = next_zs[i] * reduced_rhs.into_iter().product() + - local_zs[i] * reduced_lhs.into_iter().product(); + consumer.constraint(constraint); + } } From 5c1173379e4c6f111f84fcb03fa8fa354cb3f19e Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Mon, 21 Feb 2022 16:05:24 +0100 Subject: [PATCH 03/14] Compiles --- starky/src/permutation.rs | 72 ++++++++++++++++++++++++++--- starky/src/prover.rs | 66 +++++++++++++++++++++------ starky/src/vanishing_poly.rs | 87 +++++++++--------------------------- 3 files changed, 139 insertions(+), 86 deletions(-) diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs index 9306d0b2..8a33eb41 100644 --- a/starky/src/permutation.rs +++ b/starky/src/permutation.rs @@ -2,7 +2,7 @@ use itertools::Itertools; use plonky2::field::batch_util::batch_multiply_inplace; -use plonky2::field::extension_field::Extendable; +use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::field::field_types::Field; use plonky2::field::polynomial::PolynomialValues; use plonky2::hash::hash_types::RichField; @@ -11,7 +11,9 @@ use plonky2::plonk::config::{GenericConfig, Hasher}; use rayon::prelude::*; use crate::config::StarkConfig; +use crate::constraint_consumer::ConstraintConsumer; use crate::stark::Stark; +use crate::vars::StarkEvaluationVars; /// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another. /// In particular, there should exist some permutation `pi` such that for any `i`, @@ -39,6 +41,7 @@ pub(crate) struct PermutationChallenge { } /// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness. +#[derive(Clone)] pub(crate) struct PermutationChallengeSet { pub(crate) challenges: Vec>, } @@ -49,6 +52,7 @@ pub(crate) fn compute_permutation_z_polys( config: &StarkConfig, challenger: &mut Challenger, trace_poly_values: &[PolynomialValues], + permutation_challenge_sets: &[PermutationChallengeSet], ) -> Vec> where F: RichField + Extendable, @@ -56,12 +60,6 @@ where S: Stark, { let permutation_pairs = stark.permutation_pairs(); - let permutation_challenge_sets = get_n_permutation_challenge_sets( - challenger, - config.num_challenges, - stark.permutation_batch_size(), - ); - let permutation_batches = get_permutation_batches( &permutation_pairs, &permutation_challenge_sets, @@ -192,3 +190,63 @@ pub(crate) fn get_permutation_batches<'a, F: Field>( }) .collect() } + +// TODO: Use slices. 
+pub struct PermutationCheckData, const D2: usize> { + pub(crate) local_zs: Vec, + pub(crate) next_zs: Vec, + pub(crate) permutation_challenge_sets: Vec>, +} + +pub(crate) fn eval_permutation_checks( + stark: &S, + config: &StarkConfig, + vars: StarkEvaluationVars, + local_zs: &[FE], + next_zs: &[FE], + consumer: &mut ConstraintConsumer, + permutation_challenge_sets: &[PermutationChallengeSet], +) where + F: RichField + Extendable, + FE: FieldExtension, + C: GenericConfig, + S: Stark, + [(); S::COLUMNS]:, + [(); S::PUBLIC_INPUTS]:, +{ + // TODO: Z_1 check. + let permutation_pairs = stark.permutation_pairs(); + + let permutation_batches = get_permutation_batches( + &permutation_pairs, + permutation_challenge_sets, + config.num_challenges, + stark.permutation_batch_size(), + ); + + // Each zs value corresponds to a permutation batch. + for (i, instances) in permutation_batches.iter().enumerate() { + // Z(gx) * down = Z x * up + let (reduced_lhs, reduced_rhs): (Vec, Vec) = instances + .iter() + .map(|instance| { + let PermutationInstance { + pair: PermutationPair { column_pairs }, + challenge: PermutationChallenge { beta, gamma }, + } = instance; + column_pairs.iter().rev().fold( + (FE::from_basefield(*gamma), FE::from_basefield(*gamma)), + |(lhs, rhs), &(i, j)| { + ( + lhs.scalar_mul(*beta) + vars.local_values[i], + rhs.scalar_mul(*beta) + vars.local_values[j], + ) + }, + ) + }) + .unzip(); + let constraint = next_zs[i] * reduced_rhs.into_iter().product() + - local_zs[i] * reduced_lhs.into_iter().product(); + consumer.constraint(constraint); + } +} diff --git a/starky/src/prover.rs b/starky/src/prover.rs index be1f198b..0206cb95 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -18,9 +18,13 @@ use rayon::prelude::*; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; -use crate::permutation::compute_permutation_z_polys; +use crate::permutation::PermutationCheckData; +use crate::permutation::{ + compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet, +}; use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs}; use crate::stark::Stark; +use crate::vanishing_poly::eval_vanishing_poly; use crate::vars::StarkEvaluationVars; pub fn prove( @@ -80,28 +84,41 @@ where challenger.observe_cap(&trace_cap); // Permutation arguments. 
- let permutation_zs_commitment = if stark.uses_permutation_args() { + let permutation_zs_commitment_challenges = if stark.uses_permutation_args() { + let permutation_challenge_sets = get_n_permutation_challenge_sets( + &mut challenger, + config.num_challenges, + stark.permutation_batch_size(), + ); let permutation_z_polys = compute_permutation_z_polys::( &stark, config, &mut challenger, &trace_poly_values, + &permutation_challenge_sets, ); + timed!( timing, "compute permutation Z commitments", - Some(PolynomialBatch::from_values( - permutation_z_polys, - rate_bits, - false, - config.fri_config.cap_height, - timing, - None, + Some(( + PolynomialBatch::from_values( + permutation_z_polys, + rate_bits, + false, + config.fri_config.cap_height, + timing, + None, + ), + permutation_challenge_sets )) ) } else { None }; + let permutation_zs_commitment = permutation_zs_commitment_challenges + .as_ref() + .map(|(comm, _)| comm); let permutation_zs_cap = permutation_zs_commitment .as_ref() .map(|commit| commit.merkle_tree.cap.clone()); @@ -113,10 +130,11 @@ where let quotient_polys = compute_quotient_polys::( &stark, &trace_commitment, + &permutation_zs_commitment_challenges, public_inputs, alphas, degree_bits, - rate_bits, + config, ); let all_quotient_chunks = quotient_polys .into_par_iter() @@ -156,13 +174,13 @@ where zeta, g, &trace_commitment, - permutation_zs_commitment.as_ref(), + permutation_zs_commitment, "ient_commitment, ); challenger.observe_openings(&openings.to_fri_openings()); let initial_merkle_trees = once(&trace_commitment) - .chain(permutation_zs_commitment.as_ref()) + .chain(permutation_zs_commitment) .chain(once("ient_commitment)) .collect_vec(); @@ -196,10 +214,14 @@ where fn compute_quotient_polys( stark: &S, trace_commitment: &PolynomialBatch, + permutation_zs_commitment_challenges: &Option<( + PolynomialBatch, + Vec>, + )>, public_inputs: [F; S::PUBLIC_INPUTS], alphas: Vec, degree_bits: usize, - rate_bits: usize, + config: &StarkConfig, ) -> Vec> where F: RichField + Extendable, @@ -209,6 +231,7 @@ where [(); S::PUBLIC_INPUTS]:, { let degree = 1 << degree_bits; + let rate_bits = config.fri_config.rate_bits; let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor()); assert!( @@ -255,7 +278,22 @@ where next_values: &get_at_index(trace_commitment, (i + next_step) % size), public_inputs: &public_inputs, }; - stark.eval_packed_base(vars, &mut consumer); + let permutation_check_data = permutation_zs_commitment_challenges.as_ref().map( + |(permutation_zs_commitment, permutation_challenge_sets)| PermutationCheckData { + local_zs: get_at_index(&permutation_zs_commitment, i).to_vec(), + next_zs: get_at_index(&permutation_zs_commitment, (i + next_step) % size) + .to_vec(), + permutation_challenge_sets: permutation_challenge_sets.to_vec(), + }, + ); + eval_vanishing_poly::( + stark, + config, + vars, + permutation_check_data, + &mut consumer, + ); + // stark.eval_packed_base(vars, &mut consumer); // TODO: Add in constraints for permutation arguments. // TODO: Fix this once we use a genuine `PackedField`. 
let mut constraints_evals = consumer.accumulators(); diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs index a3323796..dc598167 100644 --- a/starky/src/vanishing_poly.rs +++ b/starky/src/vanishing_poly.rs @@ -4,82 +4,39 @@ use plonky2::plonk::config::GenericConfig; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; -use crate::permutation::{ - get_permutation_batches, PermutationChallenge, PermutationChallengeSet, PermutationInstance, - PermutationPair, -}; +use crate::permutation::{eval_permutation_checks, PermutationCheckData}; use crate::stark::Stark; use crate::vars::StarkEvaluationVars; -pub(crate) fn eval_vanishing_poly( - stark: S, +pub(crate) fn eval_vanishing_poly( + stark: &S, config: &StarkConfig, - vars: StarkEvaluationVars, - local_zs: &[F::Extension], - next_zs: &[F::Extension], - mut consumer: ConstraintConsumer, - permutation_challenge_sets: &[PermutationChallengeSet], + vars: StarkEvaluationVars, + permutation_data: Option>, + consumer: &mut ConstraintConsumer, ) where F: RichField + Extendable, + FE: FieldExtension, C: GenericConfig, S: Stark, [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, { - stark.eval_packed_generic(vars, &mut consumer); -} - -fn eval_permutation_checks( - stark: S, - config: &StarkConfig, - vars: StarkEvaluationVars, - local_zs: &[F::Extension], - next_zs: &[F::Extension], - consumer: &mut ConstraintConsumer, - permutation_challenge_sets: &[PermutationChallengeSet], -) where - F: RichField + Extendable, - C: GenericConfig, - S: Stark, - [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, -{ - // TODO: Z_1 check. - let permutation_pairs = stark.permutation_pairs(); - - let permutation_batches = get_permutation_batches( - &permutation_pairs, + stark.eval_packed_generic(vars, consumer); + if let Some(PermutationCheckData { + local_zs, + next_zs, permutation_challenge_sets, - config.num_challenges, - stark.permutation_batch_size(), - ); - - // Each zs value corresponds to a permutation batch. 
- for (i, instances) in permutation_batches.iter().enumerate() { - // Z(gx) * down = Z x * up - let (reduced_lhs, reduced_rhs): (Vec, Vec) = instances - .iter() - .map(|instance| { - let PermutationInstance { - pair: PermutationPair { column_pairs }, - challenge: PermutationChallenge { beta, gamma }, - } = instance; - column_pairs.iter().rev().fold( - ( - F::Extension::from_basefield(*gamma), - F::Extension::from_basefield(*gamma), - ), - |(lhs, rhs), &(i, j)| { - ( - lhs.scalar_mul(*beta) + vars.local_values[i], - rhs.scalar_mul(*beta) + vars.local_values[j], - ) - }, - ) - }) - .unzip(); - let constraint = next_zs[i] * reduced_rhs.into_iter().product() - - local_zs[i] * reduced_lhs.into_iter().product(); - consumer.constraint(constraint); + }) = permutation_data + { + eval_permutation_checks::( + stark, + config, + vars, + &local_zs, + &next_zs, + consumer, + &permutation_challenge_sets, + ); } } From 85c1e1d5e07bfc4c4cb34a85373f09392a18e5c1 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Mon, 21 Feb 2022 18:00:03 +0100 Subject: [PATCH 04/14] Should work (does not) --- starky/src/fibonacci_stark.rs | 29 ++++++++++++++++++++++------- starky/src/prover.rs | 17 +++++++++-------- starky/src/verifier.rs | 16 +++++++++++++++- 3 files changed, 46 insertions(+), 16 deletions(-) diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs index a0204359..2bbd333f 100644 --- a/starky/src/fibonacci_stark.rs +++ b/starky/src/fibonacci_stark.rs @@ -2,16 +2,21 @@ use std::marker::PhantomData; use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::field::packed_field::PackedField; +use plonky2::fri::structure::{FriInstanceInfo, FriInstanceInfoTarget}; use plonky2::hash::hash_types::RichField; +use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use crate::config::StarkConfig; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::permutation::PermutationPair; use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; /// Toy STARK system used for testing. -/// Computes a Fibonacci sequence with state `[x0, x1]` using the state transition -/// `x0 <- x1, x1 <- x0 + x1`. +/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition +/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`. +/// Note: The `i, j` columns are used to test the permutation argument. #[derive(Copy, Clone)] struct FibonacciStark, const D: usize> { num_rows: usize, @@ -34,21 +39,25 @@ impl, const D: usize> FibonacciStark { } } - /// Generate the trace using `x0, x1` as inital state values. + /// Generate the trace using `x0, x1, 0, 1` as initial state values. 
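+    /// [Editor's note, not part of the original patch] For example, starting from
+    /// `(x0, x1) = (0, 1)` the rows are `[0, 1, 0, 1]`, `[1, 1, 1, 2]`, `[1, 2, 2, 3]`, ...;
+    /// the last row's final entry is then zeroed so that columns 2 and 3 are permutations
+    /// of one another, which is what the permutation argument checks.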
fn generate_trace(&self, x0: F, x1: F) -> Vec<[F; Self::COLUMNS]> { - (0..self.num_rows) - .scan([x0, x1], |acc, _| { + let mut trace = (0..self.num_rows) + .scan([x0, x1, F::ZERO, F::ONE], |acc, _| { let tmp = *acc; acc[0] = tmp[1]; acc[1] = tmp[0] + tmp[1]; + acc[2] = tmp[2] + F::ONE; + acc[3] = tmp[3] + F::ONE; Some(tmp) }) - .collect() + .collect::>(); + trace[self.num_rows - 1][3] = F::ZERO; + trace } } impl, const D: usize> Stark for FibonacciStark { - const COLUMNS: usize = 2; + const COLUMNS: usize = 4; const PUBLIC_INPUTS: usize = 3; fn eval_packed_generic( @@ -105,6 +114,12 @@ impl, const D: usize> Stark for FibonacciStar fn constraint_degree(&self) -> usize { 2 } + + fn permutation_pairs(&self) -> Vec { + vec![PermutationPair { + column_pairs: vec![(2, 3)], + }] + } } #[cfg(test)] diff --git a/starky/src/prover.rs b/starky/src/prover.rs index 0206cb95..e0c14dde 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -211,10 +211,10 @@ where /// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`, /// where the `C_i`s are the Stark constraints. -fn compute_quotient_polys( +fn compute_quotient_polys<'a, F, C, S, const D: usize>( stark: &S, - trace_commitment: &PolynomialBatch, - permutation_zs_commitment_challenges: &Option<( + trace_commitment: &'a PolynomialBatch, + permutation_zs_commitment_challenges: &'a Option<( PolynomialBatch, Vec>, )>, @@ -251,9 +251,8 @@ where let z_h_on_coset = ZeroPolyOnCoset::::new(degree_bits, quotient_degree_bits); // Retrieve the LDE values at index `i`. - let get_at_index = |comm: &PolynomialBatch, i: usize| -> [F; S::COLUMNS] { - comm.get_lde_values(i * step).try_into().unwrap() - }; + let get_at_index = + |comm: &'a PolynomialBatch, i: usize| -> &'a [F] { comm.get_lde_values(i * step) }; // Last element of the subgroup. 
let last = F::primitive_root_of_unity(degree_bits).inverse(); let size = degree << quotient_degree_bits; @@ -274,8 +273,10 @@ where lagrange_last.values[i], ); let vars = StarkEvaluationVars:: { - local_values: &get_at_index(trace_commitment, i), - next_values: &get_at_index(trace_commitment, (i + next_step) % size), + local_values: &get_at_index(trace_commitment, i).try_into().unwrap(), + next_values: &get_at_index(trace_commitment, (i + next_step) % size) + .try_into() + .unwrap(), public_inputs: &public_inputs, }; let permutation_check_data = permutation_zs_commitment_challenges.as_ref().map( diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 686ecd98..1603b208 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -11,8 +11,10 @@ use plonky2::plonk::plonk_common::reduce_with_powers; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; +use crate::permutation::PermutationCheckData; use crate::proof::{StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs}; use crate::stark::Stark; +use crate::vanishing_poly::eval_vanishing_poly; use crate::vars::StarkEvaluationVars; pub fn verify_stark_proof< @@ -88,7 +90,19 @@ where l_1, l_last, ); - stark.eval_ext(vars, &mut consumer); + // stark.eval_ext(vars, &mut consumer); + let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckData { + local_zs: permutation_zs.as_ref().unwrap().clone(), + next_zs: permutation_zs_right.as_ref().unwrap().clone(), + permutation_challenge_sets: challenges.permutation_challenge_sets, + }); + eval_vanishing_poly::( + &stark, + config, + vars, + permutation_data, + &mut consumer, + ); // TODO: Add in constraints for permutation arguments. let vanishing_polys_zeta = consumer.accumulators(); From 56e269e27abaa50f33493b06cc105240e8cdd334 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Tue, 22 Feb 2022 10:37:08 +0100 Subject: [PATCH 05/14] Working (not recursively) --- starky/src/fibonacci_stark.rs | 3 --- starky/src/lib.rs | 1 + starky/src/permutation.rs | 24 ++++++++++++++---------- starky/src/prover.rs | 4 ++-- 4 files changed, 17 insertions(+), 15 deletions(-) diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs index 2bbd333f..6ffbe858 100644 --- a/starky/src/fibonacci_stark.rs +++ b/starky/src/fibonacci_stark.rs @@ -2,12 +2,9 @@ use std::marker::PhantomData; use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::field::packed_field::PackedField; -use plonky2::fri::structure::{FriInstanceInfo, FriInstanceInfoTarget}; use plonky2::hash::hash_types::RichField; -use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; -use crate::config::StarkConfig; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::permutation::PermutationPair; use crate::stark::Stark; diff --git a/starky/src/lib.rs b/starky/src/lib.rs index 51a73479..8249d90b 100644 --- a/starky/src/lib.rs +++ b/starky/src/lib.rs @@ -3,6 +3,7 @@ #![allow(unused_variables)] #![allow(incomplete_features)] #![allow(clippy::too_many_arguments)] +#![allow(clippy::type_complexity)] #![feature(generic_const_exprs)] pub mod config; diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs index 8a33eb41..75fa8400 100644 --- a/starky/src/permutation.rs +++ b/starky/src/permutation.rs @@ -62,7 +62,7 @@ where let permutation_pairs = stark.permutation_pairs(); let permutation_batches = get_permutation_batches( &permutation_pairs, - 
&permutation_challenge_sets, + permutation_challenge_sets, config.num_challenges, stark.permutation_batch_size(), ); @@ -234,15 +234,19 @@ pub(crate) fn eval_permutation_checks Date: Tue, 22 Feb 2022 10:46:51 +0100 Subject: [PATCH 06/14] Fill permutation todos --- starky/src/recursive_verifier.rs | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index ea7ffb70..c1071b5a 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -187,24 +187,29 @@ pub fn add_virtual_stark_proof, S: Stark, con let fri_params = config.fri_params(degree_bits); let cap_height = fri_params.config.cap_height; - let num_leaves_per_oracle = &[ - S::COLUMNS, - // TODO: permutation polys - stark.quotient_degree_factor() * config.num_challenges, - ]; - - let permutation_zs_cap = if stark.uses_permutation_args() { - Some(builder.add_virtual_cap(cap_height)) + let num_leaves_per_oracle = if stark.uses_permutation_args() { + vec![ + S::COLUMNS, + stark.num_permutation_batches(config), + stark.quotient_degree_factor() * config.num_challenges, + ] } else { - None + vec![ + S::COLUMNS, + stark.quotient_degree_factor() * config.num_challenges, + ] }; + let permutation_zs_cap = stark + .uses_permutation_args() + .then(|| builder.add_virtual_cap(cap_height)); + StarkProofTarget { trace_cap: builder.add_virtual_cap(cap_height), permutation_zs_cap, quotient_polys_cap: builder.add_virtual_cap(cap_height), openings: add_stark_opening_set::(builder, stark, config), - opening_proof: builder.add_virtual_fri_proof(num_leaves_per_oracle, &fri_params), + opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params), } } @@ -217,8 +222,10 @@ fn add_stark_opening_set, S: Stark, const D: StarkOpeningSetTarget { local_values: builder.add_virtual_extension_targets(S::COLUMNS), next_values: builder.add_virtual_extension_targets(S::COLUMNS), - permutation_zs: vec![/*TODO*/], - permutation_zs_right: vec![/*TODO*/], + permutation_zs: builder + .add_virtual_extension_targets(stark.num_permutation_batches(config)), + permutation_zs_right: builder + .add_virtual_extension_targets(stark.num_permutation_batches(config)), quotient_polys: builder .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges), } From 6cd2fc62b57714357b6e6dd21ed085dbb2f95b46 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Tue, 22 Feb 2022 11:44:24 +0100 Subject: [PATCH 07/14] Should work (does not) --- plonky2/src/iop/challenger.rs | 2 +- starky/src/fibonacci_stark.rs | 2 +- starky/src/get_challenges.rs | 42 ++++++-- starky/src/permutation.rs | 160 +++++++++++++++++++++++++++---- starky/src/proof.rs | 29 +++--- starky/src/recursive_verifier.rs | 37 +++++-- starky/src/vanishing_poly.rs | 48 +++++++--- starky/src/verifier.rs | 1 - 8 files changed, 260 insertions(+), 61 deletions(-) diff --git a/plonky2/src/iop/challenger.rs b/plonky2/src/iop/challenger.rs index c3a4403a..5b374834 100644 --- a/plonky2/src/iop/challenger.rs +++ b/plonky2/src/iop/challenger.rs @@ -208,7 +208,7 @@ impl, H: AlgebraicHasher, const D: usize> } } - pub(crate) fn get_challenge(&mut self, builder: &mut CircuitBuilder) -> Target { + pub fn get_challenge(&mut self, builder: &mut CircuitBuilder) -> Target { self.absorb_buffered_inputs(builder); if self.output_buffer.is_empty() { diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs index 6ffbe858..10b54d69 100644 --- a/starky/src/fibonacci_stark.rs +++ 
b/starky/src/fibonacci_stark.rs @@ -234,7 +234,7 @@ mod tests { let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits); set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof); - recursively_verify_stark_proof::(&mut builder, stark, pt, inner_config); + recursively_verify_stark_proof::(&mut builder, stark, pt, inner_config)?; if print_gate_counts { builder.print_gate_counts(0); diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs index 1cb1e633..8ee71667 100644 --- a/starky/src/get_challenges.rs +++ b/starky/src/get_challenges.rs @@ -11,7 +11,9 @@ use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; use crate::config::StarkConfig; -use crate::permutation::get_n_permutation_challenge_sets; +use crate::permutation::{ + get_n_permutation_challenge_sets, get_n_permutation_challenge_sets_target, +}; use crate::proof::*; use crate::stark::Stark; @@ -131,9 +133,11 @@ where pub(crate) fn get_challenges_target< F: RichField + Extendable, C: GenericConfig, + S: Stark, const D: usize, >( builder: &mut CircuitBuilder, + stark: &S, trace_cap: &MerkleCapTarget, permutation_zs_cap: Option<&MerkleCapTarget>, quotient_polys_cap: &MerkleCapTarget, @@ -142,7 +146,7 @@ pub(crate) fn get_challenges_target< final_poly: &PolynomialCoeffsExtTarget, pow_witness: Target, config: &StarkConfig, -) -> StarkProofChallengesTarget +) -> Result> where C::Hasher: AlgebraicHasher, { @@ -151,6 +155,23 @@ where let mut challenger = RecursiveChallenger::::new(builder); challenger.observe_cap(trace_cap); + + let permutation_challenge_sets = if stark.uses_permutation_args() { + get_n_permutation_challenge_sets_target( + builder, + &mut challenger, + num_challenges, + stark.permutation_batch_size(), + ) + } else { + vec![] + }; + if stark.uses_permutation_args() { + let cap = + permutation_zs_cap.ok_or_else(|| anyhow::Error::msg("expected permutation_zs_cap")); + challenger.observe_cap(cap?); + } + let stark_alphas = challenger.get_n_challenges(builder, num_challenges); challenger.observe_cap(quotient_polys_cap); @@ -158,7 +179,8 @@ where challenger.observe_openings(&openings.to_fri_openings()); - StarkProofChallengesTarget { + Ok(StarkProofChallengesTarget { + permutation_challenge_sets, stark_alphas, stark_zeta, fri_challenges: challenger.fri_challenges::( @@ -168,15 +190,20 @@ where pow_witness, &config.fri_config, ), - } + }) } impl StarkProofWithPublicInputsTarget { - pub(crate) fn get_challenges, C: GenericConfig>( + pub(crate) fn get_challenges< + F: RichField + Extendable, + C: GenericConfig, + S: Stark, + >( &self, builder: &mut CircuitBuilder, + stark: &S, config: &StarkConfig, - ) -> StarkProofChallengesTarget + ) -> Result> where C::Hasher: AlgebraicHasher, { @@ -194,8 +221,9 @@ impl StarkProofWithPublicInputsTarget { }, } = &self.proof; - get_challenges_target::( + get_challenges_target::( builder, + stark, trace_cap, permutation_zs_cap.as_ref(), quotient_polys_cap, diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs index 75fa8400..1113094d 100644 --- a/starky/src/permutation.rs +++ b/starky/src/permutation.rs @@ -6,14 +6,17 @@ use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::field::field_types::Field; use plonky2::field::polynomial::PolynomialValues; use plonky2::hash::hash_types::RichField; -use plonky2::iop::challenger::Challenger; -use plonky2::plonk::config::{GenericConfig, Hasher}; +use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; 
+use plonky2::iop::ext_target::ExtensionTarget; +use plonky2::iop::target::Target; +use plonky2::plonk::circuit_builder::CircuitBuilder; +use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; use rayon::prelude::*; use crate::config::StarkConfig; -use crate::constraint_consumer::ConstraintConsumer; +use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::stark::Stark; -use crate::vars::StarkEvaluationVars; +use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; /// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another. /// In particular, there should exist some permutation `pi` such that for any `i`, @@ -26,24 +29,24 @@ pub struct PermutationPair { } /// A single instance of a permutation check protocol. -pub(crate) struct PermutationInstance<'a, F: Field> { +pub(crate) struct PermutationInstance<'a, T: Copy> { pub(crate) pair: &'a PermutationPair, - pub(crate) challenge: PermutationChallenge, + pub(crate) challenge: PermutationChallenge, } /// Randomness for a single instance of a permutation check protocol. #[derive(Copy, Clone)] -pub(crate) struct PermutationChallenge { +pub(crate) struct PermutationChallenge { /// Randomness used to combine multiple columns into one. - pub(crate) beta: F, + pub(crate) beta: T, /// Random offset that's added to the beta-reduced column values. - pub(crate) gamma: F, + pub(crate) gamma: T, } /// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness. #[derive(Clone)] -pub(crate) struct PermutationChallengeSet { - pub(crate) challenges: Vec>, +pub(crate) struct PermutationChallengeSet { + pub(crate) challenges: Vec>, } /// Compute all Z polynomials (for permutation arguments). @@ -163,17 +166,60 @@ pub(crate) fn get_n_permutation_challenge_sets>( .collect() } +fn get_permutation_challenge_target< + F: RichField + Extendable, + H: AlgebraicHasher, + const D: usize, +>( + builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, +) -> PermutationChallenge { + let beta = challenger.get_challenge(builder); + let gamma = challenger.get_challenge(builder); + PermutationChallenge { beta, gamma } +} + +fn get_permutation_challenge_set_target< + F: RichField + Extendable, + H: AlgebraicHasher, + const D: usize, +>( + builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, + num_challenges: usize, +) -> PermutationChallengeSet { + let challenges = (0..num_challenges) + .map(|_| get_permutation_challenge_target(builder, challenger)) + .collect(); + PermutationChallengeSet { challenges } +} + +pub(crate) fn get_n_permutation_challenge_sets_target< + F: RichField + Extendable, + H: AlgebraicHasher, + const D: usize, +>( + builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, + num_challenges: usize, + num_sets: usize, +) -> Vec> { + (0..num_sets) + .map(|_| get_permutation_challenge_set_target(builder, challenger, num_challenges)) + .collect() +} + /// Get a list of instances of our batch-permutation argument. These are permutation arguments /// where the same `Z(x)` polynomial is used to check more than one permutation. /// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we /// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we /// chunk these arguments based on our batch size. 
-pub(crate) fn get_permutation_batches<'a, F: Field>( +pub(crate) fn get_permutation_batches<'a, T: Copy>( permutation_pairs: &'a [PermutationPair], - permutation_challenge_sets: &[PermutationChallengeSet], + permutation_challenge_sets: &[PermutationChallengeSet], num_challenges: usize, batch_size: usize, -) -> Vec>> { +) -> Vec>> { permutation_pairs .iter() .cartesian_product(0..num_challenges) @@ -202,10 +248,8 @@ pub(crate) fn eval_permutation_checks, - local_zs: &[FE], - next_zs: &[FE], + permutation_data: PermutationCheckData, consumer: &mut ConstraintConsumer, - permutation_challenge_sets: &[PermutationChallengeSet], ) where F: RichField + Extendable, FE: FieldExtension, @@ -214,12 +258,17 @@ pub(crate) fn eval_permutation_checks { + pub(crate) local_zs: Vec>, + pub(crate) next_zs: Vec>, + pub(crate) permutation_challenge_sets: Vec>, +} + +pub(crate) fn eval_permutation_checks_recursively( + builder: &mut CircuitBuilder, + stark: &S, + config: &StarkConfig, + vars: StarkEvaluationTargets, + permutation_data: PermutationCheckDataTarget, + consumer: &mut RecursiveConstraintConsumer, +) where + F: RichField + Extendable, + S: Stark, + [(); S::COLUMNS]:, + [(); S::PUBLIC_INPUTS]:, +{ + let PermutationCheckDataTarget { + local_zs, + next_zs, + permutation_challenge_sets, + } = permutation_data; + // TODO: Z_1 check. + let permutation_pairs = stark.permutation_pairs(); + + let permutation_batches = get_permutation_batches( + &permutation_pairs, + &permutation_challenge_sets, + config.num_challenges, + stark.permutation_batch_size(), + ); + + // Each zs value corresponds to a permutation batch. + for (i, instances) in permutation_batches.iter().enumerate() { + // Z(gx) * down = Z x * up + let (reduced_lhs, reduced_rhs): (Vec>, Vec>) = + instances + .iter() + .map(|instance| { + let PermutationInstance { + pair: PermutationPair { column_pairs }, + challenge: PermutationChallenge { beta, gamma }, + } = instance; + let zero = builder.zero_extension(); + let beta_ext = builder.convert_to_ext(*beta); + let gamma_ext = builder.convert_to_ext(*gamma); + let mut reduced = + column_pairs + .iter() + .rev() + .fold((zero, zero), |(lhs, rhs), &(i, j)| { + ( + builder.mul_add_extension(lhs, beta_ext, vars.local_values[i]), + builder.mul_add_extension(rhs, beta_ext, vars.local_values[j]), + ) + }); + reduced.0 = builder.add_extension(reduced.0, gamma_ext); + reduced.1 = builder.add_extension(reduced.1, gamma_ext); + reduced + }) + .unzip(); + let reduced_lhs_product = builder.mul_many_extension(&reduced_lhs); + let reduced_rhs_product = builder.mul_many_extension(&reduced_rhs); + // constraint = next_zs[i] * reduced_rhs_product - local_zs[i] * reduced_lhs_product + let constraint = { + let tmp = builder.mul_extension(local_zs[i], reduced_lhs_product); + builder.mul_sub_extension(next_zs[i], reduced_rhs_product, tmp) + }; + consumer.constraint(builder, constraint) + } +} diff --git a/starky/src/proof.rs b/starky/src/proof.rs index 4807b443..d1f86d7e 100644 --- a/starky/src/proof.rs +++ b/starky/src/proof.rs @@ -113,6 +113,7 @@ pub(crate) struct StarkProofChallenges, const D: us } pub(crate) struct StarkProofChallengesTarget { + pub permutation_challenge_sets: Vec>, pub stark_alphas: Vec, pub stark_zeta: ExtensionTarget, pub fri_challenges: FriChallengesTarget, @@ -179,27 +180,29 @@ impl, const D: usize> StarkOpeningSet { pub struct StarkOpeningSetTarget { pub local_values: Vec>, pub next_values: Vec>, - pub permutation_zs: Vec>, - pub permutation_zs_right: Vec>, + pub permutation_zs: Option>>, + pub 
permutation_zs_right: Option>>, pub quotient_polys: Vec>, } impl StarkOpeningSetTarget { pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget { let zeta_batch = FriOpeningBatchTarget { - values: [ - self.local_values.as_slice(), - self.quotient_polys.as_slice(), - self.permutation_zs.as_slice(), - ] - .concat(), + values: self + .local_values + .iter() + .chain(self.permutation_zs.iter().flatten()) + .chain(&self.quotient_polys) + .copied() + .collect_vec(), }; let zeta_right_batch = FriOpeningBatchTarget { - values: [ - self.next_values.as_slice(), - self.permutation_zs_right.as_slice(), - ] - .concat(), + values: self + .next_values + .iter() + .chain(self.permutation_zs_right.iter().flatten()) + .copied() + .collect_vec(), }; FriOpeningsTarget { batches: vec![zeta_batch, zeta_right_batch], diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index c1071b5a..cc547396 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -1,5 +1,6 @@ use std::iter::once; +use anyhow::Result; use itertools::Itertools; use plonky2::field::extension_field::Extendable; use plonky2::field::field_types::Field; @@ -13,11 +14,13 @@ use plonky2::util::reducing::ReducingFactorTarget; use crate::config::StarkConfig; use crate::constraint_consumer::RecursiveConstraintConsumer; +use crate::permutation::PermutationCheckDataTarget; use crate::proof::{ StarkOpeningSetTarget, StarkProof, StarkProofChallengesTarget, StarkProofTarget, StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget, }; use crate::stark::Stark; +use crate::vanishing_poly::eval_vanishing_poly_recursively; use crate::vars::StarkEvaluationTargets; pub fn recursively_verify_stark_proof< @@ -30,14 +33,15 @@ pub fn recursively_verify_stark_proof< stark: S, proof_with_pis: StarkProofWithPublicInputsTarget, inner_config: &StarkConfig, -) where +) -> Result<()> +where C::Hasher: AlgebraicHasher, [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, { assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS); let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config); - let challenges = proof_with_pis.get_challenges::(builder, inner_config); + let challenges = proof_with_pis.get_challenges::(builder, &stark, inner_config)?; recursively_verify_stark_proof_with_challenges::( builder, @@ -47,6 +51,8 @@ pub fn recursively_verify_stark_proof< inner_config, degree_bits, ); + + Ok(()) } /// Recursively verifies an inner proof. @@ -104,8 +110,21 @@ fn recursively_verify_stark_proof_with_challenges< l_1, l_last, ); - stark.eval_ext_recursively(builder, vars, &mut consumer); - // TODO: Add in constraints for permutation arguments. + let permutation_data = stark + .uses_permutation_args() + .then(|| PermutationCheckDataTarget { + local_zs: permutation_zs.as_ref().unwrap().clone(), + next_zs: permutation_zs_right.as_ref().unwrap().clone(), + permutation_challenge_sets: challenges.permutation_challenge_sets, + }); + eval_vanishing_poly_recursively::( + builder, + &stark, + inner_config, + vars, + permutation_data, + &mut consumer, + ); let vanishing_polys_zeta = consumer.accumulators(); // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta. 
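[Editor's note, not part of the patch: with this hunk the in-circuit verifier mirrors the native one. It derives permutation challenge targets from the recursive challenger and evaluates the STARK constraints and the permutation checks together via `eval_vanishing_poly_recursively`, before performing the quotient identity check mentioned in the comment above.]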
@@ -222,10 +241,12 @@ fn add_stark_opening_set, S: Stark, const D: StarkOpeningSetTarget { local_values: builder.add_virtual_extension_targets(S::COLUMNS), next_values: builder.add_virtual_extension_targets(S::COLUMNS), - permutation_zs: builder - .add_virtual_extension_targets(stark.num_permutation_batches(config)), - permutation_zs_right: builder - .add_virtual_extension_targets(stark.num_permutation_batches(config)), + permutation_zs: stark + .uses_permutation_args() + .then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))), + permutation_zs_right: stark + .uses_permutation_args() + .then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))), quotient_polys: builder .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges), } diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs index dc598167..55ea7a5a 100644 --- a/starky/src/vanishing_poly.rs +++ b/starky/src/vanishing_poly.rs @@ -1,12 +1,16 @@ use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::hash::hash_types::RichField; +use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::GenericConfig; use crate::config::StarkConfig; -use crate::constraint_consumer::ConstraintConsumer; -use crate::permutation::{eval_permutation_checks, PermutationCheckData}; +use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::permutation::{ + eval_permutation_checks, eval_permutation_checks_recursively, PermutationCheckData, + PermutationCheckDataTarget, +}; use crate::stark::Stark; -use crate::vars::StarkEvaluationVars; +use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; pub(crate) fn eval_vanishing_poly( stark: &S, @@ -23,20 +27,40 @@ pub(crate) fn eval_vanishing_poly( [(); S::PUBLIC_INPUTS]:, { stark.eval_packed_generic(vars, consumer); - if let Some(PermutationCheckData { - local_zs, - next_zs, - permutation_challenge_sets, - }) = permutation_data - { + if let Some(permutation_data) = permutation_data { eval_permutation_checks::( stark, config, vars, - &local_zs, - &next_zs, + permutation_data, + consumer, + ); + } +} + +pub(crate) fn eval_vanishing_poly_recursively( + builder: &mut CircuitBuilder, + stark: &S, + config: &StarkConfig, + vars: StarkEvaluationTargets, + permutation_data: Option>, + consumer: &mut RecursiveConstraintConsumer, +) where + F: RichField + Extendable, + C: GenericConfig, + S: Stark, + [(); S::COLUMNS]:, + [(); S::PUBLIC_INPUTS]:, +{ + stark.eval_ext_recursively(builder, vars, consumer); + if let Some(permutation_data) = permutation_data { + eval_permutation_checks_recursively::( + builder, + stark, + config, + vars, + permutation_data, consumer, - &permutation_challenge_sets, ); } } diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 1603b208..6bb1ac4e 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -90,7 +90,6 @@ where l_1, l_last, ); - // stark.eval_ext(vars, &mut consumer); let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckData { local_zs: permutation_zs.as_ref().unwrap().clone(), next_zs: permutation_zs_right.as_ref().unwrap().clone(), From 064b3c07a829deefadb5e71a974971da845e5ab8 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Tue, 22 Feb 2022 16:18:41 +0100 Subject: [PATCH 08/14] Forgot to set permutation cap --- starky/src/prover.rs | 2 -- starky/src/recursive_verifier.rs | 6 ++++++ starky/src/stark.rs | 1 - starky/src/verifier.rs | 1 - 4 files changed, 6 
insertions(+), 4 deletions(-) diff --git a/starky/src/prover.rs b/starky/src/prover.rs index 5cfcf6ea..ac6689c2 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -294,8 +294,6 @@ where permutation_check_data, &mut consumer, ); - // stark.eval_packed_base(vars, &mut consumer); - // TODO: Add in constraints for permutation arguments. // TODO: Fix this once we use a genuine `PackedField`. let mut constraints_evals = consumer.accumulators(); // We divide the constraints evaluations by `Z_H(x)`. diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index cc547396..608d6dc1 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -295,5 +295,11 @@ pub fn set_stark_proof_target, W, const D: usize>( &proof.openings.to_fri_openings(), ); + if let (Some(permutation_zs_cap_target), Some(permutation_zs_cap)) = + (&proof_target.permutation_zs_cap, &proof.permutation_zs_cap) + { + witness.set_cap_target(&permutation_zs_cap_target, &permutation_zs_cap); + } + set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof); } diff --git a/starky/src/stark.rs b/starky/src/stark.rs index a2a2f7fd..72614574 100644 --- a/starky/src/stark.rs +++ b/starky/src/stark.rs @@ -16,7 +16,6 @@ use crate::vars::StarkEvaluationTargets; use crate::vars::StarkEvaluationVars; /// Represents a STARK system. -// TODO: Add a `constraint_degree` fn that returns the maximum constraint degree. pub trait Stark, const D: usize>: Sync { /// The total number of columns in the trace. const COLUMNS: usize; diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 6bb1ac4e..44d3f9c7 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -102,7 +102,6 @@ where permutation_data, &mut consumer, ); - // TODO: Add in constraints for permutation arguments. let vanishing_polys_zeta = consumer.accumulators(); // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta. 
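[Editor's note, not part of the patch series] A minimal, self-contained sketch of the permutation argument these patches implement, using a toy prime field (arithmetic mod 97), a single column pair, and made-up values. The helper names (`add`, `mul`, `inv`), the modulus, and `gamma = 5` are illustrative only and do not come from the plonky2 code; the repository code additionally combines several column pairs and challenges per `Z` polynomial (see `get_permutation_batches`) and works over the extension field when evaluated at `zeta`.

const P: u64 = 97;

fn add(a: u64, b: u64) -> u64 {
    (a + b) % P
}
fn mul(a: u64, b: u64) -> u64 {
    (a * b) % P
}
fn inv(a: u64) -> u64 {
    // Fermat's little theorem: a^(P-2) is the inverse of a mod the prime P.
    let (mut base, mut exp, mut acc) = (a, P - 2, 1);
    while exp > 0 {
        if exp & 1 == 1 {
            acc = mul(acc, base);
        }
        base = mul(base, base);
        exp >>= 1;
    }
    acc
}

fn main() {
    // Two columns that are permutations of one another, like columns 2 and 3 of the
    // Fibonacci test trace once the last entry is zeroed.
    let lhs = [0u64, 1, 2, 3];
    let rhs = [1u64, 2, 3, 0];
    let gamma = 5; // random offset; beta is only needed when an instance has several column pairs

    // Running product: Z(1) = 1 and Z(g * x) = Z(x) * (lhs + gamma) / (rhs + gamma).
    let mut z = vec![1u64];
    for i in 0..lhs.len() {
        let prev = *z.last().unwrap();
        z.push(mul(prev, mul(add(lhs[i], gamma), inv(add(rhs[i], gamma)))));
    }

    for i in 0..lhs.len() {
        // The per-row constraint enforced by `eval_permutation_checks`:
        // Z(g * x) * (rhs + gamma) - Z(x) * (lhs + gamma) == 0.
        assert_eq!(mul(z[i + 1], add(rhs[i], gamma)), mul(z[i], add(lhs[i], gamma)));
    }
    // Because the columns are permutations of each other, the product telescopes back to 1,
    // which ties Z's boundary values together over the full subgroup.
    assert_eq!(*z.last().unwrap(), 1);
}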
From 4ea418a4865c7cb70b3bfdc17eab233e292a19f1 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Tue, 22 Feb 2022 16:35:20 +0100 Subject: [PATCH 09/14] Clippy --- starky/src/recursive_verifier.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index 608d6dc1..de01d39e 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -298,7 +298,7 @@ pub fn set_stark_proof_target, W, const D: usize>( if let (Some(permutation_zs_cap_target), Some(permutation_zs_cap)) = (&proof_target.permutation_zs_cap, &proof.permutation_zs_cap) { - witness.set_cap_target(&permutation_zs_cap_target, &permutation_zs_cap); + witness.set_cap_target(permutation_zs_cap_target, permutation_zs_cap); } set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof); From 150d76444081563e454e4edd554a5128c1b13271 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Tue, 22 Feb 2022 17:00:08 +0100 Subject: [PATCH 10/14] Simplification --- starky/src/fibonacci_stark.rs | 6 ++-- starky/src/get_challenges.rs | 58 +++++++++++++------------------- starky/src/proof.rs | 4 +-- starky/src/recursive_verifier.rs | 10 ++---- starky/src/verifier.rs | 4 +-- 5 files changed, 33 insertions(+), 49 deletions(-) diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs index 10b54d69..7961ad50 100644 --- a/starky/src/fibonacci_stark.rs +++ b/starky/src/fibonacci_stark.rs @@ -13,7 +13,7 @@ use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; /// Toy STARK system used for testing. /// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition /// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`. -/// Note: The `i, j` columns are used to test the permutation argument. +/// Note: The `i, j` columns are only used to test the permutation argument. #[derive(Copy, Clone)] struct FibonacciStark, const D: usize> { num_rows: usize, @@ -48,7 +48,7 @@ impl, const D: usize> FibonacciStark { Some(tmp) }) .collect::>(); - trace[self.num_rows - 1][3] = F::ZERO; + trace[self.num_rows - 1][3] = F::ZERO; // So that column 2 and 3 are permutation of one another. 
trace } } @@ -234,7 +234,7 @@ mod tests { let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits); set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof); - recursively_verify_stark_proof::(&mut builder, stark, pt, inner_config)?; + recursively_verify_stark_proof::(&mut builder, stark, pt, inner_config); if print_gate_counts { builder.print_gate_counts(0); diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs index 8ee71667..0f4aacee 100644 --- a/starky/src/get_challenges.rs +++ b/starky/src/get_challenges.rs @@ -1,4 +1,3 @@ -use anyhow::Result; use plonky2::field::extension_field::Extendable; use plonky2::field::polynomial::PolynomialCoeffs; use plonky2::fri::proof::{FriProof, FriProofTarget}; @@ -28,7 +27,7 @@ fn get_challenges( pow_witness: F, config: &StarkConfig, degree_bits: usize, -) -> Result> +) -> StarkProofChallenges where F: RichField + Extendable, C: GenericConfig, @@ -40,20 +39,15 @@ where challenger.observe_cap(trace_cap); - let permutation_challenge_sets = if stark.uses_permutation_args() { - get_n_permutation_challenge_sets( + let permutation_challenge_sets = permutation_zs_cap.map(|permutation_zs_cap| { + let tmp = get_n_permutation_challenge_sets( &mut challenger, num_challenges, stark.permutation_batch_size(), - ) - } else { - vec![] - }; - if stark.uses_permutation_args() { - let cap = - permutation_zs_cap.ok_or_else(|| anyhow::Error::msg("expected permutation_zs_cap")); - challenger.observe_cap(cap?); - } + ); + challenger.observe_cap(permutation_zs_cap); + tmp + }); let stark_alphas = challenger.get_n_challenges(num_challenges); @@ -62,7 +56,7 @@ where challenger.observe_openings(&openings.to_fri_openings()); - Ok(StarkProofChallenges { + StarkProofChallenges { permutation_challenge_sets, stark_alphas, stark_zeta, @@ -73,7 +67,7 @@ where degree_bits, &config.fri_config, ), - }) + } } impl StarkProofWithPublicInputs @@ -86,11 +80,10 @@ where stark: &S, config: &StarkConfig, degree_bits: usize, - ) -> anyhow::Result> { - Ok(self - .get_challenges(stark, config, degree_bits)? + ) -> Vec { + self.get_challenges(stark, config, degree_bits) .fri_challenges - .fri_query_indices) + .fri_query_indices } /// Computes all Fiat-Shamir challenges used in the STARK proof. 
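[Editor's note, not part of the patch: after this simplification the challenge derivation keys off the presence of the permutation `Z` cap and no longer returns a `Result`. The order is: observe the trace cap; if the proof carries a permutation `Z` cap, draw the permutation challenge sets and then observe that cap; draw `stark_alphas`; observe the quotient cap; draw `stark_zeta`; observe the openings; finally derive the FRI challenges.]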
@@ -99,7 +92,7 @@ where stark: &S, config: &StarkConfig, degree_bits: usize, - ) -> Result> { + ) -> StarkProofChallenges { let StarkProof { trace_cap, permutation_zs_cap, @@ -146,7 +139,7 @@ pub(crate) fn get_challenges_target< final_poly: &PolynomialCoeffsExtTarget, pow_witness: Target, config: &StarkConfig, -) -> Result> +) -> StarkProofChallengesTarget where C::Hasher: AlgebraicHasher, { @@ -156,21 +149,16 @@ where challenger.observe_cap(trace_cap); - let permutation_challenge_sets = if stark.uses_permutation_args() { - get_n_permutation_challenge_sets_target( + let permutation_challenge_sets = permutation_zs_cap.map(|permutation_zs_cap| { + let tmp = get_n_permutation_challenge_sets_target( builder, &mut challenger, num_challenges, stark.permutation_batch_size(), - ) - } else { - vec![] - }; - if stark.uses_permutation_args() { - let cap = - permutation_zs_cap.ok_or_else(|| anyhow::Error::msg("expected permutation_zs_cap")); - challenger.observe_cap(cap?); - } + ); + challenger.observe_cap(permutation_zs_cap); + tmp + }); let stark_alphas = challenger.get_n_challenges(builder, num_challenges); @@ -179,7 +167,7 @@ where challenger.observe_openings(&openings.to_fri_openings()); - Ok(StarkProofChallengesTarget { + StarkProofChallengesTarget { permutation_challenge_sets, stark_alphas, stark_zeta, @@ -190,7 +178,7 @@ where pow_witness, &config.fri_config, ), - }) + } } impl StarkProofWithPublicInputsTarget { @@ -203,7 +191,7 @@ impl StarkProofWithPublicInputsTarget { builder: &mut CircuitBuilder, stark: &S, config: &StarkConfig, - ) -> Result> + ) -> StarkProofChallengesTarget where C::Hasher: AlgebraicHasher, { diff --git a/starky/src/proof.rs b/starky/src/proof.rs index d1f86d7e..1975b1b9 100644 --- a/starky/src/proof.rs +++ b/starky/src/proof.rs @@ -101,7 +101,7 @@ pub struct CompressedStarkProofWithPublicInputs< pub(crate) struct StarkProofChallenges, const D: usize> { /// Randomness used in any permutation arguments. - pub permutation_challenge_sets: Vec>, + pub permutation_challenge_sets: Option>>, /// Random values used to combine STARK constraints. pub stark_alphas: Vec, @@ -113,7 +113,7 @@ pub(crate) struct StarkProofChallenges, const D: us } pub(crate) struct StarkProofChallengesTarget { - pub permutation_challenge_sets: Vec>, + pub permutation_challenge_sets: Option>>, pub stark_alphas: Vec, pub stark_zeta: ExtensionTarget, pub fri_challenges: FriChallengesTarget, diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index de01d39e..6a7363ae 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -1,6 +1,5 @@ use std::iter::once; -use anyhow::Result; use itertools::Itertools; use plonky2::field::extension_field::Extendable; use plonky2::field::field_types::Field; @@ -33,15 +32,14 @@ pub fn recursively_verify_stark_proof< stark: S, proof_with_pis: StarkProofWithPublicInputsTarget, inner_config: &StarkConfig, -) -> Result<()> -where +) where C::Hasher: AlgebraicHasher, [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, { assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS); let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config); - let challenges = proof_with_pis.get_challenges::(builder, &stark, inner_config)?; + let challenges = proof_with_pis.get_challenges::(builder, &stark, inner_config); recursively_verify_stark_proof_with_challenges::( builder, @@ -51,8 +49,6 @@ where inner_config, degree_bits, ); - - Ok(()) } /// Recursively verifies an inner proof. 
@@ -115,7 +111,7 @@ fn recursively_verify_stark_proof_with_challenges< .then(|| PermutationCheckDataTarget { local_zs: permutation_zs.as_ref().unwrap().clone(), next_zs: permutation_zs_right.as_ref().unwrap().clone(), - permutation_challenge_sets: challenges.permutation_challenge_sets, + permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(), }); eval_vanishing_poly_recursively::( builder, diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 44d3f9c7..959cbc8e 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -34,7 +34,7 @@ where { ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS); let degree_bits = proof_with_pis.proof.recover_degree_bits(config); - let challenges = proof_with_pis.get_challenges(&stark, config, degree_bits)?; + let challenges = proof_with_pis.get_challenges(&stark, config, degree_bits); verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, degree_bits, config) } @@ -93,7 +93,7 @@ where let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckData { local_zs: permutation_zs.as_ref().unwrap().clone(), next_zs: permutation_zs_right.as_ref().unwrap().clone(), - permutation_challenge_sets: challenges.permutation_challenge_sets, + permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(), }); eval_vanishing_poly::( &stark, From a31c58b69d17c8899f2e86fac4070a323493bbad Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Tue, 22 Feb 2022 17:23:55 +0100 Subject: [PATCH 11/14] Use ReducingFactor --- plonky2/src/util/reducing.rs | 15 +++++++++++- starky/src/permutation.rs | 47 ++++++++++++++++-------------------- 2 files changed, 35 insertions(+), 27 deletions(-) diff --git a/plonky2/src/util/reducing.rs b/plonky2/src/util/reducing.rs index f29c6d08..626668e6 100644 --- a/plonky2/src/util/reducing.rs +++ b/plonky2/src/util/reducing.rs @@ -1,6 +1,6 @@ use std::borrow::Borrow; -use plonky2_field::extension_field::Extendable; +use plonky2_field::extension_field::{Extendable, FieldExtension}; use plonky2_field::field_types::Field; use plonky2_field::polynomial::PolynomialCoeffs; @@ -35,6 +35,11 @@ impl ReducingFactor { self.base * x } + fn mul_ext, const D: usize>(&mut self, x: FE) -> FE { + self.count += 1; + x.scalar_mul(self.base) + } + fn mul_poly(&mut self, p: &mut PolynomialCoeffs) { self.count += 1; *p *= self.base; @@ -45,6 +50,14 @@ impl ReducingFactor { .fold(F::ZERO, |acc, x| self.mul(acc) + *x.borrow()) } + pub fn reduce_ext, const D: usize>( + &mut self, + iter: impl DoubleEndedIterator>, + ) -> FE { + iter.rev() + .fold(FE::ZERO, |acc, x| self.mul_ext(acc) + *x.borrow()) + } + pub fn reduce_polys( &mut self, polys: impl DoubleEndedIterator>>, diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs index 1113094d..dad4b661 100644 --- a/starky/src/permutation.rs +++ b/starky/src/permutation.rs @@ -11,6 +11,7 @@ use plonky2::iop::ext_target::ExtensionTarget; use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; +use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget}; use rayon::prelude::*; use crate::config::StarkConfig; @@ -283,19 +284,15 @@ pub(crate) fn eval_permutation_checks, Vec<_>) = column_pairs + .iter() + .map(|&(i, j)| (vars.local_values[i], vars.local_values[j])) + .unzip(); + ( + factor.reduce_ext(lhs.into_iter()) + FE::from_basefield(*gamma), + factor.reduce_ext(rhs.into_iter()) + FE::from_basefield(*gamma), + ) }) .unzip(); 
let constraint = next_zs[i] * reduced_rhs.into_iter().product() @@ -353,19 +350,17 @@ pub(crate) fn eval_permutation_checks_recursively( let zero = builder.zero_extension(); let beta_ext = builder.convert_to_ext(*beta); let gamma_ext = builder.convert_to_ext(*gamma); - let mut reduced = - column_pairs - .iter() - .rev() - .fold((zero, zero), |(lhs, rhs), &(i, j)| { - ( - builder.mul_add_extension(lhs, beta_ext, vars.local_values[i]), - builder.mul_add_extension(rhs, beta_ext, vars.local_values[j]), - ) - }); - reduced.0 = builder.add_extension(reduced.0, gamma_ext); - reduced.1 = builder.add_extension(reduced.1, gamma_ext); - reduced + let mut factor = ReducingFactorTarget::new(beta_ext); + let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs + .iter() + .map(|&(i, j)| (vars.local_values[i], vars.local_values[j])) + .unzip(); + let reduced_lhs = factor.reduce(&lhs, builder); + let reduced_rhs = factor.reduce(&rhs, builder); + ( + builder.add_extension(reduced_lhs, gamma_ext), + builder.add_extension(reduced_rhs, gamma_ext), + ) }) .unzip(); let reduced_lhs_product = builder.mul_many_extension(&reduced_lhs); From 17bbc6f3e403642296ca73be0147debb31e9e86e Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Tue, 22 Feb 2022 17:30:08 +0100 Subject: [PATCH 12/14] Minor --- starky/src/prover.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/starky/src/prover.rs b/starky/src/prover.rs index ac6689c2..fe007f05 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -84,7 +84,7 @@ where challenger.observe_cap(&trace_cap); // Permutation arguments. - let permutation_zs_commitment_challenges = if stark.uses_permutation_args() { + let permutation_zs_commitment_challenges = stark.uses_permutation_args().then(|| { let permutation_challenge_sets = get_n_permutation_challenge_sets( &mut challenger, config.num_challenges, @@ -101,7 +101,7 @@ where timed!( timing, "compute permutation Z commitments", - Some(( + ( PolynomialBatch::from_values( permutation_z_polys, rate_bits, @@ -111,11 +111,9 @@ where None, ), permutation_challenge_sets - )) + ) ) - } else { - None - }; + }); let permutation_zs_commitment = permutation_zs_commitment_challenges .as_ref() .map(|(comm, _)| comm); From 8c5cbbc7c66a51c805003dfcce5ed02d472cf99d Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Tue, 22 Feb 2022 17:40:48 +0100 Subject: [PATCH 13/14] Add first row Z check --- starky/src/permutation.rs | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs index dad4b661..d2a16fc4 100644 --- a/starky/src/permutation.rs +++ b/starky/src/permutation.rs @@ -264,7 +264,12 @@ pub(crate) fn eval_permutation_checks( next_zs, permutation_challenge_sets, } = permutation_data; - // TODO: Z_1 check. 
+ + let one = builder.one_extension(); + // Check that Z(1) = 1; + for &z in &local_zs { + let z_1 = builder.sub_extension(z, one); + consumer.constraint_first_row(builder, z_1); + } + let permutation_pairs = stark.permutation_pairs(); let permutation_batches = get_permutation_batches( From dd4cc21309c860a7be2f2e1baa830bdba7e033a6 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Wed, 23 Feb 2022 09:36:28 +0100 Subject: [PATCH 14/14] PR feedback --- starky/src/permutation.rs | 13 +++++----- starky/src/proof.rs | 2 ++ starky/src/prover.rs | 38 +++++++++++++-------------- starky/src/recursive_verifier.rs | 44 +++++++++++++++++++++++--------- starky/src/vanishing_poly.rs | 12 +++++---- starky/src/verifier.rs | 34 +++++++++++++++++++++--- 6 files changed, 95 insertions(+), 48 deletions(-) diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs index d2a16fc4..2e1d603c 100644 --- a/starky/src/permutation.rs +++ b/starky/src/permutation.rs @@ -4,6 +4,7 @@ use itertools::Itertools; use plonky2::field::batch_util::batch_multiply_inplace; use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::field::field_types::Field; +use plonky2::field::packed_field::PackedField; use plonky2::field::polynomial::PolynomialValues; use plonky2::hash::hash_types::RichField; use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; @@ -54,7 +55,6 @@ pub(crate) struct PermutationChallengeSet { pub(crate) fn compute_permutation_z_polys( stark: &S, config: &StarkConfig, - challenger: &mut Challenger, trace_poly_values: &[PolynomialValues], permutation_challenge_sets: &[PermutationChallengeSet], ) -> Vec> @@ -239,27 +239,28 @@ pub(crate) fn get_permutation_batches<'a, T: Copy>( } // TODO: Use slices. -pub struct PermutationCheckData, const D2: usize> { +pub struct PermutationCheckVars, const D2: usize> { pub(crate) local_zs: Vec, pub(crate) next_zs: Vec, pub(crate) permutation_challenge_sets: Vec>, } -pub(crate) fn eval_permutation_checks( +pub(crate) fn eval_permutation_checks( stark: &S, config: &StarkConfig, vars: StarkEvaluationVars, - permutation_data: PermutationCheckData, + permutation_data: PermutationCheckVars, consumer: &mut ConstraintConsumer, ) where F: RichField + Extendable, FE: FieldExtension, + P: PackedField, C: GenericConfig, S: Stark, [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, { - let PermutationCheckData { + let PermutationCheckVars { local_zs, next_zs, permutation_challenge_sets, @@ -350,7 +351,6 @@ pub(crate) fn eval_permutation_checks_recursively( // Each zs value corresponds to a permutation batch. for (i, instances) in permutation_batches.iter().enumerate() { - // Z(gx) * down = Z x * up let (reduced_lhs, reduced_rhs): (Vec>, Vec>) = instances .iter() @@ -359,7 +359,6 @@ pub(crate) fn eval_permutation_checks_recursively( pair: PermutationPair { column_pairs }, challenge: PermutationChallenge { beta, gamma }, } = instance; - let zero = builder.zero_extension(); let beta_ext = builder.convert_to_ext(*beta); let gamma_ext = builder.convert_to_ext(*gamma); let mut factor = ReducingFactorTarget::new(beta_ext); diff --git a/starky/src/proof.rs b/starky/src/proof.rs index 1975b1b9..dba3db3e 100644 --- a/starky/src/proof.rs +++ b/starky/src/proof.rs @@ -32,6 +32,7 @@ pub struct StarkProof, C: GenericConfig, } impl, C: GenericConfig, const D: usize> StarkProof { + /// Recover the length of the trace from a STARK proof and a STARK config. 
pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize { let initial_merkle_proof = &self.opening_proof.query_round_proofs[0] .initial_trees_proof @@ -51,6 +52,7 @@ pub struct StarkProofTarget { } impl StarkProofTarget { + /// Recover the length of the trace from a STARK proof and a STARK config. pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize { let initial_merkle_proof = &self.opening_proof.query_round_proofs[0] .initial_trees_proof diff --git a/starky/src/prover.rs b/starky/src/prover.rs index fe007f05..336b9963 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -18,7 +18,7 @@ use rayon::prelude::*; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; -use crate::permutation::PermutationCheckData; +use crate::permutation::PermutationCheckVars; use crate::permutation::{ compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet, }; @@ -93,26 +93,23 @@ where let permutation_z_polys = compute_permutation_z_polys::( &stark, config, - &mut challenger, &trace_poly_values, &permutation_challenge_sets, ); - timed!( + let permutation_zs_commitment = timed!( timing, "compute permutation Z commitments", - ( - PolynomialBatch::from_values( - permutation_z_polys, - rate_bits, - false, - config.fri_config.cap_height, - timing, - None, - ), - permutation_challenge_sets + PolynomialBatch::from_values( + permutation_z_polys, + rate_bits, + false, + config.fri_config.cap_height, + timing, + None, ) - ) + ); + (permutation_zs_commitment, permutation_challenge_sets) }); let permutation_zs_commitment = permutation_zs_commitment_challenges .as_ref() @@ -251,6 +248,8 @@ where // Retrieve the LDE values at index `i`. let get_at_index = |comm: &'a PolynomialBatch, i: usize| -> &'a [F] { comm.get_lde_values(i * step) }; + let get_trace_at_index = |i| get_at_index(trace_commitment, i).try_into().unwrap(); + // Last element of the subgroup. let last = F::primitive_root_of_unity(degree_bits).inverse(); let size = degree << quotient_degree_bits; @@ -271,21 +270,20 @@ where lagrange_last.values[i], ); let vars = StarkEvaluationVars:: { - local_values: &get_at_index(trace_commitment, i).try_into().unwrap(), - next_values: &get_at_index(trace_commitment, (i + next_step) % size) - .try_into() - .unwrap(), + local_values: &get_trace_at_index(i), + next_values: &get_trace_at_index((i + next_step) % size), public_inputs: &public_inputs, }; let permutation_check_data = permutation_zs_commitment_challenges.as_ref().map( - |(permutation_zs_commitment, permutation_challenge_sets)| PermutationCheckData { + |(permutation_zs_commitment, permutation_challenge_sets)| PermutationCheckVars { local_zs: get_at_index(permutation_zs_commitment, i).to_vec(), next_zs: get_at_index(permutation_zs_commitment, (i + next_step) % size) .to_vec(), permutation_challenge_sets: permutation_challenge_sets.to_vec(), }, ); - eval_vanishing_poly::( + // TODO: Use packed field for F. 
+ eval_vanishing_poly::( stark, config, vars, diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index 6a7363ae..c1abbdb0 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -1,5 +1,6 @@ use std::iter::once; +use anyhow::{ensure, Result}; use itertools::Itertools; use plonky2::field::extension_field::Extendable; use plonky2::field::field_types::Field; @@ -69,6 +70,7 @@ fn recursively_verify_stark_proof_with_challenges< [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, { + check_permutation_options(&stark, &proof_with_pis, &challenges).unwrap(); let one = builder.one_extension(); let StarkProofWithPublicInputsTarget { @@ -202,18 +204,14 @@ pub fn add_virtual_stark_proof, S: Stark, con let fri_params = config.fri_params(degree_bits); let cap_height = fri_params.config.cap_height; - let num_leaves_per_oracle = if stark.uses_permutation_args() { - vec![ - S::COLUMNS, - stark.num_permutation_batches(config), - stark.quotient_degree_factor() * config.num_challenges, - ] - } else { - vec![ - S::COLUMNS, - stark.quotient_degree_factor() * config.num_challenges, - ] - }; + let num_leaves_per_oracle = once(S::COLUMNS) + .chain( + stark + .uses_permutation_args() + .then(|| stark.num_permutation_batches(config)), + ) + .chain(once(stark.quotient_degree_factor() * config.num_challenges)) + .collect_vec(); let permutation_zs_cap = stark .uses_permutation_args() @@ -299,3 +297,25 @@ pub fn set_stark_proof_target, W, const D: usize>( set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof); } + +/// Utility function to check that all permutation data wrapped in `Option`s are `Some` iff +/// the Stark uses a permutation argument. +fn check_permutation_options, S: Stark, const D: usize>( + stark: &S, + proof_with_pis: &StarkProofWithPublicInputsTarget, + challenges: &StarkProofChallengesTarget, +) -> Result<()> { + let options_is_some = [ + proof_with_pis.proof.permutation_zs_cap.is_some(), + proof_with_pis.proof.openings.permutation_zs.is_some(), + proof_with_pis.proof.openings.permutation_zs_right.is_some(), + challenges.permutation_challenge_sets.is_some(), + ]; + ensure!( + options_is_some + .into_iter() + .all(|b| b == stark.uses_permutation_args()), + "Permutation data doesn't match with Stark configuration." 
+ ); + Ok(()) +} diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs index 55ea7a5a..c8c75730 100644 --- a/starky/src/vanishing_poly.rs +++ b/starky/src/vanishing_poly.rs @@ -1,4 +1,5 @@ use plonky2::field::extension_field::{Extendable, FieldExtension}; +use plonky2::field::packed_field::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::GenericConfig; @@ -6,21 +7,22 @@ use plonky2::plonk::config::GenericConfig; use crate::config::StarkConfig; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::permutation::{ - eval_permutation_checks, eval_permutation_checks_recursively, PermutationCheckData, - PermutationCheckDataTarget, + eval_permutation_checks, eval_permutation_checks_recursively, PermutationCheckDataTarget, + PermutationCheckVars, }; use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; -pub(crate) fn eval_vanishing_poly( +pub(crate) fn eval_vanishing_poly( stark: &S, config: &StarkConfig, vars: StarkEvaluationVars, - permutation_data: Option>, + permutation_data: Option>, consumer: &mut ConstraintConsumer, ) where F: RichField + Extendable, FE: FieldExtension, + P: PackedField, C: GenericConfig, S: Stark, [(); S::COLUMNS]:, @@ -28,7 +30,7 @@ pub(crate) fn eval_vanishing_poly( { stark.eval_packed_generic(vars, consumer); if let Some(permutation_data) = permutation_data { - eval_permutation_checks::( + eval_permutation_checks::( stark, config, vars, diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 959cbc8e..a9bf897c 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -11,7 +11,7 @@ use plonky2::plonk::plonk_common::reduce_with_powers; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; -use crate::permutation::PermutationCheckData; +use crate::permutation::PermutationCheckVars; use crate::proof::{StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs}; use crate::stark::Stark; use crate::vanishing_poly::eval_vanishing_poly; @@ -55,6 +55,7 @@ where [(); S::PUBLIC_INPUTS]:, [(); C::Hasher::HASH_SIZE]:, { + check_permutation_options(&stark, &proof_with_pis, &challenges)?; let StarkProofWithPublicInputs { proof, public_inputs, @@ -90,12 +91,12 @@ where l_1, l_last, ); - let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckData { + let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckVars { local_zs: permutation_zs.as_ref().unwrap().clone(), next_zs: permutation_zs_right.as_ref().unwrap().clone(), permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(), }); - eval_vanishing_poly::( + eval_vanishing_poly::( &stark, config, vars, @@ -153,7 +154,32 @@ fn eval_l_1_and_l_last(log_n: usize, x: F) -> (F, F) { (z_x * invs[0], z_x * invs[1]) } -/// Recover the length of the trace from a STARK proof and a STARK config. +/// Utility function to check that all permutation data wrapped in `Option`s are `Some` iff +/// the Stark uses a permutation argument. 
+fn check_permutation_options<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+    const D: usize,
+>(
+    stark: &S,
+    proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
+    challenges: &StarkProofChallenges<F, D>,
+) -> Result<()> {
+    let options_is_some = [
+        proof_with_pis.proof.permutation_zs_cap.is_some(),
+        proof_with_pis.proof.openings.permutation_zs.is_some(),
+        proof_with_pis.proof.openings.permutation_zs_right.is_some(),
+        challenges.permutation_challenge_sets.is_some(),
+    ];
+    ensure!(
+        options_is_some
+            .into_iter()
+            .all(|b| b == stark.uses_permutation_args()),
+        "Permutation data doesn't match with Stark configuration."
+    );
+    Ok(())
+}
 
 #[cfg(test)]
 mod tests {
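As a closing illustration of the `ReducingFactor` change in patch 11: `reduce_ext` (and `ReducingFactorTarget::reduce` in the circuit) combines the columns of a permutation pair into a single value of the form sum_i beta^i * v_i, to which gamma is then added. The self-contained sketch below uses plain u64 arithmetic (the real code works in a field extension, so there is no overflow concern there) to show that the reverse Horner fold computes exactly that sum.

// Reverse Horner fold, mirroring ReducingFactor::reduce_ext: folding from the
// last value down computes v[0] + v[1]*beta + v[2]*beta^2 + ...
fn reduce(beta: u64, values: &[u64]) -> u64 {
    values
        .iter()
        .rev()
        .fold(0u64, |acc, &v| acc.wrapping_mul(beta).wrapping_add(v))
}

fn main() {
    let beta = 7u64;
    let values = [3u64, 5, 11];
    // Direct evaluation of 3 + 5*7 + 11*49 = 577.
    let direct: u64 = values
        .iter()
        .enumerate()
        .map(|(i, &v)| v * beta.pow(i as u32))
        .sum();
    assert_eq!(reduce(beta, &values), direct);
    // gamma is added on top of the reduced value, as in the permutation check.
    let gamma = 13u64;
    println!("combined column value: {}", reduce(beta, &values) + gamma);
}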