diff --git a/evm/src/prover.rs b/evm/src/prover.rs index 2da098f2..0b7858d3 100644 --- a/evm/src/prover.rs +++ b/evm/src/prover.rs @@ -612,7 +612,9 @@ where local_values: auxiliary_polys_commitment.get_lde_values_packed(i_start, step) [..num_lookup_columns] .to_vec(), - next_values: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step), + next_values: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step) + [..num_lookup_columns] + .to_vec(), challenges: challenges.to_vec(), }); diff --git a/starky/Cargo.toml b/starky/Cargo.toml index a3f9b37c..0efae5fc 100644 --- a/starky/Cargo.toml +++ b/starky/Cargo.toml @@ -20,8 +20,10 @@ timing = ["plonky2/timing"] anyhow = { version = "1.0.40", default-features = false } itertools = { version = "0.11.0", default-features = false } log = { version = "0.4.14", default-features = false } +num-bigint = { version = "0.4.3", default-features = false } plonky2_maybe_rayon = { path = "../maybe_rayon", default-features = false } plonky2 = { path = "../plonky2", default-features = false } +plonky2_util = { path = "../util", default-features = false } [dev-dependencies] env_logger = { version = "0.9.0", default-features = false } diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs index d34ccfd2..903c0abf 100644 --- a/starky/src/fibonacci_stark.rs +++ b/starky/src/fibonacci_stark.rs @@ -11,7 +11,7 @@ use plonky2::plonk::circuit_builder::CircuitBuilder; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; -use crate::permutation::PermutationPair; +use crate::lookup::{Column, Lookup}; use crate::stark::Stark; use crate::util::trace_rows_to_poly_values; @@ -41,15 +41,16 @@ impl, const D: usize> FibonacciStark { } } - /// Generate the trace using `x0, x1, 0, 1` as initial state values. + /// Generate the trace using `x0, x1, 0, 1, 1` as initial state values. fn generate_trace(&self, x0: F, x1: F) -> Vec> { let mut trace_rows = (0..self.num_rows) - .scan([x0, x1, F::ZERO, F::ONE], |acc, _| { + .scan([x0, x1, F::ZERO, F::ONE, F::ONE], |acc, _| { let tmp = *acc; acc[0] = tmp[1]; acc[1] = tmp[0] + tmp[1]; acc[2] = tmp[2] + F::ONE; acc[3] = tmp[3] + F::ONE; + // acc[4] (i.e. frequency column) remains unchanged, as we're permuting a strictly monotonous sequence. 
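+ // (With all frequencies equal to 1, the logUp relation sum_i 1/(x+f_i) = sum_j m_j/(x+t_j) degenerates into a permutation check between columns 2 and 3, the role previously played by `PermutationPair::singletons(2, 3)`.)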
Some(tmp) }) .collect::>(); @@ -58,7 +59,7 @@ impl, const D: usize> FibonacciStark { } } -const COLUMNS: usize = 4; +const COLUMNS: usize = 5; const PUBLIC_INPUTS: usize = 3; impl, const D: usize> Stark for FibonacciStark { @@ -127,8 +128,13 @@ impl, const D: usize> Stark for FibonacciStar 2 } - fn permutation_pairs(&self) -> Vec { - vec![PermutationPair::singletons(2, 3)] + fn lookups(&self) -> Vec> { + vec![Lookup { + columns: vec![Column::single(2)], + table_column: Column::single(3), + frequencies_column: Column::single(4), + filter_columns: vec![None; 1], + }] } } diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs index b34b427d..5f9beddc 100644 --- a/starky/src/get_challenges.rs +++ b/starky/src/get_challenges.rs @@ -12,16 +12,13 @@ use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; use crate::config::StarkConfig; -use crate::permutation::{ - get_n_permutation_challenge_sets, get_n_permutation_challenge_sets_target, -}; +use crate::lookup::{get_grand_product_challenge_set, get_grand_product_challenge_set_target}; use crate::proof::*; use crate::stark::Stark; -fn get_challenges( - stark: &S, +fn get_challenges( trace_cap: &MerkleCap, - permutation_zs_cap: Option<&MerkleCap>, + auxiliary_polys_cap: Option<&MerkleCap>, quotient_polys_cap: &MerkleCap, openings: &StarkOpeningSet, commit_phase_merkle_caps: &[MerkleCap], @@ -33,7 +30,6 @@ fn get_challenges( where F: RichField + Extendable, C: GenericConfig, - S: Stark, { let num_challenges = config.num_challenges; @@ -41,13 +37,9 @@ where challenger.observe_cap(trace_cap); - let permutation_challenge_sets = permutation_zs_cap.map(|permutation_zs_cap| { - let tmp = get_n_permutation_challenge_sets( - &mut challenger, - num_challenges, - stark.permutation_batch_size(), - ); - challenger.observe_cap(permutation_zs_cap); + let lookup_challenge_set = auxiliary_polys_cap.map(|auxiliary_polys_cap| { + let tmp = get_grand_product_challenge_set(&mut challenger, num_challenges); + challenger.observe_cap(auxiliary_polys_cap); tmp }); @@ -59,7 +51,7 @@ where challenger.observe_openings(&openings.to_fri_openings()); StarkProofChallenges { - permutation_challenge_sets, + lookup_challenge_set, stark_alphas, stark_zeta, fri_challenges: challenger.fri_challenges::( @@ -79,27 +71,21 @@ where { // TODO: Should be used later in compression? #![allow(dead_code)] - pub(crate) fn fri_query_indices>( - &self, - stark: &S, - config: &StarkConfig, - degree_bits: usize, - ) -> Vec { - self.get_challenges(stark, config, degree_bits) + pub(crate) fn fri_query_indices(&self, config: &StarkConfig, degree_bits: usize) -> Vec { + self.get_challenges(config, degree_bits) .fri_challenges .fri_query_indices } /// Computes all Fiat-Shamir challenges used in the STARK proof. 
- pub(crate) fn get_challenges>( + pub(crate) fn get_challenges( &self, - stark: &S, config: &StarkConfig, degree_bits: usize, ) -> StarkProofChallenges { let StarkProof { trace_cap, - permutation_zs_cap, + auxiliary_polys_cap, quotient_polys_cap, openings, opening_proof: @@ -111,10 +97,9 @@ where }, } = &self.proof; - get_challenges::( - stark, + get_challenges::( trace_cap, - permutation_zs_cap.as_ref(), + auxiliary_polys_cap.as_ref(), quotient_polys_cap, openings, commit_phase_merkle_caps, @@ -130,13 +115,11 @@ where pub(crate) fn get_challenges_target< F: RichField + Extendable, C: GenericConfig, - S: Stark, const D: usize, >( builder: &mut CircuitBuilder, - stark: &S, trace_cap: &MerkleCapTarget, - permutation_zs_cap: Option<&MerkleCapTarget>, + auxiliary_polys_cap: Option<&MerkleCapTarget>, quotient_polys_cap: &MerkleCapTarget, openings: &StarkOpeningSetTarget, commit_phase_merkle_caps: &[MerkleCapTarget], @@ -153,13 +136,8 @@ where challenger.observe_cap(trace_cap); - let permutation_challenge_sets = permutation_zs_cap.map(|permutation_zs_cap| { - let tmp = get_n_permutation_challenge_sets_target( - builder, - &mut challenger, - num_challenges, - stark.permutation_batch_size(), - ); + let lookup_challenge_set = auxiliary_polys_cap.map(|permutation_zs_cap| { + let tmp = get_grand_product_challenge_set_target(builder, &mut challenger, num_challenges); challenger.observe_cap(permutation_zs_cap); tmp }); @@ -172,7 +150,7 @@ where challenger.observe_openings(&openings.to_fri_openings()); StarkProofChallengesTarget { - permutation_challenge_sets, + lookup_challenge_set, stark_alphas, stark_zeta, fri_challenges: challenger.fri_challenges( @@ -186,22 +164,19 @@ where } impl StarkProofWithPublicInputsTarget { - pub(crate) fn get_challenges< - F: RichField + Extendable, - C: GenericConfig, - S: Stark, - >( + pub(crate) fn get_challenges( &self, builder: &mut CircuitBuilder, - stark: &S, config: &StarkConfig, ) -> StarkProofChallengesTarget where + F: RichField + Extendable, + C: GenericConfig, C::Hasher: AlgebraicHasher, { let StarkProofTarget { trace_cap, - permutation_zs_cap, + auxiliary_polys_cap, quotient_polys_cap, openings, opening_proof: @@ -213,11 +188,10 @@ impl StarkProofWithPublicInputsTarget { }, } = &self.proof; - get_challenges_target::( + get_challenges_target::( builder, - stark, trace_cap, - permutation_zs_cap.as_ref(), + auxiliary_polys_cap.as_ref(), quotient_polys_cap, openings, commit_phase_merkle_caps, diff --git a/starky/src/lib.rs b/starky/src/lib.rs index 635e57bd..f6b4f5e0 100644 --- a/starky/src/lib.rs +++ b/starky/src/lib.rs @@ -1,5 +1,6 @@ #![allow(clippy::too_many_arguments)] #![allow(clippy::type_complexity)] +#![allow(unused)] // TODO: Remove post code migration #![cfg_attr(not(feature = "std"), no_std)] extern crate alloc; @@ -9,7 +10,7 @@ mod get_challenges; pub mod config; pub mod constraint_consumer; pub mod evaluation_frame; -pub mod permutation; +pub mod lookup; pub mod proof; pub mod prover; pub mod recursive_verifier; diff --git a/starky/src/lookup.rs b/starky/src/lookup.rs new file mode 100644 index 00000000..19f20424 --- /dev/null +++ b/starky/src/lookup.rs @@ -0,0 +1,1002 @@ +use alloc::vec; +use alloc::vec::Vec; +use core::borrow::Borrow; +use core::fmt::Debug; +use core::iter::repeat; + +use itertools::Itertools; +use num_bigint::BigUint; +use plonky2::field::batch_util::batch_add_inplace; +use plonky2::field::extension::{Extendable, FieldExtension}; +use plonky2::field::packed::PackedField; +use plonky2::field::polynomial::PolynomialValues; +use 
plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; +use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; +use plonky2::iop::ext_target::ExtensionTarget; +use plonky2::iop::target::Target; +use plonky2::plonk::circuit_builder::CircuitBuilder; +use plonky2::plonk::config::{AlgebraicHasher, Hasher}; +use plonky2::plonk::plonk_common::{ + reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit, +}; +use plonky2::util::serialization::{Buffer, IoResult, Read, Write}; +use plonky2_util::ceil_div_usize; + +use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::evaluation_frame::StarkEvaluationFrame; +use crate::stark::Stark; + +/// Represents a filter, which evaluates to 1 if the row must be considered and 0 if it should be ignored. +/// It's an arbitrary degree 2 combination of columns: `products` are the degree 2 terms, and `constants` are +/// the degree 1 terms. +#[derive(Clone, Debug)] +pub struct Filter { + products: Vec<(Column, Column)>, + constants: Vec>, +} + +impl Filter { + pub fn new(products: Vec<(Column, Column)>, constants: Vec>) -> Self { + Self { + products, + constants, + } + } + + /// Returns a filter made of a single column. + pub fn new_simple(col: Column) -> Self { + Self { + products: vec![], + constants: vec![col], + } + } + + /// Given the column values for the current and next rows, evaluates the filter. + pub(crate) fn eval_filter(&self, v: &[P], next_v: &[P]) -> P + where + FE: FieldExtension, + P: PackedField, + { + self.products + .iter() + .map(|(col1, col2)| col1.eval_with_next(v, next_v) * col2.eval_with_next(v, next_v)) + .sum::
<P>() + + self + .constants + .iter() + .map(|col| col.eval_with_next(v, next_v)) + .sum::<P>
() + } + + /// Circuit version of `eval_filter`: + /// Given the column values for the current and next rows, evaluates the filter. + pub(crate) fn eval_filter_circuit( + &self, + builder: &mut CircuitBuilder, + v: &[ExtensionTarget], + next_v: &[ExtensionTarget], + ) -> ExtensionTarget + where + F: RichField + Extendable, + { + let prods = self + .products + .iter() + .map(|(col1, col2)| { + let col1_eval = col1.eval_with_next_circuit(builder, v, next_v); + let col2_eval = col2.eval_with_next_circuit(builder, v, next_v); + builder.mul_extension(col1_eval, col2_eval) + }) + .collect::>(); + + let consts = self + .constants + .iter() + .map(|col| col.eval_with_next_circuit(builder, v, next_v)) + .collect::>(); + + let prods = builder.add_many_extension(prods); + let consts = builder.add_many_extension(consts); + builder.add_extension(prods, consts) + } + + /// Evaluate on a row of a table given in column-major form. + pub(crate) fn eval_table(&self, table: &[PolynomialValues], row: usize) -> F { + self.products + .iter() + .map(|(col1, col2)| col1.eval_table(table, row) * col2.eval_table(table, row)) + .sum::() + + self + .constants + .iter() + .map(|col| col.eval_table(table, row)) + .sum() + } + + pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues]) -> Vec { + let length = table[0].len(); + + (0..length) + .map(|row| self.eval_table(table, row)) + .collect::>() + } +} + +/// Represent two linear combination of columns, corresponding to the current and next row values. +/// Each linear combination is represented as: +/// - a vector of `(usize, F)` corresponding to the column number and the associated multiplicand +/// - the constant of the linear combination. +#[derive(Clone, Debug)] +pub struct Column { + linear_combination: Vec<(usize, F)>, + next_row_linear_combination: Vec<(usize, F)>, + constant: F, +} + +impl Column { + /// Returns the representation of a single column in the current row. + pub fn single(c: usize) -> Self { + Self { + linear_combination: vec![(c, F::ONE)], + next_row_linear_combination: vec![], + constant: F::ZERO, + } + } + + /// Returns multiple single columns in the current row. + pub fn singles>>( + cs: I, + ) -> impl Iterator { + cs.into_iter().map(|c| Self::single(*c.borrow())) + } + + /// Returns the representation of a single column in the next row. + pub fn single_next_row(c: usize) -> Self { + Self { + linear_combination: vec![], + next_row_linear_combination: vec![(c, F::ONE)], + constant: F::ZERO, + } + } + + /// Returns multiple single columns for the next row. + pub fn singles_next_row>>( + cs: I, + ) -> impl Iterator { + cs.into_iter().map(|c| Self::single_next_row(*c.borrow())) + } + + /// Returns a linear combination corresponding to a constant. + pub fn constant(constant: F) -> Self { + Self { + linear_combination: vec![], + next_row_linear_combination: vec![], + constant, + } + } + + /// Returns a linear combination corresponding to 0. + pub fn zero() -> Self { + Self::constant(F::ZERO) + } + + /// Returns a linear combination corresponding to 1. + pub fn one() -> Self { + Self::constant(F::ONE) + } + + /// Given an iterator of `(usize, F)` and a constant, returns the association linear combination of columns for the current row. + pub fn linear_combination_with_constant>( + iter: I, + constant: F, + ) -> Self { + let v = iter.into_iter().collect::>(); + assert!(!v.is_empty()); + + // Because this is a debug assertion, we only check it when the `std` + // feature is activated, as `Itertools::unique` relies on collections. 
+ #[cfg(feature = "std")] + debug_assert_eq!( + v.iter().map(|(c, _)| c).unique().count(), + v.len(), + "Duplicate columns." + ); + + Self { + linear_combination: v, + next_row_linear_combination: vec![], + constant, + } + } + + /// Given an iterator of `(usize, F)` and a constant, returns the associated linear combination of columns for the current and the next rows. + pub fn linear_combination_and_next_row_with_constant>( + iter: I, + next_row_iter: I, + constant: F, + ) -> Self { + let v = iter.into_iter().collect::>(); + let next_row_v = next_row_iter.into_iter().collect::>(); + + assert!(!v.is_empty() || !next_row_v.is_empty()); + + // Because these are debug assertions, we only check them when the `std` + // feature is activated, as `Itertools::unique` relies on collections. + #[cfg(feature = "std")] + { + debug_assert_eq!( + v.iter().map(|(c, _)| c).unique().count(), + v.len(), + "Duplicate columns." + ); + debug_assert_eq!( + next_row_v.iter().map(|(c, _)| c).unique().count(), + next_row_v.len(), + "Duplicate columns." + ); + } + + Self { + linear_combination: v, + next_row_linear_combination: next_row_v, + constant, + } + } + + /// Returns a linear combination of columns, with no additional constant. + pub fn linear_combination>(iter: I) -> Self { + Self::linear_combination_with_constant(iter, F::ZERO) + } + + /// Given an iterator of columns (c_0, ..., c_n) containing bits in little endian order: + /// returns the representation of c_0 + 2 * c_1 + ... + 2^n * c_n. + pub fn le_bits>>(cs: I) -> Self { + Self::linear_combination(cs.into_iter().map(|c| *c.borrow()).zip(F::TWO.powers())) + } + + /// Given an iterator of columns (c_0, ..., c_n) containing bits in little endian order: + /// returns the representation of c_0 + 2 * c_1 + ... + 2^n * c_n + k where `k` is an + /// additional constant. + pub fn le_bits_with_constant>>( + cs: I, + constant: F, + ) -> Self { + Self::linear_combination_with_constant( + cs.into_iter().map(|c| *c.borrow()).zip(F::TWO.powers()), + constant, + ) + } + + /// Given an iterator of columns (c_0, ..., c_n) containing bytes in little endian order: + /// returns the representation of c_0 + 256 * c_1 + ... + 256^n * c_n. + pub fn le_bytes>>(cs: I) -> Self { + Self::linear_combination( + cs.into_iter() + .map(|c| *c.borrow()) + .zip(F::from_canonical_u16(256).powers()), + ) + } + + /// Given an iterator of columns, returns the representation of their sum. + pub fn sum>>(cs: I) -> Self { + Self::linear_combination(cs.into_iter().map(|c| *c.borrow()).zip(repeat(F::ONE))) + } + + /// Given the column values for the current row, returns the evaluation of the linear combination. + pub(crate) fn eval(&self, v: &[P]) -> P + where + FE: FieldExtension, + P: PackedField, + { + self.linear_combination + .iter() + .map(|&(c, f)| v[c] * FE::from_basefield(f)) + .sum::
<P>() + + FE::from_basefield(self.constant) + } + + /// Given the column values for the current and next rows, evaluates the current and next linear combinations and returns their sum. + pub(crate) fn eval_with_next(&self, v: &[P], next_v: &[P]) -> P + where + FE: FieldExtension, + P: PackedField, + { + self.linear_combination + .iter() + .map(|&(c, f)| v[c] * FE::from_basefield(f)) + .sum::<P>
() + + self + .next_row_linear_combination + .iter() + .map(|&(c, f)| next_v[c] * FE::from_basefield(f)) + .sum::<P>
() + + FE::from_basefield(self.constant) + } + + /// Evaluate on a row of a table given in column-major form. + pub(crate) fn eval_table(&self, table: &[PolynomialValues], row: usize) -> F { + let mut res = self + .linear_combination + .iter() + .map(|&(c, f)| table[c].values[row] * f) + .sum::() + + self.constant; + + // If we access the next row at the last row, for sanity, we consider the next row's values to be 0. + // If the lookups are correctly written, the filter should be 0 in that case anyway. + if !self.next_row_linear_combination.is_empty() && row < table[0].values.len() - 1 { + res += self + .next_row_linear_combination + .iter() + .map(|&(c, f)| table[c].values[row + 1] * f) + .sum::(); + } + + res + } + + /// Evaluates the column on all rows. + pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues]) -> Vec { + let length = table[0].len(); + (0..length) + .map(|row| self.eval_table(table, row)) + .collect::>() + } + + /// Circuit version of `eval`: Given a row's targets, returns their linear combination. + pub(crate) fn eval_circuit( + &self, + builder: &mut CircuitBuilder, + v: &[ExtensionTarget], + ) -> ExtensionTarget + where + F: RichField + Extendable, + { + let pairs = self + .linear_combination + .iter() + .map(|&(c, f)| { + ( + v[c], + builder.constant_extension(F::Extension::from_basefield(f)), + ) + }) + .collect::>(); + let constant = builder.constant_extension(F::Extension::from_basefield(self.constant)); + builder.inner_product_extension(F::ONE, constant, pairs) + } + + /// Circuit version of `eval_with_next`: + /// Given the targets of the current and next row, returns the sum of their linear combinations. + pub(crate) fn eval_with_next_circuit( + &self, + builder: &mut CircuitBuilder, + v: &[ExtensionTarget], + next_v: &[ExtensionTarget], + ) -> ExtensionTarget + where + F: RichField + Extendable, + { + let mut pairs = self + .linear_combination + .iter() + .map(|&(c, f)| { + ( + v[c], + builder.constant_extension(F::Extension::from_basefield(f)), + ) + }) + .collect::>(); + let next_row_pairs = self.next_row_linear_combination.iter().map(|&(c, f)| { + ( + next_v[c], + builder.constant_extension(F::Extension::from_basefield(f)), + ) + }); + pairs.extend(next_row_pairs); + let constant = builder.constant_extension(F::Extension::from_basefield(self.constant)); + builder.inner_product_extension(F::ONE, constant, pairs) + } +} + +pub(crate) type ColumnFilter<'a, F> = (&'a [Column], &'a Option>); + +pub struct Lookup { + /// Columns whose values should be contained in the lookup table. + /// These are the f_i(x) polynomials in the logUp paper. + pub columns: Vec>, + /// Column containing the lookup table. + /// This is the t(x) polynomial in the paper. + pub table_column: Column, + /// Column containing the frequencies of `columns` in `table_column`. + /// This is the m(x) polynomial in the paper. + pub frequencies_column: Column, + + /// Columns to filter some elements. There is at most one filter + /// column per column to lookup. + pub filter_columns: Vec>>, +} + +impl Lookup { + pub fn num_helper_columns(&self, constraint_degree: usize) -> usize { + // One helper column for each column batch of size `constraint_degree-1`, + // then one column for the inverse of `table + challenge` and one for the `Z` polynomial. + ceil_div_usize(self.columns.len(), constraint_degree - 1) + 1 + } +} + +/// Randomness for a single instance of a permutation check protocol. 
+#[derive(Copy, Clone, Eq, PartialEq, Debug)] +pub(crate) struct GrandProductChallenge { + /// Randomness used to combine multiple columns into one. + pub(crate) beta: T, + /// Random offset that's added to the beta-reduced column values. + pub(crate) gamma: T, +} + +impl GrandProductChallenge { + pub(crate) fn combine<'a, FE, P, T: IntoIterator, const D2: usize>( + &self, + terms: T, + ) -> P + where + FE: FieldExtension, + P: PackedField, + T::IntoIter: DoubleEndedIterator, + { + reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma) + } +} + +impl GrandProductChallenge { + pub(crate) fn combine_circuit, const D: usize>( + &self, + builder: &mut CircuitBuilder, + terms: &[ExtensionTarget], + ) -> ExtensionTarget { + let reduced = reduce_with_powers_ext_circuit(builder, terms, self.beta); + let gamma = builder.convert_to_ext(self.gamma); + builder.add_extension(reduced, gamma) + } +} + +impl GrandProductChallenge { + pub(crate) fn combine_base_circuit, const D: usize>( + &self, + builder: &mut CircuitBuilder, + terms: &[Target], + ) -> Target { + let reduced = reduce_with_powers_circuit(builder, terms, self.beta); + builder.add(reduced, self.gamma) + } +} + +/// Like `GrandProductChallenge`, but with `num_challenges` copies to boost soundness. +#[derive(Clone, Eq, PartialEq, Debug)] +pub struct GrandProductChallengeSet { + pub(crate) challenges: Vec>, +} + +impl GrandProductChallengeSet { + pub(crate) fn to_buffer(&self, buffer: &mut Vec) -> IoResult<()> { + buffer.write_usize(self.challenges.len())?; + for challenge in &self.challenges { + buffer.write_target(challenge.beta)?; + buffer.write_target(challenge.gamma)?; + } + Ok(()) + } + + pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult { + let length = buffer.read_usize()?; + let mut challenges = Vec::with_capacity(length); + for _ in 0..length { + challenges.push(GrandProductChallenge { + beta: buffer.read_target()?, + gamma: buffer.read_target()?, + }); + } + + Ok(GrandProductChallengeSet { challenges }) + } +} + +fn get_grand_product_challenge>( + challenger: &mut Challenger, +) -> GrandProductChallenge { + let beta = challenger.get_challenge(); + let gamma = challenger.get_challenge(); + GrandProductChallenge { beta, gamma } +} + +pub(crate) fn get_grand_product_challenge_set>( + challenger: &mut Challenger, + num_challenges: usize, +) -> GrandProductChallengeSet { + let challenges = (0..num_challenges) + .map(|_| get_grand_product_challenge(challenger)) + .collect(); + GrandProductChallengeSet { challenges } +} + +fn get_grand_product_challenge_target< + F: RichField + Extendable, + H: AlgebraicHasher, + const D: usize, +>( + builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, +) -> GrandProductChallenge { + let beta = challenger.get_challenge(builder); + let gamma = challenger.get_challenge(builder); + GrandProductChallenge { beta, gamma } +} + +pub(crate) fn get_grand_product_challenge_set_target< + F: RichField + Extendable, + H: AlgebraicHasher, + const D: usize, +>( + builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, + num_challenges: usize, +) -> GrandProductChallengeSet { + let challenges = (0..num_challenges) + .map(|_| get_grand_product_challenge_target(builder, challenger)) + .collect(); + GrandProductChallengeSet { challenges } +} + +/// logUp protocol from +/// Compute the helper columns for the lookup argument. 
+/// Given columns `f0,...,fk` and a column `t`, such that `∪fi ⊆ t`, and challenges `x`, +/// this computes the helper columns `h_i = 1/(x+f_2i) + 1/(x+f_2i+1)`, `g = 1/(x+t)`, +/// and `Z(gx) = Z(x) + sum h_i(x) - m(x)g(x)` where `m` is the frequencies column. +pub(crate) fn lookup_helper_columns( + lookup: &Lookup, + trace_poly_values: &[PolynomialValues], + challenge: F, + constraint_degree: usize, +) -> Vec> { + assert!( + constraint_degree == 2 || constraint_degree == 3, + "TODO: Allow other constraint degrees." + ); + + assert_eq!(lookup.columns.len(), lookup.filter_columns.len()); + + let num_total_logup_entries = trace_poly_values[0].values.len() * lookup.columns.len(); + assert!(BigUint::from(num_total_logup_entries) < F::characteristic()); + + let num_helper_columns = lookup.num_helper_columns(constraint_degree); + let mut helper_columns: Vec> = Vec::with_capacity(num_helper_columns); + + let looking_cols = lookup + .columns + .iter() + .map(|col| vec![col.clone()]) + .collect::>>>(); + + let grand_challenge = GrandProductChallenge { + beta: F::ONE, + gamma: challenge, + }; + + let columns_filters = looking_cols + .iter() + .zip(lookup.filter_columns.iter()) + .map(|(col, filter)| (&col[..], filter)) + .collect::>(); + // For each batch of `constraint_degree-1` columns `fi`, compute `sum 1/(f_i+challenge)` and + // add it to the helper columns. + // Note: these are the h_k(x) polynomials in the paper, with a few differences: + // * Here, the first ratio m_0(x)/phi_0(x) is not included with the columns batched up to create the + // h_k polynomials; instead there's a separate helper column for it (see below). + // * Here, we use 1 instead of -1 as the numerator (and subtract later). + // * Here, for now, the batch size (l) is always constraint_degree - 1 = 2. + // * Here, there are filters for the columns, to only select some rows + // in a given column. + let mut helper_columns = get_helper_cols( + trace_poly_values, + trace_poly_values[0].len(), + &columns_filters, + grand_challenge, + constraint_degree, + ); + + // Add `1/(table+challenge)` to the helper columns. + // This is 1/phi_0(x) = 1/(x + t(x)) from the paper. + // Here, we don't include m(x) in the numerator, instead multiplying it with this column later. + let mut table = lookup.table_column.eval_all_rows(trace_poly_values); + for x in table.iter_mut() { + *x = challenge + *x; + } + let table_inverse: Vec = F::batch_multiplicative_inverse(&table); + + // Compute the `Z` polynomial with `Z(1)=0` and `Z(gx) = Z(x) + sum h_i(x) - frequencies(x)g(x)`. + // This enforces the check from the paper, that the sum of the h_k(x) polynomials is 0 over H. + // In the paper, that sum includes m(x)/(x + t(x)) = frequencies(x)/g(x), because that was bundled + // into the h_k(x) polynomials. + let frequencies = &lookup.frequencies_column.eval_all_rows(trace_poly_values); + let mut z = Vec::with_capacity(frequencies.len()); + z.push(F::ZERO); + for i in 0..frequencies.len() - 1 { + let x = helper_columns[..num_helper_columns - 1] + .iter() + .map(|col| col.values[i]) + .sum::() + - frequencies[i] * table_inverse[i]; + z.push(z[i] + x); + } + helper_columns.push(z.into()); + + helper_columns +} + +/// Given data associated to a lookup, check the associated helper polynomials. +pub(crate) fn eval_helper_columns( + filter: &[Option>], + columns: &[Vec
<P>], + local_values: &[P], + next_values: &[P], + helper_columns: &[P], + constraint_degree: usize, + challenges: &GrandProductChallenge, + consumer: &mut ConstraintConsumer<P>
, +) where + F: RichField + Extendable, + FE: FieldExtension, + P: PackedField, +{ + if !helper_columns.is_empty() { + for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() { + let fs = + &filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()]; + let h = helper_columns[j]; + + match chunk.len() { + 2 => { + let combin0 = challenges.combine(&chunk[0]); + let combin1 = challenges.combine(chunk[1].iter()); + + let f0 = if let Some(filter0) = &fs[0] { + filter0.eval_filter(local_values, next_values) + } else { + P::ONES + }; + let f1 = if let Some(filter1) = &fs[1] { + filter1.eval_filter(local_values, next_values) + } else { + P::ONES + }; + + consumer.constraint(combin1 * combin0 * h - f0 * combin1 - f1 * combin0); + } + 1 => { + let combin = challenges.combine(&chunk[0]); + let f0 = if let Some(filter1) = &fs[0] { + filter1.eval_filter(local_values, next_values) + } else { + P::ONES + }; + consumer.constraint(combin * h - f0); + } + + _ => todo!("Allow other constraint degrees"), + } + } + } +} + +/// Circuit version of `eval_helper_columns`. +/// Given data associated to a lookup (either a CTL or a range-check), check the associated helper polynomials. +pub(crate) fn eval_helper_columns_circuit, const D: usize>( + builder: &mut CircuitBuilder, + filter: &[Option>], + columns: &[Vec>], + local_values: &[ExtensionTarget], + next_values: &[ExtensionTarget], + helper_columns: &[ExtensionTarget], + constraint_degree: usize, + challenges: &GrandProductChallenge, + consumer: &mut RecursiveConstraintConsumer, +) { + if !helper_columns.is_empty() { + for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() { + let fs = + &filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()]; + let h = helper_columns[j]; + + let one = builder.one_extension(); + match chunk.len() { + 2 => { + let combin0 = challenges.combine_circuit(builder, &chunk[0]); + let combin1 = challenges.combine_circuit(builder, &chunk[1]); + + let f0 = if let Some(filter0) = &fs[0] { + filter0.eval_filter_circuit(builder, local_values, next_values) + } else { + one + }; + let f1 = if let Some(filter1) = &fs[1] { + filter1.eval_filter_circuit(builder, local_values, next_values) + } else { + one + }; + + let constr = builder.mul_sub_extension(combin0, h, f0); + let constr = builder.mul_extension(constr, combin1); + let f1_constr = builder.mul_extension(f1, combin0); + let constr = builder.sub_extension(constr, f1_constr); + + consumer.constraint(builder, constr); + } + 1 => { + let combin = challenges.combine_circuit(builder, &chunk[0]); + let f0 = if let Some(filter1) = &fs[0] { + filter1.eval_filter_circuit(builder, local_values, next_values) + } else { + one + }; + let constr = builder.mul_sub_extension(combin, h, f0); + consumer.constraint(builder, constr); + } + + _ => todo!("Allow other constraint degrees"), + } + } + } +} + +/// Given a STARK's trace, and the data associated to one lookup (either CTL or range check), +/// returns the associated helper polynomials. 
+pub(crate) fn get_helper_cols( + trace: &[PolynomialValues], + degree: usize, + columns_filters: &[ColumnFilter], + challenge: GrandProductChallenge, + constraint_degree: usize, +) -> Vec> { + let num_helper_columns = ceil_div_usize(columns_filters.len(), constraint_degree - 1); + + let mut helper_columns = Vec::with_capacity(num_helper_columns); + + let mut filter_index = 0; + for mut cols_filts in &columns_filters.iter().chunks(constraint_degree - 1) { + let (first_col, first_filter) = cols_filts.next().unwrap(); + + let mut filter_col = Vec::with_capacity(degree); + let first_combined = (0..degree) + .map(|d| { + let f = if let Some(filter) = first_filter { + let f = filter.eval_table(trace, d); + filter_col.push(f); + f + } else { + filter_col.push(F::ONE); + F::ONE + }; + if f.is_one() { + let evals = first_col + .iter() + .map(|c| c.eval_table(trace, d)) + .collect::>(); + challenge.combine(evals.iter()) + } else { + assert_eq!(f, F::ZERO, "Non-binary filter?"); + // Dummy value. Cannot be zero since it will be batch-inverted. + F::ONE + } + }) + .collect::>(); + + let mut acc = F::batch_multiplicative_inverse(&first_combined); + for d in 0..degree { + if filter_col[d].is_zero() { + acc[d] = F::ZERO; + } + } + + for (col, filt) in cols_filts { + let mut filter_col = Vec::with_capacity(degree); + let mut combined = (0..degree) + .map(|d| { + let f = if let Some(filter) = filt { + let f = filter.eval_table(trace, d); + filter_col.push(f); + f + } else { + filter_col.push(F::ONE); + F::ONE + }; + if f.is_one() { + let evals = col + .iter() + .map(|c| c.eval_table(trace, d)) + .collect::>(); + challenge.combine(evals.iter()) + } else { + assert_eq!(f, F::ZERO, "Non-binary filter?"); + // Dummy value. Cannot be zero since it will be batch-inverted. + F::ONE + } + }) + .collect::>(); + + combined = F::batch_multiplicative_inverse(&combined); + + for d in 0..degree { + if filter_col[d].is_zero() { + combined[d] = F::ZERO; + } + } + + batch_add_inplace(&mut acc, &combined); + } + + helper_columns.push(acc.into()); + } + assert_eq!(helper_columns.len(), num_helper_columns); + + helper_columns +} + +pub(crate) struct LookupCheckVars +where + F: Field, + FE: FieldExtension, + P: PackedField, +{ + pub(crate) local_values: Vec
<P>, + pub(crate) next_values: Vec<P>, + pub(crate) challenges: Vec, +} + +/// Constraints for the logUp lookup argument. +pub(crate) fn eval_packed_lookups_generic( + stark: &S, + lookups: &[Lookup], + vars: &S::EvaluationFrame, + lookup_vars: LookupCheckVars, + yield_constr: &mut ConstraintConsumer<P>
, +) where + F: RichField + Extendable, + FE: FieldExtension, + P: PackedField, + S: Stark, +{ + let local_values = vars.get_local_values(); + let next_values = vars.get_next_values(); + let degree = stark.constraint_degree(); + assert!( + degree == 2 || degree == 3, + "TODO: Allow other constraint degrees." + ); + let mut start = 0; + for lookup in lookups { + let num_helper_columns = lookup.num_helper_columns(degree); + for &challenge in &lookup_vars.challenges { + let grand_challenge = GrandProductChallenge { + beta: F::ONE, + gamma: challenge, + }; + let lookup_columns = lookup + .columns + .iter() + .map(|col| vec![col.eval_with_next(local_values, next_values)]) + .collect::>>(); + + // For each chunk, check that `h_i (x+f_2i) (x+f_{2i+1}) = (x+f_2i) * filter_{2i+1} + (x+f_{2i+1}) * filter_2i` + // if the chunk has length 2 or if it has length 1, check that `h_i * (x+f_2i) = filter_2i`, where x is the challenge + eval_helper_columns( + &lookup.filter_columns, + &lookup_columns, + local_values, + next_values, + &lookup_vars.local_values[start..start + num_helper_columns - 1], + degree, + &grand_challenge, + yield_constr, + ); + + let challenge = FE::from_basefield(challenge); + + // Check the `Z` polynomial. + let z = lookup_vars.local_values[start + num_helper_columns - 1]; + let next_z = lookup_vars.next_values[start + num_helper_columns - 1]; + let table_with_challenge = lookup.table_column.eval(local_values) + challenge; + let y = lookup_vars.local_values[start..start + num_helper_columns - 1] + .iter() + .fold(P::ZEROS, |acc, x| acc + *x) + * table_with_challenge + - lookup.frequencies_column.eval(local_values); + // Check that in the first row, z = 0; + yield_constr.constraint_first_row(z); + yield_constr.constraint((next_z - z) * table_with_challenge - y); + start += num_helper_columns; + } + } +} + +pub(crate) struct LookupCheckVarsTarget { + pub(crate) local_values: Vec>, + pub(crate) next_values: Vec>, + pub(crate) challenges: Vec, +} + +pub(crate) fn eval_ext_lookups_circuit< + F: RichField + Extendable, + S: Stark, + const D: usize, +>( + builder: &mut CircuitBuilder, + stark: &S, + vars: &S::EvaluationFrameTarget, + lookup_vars: LookupCheckVarsTarget, + yield_constr: &mut RecursiveConstraintConsumer, +) { + let one = builder.one_extension(); + let degree = stark.constraint_degree(); + let lookups = stark.lookups(); + + let local_values = vars.get_local_values(); + let next_values = vars.get_next_values(); + assert!( + degree == 2 || degree == 3, + "TODO: Allow other constraint degrees." 
+ ); + let mut start = 0; + for lookup in lookups { + let num_helper_columns = lookup.num_helper_columns(degree); + let col_values = lookup + .columns + .iter() + .map(|col| vec![col.eval_with_next_circuit(builder, local_values, next_values)]) + .collect::>(); + + for &challenge in &lookup_vars.challenges { + let grand_challenge = GrandProductChallenge { + beta: builder.one(), + gamma: challenge, + }; + + eval_helper_columns_circuit( + builder, + &lookup.filter_columns, + &col_values, + local_values, + next_values, + &lookup_vars.local_values[start..start + num_helper_columns - 1], + degree, + &grand_challenge, + yield_constr, + ); + let challenge = builder.convert_to_ext(challenge); + + let z = lookup_vars.local_values[start + num_helper_columns - 1]; + let next_z = lookup_vars.next_values[start + num_helper_columns - 1]; + let table_column = lookup + .table_column + .eval_circuit(builder, vars.get_local_values()); + let table_with_challenge = builder.add_extension(table_column, challenge); + let mut y = builder.add_many_extension( + &lookup_vars.local_values[start..start + num_helper_columns - 1], + ); + + let frequencies_column = lookup + .frequencies_column + .eval_circuit(builder, vars.get_local_values()); + y = builder.mul_extension(y, table_with_challenge); + y = builder.sub_extension(y, frequencies_column); + + // Check that in the first row, z = 0; + yield_constr.constraint_first_row(builder, z); + let mut constraint = builder.sub_extension(next_z, z); + constraint = builder.mul_extension(constraint, table_with_challenge); + constraint = builder.sub_extension(constraint, y); + yield_constr.constraint(builder, constraint); + start += num_helper_columns; + } + } +} diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs deleted file mode 100644 index 1059a79b..00000000 --- a/starky/src/permutation.rs +++ /dev/null @@ -1,398 +0,0 @@ -//! Permutation arguments. - -use alloc::vec; -use alloc::vec::Vec; - -use itertools::Itertools; -use plonky2::field::batch_util::batch_multiply_inplace; -use plonky2::field::extension::{Extendable, FieldExtension}; -use plonky2::field::packed::PackedField; -use plonky2::field::polynomial::PolynomialValues; -use plonky2::field::types::Field; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; -use plonky2::iop::ext_target::ExtensionTarget; -use plonky2::iop::target::Target; -use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::config::{AlgebraicHasher, Hasher}; -use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget}; -use plonky2_maybe_rayon::*; - -use crate::config::StarkConfig; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::StarkEvaluationFrame; -use crate::stark::Stark; - -/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another. -/// In particular, there should exist some permutation `pi` such that for any `i`, -/// `trace[lhs[i]] = pi(trace[rhs[i]])`. Here `trace` denotes the trace in column-major form, so -/// `trace[col]` is a column vector. -pub struct PermutationPair { - /// Each entry contains two column indices, representing two columns which should be - /// permutations of one another. - pub column_pairs: Vec<(usize, usize)>, -} - -impl PermutationPair { - pub fn singletons(lhs: usize, rhs: usize) -> Self { - Self { - column_pairs: vec![(lhs, rhs)], - } - } -} - -/// A single instance of a permutation check protocol. 
-pub(crate) struct PermutationInstance<'a, T: Copy> { - pub(crate) pair: &'a PermutationPair, - pub(crate) challenge: PermutationChallenge, -} - -/// Randomness for a single instance of a permutation check protocol. -#[derive(Copy, Clone)] -pub(crate) struct PermutationChallenge { - /// Randomness used to combine multiple columns into one. - pub(crate) beta: T, - /// Random offset that's added to the beta-reduced column values. - pub(crate) gamma: T, -} - -/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness. -#[derive(Clone)] -pub(crate) struct PermutationChallengeSet { - pub(crate) challenges: Vec>, -} - -/// Compute all Z polynomials (for permutation arguments). -pub(crate) fn compute_permutation_z_polys( - stark: &S, - config: &StarkConfig, - trace_poly_values: &[PolynomialValues], - permutation_challenge_sets: &[PermutationChallengeSet], -) -> Vec> -where - F: RichField + Extendable, - S: Stark, -{ - let permutation_pairs = stark.permutation_pairs(); - let permutation_batches = get_permutation_batches( - &permutation_pairs, - permutation_challenge_sets, - config.num_challenges, - stark.permutation_batch_size(), - ); - - permutation_batches - .into_par_iter() - .map(|instances| compute_permutation_z_poly(&instances, trace_poly_values)) - .collect() -} - -/// Compute a single Z polynomial. -fn compute_permutation_z_poly( - instances: &[PermutationInstance], - trace_poly_values: &[PolynomialValues], -) -> PolynomialValues { - let degree = trace_poly_values[0].len(); - let (reduced_lhs_polys, reduced_rhs_polys): (Vec<_>, Vec<_>) = instances - .iter() - .map(|instance| permutation_reduced_polys(instance, trace_poly_values, degree)) - .unzip(); - - let numerator = poly_product_elementwise(reduced_lhs_polys.into_iter()); - let denominator = poly_product_elementwise(reduced_rhs_polys.into_iter()); - - // Compute the quotients. - let denominator_inverses = F::batch_multiplicative_inverse(&denominator.values); - let mut quotients = numerator.values; - batch_multiply_inplace(&mut quotients, &denominator_inverses); - - // Compute Z, which contains partial products of the quotients. - let mut partial_products = Vec::with_capacity(degree); - let mut acc = F::ONE; - for q in quotients { - partial_products.push(acc); - acc *= q; - } - PolynomialValues::new(partial_products) -} - -/// Computes the reduced polynomial, `\sum beta^i f_i(x) + gamma`, for both the "left" and "right" -/// sides of a given `PermutationPair`. -fn permutation_reduced_polys( - instance: &PermutationInstance, - trace_poly_values: &[PolynomialValues], - degree: usize, -) -> (PolynomialValues, PolynomialValues) { - let PermutationInstance { - pair: PermutationPair { column_pairs }, - challenge: PermutationChallenge { beta, gamma }, - } = instance; - - let mut reduced_lhs = PolynomialValues::constant(*gamma, degree); - let mut reduced_rhs = PolynomialValues::constant(*gamma, degree); - for ((lhs, rhs), weight) in column_pairs.iter().zip(beta.powers()) { - reduced_lhs.add_assign_scaled(&trace_poly_values[*lhs], weight); - reduced_rhs.add_assign_scaled(&trace_poly_values[*rhs], weight); - } - (reduced_lhs, reduced_rhs) -} - -/// Computes the elementwise product of a set of polynomials. Assumes that the set is non-empty and -/// that each polynomial has the same length. 
-fn poly_product_elementwise( - mut polys: impl Iterator>, -) -> PolynomialValues { - let mut product = polys.next().expect("Expected at least one polynomial"); - for poly in polys { - batch_multiply_inplace(&mut product.values, &poly.values) - } - product -} - -fn get_permutation_challenge>( - challenger: &mut Challenger, -) -> PermutationChallenge { - let beta = challenger.get_challenge(); - let gamma = challenger.get_challenge(); - PermutationChallenge { beta, gamma } -} - -fn get_permutation_challenge_set>( - challenger: &mut Challenger, - num_challenges: usize, -) -> PermutationChallengeSet { - let challenges = (0..num_challenges) - .map(|_| get_permutation_challenge(challenger)) - .collect(); - PermutationChallengeSet { challenges } -} - -pub(crate) fn get_n_permutation_challenge_sets>( - challenger: &mut Challenger, - num_challenges: usize, - num_sets: usize, -) -> Vec> { - (0..num_sets) - .map(|_| get_permutation_challenge_set(challenger, num_challenges)) - .collect() -} - -fn get_permutation_challenge_target< - F: RichField + Extendable, - H: AlgebraicHasher, - const D: usize, ->( - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, -) -> PermutationChallenge { - let beta = challenger.get_challenge(builder); - let gamma = challenger.get_challenge(builder); - PermutationChallenge { beta, gamma } -} - -fn get_permutation_challenge_set_target< - F: RichField + Extendable, - H: AlgebraicHasher, - const D: usize, ->( - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, - num_challenges: usize, -) -> PermutationChallengeSet { - let challenges = (0..num_challenges) - .map(|_| get_permutation_challenge_target(builder, challenger)) - .collect(); - PermutationChallengeSet { challenges } -} - -pub(crate) fn get_n_permutation_challenge_sets_target< - F: RichField + Extendable, - H: AlgebraicHasher, - const D: usize, ->( - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, - num_challenges: usize, - num_sets: usize, -) -> Vec> { - (0..num_sets) - .map(|_| get_permutation_challenge_set_target(builder, challenger, num_challenges)) - .collect() -} - -/// Get a list of instances of our batch-permutation argument. These are permutation arguments -/// where the same `Z(x)` polynomial is used to check more than one permutation. -/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we -/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we -/// chunk these arguments based on our batch size. -pub(crate) fn get_permutation_batches<'a, T: Copy>( - permutation_pairs: &'a [PermutationPair], - permutation_challenge_sets: &[PermutationChallengeSet], - num_challenges: usize, - batch_size: usize, -) -> Vec>> { - permutation_pairs - .iter() - .cartesian_product(0..num_challenges) - .chunks(batch_size) - .into_iter() - .map(|batch| { - batch - .enumerate() - .map(|(i, (pair, chal))| { - let challenge = permutation_challenge_sets[i].challenges[chal]; - PermutationInstance { pair, challenge } - }) - .collect_vec() - }) - .collect() -} - -pub struct PermutationCheckVars -where - F: Field, - FE: FieldExtension, - P: PackedField, -{ - pub(crate) local_zs: Vec
<P>, - pub(crate) next_zs: Vec<P>, - pub(crate) permutation_challenge_sets: Vec>, -} - -pub(crate) fn eval_permutation_checks( - stark: &S, - config: &StarkConfig, - vars: &S::EvaluationFrame, - permutation_data: PermutationCheckVars, - consumer: &mut ConstraintConsumer<P>
, -) where - F: RichField + Extendable, - FE: FieldExtension, - P: PackedField, - S: Stark, -{ - let local_values = vars.get_local_values(); - - let PermutationCheckVars { - local_zs, - next_zs, - permutation_challenge_sets, - } = permutation_data; - - // Check that Z(1) = 1; - for &z in &local_zs { - consumer.constraint_first_row(z - FE::ONE); - } - - let permutation_pairs = stark.permutation_pairs(); - - let permutation_batches = get_permutation_batches( - &permutation_pairs, - &permutation_challenge_sets, - config.num_challenges, - stark.permutation_batch_size(), - ); - - // Each zs value corresponds to a permutation batch. - for (i, instances) in permutation_batches.iter().enumerate() { - // Z(gx) * down = Z x * up - let (reduced_lhs, reduced_rhs): (Vec
<P>, Vec<P>
) = instances - .iter() - .map(|instance| { - let PermutationInstance { - pair: PermutationPair { column_pairs }, - challenge: PermutationChallenge { beta, gamma }, - } = instance; - let mut factor = ReducingFactor::new(*beta); - let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs - .iter() - .map(|&(i, j)| (local_values[i], local_values[j])) - .unzip(); - ( - factor.reduce_ext(lhs.into_iter()) + FE::from_basefield(*gamma), - factor.reduce_ext(rhs.into_iter()) + FE::from_basefield(*gamma), - ) - }) - .unzip(); - let constraint = next_zs[i] * reduced_rhs.into_iter().product::
<P>() - - local_zs[i] * reduced_lhs.into_iter().product::<P>
(); - consumer.constraint(constraint); - } -} - -pub struct PermutationCheckDataTarget { - pub(crate) local_zs: Vec>, - pub(crate) next_zs: Vec>, - pub(crate) permutation_challenge_sets: Vec>, -} - -pub(crate) fn eval_permutation_checks_circuit( - builder: &mut CircuitBuilder, - stark: &S, - config: &StarkConfig, - vars: &S::EvaluationFrameTarget, - permutation_data: PermutationCheckDataTarget, - consumer: &mut RecursiveConstraintConsumer, -) where - F: RichField + Extendable, - S: Stark, -{ - let local_values = vars.get_local_values(); - - let PermutationCheckDataTarget { - local_zs, - next_zs, - permutation_challenge_sets, - } = permutation_data; - - let one = builder.one_extension(); - // Check that Z(1) = 1; - for &z in &local_zs { - let z_1 = builder.sub_extension(z, one); - consumer.constraint_first_row(builder, z_1); - } - - let permutation_pairs = stark.permutation_pairs(); - - let permutation_batches = get_permutation_batches( - &permutation_pairs, - &permutation_challenge_sets, - config.num_challenges, - stark.permutation_batch_size(), - ); - - // Each zs value corresponds to a permutation batch. - for (i, instances) in permutation_batches.iter().enumerate() { - let (reduced_lhs, reduced_rhs): (Vec>, Vec>) = - instances - .iter() - .map(|instance| { - let PermutationInstance { - pair: PermutationPair { column_pairs }, - challenge: PermutationChallenge { beta, gamma }, - } = instance; - let beta_ext = builder.convert_to_ext(*beta); - let gamma_ext = builder.convert_to_ext(*gamma); - let mut factor = ReducingFactorTarget::new(beta_ext); - let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs - .iter() - .map(|&(i, j)| (local_values[i], local_values[j])) - .unzip(); - let reduced_lhs = factor.reduce(&lhs, builder); - let reduced_rhs = factor.reduce(&rhs, builder); - ( - builder.add_extension(reduced_lhs, gamma_ext), - builder.add_extension(reduced_rhs, gamma_ext), - ) - }) - .unzip(); - let reduced_lhs_product = builder.mul_many_extension(reduced_lhs); - let reduced_rhs_product = builder.mul_many_extension(reduced_rhs); - // constraint = next_zs[i] * reduced_rhs_product - local_zs[i] * reduced_lhs_product - let constraint = { - let tmp = builder.mul_extension(local_zs[i], reduced_lhs_product); - builder.mul_sub_extension(next_zs[i], reduced_rhs_product, tmp) - }; - consumer.constraint(builder, constraint) - } -} diff --git a/starky/src/proof.rs b/starky/src/proof.rs index 6bd5f787..e2239928 100644 --- a/starky/src/proof.rs +++ b/starky/src/proof.rs @@ -18,14 +18,14 @@ use plonky2::plonk::config::GenericConfig; use plonky2_maybe_rayon::*; use crate::config::StarkConfig; -use crate::permutation::PermutationChallengeSet; +use crate::lookup::GrandProductChallengeSet; #[derive(Debug, Clone)] pub struct StarkProof, C: GenericConfig, const D: usize> { /// Merkle cap of LDEs of trace values. pub trace_cap: MerkleCap, /// Merkle cap of LDEs of permutation Z values. - pub permutation_zs_cap: Option>, + pub auxiliary_polys_cap: Option>, /// Merkle cap of LDEs of trace values. pub quotient_polys_cap: MerkleCap, /// Purported values of each polynomial at the challenge point. 
@@ -48,7 +48,7 @@ impl, C: GenericConfig, const D: usize> S pub struct StarkProofTarget { pub trace_cap: MerkleCapTarget, - pub permutation_zs_cap: Option, + pub auxiliary_polys_cap: Option, pub quotient_polys_cap: MerkleCapTarget, pub openings: StarkOpeningSetTarget, pub opening_proof: FriProofTarget, @@ -106,7 +106,7 @@ pub struct CompressedStarkProofWithPublicInputs< pub(crate) struct StarkProofChallenges, const D: usize> { /// Randomness used in any permutation arguments. - pub permutation_challenge_sets: Option>>, + pub lookup_challenge_set: Option>, /// Random values used to combine STARK constraints. pub stark_alphas: Vec, @@ -118,7 +118,7 @@ pub(crate) struct StarkProofChallenges, const D: us } pub(crate) struct StarkProofChallengesTarget { - pub permutation_challenge_sets: Option>>, + pub lookup_challenge_set: Option>, pub stark_alphas: Vec, pub stark_zeta: ExtensionTarget, pub fri_challenges: FriChallengesTarget, @@ -129,8 +129,8 @@ pub(crate) struct StarkProofChallengesTarget { pub struct StarkOpeningSet, const D: usize> { pub local_values: Vec, pub next_values: Vec, - pub permutation_zs: Option>, - pub permutation_zs_next: Option>, + pub auxiliary_polys: Option>, + pub auxiliary_polys_next: Option>, pub quotient_polys: Vec, } @@ -139,7 +139,7 @@ impl, const D: usize> StarkOpeningSet { zeta: F::Extension, g: F, trace_commitment: &PolynomialBatch, - permutation_zs_commitment: Option<&PolynomialBatch>, + auxiliary_polys_commitment: Option<&PolynomialBatch>, quotient_commitment: &PolynomialBatch, ) -> Self { let eval_commitment = |z: F::Extension, c: &PolynomialBatch| { @@ -152,8 +152,8 @@ impl, const D: usize> StarkOpeningSet { Self { local_values: eval_commitment(zeta, trace_commitment), next_values: eval_commitment(zeta_next, trace_commitment), - permutation_zs: permutation_zs_commitment.map(|c| eval_commitment(zeta, c)), - permutation_zs_next: permutation_zs_commitment.map(|c| eval_commitment(zeta_next, c)), + auxiliary_polys: auxiliary_polys_commitment.map(|c| eval_commitment(zeta, c)), + auxiliary_polys_next: auxiliary_polys_commitment.map(|c| eval_commitment(zeta_next, c)), quotient_polys: eval_commitment(zeta, quotient_commitment), } } @@ -163,7 +163,7 @@ impl, const D: usize> StarkOpeningSet { values: self .local_values .iter() - .chain(self.permutation_zs.iter().flatten()) + .chain(self.auxiliary_polys.iter().flatten()) .chain(&self.quotient_polys) .copied() .collect_vec(), @@ -172,7 +172,7 @@ impl, const D: usize> StarkOpeningSet { values: self .next_values .iter() - .chain(self.permutation_zs_next.iter().flatten()) + .chain(self.auxiliary_polys_next.iter().flatten()) .copied() .collect_vec(), }; @@ -185,8 +185,8 @@ impl, const D: usize> StarkOpeningSet { pub struct StarkOpeningSetTarget { pub local_values: Vec>, pub next_values: Vec>, - pub permutation_zs: Option>>, - pub permutation_zs_next: Option>>, + pub auxiliary_polys: Option>>, + pub auxiliary_polys_next: Option>>, pub quotient_polys: Vec>, } @@ -196,7 +196,7 @@ impl StarkOpeningSetTarget { values: self .local_values .iter() - .chain(self.permutation_zs.iter().flatten()) + .chain(self.auxiliary_polys.iter().flatten()) .chain(&self.quotient_polys) .copied() .collect_vec(), @@ -205,7 +205,7 @@ impl StarkOpeningSetTarget { values: self .next_values .iter() - .chain(self.permutation_zs_next.iter().flatten()) + .chain(self.auxiliary_polys_next.iter().flatten()) .copied() .collect_vec(), }; diff --git a/starky/src/prover.rs b/starky/src/prover.rs index 56154d91..f9b40217 100644 --- a/starky/src/prover.rs +++ 
b/starky/src/prover.rs @@ -21,9 +21,8 @@ use plonky2_maybe_rayon::*; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; use crate::evaluation_frame::StarkEvaluationFrame; -use crate::permutation::{ - compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet, - PermutationCheckVars, +use crate::lookup::{ + get_grand_product_challenge_set, lookup_helper_columns, Lookup, LookupCheckVars, }; use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs}; use crate::stark::Stark; @@ -69,25 +68,45 @@ where let mut challenger = Challenger::new(); challenger.observe_cap(&trace_cap); - // Permutation arguments. - let permutation_zs_commitment_challenges = stark.uses_permutation_args().then(|| { - let permutation_challenge_sets = get_n_permutation_challenge_sets( - &mut challenger, - config.num_challenges, - stark.permutation_batch_size(), - ); - let permutation_z_polys = compute_permutation_z_polys::( - &stark, - config, - &trace_poly_values, - &permutation_challenge_sets, - ); + // Lookup argument. + let constraint_degree = stark.constraint_degree(); + let lookups = stark.lookups(); + let lookup_challenges = stark.uses_lookups().then(|| { + get_grand_product_challenge_set(&mut challenger, config.num_challenges) + .challenges + .iter() + .map(|ch| ch.beta) + .collect::>() + }); - let permutation_zs_commitment = timed!( + let num_lookup_columns = lookups + .iter() + .map(|l| l.num_helper_columns(constraint_degree)) + .sum(); + + let auxiliary_polys_commitment = stark.uses_lookups().then(|| { + let lookup_helper_columns = timed!(timing, "compute lookup helper columns", { + let challenges = lookup_challenges.as_ref().expect("We do have challenges."); + let mut columns = Vec::with_capacity(num_lookup_columns); + for lookup in &lookups { + for &challenge in challenges { + columns.extend(lookup_helper_columns( + lookup, + &trace_poly_values, + challenge, + constraint_degree, + )); + } + } + columns + }); + + // Get the polynomial commitments for all auxiliary polynomials. 
+ let auxiliary_polys_commitment = timed!( timing, "compute permutation Z commitments", PolynomialBatch::from_values( - permutation_z_polys, + lookup_helper_columns, rate_bits, false, config.fri_config.cap_height, @@ -95,38 +114,68 @@ where None, ) ); - (permutation_zs_commitment, permutation_challenge_sets) + + auxiliary_polys_commitment }); - let permutation_zs_commitment = permutation_zs_commitment_challenges - .as_ref() - .map(|(comm, _)| comm); - let permutation_zs_cap = permutation_zs_commitment + + let auxiliary_polys_cap = auxiliary_polys_commitment .as_ref() .map(|commit| commit.merkle_tree.cap.clone()); - if let Some(cap) = &permutation_zs_cap { + if let Some(cap) = &auxiliary_polys_cap { challenger.observe_cap(cap); } let alphas = challenger.get_n_challenges(config.num_challenges); - let quotient_polys = compute_quotient_polys::::Packing, C, S, D>( - &stark, - &trace_commitment, - &permutation_zs_commitment_challenges, - public_inputs, - alphas, - degree_bits, - config, + + #[cfg(test)] + { + check_constraints( + &stark, + &trace_commitment, + public_inputs, + &auxiliary_polys_commitment, + lookup_challenges.as_ref(), + &lookups, + alphas.clone(), + degree_bits, + num_lookup_columns, + ); + } + + let quotient_polys = timed!( + timing, + "compute quotient polys", + compute_quotient_polys::::Packing, C, S, D>( + &stark, + &trace_commitment, + &auxiliary_polys_commitment, + lookup_challenges.as_ref(), + &lookups, + public_inputs, + alphas, + degree_bits, + num_lookup_columns, + config, + ) ); - let all_quotient_chunks = quotient_polys - .into_par_iter() - .flat_map(|mut quotient_poly| { - quotient_poly - .trim_to_len(degree * stark.quotient_degree_factor()) - .expect("Quotient has failed, the vanishing polynomial is not divisible by Z_H"); - // Split quotient into degree-n chunks. - quotient_poly.chunks(degree) - }) - .collect(); + + let all_quotient_chunks = timed!( + timing, + "split quotient polys", + quotient_polys + .into_par_iter() + .flat_map(|mut quotient_poly| { + quotient_poly + .trim_to_len(degree * stark.quotient_degree_factor()) + .expect( + "Quotient has failed, the vanishing polynomial is not divisible by Z_H", + ); + // Split quotient into degree-n chunks. + quotient_poly.chunks(degree) + }) + .collect() + ); + let quotient_commitment = timed!( timing, "compute quotient commitment", @@ -139,6 +188,8 @@ where None, ) ); + + // Observe the quotient polynomials Merkle cap. let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone(); challenger.observe_cap("ient_polys_cap); @@ -151,17 +202,21 @@ where zeta.exp_power_of_2(degree_bits) != F::Extension::ONE, "Opening point is in the subgroup." ); + + // Compute all openings: evaluate all committed polynomials at `zeta` and, when necessary, at `g * zeta`. let openings = StarkOpeningSet::new( zeta, g, &trace_commitment, - permutation_zs_commitment, + auxiliary_polys_commitment.as_ref(), "ient_commitment, ); + + // Get the FRI openings and observe them. 
     challenger.observe_openings(&openings.to_fri_openings());
 
     let initial_merkle_trees = once(&trace_commitment)
-        .chain(permutation_zs_commitment)
+        .chain(&auxiliary_polys_commitment)
         .chain(once(&quotient_commitment))
         .collect_vec();
 
@@ -178,7 +233,7 @@ where
     );
     let proof = StarkProof {
         trace_cap,
-        permutation_zs_cap,
+        auxiliary_polys_cap,
         quotient_polys_cap,
         openings,
         opening_proof,
@@ -195,13 +250,13 @@ where
 fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
     stark: &S,
     trace_commitment: &'a PolynomialBatch<F, C, D>,
-    permutation_zs_commitment_challenges: &'a Option<(
-        PolynomialBatch<F, C, D>,
-        Vec<PermutationChallengeSet<F>>,
-    )>,
+    auxiliary_polys_commitment: &'a Option<PolynomialBatch<F, C, D>>,
+    lookup_challenges: Option<&'a Vec<F>>,
+    lookups: &[Lookup<F>],
     public_inputs: &[F],
     alphas: Vec<F>,
     degree_bits: usize,
+    num_lookup_columns: usize,
     config: &StarkConfig,
 ) -> Vec<PolynomialValues<F>>
 where
@@ -263,23 +318,35 @@ where
                 lagrange_basis_first,
                 lagrange_basis_last,
             );
+            // Get the local and next row evaluations for the current STARK,
+            // as well as the public inputs.
             let vars = S::EvaluationFrame::from_values(
                 &get_trace_values_packed(i_start),
                 &get_trace_values_packed(i_next_start),
                 public_inputs,
             );
-            let permutation_check_data = permutation_zs_commitment_challenges.as_ref().map(
-                |(permutation_zs_commitment, permutation_challenge_sets)| PermutationCheckVars {
-                    local_zs: permutation_zs_commitment.get_lde_values_packed(i_start, step),
-                    next_zs: permutation_zs_commitment.get_lde_values_packed(i_next_start, step),
-                    permutation_challenge_sets: permutation_challenge_sets.to_vec(),
-                },
-            );
+            // Get the local and next row evaluations for the lookup argument,
+            // as well as the associated challenges.
+            let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
+                local_values: auxiliary_polys_commitment
+                    .as_ref()
+                    .unwrap()
+                    .get_lde_values_packed(i_start, step)
+                    .to_vec(),
+                next_values: auxiliary_polys_commitment
+                    .as_ref()
+                    .unwrap()
+                    .get_lde_values_packed(i_next_start, step),
+                challenges: challenges.to_vec(),
+            });
+
+            // Evaluate the polynomial combining all constraints, including
+            // those associated with the lookup argument.
             eval_vanishing_poly::<F, F, P, S, D, 1>(
                 stark,
-                config,
                 &vars,
-                permutation_check_data,
+                lookups,
+                lookup_vars,
                 &mut consumer,
             );
 
@@ -307,3 +374,102 @@ where
         .map(|values| values.coset_ifft(F::coset_shift()))
         .collect()
 }
+
+#[cfg(test)]
+/// Check that all constraints evaluate to zero on `H`.
+/// Can also be used to check the degree of the constraints by evaluating on a larger subgroup.
+fn check_constraints<'a, F, C, S, const D: usize>(
+    stark: &S,
+    trace_commitment: &'a PolynomialBatch<F, C, D>,
+    public_inputs: &[F],
+    auxiliary_commitment: &'a Option<PolynomialBatch<F, C, D>>,
+    lookup_challenges: Option<&'a Vec<F>>,
+    lookups: &[Lookup<F>],
+    alphas: Vec<F>,
+    degree_bits: usize,
+    num_lookup_columns: usize,
+) where
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+{
+    let degree = 1 << degree_bits;
+    let rate_bits = 0; // Set this to a higher value to check the constraint degree.
+
+    let size = degree << rate_bits;
+    let step = 1 << rate_bits;
+
+    // Evaluation of the first Lagrange polynomial.
+    let lagrange_first = PolynomialValues::selector(degree, 0).lde(rate_bits);
+    // Evaluation of the last Lagrange polynomial.
+    let lagrange_last = PolynomialValues::selector(degree, degree - 1).lde(rate_bits);
+
+    let subgroup = F::two_adic_subgroup(degree_bits + rate_bits);
+
+    // Get the evaluations of a batch of polynomials over our subgroup.
+    let get_subgroup_evals = |comm: &PolynomialBatch<F, C, D>| -> Vec<Vec<F>> {
+        let values = comm
+            .polynomials
+            .par_iter()
+            .map(|coeffs| coeffs.clone().fft().values)
+            .collect::<Vec<_>>();
+        transpose(&values)
+    };
+
+    // Get batch evaluations of the trace and auxiliary polynomials over our subgroup.
+    let trace_subgroup_evals = get_subgroup_evals(trace_commitment);
+    let auxiliary_subgroup_evals = auxiliary_commitment.as_ref().map(get_subgroup_evals);
+
+    // Last element of the subgroup.
+    let last = F::primitive_root_of_unity(degree_bits).inverse();
+
+    let constraint_values = (0..size)
+        .map(|i| {
+            let i_next = (i + step) % size;
+
+            let x = subgroup[i];
+            let z_last = x - last;
+            let lagrange_basis_first = lagrange_first.values[i];
+            let lagrange_basis_last = lagrange_last.values[i];
+
+            let mut consumer = ConstraintConsumer::new(
+                alphas.clone(),
+                z_last,
+                lagrange_basis_first,
+                lagrange_basis_last,
+            );
+            // Get the local and next row evaluations for the current STARK's trace.
+            let vars = S::EvaluationFrame::from_values(
+                &trace_subgroup_evals[i],
+                &trace_subgroup_evals[i_next],
+                public_inputs,
+            );
+            // Get the local and next row evaluations for the current STARK's lookup argument.
+            let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
+                local_values: auxiliary_subgroup_evals.as_ref().unwrap()[i].clone(),
+                next_values: auxiliary_subgroup_evals.as_ref().unwrap()[i_next].clone(),
+                challenges: challenges.to_vec(),
+            });
+
+            // Evaluate the polynomial combining all constraints, including those
+            // associated with the lookup argument.
+            eval_vanishing_poly::<F, F, F, S, D, 1>(
+                stark,
+                &vars,
+                lookups,
+                lookup_vars,
+                &mut consumer,
+            );
+            consumer.accumulators()
+        })
+        .collect::<Vec<_>>();
+
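Each entry of `constraint_values` comes out of a `ConstraintConsumer`, which folds every individual constraint evaluation into one accumulator per challenge with a running `acc = acc * alpha + c`. A minimal sketch of that folding, with toy modular arithmetic and hypothetical values in place of the real field:

// Sketch of ConstraintConsumer-style folding (toy modulus, made-up values).
const P: u64 = 97;

fn main() {
    let alpha = 13u64;
    // On a valid row, every individual constraint evaluates to zero...
    let satisfied = [0u64, 0, 0, 0];
    // ...while a violated constraint leaves a nonzero residue.
    let violated = [0u64, 4, 0, 0];

    let fold = |cs: &[u64]| cs.iter().fold(0u64, |acc, &c| (acc * alpha + c) % P);

    assert_eq!(fold(&satisfied), 0); // a combination of zeros is zero
    assert_ne!(fold(&violated), 0); // the residue survives the folding
}

This is why the assertion loop that follows only needs to check that each folded accumulator is zero at every subgroup element.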
+    // Assert that all constraints evaluate to 0 over our subgroup.
+    for v in constraint_values {
+        assert!(
+            v.iter().all(|x| x.is_zero()),
+            "Constraint failed in {}",
+            core::any::type_name::<S>()
+        );
+    }
+}
diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs
index 18db561b..e91583f1 100644
--- a/starky/src/recursive_verifier.rs
+++ b/starky/src/recursive_verifier.rs
@@ -1,3 +1,4 @@
+use alloc::vec;
 use alloc::vec::Vec;
 use core::iter::once;
 
@@ -17,7 +18,7 @@ use plonky2::with_context;
 use crate::config::StarkConfig;
 use crate::constraint_consumer::RecursiveConstraintConsumer;
 use crate::evaluation_frame::StarkEvaluationFrame;
-use crate::permutation::PermutationCheckDataTarget;
+use crate::lookup::LookupCheckVarsTarget;
 use crate::proof::{
     StarkOpeningSetTarget, StarkProof, StarkProofChallengesTarget, StarkProofTarget,
     StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget,
@@ -43,7 +44,7 @@ pub fn verify_stark_proof_circuit<
     let challenges = with_context!(
         builder,
         "compute challenges",
-        proof_with_pis.get_challenges::<F, C, S, D>(builder, &stark, inner_config)
+        proof_with_pis.get_challenges::<F, C>(builder, inner_config)
     );
 
     verify_stark_proof_with_challenges_circuit::<F, C, S, D>(
@@ -72,7 +73,7 @@ fn verify_stark_proof_with_challenges_circuit<
 ) where
     C::Hasher: AlgebraicHasher<F>,
 {
-    check_permutation_options(&stark, &proof_with_pis, &challenges).unwrap();
+    check_lookup_options(&stark, &proof_with_pis, &challenges).unwrap();
     let one = builder.one_extension();
 
     let StarkProofWithPublicInputsTarget {
@@ -82,8 +83,8 @@ fn verify_stark_proof_with_challenges_circuit<
     let StarkOpeningSetTarget {
         local_values,
         next_values,
-        permutation_zs,
-        permutation_zs_next,
+        auxiliary_polys,
+        auxiliary_polys_next,
         quotient_polys,
     } = &proof.openings;
 
@@ -112,25 +113,27 @@ fn verify_stark_proof_with_challenges_circuit<
         l_last,
     );
 
-    let permutation_data = stark
-        .uses_permutation_args()
-        .then(|| PermutationCheckDataTarget {
-            local_zs: permutation_zs.as_ref().unwrap().clone(),
-            next_zs: permutation_zs_next.as_ref().unwrap().clone(),
-            permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(),
-        });
+    let num_lookup_columns = stark.num_lookup_helper_columns(inner_config);
+    let lookup_challenges = stark.uses_lookups().then(|| {
+        challenges
+            .lookup_challenge_set
+            .unwrap()
+            .challenges
+            .iter()
+            .map(|ch| ch.beta)
+            .collect::<Vec<_>>()
+    });
+
+    let lookup_vars = stark.uses_lookups().then(|| LookupCheckVarsTarget {
+        local_values: auxiliary_polys.as_ref().unwrap()[..num_lookup_columns].to_vec(),
+        next_values: auxiliary_polys_next.as_ref().unwrap()[..num_lookup_columns].to_vec(),
+        challenges: lookup_challenges.unwrap(),
+    });
 
     with_context!(
         builder,
         "evaluate vanishing polynomial",
-        eval_vanishing_poly_circuit::<F, S, D>(
-            builder,
-            &stark,
-            inner_config,
-            &vars,
-            permutation_data,
-            &mut consumer,
-        )
+        eval_vanishing_poly_circuit::<F, S, D>(builder, &stark, &vars, lookup_vars, &mut consumer)
     );
     let vanishing_polys_zeta = consumer.accumulators();
 
@@ -146,7 +149,7 @@ fn verify_stark_proof_with_challenges_circuit<
     }
 
     let merkle_caps = once(proof.trace_cap)
-        .chain(proof.permutation_zs_cap)
+        .chain(proof.auxiliary_polys_cap)
         .chain(once(proof.quotient_polys_cap))
         .collect_vec();
 
@@ -212,22 +215,19 @@ pub fn add_virtual_stark_proof<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
     let fri_params = config.fri_params(degree_bits);
     let cap_height = fri_params.config.cap_height;
 
-    let num_leaves_per_oracle = once(S::COLUMNS)
-        .chain(
-            stark
-                .uses_permutation_args()
-                .then(|| stark.num_permutation_batches(config)),
-        )
-        .chain(once(stark.quotient_degree_factor() * config.num_challenges))
-        .collect_vec();
+    let num_leaves_per_oracle = vec![
+        S::COLUMNS,
+        stark.num_lookup_helper_columns(config),
+        stark.quotient_degree_factor() * config.num_challenges,
+    ];
 
-    let permutation_zs_cap = stark
-        .uses_permutation_args()
+    let auxiliary_polys_cap = stark
+        .uses_lookups()
         .then(|| builder.add_virtual_cap(cap_height));
 
     StarkProofTarget {
         trace_cap: builder.add_virtual_cap(cap_height),
-        permutation_zs_cap,
+        auxiliary_polys_cap,
         quotient_polys_cap: builder.add_virtual_cap(cap_height),
         openings: add_stark_opening_set_target::<F, S, D>(builder, stark, config),
         opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params),
@@ -243,12 +243,12 @@ fn add_stark_opening_set_target<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
     StarkOpeningSetTarget {
         local_values: builder.add_virtual_extension_targets(S::COLUMNS),
         next_values: builder.add_virtual_extension_targets(S::COLUMNS),
-        permutation_zs: stark
-            .uses_permutation_args()
-            .then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
-        permutation_zs_next: stark
-            .uses_permutation_args()
-            .then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
+        auxiliary_polys: stark.uses_lookups().then(|| {
+            builder.add_virtual_extension_targets(stark.num_lookup_helper_columns(config))
+        }),
+        auxiliary_polys_next: stark.uses_lookups().then(|| {
+            builder.add_virtual_extension_targets(stark.num_lookup_helper_columns(config))
+        }),
         quotient_polys: builder
             .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
     }
@@ -297,33 +297,34 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
         &proof.openings.to_fri_openings(),
     );
 
-    if let (Some(permutation_zs_cap_target), Some(permutation_zs_cap)) =
-        (&proof_target.permutation_zs_cap, &proof.permutation_zs_cap)
-    {
-        witness.set_cap_target(permutation_zs_cap_target, permutation_zs_cap);
+    if let (Some(auxiliary_polys_cap_target), Some(auxiliary_polys_cap)) = (
+        &proof_target.auxiliary_polys_cap,
+        &proof.auxiliary_polys_cap,
+    ) {
+        witness.set_cap_target(auxiliary_polys_cap_target, auxiliary_polys_cap);
     }
 
     set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);
 }
 
-/// Utility function to check that all permutation data wrapped in `Option`s are `Some` iff
-/// the Stark uses a permutation argument.
+/// Utility function to check that all lookup data wrapped in `Option`s is `Some` iff
+/// the Stark uses a lookup argument.
-fn check_permutation_options<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
+fn check_lookup_options<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
     stark: &S,
     proof_with_pis: &StarkProofWithPublicInputsTarget<D>,
     challenges: &StarkProofChallengesTarget<D>,
 ) -> Result<()> {
     let options_is_some = [
-        proof_with_pis.proof.permutation_zs_cap.is_some(),
-        proof_with_pis.proof.openings.permutation_zs.is_some(),
-        proof_with_pis.proof.openings.permutation_zs_next.is_some(),
-        challenges.permutation_challenge_sets.is_some(),
+        proof_with_pis.proof.auxiliary_polys_cap.is_some(),
+        proof_with_pis.proof.openings.auxiliary_polys.is_some(),
+        proof_with_pis.proof.openings.auxiliary_polys_next.is_some(),
+        challenges.lookup_challenge_set.is_some(),
     ];
     ensure!(
         options_is_some
             .into_iter()
-            .all(|b| b == stark.uses_permutation_args()),
-        "Permutation data doesn't match with Stark configuration."
+            .all(|b| b == stark.uses_lookups()),
+        "Lookup data doesn't match the STARK configuration."
     );
     Ok(())
 }
diff --git a/starky/src/stark.rs b/starky/src/stark.rs
index 1e7b0711..a9f2b260 100644
--- a/starky/src/stark.rs
+++ b/starky/src/stark.rs
@@ -3,6 +3,7 @@ use alloc::vec::Vec;
 
 use plonky2::field::extension::{Extendable, FieldExtension};
 use plonky2::field::packed::PackedField;
+use plonky2::field::types::Field;
 use plonky2::fri::structure::{
     FriBatchInfo, FriBatchInfoTarget, FriInstanceInfo, FriInstanceInfoTarget, FriOracleInfo,
     FriPolynomialInfo,
@@ -10,12 +11,15 @@ use plonky2::fri::structure::{
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
-use plonky2::util::ceil_div_usize;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::evaluation_frame::StarkEvaluationFrame;
-use crate::permutation::PermutationPair;
+use crate::lookup::Lookup;
+
+const TRACE_ORACLE_INDEX: usize = 0;
+const AUXILIARY_ORACLE_INDEX: usize = 1;
+const QUOTIENT_ORACLE_INDEX: usize = 2;
 
 /// Represents a STARK system.
 pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
@@ -94,49 +98,47 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         g: F,
         config: &StarkConfig,
     ) -> FriInstanceInfo<F, D> {
-        let mut oracles = vec![];
-
-        let trace_info = FriPolynomialInfo::from_range(oracles.len(), 0..Self::COLUMNS);
-        oracles.push(FriOracleInfo {
+        let trace_oracle = FriOracleInfo {
             num_polys: Self::COLUMNS,
             blinding: false,
-        });
-
-        let permutation_zs_info = if self.uses_permutation_args() {
-            let num_z_polys = self.num_permutation_batches(config);
-            let polys = FriPolynomialInfo::from_range(oracles.len(), 0..num_z_polys);
-            oracles.push(FriOracleInfo {
-                num_polys: num_z_polys,
-                blinding: false,
-            });
-            polys
-        } else {
-            vec![]
         };
+        let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);
 
-        let num_quotient_polys = self.quotient_degree_factor() * config.num_challenges;
-        let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
-        oracles.push(FriOracleInfo {
+        let num_lookup_columns = self.num_lookup_helper_columns(config);
+        let num_auxiliary_polys = num_lookup_columns;
+        let auxiliary_oracle = FriOracleInfo {
+            num_polys: num_auxiliary_polys,
+            blinding: false,
+        };
+        let auxiliary_polys_info =
+            FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);
+
+        let num_quotient_polys = self.num_quotient_polys(config);
+        let quotient_oracle = FriOracleInfo {
             num_polys: num_quotient_polys,
             blinding: false,
-        });
+        };
+        let quotient_info =
+            FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys);
 
         let zeta_batch = FriBatchInfo {
             point: zeta,
             polynomials: [
                 trace_info.clone(),
-                permutation_zs_info.clone(),
+                auxiliary_polys_info.clone(),
                 quotient_info,
             ]
             .concat(),
         };
         let zeta_next_batch = FriBatchInfo {
             point: zeta.scalar_mul(g),
-            polynomials: [trace_info, permutation_zs_info].concat(),
+            polynomials: [trace_info, auxiliary_polys_info].concat(),
         };
-        let batches = vec![zeta_batch, zeta_next_batch];
-        FriInstanceInfo { oracles, batches }
+        FriInstanceInfo {
+            oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
+            batches: vec![zeta_batch, zeta_next_batch],
+        }
     }
 
     /// Computes the FRI instance used to prove this Stark.
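With the auxiliary oracle always present (even when it holds zero polynomials), the oracle layout is now static, which is what the new `*_ORACLE_INDEX` constants encode. The sketch below mimics the shape of this construction with simplified stand-in types; `OracleInfo` and the free-standing `from_range` are hypothetical substitutes for plonky2's `FriOracleInfo` and `FriPolynomialInfo::from_range`, and the sizes are made up.

// Sketch of the fixed three-oracle layout (stand-in types, toy sizes).
use std::ops::Range;

struct OracleInfo {
    num_polys: usize,
}

// (oracle_index, poly_index) pairs for a contiguous range of polynomials.
fn from_range(oracle_index: usize, range: Range<usize>) -> Vec<(usize, usize)> {
    range.map(|i| (oracle_index, i)).collect()
}

fn main() {
    let (num_columns, num_aux, num_quotient) = (5, 2, 8);

    // Oracle 0: trace; oracle 1: lookup helper columns (possibly empty);
    // oracle 2: quotient chunks. The layout no longer depends on whether
    // the STARK uses lookups.
    let oracles = [num_columns, num_aux, num_quotient].map(|n| OracleInfo { num_polys: n });

    let trace_info = from_range(0, 0..num_columns);
    let aux_info = from_range(1, 0..num_aux);
    let quotient_info = from_range(2, 0..num_quotient);

    // Everything is opened at zeta; only the trace and auxiliary polynomials
    // are also opened at g * zeta, since the quotient is never read on the
    // next row.
    let zeta_batch = [trace_info.clone(), aux_info.clone(), quotient_info].concat();
    let zeta_next_batch = [trace_info, aux_info].concat();

    assert_eq!(zeta_batch.len(), oracles.iter().map(|o| o.num_polys).sum::<usize>());
    assert_eq!(zeta_next_batch.len(), num_columns + num_aux);
}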
@@ -147,38 +149,34 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         g: F,
         config: &StarkConfig,
     ) -> FriInstanceInfoTarget<D> {
-        let mut oracles = vec![];
-
-        let trace_info = FriPolynomialInfo::from_range(oracles.len(), 0..Self::COLUMNS);
-        oracles.push(FriOracleInfo {
+        let trace_oracle = FriOracleInfo {
             num_polys: Self::COLUMNS,
             blinding: false,
-        });
-
-        let permutation_zs_info = if self.uses_permutation_args() {
-            let num_z_polys = self.num_permutation_batches(config);
-            let polys = FriPolynomialInfo::from_range(oracles.len(), 0..num_z_polys);
-            oracles.push(FriOracleInfo {
-                num_polys: num_z_polys,
-                blinding: false,
-            });
-            polys
-        } else {
-            vec![]
         };
+        let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);
 
-        let num_quotient_polys = self.quotient_degree_factor() * config.num_challenges;
-        let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
-        oracles.push(FriOracleInfo {
+        let num_lookup_columns = self.num_lookup_helper_columns(config);
+        let num_auxiliary_polys = num_lookup_columns;
+        let auxiliary_oracle = FriOracleInfo {
+            num_polys: num_auxiliary_polys,
+            blinding: false,
+        };
+        let auxiliary_polys_info =
+            FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);
+
+        let num_quotient_polys = self.num_quotient_polys(config);
+        let quotient_oracle = FriOracleInfo {
             num_polys: num_quotient_polys,
             blinding: false,
-        });
+        };
+        let quotient_info =
+            FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys);
 
         let zeta_batch = FriBatchInfoTarget {
             point: zeta,
             polynomials: [
                 trace_info.clone(),
-                permutation_zs_info.clone(),
+                auxiliary_polys_info.clone(),
                 quotient_info,
             ]
             .concat(),
@@ -186,40 +184,28 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         let zeta_next = builder.mul_const_extension(g, zeta);
         let zeta_next_batch = FriBatchInfoTarget {
             point: zeta_next,
-            polynomials: [trace_info, permutation_zs_info].concat(),
+            polynomials: [trace_info, auxiliary_polys_info].concat(),
         };
-        let batches = vec![zeta_batch, zeta_next_batch];
-        FriInstanceInfoTarget { oracles, batches }
+        FriInstanceInfoTarget {
+            oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
+            batches: vec![zeta_batch, zeta_next_batch],
+        }
     }
 
-    /// Pairs of lists of columns that should be permutations of one another. A permutation argument
-    /// will be used for each such pair. Empty by default.
-    fn permutation_pairs(&self) -> Vec<PermutationPair> {
+    /// Lookup arguments used by this STARK, if any. Empty by default.
+    fn lookups(&self) -> Vec<Lookup<F>> {
         vec![]
     }
 
-    fn uses_permutation_args(&self) -> bool {
-        !self.permutation_pairs().is_empty()
+    /// The total number of lookup helper columns, over all lookup arguments and challenges.
+    fn num_lookup_helper_columns(&self, config: &StarkConfig) -> usize {
+        self.lookups()
+            .iter()
+            .map(|lookup| lookup.num_helper_columns(self.constraint_degree()))
+            .sum::<usize>()
+            * config.num_challenges
     }
 
-    /// The number of permutation argument instances that can be combined into a single constraint.
-    fn permutation_batch_size(&self) -> usize {
-        // The permutation argument constraints look like
-        //     Z(x) \prod(...) = Z(g x) \prod(...)
-        // where each product has a number of terms equal to the batch size. So our batch size
-        // should be one less than our constraint degree, which happens to be our quotient degree.
-        self.quotient_degree_factor()
-    }
-
-    fn num_permutation_instances(&self, config: &StarkConfig) -> usize {
-        self.permutation_pairs().len() * config.num_challenges
-    }
-
-    fn num_permutation_batches(&self, config: &StarkConfig) -> usize {
-        ceil_div_usize(
-            self.num_permutation_instances(config),
-            self.permutation_batch_size(),
-        )
+    /// Whether this STARK uses at least one lookup argument.
+    fn uses_lookups(&self) -> bool {
+        !self.lookups().is_empty()
     }
 }
diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs
index 0a399dce..6a179fe2 100644
--- a/starky/src/vanishing_poly.rs
+++ b/starky/src/vanishing_poly.rs
@@ -3,19 +3,18 @@ use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
 
-use crate::config::StarkConfig;
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::permutation::{
-    eval_permutation_checks, eval_permutation_checks_circuit, PermutationCheckDataTarget,
-    PermutationCheckVars,
+use crate::lookup::{
+    eval_ext_lookups_circuit, eval_packed_lookups_generic, Lookup, LookupCheckVars,
+    LookupCheckVarsTarget,
 };
 use crate::stark::Stark;
 
 pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
     stark: &S,
-    config: &StarkConfig,
     vars: &S::EvaluationFrame<FE, P, D2>,
-    permutation_data: Option<PermutationCheckVars<F, FE, P, D2>>,
+    lookups: &[Lookup<F>],
+    lookup_vars: Option<LookupCheckVars<F, FE, P, D2>>,
     consumer: &mut ConstraintConsumer<P>,
 ) where
     F: RichField + Extendable<D>,
@@ -24,12 +23,13 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
     S: Stark<F, D>,
 {
     stark.eval_packed_generic(vars, consumer);
-    if let Some(permutation_data) = permutation_data {
-        eval_permutation_checks::<F, FE, P, S, D, D2>(
+    if let Some(lookup_vars) = lookup_vars {
+        // Evaluate the STARK constraints related to the lookup argument.
+        eval_packed_lookups_generic::<F, FE, P, S, D, D2>(
             stark,
-            config,
+            lookups,
             vars,
-            permutation_data,
+            lookup_vars,
             consumer,
         );
     }
@@ -38,23 +38,16 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
 pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
     builder: &mut CircuitBuilder<F, D>,
     stark: &S,
-    config: &StarkConfig,
     vars: &S::EvaluationFrameTarget,
-    permutation_data: Option<PermutationCheckDataTarget<D>>,
+    lookup_vars: Option<LookupCheckVarsTarget<D>>,
     consumer: &mut RecursiveConstraintConsumer<F, D>,
 ) where
     F: RichField + Extendable<D>,
     S: Stark<F, D>,
 {
     stark.eval_ext_circuit(builder, vars, consumer);
-    if let Some(permutation_data) = permutation_data {
-        eval_permutation_checks_circuit::<F, S, D>(
-            builder,
-            stark,
-            config,
-            vars,
-            permutation_data,
-            consumer,
-        );
+    if let Some(lookup_vars) = lookup_vars {
+        // Evaluate all of the STARK's constraints related to the lookup argument.
+        eval_ext_lookups_circuit::<F, S, D>(builder, stark, vars, lookup_vars, consumer);
     }
 }
diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs
index 28b9a3e2..577405ef 100644
--- a/starky/src/verifier.rs
+++ b/starky/src/verifier.rs
@@ -7,13 +7,14 @@ use plonky2::field::extension::{Extendable, FieldExtension};
 use plonky2::field::types::Field;
 use plonky2::fri::verifier::verify_fri_proof;
 use plonky2::hash::hash_types::RichField;
+use plonky2::hash::merkle_tree::MerkleCap;
 use plonky2::plonk::config::GenericConfig;
 use plonky2::plonk::plonk_common::reduce_with_powers;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::ConstraintConsumer;
 use crate::evaluation_frame::StarkEvaluationFrame;
-use crate::permutation::PermutationCheckVars;
+use crate::lookup::LookupCheckVars;
 use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
 use crate::stark::Stark;
 use crate::vanishing_poly::eval_vanishing_poly;
@@ -30,7 +31,7 @@ pub fn verify_stark_proof<
 ) -> Result<()> {
     ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS);
     let degree_bits = proof_with_pis.proof.recover_degree_bits(config);
-    let challenges = proof_with_pis.get_challenges(&stark, config, degree_bits);
+    let challenges = proof_with_pis.get_challenges(config, degree_bits);
 
     verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, degree_bits, config)
 }
@@ -47,7 +48,7 @@ pub(crate) fn verify_stark_proof_with_challenges<
     config: &StarkConfig,
 ) -> Result<()> {
     validate_proof_shape(&stark, &proof_with_pis, config)?;
-    check_permutation_options(&stark, &proof_with_pis, &challenges)?;
+
     let StarkProofWithPublicInputs {
         proof,
         public_inputs,
@@ -55,8 +56,8 @@ pub(crate) fn verify_stark_proof_with_challenges<
     let StarkOpeningSet {
         local_values,
         next_values,
-        permutation_zs,
-        permutation_zs_next,
+        auxiliary_polys,
+        auxiliary_polys_next,
         quotient_polys,
     } = &proof.openings;
     let vars = S::EvaluationFrame::from_values(
@@ -81,16 +82,30 @@ pub(crate) fn verify_stark_proof_with_challenges<
         l_0,
         l_last,
     );
-    let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckVars {
-        local_zs: permutation_zs.as_ref().unwrap().clone(),
-        next_zs: permutation_zs_next.as_ref().unwrap().clone(),
-        permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(),
+
+    let num_lookup_columns = stark.num_lookup_helper_columns(config);
+    let lookup_challenges = (num_lookup_columns > 0).then(|| {
+        challenges
+            .lookup_challenge_set
+            .unwrap()
+            .challenges
+            .iter()
+            .map(|ch| ch.beta)
+            .collect::<Vec<_>>()
     });
+
+    let lookup_vars = stark.uses_lookups().then(|| LookupCheckVars {
+        local_values: auxiliary_polys.as_ref().unwrap().clone(),
+        next_values: auxiliary_polys_next.as_ref().unwrap().clone(),
+        challenges: lookup_challenges.unwrap(),
+    });
+    let lookups = stark.lookups();
+
     eval_vanishing_poly::<F, F::Extension, F::Extension, S, D, D>(
         &stark,
-        config,
         &vars,
-        permutation_data,
+        &lookups,
+        lookup_vars,
         &mut consumer,
     );
     let vanishing_polys_zeta = consumer.accumulators();
@@ -114,7 +129,7 @@ pub(crate) fn verify_stark_proof_with_challenges<
     }
 
     let merkle_caps = once(proof.trace_cap)
-        .chain(proof.permutation_zs_cap)
+        .chain(proof.auxiliary_polys_cap)
         .chain(once(proof.quotient_polys_cap))
         .collect_vec();
 
@@ -152,7 +167,7 @@ where
     let StarkProof {
         trace_cap,
-        permutation_zs_cap,
+        auxiliary_polys_cap,
         quotient_polys_cap,
         openings,
         // The shape of the opening proof will be checked in the FRI verifier (see
@@ -163,8 +178,8 @@ where
     let StarkOpeningSet {
         local_values,
         next_values,
-        permutation_zs,
-        permutation_zs_next,
+        auxiliary_polys,
+        auxiliary_polys_next,
         quotient_polys,
     } = openings;
 
@@ -172,7 +187,8 @@ where
     let fri_params = config.fri_params(degree_bits);
     let cap_height = fri_params.config.cap_height;
-    let num_zs = stark.num_permutation_batches(config);
+
+    let num_auxiliary = stark.num_lookup_helper_columns(config);
 
     ensure!(trace_cap.height() == cap_height);
     ensure!(quotient_polys_cap.height() == cap_height);
@@ -181,25 +197,13 @@ where
     ensure!(next_values.len() == S::COLUMNS);
     ensure!(quotient_polys.len() == stark.num_quotient_polys(config));
 
-    if stark.uses_permutation_args() {
-        let permutation_zs_cap = permutation_zs_cap
-            .as_ref()
-            .ok_or_else(|| anyhow!("Missing Zs cap"))?;
-        let permutation_zs = permutation_zs
-            .as_ref()
-            .ok_or_else(|| anyhow!("Missing permutation_zs"))?;
-        let permutation_zs_next = permutation_zs_next
-            .as_ref()
-            .ok_or_else(|| anyhow!("Missing permutation_zs_next"))?;
-
-        ensure!(permutation_zs_cap.height() == cap_height);
-        ensure!(permutation_zs.len() == num_zs);
-        ensure!(permutation_zs_next.len() == num_zs);
-    } else {
-        ensure!(permutation_zs_cap.is_none());
-        ensure!(permutation_zs.is_none());
-        ensure!(permutation_zs_next.is_none());
-    }
+    check_lookup_options::<F, C, S, D>(
+        stark,
+        auxiliary_polys_cap,
+        auxiliary_polys,
+        auxiliary_polys_next,
+        config,
+    )?;
 
     Ok(())
 }
@@ -216,30 +220,43 @@ fn eval_l_0_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
     (z_x * invs[0], z_x * invs[1])
 }
 
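For context, `eval_l_0_and_l_last` uses the closed forms L_0(x) = (x^n - 1) / (n(x - 1)) and L_last(x) = (x^n - 1) / (n(gx - 1)), which avoid interpolating the full Lagrange basis. The standalone sketch below checks both formulas against the textbook product definition over a toy field (p = 97, n = 4; all parameters are illustrative, not the fields starky uses):

// Numeric check of the closed forms behind eval_l_0_and_l_last.
const P: u64 = 97;

fn pow(mut b: u64, mut e: u64) -> u64 {
    let mut r = 1;
    b %= P;
    while e > 0 {
        if e & 1 == 1 {
            r = r * b % P;
        }
        b = b * b % P;
        e >>= 1;
    }
    r
}

fn inv(a: u64) -> u64 {
    pow(a, P - 2)
}

fn main() {
    let (n, g) = (4u64, 22u64); // 22 has multiplicative order 4 mod 97
    let x = 5u64; // sample point outside the subgroup

    let z_x = (pow(x, n) + P - 1) % P; // Z_H(x) = x^n - 1
    let l_0 = z_x * inv(n * (x - 1) % P) % P;
    let l_last = z_x * inv(n * (g * x % P + P - 1) % P) % P;

    // Direct evaluation of the Lagrange basis over H = {1, g, g^2, g^3}.
    let h: Vec<u64> = (0..n).map(|i| pow(g, i)).collect();
    let lagrange = |i: usize| {
        let mut acc = 1u64;
        for j in 0..h.len() {
            if j != i {
                acc = acc * ((x + P - h[j]) % P) % P * inv((h[i] + P - h[j]) % P) % P;
            }
        }
        acc
    };

    assert_eq!(l_0, lagrange(0));
    assert_eq!(l_last, lagrange(h.len() - 1));
}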
-/// Utility function to check that all permutation data wrapped in `Option`s are `Some` iff
-/// the Stark uses a permutation argument.
+/// Utility function to check that all lookup data wrapped in `Option`s is `Some` iff
+/// the Stark uses a lookup argument.
-fn check_permutation_options<
+fn check_lookup_options<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     S: Stark<F, D>,
     const D: usize,
 >(
     stark: &S,
-    proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
-    challenges: &StarkProofChallenges<F, D>,
+    auxiliary_polys_cap: &Option<MerkleCap<F, <C as GenericConfig<D>>::Hasher>>,
+    auxiliary_polys: &Option<Vec<<F as Extendable<D>>::Extension>>,
+    auxiliary_polys_next: &Option<Vec<<F as Extendable<D>>::Extension>>,
+    config: &StarkConfig,
 ) -> Result<()> {
-    let options_is_some = [
-        proof_with_pis.proof.permutation_zs_cap.is_some(),
-        proof_with_pis.proof.openings.permutation_zs.is_some(),
-        proof_with_pis.proof.openings.permutation_zs_next.is_some(),
-        challenges.permutation_challenge_sets.is_some(),
-    ];
-    ensure!(
-        options_is_some
-            .into_iter()
-            .all(|b| b == stark.uses_permutation_args()),
-        "Permutation data doesn't match with Stark configuration."
-    );
+    if stark.uses_lookups() {
+        let num_auxiliary = stark.num_lookup_helper_columns(config);
+        let cap_height = config.fri_config.cap_height;
+
+        let auxiliary_polys_cap = auxiliary_polys_cap
+            .as_ref()
+            .ok_or_else(|| anyhow!("Missing auxiliary_polys_cap"))?;
+        let auxiliary_polys = auxiliary_polys
+            .as_ref()
+            .ok_or_else(|| anyhow!("Missing auxiliary_polys"))?;
+        let auxiliary_polys_next = auxiliary_polys_next
+            .as_ref()
+            .ok_or_else(|| anyhow!("Missing auxiliary_polys_next"))?;
+
+        ensure!(auxiliary_polys_cap.height() == cap_height);
+        ensure!(auxiliary_polys.len() == num_auxiliary);
+        ensure!(auxiliary_polys_next.len() == num_auxiliary);
+    } else {
+        ensure!(auxiliary_polys_cap.is_none());
+        ensure!(auxiliary_polys.is_none());
+        ensure!(auxiliary_polys_next.is_none());
+    }
+    Ok(())
 }
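A closing note on the new `Lookup::frequencies_column` field: the trace generator is expected to fill it with the number of times each table row is read, since the logUp identity is frequency-weighted. A hypothetical helper (names and the function itself are illustrative; starky leaves this to each STARK's trace generation) could look like:

use std::collections::HashMap;

// Count how often each table entry is looked up; unused entries get 0.
fn compute_frequencies(looked: &[u64], table: &[u64]) -> Vec<u64> {
    let mut counts: HashMap<u64, u64> = HashMap::new();
    for &v in looked {
        *counts.entry(v).or_insert(0) += 1;
    }
    table
        .iter()
        .map(|t| counts.get(t).copied().unwrap_or(0))
        .collect()
}

fn main() {
    let looked = [2u64, 5, 2, 7, 5, 2];
    let table = [2u64, 5, 7, 11];
    assert_eq!(compute_frequencies(&looked, &table), vec![3, 2, 1, 0]);
}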