From aedfe5dfa6ac8c5663427ae59692fb7d6635ce17 Mon Sep 17 00:00:00 2001 From: Linda Guiga <101227802+LindaGuiga@users.noreply.github.com> Date: Wed, 10 Jan 2024 08:54:13 +0100 Subject: [PATCH] Implement CTL bundling (#1439) * Implement CTL bundling * Cleanup * Clippy * Preallocate memory * Apply comments and remove unnecessary functions. * Start removing clones * Set columns and filters inside if condition. * Remove extra CTL helper columns * Add circuit version, with cleanup and fixes * Remove some overhead * Use refs * Pacify clippy --------- Co-authored-by: Robin Salen --- evm/src/all_stark.rs | 1 + evm/src/cpu/memio.rs | 2 +- evm/src/cross_table_lookup.rs | 874 +++++++++++++++++++++++++++------ evm/src/keccak/keccak_stark.rs | 3 +- evm/src/lookup.rs | 176 +++---- evm/src/proof.rs | 12 +- evm/src/prover.rs | 83 +++- evm/src/recursive_verifier.rs | 53 +- evm/src/stark.rs | 14 +- evm/src/vanishing_poly.rs | 15 +- evm/src/verifier.rs | 27 +- 11 files changed, 951 insertions(+), 309 deletions(-) diff --git a/evm/src/all_stark.rs b/evm/src/all_stark.rs index 74f211d6..a2996eb4 100644 --- a/evm/src/all_stark.rs +++ b/evm/src/all_stark.rs @@ -1,5 +1,6 @@ use std::iter; +use itertools::Itertools; use plonky2::field::extension::Extendable; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; diff --git a/evm/src/cpu/memio.rs b/evm/src/cpu/memio.rs index 2073e182..7cb4c388 100644 --- a/evm/src/cpu/memio.rs +++ b/evm/src/cpu/memio.rs @@ -204,7 +204,7 @@ fn eval_packed_store( } /// Circuit version of `eval_packed_store`. -/// /// Evaluates constraints for MSTORE_GENERAL. +/// Evaluates constraints for MSTORE_GENERAL. fn eval_ext_circuit_store, const D: usize>( builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder, lv: &CpuColumnsView>, diff --git a/evm/src/cross_table_lookup.rs b/evm/src/cross_table_lookup.rs index 21f94126..428279ba 100644 --- a/evm/src/cross_table_lookup.rs +++ b/evm/src/cross_table_lookup.rs @@ -28,11 +28,14 @@ //! the current and next row values -- when computing the linear combinations. use std::borrow::Borrow; +use std::cmp::min; use std::fmt::Debug; use std::iter::repeat; use anyhow::{ensure, Result}; +use hashbrown::HashMap; use itertools::Itertools; +use plonky2::field::batch_util::{batch_add_inplace, batch_multiply_inplace}; use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::packed::PackedField; use plonky2::field::polynomial::PolynomialValues; @@ -46,6 +49,7 @@ use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; use plonky2::plonk::plonk_common::{ reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit, }; +use plonky2::util::ceil_div_usize; use plonky2::util::serialization::{Buffer, IoResult, Read, Write}; use crate::all_stark::{Table, NUM_TABLES}; @@ -466,39 +470,66 @@ impl CrossTableLookup { } } - /// Given a `Table` t and the number of challenges, returns the number of Cross-table lookup polynomials associated to t, - /// i.e. the number of looking and looked tables among all CTLs whose columns are taken from t. - pub(crate) fn num_ctl_zs(ctls: &[Self], table: Table, num_challenges: usize) -> usize { + /// Given a table, returns: + /// - the total number of helper columns for this table, over all Cross-table lookups, + /// - the total number of z polynomials for this table, over all Cross-table lookups, + /// - the number of helper columns for this table, for each Cross-table lookup. 
+ pub(crate) fn num_ctl_helpers_zs_all( + ctls: &[Self], + table: Table, + num_challenges: usize, + constraint_degree: usize, + ) -> (usize, usize, Vec) { + let mut num_helpers = 0; let mut num_ctls = 0; - for ctl in ctls { + let mut num_helpers_by_ctl = vec![0; ctls.len()]; + for (i, ctl) in ctls.iter().enumerate() { let all_tables = std::iter::once(&ctl.looked_table).chain(&ctl.looking_tables); - num_ctls += all_tables.filter(|twc| twc.table == table).count(); + let num_appearances = all_tables.filter(|twc| twc.table == table).count(); + let is_helpers = num_appearances > 2; + if is_helpers { + num_helpers_by_ctl[i] = ceil_div_usize(num_appearances, constraint_degree - 1); + num_helpers += num_helpers_by_ctl[i]; + } + + if num_appearances > 0 { + num_ctls += 1; + } } - num_ctls * num_challenges + ( + num_helpers * num_challenges, + num_ctls * num_challenges, + num_helpers_by_ctl, + ) } } /// Cross-table lookup data for one table. #[derive(Clone, Default)] -pub(crate) struct CtlData { +pub(crate) struct CtlData<'a, F: Field> { /// Data associated with all Z(x) polynomials for one table. - pub(crate) zs_columns: Vec>, + pub(crate) zs_columns: Vec>, } /// Cross-table lookup data associated with one Z(x) polynomial. +/// One Z(x) polynomial can be associated to multiple tables, +/// built from the same STARK. #[derive(Clone)] -pub(crate) struct CtlZData { +pub(crate) struct CtlZData<'a, F: Field> { + /// Helper columns to verify the Z polynomial values. + pub(crate) helper_columns: Vec>, /// Z polynomial values. pub(crate) z: PolynomialValues, /// Cross-table lookup challenge. pub(crate) challenge: GrandProductChallenge, - /// Column linear combination for the current table. - pub(crate) columns: Vec>, - /// Filter column for the current table. It evaluates to either 1 or 0. - pub(crate) filter: Option>, + /// Vector of column linear combinations for the current tables. + pub(crate) columns: Vec<&'a [Column]>, + /// Vector of filter columns for the current table. + /// Each filter evaluates to either 1 or 0. + pub(crate) filter: Vec>>, } -impl CtlData { +impl<'a, F: Field> CtlData<'a, F> { /// Returns the number of cross-table lookup polynomials. pub(crate) fn len(&self) -> usize { self.zs_columns.len() @@ -509,12 +540,38 @@ impl CtlData { self.zs_columns.is_empty() } - /// Returns all the cross-table lookup polynomials. - pub(crate) fn z_polys(&self) -> Vec> { - self.zs_columns + /// Returns all the cross-table lookup helper polynomials. + pub(crate) fn ctl_helper_polys(&self) -> Vec> { + let num_polys = self + .zs_columns .iter() - .map(|zs_columns| zs_columns.z.clone()) - .collect() + .fold(0, |acc, z| acc + z.helper_columns.len()); + let mut res = Vec::with_capacity(num_polys); + for z in &self.zs_columns { + res.extend(z.helper_columns.clone()); + } + + res + } + + /// Returns all the Z cross-table-lookup polynomials. + pub(crate) fn ctl_z_polys(&self) -> Vec> { + let mut res = Vec::with_capacity(self.zs_columns.len()); + for z in &self.zs_columns { + res.push(z.z.clone()); + } + + res + } + /// Returns the number of helper columns for each STARK in each + /// `CtlZData`. + pub(crate) fn num_ctl_helper_polys(&self) -> Vec { + let mut res = Vec::with_capacity(self.zs_columns.len()); + for z in &self.zs_columns { + res.push(z.helper_columns.len()); + } + + res } } @@ -640,16 +697,46 @@ pub(crate) fn get_grand_product_challenge_set_target< GrandProductChallengeSet { challenges } } +/// Returns the number of helper columns for each `Table`. 
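+/// For instance, with `constraint_degree = 3`, a `Table` appearing in five
+/// looking positions of a single CTL gets `ceil(5 / (3 - 1)) = 3` helper
+/// columns, while a `Table` appearing at most twice needs none.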
+pub(crate) fn num_ctl_helper_columns_by_table( + ctls: &[CrossTableLookup], + constraint_degree: usize, +) -> Vec<[usize; NUM_TABLES]> { + let mut res = vec![[0; NUM_TABLES]; ctls.len()]; + for (i, ctl) in ctls.iter().enumerate() { + let CrossTableLookup { + looking_tables, + looked_table, + } = ctl; + let mut num_by_table = [0; NUM_TABLES]; + + let grouped_lookups = looking_tables.iter().group_by(|&a| a.table); + + for (table, group) in grouped_lookups.into_iter() { + let sum = group.count(); + if sum > 2 { + // We only need helper columns if there are more than 2 columns. + num_by_table[table as usize] = ceil_div_usize(sum, constraint_degree - 1); + } + } + + res[i] = num_by_table; + } + res +} + /// Generates all the cross-table lookup data, for all tables. /// - `trace_poly_values` corresponds to the trace values for all tables. /// - `cross_table_lookups` corresponds to all the cross-table lookups, i.e. the looked and looking tables, as described in `CrossTableLookup`. /// - `ctl_challenges` corresponds to the challenges used for CTLs. +/// - `constraint_degree` is the maximal constraint degree for the table. /// For each `CrossTableLookup`, and each looking/looked table, the partial products for the CTL are computed, and added to the said table's `CtlZData`. -pub(crate) fn cross_table_lookup_data( +pub(crate) fn cross_table_lookup_data<'a, F: RichField, const D: usize>( trace_poly_values: &[Vec>; NUM_TABLES], - cross_table_lookups: &[CrossTableLookup], + cross_table_lookups: &'a [CrossTableLookup], ctl_challenges: &GrandProductChallengeSet, -) -> [CtlData; NUM_TABLES] { + constraint_degree: usize, +) -> [CtlData<'a, F>; NUM_TABLES] { let mut ctl_data_per_table = [0; NUM_TABLES].map(|_| CtlData::default()); for CrossTableLookup { looking_tables, @@ -658,103 +745,232 @@ pub(crate) fn cross_table_lookup_data( { log::debug!("Processing CTL for {:?}", looked_table.table); for &challenge in &ctl_challenges.challenges { - let zs_looking = looking_tables.iter().map(|table| { - partial_sums( - &trace_poly_values[table.table as usize], - &table.columns, - &table.filter, - challenge, - ) - }); - let z_looked = partial_sums( - &trace_poly_values[looked_table.table as usize], - &looked_table.columns, - &looked_table.filter, + let helper_zs_looking = ctl_helper_zs_cols( + trace_poly_values, + looking_tables.clone(), challenge, + constraint_degree, ); - for (table, z) in looking_tables.iter().zip(zs_looking) { - ctl_data_per_table[table.table as usize] - .zs_columns - .push(CtlZData { - z, - challenge, - columns: table.columns.clone(), - filter: table.filter.clone(), - }); + + let mut z_looked = partial_sums( + &trace_poly_values[looked_table.table as usize], + &[(&looked_table.columns, &looked_table.filter)], + challenge, + constraint_degree, + ); + + for (table, helpers_zs) in helper_zs_looking { + let num_helpers = helpers_zs.len() - 1; + let count = looking_tables + .iter() + .filter(|looking_table| looking_table.table as usize == table) + .count(); + let cols_filts = looking_tables.iter().filter_map(|looking_table| { + if looking_table.table as usize == table { + Some((&looking_table.columns, &looking_table.filter)) + } else { + None + } + }); + let mut columns = Vec::with_capacity(count); + let mut filter = Vec::with_capacity(count); + for (col, filt) in cols_filts { + columns.push(&col[..]); + filter.push(filt.clone()); + } + ctl_data_per_table[table].zs_columns.push(CtlZData { + helper_columns: helpers_zs[..num_helpers].to_vec(), + z: helpers_zs[num_helpers].clone(), + challenge, + 
columns, + filter, + }); } + // There is no helper column for the looking table. + let looked_poly = z_looked[0].clone(); ctl_data_per_table[looked_table.table as usize] .zs_columns .push(CtlZData { - z: z_looked, + helper_columns: vec![], + z: looked_poly, challenge, - columns: looked_table.columns.clone(), - filter: looked_table.filter.clone(), + columns: vec![&looked_table.columns[..]], + filter: vec![looked_table.filter.clone()], }); } } ctl_data_per_table } +type ColumnFilter<'a, F> = (&'a [Column], &'a Option>); + +/// Given a STARK's trace, and the data associated to one lookup (either CTL or range check), +/// returns the associated helper polynomials. +pub(crate) fn get_helper_cols( + trace: &[PolynomialValues], + degree: usize, + columns_filters: &[ColumnFilter], + challenge: GrandProductChallenge, + constraint_degree: usize, +) -> Vec> { + let num_helper_columns = ceil_div_usize(columns_filters.len(), constraint_degree - 1); + + let mut helper_columns = Vec::with_capacity(num_helper_columns); + + let mut filter_index = 0; + for mut cols_filts in &columns_filters.iter().chunks(constraint_degree - 1) { + let (first_col, first_filter) = cols_filts.next().unwrap(); + + let mut filter_col = Vec::with_capacity(degree); + let first_combined = (0..degree) + .map(|d| { + let f = if let Some(filter) = first_filter { + let f = filter.eval_table(trace, d); + filter_col.push(f); + f + } else { + filter_col.push(F::ONE); + F::ONE + }; + if f.is_one() { + let evals = first_col + .iter() + .map(|c| c.eval_table(trace, d)) + .collect::>(); + challenge.combine(evals.iter()) + } else { + assert_eq!(f, F::ZERO, "Non-binary filter?"); + // Dummy value. Cannot be zero since it will be batch-inverted. + F::ONE + } + }) + .collect::>(); + + let mut acc = F::batch_multiplicative_inverse(&first_combined); + for d in 0..degree { + if filter_col[d].is_zero() { + acc[d] = F::ZERO; + } + } + + for (col, filt) in cols_filts { + let mut filter_col = Vec::with_capacity(degree); + let mut combined = (0..degree) + .map(|d| { + let f = if let Some(filter) = filt { + let f = filter.eval_table(trace, d); + filter_col.push(f); + f + } else { + filter_col.push(F::ONE); + F::ONE + }; + if f.is_one() { + let evals = col + .iter() + .map(|c| c.eval_table(trace, d)) + .collect::>(); + challenge.combine(evals.iter()) + } else { + assert_eq!(f, F::ZERO, "Non-binary filter?"); + // Dummy value. Cannot be zero since it will be batch-inverted. + F::ONE + } + }) + .collect::>(); + + combined = F::batch_multiplicative_inverse(&combined); + + for d in 0..degree { + if filter_col[d].is_zero() { + combined[d] = F::ZERO; + } + } + + batch_add_inplace(&mut acc, &combined); + } + + helper_columns.push(acc.into()); + } + assert_eq!(helper_columns.len(), num_helper_columns); + + helper_columns +} + +/// Computes helper columns and Z polynomials for all looking tables +/// of one cross-table lookup (i.e. for one looked table). 
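+/// Looking tables drawn from the same `Table` are grouped together so that
+/// one set of helper columns can cover all of them; for every group, the last
+/// polynomial returned by `partial_sums` is the running-sum `Z` polynomial itself.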
+fn ctl_helper_zs_cols<F: Field>(
+    all_stark_traces: &[Vec<PolynomialValues<F>>; NUM_TABLES],
+    looking_tables: Vec<TableWithColumns<F>>,
+    challenge: GrandProductChallenge<F>,
+    constraint_degree: usize,
+) -> Vec<(usize, Vec<PolynomialValues<F>>)> {
+    let grouped_lookups = looking_tables.iter().group_by(|a| a.table);
+
+    grouped_lookups
+        .into_iter()
+        .map(|(table, group)| {
+            let degree = all_stark_traces[table as usize][0].len();
+            let columns_filters = group
+                .map(|table| (&table.columns[..], &table.filter))
+                .collect::<Vec<(&[Column<F>], &Option<Filter<F>>)>>();
+            (
+                table as usize,
+                partial_sums(
+                    &all_stark_traces[table as usize],
+                    &columns_filters,
+                    challenge,
+                    constraint_degree,
+                ),
+            )
+        })
+        .collect::<Vec<(usize, Vec<PolynomialValues<F>>)>>()
+}
+
 /// Computes the cross-table lookup partial sums for one table and given column linear combinations.
 /// `trace` represents the trace values for the given table.
-/// `columns` are all the column linear combinations to evaluate.
-/// `filter_column` is a column linear combination used to determine whether a row should be selected.
+/// `columns` is a vector of column linear combinations to evaluate. Each element in the vector represents columns that need to be combined.
+/// `columns_filters` pairs each of those column linear combinations with an optional filter used to determine whether a row should be selected.
 /// `challenge` is a cross-table lookup challenge.
 /// The initial sum `s` is 0.
-/// For each row, if the `filter_column` evaluates to 1, then the rows is selected. All the column linear combinations are evaluated at said row. All those evaluations are combined using the challenge to get a value `v`.
-/// The sum is updated: `s += 1/v`, and is pushed to the vector of partial sums.
+/// For each row, if the filter evaluates to 1, then the row is selected. All the column linear combinations are evaluated at said row.
+/// The evaluations of each element of `columns` are then combined together to form a value `v`.
+/// The values `v` are grouped together, in groups of size `constraint_degree - 1` (2 in our case). For each group, we construct a helper
+/// column: `h = \sum_i 1/v_i`.
+///
+/// The sum is updated: `s += \sum h_i`, and is pushed to the vector of partial sums `z`.
+/// Returns the helper columns and `z`.
 fn partial_sums<F: Field>(
     trace: &[PolynomialValues<F>],
-    columns: &[Column<F>],
-    filter: &Option<Filter<F>>,
+    columns_filters: &[ColumnFilter<F>],
     challenge: GrandProductChallenge<F>,
-) -> PolynomialValues<F> {
-    let mut partial_sum = F::ZERO;
+    constraint_degree: usize,
+) -> Vec<PolynomialValues<F>> {
     let degree = trace[0].len();
-    let mut filters = Vec::with_capacity(degree);
-    let mut res = Vec::with_capacity(degree);
+    let mut z = Vec::with_capacity(degree);
 
-    for i in (0..degree).rev() {
-        if let Some(filter) = filter {
-            let filter_val = filter.eval_table(trace, i);
-            if filter_val.is_one() {
-                filters.push(true);
-            } else {
-                assert_eq!(filter_val, F::ZERO, "Non-binary filter?");
-                filters.push(false);
-            }
-        } else {
-            filters.push(false);
-        };
+    let mut helper_columns =
+        get_helper_cols(trace, degree, columns_filters, challenge, constraint_degree);
 
-        let combined = if filters[filters.len() - 1] {
-            let evals = columns
-                .iter()
-                .map(|c| c.eval_table(trace, i))
-                .collect::<Vec<_>>();
-            challenge.combine(evals.iter())
-        } else {
-            // Dummy value. Cannot be zero since it will be batch-inverted.
-            F::ONE
-        };
-        res.push(combined);
+    let x = helper_columns
+        .iter()
+        .map(|col| col.values[degree - 1])
+        .sum::<F>();
+    z.push(x);
+
+    for i in (0..degree - 1).rev() {
+        let x = helper_columns.iter().map(|col| col.values[i]).sum::<F>();
+
+        z.push(z[z.len() - 1] + x);
     }
-    res = F::batch_multiplicative_inverse(&res);
-
-    if !filters[0] {
-        res[0] = F::ZERO;
+    z.reverse();
+    if columns_filters.len() > 2 {
+        helper_columns.push(z.into());
+    } else {
+        helper_columns = vec![z.into()];
     }
-    for i in (1..degree) {
-        let mut cur_value = res[i - 1];
-        if filters[i] {
-            cur_value += res[i];
-        }
-        res[i] = cur_value;
-    }
-
-    res.reverse();
-    res.into()
+    helper_columns
 }
 
 /// Data necessary to check the cross-table lookups of a given table.
@@ -765,6 +981,9 @@ where
     FE: FieldExtension<D2, BaseField = F>,
     P: PackedField<Scalar = FE>,
 {
+    /// Helper columns to check that the Z polynomial
+    /// was constructed correctly.
+    pub(crate) helper_columns: Vec<P>
, /// Evaluation of the trace polynomials at point `zeta`. pub(crate) local_z: P, /// Evaluation of the trace polynomials at point `g * zeta` @@ -772,9 +991,9 @@ where /// Cross-table lookup challenges. pub(crate) challenges: GrandProductChallenge, /// Column linear combinations of the `CrossTableLookup`s. - pub(crate) columns: &'a [Column], + pub(crate) columns: Vec<&'a [Column]>, /// Filter that evaluates to either 1 or 0. - pub(crate) filter: &'a Option>, + pub(crate) filter: Vec>>, } impl<'a, F: RichField + Extendable, const D: usize> @@ -786,45 +1005,107 @@ impl<'a, F: RichField + Extendable, const D: usize> cross_table_lookups: &'a [CrossTableLookup], ctl_challenges: &'a GrandProductChallengeSet, num_lookup_columns: &[usize; NUM_TABLES], + num_helper_ctl_columns: &Vec<[usize; NUM_TABLES]>, ) -> [Vec; NUM_TABLES] { + let mut total_num_helper_cols_by_table = [0; NUM_TABLES]; + for p_ctls in num_helper_ctl_columns { + for j in 0..NUM_TABLES { + total_num_helper_cols_by_table[j] += p_ctls[j] * ctl_challenges.challenges.len(); + } + } + // Get all cross-table lookup polynomial openings for each STARK proof. let mut ctl_zs = proofs .iter() .zip(num_lookup_columns) .map(|(p, &num_lookup)| { let openings = &p.proof.openings; - let ctl_zs = openings.auxiliary_polys.iter().skip(num_lookup); - let ctl_zs_next = openings.auxiliary_polys_next.iter().skip(num_lookup); - ctl_zs.zip(ctl_zs_next) + + let ctl_zs = &openings.auxiliary_polys[num_lookup..]; + let ctl_zs_next = &openings.auxiliary_polys_next[num_lookup..]; + ctl_zs.iter().zip(ctl_zs_next).collect::>() }) .collect::>(); // Put each cross-table lookup polynomial into the correct table data: if a CTL polynomial is extracted from looking/looked table t, then we add it to the `CtlCheckVars` of table t. + let mut start_indices = [0; NUM_TABLES]; + let mut z_indices = [0; NUM_TABLES]; let mut ctl_vars_per_table = [0; NUM_TABLES].map(|_| vec![]); - for CrossTableLookup { - looking_tables, - looked_table, - } in cross_table_lookups + for ( + CrossTableLookup { + looking_tables, + looked_table, + }, + num_ctls, + ) in cross_table_lookups.iter().zip(num_helper_ctl_columns) { for &challenges in &ctl_challenges.challenges { + // Group looking tables by `Table`, since we bundle the looking tables taken from the same `Table` together thanks to helper columns. + // We want to only iterate on each `Table` once. + let mut filtered_looking_tables = + Vec::with_capacity(min(looking_tables.len(), NUM_TABLES)); for table in looking_tables { - let (looking_z, looking_z_next) = ctl_zs[table.table as usize].next().unwrap(); - ctl_vars_per_table[table.table as usize].push(Self { + if !filtered_looking_tables.contains(&(table.table as usize)) { + filtered_looking_tables.push(table.table as usize); + } + } + + for (i, &table) in filtered_looking_tables.iter().enumerate() { + // We have first all the helper polynomials, then all the z polynomials. 
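+                // Within `ctl_zs[table]`, openings are laid out as
+                // [all CTL helper columns | all CTL Z polynomials]: helper columns
+                // are read at `start_indices[table]`, and Z polynomials at
+                // `total_num_helper_cols_by_table[table] + z_indices[table]`.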
+ let (looking_z, looking_z_next) = + ctl_zs[table][total_num_helper_cols_by_table[table] + z_indices[table]]; + + let count = looking_tables + .iter() + .filter(|looking_table| looking_table.table as usize == table) + .count(); + let cols_filts = looking_tables.iter().filter_map(|looking_table| { + if looking_table.table as usize == table { + Some((&looking_table.columns, &looking_table.filter)) + } else { + None + } + }); + let mut columns = Vec::with_capacity(count); + let mut filter = Vec::with_capacity(count); + for (col, filt) in cols_filts { + columns.push(&col[..]); + filter.push(filt.clone()); + } + let helper_columns = ctl_zs[table] + [start_indices[table]..start_indices[table] + num_ctls[table]] + .iter() + .map(|&(h, _)| *h) + .collect::>(); + + start_indices[table] += num_ctls[table]; + + z_indices[table] += 1; + ctl_vars_per_table[table].push(Self { + helper_columns, local_z: *looking_z, next_z: *looking_z_next, challenges, - columns: &table.columns, - filter: &table.filter, + columns, + filter, }); } - let (looked_z, looked_z_next) = ctl_zs[looked_table.table as usize].next().unwrap(); + let (looked_z, looked_z_next) = ctl_zs[looked_table.table as usize] + [total_num_helper_cols_by_table[looked_table.table as usize] + + z_indices[looked_table.table as usize]]; + + z_indices[looked_table.table as usize] += 1; + + let columns = vec![&looked_table.columns[..]]; + let filter = vec![looked_table.filter.clone()]; ctl_vars_per_table[looked_table.table as usize].push(Self { + helper_columns: vec![], local_z: *looked_z, next_z: *looked_z_next, challenges, - columns: &looked_table.columns, - filter: &looked_table.filter, + columns, + filter, }); } } @@ -832,6 +1113,61 @@ impl<'a, F: RichField + Extendable, const D: usize> } } +/// Given data associated to a lookup (either a CTL or a range-check), check the associated helper polynomials. +pub(crate) fn eval_helper_columns( + filter: &[Option>], + columns: &[Vec
<P>
], + local_values: &[P], + next_values: &[P], + helper_columns: &[P], + constraint_degree: usize, + challenges: &GrandProductChallenge, + consumer: &mut ConstraintConsumer
<P>
, +) where + F: RichField + Extendable, + FE: FieldExtension, + P: PackedField, +{ + if !helper_columns.is_empty() { + for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() { + let fs = + &filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()]; + let h = helper_columns[j]; + + match chunk.len() { + 2 => { + let combin0 = challenges.combine(&chunk[0]); + let combin1 = challenges.combine(chunk[1].iter()); + + let f0 = if let Some(filter0) = &fs[0] { + filter0.eval_filter(local_values, next_values) + } else { + P::ONES + }; + let f1 = if let Some(filter1) = &fs[1] { + filter1.eval_filter(local_values, next_values) + } else { + P::ONES + }; + + consumer.constraint(combin1 * combin0 * h - f0 * combin1 - f1 * combin0); + } + 1 => { + let combin = challenges.combine(&chunk[0]); + let f0 = if let Some(filter1) = &fs[0] { + filter1.eval_filter(local_values, next_values) + } else { + P::ONES + }; + consumer.constraint(combin * h - f0); + } + + _ => todo!("Allow other constraint degrees"), + } + } + } +} + /// Checks the cross-table lookup Z polynomials for each table: /// - Checks that the CTL `Z` partial sums are correctly updated. /// - Checks that the final value of the CTL sum is the combination of all STARKs' CTL polynomials. @@ -843,6 +1179,7 @@ pub(crate) fn eval_cross_table_lookup_checks, ctl_vars: &[CtlCheckVars], consumer: &mut ConstraintConsumer
<P>
, + constraint_degree: usize, ) where F: RichField + Extendable, FE: FieldExtension, @@ -854,6 +1191,7 @@ pub(crate) fn eval_cross_table_lookup_checks>() + }) .collect::>(); - let combined = challenges.combine(evals.iter()); - let local_filter = if let Some(combin) = filter { - combin.eval_filter(local_values, next_values) - } else { - P::ONES - }; - // Check value of `Z(g^(n-1))` - consumer.constraint_last_row(*local_z * combined - local_filter); - // Check `Z(w) = Z(gw) * (filter / combination)` - consumer.constraint_transition((*local_z - *next_z) * combined - local_filter); + // Check helper columns. + eval_helper_columns( + filter, + &evals, + local_values, + next_values, + helper_columns, + constraint_degree, + challenges, + consumer, + ); + + if !helper_columns.is_empty() { + let h_sum = helper_columns.iter().fold(P::ZEROS, |acc, x| acc + *x); + // Check value of `Z(g^(n-1))` + consumer.constraint_last_row(*local_z - h_sum); + // Check `Z(w) = Z(gw) + \sum h_i` + consumer.constraint_transition(*local_z - *next_z - h_sum); + } else if columns.len() > 1 { + let combin0 = challenges.combine(&evals[0]); + let combin1 = challenges.combine(&evals[1]); + + let f0 = if let Some(filter0) = &filter[0] { + filter0.eval_filter(local_values, next_values) + } else { + P::ONES + }; + let f1 = if let Some(filter1) = &filter[1] { + filter1.eval_filter(local_values, next_values) + } else { + P::ONES + }; + + consumer + .constraint_last_row(combin0 * combin1 * *local_z - f0 * combin1 - f1 * combin0); + consumer.constraint_transition( + combin0 * combin1 * (*local_z - *next_z) - f0 * combin1 - f1 * combin0, + ); + } else { + let combin0 = challenges.combine(&evals[0]); + let f0 = if let Some(filter0) = &filter[0] { + filter0.eval_filter(local_values, next_values) + } else { + P::ONES + }; + consumer.constraint_last_row(combin0 * *local_z - f0); + consumer.constraint_transition(combin0 * (*local_z - *next_z) - f0); + } } } /// Circuit version of `CtlCheckVars`. Data necessary to check the cross-table lookups of a given table. #[derive(Clone)] -pub(crate) struct CtlCheckVarsTarget<'a, F: Field, const D: usize> { +pub(crate) struct CtlCheckVarsTarget { + ///Evaluation of the helper columns to check that the Z polyomial + /// was constructed correctly. + pub(crate) helper_columns: Vec>, /// Evaluation of the trace polynomials at point `zeta`. pub(crate) local_z: ExtensionTarget, /// Evaluation of the trace polynomials at point `g * zeta`. @@ -890,12 +1273,12 @@ pub(crate) struct CtlCheckVarsTarget<'a, F: Field, const D: usize> { /// Cross-table lookup challenges. pub(crate) challenges: GrandProductChallenge, /// Column linear combinations of the `CrossTableLookup`s. - pub(crate) columns: &'a [Column], + pub(crate) columns: Vec>>, /// Filter that evaluates to either 1 or 0. - pub(crate) filter: &'a Option>, + pub(crate) filter: Vec>>, } -impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { +impl<'a, F: Field, const D: usize> CtlCheckVarsTarget { /// Circuit version of `from_proofs`. Extracts the `CtlCheckVarsTarget` for each STARK. pub(crate) fn from_proof( table: Table, @@ -903,6 +1286,8 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { cross_table_lookups: &'a [CrossTableLookup], ctl_challenges: &'a GrandProductChallengeSet, num_lookup_columns: usize, + total_num_helper_columns: usize, + num_helper_ctl_columns: &[usize], ) -> Vec { // Get all cross-table lookup polynomial openings for each STARK proof. 
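+        // As in `CtlCheckVars::from_proofs`, the auxiliary openings are ordered as
+        // [lookup helpers | CTL helper columns | CTL Z polynomials]; the lookup
+        // helpers are skipped below, so `start_index` and `z_index` are relative
+        // to the start of the CTL region.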
let mut ctl_zs = { @@ -912,47 +1297,145 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { .auxiliary_polys_next .iter() .skip(num_lookup_columns); - ctl_zs.zip(ctl_zs_next) + ctl_zs.zip(ctl_zs_next).collect::>() }; // Put each cross-table lookup polynomial into the correct table data: if a CTL polynomial is extracted from looking/looked table t, then we add it to the `CtlCheckVars` of table t. + let mut z_index = 0; + let mut start_index = 0; let mut ctl_vars = vec![]; - for CrossTableLookup { - looking_tables, - looked_table, - } in cross_table_lookups + for ( + i, + CrossTableLookup { + looking_tables, + looked_table, + }, + ) in cross_table_lookups.iter().enumerate() { for &challenges in &ctl_challenges.challenges { - for looking_table in looking_tables { + // Group looking tables by `Table`, since we bundle the looking tables taken from the same `Table` together thanks to helper columns. + + let count = looking_tables + .iter() + .filter(|looking_table| looking_table.table == table) + .count(); + let cols_filts = looking_tables.iter().filter_map(|looking_table| { if looking_table.table == table { - let (looking_z, looking_z_next) = ctl_zs.next().unwrap(); - ctl_vars.push(Self { - local_z: *looking_z, - next_z: *looking_z_next, - challenges, - columns: &looking_table.columns, - filter: &looking_table.filter, - }); + Some((&looking_table.columns, &looking_table.filter)) + } else { + None } + }); + if count > 0 { + let mut columns = Vec::with_capacity(count); + let mut filter = Vec::with_capacity(count); + for (col, filt) in cols_filts { + columns.push(col.clone()); + filter.push(filt.clone()); + } + let (looking_z, looking_z_next) = ctl_zs[total_num_helper_columns + z_index]; + let helper_columns = ctl_zs + [start_index..start_index + num_helper_ctl_columns[i]] + .iter() + .map(|(&h, _)| h) + .collect::>(); + + start_index += num_helper_ctl_columns[i]; + z_index += 1; + // let columns = group.0.clone(); + // let filter = group.1.clone(); + ctl_vars.push(Self { + helper_columns, + local_z: *looking_z, + next_z: *looking_z_next, + challenges, + columns, + filter, + }); } if looked_table.table == table { - let (looked_z, looked_z_next) = ctl_zs.next().unwrap(); + let (looked_z, looked_z_next) = ctl_zs[total_num_helper_columns + z_index]; + z_index += 1; + + let columns = vec![looked_table.columns.clone()]; + let filter = vec![looked_table.filter.clone()]; ctl_vars.push(Self { + helper_columns: vec![], local_z: *looked_z, next_z: *looked_z_next, challenges, - columns: &looked_table.columns, - filter: &looked_table.filter, + columns, + filter, }); } } } - assert!(ctl_zs.next().is_none()); + ctl_vars } } +/// Circuit version of `eval_helper_columns`. +/// Given data associated to a lookup (either a CTL or a range-check), check the associated helper polynomials. 
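+/// For a chunk of two combined columns `v0, v1` with filters `f0, f1`, the helper
+/// column `h = f0/v0 + f1/v1` is checked through the identity
+/// `h * v0 * v1 - f0 * v1 - f1 * v0 = 0`; a single-column chunk reduces to
+/// `h * v0 - f0 = 0`.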
+pub(crate) fn eval_helper_columns_circuit, const D: usize>( + builder: &mut CircuitBuilder, + filter: &[Option>], + columns: &[Vec>], + local_values: &[ExtensionTarget], + next_values: &[ExtensionTarget], + helper_columns: &[ExtensionTarget], + constraint_degree: usize, + challenges: &GrandProductChallenge, + consumer: &mut RecursiveConstraintConsumer, +) { + if !helper_columns.is_empty() { + for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() { + let fs = + &filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()]; + let h = helper_columns[j]; + + let one = builder.one_extension(); + match chunk.len() { + 2 => { + let combin0 = challenges.combine_circuit(builder, &chunk[0]); + let combin1 = challenges.combine_circuit(builder, &chunk[1]); + + let f0 = if let Some(filter0) = &fs[0] { + filter0.eval_filter_circuit(builder, local_values, next_values) + } else { + one + }; + let f1 = if let Some(filter1) = &fs[1] { + filter1.eval_filter_circuit(builder, local_values, next_values) + } else { + one + }; + + let constr = builder.mul_sub_extension(combin0, h, f0); + let constr = builder.mul_extension(constr, combin1); + let f1_constr = builder.mul_extension(f1, combin0); + let constr = builder.sub_extension(constr, f1_constr); + + consumer.constraint(builder, constr); + } + 1 => { + let combin = challenges.combine_circuit(builder, &chunk[0]); + let f0 = if let Some(filter1) = &fs[0] { + filter1.eval_filter_circuit(builder, local_values, next_values) + } else { + one + }; + let constr = builder.mul_sub_extension(combin, h, f0); + consumer.constraint(builder, constr); + } + + _ => todo!("Allow other constraint degrees"), + } + } + } +} + /// Circuit version of `eval_cross_table_lookup_checks`. Checks the cross-table lookup Z polynomials for each table: /// - Checks that the CTL `Z` partial sums are correctly updated. /// - Checks that the final value of the CTL sum is the combination of all STARKs' CTL polynomials. @@ -969,12 +1452,16 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit< vars: &S::EvaluationFrameTarget, ctl_vars: &[CtlCheckVarsTarget], consumer: &mut RecursiveConstraintConsumer, + constraint_degree: usize, ) { let local_values = vars.get_local_values(); let next_values = vars.get_next_values(); + let one = builder.one_extension(); + for lookup_vars in ctl_vars { let CtlCheckVarsTarget { + helper_columns, local_z, next_z, challenges, @@ -982,29 +1469,77 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit< filter, } = lookup_vars; - let one = builder.one_extension(); - let local_filter = if let Some(combin) = filter { - combin.eval_filter_circuit(builder, local_values, next_values) - } else { - one - }; - // Compute all linear combinations on the current table, and combine them using the challenge. let evals = columns .iter() - .map(|c| c.eval_with_next_circuit(builder, local_values, next_values)) + .map(|col| { + col.iter() + .map(|c| c.eval_with_next_circuit(builder, local_values, next_values)) + .collect::>() + }) .collect::>(); - let combined = challenges.combine_circuit(builder, &evals); + // Check helper columns. 
+        eval_helper_columns_circuit(
+            builder,
+            filter,
+            &evals,
+            local_values,
+            next_values,
+            helper_columns,
+            constraint_degree,
+            challenges,
+            consumer,
+        );
 
-        // Check value of `Z(g^(n-1))`
-        let last_row = builder.mul_sub_extension(*local_z, combined, local_filter);
-        consumer.constraint_last_row(builder, last_row);
-        // Check `Z(w) = Z(gw) * (filter / combination)`
         let z_diff = builder.sub_extension(*local_z, *next_z);
-        let lhs = builder.mul_extension(combined, z_diff);
-        let transition = builder.sub_extension(lhs, local_filter);
-        consumer.constraint_transition(builder, transition);
+        if !helper_columns.is_empty() {
+            // Check value of `Z(g^(n-1))`
+            let h_sum = builder.add_many_extension(helper_columns);
+
+            let last_row = builder.sub_extension(*local_z, h_sum);
+            consumer.constraint_last_row(builder, last_row);
+            // Check `Z(w) = Z(gw) + \sum h_i`
+
+            let transition = builder.sub_extension(z_diff, h_sum);
+            consumer.constraint_transition(builder, transition);
+        } else if columns.len() > 1 {
+            let combin0 = challenges.combine_circuit(builder, &evals[0]);
+            let combin1 = challenges.combine_circuit(builder, &evals[1]);
+
+            let f0 = if let Some(filter0) = &filter[0] {
+                filter0.eval_filter_circuit(builder, local_values, next_values)
+            } else {
+                one
+            };
+            let f1 = if let Some(filter1) = &filter[1] {
+                filter1.eval_filter_circuit(builder, local_values, next_values)
+            } else {
+                one
+            };
+
+            let combined = builder.mul_sub_extension(combin1, *local_z, f1);
+            let combined = builder.mul_extension(combined, combin0);
+            let constr = builder.arithmetic_extension(F::NEG_ONE, F::ONE, f0, combin1, combined);
+            consumer.constraint_last_row(builder, constr);
+
+            let combined = builder.mul_sub_extension(combin1, z_diff, f1);
+            let combined = builder.mul_extension(combined, combin0);
+            let constr = builder.arithmetic_extension(F::NEG_ONE, F::ONE, f0, combin1, combined);
+            consumer.constraint_transition(builder, constr);
+        } else {
+            let combin0 = challenges.combine_circuit(builder, &evals[0]);
+            let f0 = if let Some(filter0) = &filter[0] {
+                filter0.eval_filter_circuit(builder, local_values, next_values)
+            } else {
+                one
+            };
+
+            let constr = builder.mul_sub_extension(combin0, *local_z, f0);
+            consumer.constraint_last_row(builder, constr);
+            let constr = builder.mul_sub_extension(combin0, z_diff, f0);
+            consumer.constraint_transition(builder, constr);
+        }
     }
 }
@@ -1026,11 +1561,19 @@ pub(crate) fn verify_cross_table_lookups<F: RichField + Extendable<D>, const D:
     {
         // Get elements looking into `looked_table` that are not associated to any STARK.
         let extra_sum_vec = &ctl_extra_looking_sums[looked_table.table as usize];
+        // We want to iterate on each looking table only once.
+        let mut filtered_looking_tables = vec![];
+        for table in looking_tables {
+            if !filtered_looking_tables.contains(&(table.table as usize)) {
+                filtered_looking_tables.push(table.table as usize);
+            }
+        }
         for c in 0..config.num_challenges {
             // Compute the combination of all looking table CTL polynomial openings.
-            let looking_zs_sum = looking_tables
+
+            let looking_zs_sum = filtered_looking_tables
                 .iter()
-                .map(|table| *ctl_zs_openings[table.table as usize].next().unwrap())
+                .map(|&table| *ctl_zs_openings[table].next().unwrap())
                 .sum::<F>()
                 + extra_sum_vec[c];
@@ -1065,12 +1608,19 @@ pub(crate) fn verify_cross_table_lookups_circuit<F: RichField + Extendable<D>, c
     {
         // Get elements looking into `looked_table` that are not associated to any STARK.
         let extra_sum_vec = &ctl_extra_looking_sums[looked_table.table as usize];
+        // We want to iterate on each looking table only once.
+ let mut filtered_looking_tables = vec![]; + for table in looking_tables { + if !filtered_looking_tables.contains(&(table.table as usize)) { + filtered_looking_tables.push(table.table as usize); + } + } for c in 0..inner_config.num_challenges { // Compute the combination of all looking table CTL polynomial openings. let mut looking_zs_sum = builder.add_many( - looking_tables + filtered_looking_tables .iter() - .map(|table| *ctl_zs_openings[table.table as usize].next().unwrap()), + .map(|&table| *ctl_zs_openings[table].next().unwrap()), ); looking_zs_sum = builder.add(looking_zs_sum, extra_sum_vec[c]); diff --git a/evm/src/keccak/keccak_stark.rs b/evm/src/keccak/keccak_stark.rs index b3e31093..63098e79 100644 --- a/evm/src/keccak/keccak_stark.rs +++ b/evm/src/keccak/keccak_stark.rs @@ -740,13 +740,14 @@ mod tests { // Fake CTL data. let ctl_z_data = CtlZData { + helper_columns: vec![PolynomialValues::zero(degree)], z: PolynomialValues::zero(degree), challenge: GrandProductChallenge { beta: F::ZERO, gamma: F::ZERO, }, columns: vec![], - filter: Some(Filter::new_simple(Column::constant(F::ZERO))), + filter: vec![Some(Filter::new_simple(Column::constant(F::ZERO)))], }; let ctl_data = CtlData { zs_columns: vec![ctl_z_data.clone(); config.num_challenges], diff --git a/evm/src/lookup.rs b/evm/src/lookup.rs index 8bf0c8f6..4b735c64 100644 --- a/evm/src/lookup.rs +++ b/evm/src/lookup.rs @@ -12,7 +12,10 @@ use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2_util::ceil_div_usize; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::cross_table_lookup::{Column, Filter}; +use crate::cross_table_lookup::{ + eval_helper_columns, eval_helper_columns_circuit, get_helper_cols, Column, Filter, + GrandProductChallenge, +}; use crate::evaluation_frame::StarkEvaluationFrame; use crate::stark::Stark; @@ -64,48 +67,38 @@ pub(crate) fn lookup_helper_columns( let num_helper_columns = lookup.num_helper_columns(constraint_degree); let mut helper_columns: Vec> = Vec::with_capacity(num_helper_columns); + let looking_cols = lookup + .columns + .iter() + .map(|col| vec![col.clone()]) + .collect::>>>(); + + let grand_challenge = GrandProductChallenge { + beta: F::ONE, + gamma: challenge, + }; + + let columns_filters = looking_cols + .iter() + .zip(lookup.filter_columns.iter()) + .map(|(col, filter)| (&col[..], filter)) + .collect::>(); // For each batch of `constraint_degree-1` columns `fi`, compute `sum 1/(f_i+challenge)` and // add it to the helper columns. - // TODO: This does one batch inversion per column. It would also be possible to do one batch inversion - // for every group of columns, but that would require building a big vector of all the columns concatenated. - // Not sure which approach is better. // Note: these are the h_k(x) polynomials in the paper, with a few differences: // * Here, the first ratio m_0(x)/phi_0(x) is not included with the columns batched up to create the // h_k polynomials; instead there's a separate helper column for it (see below). // * Here, we use 1 instead of -1 as the numerator (and subtract later). // * Here, for now, the batch size (l) is always constraint_degree - 1 = 2. 
- for (i, mut col_inds) in (&lookup.columns.iter().chunks(constraint_degree - 1)) - .into_iter() - .enumerate() - { - let first = col_inds.next().unwrap(); - - let mut column = first.eval_all_rows(trace_poly_values); - let length = column.len(); - - for x in column.iter_mut() { - *x = challenge + *x; - } - let mut acc = F::batch_multiplicative_inverse(&column); - if let Some(filter) = &lookup.filter_columns[0] { - batch_multiply_inplace(&mut acc, &filter.eval_all_rows(trace_poly_values)); - } - - for (j, ind) in col_inds.enumerate() { - let mut column = ind.eval_all_rows(trace_poly_values); - for x in column.iter_mut() { - *x = challenge + *x; - } - column = F::batch_multiplicative_inverse(&column); - let filter_idx = (constraint_degree - 1) * i + j + 1; - if let Some(filter) = &lookup.filter_columns[filter_idx] { - batch_multiply_inplace(&mut column, &filter.eval_all_rows(trace_poly_values)); - } - batch_add_inplace(&mut acc, &column); - } - - helper_columns.push(acc.into()); - } + // * Here, there are filters for the columns, to only select some rows + // in a given column. + let mut helper_columns = get_helper_cols( + trace_poly_values, + trace_poly_values[0].len(), + &columns_filters, + grand_challenge, + constraint_degree, + ); // Add `1/(table+challenge)` to the helper columns. // This is 1/phi_0(x) = 1/(x + t(x)) from the paper. @@ -168,38 +161,30 @@ pub(crate) fn eval_packed_lookups_generic>>(); + // For each chunk, check that `h_i (x+f_2i) (x+f_{2i+1}) = (x+f_2i) * filter_{2i+1} + (x+f_{2i+1}) * filter_2i` if the chunk has length 2 // or if it has length 1, check that `h_i * (x+f_2i) = filter_2i`, where x is the challenge - for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() { - let mut x = lookup_vars.local_values[start + j]; - let mut y = P::ZEROS; - let col_values = chunk - .iter() - .map(|col| col.eval_with_next(local_values, next_values)); - let filters = lookup.filter_columns - [(degree - 1) * j..(degree - 1) * j + chunk.len()] - .iter() - .map(|maybe_filter| { - if let Some(filter) = maybe_filter { - filter.eval_filter(local_values, next_values) - } else { - P::ONES - } - }) - .rev() - .collect::>(); - let last_filter_value = filters[0]; - for (val, f) in col_values.zip_eq(filters) { - x *= val + challenge; - y += (val + challenge) * f; - } - match chunk.len() { - 2 => yield_constr.constraint(x - y), - 1 => yield_constr.constraint(x - last_filter_value), - _ => todo!("Allow other constraint degrees."), - } - } + eval_helper_columns( + &lookup.filter_columns, + &lookup_columns, + local_values, + next_values, + &lookup_vars.local_values[start..start + num_helper_columns - 1], + degree, + &grand_challenge, + yield_constr, + ); + + let challenge = FE::from_basefield(challenge); // Check the `Z` polynomial. 
let z = lookup_vars.local_values[start + num_helper_columns - 1]; @@ -245,45 +230,30 @@ pub(crate) fn eval_ext_lookups_circuit< let mut start = 0; for lookup in lookups { let num_helper_columns = lookup.num_helper_columns(degree); + let col_values = lookup + .columns + .iter() + .map(|col| vec![col.eval_with_next_circuit(builder, local_values, next_values)]) + .collect::>(); + for &challenge in &lookup_vars.challenges { + let grand_challenge = GrandProductChallenge { + beta: builder.one(), + gamma: challenge, + }; + + eval_helper_columns_circuit( + builder, + &lookup.filter_columns, + &col_values, + local_values, + next_values, + &lookup_vars.local_values[start..start + num_helper_columns - 1], + degree, + &grand_challenge, + yield_constr, + ); let challenge = builder.convert_to_ext(challenge); - for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() { - let mut x = lookup_vars.local_values[start + j]; - let mut y = builder.zero_extension(); - let col_values = chunk - .iter() - .map(|k| k.eval_with_next_circuit(builder, local_values, next_values)) - .collect::>(); - let filters = lookup.filter_columns - [(degree - 1) * j..(degree - 1) * j + chunk.len()] - .iter() - .map(|maybe_filter| { - if let Some(filter) = maybe_filter { - filter.eval_filter_circuit(builder, local_values, next_values) - } else { - one - } - }) - .rev() - .collect::>(); - let last_filter_value = filters[0]; - for (&val, f) in col_values.iter().zip_eq(filters) { - let tmp = builder.add_extension(val, challenge); - x = builder.mul_extension(x, tmp); - y = builder.mul_add_extension(f, tmp, y); - } - match chunk.len() { - 2 => { - let tmp = builder.sub_extension(x, y); - yield_constr.constraint(builder, tmp) - } - 1 => { - let tmp = builder.sub_extension(x, last_filter_value); - yield_constr.constraint(builder, tmp) - } - _ => todo!("Allow other constraint degrees."), - } - } let z = lookup_vars.local_values[start + num_helper_columns - 1]; let next_z = lookup_vars.next_values[start + num_helper_columns - 1]; diff --git a/evm/src/proof.rs b/evm/src/proof.rs index 3768f98f..4cc18677 100644 --- a/evm/src/proof.rs +++ b/evm/src/proof.rs @@ -974,7 +974,10 @@ impl, const D: usize> StarkOpeningSet { auxiliary_polys_commitment: &PolynomialBatch, quotient_commitment: &PolynomialBatch, num_lookup_columns: usize, + num_ctl_polys: &[usize], ) -> Self { + let total_num_helper_cols: usize = num_ctl_polys.iter().sum(); + // Batch evaluates polynomials on the LDE, at a point `z`. let eval_commitment = |z: F::Extension, c: &PolynomialBatch| { c.polynomials @@ -989,6 +992,9 @@ impl, const D: usize> StarkOpeningSet { .map(|p| p.eval(z)) .collect::>() }; + + let auxiliary_first = eval_commitment_base(F::ONE, auxiliary_polys_commitment); + let ctl_zs_first = auxiliary_first[num_lookup_columns + total_num_helper_cols..].to_vec(); // `g * zeta`. let zeta_next = zeta.scalar_mul(g); Self { @@ -996,9 +1002,7 @@ impl, const D: usize> StarkOpeningSet { next_values: eval_commitment(zeta_next, trace_commitment), auxiliary_polys: eval_commitment(zeta, auxiliary_polys_commitment), auxiliary_polys_next: eval_commitment(zeta_next, auxiliary_polys_commitment), - ctl_zs_first: eval_commitment_base(F::ONE, auxiliary_polys_commitment) - [num_lookup_columns..] - .to_vec(), + ctl_zs_first, quotient_polys: eval_commitment(zeta, quotient_commitment), } } @@ -1051,7 +1055,7 @@ pub(crate) struct StarkOpeningSetTarget { pub auxiliary_polys: Vec>, /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `g * zeta`. 
pub auxiliary_polys_next: Vec>, - /// /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at 1. + /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at 1. pub ctl_zs_first: Vec, /// `ExtensionTarget`s for the opening of quotient polynomials at `zeta`. pub quotient_polys: Vec>, diff --git a/evm/src/prover.rs b/evm/src/prover.rs index c90490b8..e6426023 100644 --- a/evm/src/prover.rs +++ b/evm/src/prover.rs @@ -135,6 +135,7 @@ where &trace_poly_values, &all_stark.cross_table_lookups, &ctl_challenges, + all_stark.arithmetic_stark.constraint_degree() ) ); @@ -375,9 +376,14 @@ where // We add CTLs to the permutation arguments so that we can batch commit to // all auxiliary polynomials. let auxiliary_polys = match lookup_helper_columns { - None => ctl_data.z_polys(), + None => { + let mut ctl_polys = ctl_data.ctl_helper_polys(); + ctl_polys.extend(ctl_data.ctl_z_polys()); + ctl_polys + } Some(mut lookup_columns) => { - lookup_columns.extend(ctl_data.z_polys()); + lookup_columns.extend(ctl_data.ctl_helper_polys()); + lookup_columns.extend(ctl_data.ctl_z_polys()); lookup_columns } }; @@ -402,6 +408,8 @@ where let alphas = challenger.get_n_challenges(config.num_challenges); + let num_ctl_polys = ctl_data.num_ctl_helper_polys(); + #[cfg(test)] { check_constraints( @@ -414,6 +422,7 @@ where alphas.clone(), degree_bits, num_lookup_columns, + &num_ctl_polys, ); } @@ -432,6 +441,7 @@ where alphas, degree_bits, num_lookup_columns, + &num_ctl_polys, config, ) ); @@ -486,6 +496,7 @@ where &auxiliary_polys_commitment, "ient_commitment, stark.num_lookup_helper_columns(config), + &num_ctl_polys, ); // Get the FRI openings and observe them. challenger.observe_openings(&openings.to_fri_openings()); @@ -502,7 +513,7 @@ where timing, "compute openings proof", PolynomialBatch::prove_openings( - &stark.fri_instance(zeta, g, ctl_data.len(), config), + &stark.fri_instance(zeta, g, num_ctl_polys.iter().sum(), num_ctl_polys, config), &initial_merkle_trees, challenger, &fri_params, @@ -535,6 +546,7 @@ fn compute_quotient_polys<'a, F, P, C, S, const D: usize>( alphas: Vec, degree_bits: usize, num_lookup_columns: usize, + num_ctl_columns: &[usize], config: &StarkConfig, ) -> Vec> where @@ -545,6 +557,7 @@ where { let degree = 1 << degree_bits; let rate_bits = config.fri_config.rate_bits; + let total_num_helper_cols: usize = num_ctl_columns.iter().sum(); let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor()); assert!( @@ -616,18 +629,35 @@ where // - for each CTL: // - the filter `Column` // - the `Column`s that form the looking/looked table. 
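+        // For CTL `i`, the helper openings sit at indices
+        // `num_lookup_columns + start_index..num_lookup_columns + start_index + num_ctl_columns[i]`
+        // of the auxiliary oracle, and its Z openings at
+        // `num_lookup_columns + total_num_helper_cols + i`.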
+ + let mut start_index = 0; let ctl_vars = ctl_data .zs_columns .iter() .enumerate() - .map(|(i, zs_columns)| CtlCheckVars:: { - local_z: auxiliary_polys_commitment.get_lde_values_packed(i_start, step) - [num_lookup_columns + i], - next_z: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step) - [num_lookup_columns + i], - challenges: zs_columns.challenge, - columns: &zs_columns.columns, - filter: &zs_columns.filter, + .map(|(i, zs_columns)| { + let num_ctl_helper_cols = num_ctl_columns[i]; + let helper_columns = auxiliary_polys_commitment + .get_lde_values_packed(i_start, step)[num_lookup_columns + + start_index + ..num_lookup_columns + start_index + num_ctl_helper_cols] + .to_vec(); + + let ctl_vars = CtlCheckVars:: { + helper_columns, + local_z: auxiliary_polys_commitment.get_lde_values_packed(i_start, step) + [num_lookup_columns + total_num_helper_cols + i], + next_z: auxiliary_polys_commitment + .get_lde_values_packed(i_next_start, step) + [num_lookup_columns + total_num_helper_cols + i], + challenges: zs_columns.challenge, + columns: zs_columns.columns.clone(), + filter: zs_columns.filter.clone(), + }; + + start_index += num_ctl_helper_cols; + + ctl_vars }) .collect::>(); @@ -691,6 +721,7 @@ fn check_constraints<'a, F, C, S, const D: usize>( alphas: Vec, degree_bits: usize, num_lookup_columns: usize, + num_ctl_helper_cols: &[usize], ) where F: RichField + Extendable, C: GenericConfig, @@ -699,6 +730,8 @@ fn check_constraints<'a, F, C, S, const D: usize>( let degree = 1 << degree_bits; let rate_bits = 0; // Set this to higher value to check constraint degree. + let total_num_helper_cols: usize = num_ctl_helper_cols.iter().sum(); + let size = degree << rate_bits; let step = 1 << rate_bits; @@ -754,18 +787,34 @@ fn check_constraints<'a, F, C, S, const D: usize>( }); // Get the local and next row evaluations for the current STARK's CTL Z polynomials. + let mut start_index = 0; let ctl_vars = ctl_data .zs_columns .iter() .enumerate() - .map(|(iii, zs_columns)| CtlCheckVars:: { - local_z: auxiliary_subgroup_evals[i][num_lookup_columns + iii], - next_z: auxiliary_subgroup_evals[i_next][num_lookup_columns + iii], - challenges: zs_columns.challenge, - columns: &zs_columns.columns, - filter: &zs_columns.filter, + .map(|(iii, zs_columns)| { + let num_helper_cols = num_ctl_helper_cols[iii]; + let helper_columns = auxiliary_subgroup_evals[i][num_lookup_columns + + start_index + ..num_lookup_columns + start_index + num_helper_cols] + .to_vec(); + let ctl_vars = CtlCheckVars:: { + helper_columns, + local_z: auxiliary_subgroup_evals[i] + [num_lookup_columns + total_num_helper_cols + iii], + next_z: auxiliary_subgroup_evals[i_next] + [num_lookup_columns + total_num_helper_cols + iii], + challenges: zs_columns.challenge, + columns: zs_columns.columns.clone(), + filter: zs_columns.filter.clone(), + }; + + start_index += num_helper_cols; + + ctl_vars }) .collect::>(); + // Evaluate the polynomial combining all constraints, including those associated // to the permutation and CTL arguments. 
eval_vanishing_poly::( diff --git a/evm/src/recursive_verifier.rs b/evm/src/recursive_verifier.rs index 5c10e3b3..8d10b498 100644 --- a/evm/src/recursive_verifier.rs +++ b/evm/src/recursive_verifier.rs @@ -229,10 +229,24 @@ where let zero_target = builder.zero(); let num_lookup_columns = stark.num_lookup_helper_columns(inner_config); - let num_ctl_zs = - CrossTableLookup::num_ctl_zs(cross_table_lookups, table, inner_config.num_challenges); - let proof_target = - add_virtual_stark_proof(&mut builder, stark, inner_config, degree_bits, num_ctl_zs); + let (total_num_helpers, num_ctl_zs, num_helpers_by_ctl) = + CrossTableLookup::num_ctl_helpers_zs_all( + cross_table_lookups, + table, + inner_config.num_challenges, + stark.constraint_degree(), + ); + let num_ctl_helper_zs = num_ctl_zs + total_num_helpers; + + let proof_target = add_virtual_stark_proof( + &mut builder, + stark, + inner_config, + degree_bits, + num_ctl_helper_zs, + num_ctl_zs, + ); + builder.register_public_inputs( &proof_target .trace_cap @@ -257,6 +271,8 @@ where cross_table_lookups, &ctl_challenges_target, num_lookup_columns, + total_num_helpers, + &num_helpers_by_ctl, ); let init_challenger_state_target = @@ -330,6 +346,12 @@ fn verify_stark_proof_with_challenges_circuit< let zero = builder.zero(); let one = builder.one_extension(); + let num_ctl_polys = ctl_vars + .iter() + .map(|ctl| ctl.helper_columns.len()) + .sum::(); + let num_ctl_z_polys = ctl_vars.len(); + let StarkOpeningSetTarget { local_values, next_values, @@ -407,6 +429,7 @@ fn verify_stark_proof_with_challenges_circuit< builder, challenges.stark_zeta, F::primitive_root_of_unity(degree_bits), + num_ctl_polys, ctl_zs_first.len(), inner_config, ); @@ -759,6 +782,7 @@ pub(crate) fn add_virtual_stark_proof< stark: &S, config: &StarkConfig, degree_bits: usize, + num_ctl_helper_zs: usize, num_ctl_zs: usize, ) -> StarkProofTarget { let fri_params = config.fri_params(degree_bits); @@ -766,7 +790,7 @@ pub(crate) fn add_virtual_stark_proof< let num_leaves_per_oracle = vec![ S::COLUMNS, - stark.num_lookup_helper_columns(config) + num_ctl_zs, + stark.num_lookup_helper_columns(config) + num_ctl_helper_zs, stark.quotient_degree_factor() * config.num_challenges, ]; @@ -776,7 +800,13 @@ pub(crate) fn add_virtual_stark_proof< trace_cap: builder.add_virtual_cap(cap_height), auxiliary_polys_cap, quotient_polys_cap: builder.add_virtual_cap(cap_height), - openings: add_virtual_stark_opening_set::(builder, stark, num_ctl_zs, config), + openings: add_virtual_stark_opening_set::( + builder, + stark, + num_ctl_helper_zs, + num_ctl_zs, + config, + ), opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params), } } @@ -784,6 +814,7 @@ pub(crate) fn add_virtual_stark_proof< fn add_virtual_stark_opening_set, S: Stark, const D: usize>( builder: &mut CircuitBuilder, stark: &S, + num_ctl_helper_zs: usize, num_ctl_zs: usize, config: &StarkConfig, ) -> StarkOpeningSetTarget { @@ -791,10 +822,12 @@ fn add_virtual_stark_opening_set, S: Stark, c StarkOpeningSetTarget { local_values: builder.add_virtual_extension_targets(S::COLUMNS), next_values: builder.add_virtual_extension_targets(S::COLUMNS), - auxiliary_polys: builder - .add_virtual_extension_targets(stark.num_lookup_helper_columns(config) + num_ctl_zs), - auxiliary_polys_next: builder - .add_virtual_extension_targets(stark.num_lookup_helper_columns(config) + num_ctl_zs), + auxiliary_polys: builder.add_virtual_extension_targets( + stark.num_lookup_helper_columns(config) + num_ctl_helper_zs, + ), + auxiliary_polys_next: 
builder.add_virtual_extension_targets( + stark.num_lookup_helper_columns(config) + num_ctl_helper_zs, + ), ctl_zs_first: builder.add_virtual_targets(num_ctl_zs), quotient_polys: builder .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges), diff --git a/evm/src/stark.rs b/evm/src/stark.rs index 6099abdd..6ee26f58 100644 --- a/evm/src/stark.rs +++ b/evm/src/stark.rs @@ -92,7 +92,8 @@ pub trait Stark, const D: usize>: Sync { &self, zeta: F::Extension, g: F, - num_ctl_zs: usize, + num_ctl_helpers: usize, + num_ctl_zs: Vec, config: &StarkConfig, ) -> FriInstanceInfo { let trace_oracle = FriOracleInfo { @@ -102,7 +103,7 @@ pub trait Stark, const D: usize>: Sync { let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS); let num_lookup_columns = self.num_lookup_helper_columns(config); - let num_auxiliary_polys = num_lookup_columns + num_ctl_zs; + let num_auxiliary_polys = num_lookup_columns + num_ctl_helpers + num_ctl_zs.len(); let auxiliary_oracle = FriOracleInfo { num_polys: num_auxiliary_polys, blinding: false, @@ -110,9 +111,10 @@ pub trait Stark, const D: usize>: Sync { let auxiliary_polys_info = FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys); + let mut start_index = num_lookup_columns; let ctl_zs_info = FriPolynomialInfo::from_range( AUXILIARY_ORACLE_INDEX, - num_lookup_columns..num_lookup_columns + num_ctl_zs, + num_lookup_columns + num_ctl_helpers..num_auxiliary_polys, ); let num_quotient_polys = self.num_quotient_polys(config); @@ -152,6 +154,7 @@ pub trait Stark, const D: usize>: Sync { builder: &mut CircuitBuilder, zeta: ExtensionTarget, g: F, + num_ctl_helper_polys: usize, num_ctl_zs: usize, inner_config: &StarkConfig, ) -> FriInstanceInfoTarget { @@ -162,7 +165,7 @@ pub trait Stark, const D: usize>: Sync { let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS); let num_lookup_columns = self.num_lookup_helper_columns(inner_config); - let num_auxiliary_polys = num_lookup_columns + num_ctl_zs; + let num_auxiliary_polys = num_lookup_columns + num_ctl_helper_polys + num_ctl_zs; let auxiliary_oracle = FriOracleInfo { num_polys: num_auxiliary_polys, blinding: false, @@ -172,7 +175,8 @@ pub trait Stark, const D: usize>: Sync { let ctl_zs_info = FriPolynomialInfo::from_range( AUXILIARY_ORACLE_INDEX, - num_lookup_columns..num_lookup_columns + num_ctl_zs, + num_lookup_columns + num_ctl_helper_polys + ..num_lookup_columns + num_ctl_helper_polys + num_ctl_zs, ); let num_quotient_polys = self.num_quotient_polys(inner_config); diff --git a/evm/src/vanishing_poly.rs b/evm/src/vanishing_poly.rs index 15444c7e..c1f2d0f9 100644 --- a/evm/src/vanishing_poly.rs +++ b/evm/src/vanishing_poly.rs @@ -42,7 +42,12 @@ pub(crate) fn eval_vanishing_poly( ); } // Evaluate the STARK constraints related to the cross-table lookups. - eval_cross_table_lookup_checks::(vars, ctl_vars, consumer); + eval_cross_table_lookup_checks::( + vars, + ctl_vars, + consumer, + stark.constraint_degree(), + ); } /// Circuit version of `eval_vanishing_poly`. @@ -66,5 +71,11 @@ pub(crate) fn eval_vanishing_poly_circuit( eval_ext_lookups_circuit::(builder, stark, vars, lookup_vars, consumer); } // Evaluate all of the STARK's constraints related to the cross-table lookups. 
- eval_cross_table_lookup_checks_circuit::(builder, vars, ctl_vars, consumer); + eval_cross_table_lookup_checks_circuit::( + builder, + vars, + ctl_vars, + consumer, + stark.constraint_degree(), + ); } diff --git a/evm/src/verifier.rs b/evm/src/verifier.rs index b69cd927..9eb12100 100644 --- a/evm/src/verifier.rs +++ b/evm/src/verifier.rs @@ -16,7 +16,8 @@ use crate::constraint_consumer::ConstraintConsumer; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; use crate::cross_table_lookup::{ - verify_cross_table_lookups, CtlCheckVars, GrandProductChallenge, GrandProductChallengeSet, + num_ctl_helper_columns_by_table, verify_cross_table_lookups, CtlCheckVars, + GrandProductChallenge, GrandProductChallengeSet, }; use crate::evaluation_frame::StarkEvaluationFrame; use crate::lookup::LookupCheckVars; @@ -56,11 +57,17 @@ where cross_table_lookups, } = all_stark; + let num_ctl_helper_cols = num_ctl_helper_columns_by_table( + cross_table_lookups, + all_stark.arithmetic_stark.constraint_degree(), + ); + let ctl_vars_per_table = CtlCheckVars::from_proofs( &all_proof.stark_proofs, cross_table_lookups, &ctl_challenges, &num_lookup_columns, + &num_ctl_helper_cols, ); verify_stark_proof_with_challenges( @@ -300,7 +307,12 @@ pub(crate) fn verify_stark_proof_with_challenges< config: &StarkConfig, ) -> Result<()> { log::debug!("Checking proof: {}", type_name::()); - validate_proof_shape(stark, proof, config, ctl_vars.len())?; + let num_ctl_polys = ctl_vars + .iter() + .map(|ctl| ctl.helper_columns.len()) + .sum::(); + let num_ctl_z_polys = ctl_vars.len(); + validate_proof_shape(stark, proof, config, num_ctl_polys, num_ctl_z_polys)?; let StarkOpeningSet { local_values, next_values, @@ -374,11 +386,16 @@ pub(crate) fn verify_stark_proof_with_challenges< proof.quotient_polys_cap.clone(), ]; + let num_ctl_zs = ctl_vars + .iter() + .map(|ctl| ctl.helper_columns.len()) + .collect::>(); verify_fri_proof::( &stark.fri_instance( challenges.stark_zeta, F::primitive_root_of_unity(degree_bits), - ctl_zs_first.len(), + num_ctl_polys, + num_ctl_zs, config, ), &proof.openings.to_fri_openings(), @@ -395,6 +412,7 @@ fn validate_proof_shape( stark: &S, proof: &StarkProof, config: &StarkConfig, + num_ctl_helpers: usize, num_ctl_zs: usize, ) -> anyhow::Result<()> where @@ -424,7 +442,8 @@ where let degree_bits = proof.recover_degree_bits(config); let fri_params = config.fri_params(degree_bits); let cap_height = fri_params.config.cap_height; - let num_auxiliary = num_ctl_zs + stark.num_lookup_helper_columns(config); + + let num_auxiliary = num_ctl_helpers + stark.num_lookup_helper_columns(config) + num_ctl_zs; ensure!(trace_cap.height() == cap_height); ensure!(auxiliary_polys_cap.height() == cap_height);
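To make the bundled sum-check concrete, here is a minimal standalone sketch of the arithmetic this patch introduces, written over the Goldilocks prime with plain `u128` modular arithmetic rather than the crate's `Field` and `PolynomialValues` types; `helper_column`, `z_poly` and the modular helpers below are local to this sketch, not part of the patch or of the plonky2 API.

```rust
// Goldilocks prime, the base field plonky2 operates over.
const P: u128 = 0xFFFF_FFFF_0000_0001;

/// Modular exponentiation by squaring.
fn pow_mod(mut b: u128, mut e: u128) -> u128 {
    b %= P;
    let mut acc = 1u128;
    while e > 0 {
        if e & 1 == 1 {
            acc = acc * b % P;
        }
        b = b * b % P;
        e >>= 1;
    }
    acc
}

/// Multiplicative inverse via Fermat's little theorem. The real code
/// batch-inverts and asserts binary filters; here `v` is assumed nonzero
/// wherever its filter is 1.
fn inv(x: u128) -> u128 {
    pow_mod(x, P - 2)
}

/// One helper column for a chunk of at most `constraint_degree - 1 = 2`
/// (challenge-combined column, binary filter) pairs: h(r) = sum_i f_i(r) / v_i(r).
fn helper_column(chunk: &[(Vec<u128>, Vec<u128>)]) -> Vec<u128> {
    let degree = chunk[0].0.len();
    (0..degree)
        .map(|r| {
            chunk
                .iter()
                .map(|(v, f)| if f[r] == 0 { 0 } else { f[r] * inv(v[r]) % P })
                .fold(0, |acc, t| (acc + t) % P)
        })
        .collect()
}

/// The running-sum polynomial, built from the last row upwards, matching the
/// constraints `Z(g^(n-1)) = sum_i h_i(g^(n-1))` and `Z(w) = Z(g w) + sum_i h_i(w)`.
fn z_poly(helpers: &[Vec<u128>]) -> Vec<u128> {
    let degree = helpers[0].len();
    let mut z = vec![0u128; degree];
    for r in (0..degree).rev() {
        let row_sum = helpers.iter().fold(0, |acc, h| (acc + h[r]) % P);
        z[r] = if r + 1 < degree { (z[r + 1] + row_sum) % P } else { row_sum };
    }
    z
}

fn main() {
    // Three looking tables bundled from one STARK, already challenge-combined,
    // over a trace of length 4, with all rows selected by the filters.
    let v: Vec<Vec<u128>> = vec![vec![2, 3, 4, 5], vec![7, 8, 9, 10], vec![11, 12, 13, 14]];
    let f = vec![1u128; 4];
    // ceil(3 / (3 - 1)) = 2 helper columns: the chunks {v0, v1} and {v2}.
    let helpers = vec![
        helper_column(&[(v[0].clone(), f.clone()), (v[1].clone(), f.clone())]),
        helper_column(&[(v[2].clone(), f.clone())]),
    ];
    // z[0] is the opening at 1 (`ctl_zs_first`) that the verifier sums across
    // looking tables and compares against the looked table's own opening.
    let z = z_poly(&helpers);
    println!("helper columns: {helpers:?}\nZ: {z:?}");
}
```

Where the pre-bundling code committed to one `Z` column per looking table and challenge, the bundled version above commits to two helper columns and a single `Z` per challenge; the saving grows with the number of looking tables drawn from the same STARK.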