diff --git a/evm/spec/framework.tex b/evm/spec/framework.tex index 1d5c4f74..8fc6c2f8 100644 --- a/evm/spec/framework.tex +++ b/evm/spec/framework.tex @@ -36,12 +36,14 @@ The various STARK tables carry out independent operations, but on shared values. Suppose STARK $S_1$ requires an operation -- say $Op$ -- that is carried out by another STARK $S_2$. Then $S_1$ writes the input and output of $Op$ in its own table, and provides the inputs to $S_2$. $S_2$ also writes the inputs and outputs in its rows, and the table's constraints check that $Op$ is carried out correctly. We then need to ensure that the inputs and outputs are the same in $S_1$ and $S_2$. -In other words, we need to ensure that the rows -- reduced to the input and output columns -- of $S_1$ calling $Op$ are permutations of the rows of $S_2$ that carry out $Op$. Our CTL protocol is also based on logUp and is similar to our range-checks. +In other words, we need to ensure that the rows -- reduced to the input and output columns -- of $S_1$ calling $Op$ are permutations of the rows of $S_2$ that carry out $Op$. Our CTL protocol is based on logUp and is similar to our range-checks. To prove this, the first step is to only select the rows of interest in $S_1$ and $S_2$, and filter out the rest. Let $f^1$ be the filter for $S_1$ and $f^2$ the filter for $S_2$. $f^1$ and $f^2$ are constrained to be in $\{0, 1\}$. $f^1 = 1$ (resp. $f^2 = 1$) whenever the row at hand carries out $Op$ in $S_1$ (resp. in $S_2$), and 0 otherwise. Let also $(\alpha, \beta)$ be two random challenges. The idea is to create subtables $S_1'$ and $S_2'$ of $S_1$ and $S_2$ respectively, such that $f^1 = 1$ and $f^2 = 1$ for all their rows. The columns in the subtables are limited to the ones whose values must be identical (the inputs and outputs of $Op$ in our example). +Note that for design and constraint reasons, filters are limited to (at most) degree 2 combinations of columns. 
+ Let $\{c^{1, i}\}_{i=1}^m$ be the columns in $S_1'$ and $\{c^{2,i}\}_{i=1}^m$ be the columns in $S_2'$. The prover defines a ``running sum'' $Z$ for $S_1'$ such that: diff --git a/evm/spec/zkevm.pdf b/evm/spec/zkevm.pdf index 81d8e518..c6a44444 100644 Binary files a/evm/spec/zkevm.pdf and b/evm/spec/zkevm.pdf differ diff --git a/evm/src/arithmetic/arithmetic_stark.rs b/evm/src/arithmetic/arithmetic_stark.rs index 7052f7ba..6b0c9fa5 100644 --- a/evm/src/arithmetic/arithmetic_stark.rs +++ b/evm/src/arithmetic/arithmetic_stark.rs @@ -17,7 +17,7 @@ use crate::all_stark::Table; use crate::arithmetic::columns::{RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS}; use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation}; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::cross_table_lookup::{Column, TableWithColumns}; +use crate::cross_table_lookup::{Column, Filter, TableWithColumns}; use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; use crate::lookup::Lookup; use crate::stark::Stark; @@ -98,7 +98,9 @@ pub(crate) fn ctl_arithmetic_rows() -> TableWithColumns { let mut filter_cols = COMBINED_OPS.to_vec(); filter_cols.push((columns::IS_RANGE_CHECK, 0x01)); - let filter_column = Some(Column::sum(filter_cols.iter().map(|(c, _v)| *c))); + let filter = Some(Filter::new_simple(Column::sum( + filter_cols.iter().map(|(c, _v)| *c), + ))); let mut all_combined_cols = COMBINED_OPS.to_vec(); all_combined_cols.push((columns::OPCODE_COL, 0x01)); @@ -110,7 +112,7 @@ pub(crate) fn ctl_arithmetic_rows() -> TableWithColumns { TableWithColumns::new( Table::Arithmetic, cpu_arith_data_link(&all_combined_cols, &REGISTER_MAP), - filter_column, + filter, ) } diff --git a/evm/src/byte_packing/byte_packing_stark.rs b/evm/src/byte_packing/byte_packing_stark.rs index e9dcd01e..1e1a667d 100644 --- a/evm/src/byte_packing/byte_packing_stark.rs +++ b/evm/src/byte_packing/byte_packing_stark.rs @@ -45,7 +45,7 @@ use 
crate::byte_packing::columns::{ NUM_COLUMNS, RANGE_COUNTER, RC_FREQUENCIES, TIMESTAMP, }; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::cross_table_lookup::Column; +use crate::cross_table_lookup::{Column, Filter}; use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; use crate::lookup::Lookup; use crate::stark::Stark; @@ -84,10 +84,10 @@ pub(crate) fn ctl_looked_data() -> Vec> { } /// CTL filter for the `BytePackingStark` looked table. -pub(crate) fn ctl_looked_filter() -> Column { +pub(crate) fn ctl_looked_filter() -> Filter { // The CPU table is only interested in our sequence end rows, // since those contain the final limbs of our packed int. - Column::sum((0..NUM_BYTES).map(index_len)) + Filter::new_simple(Column::sum((0..NUM_BYTES).map(index_len))) } /// Column linear combination for the `BytePackingStark` table reading/writing the `i`th byte sequence from `MemoryStark`. @@ -119,8 +119,8 @@ pub(crate) fn ctl_looking_memory(i: usize) -> Vec> { } /// CTL filter for reading/writing the `i`th byte of the byte sequence from/to memory. -pub(crate) fn ctl_looking_memory_filter(i: usize) -> Column { - Column::sum((i..NUM_BYTES).map(index_len)) +pub(crate) fn ctl_looking_memory_filter(i: usize) -> Filter { + Filter::new_simple(Column::sum((i..NUM_BYTES).map(index_len))) } /// Information about a byte packing operation needed for witness generation. 
diff --git a/evm/src/cpu/cpu_stark.rs b/evm/src/cpu/cpu_stark.rs index b2ff3c6e..1b89d5af 100644 --- a/evm/src/cpu/cpu_stark.rs +++ b/evm/src/cpu/cpu_stark.rs @@ -19,7 +19,7 @@ use crate::cpu::{ byte_unpacking, clock, contextops, control_flow, decode, dup_swap, gas, jumps, membus, memio, modfp254, pc, push0, shift, simple_logic, stack, syscalls_exceptions, }; -use crate::cross_table_lookup::{Column, TableWithColumns}; +use crate::cross_table_lookup::{Column, Filter, TableWithColumns}; use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; use crate::memory::segments::Segment; use crate::memory::{NUM_CHANNELS, VALUE_LIMBS}; @@ -48,8 +48,8 @@ pub(crate) fn ctl_data_keccak_sponge() -> Vec> { } /// CTL filter for a call to the Keccak sponge. -pub(crate) fn ctl_filter_keccak_sponge() -> Column { - Column::single(COL_MAP.is_keccak_sponge) +pub(crate) fn ctl_filter_keccak_sponge() -> Filter { + Filter::new_simple(Column::single(COL_MAP.is_keccak_sponge)) } /// Creates the vector of `Columns` corresponding to the two inputs and @@ -82,8 +82,8 @@ pub(crate) fn ctl_data_logic() -> Vec> { } /// CTL filter for logic operations. -pub(crate) fn ctl_filter_logic() -> Column { - Column::single(COL_MAP.op.logic_op) +pub(crate) fn ctl_filter_logic() -> Filter { + Filter::new_simple(Column::single(COL_MAP.op.logic_op)) } /// Returns the `TableWithColumns` for the CPU rows calling arithmetic operations. @@ -99,7 +99,7 @@ pub(crate) fn ctl_arithmetic_base_rows() -> TableWithColumns { TableWithColumns::new( Table::Cpu, columns, - Some(Column::sum([ + Some(Filter::new_simple(Column::sum([ COL_MAP.op.binary_op, COL_MAP.op.fp254_op, COL_MAP.op.ternary_op, @@ -107,7 +107,7 @@ pub(crate) fn ctl_arithmetic_base_rows() -> TableWithColumns { COL_MAP.op.prover_input, COL_MAP.op.syscall, COL_MAP.op.exception, - ])), + ]))), ) } @@ -118,8 +118,8 @@ pub(crate) fn ctl_data_byte_packing() -> Vec> { } /// CTL filter for the `MLOAD_32BYTES` operation. 
-pub(crate) fn ctl_filter_byte_packing() -> Column { - Column::single(COL_MAP.op.mload_32bytes) +pub(crate) fn ctl_filter_byte_packing() -> Filter { + Filter::new_simple(Column::single(COL_MAP.op.mload_32bytes)) } /// Creates the vector of `Columns` corresponding to the contents of General Purpose channels when calling byte unpacking. @@ -152,8 +152,8 @@ pub(crate) fn ctl_data_byte_unpacking() -> Vec> { } /// CTL filter for the `MSTORE_32BYTES` operation. -pub(crate) fn ctl_filter_byte_unpacking() -> Column { - Column::single(COL_MAP.op.mstore_32bytes) +pub(crate) fn ctl_filter_byte_unpacking() -> Filter { + Filter::new_simple(Column::single(COL_MAP.op.mstore_32bytes)) } /// Creates the vector of `Columns` corresponding to the contents of the CPU registers when performing a `PUSH`. @@ -179,8 +179,8 @@ pub(crate) fn ctl_data_byte_packing_push() -> Vec> { } /// CTL filter for the `PUSH` operation. -pub(crate) fn ctl_filter_byte_packing_push() -> Column { - Column::single(COL_MAP.op.push) +pub(crate) fn ctl_filter_byte_packing_push() -> Filter { + Filter::new_simple(Column::single(COL_MAP.op.push)) } /// Index of the memory channel storing code. @@ -254,17 +254,17 @@ pub(crate) fn ctl_data_partial_memory() -> Vec> { } /// CTL filter for code read and write operations. -pub(crate) fn ctl_filter_code_memory() -> Column { - Column::sum(COL_MAP.op.iter()) +pub(crate) fn ctl_filter_code_memory() -> Filter { + Filter::new_simple(Column::sum(COL_MAP.op.iter())) } /// CTL filter for General Purpose memory read and write operations. 
-pub(crate) fn ctl_filter_gp_memory(channel: usize) -> Column { - Column::single(COL_MAP.mem_channels[channel].used) +pub(crate) fn ctl_filter_gp_memory(channel: usize) -> Filter { + Filter::new_simple(Column::single(COL_MAP.mem_channels[channel].used)) } -pub(crate) fn ctl_filter_partial_memory() -> Column { - Column::single(COL_MAP.partial_channel.used) +pub(crate) fn ctl_filter_partial_memory() -> Filter { + Filter::new_simple(Column::single(COL_MAP.partial_channel.used)) } /// Structure representing the CPU Stark. diff --git a/evm/src/cross_table_lookup.rs b/evm/src/cross_table_lookup.rs index 41485e98..e8e9c1c2 100644 --- a/evm/src/cross_table_lookup.rs +++ b/evm/src/cross_table_lookup.rs @@ -315,27 +315,111 @@ impl Column { } } +/// Represents a CTL filter, which evaluates to 1 if the row must be considered for the CTL and 0 otherwise. +/// It's an arbitrary degree 2 combination of columns: `products` are the degree 2 terms, and `constants` are +/// the degree 1 terms. +#[derive(Clone, Debug)] +pub(crate) struct Filter { + products: Vec<(Column, Column)>, + constants: Vec>, +} + +impl Filter { + pub(crate) fn new(products: Vec<(Column, Column)>, constants: Vec>) -> Self { + Self { + products, + constants, + } + } + + /// Returns a filter made of a single column. + pub(crate) fn new_simple(col: Column) -> Self { + Self { + products: vec![], + constants: vec![col], + } + } + + /// Given the column values for the current and next rows, evaluates the filter. + pub(crate) fn eval_filter(&self, v: &[P], next_v: &[P]) -> P + where + FE: FieldExtension, + P: PackedField, + { + self.products + .iter() + .map(|(col1, col2)| col1.eval_with_next(v, next_v) * col2.eval_with_next(v, next_v)) + .sum::

() + + self + .constants + .iter() + .map(|col| col.eval_with_next(v, next_v)) + .sum::

() + } + + /// Circuit version of `eval_filter`: + /// Given the column values for the current and next rows, evaluates the filter. + pub(crate) fn eval_filter_circuit( + &self, + builder: &mut CircuitBuilder, + v: &[ExtensionTarget], + next_v: &[ExtensionTarget], + ) -> ExtensionTarget + where + F: RichField + Extendable, + { + let prods = self + .products + .iter() + .map(|(col1, col2)| { + let col1_eval = col1.eval_with_next_circuit(builder, v, next_v); + let col2_eval = col2.eval_with_next_circuit(builder, v, next_v); + builder.mul_extension(col1_eval, col2_eval) + }) + .collect::>(); + + let consts = self + .constants + .iter() + .map(|col| col.eval_with_next_circuit(builder, v, next_v)) + .collect::>(); + + let prods = builder.add_many_extension(prods); + let consts = builder.add_many_extension(consts); + builder.add_extension(prods, consts) + } + + /// Evaluate on a row of a table given in column-major form. + pub(crate) fn eval_table(&self, table: &[PolynomialValues], row: usize) -> F { + self.products + .iter() + .map(|(col1, col2)| col1.eval_table(table, row) * col2.eval_table(table, row)) + .sum::() + + self + .constants + .iter() + .map(|col| col.eval_table(table, row)) + .sum() + } +} + /// A `Table` with a linear combination of columns and a filter. -/// `filter_column` is used to determine the rows to select in `Table`. +/// `filter` is used to determine the rows to select in `Table`. /// `columns` represents linear combinations of the columns of `Table`. #[derive(Clone, Debug)] pub(crate) struct TableWithColumns { table: Table, columns: Vec>, - pub(crate) filter_column: Option>, + pub(crate) filter: Option>, } impl TableWithColumns { - /// Generates a new `TableWithColumns` given a `Table`, a linear combination of columns `columns` and a `filter_column`. 
- pub(crate) fn new( - table: Table, - columns: Vec>, - filter_column: Option>, - ) -> Self { + /// Generates a new `TableWithColumns` given a `Table`, a linear combination of columns `columns` and a `filter`. + pub(crate) fn new(table: Table, columns: Vec>, filter: Option>) -> Self { Self { table, columns, - filter_column, + filter, } } } @@ -395,7 +479,7 @@ pub(crate) struct CtlZData { /// Column linear combination for the current table. pub(crate) columns: Vec>, /// Filter column for the current table. It evaluates to either 1 or 0. - pub(crate) filter_column: Option>, + pub(crate) filter: Option>, } impl CtlData { @@ -562,14 +646,14 @@ pub(crate) fn cross_table_lookup_data( partial_sums( &trace_poly_values[table.table as usize], &table.columns, - &table.filter_column, + &table.filter, challenge, ) }); let z_looked = partial_sums( &trace_poly_values[looked_table.table as usize], &looked_table.columns, - &looked_table.filter_column, + &looked_table.filter, challenge, ); for (table, z) in looking_tables.iter().zip(zs_looking) { @@ -579,7 +663,7 @@ pub(crate) fn cross_table_lookup_data( z, challenge, columns: table.columns.clone(), - filter_column: table.filter_column.clone(), + filter: table.filter.clone(), }); } ctl_data_per_table[looked_table.table as usize] @@ -588,7 +672,7 @@ pub(crate) fn cross_table_lookup_data( z: z_looked, challenge, columns: looked_table.columns.clone(), - filter_column: looked_table.filter_column.clone(), + filter: looked_table.filter.clone(), }); } } @@ -606,7 +690,7 @@ pub(crate) fn cross_table_lookup_data( fn partial_sums( trace: &[PolynomialValues], columns: &[Column], - filter_column: &Option>, + filter: &Option>, challenge: GrandProductChallenge, ) -> PolynomialValues { let mut partial_sum = F::ZERO; @@ -615,12 +699,12 @@ fn partial_sums( let mut res = Vec::with_capacity(degree); for i in (0..degree).rev() { - if let Some(column) = filter_column { - let filter = column.eval_table(trace, i); - if filter.is_one() { + if let 
Some(filter) = filter { + let filter_val = filter.eval_table(trace, i); + if filter_val.is_one() { filters.push(true); } else { - assert_eq!(filter, F::ZERO, "Non-binary filter?"); + assert_eq!(filter_val, F::ZERO, "Non-binary filter?"); filters.push(false); } } else { @@ -673,8 +757,8 @@ where pub(crate) challenges: GrandProductChallenge, /// Column linear combinations of the `CrossTableLookup`s. pub(crate) columns: &'a [Column], - /// Column linear combination that evaluates to either 1 or 0. - pub(crate) filter_column: &'a Option>, + /// Filter that evaluates to either 1 or 0. + pub(crate) filter: &'a Option>, } impl<'a, F: RichField + Extendable, const D: usize> @@ -714,7 +798,7 @@ impl<'a, F: RichField + Extendable, const D: usize> next_z: *looking_z_next, challenges, columns: &table.columns, - filter_column: &table.filter_column, + filter: &table.filter, }); } @@ -724,7 +808,7 @@ impl<'a, F: RichField + Extendable, const D: usize> next_z: *looked_z_next, challenges, columns: &looked_table.columns, - filter_column: &looked_table.filter_column, + filter: &looked_table.filter, }); } } @@ -758,7 +842,7 @@ pub(crate) fn eval_cross_table_lookup_checks>(); let combined = challenges.combine(evals.iter()); - let local_filter = if let Some(column) = filter_column { - column.eval_with_next(local_values, next_values) + let local_filter = if let Some(combin) = filter { + combin.eval_filter(local_values, next_values) } else { P::ONES }; @@ -791,8 +875,8 @@ pub(crate) struct CtlCheckVarsTarget<'a, F: Field, const D: usize> { pub(crate) challenges: GrandProductChallenge, /// Column linear combinations of the `CrossTableLookup`s. pub(crate) columns: &'a [Column], - /// Column linear combination that evaluates to either 1 or 0. - pub(crate) filter_column: &'a Option>, + /// Filter that evaluates to either 1 or 0. 
+ pub(crate) filter: &'a Option>, } impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { @@ -831,7 +915,7 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { next_z: *looking_z_next, challenges, columns: &looking_table.columns, - filter_column: &looking_table.filter_column, + filter: &looking_table.filter, }); } } @@ -843,7 +927,7 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { next_z: *looked_z_next, challenges, columns: &looked_table.columns, - filter_column: &looked_table.filter_column, + filter: &looked_table.filter, }); } } @@ -879,12 +963,12 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit< next_z, challenges, columns, - filter_column, + filter, } = lookup_vars; let one = builder.one_extension(); - let local_filter = if let Some(column) = filter_column { - column.eval_circuit(builder, local_values) + let local_filter = if let Some(combin) = filter { + combin.eval_filter_circuit(builder, local_values, next_values) } else { one }; @@ -1060,8 +1144,8 @@ pub(crate) mod testutils { ) { let trace = &trace_poly_values[table.table as usize]; for i in 0..trace[0].len() { - let filter = if let Some(column) = &table.filter_column { - column.eval_table(trace, i) + let filter = if let Some(combin) = &table.filter { + combin.eval_table(trace, i) } else { F::ONE }; diff --git a/evm/src/keccak/keccak_stark.rs b/evm/src/keccak/keccak_stark.rs index 18814e14..18ba3a5f 100644 --- a/evm/src/keccak/keccak_stark.rs +++ b/evm/src/keccak/keccak_stark.rs @@ -13,7 +13,7 @@ use plonky2::util::timing::TimingTree; use super::columns::reg_input_limb; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::cross_table_lookup::Column; +use crate::cross_table_lookup::{Column, Filter}; use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; use crate::keccak::columns::{ reg_a, reg_a_prime, reg_a_prime_prime, reg_a_prime_prime_0_0_bit, reg_a_prime_prime_prime, @@ -48,13 +48,13 @@ 
pub(crate) fn ctl_data_outputs() -> Vec> { } /// CTL filter for the first round of the Keccak permutation. -pub(crate) fn ctl_filter_inputs() -> Column { - Column::single(reg_step(0)) +pub(crate) fn ctl_filter_inputs() -> Filter { + Filter::new_simple(Column::single(reg_step(0))) } /// CTL filter for the final round of the Keccak permutation. -pub(crate) fn ctl_filter_outputs() -> Column { - Column::single(reg_step(NUM_ROUNDS - 1)) +pub(crate) fn ctl_filter_outputs() -> Filter { + Filter::new_simple(Column::single(reg_step(NUM_ROUNDS - 1))) } #[derive(Copy, Clone, Default)] @@ -633,7 +633,7 @@ mod tests { use crate::config::StarkConfig; use crate::cross_table_lookup::{ - Column, CtlData, CtlZData, GrandProductChallenge, GrandProductChallengeSet, + Column, CtlData, CtlZData, Filter, GrandProductChallenge, GrandProductChallengeSet, }; use crate::keccak::columns::reg_output_limb; use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS}; @@ -748,7 +748,7 @@ mod tests { gamma: F::ZERO, }, columns: vec![], - filter_column: Some(Column::constant(F::ZERO)), + filter: Some(Filter::new_simple(Column::constant(F::ZERO))), }; let ctl_data = CtlData { zs_columns: vec![ctl_z_data.clone(); config.num_challenges], diff --git a/evm/src/keccak_sponge/keccak_sponge_stark.rs b/evm/src/keccak_sponge/keccak_sponge_stark.rs index 32bc7905..75e0ced3 100644 --- a/evm/src/keccak_sponge/keccak_sponge_stark.rs +++ b/evm/src/keccak_sponge/keccak_sponge_stark.rs @@ -17,7 +17,7 @@ use plonky2_util::ceil_div_usize; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::kernel::keccak_util::keccakf_u32s; -use crate::cross_table_lookup::Column; +use crate::cross_table_lookup::{Column, Filter}; use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; use crate::keccak_sponge::columns::*; use crate::lookup::Lookup; @@ -185,35 +185,41 @@ pub(crate) fn ctl_looking_logic(i: usize) -> Vec> { } /// CTL filter for the final block rows of the 
`KeccakSponge` table. -pub(crate) fn ctl_looked_filter() -> Column { +pub(crate) fn ctl_looked_filter() -> Filter { // The CPU table is only interested in our final-block rows, since those contain the final // sponge output. - Column::sum(KECCAK_SPONGE_COL_MAP.is_final_input_len) + Filter::new_simple(Column::sum(KECCAK_SPONGE_COL_MAP.is_final_input_len)) } /// CTL filter for reading the `i`th byte of input from memory. -pub(crate) fn ctl_looking_memory_filter(i: usize) -> Column { +pub(crate) fn ctl_looking_memory_filter(i: usize) -> Filter { // We perform the `i`th read if either // - this is a full input block, or // - this is a final block of length `i` or greater let cols = KECCAK_SPONGE_COL_MAP; if i == KECCAK_RATE_BYTES - 1 { - Column::single(cols.is_full_input_block) + Filter::new_simple(Column::single(cols.is_full_input_block)) } else { - Column::sum(once(&cols.is_full_input_block).chain(&cols.is_final_input_len[i + 1..])) + Filter::new_simple(Column::sum( + once(&cols.is_full_input_block).chain(&cols.is_final_input_len[i + 1..]), + )) } } /// CTL filter for looking at XORs in the logic table. -pub(crate) fn ctl_looking_logic_filter() -> Column { +pub(crate) fn ctl_looking_logic_filter() -> Filter { let cols = KECCAK_SPONGE_COL_MAP; - Column::sum(once(&cols.is_full_input_block).chain(&cols.is_final_input_len)) + Filter::new_simple(Column::sum( + once(&cols.is_full_input_block).chain(&cols.is_final_input_len), + )) } /// CTL filter for looking at the input and output in the Keccak table. -pub(crate) fn ctl_looking_keccak_filter() -> Column { +pub(crate) fn ctl_looking_keccak_filter() -> Filter { let cols = KECCAK_SPONGE_COL_MAP; - Column::sum(once(&cols.is_full_input_block).chain(&cols.is_final_input_len)) + Filter::new_simple(Column::sum( + once(&cols.is_full_input_block).chain(&cols.is_final_input_len), + )) } /// Information about a Keccak sponge operation needed for witness generation. 
diff --git a/evm/src/logic.rs b/evm/src/logic.rs index 03737190..43ea2342 100644 --- a/evm/src/logic.rs +++ b/evm/src/logic.rs @@ -13,7 +13,7 @@ use plonky2::util::timing::TimingTree; use plonky2_util::ceil_div_usize; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::cross_table_lookup::Column; +use crate::cross_table_lookup::{Column, Filter}; use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; use crate::logic::columns::NUM_COLUMNS; use crate::stark::Stark; @@ -79,8 +79,12 @@ pub(crate) fn ctl_data() -> Vec> { } /// CTL filter for logic operations. -pub(crate) fn ctl_filter() -> Column { - Column::sum([columns::IS_AND, columns::IS_OR, columns::IS_XOR]) +pub(crate) fn ctl_filter() -> Filter { + Filter::new_simple(Column::sum([ + columns::IS_AND, + columns::IS_OR, + columns::IS_XOR, + ])) } /// Structure representing the Logic STARK, which computes all logic operations. diff --git a/evm/src/memory/memory_stark.rs b/evm/src/memory/memory_stark.rs index ecdea99e..3d36a33a 100644 --- a/evm/src/memory/memory_stark.rs +++ b/evm/src/memory/memory_stark.rs @@ -14,7 +14,7 @@ use plonky2::util::transpose; use plonky2_maybe_rayon::*; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::cross_table_lookup::Column; +use crate::cross_table_lookup::{Column, Filter}; use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; use crate::lookup::Lookup; use crate::memory::columns::{ @@ -41,8 +41,8 @@ pub(crate) fn ctl_data() -> Vec> { } /// CTL filter for memory operations. 
-pub(crate) fn ctl_filter() -> Column { - Column::single(FILTER) +pub(crate) fn ctl_filter() -> Filter { + Filter::new_simple(Column::single(FILTER)) } #[derive(Copy, Clone, Default)] diff --git a/evm/src/prover.rs b/evm/src/prover.rs index fe8ad517..b4fc8edb 100644 --- a/evm/src/prover.rs +++ b/evm/src/prover.rs @@ -615,7 +615,7 @@ where [num_lookup_columns + i], challenges: zs_columns.challenge, columns: &zs_columns.columns, - filter_column: &zs_columns.filter_column, + filter: &zs_columns.filter, }) .collect::>(); @@ -738,7 +738,7 @@ fn check_constraints<'a, F, C, S, const D: usize>( next_z: auxiliary_subgroup_evals[i_next][num_lookup_columns + iii], challenges: zs_columns.challenge, columns: &zs_columns.columns, - filter_column: &zs_columns.filter_column, + filter: &zs_columns.filter, }) .collect::>(); // Evaluate the polynomial combining all constraints, including those associated