From f65ad58a0854de3e5a65089815d771b4f9ef12d7 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Mon, 13 Feb 2023 15:58:26 +0100 Subject: [PATCH] Implement logUp --- evm/src/all_stark.rs | 28 +- evm/src/arithmetic/arithmetic_stark.rs | 45 +- evm/src/arithmetic/columns.rs | 5 +- evm/src/cpu/kernel/asm/curve/bn254/glv.asm | 97 ++++ evm/src/cpu/kernel/asm/curve/bn254/msm.asm | 73 +++ .../kernel/asm/curve/bn254/precomputation.asm | 35 ++ evm/src/cross_table_lookup.rs | 149 +++++- evm/src/fixed_recursive_verifier.rs | 6 +- evm/src/get_challenges.rs | 66 +-- evm/src/keccak/keccak_stark.rs | 3 +- evm/src/lib.rs | 1 - evm/src/lookup.rs | 345 ++++++++----- evm/src/memory/columns.rs | 7 +- evm/src/memory/memory_stark.rs | 38 +- evm/src/permutation.rs | 459 ------------------ evm/src/proof.rs | 64 +-- evm/src/prover.rs | 158 +++--- evm/src/recursive_verifier.rs | 59 ++- evm/src/stark.rs | 85 ++-- evm/src/vanishing_poly.rs | 35 +- evm/src/verifier.rs | 43 +- 21 files changed, 848 insertions(+), 953 deletions(-) create mode 100644 evm/src/cpu/kernel/asm/curve/bn254/glv.asm create mode 100644 evm/src/cpu/kernel/asm/curve/bn254/msm.asm create mode 100644 evm/src/cpu/kernel/asm/curve/bn254/precomputation.asm delete mode 100644 evm/src/permutation.rs diff --git a/evm/src/all_stark.rs b/evm/src/all_stark.rs index 068b0bcb..b7168f85 100644 --- a/evm/src/all_stark.rs +++ b/evm/src/all_stark.rs @@ -51,27 +51,15 @@ impl, const D: usize> Default for AllStark { } impl, const D: usize> AllStark { - pub(crate) fn nums_permutation_zs(&self, config: &StarkConfig) -> [usize; NUM_TABLES] { + pub(crate) fn num_lookups_helper_columns(&self, config: &StarkConfig) -> [usize; NUM_TABLES] { [ - self.arithmetic_stark.num_permutation_batches(config), - self.byte_packing_stark.num_permutation_batches(config), - self.cpu_stark.num_permutation_batches(config), - self.keccak_stark.num_permutation_batches(config), - self.keccak_sponge_stark.num_permutation_batches(config), - self.logic_stark.num_permutation_batches(config), - self.memory_stark.num_permutation_batches(config), - ] - } - - pub(crate) fn permutation_batch_sizes(&self) -> [usize; NUM_TABLES] { - [ - self.arithmetic_stark.permutation_batch_size(), - self.byte_packing_stark.permutation_batch_size(), - self.cpu_stark.permutation_batch_size(), - self.keccak_stark.permutation_batch_size(), - self.keccak_sponge_stark.permutation_batch_size(), - self.logic_stark.permutation_batch_size(), - self.memory_stark.permutation_batch_size(), + self.arithmetic_stark.num_lookup_helper_columns(config), + self.byte_packing_stark.num_lookup_helper_columns(config), + self.cpu_stark.num_lookup_helper_columns(config), + self.keccak_stark.num_lookup_helper_columns(config), + self.keccak_sponge_stark.num_lookup_helper_columns(config), + self.logic_stark.num_lookup_helper_columns(config), + self.memory_stark.num_lookup_helper_columns(config), ] } } diff --git a/evm/src/arithmetic/arithmetic_stark.rs b/evm/src/arithmetic/arithmetic_stark.rs index 5441cf27..9584ab88 100644 --- a/evm/src/arithmetic/arithmetic_stark.rs +++ b/evm/src/arithmetic/arithmetic_stark.rs @@ -12,11 +12,11 @@ use plonky2::util::transpose; use static_assertions::const_assert; use crate::all_stark::Table; +use crate::arithmetic::columns::{RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS}; use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation}; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cross_table_lookup::{Column, TableWithColumns}; -use 
crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols}; -use crate::permutation::PermutationPair; +use crate::lookup::Lookup; use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; @@ -122,13 +122,12 @@ impl ArithmeticStark { cols[columns::RANGE_COUNTER][i] = F::from_canonical_usize(RANGE_MAX - 1); } - // For each column c in cols, generate the range-check - // permutations and put them in the corresponding range-check - // columns rc_c and rc_c+1. - for (c, rc_c) in columns::SHARED_COLS.zip(columns::RC_COLS.step_by(2)) { - let (col_perm, table_perm) = permuted_cols(&cols[c], &cols[columns::RANGE_COUNTER]); - cols[rc_c].copy_from_slice(&col_perm); - cols[rc_c + 1].copy_from_slice(&table_perm); + // Generate the frequencies column. + for col in SHARED_COLS { + for i in 0..n_rows { + let x = cols[col][i].to_canonical_u64() as usize; + cols[RC_FREQUENCIES][x] += F::ONE; + } } } @@ -178,11 +177,6 @@ impl, const D: usize> Stark for ArithmeticSta FE: FieldExtension, P: PackedField, { - // Range check all the columns - for col in columns::RC_COLS.step_by(2) { - eval_lookups(vars, yield_constr, col, col + 1); - } - let lv = vars.local_values; let nv = vars.next_values; @@ -210,11 +204,6 @@ impl, const D: usize> Stark for ArithmeticSta vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ) { - // Range check all the columns - for col in columns::RC_COLS.step_by(2) { - eval_lookups_circuit(builder, vars, yield_constr, col, col + 1); - } - let lv = vars.local_values; let nv = vars.next_values; @@ -240,18 +229,12 @@ impl, const D: usize> Stark for ArithmeticSta 3 } - fn permutation_pairs(&self) -> Vec { - const START: usize = columns::START_SHARED_COLS; - const END: usize = START + columns::NUM_SHARED_COLS; - let mut pairs = Vec::with_capacity(2 * columns::NUM_SHARED_COLS); - for (c, c_perm) in (START..END).zip_eq(columns::RC_COLS.step_by(2)) { - pairs.push(PermutationPair::singletons(c, c_perm)); - pairs.push(PermutationPair::singletons( - c_perm + 1, - columns::RANGE_COUNTER, - )); - } - pairs + fn lookups(&self) -> Vec { + vec![Lookup { + columns: SHARED_COLS.collect(), + table_column: RANGE_COUNTER, + frequencies_column: RC_FREQUENCIES, + }] } } diff --git a/evm/src/arithmetic/columns.rs b/evm/src/arithmetic/columns.rs index 48e00f8e..f2646fc5 100644 --- a/evm/src/arithmetic/columns.rs +++ b/evm/src/arithmetic/columns.rs @@ -109,8 +109,7 @@ pub(crate) const MODULAR_DIV_DENOM_IS_ZERO: usize = AUX_REGISTER_2.end; // of the column and the permutation of the range. The two // permutations associated to column i will be in columns RC_COLS[2i] // and RC_COLS[2i+1]. 
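+// With logUp, these permuted columns are replaced by a single RC_FREQUENCIES column,
+// which stores, for each value i, the number of times i appears across all the shared
+// columns (cf. the frequencies generation loop in arithmetic_stark.rs above).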
-pub(crate) const NUM_RANGE_CHECK_COLS: usize = 1 + 2 * NUM_SHARED_COLS; pub(crate) const RANGE_COUNTER: usize = START_SHARED_COLS + NUM_SHARED_COLS; -pub(crate) const RC_COLS: Range = RANGE_COUNTER + 1..RANGE_COUNTER + 1 + 2 * NUM_SHARED_COLS; +pub(crate) const RC_FREQUENCIES: usize = RANGE_COUNTER + 1; -pub const NUM_ARITH_COLUMNS: usize = START_SHARED_COLS + NUM_SHARED_COLS + NUM_RANGE_CHECK_COLS; +pub const NUM_ARITH_COLUMNS: usize = START_SHARED_COLS + NUM_SHARED_COLS + 2; diff --git a/evm/src/cpu/kernel/asm/curve/bn254/glv.asm b/evm/src/cpu/kernel/asm/curve/bn254/glv.asm new file mode 100644 index 00000000..c29d8f14 --- /dev/null +++ b/evm/src/cpu/kernel/asm/curve/bn254/glv.asm @@ -0,0 +1,97 @@ +// Inspired by https://github.com/AztecProtocol/weierstrudel/blob/master/huff_modules/endomorphism.huff +// See also Sage code in evm/src/cpu/kernel/tests/ecc/bn_glv_test_data +// Given scalar `k ∈ Bn254::ScalarField`, return `u, k1, k2` with `k1,k2 < 2^127` and such that +// `k = k1 - s*k2` if `u==0` otherwise `k = k1 + s*k2`, where `s` is the scalar value representing the endomorphism. +// In the comments below, N means @BN_SCALAR +// +// Z3 proof that the resulting `k1, k2` satisfy `k1>0`, `k1 < 2^127` and `|k2| < 2^127`. +// ```python +// from z3 import Solver, Int, Or, unsat +// q = 0x30644E72E131A029B85045B68181585D2833E84879B9709143E1F593F0000001 +// glv_s = 0xB3C4D79D41A917585BFC41088D8DAAA78B17EA66B99C90DD +// +// b2 = 0x89D3256894D213E3 +// b1 = -0x6F4D8248EEB859FC8211BBEB7D4F1128 +// +// g1 = 0x24CCEF014A773D2CF7A7BD9D4391EB18D +// g2 = 0x2D91D232EC7E0B3D7 +// k = Int("k") +// c1 = Int("c1") +// c2 = Int("c2") +// s = Solver() +// +// c2p = -c2 +// s.add(k < q) +// s.add(0 < k) +// s.add(c1 * (2**256) <= g2 * k) +// s.add((c1 + 1) * (2**256) > g2 * k) +// s.add(c2p * (2**256) <= g1 * k) +// s.add((c2p + 1) * (2**256) > g1 * k) +// +// q1 = c1 * b1 +// q2 = c2 * b2 +// +// k2 = q2 - q1 +// k2L = (glv_s * k2) % q +// k1 = k - k2L +// k2 = -k2 +// +// s.add(Or((k2 >= 2**127), (-k2 >= 2**127), (k1 >= 2**127), (k1 < 0))) +// +// assert s.check() == unsat +// ``` +global bn_glv_decompose: + // stack: k, retdest + PUSH @BN_SCALAR DUP1 DUP1 + // Compute c2 which is the top 256 bits of k*g1. Use asm from https://medium.com/wicketh/mathemagic-full-multiply-27650fec525d. + PUSH @U256_MAX + // stack: -1, N, N, N, k, retdest + PUSH @BN_GLV_MINUS_G1 DUP6 + // stack: k, g1, -1, N, N, N, k, retdest + MULMOD + // stack: (k * g1 % -1), N, N, N, k, retdest + PUSH @BN_GLV_MINUS_G1 DUP6 + // stack: k, g1, (k * g1 % -1), N, N, N, k, retdest + MUL + // stack: bottom = (k * g1), (k * g1 % -1), N, N, N, k, retdest + DUP1 DUP3 + // stack: (k * g1 % -1), bottom, bottom, (k * g1 % -1), N, N, N, k, retdest + LT SWAP2 SUB SUB + // stack: c2, N, N, N, k, retdest + PUSH @BN_GLV_B2 MULMOD + // stack: q2=c2*b2, N, N, k, retdest + + // Use the same trick to compute c1 = top 256 bits of g2*k. + PUSH @BN_SCALAR PUSH @U256_MAX + PUSH @BN_GLV_G2 DUP7 MULMOD + PUSH @BN_GLV_G2 DUP7 MUL + DUP1 DUP3 LT + SWAP2 SUB SUB + // stack: c1, N, q2, N, N, k, retdest + PUSH @BN_GLV_B1 MULMOD + // stack: q1, q2, N, N, k, retdest + + // We compute k2 = q1 + q2 - N, but we check for underflow and return N-q1-q2 instead if there is one, + // along with a flag `underflow` set to 1 if there is an underflow, 0 otherwise. 
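+    // Recap of the two cases (cf. the header comment and Z3 proof above): with u the
+    // underflow flag,
+    //   u == 0: k1 = (k + s*k2) mod N, so that k = k1 - s*k2 (mod N);
+    //   u == 1: k1 = (k - s*k2) mod N, so that k = k1 + s*k2 (mod N).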
+ ADD %sub_check_underflow + // stack: k2, underflow, N, k, retdest + SWAP3 PUSH @BN_SCALAR DUP5 PUSH @BN_GLV_S + // stack: s, k2, N, k, underflow, N, k2, retdest + MULMOD + // stack: s*k2, k, underflow, N, k2, retdest + // Need to return `k + s*k2` if no underflow occur, otherwise return `k - s*k2` which is done in the `underflowed` fn. + SWAP2 DUP1 %jumpi(underflowed) + %stack (underflow, k, x, N, k2) -> (k, x, N, k2, underflow) + ADDMOD + %stack (k1, k2, underflow, retdest) -> (retdest, underflow, k1, k2) + JUMP + +underflowed: + // stack: underflow, k, s*k2, N, k2 + // Compute (k-s*k2)%N. TODO: Use SUBMOD here when ready + %stack (u, k, x, N, k2) -> (N, x, k, N, k2, u) + SUB ADDMOD + %stack (k1, k2, underflow, retdest) -> (retdest, underflow, k1, k2) + JUMP + + diff --git a/evm/src/cpu/kernel/asm/curve/bn254/msm.asm b/evm/src/cpu/kernel/asm/curve/bn254/msm.asm new file mode 100644 index 00000000..10362287 --- /dev/null +++ b/evm/src/cpu/kernel/asm/curve/bn254/msm.asm @@ -0,0 +1,73 @@ +// Computes the multiplication `a*G` using a standard MSM with the GLV decomposition of `a`. +// see there for a detailed description. +global bn_msm: + // stack: retdest + PUSH 0 PUSH 0 PUSH 0 +global bn_msm_loop: + // stack: accx, accy, i, retdest + DUP3 %bn_mload_wnaf_a + // stack: w, accx, accy, i, retdest + DUP1 %jumpi(bn_msm_loop_add_a_nonzero) + POP +msm_loop_add_b: + //stack: accx, accy, i, retdest + DUP3 %bn_mload_wnaf_b + // stack: w, accx, accy, i, retdest + DUP1 %jumpi(bn_msm_loop_add_b_nonzero) + POP +msm_loop_contd: + %stack (accx, accy, i, retdest) -> (i, i, accx, accy, retdest) + // TODO: the GLV scalars for the BN curve are 127-bit, so could use 127 here. But this would require modifying `wnaf.asm`. Not sure it's worth it... + %eq_const(129) %jumpi(msm_end) + %increment + //stack: i+1, accx, accy, retdest + %stack (i, accx, accy, retdest) -> (accx, accy, bn_msm_loop, i, retdest) + %jump(bn_double) + +msm_end: + %stack (i, accx, accy, retdest) -> (retdest, accx, accy) + JUMP + +bn_msm_loop_add_a_nonzero: + %stack (w, accx, accy, i, retdest) -> (w, accx, accy, msm_loop_add_b, i, retdest) + %bn_mload_point_a + // stack: px, py, accx, accy, msm_loop_add_b, i, retdest + %jump(bn_add_valid_points) + +bn_msm_loop_add_b_nonzero: + %stack (w, accx, accy, i, retdest) -> (w, accx, accy, msm_loop_contd, i, retdest) + %bn_mload_point_b + // stack: px, py, accx, accy, msm_loop_contd, i, retdest + %jump(bn_add_valid_points) + +%macro bn_mload_wnaf_a + // stack: i + %mload_kernel(@SEGMENT_KERNEL_BN_WNAF_A) +%endmacro + +%macro bn_mload_wnaf_b + // stack: i + %mload_kernel(@SEGMENT_KERNEL_BN_WNAF_B) +%endmacro + +%macro bn_mload_point_a + // stack: w + DUP1 + %mload_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) + //stack: Gy, w + SWAP1 %decrement %mload_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) + //stack: Gx, Gy +%endmacro + +%macro bn_mload_point_b + // stack: w + DUP1 + %mload_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) + PUSH @BN_BNEG_LOC %mload_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) + %stack (bneg, Gy, w) -> (@BN_BASE, Gy, bneg, bneg, Gy, w) + SUB SWAP1 ISZERO MUL SWAP2 MUL ADD + SWAP1 %decrement %mload_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) + //stack: Gx, Gy + PUSH @BN_GLV_BETA + MULFP254 +%endmacro diff --git a/evm/src/cpu/kernel/asm/curve/bn254/precomputation.asm b/evm/src/cpu/kernel/asm/curve/bn254/precomputation.asm new file mode 100644 index 00000000..a8c6ada9 --- /dev/null +++ b/evm/src/cpu/kernel/asm/curve/bn254/precomputation.asm @@ -0,0 +1,35 @@ +// Precompute a table of multiples of the BN254 point `Q = (Qx, Qy)`. 
+// Let `(Qxi, Qyi) = i * Q`, then store in the `SEGMENT_KERNEL_BN_TABLE_Q` segment of memory the values +// `i-1 => Qxi`, `i => Qyi if i < 16 else -Qy(32-i)` for `i in range(1, 32, 2)`. +global bn_precompute_table: + // stack: Qx, Qy, retdest + PUSH precompute_table_contd DUP3 DUP3 + %jump(bn_double) +precompute_table_contd: + // stack: Qx2, Qy2, Qx, Qy, retdest + PUSH 1 +bn_precompute_table_loop: + // stack i, Qx2, Qy2, Qx, Qy, retdest + PUSH 1 DUP2 SUB + %stack (im, i, Qx2, Qy2, Qx, Qy, retdest) -> (i, Qy, im, Qx, i, Qx2, Qy2, Qx, Qy, retdest) + %mstore_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) %mstore_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) + // stack: i, Qx2, Qy2, Qx, Qy, retdest + DUP1 PUSH 32 SUB PUSH 1 DUP2 SUB + // stack: 31-i, 32-i, i, Qx2, Qy2, Qx, Qy, retdest + DUP7 PUSH @BN_BASE SUB + // TODO: Could maybe avoid storing Qx a second time here, not sure if it would be more efficient. + %stack (Qyy, iii, ii, i, Qx2, Qy2, Qx, Qy, retdest) -> (iii, Qx, ii, Qyy, i, Qx2, Qy2, Qx, Qy, retdest) + %mstore_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) %mstore_kernel(@SEGMENT_KERNEL_BN_TABLE_Q) + // stack: i, Qx2, Qy2, Qx, Qy, retdest + PUSH 2 ADD + // stack: i+2, Qx2, Qy2, Qx, Qy, retdest + DUP1 PUSH 16 LT %jumpi(precompute_table_end) + %stack (i, Qx2, Qy2, Qx, Qy, retdest) -> (Qx, Qy, Qx2, Qy2, precompute_table_loop_contd, i, Qx2, Qy2, retdest) + %jump(bn_add_valid_points) +precompute_table_loop_contd: + %stack (Qx, Qy, i, Qx2, Qy2, retdest) -> (i, Qx2, Qy2, Qx, Qy, retdest) + %jump(bn_precompute_table_loop) + +precompute_table_end: + // stack: i, Qx2, Qy2, Qx, Qy, retdest + %pop5 JUMP diff --git a/evm/src/cross_table_lookup.rs b/evm/src/cross_table_lookup.rs index a9b90428..dfac49f7 100644 --- a/evm/src/cross_table_lookup.rs +++ b/evm/src/cross_table_lookup.rs @@ -1,4 +1,5 @@ use std::borrow::Borrow; +use std::fmt::Debug; use std::iter::repeat; use anyhow::{ensure, Result}; @@ -8,15 +9,19 @@ use plonky2::field::packed::PackedField; use plonky2::field::polynomial::PolynomialValues; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; +use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::config::GenericConfig; +use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; +use plonky2::plonk::plonk_common::{ + reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit, +}; +use plonky2::util::serialization::{Buffer, IoResult, Read, Write}; use crate::all_stark::{Table, NUM_TABLES}; use crate::config::StarkConfig; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::permutation::{GrandProductChallenge, GrandProductChallengeSet}; use crate::proof::{StarkProofTarget, StarkProofWithMetadata}; use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; @@ -217,6 +222,128 @@ impl CtlData { } } +/// Randomness for a single instance of a permutation check protocol. +#[derive(Copy, Clone, Eq, PartialEq, Debug)] +pub(crate) struct GrandProductChallenge { + /// Randomness used to combine multiple columns into one. + pub(crate) beta: T, + /// Random offset that's added to the beta-reduced column values. 
+ pub(crate) gamma: T, +} + +impl GrandProductChallenge { + pub(crate) fn combine<'a, FE, P, T: IntoIterator, const D2: usize>( + &self, + terms: T, + ) -> P + where + FE: FieldExtension, + P: PackedField, + T::IntoIter: DoubleEndedIterator, + { + reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma) + } +} + +impl GrandProductChallenge { + pub(crate) fn combine_circuit, const D: usize>( + &self, + builder: &mut CircuitBuilder, + terms: &[ExtensionTarget], + ) -> ExtensionTarget { + let reduced = reduce_with_powers_ext_circuit(builder, terms, self.beta); + let gamma = builder.convert_to_ext(self.gamma); + builder.add_extension(reduced, gamma) + } +} + +impl GrandProductChallenge { + pub(crate) fn combine_base_circuit, const D: usize>( + &self, + builder: &mut CircuitBuilder, + terms: &[Target], + ) -> Target { + let reduced = reduce_with_powers_circuit(builder, terms, self.beta); + builder.add(reduced, self.gamma) + } +} + +/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness. +#[derive(Clone, Eq, PartialEq, Debug)] +pub(crate) struct GrandProductChallengeSet { + pub(crate) challenges: Vec>, +} + +impl GrandProductChallengeSet { + pub fn to_buffer(&self, buffer: &mut Vec) -> IoResult<()> { + buffer.write_usize(self.challenges.len())?; + for challenge in &self.challenges { + buffer.write_target(challenge.beta)?; + buffer.write_target(challenge.gamma)?; + } + Ok(()) + } + + pub fn from_buffer(buffer: &mut Buffer) -> IoResult { + let length = buffer.read_usize()?; + let mut challenges = Vec::with_capacity(length); + for _ in 0..length { + challenges.push(GrandProductChallenge { + beta: buffer.read_target()?, + gamma: buffer.read_target()?, + }); + } + + Ok(GrandProductChallengeSet { challenges }) + } +} + +fn get_grand_product_challenge>( + challenger: &mut Challenger, +) -> GrandProductChallenge { + let beta = challenger.get_challenge(); + let gamma = challenger.get_challenge(); + GrandProductChallenge { beta, gamma } +} + +pub(crate) fn get_grand_product_challenge_set>( + challenger: &mut Challenger, + num_challenges: usize, +) -> GrandProductChallengeSet { + let challenges = (0..num_challenges) + .map(|_| get_grand_product_challenge(challenger)) + .collect(); + GrandProductChallengeSet { challenges } +} + +fn get_grand_product_challenge_target< + F: RichField + Extendable, + H: AlgebraicHasher, + const D: usize, +>( + builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, +) -> GrandProductChallenge { + let beta = challenger.get_challenge(builder); + let gamma = challenger.get_challenge(builder); + GrandProductChallenge { beta, gamma } +} + +pub(crate) fn get_grand_product_challenge_set_target< + F: RichField + Extendable, + H: AlgebraicHasher, + const D: usize, +>( + builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, + num_challenges: usize, +) -> GrandProductChallengeSet { + let challenges = (0..num_challenges) + .map(|_| get_grand_product_challenge_target(builder, challenger)) + .collect(); + GrandProductChallengeSet { challenges } +} + pub(crate) fn cross_table_lookup_data( trace_poly_values: &[Vec>; NUM_TABLES], cross_table_lookups: &[CrossTableLookup], @@ -317,15 +444,15 @@ impl<'a, F: RichField + Extendable, const D: usize> proofs: &[StarkProofWithMetadata; NUM_TABLES], cross_table_lookups: &'a [CrossTableLookup], ctl_challenges: &'a GrandProductChallengeSet, - num_permutation_zs: &[usize; NUM_TABLES], + num_lookup_columns: &[usize; NUM_TABLES], ) -> [Vec; NUM_TABLES] { let mut 
ctl_zs = proofs .iter() - .zip(num_permutation_zs) - .map(|(p, &num_perms)| { + .zip(num_lookup_columns) + .map(|(p, &num_lookup)| { let openings = &p.proof.openings; - let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms); - let ctl_zs_next = openings.permutation_ctl_zs_next.iter().skip(num_perms); + let ctl_zs = openings.auxiliary_polys.iter().skip(num_lookup); + let ctl_zs_next = openings.auxiliary_polys_next.iter().skip(num_lookup); ctl_zs.zip(ctl_zs_next) }) .collect::>(); @@ -419,15 +546,15 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { proof: &StarkProofTarget, cross_table_lookups: &'a [CrossTableLookup], ctl_challenges: &'a GrandProductChallengeSet, - num_permutation_zs: usize, + num_lookup_columns: usize, ) -> Vec { let mut ctl_zs = { let openings = &proof.openings; - let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_permutation_zs); + let ctl_zs = openings.auxiliary_polys.iter().skip(num_lookup_columns); let ctl_zs_next = openings - .permutation_ctl_zs_next + .auxiliary_polys_next .iter() - .skip(num_permutation_zs); + .skip(num_lookup_columns); ctl_zs.zip(ctl_zs_next) }; diff --git a/evm/src/fixed_recursive_verifier.rs b/evm/src/fixed_recursive_verifier.rs index 5c0f1f80..33021c39 100644 --- a/evm/src/fixed_recursive_verifier.rs +++ b/evm/src/fixed_recursive_verifier.rs @@ -32,14 +32,16 @@ use crate::arithmetic::arithmetic_stark::ArithmeticStark; use crate::byte_packing::byte_packing_stark::BytePackingStark; use crate::config::StarkConfig; use crate::cpu::cpu_stark::CpuStark; -use crate::cross_table_lookup::{verify_cross_table_lookups_circuit, CrossTableLookup}; +use crate::cross_table_lookup::{ + get_grand_product_challenge_set_target, verify_cross_table_lookups_circuit, CrossTableLookup, + GrandProductChallengeSet, +}; use crate::generation::GenerationInputs; use crate::get_challenges::observe_public_values_target; use crate::keccak::keccak_stark::KeccakStark; use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark; use crate::logic::LogicStark; use crate::memory::memory_stark::MemoryStark; -use crate::permutation::{get_grand_product_challenge_set_target, GrandProductChallengeSet}; use crate::proof::{ BlockHashesTarget, BlockMetadataTarget, ExtraBlockDataTarget, PublicValues, PublicValuesTarget, StarkProofWithMetadata, TrieRootsTarget, diff --git a/evm/src/get_challenges.rs b/evm/src/get_challenges.rs index 59be8439..a12aaa95 100644 --- a/evm/src/get_challenges.rs +++ b/evm/src/get_challenges.rs @@ -8,10 +8,7 @@ use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; use crate::all_stark::{AllStark, NUM_TABLES}; use crate::config::StarkConfig; -use crate::permutation::{ - get_grand_product_challenge_set, get_n_grand_product_challenge_sets, - get_n_grand_product_challenge_sets_target, -}; +use crate::cross_table_lookup::get_grand_product_challenge_set; use crate::proof::*; use crate::util::{h256_limbs, u256_limbs}; @@ -224,18 +221,14 @@ impl, C: GenericConfig, const D: usize> A let ctl_challenges = get_grand_product_challenge_set(&mut challenger, config.num_challenges); - let num_permutation_zs = all_stark.nums_permutation_zs(config); - let num_permutation_batch_sizes = all_stark.permutation_batch_sizes(); + let lookups = all_stark.num_lookups_helper_columns(config); AllProofChallenges { stark_challenges: core::array::from_fn(|i| { challenger.compact(); - self.stark_proofs[i].proof.get_challenges( - &mut challenger, - num_permutation_zs[i] > 0, - num_permutation_batch_sizes[i], - config, - ) + self.stark_proofs[i] + .proof + 
.get_challenges(&mut challenger, lookups[i] > 0, config) }), ctl_challenges, } @@ -258,17 +251,13 @@ impl, C: GenericConfig, const D: usize> A let ctl_challenges = get_grand_product_challenge_set(&mut challenger, config.num_challenges); - let num_permutation_zs = all_stark.nums_permutation_zs(config); - let num_permutation_batch_sizes = all_stark.permutation_batch_sizes(); + let lookups = all_stark.num_lookups_helper_columns(config); let mut challenger_states = vec![challenger.compact()]; for i in 0..NUM_TABLES { - self.stark_proofs[i].proof.get_challenges( - &mut challenger, - num_permutation_zs[i] > 0, - num_permutation_batch_sizes[i], - config, - ); + self.stark_proofs[i] + .proof + .get_challenges(&mut challenger, lookups[i] > 0, config); challenger_states.push(challenger.compact()); } @@ -288,14 +277,13 @@ where pub(crate) fn get_challenges( &self, challenger: &mut Challenger, - stark_use_permutation: bool, - stark_permutation_batch_size: usize, + stark_use_lookup: bool, config: &StarkConfig, ) -> StarkProofChallenges { let degree_bits = self.recover_degree_bits(config); let StarkProof { - permutation_ctl_zs_cap, + auxiliary_polys_cap, quotient_polys_cap, openings, opening_proof: @@ -310,15 +298,10 @@ where let num_challenges = config.num_challenges; - let permutation_challenge_sets = stark_use_permutation.then(|| { - get_n_grand_product_challenge_sets( - challenger, - num_challenges, - stark_permutation_batch_size, - ) - }); + let lookup_challenges = + stark_use_lookup.then(|| challenger.get_n_challenges(config.num_challenges)); - challenger.observe_cap(permutation_ctl_zs_cap); + challenger.observe_cap(auxiliary_polys_cap); let stark_alphas = challenger.get_n_challenges(num_challenges); @@ -328,7 +311,7 @@ where challenger.observe_openings(&openings.to_fri_openings()); StarkProofChallenges { - permutation_challenge_sets, + lookup_challenges, stark_alphas, stark_zeta, fri_challenges: challenger.fri_challenges::( @@ -347,15 +330,14 @@ impl StarkProofTarget { &self, builder: &mut CircuitBuilder, challenger: &mut RecursiveChallenger, - stark_use_permutation: bool, - stark_permutation_batch_size: usize, + stark_use_lookup: bool, config: &StarkConfig, ) -> StarkProofChallengesTarget where C::Hasher: AlgebraicHasher, { let StarkProofTarget { - permutation_ctl_zs_cap, + auxiliary_polys_cap: auxiliary_polys, quotient_polys_cap, openings, opening_proof: @@ -370,16 +352,10 @@ impl StarkProofTarget { let num_challenges = config.num_challenges; - let permutation_challenge_sets = stark_use_permutation.then(|| { - get_n_grand_product_challenge_sets_target( - builder, - challenger, - num_challenges, - stark_permutation_batch_size, - ) - }); + let lookup_challenges = + stark_use_lookup.then(|| challenger.get_n_challenges(builder, num_challenges)); - challenger.observe_cap(permutation_ctl_zs_cap); + challenger.observe_cap(auxiliary_polys); let stark_alphas = challenger.get_n_challenges(builder, num_challenges); @@ -389,7 +365,7 @@ impl StarkProofTarget { challenger.observe_openings(&openings.to_fri_openings(builder.zero())); StarkProofChallengesTarget { - permutation_challenge_sets, + lookup_challenges, stark_alphas, stark_zeta, fri_challenges: challenger.fri_challenges( diff --git a/evm/src/keccak/keccak_stark.rs b/evm/src/keccak/keccak_stark.rs index 74f92622..b4ff4b84 100644 --- a/evm/src/keccak/keccak_stark.rs +++ b/evm/src/keccak/keccak_stark.rs @@ -650,10 +650,9 @@ mod tests { use tiny_keccak::keccakf; use crate::config::StarkConfig; - use crate::cross_table_lookup::{CtlData, CtlZData}; + use 
crate::cross_table_lookup::{CtlData, CtlZData, GrandProductChallenge}; use crate::keccak::columns::reg_output_limb; use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS}; - use crate::permutation::GrandProductChallenge; use crate::prover::prove_single_table; use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; diff --git a/evm/src/lib.rs b/evm/src/lib.rs index ab48cda0..93e29c7d 100644 --- a/evm/src/lib.rs +++ b/evm/src/lib.rs @@ -23,7 +23,6 @@ pub mod keccak_sponge; pub mod logic; pub mod lookup; pub mod memory; -pub mod permutation; pub mod proof; pub mod prover; pub mod recursive_verifier; diff --git a/evm/src/lookup.rs b/evm/src/lookup.rs index d7e12bac..ad872a79 100644 --- a/evm/src/lookup.rs +++ b/evm/src/lookup.rs @@ -1,127 +1,248 @@ -use std::cmp::Ordering; - use itertools::Itertools; -use plonky2::field::extension::Extendable; +use plonky2::field::batch_util::batch_add_inplace; +use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::packed::PackedField; -use plonky2::field::types::{Field, PrimeField64}; +use plonky2::field::polynomial::PolynomialValues; +use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; +use plonky2::iop::ext_target::ExtensionTarget; +use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; +use plonky2_util::ceil_div_usize; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; -pub(crate) fn eval_lookups, const COLS: usize>( - vars: StarkEvaluationVars, +pub struct Lookup { + /// Columns whose values should be contained in the lookup table. + /// These are the f_i(x) polynomials in the logUp paper. + pub(crate) columns: Vec, + /// Column containing the lookup table. + /// This is the t(x) polynomial in the paper. + pub(crate) table_column: usize, + /// Column containing the frequencies of `columns` in `table_column`. + /// This is the m(x) polynomial in the paper. + pub(crate) frequencies_column: usize, +} + +impl Lookup { + pub(crate) fn num_helper_columns(&self, constraint_degree: usize) -> usize { + // One helper column for each column batch of size `constraint_degree-1`, + // then one column for the inverse of `table + challenge` and one for the `Z` polynomial. + ceil_div_usize(self.columns.len(), constraint_degree - 1) + 2 + } +} + +/// logUp protocol from https://ia.cr/2022/1530 (TODO link to newer version?) +/// Compute the helper columns for the lookup argument. +/// Given columns `f0,...,fk` and a column `t`, such that `∪fi ⊆ t`, and challenges `x`, +/// this computes the helper columns `h_i = 1/(x+f_2i) + 1/(x+f_2i+1)`, `g = 1/(x+t)`, +/// and `Z(gx) = Z(x) + sum h_i(x) - m(x)g(x)` where `m` is the frequencies column. +pub(crate) fn lookup_helper_columns( + lookup: &Lookup, + trace_poly_values: &[PolynomialValues], + challenge: F, + constraint_degree: usize, +) -> Vec> { + assert_eq!( + constraint_degree, 3, + "TODO: Allow other constraint degrees." + ); + let num_helper_columns = lookup.num_helper_columns(constraint_degree); + let mut helper_columns: Vec> = Vec::with_capacity(num_helper_columns); + + // For each batch of `constraint_degree-1` columns `fi`, compute `sum 1/(f_i+challenge)` and + // add it to the helper columns. + // TODO: This does one batch inversion per column. 
It would also be possible to do one batch inversion + // for every column, but that would require building a big vector of all the columns concatenated. + // Not sure which approach is better. + // Note: these are the h_k(x) polynomials in the paper, with a few differences: + // * Here, the first ratio m_0(x)/phi_0(x) is not included with the columns batched up to create the + // h_k polynomials; instead there's a separate helper column for it (see below). + // * Here, we use 1 instead of -1 as the numerator (and subtract later). + // * Here, for now, the batch size (l) is always constraint_degree - 1 = 2. + for mut col_inds in &lookup.columns.iter().chunks(constraint_degree - 1) { + let first = *col_inds.next().unwrap(); + // TODO: The clone could probably be avoided by using a modified version of `batch_multiplicative_inverse` + // taking `challenge` as an additional argument. + let mut column = trace_poly_values[first].values.clone(); + for x in column.iter_mut() { + *x = challenge + *x; + } + let mut acc = F::batch_multiplicative_inverse(&column); + for &ind in col_inds { + let mut column = trace_poly_values[ind].values.clone(); + for x in column.iter_mut() { + *x = challenge + *x; + } + column = F::batch_multiplicative_inverse(&column); + batch_add_inplace(&mut acc, &column); + } + helper_columns.push(acc.into()); + } + + // Add `1/(table+challenge)` to the helper columns. + // This is 1/phi_0(x) = 1/(x + t(x)) from the paper. + // Here, we don't include m(x) in the numerator, instead multiplying it with this column later. + let mut table = trace_poly_values[lookup.table_column].values.clone(); + for x in table.iter_mut() { + *x = challenge + *x; + } + helper_columns.push(F::batch_multiplicative_inverse(&table).into()); + + // Compute the `Z` polynomial with `Z(1)=0` and `Z(gx) = Z(x) + sum h_i(x) - frequencies(x)g(x)`. + // This enforces the check from the paper, that the sum of the h_k(x) polynomials is 0 over H. + // In the paper, that sum includes m(x)/(x + t(x)) = frequencies(x)/g(x), because that was bundled + // into the h_k(x) polynomials. + let frequencies = &trace_poly_values[lookup.frequencies_column].values; + let mut z = Vec::with_capacity(frequencies.len()); + z.push(F::ZERO); + for i in 0..frequencies.len() - 1 { + let x = helper_columns[..num_helper_columns - 2] + .iter() + .map(|col| col.values[i]) + .sum::() + - frequencies[i] * helper_columns[num_helper_columns - 2].values[i]; + z.push(z[i] + x); + } + helper_columns.push(z.into()); + + helper_columns +} + +pub struct LookupCheckVars +where + F: Field, + FE: FieldExtension, + P: PackedField, +{ + pub(crate) local_values: Vec
<P>,
+    pub(crate) next_values: Vec<P>,
+    pub(crate) challenges: Vec<F>,
+}
+
+/// Constraints for the logUp lookup argument.
+pub(crate) fn eval_lookups_checks<F, FE, P, S, const D: usize, const D2: usize>(
+    stark: &S,
+    lookups: &[Lookup],
+    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
+    lookup_vars: LookupCheckVars<F, FE, P, D2>,
+    yield_constr: &mut ConstraintConsumer<P>
, - col_permuted_input: usize, - col_permuted_table: usize, -) { - let local_perm_input = vars.local_values[col_permuted_input]; - let next_perm_table = vars.next_values[col_permuted_table]; - let next_perm_input = vars.next_values[col_permuted_input]; - - // A "vertical" diff between the local and next permuted inputs. - let diff_input_prev = next_perm_input - local_perm_input; - // A "horizontal" diff between the next permuted input and permuted table value. - let diff_input_table = next_perm_input - next_perm_table; - - yield_constr.constraint(diff_input_prev * diff_input_table); - - // This is actually constraining the first row, as per the spec, since `diff_input_table` - // is a diff of the next row's values. In the context of `constraint_last_row`, the next - // row is the first row. - yield_constr.constraint_last_row(diff_input_table); -} - -pub(crate) fn eval_lookups_circuit< +) where F: RichField + Extendable, - const D: usize, - const COLS: usize, ->( - builder: &mut CircuitBuilder, - vars: StarkEvaluationTargets, - yield_constr: &mut RecursiveConstraintConsumer, - col_permuted_input: usize, - col_permuted_table: usize, -) { - let local_perm_input = vars.local_values[col_permuted_input]; - let next_perm_table = vars.next_values[col_permuted_table]; - let next_perm_input = vars.next_values[col_permuted_input]; - - // A "vertical" diff between the local and next permuted inputs. - let diff_input_prev = builder.sub_extension(next_perm_input, local_perm_input); - // A "horizontal" diff between the next permuted input and permuted table value. - let diff_input_table = builder.sub_extension(next_perm_input, next_perm_table); - - let diff_product = builder.mul_extension(diff_input_prev, diff_input_table); - yield_constr.constraint(builder, diff_product); - - // This is actually constraining the first row, as per the spec, since `diff_input_table` - // is a diff of the next row's values. In the context of `constraint_last_row`, the next - // row is the first row. - yield_constr.constraint_last_row(builder, diff_input_table); -} - -/// Given an input column and a table column, generate the permuted input and permuted table columns -/// used in the Halo2 permutation argument. -pub fn permuted_cols(inputs: &[F], table: &[F]) -> (Vec, Vec) { - let n = inputs.len(); - - // The permuted inputs do not have to be ordered, but we found that sorting was faster than - // hash-based grouping. We also sort the table, as this helps us identify "unused" table - // elements efficiently. - - // To compare elements, e.g. for sorting, we first need them in canonical form. It would be - // wasteful to canonicalize in each comparison, as a single element may be involved in many - // comparisons. So we will canonicalize once upfront, then use `to_noncanonical_u64` when - // comparing elements. 
- - let sorted_inputs = inputs - .iter() - .map(|x| x.to_canonical()) - .sorted_unstable_by_key(|x| x.to_noncanonical_u64()) - .collect_vec(); - let sorted_table = table - .iter() - .map(|x| x.to_canonical()) - .sorted_unstable_by_key(|x| x.to_noncanonical_u64()) - .collect_vec(); - - let mut unused_table_inds = Vec::with_capacity(n); - let mut unused_table_vals = Vec::with_capacity(n); - let mut permuted_table = vec![F::ZERO; n]; - let mut i = 0; - let mut j = 0; - while (j < n) && (i < n) { - let input_val = sorted_inputs[i].to_noncanonical_u64(); - let table_val = sorted_table[j].to_noncanonical_u64(); - match input_val.cmp(&table_val) { - Ordering::Greater => { - unused_table_vals.push(sorted_table[j]); - j += 1; - } - Ordering::Less => { - if let Some(x) = unused_table_vals.pop() { - permuted_table[i] = x; - } else { - unused_table_inds.push(i); + FE: FieldExtension, + P: PackedField, + S: Stark, +{ + let degree = stark.constraint_degree(); + assert_eq!(degree, 3, "TODO: Allow other constraint degrees."); + let mut start = 0; + for lookup in lookups { + let num_helper_columns = lookup.num_helper_columns(degree); + for &challenge in &lookup_vars.challenges { + let challenge = FE::from_basefield(challenge); + // For each chunk, check that `h_i (x+f_2i) (x+f_2i+1) = (x+f_2i) + (x+f_2i+1)` if the chunk has length 2 + // or if it has length 1, check that `h_i * (x+f_2i) = 1`, where x is the challenge + for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() { + let mut x = lookup_vars.local_values[start + j]; + let mut y = P::ZEROS; + let fs = chunk.iter().map(|&k| vars.local_values[k]); + for f in fs { + x *= f + challenge; + y += f + challenge; + } + match chunk.len() { + 2 => yield_constr.constraint(x - y), + 1 => yield_constr.constraint(x - P::ONES), + _ => todo!("Allow other constraint degrees."), } - i += 1; - } - Ordering::Equal => { - permuted_table[i] = sorted_table[j]; - i += 1; - j += 1; } + // Check that the penultimate helper column contains `1/(table+challenge)`. + let x = lookup_vars.local_values[start + num_helper_columns - 2]; + let x = x * (vars.local_values[lookup.table_column] + challenge); + yield_constr.constraint(x - P::ONES); + + // Check the `Z` polynomial. 
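+            // i.e. enforce the transition Z(gx) - Z(x) = sum_i h_i(x) - m(x)*g(x),
+            // matching the running sum built in `lookup_helper_columns` (which also
+            // sets Z(1) = 0).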
+ let z = lookup_vars.local_values[start + num_helper_columns - 1]; + let next_z = lookup_vars.next_values[start + num_helper_columns - 1]; + let y = lookup_vars.local_values[start..start + num_helper_columns - 2] + .iter() + .fold(P::ZEROS, |acc, x| acc + *x) + - vars.local_values[lookup.frequencies_column] + * lookup_vars.local_values[start + num_helper_columns - 2]; + yield_constr.constraint(next_z - z - y); + start += num_helper_columns; + } + } +} + +pub struct LookupCheckVarsTarget { + pub(crate) local_values: Vec>, + pub(crate) next_values: Vec>, + pub(crate) challenges: Vec, +} + +pub(crate) fn eval_lookups_checks_circuit< + F: RichField + Extendable, + S: Stark, + const D: usize, +>( + builder: &mut CircuitBuilder, + stark: &S, + vars: StarkEvaluationTargets, + lookup_vars: LookupCheckVarsTarget, + yield_constr: &mut RecursiveConstraintConsumer, +) { + let one = builder.one_extension(); + let degree = stark.constraint_degree(); + let lookups = stark.lookups(); + assert_eq!(degree, 3, "TODO: Allow other constraint degrees."); + let mut start = 0; + for lookup in lookups { + let num_helper_columns = lookup.num_helper_columns(degree); + for &challenge in &lookup_vars.challenges { + let challenge = builder.convert_to_ext(challenge); + for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() { + let mut x = lookup_vars.local_values[start + j]; + let mut y = builder.zero_extension(); + let fs = chunk.iter().map(|&k| vars.local_values[k]); + for f in fs { + let tmp = builder.add_extension(f, challenge); + x = builder.mul_extension(x, tmp); + y = builder.add_extension(y, tmp); + } + match chunk.len() { + 2 => { + let tmp = builder.sub_extension(x, y); + yield_constr.constraint(builder, tmp) + } + 1 => { + let tmp = builder.sub_extension(x, one); + yield_constr.constraint(builder, tmp) + } + _ => todo!("Allow other constraint degrees."), + } + } + let x = lookup_vars.local_values[start + num_helper_columns - 2]; + let tmp = builder.add_extension(vars.local_values[lookup.table_column], challenge); + let x = builder.mul_sub_extension(x, tmp, one); + yield_constr.constraint(builder, x); + + let z = lookup_vars.local_values[start + num_helper_columns - 1]; + let next_z = lookup_vars.next_values[start + num_helper_columns - 1]; + let y = builder.add_many_extension( + &lookup_vars.local_values[start..start + num_helper_columns - 2], + ); + let tmp = builder.mul_extension( + vars.local_values[lookup.frequencies_column], + lookup_vars.local_values[start + num_helper_columns - 2], + ); + let y = builder.sub_extension(y, tmp); + let constraint = builder.sub_extension(next_z, z); + let constraint = builder.sub_extension(constraint, y); + yield_constr.constraint(builder, constraint); + start += num_helper_columns; } } - - unused_table_vals.extend_from_slice(&sorted_table[j..n]); - unused_table_inds.extend(i..n); - - for (ind, val) in unused_table_inds.into_iter().zip_eq(unused_table_vals) { - permuted_table[ind] = val; - } - - (sorted_inputs, permuted_table) } diff --git a/evm/src/memory/columns.rs b/evm/src/memory/columns.rs index c972dd0a..56b121e1 100644 --- a/evm/src/memory/columns.rs +++ b/evm/src/memory/columns.rs @@ -31,8 +31,7 @@ pub(crate) const VIRTUAL_FIRST_CHANGE: usize = SEGMENT_FIRST_CHANGE + 1; pub(crate) const RANGE_CHECK: usize = VIRTUAL_FIRST_CHANGE + 1; // The counter column (used for the range check) starts from 0 and increments. pub(crate) const COUNTER: usize = RANGE_CHECK + 1; -// Helper columns for the permutation argument used to enforce the range check. 
-pub(crate) const RANGE_CHECK_PERMUTED: usize = COUNTER + 1; -pub(crate) const COUNTER_PERMUTED: usize = RANGE_CHECK_PERMUTED + 1; +// The frequencies column used in logUp. +pub(crate) const FREQUENCIES: usize = COUNTER + 1; -pub(crate) const NUM_COLUMNS: usize = COUNTER_PERMUTED + 1; +pub(crate) const NUM_COLUMNS: usize = FREQUENCIES + 1; diff --git a/evm/src/memory/memory_stark.rs b/evm/src/memory/memory_stark.rs index 36f75665..1935af55 100644 --- a/evm/src/memory/memory_stark.rs +++ b/evm/src/memory/memory_stark.rs @@ -14,14 +14,13 @@ use plonky2_maybe_rayon::*; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cross_table_lookup::Column; -use crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols}; +use crate::lookup::Lookup; use crate::memory::columns::{ - value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER, - COUNTER_PERMUTED, FILTER, IS_READ, NUM_COLUMNS, RANGE_CHECK, RANGE_CHECK_PERMUTED, - SEGMENT_FIRST_CHANGE, TIMESTAMP, VIRTUAL_FIRST_CHANGE, + value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER, FILTER, + FREQUENCIES, IS_READ, NUM_COLUMNS, RANGE_CHECK, SEGMENT_FIRST_CHANGE, TIMESTAMP, + VIRTUAL_FIRST_CHANGE, }; use crate::memory::VALUE_LIMBS; -use crate::permutation::PermutationPair; use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; use crate::witness::memory::MemoryOpKind::Read; @@ -144,10 +143,10 @@ impl, const D: usize> MemoryStark { let height = trace_col_vecs[0].len(); trace_col_vecs[COUNTER] = (0..height).map(|i| F::from_canonical_usize(i)).collect(); - let (permuted_inputs, permuted_table) = - permuted_cols(&trace_col_vecs[RANGE_CHECK], &trace_col_vecs[COUNTER]); - trace_col_vecs[RANGE_CHECK_PERMUTED] = permuted_inputs; - trace_col_vecs[COUNTER_PERMUTED] = permuted_table; + for i in 0..height { + let x = trace_col_vecs[RANGE_CHECK][i].to_canonical_u64() as usize; + trace_col_vecs[FREQUENCIES][x] += F::ONE; + } } /// This memory STARK orders rows by `(context, segment, virt, timestamp)`. To enforce the @@ -316,8 +315,6 @@ impl, const D: usize> Stark for MemoryStark, const D: usize> Stark for MemoryStark usize { 3 } - fn permutation_pairs(&self) -> Vec { - vec![ - PermutationPair::singletons(RANGE_CHECK, RANGE_CHECK_PERMUTED), - PermutationPair::singletons(COUNTER, COUNTER_PERMUTED), - ] + fn lookups(&self) -> Vec { + vec![Lookup { + columns: vec![RANGE_CHECK], + table_column: COUNTER, + frequencies_column: FREQUENCIES, + }] } } diff --git a/evm/src/permutation.rs b/evm/src/permutation.rs deleted file mode 100644 index 6ce9c943..00000000 --- a/evm/src/permutation.rs +++ /dev/null @@ -1,459 +0,0 @@ -//! Permutation arguments. 
- -use std::fmt::Debug; - -use itertools::Itertools; -use plonky2::field::batch_util::batch_multiply_inplace; -use plonky2::field::extension::{Extendable, FieldExtension}; -use plonky2::field::packed::PackedField; -use plonky2::field::polynomial::PolynomialValues; -use plonky2::field::types::Field; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; -use plonky2::iop::ext_target::ExtensionTarget; -use plonky2::iop::target::Target; -use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::config::{AlgebraicHasher, Hasher}; -use plonky2::plonk::plonk_common::{ - reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit, -}; -use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget}; -use plonky2::util::serialization::{Buffer, IoResult, Read, Write}; -use plonky2_maybe_rayon::*; - -use crate::config::StarkConfig; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::stark::Stark; -use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; - -/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another. -/// In particular, there should exist some permutation `pi` such that for any `i`, -/// `trace[lhs[i]] = pi(trace[rhs[i]])`. Here `trace` denotes the trace in column-major form, so -/// `trace[col]` is a column vector. -pub struct PermutationPair { - /// Each entry contains two column indices, representing two columns which should be - /// permutations of one another. - pub column_pairs: Vec<(usize, usize)>, -} - -impl PermutationPair { - pub fn singletons(lhs: usize, rhs: usize) -> Self { - Self { - column_pairs: vec![(lhs, rhs)], - } - } -} - -/// A single instance of a permutation check protocol. -pub(crate) struct PermutationInstance<'a, T: Copy + Eq + PartialEq + Debug> { - pub(crate) pair: &'a PermutationPair, - pub(crate) challenge: GrandProductChallenge, -} - -/// Randomness for a single instance of a permutation check protocol. -#[derive(Copy, Clone, Eq, PartialEq, Debug)] -pub(crate) struct GrandProductChallenge { - /// Randomness used to combine multiple columns into one. - pub(crate) beta: T, - /// Random offset that's added to the beta-reduced column values. - pub(crate) gamma: T, -} - -impl GrandProductChallenge { - pub(crate) fn combine<'a, FE, P, T: IntoIterator, const D2: usize>( - &self, - terms: T, - ) -> P - where - FE: FieldExtension, - P: PackedField, - T::IntoIter: DoubleEndedIterator, - { - reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma) - } -} - -impl GrandProductChallenge { - pub(crate) fn combine_circuit, const D: usize>( - &self, - builder: &mut CircuitBuilder, - terms: &[ExtensionTarget], - ) -> ExtensionTarget { - let reduced = reduce_with_powers_ext_circuit(builder, terms, self.beta); - let gamma = builder.convert_to_ext(self.gamma); - builder.add_extension(reduced, gamma) - } -} - -impl GrandProductChallenge { - pub(crate) fn combine_base_circuit, const D: usize>( - &self, - builder: &mut CircuitBuilder, - terms: &[Target], - ) -> Target { - let reduced = reduce_with_powers_circuit(builder, terms, self.beta); - builder.add(reduced, self.gamma) - } -} - -/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness. 
-#[derive(Clone, Eq, PartialEq, Debug)] -pub(crate) struct GrandProductChallengeSet { - pub(crate) challenges: Vec>, -} - -impl GrandProductChallengeSet { - pub fn to_buffer(&self, buffer: &mut Vec) -> IoResult<()> { - buffer.write_usize(self.challenges.len())?; - for challenge in &self.challenges { - buffer.write_target(challenge.beta)?; - buffer.write_target(challenge.gamma)?; - } - Ok(()) - } - - pub fn from_buffer(buffer: &mut Buffer) -> IoResult { - let length = buffer.read_usize()?; - let mut challenges = Vec::with_capacity(length); - for _ in 0..length { - challenges.push(GrandProductChallenge { - beta: buffer.read_target()?, - gamma: buffer.read_target()?, - }); - } - - Ok(GrandProductChallengeSet { challenges }) - } -} - -/// Compute all Z polynomials (for permutation arguments). -pub(crate) fn compute_permutation_z_polys( - stark: &S, - config: &StarkConfig, - trace_poly_values: &[PolynomialValues], - permutation_challenge_sets: &[GrandProductChallengeSet], -) -> Vec> -where - F: RichField + Extendable, - S: Stark, -{ - let permutation_pairs = stark.permutation_pairs(); - let permutation_batches = get_permutation_batches( - &permutation_pairs, - permutation_challenge_sets, - config.num_challenges, - stark.permutation_batch_size(), - ); - - permutation_batches - .into_par_iter() - .map(|instances| compute_permutation_z_poly(&instances, trace_poly_values)) - .collect() -} - -/// Compute a single Z polynomial. -fn compute_permutation_z_poly( - instances: &[PermutationInstance], - trace_poly_values: &[PolynomialValues], -) -> PolynomialValues { - let degree = trace_poly_values[0].len(); - let (reduced_lhs_polys, reduced_rhs_polys): (Vec<_>, Vec<_>) = instances - .iter() - .map(|instance| permutation_reduced_polys(instance, trace_poly_values, degree)) - .unzip(); - - let numerator = poly_product_elementwise(reduced_lhs_polys.into_iter()); - let denominator = poly_product_elementwise(reduced_rhs_polys.into_iter()); - - // Compute the quotients. - let denominator_inverses = F::batch_multiplicative_inverse(&denominator.values); - let mut quotients = numerator.values; - batch_multiply_inplace(&mut quotients, &denominator_inverses); - - // Compute Z, which contains partial products of the quotients. - let mut partial_products = Vec::with_capacity(degree); - let mut acc = F::ONE; - for q in quotients { - partial_products.push(acc); - acc *= q; - } - PolynomialValues::new(partial_products) -} - -/// Computes the reduced polynomial, `\sum beta^i f_i(x) + gamma`, for both the "left" and "right" -/// sides of a given `PermutationPair`. -fn permutation_reduced_polys( - instance: &PermutationInstance, - trace_poly_values: &[PolynomialValues], - degree: usize, -) -> (PolynomialValues, PolynomialValues) { - let PermutationInstance { - pair: PermutationPair { column_pairs }, - challenge: GrandProductChallenge { beta, gamma }, - } = instance; - - let mut reduced_lhs = PolynomialValues::constant(*gamma, degree); - let mut reduced_rhs = PolynomialValues::constant(*gamma, degree); - for ((lhs, rhs), weight) in column_pairs.iter().zip(beta.powers()) { - reduced_lhs.add_assign_scaled(&trace_poly_values[*lhs], weight); - reduced_rhs.add_assign_scaled(&trace_poly_values[*rhs], weight); - } - (reduced_lhs, reduced_rhs) -} - -/// Computes the elementwise product of a set of polynomials. Assumes that the set is non-empty and -/// that each polynomial has the same length. 
-fn poly_product_elementwise( - mut polys: impl Iterator>, -) -> PolynomialValues { - let mut product = polys.next().expect("Expected at least one polynomial"); - for poly in polys { - batch_multiply_inplace(&mut product.values, &poly.values) - } - product -} - -fn get_grand_product_challenge>( - challenger: &mut Challenger, -) -> GrandProductChallenge { - let beta = challenger.get_challenge(); - let gamma = challenger.get_challenge(); - GrandProductChallenge { beta, gamma } -} - -pub(crate) fn get_grand_product_challenge_set>( - challenger: &mut Challenger, - num_challenges: usize, -) -> GrandProductChallengeSet { - let challenges = (0..num_challenges) - .map(|_| get_grand_product_challenge(challenger)) - .collect(); - GrandProductChallengeSet { challenges } -} - -pub(crate) fn get_n_grand_product_challenge_sets>( - challenger: &mut Challenger, - num_challenges: usize, - num_sets: usize, -) -> Vec> { - (0..num_sets) - .map(|_| get_grand_product_challenge_set(challenger, num_challenges)) - .collect() -} - -fn get_grand_product_challenge_target< - F: RichField + Extendable, - H: AlgebraicHasher, - const D: usize, ->( - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, -) -> GrandProductChallenge { - let beta = challenger.get_challenge(builder); - let gamma = challenger.get_challenge(builder); - GrandProductChallenge { beta, gamma } -} - -pub(crate) fn get_grand_product_challenge_set_target< - F: RichField + Extendable, - H: AlgebraicHasher, - const D: usize, ->( - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, - num_challenges: usize, -) -> GrandProductChallengeSet { - let challenges = (0..num_challenges) - .map(|_| get_grand_product_challenge_target(builder, challenger)) - .collect(); - GrandProductChallengeSet { challenges } -} - -pub(crate) fn get_n_grand_product_challenge_sets_target< - F: RichField + Extendable, - H: AlgebraicHasher, - const D: usize, ->( - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, - num_challenges: usize, - num_sets: usize, -) -> Vec> { - (0..num_sets) - .map(|_| get_grand_product_challenge_set_target(builder, challenger, num_challenges)) - .collect() -} - -/// Get a list of instances of our batch-permutation argument. These are permutation arguments -/// where the same `Z(x)` polynomial is used to check more than one permutation. -/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we -/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we -/// chunk these arguments based on our batch size. -pub(crate) fn get_permutation_batches<'a, T: Copy + Eq + PartialEq + Debug>( - permutation_pairs: &'a [PermutationPair], - permutation_challenge_sets: &[GrandProductChallengeSet], - num_challenges: usize, - batch_size: usize, -) -> Vec>> { - permutation_pairs - .iter() - .cartesian_product(0..num_challenges) - .chunks(batch_size) - .into_iter() - .map(|batch| { - batch - .enumerate() - .map(|(i, (pair, chal))| { - let challenge = permutation_challenge_sets[i].challenges[chal]; - PermutationInstance { pair, challenge } - }) - .collect_vec() - }) - .collect() -} - -pub struct PermutationCheckVars -where - F: Field, - FE: FieldExtension, - P: PackedField, -{ - pub(crate) local_zs: Vec
<P>,
-    pub(crate) next_zs: Vec<P>,
-    pub(crate) permutation_challenge_sets: Vec<GrandProductChallengeSet<F>>,
-}
-
-pub(crate) fn eval_permutation_checks<F, FE, P, S, const D: usize, const D2: usize>(
-    stark: &S,
-    config: &StarkConfig,
-    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
-    permutation_vars: PermutationCheckVars<F, FE, P, D2>,
-    consumer: &mut ConstraintConsumer<P>
, -) where - F: RichField + Extendable, - FE: FieldExtension, - P: PackedField, - S: Stark, -{ - let PermutationCheckVars { - local_zs, - next_zs, - permutation_challenge_sets, - } = permutation_vars; - - // Check that Z(1) = 1; - for &z in &local_zs { - consumer.constraint_first_row(z - FE::ONE); - } - - let permutation_pairs = stark.permutation_pairs(); - - let permutation_batches = get_permutation_batches( - &permutation_pairs, - &permutation_challenge_sets, - config.num_challenges, - stark.permutation_batch_size(), - ); - - // Each zs value corresponds to a permutation batch. - for (i, instances) in permutation_batches.iter().enumerate() { - // Z(gx) * down = Z x * up - let (reduced_lhs, reduced_rhs): (Vec
<P>, Vec<P>
) = instances - .iter() - .map(|instance| { - let PermutationInstance { - pair: PermutationPair { column_pairs }, - challenge: GrandProductChallenge { beta, gamma }, - } = instance; - let mut factor = ReducingFactor::new(*beta); - let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs - .iter() - .map(|&(i, j)| (vars.local_values[i], vars.local_values[j])) - .unzip(); - ( - factor.reduce_ext(lhs.into_iter()) + FE::from_basefield(*gamma), - factor.reduce_ext(rhs.into_iter()) + FE::from_basefield(*gamma), - ) - }) - .unzip(); - let constraint = next_zs[i] * reduced_rhs.into_iter().product::
<P>()
-            - local_zs[i] * reduced_lhs.into_iter().product::<P>
(); - consumer.constraint(constraint); - } -} - -pub struct PermutationCheckDataTarget { - pub(crate) local_zs: Vec>, - pub(crate) next_zs: Vec>, - pub(crate) permutation_challenge_sets: Vec>, -} - -pub(crate) fn eval_permutation_checks_circuit( - builder: &mut CircuitBuilder, - stark: &S, - config: &StarkConfig, - vars: StarkEvaluationTargets, - permutation_data: PermutationCheckDataTarget, - consumer: &mut RecursiveConstraintConsumer, -) where - F: RichField + Extendable, - S: Stark, - [(); S::COLUMNS]:, -{ - let PermutationCheckDataTarget { - local_zs, - next_zs, - permutation_challenge_sets, - } = permutation_data; - - let one = builder.one_extension(); - // Check that Z(1) = 1; - for &z in &local_zs { - let z_1 = builder.sub_extension(z, one); - consumer.constraint_first_row(builder, z_1); - } - - let permutation_pairs = stark.permutation_pairs(); - - let permutation_batches = get_permutation_batches( - &permutation_pairs, - &permutation_challenge_sets, - config.num_challenges, - stark.permutation_batch_size(), - ); - - // Each zs value corresponds to a permutation batch. - for (i, instances) in permutation_batches.iter().enumerate() { - let (reduced_lhs, reduced_rhs): (Vec>, Vec>) = - instances - .iter() - .map(|instance| { - let PermutationInstance { - pair: PermutationPair { column_pairs }, - challenge: GrandProductChallenge { beta, gamma }, - } = instance; - let beta_ext = builder.convert_to_ext(*beta); - let gamma_ext = builder.convert_to_ext(*gamma); - let mut factor = ReducingFactorTarget::new(beta_ext); - let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs - .iter() - .map(|&(i, j)| (vars.local_values[i], vars.local_values[j])) - .unzip(); - let reduced_lhs = factor.reduce(&lhs, builder); - let reduced_rhs = factor.reduce(&rhs, builder); - ( - builder.add_extension(reduced_lhs, gamma_ext), - builder.add_extension(reduced_rhs, gamma_ext), - ) - }) - .unzip(); - let reduced_lhs_product = builder.mul_many_extension(reduced_lhs); - let reduced_rhs_product = builder.mul_many_extension(reduced_rhs); - // constraint = next_zs[i] * reduced_rhs_product - local_zs[i] * reduced_lhs_product - let constraint = { - let tmp = builder.mul_extension(local_zs[i], reduced_lhs_product); - builder.mul_sub_extension(next_zs[i], reduced_rhs_product, tmp) - }; - consumer.constraint(builder, constraint) - } -} diff --git a/evm/src/proof.rs b/evm/src/proof.rs index 14f22b67..23446ac4 100644 --- a/evm/src/proof.rs +++ b/evm/src/proof.rs @@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize}; use crate::all_stark::NUM_TABLES; use crate::config::StarkConfig; -use crate::permutation::GrandProductChallengeSet; +use crate::cross_table_lookup::GrandProductChallengeSet; /// A STARK proof for each table, plus some metadata used to create recursive wrapper proofs. #[derive(Debug, Clone)] @@ -588,8 +588,8 @@ impl ExtraBlockDataTarget { pub struct StarkProof, C: GenericConfig, const D: usize> { /// Merkle cap of LDEs of trace values. pub trace_cap: MerkleCap, - /// Merkle cap of LDEs of permutation Z values. - pub permutation_ctl_zs_cap: MerkleCap, + /// Merkle cap of LDEs of lookup helper and CTL columns. + pub auxiliary_polys_cap: MerkleCap, /// Merkle cap of LDEs of quotient polynomial evaluations. pub quotient_polys_cap: MerkleCap, /// Purported values of each polynomial at the challenge point. 
diff --git a/evm/src/proof.rs b/evm/src/proof.rs
index 14f22b67..23446ac4 100644
--- a/evm/src/proof.rs
+++ b/evm/src/proof.rs
@@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize};
 use crate::all_stark::NUM_TABLES;
 use crate::config::StarkConfig;
-use crate::permutation::GrandProductChallengeSet;
+use crate::cross_table_lookup::GrandProductChallengeSet;
 
 /// A STARK proof for each table, plus some metadata used to create recursive wrapper proofs.
 #[derive(Debug, Clone)]
@@ -588,8 +588,8 @@ impl ExtraBlockDataTarget {
 pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
     /// Merkle cap of LDEs of trace values.
     pub trace_cap: MerkleCap<F, C::Hasher>,
-    /// Merkle cap of LDEs of permutation Z values.
-    pub permutation_ctl_zs_cap: MerkleCap<F, C::Hasher>,
+    /// Merkle cap of LDEs of lookup helper and CTL columns.
+    pub auxiliary_polys_cap: MerkleCap<F, C::Hasher>,
     /// Merkle cap of LDEs of quotient polynomial evaluations.
     pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
     /// Purported values of each polynomial at the challenge point.
@@ -630,7 +630,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> S
 #[derive(Eq, PartialEq, Debug)]
 pub struct StarkProofTarget<const D: usize> {
     pub trace_cap: MerkleCapTarget,
-    pub permutation_ctl_zs_cap: MerkleCapTarget,
+    pub auxiliary_polys_cap: MerkleCapTarget,
     pub quotient_polys_cap: MerkleCapTarget,
     pub openings: StarkOpeningSetTarget<D>,
     pub opening_proof: FriProofTarget<D>,
@@ -639,7 +639,7 @@ pub struct StarkProofTarget<const D: usize> {
 impl<const D: usize> StarkProofTarget<D> {
     pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
         buffer.write_target_merkle_cap(&self.trace_cap)?;
-        buffer.write_target_merkle_cap(&self.permutation_ctl_zs_cap)?;
+        buffer.write_target_merkle_cap(&self.auxiliary_polys_cap)?;
         buffer.write_target_merkle_cap(&self.quotient_polys_cap)?;
         buffer.write_target_fri_proof(&self.opening_proof)?;
         self.openings.to_buffer(buffer)?;
@@ -648,14 +648,14 @@ impl<const D: usize> StarkProofTarget<D> {
     pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
         let trace_cap = buffer.read_target_merkle_cap()?;
-        let permutation_ctl_zs_cap = buffer.read_target_merkle_cap()?;
+        let auxiliary_polys_cap = buffer.read_target_merkle_cap()?;
         let quotient_polys_cap = buffer.read_target_merkle_cap()?;
         let opening_proof = buffer.read_target_fri_proof()?;
         let openings = StarkOpeningSetTarget::from_buffer(buffer)?;
 
         Ok(Self {
             trace_cap,
-            permutation_ctl_zs_cap,
+            auxiliary_polys_cap,
             quotient_polys_cap,
             openings,
             opening_proof,
@@ -674,8 +674,8 @@ impl<const D: usize> StarkProofTarget<D> {
 }
 
 pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
-    /// Randomness used in any permutation arguments.
-    pub permutation_challenge_sets: Option<Vec<GrandProductChallengeSet<F>>>,
+    /// Randomness used in lookup arguments.
+    pub lookup_challenges: Option<Vec<F>>,
 
     /// Random values used to combine STARK constraints.
     pub stark_alphas: Vec<F>,
@@ -687,7 +687,7 @@ pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
 }
 
 pub(crate) struct StarkProofChallengesTarget<const D: usize> {
-    pub permutation_challenge_sets: Option<Vec<GrandProductChallengeSet<Target>>>,
+    pub lookup_challenges: Option<Vec<Target>>,
     pub stark_alphas: Vec<Target>,
     pub stark_zeta: ExtensionTarget<D>,
     pub fri_challenges: FriChallengesTarget<D>,
@@ -700,10 +700,10 @@ pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
     pub local_values: Vec<F::Extension>,
     /// Openings of trace polynomials at `g * zeta`.
     pub next_values: Vec<F::Extension>,
-    /// Openings of permutations and cross-table lookups `Z` polynomials at `zeta`.
-    pub permutation_ctl_zs: Vec<F::Extension>,
-    /// Openings of permutations and cross-table lookups `Z` polynomials at `g * zeta`.
-    pub permutation_ctl_zs_next: Vec<F::Extension>,
+    /// Openings of lookup and cross-table lookup `Z` polynomials at `zeta`.
+    pub auxiliary_polys: Vec<F::Extension>,
+    /// Openings of lookup and cross-table lookup `Z` polynomials at `g * zeta`.
+    pub auxiliary_polys_next: Vec<F::Extension>,
     /// Openings of cross-table lookups `Z` polynomials at `g^-1`.
     pub ctl_zs_last: Vec<F>,
     /// Openings of quotient polynomials at `zeta`.
@@ -715,10 +715,10 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
         zeta: F::Extension,
         g: F,
         trace_commitment: &PolynomialBatch<F, C, D>,
-        permutation_ctl_zs_commitment: &PolynomialBatch<F, C, D>,
+        auxiliary_polys_commitment: &PolynomialBatch<F, C, D>,
         quotient_commitment: &PolynomialBatch<F, C, D>,
         degree_bits: usize,
-        num_permutation_zs: usize,
+        num_lookup_columns: usize,
     ) -> Self {
         let eval_commitment = |z: F::Extension, c: &PolynomialBatch<F, C, D>| {
             c.polynomials
@@ -736,12 +736,12 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
         Self {
             local_values: eval_commitment(zeta, trace_commitment),
             next_values: eval_commitment(zeta_next, trace_commitment),
-            permutation_ctl_zs: eval_commitment(zeta, permutation_ctl_zs_commitment),
-            permutation_ctl_zs_next: eval_commitment(zeta_next, permutation_ctl_zs_commitment),
+            auxiliary_polys: eval_commitment(zeta, auxiliary_polys_commitment),
+            auxiliary_polys_next: eval_commitment(zeta_next, auxiliary_polys_commitment),
             ctl_zs_last: eval_commitment_base(
                 F::primitive_root_of_unity(degree_bits).inverse(),
-                permutation_ctl_zs_commitment,
-            )[num_permutation_zs..]
+                auxiliary_polys_commitment,
+            )[num_lookup_columns..]
             .to_vec(),
             quotient_polys: eval_commitment(zeta, quotient_commitment),
         }
@@ -752,7 +752,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
             values: self
                 .local_values
                 .iter()
-                .chain(&self.permutation_ctl_zs)
+                .chain(&self.auxiliary_polys)
                 .chain(&self.quotient_polys)
                 .copied()
                 .collect_vec(),
@@ -761,7 +761,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
             values: self
                 .next_values
                 .iter()
-                .chain(&self.permutation_ctl_zs_next)
+                .chain(&self.auxiliary_polys_next)
                 .copied()
                 .collect_vec(),
         };
@@ -785,8 +785,8 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
 pub struct StarkOpeningSetTarget<const D: usize> {
     pub local_values: Vec<ExtensionTarget<D>>,
     pub next_values: Vec<ExtensionTarget<D>>,
-    pub permutation_ctl_zs: Vec<ExtensionTarget<D>>,
-    pub permutation_ctl_zs_next: Vec<ExtensionTarget<D>>,
+    pub auxiliary_polys: Vec<ExtensionTarget<D>>,
+    pub auxiliary_polys_next: Vec<ExtensionTarget<D>>,
     pub ctl_zs_last: Vec<Target>,
     pub quotient_polys: Vec<ExtensionTarget<D>>,
 }
@@ -795,8 +795,8 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
     pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
         buffer.write_target_ext_vec(&self.local_values)?;
         buffer.write_target_ext_vec(&self.next_values)?;
-        buffer.write_target_ext_vec(&self.permutation_ctl_zs)?;
-        buffer.write_target_ext_vec(&self.permutation_ctl_zs_next)?;
+        buffer.write_target_ext_vec(&self.auxiliary_polys)?;
+        buffer.write_target_ext_vec(&self.auxiliary_polys_next)?;
         buffer.write_target_vec(&self.ctl_zs_last)?;
         buffer.write_target_ext_vec(&self.quotient_polys)?;
         Ok(())
@@ -805,16 +805,16 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
     pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
         let local_values = buffer.read_target_ext_vec::<D>()?;
         let next_values = buffer.read_target_ext_vec::<D>()?;
-        let permutation_ctl_zs = buffer.read_target_ext_vec::<D>()?;
-        let permutation_ctl_zs_next = buffer.read_target_ext_vec::<D>()?;
+        let auxiliary_polys = buffer.read_target_ext_vec::<D>()?;
+        let auxiliary_polys_next = buffer.read_target_ext_vec::<D>()?;
         let ctl_zs_last = buffer.read_target_vec()?;
         let quotient_polys = buffer.read_target_ext_vec::<D>()?;
         Ok(Self {
             local_values,
             next_values,
-            permutation_ctl_zs,
-            permutation_ctl_zs_next,
+            auxiliary_polys,
+            auxiliary_polys_next,
             ctl_zs_last,
             quotient_polys,
         })
@@ -825,7 +825,7 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
             values: self
                 .local_values
                 .iter()
-                .chain(&self.permutation_ctl_zs)
+                .chain(&self.auxiliary_polys)
                 .chain(&self.quotient_polys)
                 .copied()
                 .collect_vec(),
@@ -834,7 +834,7 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
             values: self
                 .next_values
                 .iter()
-                .chain(&self.auxiliary_polys_next)
                .copied()
                .collect_vec(),
        };
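A convention worth calling out in the renamed fields above: the single auxiliary oracle always stores the lookup helper columns first and the CTL Z columns after them, which is why `ctl_zs_last` is sliced from index `num_lookup_columns` onward. A hedged restatement of that layout (`split_auxiliary` is a hypothetical helper, not part of the patch):

    /// Hypothetical illustration: the auxiliary oracle is ordered as
    /// [lookup helper columns | CTL Z columns], so one split recovers both.
    fn split_auxiliary<T>(auxiliary_polys: &[T], num_lookup_columns: usize) -> (&[T], &[T]) {
        auxiliary_polys.split_at(num_lookup_columns)
    }

    fn main() {
        // Pretend openings: 3 lookup helper values followed by 2 CTL Z values.
        let aux = ["h0", "h1", "z_lookup", "z_ctl0", "z_ctl1"];
        let (lookup_part, ctl_part) = split_auxiliary(&aux, 3);
        assert_eq!(lookup_part, ["h0", "h1", "z_lookup"]);
        assert_eq!(ctl_part, ["z_ctl0", "z_ctl1"]);
    }

The prover, verifier, and recursive verifier below all rely on this ordering when they index with `[..num_lookup_columns]` and `[num_lookup_columns + i]`.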
diff --git a/evm/src/prover.rs b/evm/src/prover.rs
index 8f587823..7a8439db 100644
--- a/evm/src/prover.rs
+++ b/evm/src/prover.rs
@@ -26,18 +26,17 @@ use crate::config::StarkConfig;
 use crate::constraint_consumer::ConstraintConsumer;
 use crate::cpu::cpu_stark::CpuStark;
 use crate::cpu::kernel::aggregator::KERNEL;
-use crate::cross_table_lookup::{cross_table_lookup_data, CtlCheckVars, CtlData};
+use crate::cross_table_lookup::{
+    cross_table_lookup_data, get_grand_product_challenge_set, CtlCheckVars, CtlData,
+};
 use crate::generation::outputs::GenerationOutputs;
 use crate::generation::{generate_traces, GenerationInputs};
 use crate::get_challenges::observe_public_values;
 use crate::keccak::keccak_stark::KeccakStark;
 use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
 use crate::logic::LogicStark;
+use crate::lookup::{lookup_helper_columns, Lookup, LookupCheckVars};
 use crate::memory::memory_stark::MemoryStark;
-use crate::permutation::{
-    compute_permutation_z_polys, get_grand_product_challenge_set,
-    get_n_grand_product_challenge_sets, GrandProductChallengeSet, PermutationCheckVars,
-};
 use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof, StarkProofWithMetadata};
 use crate::stark::Stark;
 use crate::vanishing_poly::eval_vanishing_poly;
@@ -335,37 +334,45 @@ where
     let init_challenger_state = challenger.compact();
 
-    // Permutation arguments.
-    let permutation_challenges = stark.uses_permutation_args().then(|| {
-        get_n_grand_product_challenge_sets(
-            challenger,
-            config.num_challenges,
-            stark.permutation_batch_size(),
-        )
-    });
-    let permutation_zs = permutation_challenges.as_ref().map(|challenges| {
-        timed!(
-            timing,
-            "compute permutation Z(x) polys",
-            compute_permutation_z_polys::<F, C, S, D>(stark, config, trace_poly_values, challenges)
-        )
-    });
-    let num_permutation_zs = permutation_zs.as_ref().map(|v| v.len()).unwrap_or(0);
+    let constraint_degree = stark.constraint_degree();
+    let lookup_challenges = stark
+        .uses_lookups()
+        .then(|| challenger.get_n_challenges(config.num_challenges));
+    let lookups = stark.lookups();
+    let lookup_helper_columns = timed!(
+        timing,
+        "compute lookup helper columns",
+        lookup_challenges.as_ref().map(|challenges| {
+            let mut columns = Vec::new();
+            for lookup in &lookups {
+                for &challenge in challenges {
+                    columns.extend(lookup_helper_columns(
+                        lookup,
+                        trace_poly_values,
+                        challenge,
+                        constraint_degree,
+                    ));
+                }
+            }
+            columns
+        })
+    );
+    let num_lookup_columns = lookup_helper_columns.as_ref().map(|v| v.len()).unwrap_or(0);
 
-    let z_polys = match permutation_zs {
+    let auxiliary_polys = match lookup_helper_columns {
         None => ctl_data.z_polys(),
-        Some(mut permutation_zs) => {
-            permutation_zs.extend(ctl_data.z_polys());
-            permutation_zs
+        Some(mut lookup_columns) => {
+            lookup_columns.extend(ctl_data.z_polys());
+            lookup_columns
         }
     };
-    assert!(!z_polys.is_empty(), "No CTL?");
+    assert!(!auxiliary_polys.is_empty(), "No CTL?");
 
-    let permutation_ctl_zs_commitment = timed!(
+    let auxiliary_polys_commitment = timed!(
         timing,
-        "compute Zs commitment",
+        "compute auxiliary polynomials commitment",
         PolynomialBatch::from_values(
-            z_polys,
+            auxiliary_polys,
             rate_bits,
             false,
             config.fri_config.cap_height,
             timing,
             None,
         )
     );
 
-    let permutation_ctl_zs_cap = permutation_ctl_zs_commitment.merkle_tree.cap.clone();
-    challenger.observe_cap(&permutation_ctl_zs_cap);
+    let auxiliary_polys_cap = auxiliary_polys_commitment.merkle_tree.cap.clone();
+    challenger.observe_cap(&auxiliary_polys_cap);
 
     let alphas = challenger.get_n_challenges(config.num_challenges);
     if cfg!(test) {
         check_constraints(
             stark,
             trace_commitment,
-            &permutation_ctl_zs_commitment,
-            permutation_challenges.as_ref(),
+            &auxiliary_polys_commitment,
+            lookup_challenges.as_ref(),
+            &lookups,
             ctl_data,
             alphas.clone(),
             degree_bits,
-            num_permutation_zs,
-            config,
+            num_lookup_columns,
         );
     }
     let quotient_polys = timed!(
@@ -397,12 +404,13 @@ where
         compute_quotient_polys::<F, <F as Packable>::Packing, C, S, D>(
             stark,
             trace_commitment,
-            &permutation_ctl_zs_commitment,
-            permutation_challenges.as_ref(),
+            &auxiliary_polys_commitment,
+            lookup_challenges.as_ref(),
+            &lookups,
             ctl_data,
             alphas,
             degree_bits,
-            num_permutation_zs,
+            num_lookup_columns,
             config,
         )
     );
@@ -451,16 +459,16 @@ where
         zeta,
         g,
         trace_commitment,
-        &permutation_ctl_zs_commitment,
+        &auxiliary_polys_commitment,
         &quotient_commitment,
         degree_bits,
-        stark.num_permutation_batches(config),
+        stark.num_lookup_helper_columns(config),
     );
     challenger.observe_openings(&openings.to_fri_openings());
 
     let initial_merkle_trees = vec![
         trace_commitment,
-        &permutation_ctl_zs_commitment,
+        &auxiliary_polys_commitment,
         &quotient_commitment,
     ];
 
@@ -478,7 +486,7 @@ where
     let proof = StarkProof {
         trace_cap: trace_commitment.merkle_tree.cap.clone(),
-        permutation_ctl_zs_cap,
+        auxiliary_polys_cap,
         quotient_polys_cap,
         openings,
         opening_proof,
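In `prove_single_table` above, every `Lookup` contributes one batch of helper columns per challenge. Assuming helper columns come in groups of `constraint_degree - 1` looked-up columns plus one running-sum column, as the call to `Lookup::num_helper_columns` elsewhere in the patch suggests, the auxiliary column count can be estimated as follows (a stand-alone sketch under that assumption, not the crate's method):

    /// Stand-in for Lookup::num_helper_columns under the stated assumption:
    /// ceil(columns / (degree - 1)) reciprocal-sum helpers plus one running sum.
    fn num_helper_columns(num_looked_columns: usize, constraint_degree: usize) -> usize {
        let batch = constraint_degree - 1; // looked-up columns batched per helper
        (num_looked_columns + batch - 1) / batch + 1
    }

    fn main() {
        // E.g. a degree-3 STARK range-checking 12 shared columns, with 2 challenges:
        let per_challenge = num_helper_columns(12, 3); // 6 helpers + 1 running sum
        assert_eq!(per_challenge, 7);
        assert_eq!(per_challenge * 2, 14); // num_lookup_helper_columns-style total
    }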
@@ -494,12 +502,13 @@ where
 fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
     stark: &S,
     trace_commitment: &'a PolynomialBatch<F, C, D>,
-    permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
-    permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
+    auxiliary_polys_commitment: &'a PolynomialBatch<F, C, D>,
+    lookup_challenges: Option<&'a Vec<F>>,
+    lookups: &[Lookup],
     ctl_data: &CtlData<F>,
     alphas: Vec<F>,
     degree_bits: usize,
-    num_permutation_zs: usize,
+    num_lookup_columns: usize,
     config: &StarkConfig,
 ) -> Vec<PolynomialCoeffs<F>>
 where
@@ -570,25 +579,22 @@ where
                 local_values: &get_trace_values_packed(i_start),
                 next_values: &get_trace_values_packed(i_next_start),
             };
-            let permutation_check_vars =
-                permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
-                    local_zs: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
-                        [..num_permutation_zs]
-                        .to_vec(),
-                    next_zs: permutation_ctl_zs_commitment
-                        .get_lde_values_packed(i_next_start, step)[..num_permutation_zs]
-                        .to_vec(),
-                    permutation_challenge_sets: permutation_challenge_sets.to_vec(),
-                });
+            let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
+                local_values: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
+                    [..num_lookup_columns]
+                    .to_vec(),
+                next_values: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step),
+                challenges: challenges.to_vec(),
+            });
             let ctl_vars = ctl_data
                 .zs_columns
                 .iter()
                 .enumerate()
                 .map(|(i, zs_columns)| CtlCheckVars::<F, F, P, 1> {
-                    local_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
-                        [num_permutation_zs + i],
-                    next_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_next_start, step)
-                        [num_permutation_zs + i],
+                    local_z: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
+                        [num_lookup_columns + i],
+                    next_z: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step)
+                        [num_lookup_columns + i],
                     challenges: zs_columns.challenge,
                     columns: &zs_columns.columns,
                     filter_column: &zs_columns.filter_column,
                 })
                 .collect::<Vec<_>>();
             eval_vanishing_poly::<F, F, P, S, D, 1>(
                 stark,
-                config,
                 vars,
-                permutation_check_vars,
+                lookups,
+                lookup_vars,
                 &ctl_vars,
                 &mut consumer,
             );
@@ -631,13 +637,13 @@ where
 fn check_constraints<'a, F, C, S, const D: usize>(
     stark: &S,
     trace_commitment: &'a PolynomialBatch<F, C, D>,
-    permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
-    permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
+    auxiliary_commitment: &'a PolynomialBatch<F, C, D>,
+    lookup_challenges: Option<&'a Vec<F>>,
+    lookups: &[Lookup],
     ctl_data: &CtlData<F>,
     alphas: Vec<F>,
     degree_bits: usize,
-    num_permutation_zs: usize,
-    config: &StarkConfig,
+    num_lookup_columns: usize,
 ) where
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     S: Stark<F, D>,
@@ -668,7 +674,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
     };
 
     let trace_subgroup_evals = get_subgroup_evals(trace_commitment);
-    let permutation_ctl_zs_subgroup_evals = get_subgroup_evals(permutation_ctl_zs_commitment);
+    let auxiliary_subgroup_evals = get_subgroup_evals(auxiliary_commitment);
 
     // Last element of the subgroup.
     let last = F::primitive_root_of_unity(degree_bits).inverse();
@@ -692,21 +698,19 @@ fn check_constraints<'a, F, C, S, const D: usize>(
                 local_values: trace_subgroup_evals[i].as_slice().try_into().unwrap(),
                 next_values: trace_subgroup_evals[i_next].as_slice().try_into().unwrap(),
             };
-            let permutation_check_vars =
-                permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
-                    local_zs: permutation_ctl_zs_subgroup_evals[i][..num_permutation_zs].to_vec(),
-                    next_zs: permutation_ctl_zs_subgroup_evals[i_next][..num_permutation_zs]
-                        .to_vec(),
-                    permutation_challenge_sets: permutation_challenge_sets.to_vec(),
-                });
+            let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
+                local_values: auxiliary_subgroup_evals[i][..num_lookup_columns].to_vec(),
+                next_values: auxiliary_subgroup_evals[i_next][..num_lookup_columns].to_vec(),
+                challenges: challenges.to_vec(),
+            });
             let ctl_vars = ctl_data
                 .zs_columns
                 .iter()
                 .enumerate()
                 .map(|(iii, zs_columns)| CtlCheckVars::<F, F, F, 1> {
-                    local_z: permutation_ctl_zs_subgroup_evals[i][num_permutation_zs + iii],
-                    next_z: permutation_ctl_zs_subgroup_evals[i_next][num_permutation_zs + iii],
+                    local_z: auxiliary_subgroup_evals[i][num_lookup_columns + iii],
+                    next_z: auxiliary_subgroup_evals[i_next][num_lookup_columns + iii],
                     challenges: zs_columns.challenge,
                     columns: &zs_columns.columns,
                     filter_column: &zs_columns.filter_column,
                 })
                 .collect::<Vec<_>>();
@@ -714,9 +718,9 @@ fn check_constraints<'a, F, C, S, const D: usize>(
             eval_vanishing_poly::<F, F, F, S, D, 1>(
                 stark,
-                config,
                 vars,
-                permutation_check_vars,
+                lookups,
+                lookup_vars,
                 &ctl_vars,
                 &mut consumer,
             );
diff --git a/evm/src/recursive_verifier.rs b/evm/src/recursive_verifier.rs
index e669f4ab..45cc0c48 100644
--- a/evm/src/recursive_verifier.rs
+++ b/evm/src/recursive_verifier.rs
@@ -29,13 +29,13 @@ use crate::all_stark::{Table, NUM_TABLES};
 use crate::config::StarkConfig;
 use crate::constraint_consumer::RecursiveConstraintConsumer;
 use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
-use crate::cross_table_lookup::{verify_cross_table_lookups, CrossTableLookup, CtlCheckVarsTarget};
+use crate::cross_table_lookup::{
+    get_grand_product_challenge_set, verify_cross_table_lookups, CrossTableLookup,
+    CtlCheckVarsTarget, GrandProductChallenge, GrandProductChallengeSet,
+};
+use crate::lookup::LookupCheckVarsTarget;
 use crate::memory::segments::Segment;
 use crate::memory::VALUE_LIMBS;
-use crate::permutation::{
-    get_grand_product_challenge_set, GrandProductChallenge, GrandProductChallengeSet,
-    PermutationCheckDataTarget,
-};
 use crate::proof::{
     BlockHashes, BlockHashesTarget, BlockMetadata, BlockMetadataTarget, ExtraBlockData,
     ExtraBlockDataTarget, PublicValues, PublicValuesTarget, StarkOpeningSetTarget, StarkProof,
@@ -302,8 +302,7 @@ where
     let mut builder = CircuitBuilder::<F, D>::new(circuit_config.clone());
     let zero_target = builder.zero();
 
-    let num_permutation_zs = stark.num_permutation_batches(inner_config);
-    let num_permutation_batch_size = stark.permutation_batch_size();
+    let num_lookup_columns = stark.num_lookup_helper_columns(inner_config);
     let num_ctl_zs =
         CrossTableLookup::num_ctl_zs(cross_table_lookups, table, inner_config.num_challenges);
     let proof_target =
@@ -331,7 +330,7 @@ where
         &proof_target,
         cross_table_lookups,
         &ctl_challenges_target,
-        num_permutation_zs,
+        num_lookup_columns,
     );
 
     let init_challenger_state_target =
@@ -343,8 +342,7 @@ where
     let challenges = proof_target.get_challenges::<F, C>(
         &mut builder,
         &mut challenger,
-        num_permutation_zs > 0,
-        num_permutation_batch_size,
+        num_lookup_columns > 0,
         inner_config,
     );
     let challenger_state = challenger.compact(&mut builder);
@@ -412,8 +410,8 @@ fn verify_stark_proof_with_challenges_circuit<
     let StarkOpeningSetTarget {
         local_values,
         next_values,
-        permutation_ctl_zs,
-        permutation_ctl_zs_next,
+        auxiliary_polys,
+        auxiliary_polys_next,
         ctl_zs_last,
         quotient_polys,
     } = &proof.openings;
@@ -439,14 +437,12 @@ fn verify_stark_proof_with_challenges_circuit<
         l_last,
     );
 
-    let num_permutation_zs = stark.num_permutation_batches(inner_config);
-    let permutation_data = stark
-        .uses_permutation_args()
-        .then(|| PermutationCheckDataTarget {
-            local_zs: permutation_ctl_zs[..num_permutation_zs].to_vec(),
-            next_zs: permutation_ctl_zs_next[..num_permutation_zs].to_vec(),
-            permutation_challenge_sets: challenges.permutation_challenge_sets.clone().unwrap(),
-        });
+    let num_lookup_columns = stark.num_lookup_helper_columns(inner_config);
+    let lookup_vars = stark.uses_lookups().then(|| LookupCheckVarsTarget {
+        local_values: auxiliary_polys[..num_lookup_columns].to_vec(),
+        next_values: auxiliary_polys_next[..num_lookup_columns].to_vec(),
+        challenges: challenges.lookup_challenges.clone().unwrap(),
+    });
 
     with_context!(
         builder,
@@ -454,9 +450,8 @@ fn verify_stark_proof_with_challenges_circuit<
         eval_vanishing_poly_circuit::<F, S, D>(
             builder,
             stark,
-            inner_config,
             vars,
-            permutation_data,
+            lookup_vars,
             ctl_vars,
             &mut consumer,
         )
     );
@@ -476,7 +471,7 @@ fn verify_stark_proof_with_challenges_circuit<
 
     let merkle_caps = vec![
         proof.trace_cap.clone(),
-        proof.permutation_ctl_zs_cap.clone(),
+        proof.auxiliary_polys_cap.clone(),
         proof.quotient_polys_cap.clone(),
     ];
 
@@ -840,15 +835,15 @@ pub(crate) fn add_virtual_stark_proof<
     let num_leaves_per_oracle = vec![
         S::COLUMNS,
-        stark.num_permutation_batches(config) + num_ctl_zs,
+        stark.num_lookup_helper_columns(config) + num_ctl_zs,
         stark.quotient_degree_factor() * config.num_challenges,
     ];
 
-    let permutation_zs_cap = builder.add_virtual_cap(cap_height);
+    let auxiliary_polys_cap = builder.add_virtual_cap(cap_height);
 
     StarkProofTarget {
         trace_cap: builder.add_virtual_cap(cap_height),
-        permutation_ctl_zs_cap: permutation_zs_cap,
+        auxiliary_polys_cap,
         quotient_polys_cap: builder.add_virtual_cap(cap_height),
         openings: add_virtual_stark_opening_set::<F, S, D>(builder, stark, num_ctl_zs, config),
         opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params),
     }
 }
@@ -865,10 +860,10 @@ fn add_virtual_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
     StarkOpeningSetTarget {
         local_values: builder.add_virtual_extension_targets(S::COLUMNS),
         next_values: builder.add_virtual_extension_targets(S::COLUMNS),
-        permutation_ctl_zs: builder
-            .add_virtual_extension_targets(stark.num_permutation_batches(config) + num_ctl_zs),
-        permutation_ctl_zs_next: builder
-            .add_virtual_extension_targets(stark.num_permutation_batches(config) + num_ctl_zs),
+        auxiliary_polys: builder
+            .add_virtual_extension_targets(stark.num_lookup_helper_columns(config) + num_ctl_zs),
+        auxiliary_polys_next: builder
+            .add_virtual_extension_targets(stark.num_lookup_helper_columns(config) + num_ctl_zs),
         ctl_zs_last: builder.add_virtual_targets(num_ctl_zs),
         quotient_polys: builder
             .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
     }
 }
@@ -894,8 +889,8 @@ pub(crate) fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
     );
 
     witness.set_cap_target(
-        &proof_target.permutation_ctl_zs_cap,
-        &proof.permutation_ctl_zs_cap,
+        &proof_target.auxiliary_polys_cap,
+        &proof.auxiliary_polys_cap,
     );
 
     set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);
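`num_leaves_per_oracle` above pins down the widths of the three committed oracles per table, and the same arithmetic reappears in `stark.rs` below. A hedged stand-alone restatement (function name and signature are illustrative, not the crate's API):

    /// Illustrative restatement of the three oracle widths committed per table:
    /// trace, auxiliary (lookup helpers ++ CTL Zs), and quotient.
    fn oracle_widths(
        num_columns: usize,       // S::COLUMNS
        num_lookup_helper: usize, // stark.num_lookup_helper_columns(config)
        num_ctl_zs: usize,
        quotient_degree_factor: usize,
        num_challenges: usize,
    ) -> [usize; 3] {
        [
            num_columns,
            num_lookup_helper + num_ctl_zs,
            quotient_degree_factor * num_challenges,
        ]
    }

    fn main() {
        // E.g. a 100-column table, 14 helper columns, 4 CTL Zs, degree factor 3, 2 challenges:
        assert_eq!(oracle_widths(100, 14, 4, 3, 2), [100, 18, 6]);
    }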
diff --git a/evm/src/stark.rs b/evm/src/stark.rs
index 72cee0ad..4e76017d 100644
--- a/evm/src/stark.rs
+++ b/evm/src/stark.rs
@@ -8,15 +8,14 @@ use plonky2::fri::structure::{
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
-use plonky2_util::ceil_div_usize;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::permutation::PermutationPair;
+use crate::lookup::Lookup;
 use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
 
 const TRACE_ORACLE_INDEX: usize = 0;
-const PERMUTATION_CTL_ORACLE_INDEX: usize = 1;
+const AUXILIARY_ORACLE_INDEX: usize = 1;
 const QUOTIENT_ORACLE_INDEX: usize = 2;
 
 /// Represents a STARK system.
@@ -94,20 +93,18 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         };
         let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);
 
-        let num_permutation_batches = self.num_permutation_batches(config);
-        let num_perutation_ctl_polys = num_permutation_batches + num_ctl_zs;
-        let permutation_ctl_oracle = FriOracleInfo {
-            num_polys: num_perutation_ctl_polys,
+        let num_lookup_columns = self.num_lookup_helper_columns(config);
+        let num_auxiliary_polys = num_lookup_columns + num_ctl_zs;
+        let auxiliary_oracle = FriOracleInfo {
+            num_polys: num_auxiliary_polys,
             blinding: false,
         };
-        let permutation_ctl_zs_info = FriPolynomialInfo::from_range(
-            PERMUTATION_CTL_ORACLE_INDEX,
-            0..num_perutation_ctl_polys,
-        );
+        let auxiliary_polys_info =
+            FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);
         let ctl_zs_info = FriPolynomialInfo::from_range(
-            PERMUTATION_CTL_ORACLE_INDEX,
-            num_permutation_batches..num_permutation_batches + num_ctl_zs,
+            AUXILIARY_ORACLE_INDEX,
+            num_lookup_columns..num_lookup_columns + num_ctl_zs,
         );
         let num_quotient_polys = self.num_quotient_polys(config);
@@ -122,21 +119,21 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
             point: zeta,
             polynomials: [
                 trace_info.clone(),
-                permutation_ctl_zs_info.clone(),
+                auxiliary_polys_info.clone(),
                 quotient_info,
             ]
             .concat(),
         };
         let zeta_next_batch = FriBatchInfo {
             point: zeta.scalar_mul(g),
-            polynomials: [trace_info, permutation_ctl_zs_info].concat(),
+            polynomials: [trace_info, auxiliary_polys_info].concat(),
         };
         let ctl_last_batch = FriBatchInfo {
             point: F::Extension::primitive_root_of_unity(degree_bits).inverse(),
             polynomials: ctl_zs_info,
         };
         FriInstanceInfo {
-            oracles: vec![trace_oracle, permutation_ctl_oracle, quotient_oracle],
+            oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
             batches: vec![zeta_batch, zeta_next_batch, ctl_last_batch],
         }
     }
@@ -157,20 +154,18 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         };
         let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);
 
-        let num_permutation_batches = self.num_permutation_batches(inner_config);
-        let num_perutation_ctl_polys = num_permutation_batches + num_ctl_zs;
-        let permutation_ctl_oracle = FriOracleInfo {
-            num_polys: num_perutation_ctl_polys,
+        let num_lookup_columns = self.num_lookup_helper_columns(inner_config);
+        let num_auxiliary_polys = num_lookup_columns + num_ctl_zs;
+        let auxiliary_oracle = FriOracleInfo {
+            num_polys: num_auxiliary_polys,
             blinding: false,
         };
-        let permutation_ctl_zs_info = FriPolynomialInfo::from_range(
-            PERMUTATION_CTL_ORACLE_INDEX,
-            0..num_perutation_ctl_polys,
-        );
+        let auxiliary_polys_info =
+            FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);
         let ctl_zs_info = FriPolynomialInfo::from_range(
-            PERMUTATION_CTL_ORACLE_INDEX,
-            num_permutation_batches..num_permutation_batches + num_ctl_zs,
+            AUXILIARY_ORACLE_INDEX,
+            num_lookup_columns..num_lookup_columns + num_ctl_zs,
        );
         let num_quotient_polys = self.num_quotient_polys(inner_config);
@@ -185,7 +180,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
             point: zeta,
             polynomials: [
                 trace_info.clone(),
-                permutation_ctl_zs_info.clone(),
+                auxiliary_polys_info.clone(),
                 quotient_info,
             ]
             .concat(),
@@ -193,7 +188,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         let zeta_next = builder.mul_const_extension(g, zeta);
         let zeta_next_batch = FriBatchInfoTarget {
             point: zeta_next,
-            polynomials: [trace_info, permutation_ctl_zs_info].concat(),
+            polynomials: [trace_info, auxiliary_polys_info].concat(),
         };
         let ctl_last_batch = FriBatchInfoTarget {
             point: builder
@@ -201,38 +196,24 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
             polynomials: ctl_zs_info,
         };
         FriInstanceInfoTarget {
-            oracles: vec![trace_oracle, permutation_ctl_oracle, quotient_oracle],
+            oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
             batches: vec![zeta_batch, zeta_next_batch, ctl_last_batch],
         }
     }
 
-    /// Pairs of lists of columns that should be permutations of one another. A permutation argument
-    /// will be used for each such pair. Empty by default.
-    fn permutation_pairs(&self) -> Vec<PermutationPair> {
+    fn lookups(&self) -> Vec<Lookup> {
         vec![]
     }
 
-    fn uses_permutation_args(&self) -> bool {
-        !self.permutation_pairs().is_empty()
+    fn num_lookup_helper_columns(&self, config: &StarkConfig) -> usize {
+        self.lookups()
+            .iter()
+            .map(|lookup| lookup.num_helper_columns(self.constraint_degree()))
+            .sum::<usize>()
+            * config.num_challenges
     }
 
-    /// The number of permutation argument instances that can be combined into a single constraint.
-    fn permutation_batch_size(&self) -> usize {
-        // The permutation argument constraints look like
-        //     Z(x) \prod(...) = Z(g x) \prod(...)
-        // where each product has a number of terms equal to the batch size. So our batch size
-        // should be one less than our constraint degree, which happens to be our quotient degree.
-        self.quotient_degree_factor()
-    }
-
-    fn num_permutation_instances(&self, config: &StarkConfig) -> usize {
-        self.permutation_pairs().len() * config.num_challenges
-    }
-
-    fn num_permutation_batches(&self, config: &StarkConfig) -> usize {
-        ceil_div_usize(
-            self.num_permutation_instances(config),
-            self.permutation_batch_size(),
-        )
+    fn uses_lookups(&self) -> bool {
+        !self.lookups().is_empty()
     }
 }
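To summarize the opening structure that `fri_instance` and `fri_instance_target` assemble above: three batches, one per opening point. A sketch (types and names illustrative, not the crate's):

    /// Hypothetical summary of the FRI opening batches built above.
    #[derive(Debug)]
    enum Point {
        Zeta,     // main evaluation point
        GZeta,    // g * zeta, for "next row" openings
        GInverse, // g^-1, where CTL running sums finish
    }

    fn batches() -> [(Point, &'static str); 3] {
        [
            (Point::Zeta, "trace + auxiliary + quotient"),
            (Point::GZeta, "trace + auxiliary"),
            (Point::GInverse, "ctl_zs only (the tail of the auxiliary oracle)"),
        ]
    }

    fn main() {
        for (point, polys) in batches() {
            println!("{point:?}: {polys}");
        }
    }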
diff --git a/evm/src/vanishing_poly.rs b/evm/src/vanishing_poly.rs
index 3a2da78c..21d36167 100644
--- a/evm/src/vanishing_poly.rs
+++ b/evm/src/vanishing_poly.rs
@@ -3,24 +3,23 @@ use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
 
-use crate::config::StarkConfig;
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cross_table_lookup::{
     eval_cross_table_lookup_checks, eval_cross_table_lookup_checks_circuit, CtlCheckVars,
     CtlCheckVarsTarget,
 };
-use crate::permutation::{
-    eval_permutation_checks, eval_permutation_checks_circuit, PermutationCheckDataTarget,
-    PermutationCheckVars,
+use crate::lookup::{
+    eval_lookups_checks, eval_lookups_checks_circuit, Lookup, LookupCheckVars,
+    LookupCheckVarsTarget,
 };
 use crate::stark::Stark;
 use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
 
 pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
     stark: &S,
-    config: &StarkConfig,
     vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
-    permutation_vars: Option<PermutationCheckVars<F, FE, P, D2>>,
+    lookups: &[Lookup],
+    lookup_vars: Option<LookupCheckVars<F, FE, P, D2>>,
     ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
     consumer: &mut ConstraintConsumer<P>,
 ) where
@@ -30,14 +29,8 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
     S: Stark<F, D>,
 {
     stark.eval_packed_generic(vars, consumer);
-    if let Some(permutation_vars) = permutation_vars {
-        eval_permutation_checks::<F, FE, P, S, D, D2>(
-            stark,
-            config,
-            vars,
-            permutation_vars,
-            consumer,
-        );
+    if let Some(lookup_vars) = lookup_vars {
+        eval_lookups_checks::<F, FE, P, S, D, D2>(stark, lookups, vars, lookup_vars, consumer);
     }
     eval_cross_table_lookup_checks::<F, FE, P, S, D, D2>(vars, ctl_vars, consumer);
 }
@@ -45,9 +38,8 @@ pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
     builder: &mut CircuitBuilder<F, D>,
     stark: &S,
-    config: &StarkConfig,
     vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
-    permutation_data: Option<PermutationCheckDataTarget<D>>,
+    lookup_vars: Option<LookupCheckVarsTarget<D>>,
     ctl_vars: &[CtlCheckVarsTarget<F, D>],
     consumer: &mut RecursiveConstraintConsumer<F, D>,
 ) where
@@ -56,15 +48,8 @@ pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
     [(); S::COLUMNS]:,
 {
     stark.eval_ext_circuit(builder, vars, consumer);
-    if let Some(permutation_data) = permutation_data {
-        eval_permutation_checks_circuit::<F, S, D>(
-            builder,
-            stark,
-            config,
-            vars,
-            permutation_data,
-            consumer,
-        );
+    if let Some(lookup_vars) = lookup_vars {
+        eval_lookups_checks_circuit::<F, S, D>(builder, stark, vars, lookup_vars, consumer);
     }
     eval_cross_table_lookup_checks_circuit::<S, F, D>(builder, vars, ctl_vars, consumer);
 }
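`eval_lookups_checks`, called above, enforces the per-row form of the reciprocal-sum identity shown earlier: a running-sum column z changes each row by the looked-up reciprocals minus m/(alpha + t), so it returns to its starting value exactly when the lookup balances. A toy simulation of that transition (again over a 101-element field; names are illustrative, and the real constraints in `lookup.rs` may batch columns differently):

    // Toy running-sum column for logUp: z accumulates, per row,
    //   (looked-up reciprocal) - freq/(alpha + table_value),
    // and must return to 0 after the last row iff the lookup balances.
    const P: u64 = 101;

    fn inv(x: u64) -> u64 {
        (1..P).find(|&y| x * y % P == 1).expect("nonzero")
    }

    fn main() {
        let alpha = 42u64;
        // One looked-up column f, table t, and frequencies m, all of height 4.
        let f = [2u64, 0, 3, 2];
        let t = [0u64, 1, 2, 3];
        let m = [1u64, 0, 2, 1]; // how often each t-row value occurs in f
        let mut z = 0u64;
        for i in 0..4 {
            let row_delta =
                (inv((alpha + f[i]) % P) + P - m[i] * inv((alpha + t[i]) % P) % P) % P;
            z = (z + row_delta) % P;
        }
        assert_eq!(z, 0, "running sum wraps to 0 iff the lookup balances");
    }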
diff --git a/evm/src/verifier.rs b/evm/src/verifier.rs
index 1aa9db97..cf1c2e36 100644
--- a/evm/src/verifier.rs
+++ b/evm/src/verifier.rs
@@ -17,14 +17,14 @@ use crate::config::StarkConfig;
 use crate::constraint_consumer::ConstraintConsumer;
 use crate::cpu::cpu_stark::CpuStark;
 use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
-use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars};
+use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars, GrandProductChallenge};
 use crate::keccak::keccak_stark::KeccakStark;
 use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
 use crate::logic::LogicStark;
+use crate::lookup::LookupCheckVars;
 use crate::memory::memory_stark::MemoryStark;
 use crate::memory::segments::Segment;
 use crate::memory::VALUE_LIMBS;
-use crate::permutation::{GrandProductChallenge, PermutationCheckVars};
 use crate::proof::{
     AllProof, AllProofChallenges, PublicValues, StarkOpeningSet, StarkProof, StarkProofChallenges,
 };
@@ -52,7 +52,7 @@ where
         ctl_challenges,
     } = all_proof.get_challenges(all_stark, config);
 
-    let nums_permutation_zs = all_stark.nums_permutation_zs(config);
+    let num_lookup_columns = all_stark.num_lookups_helper_columns(config);
 
     let AllStark {
         arithmetic_stark,
@@ -69,7 +69,7 @@ where
         &all_proof.stark_proofs,
         cross_table_lookups,
         &ctl_challenges,
-        &nums_permutation_zs,
+        &num_lookup_columns,
     );
 
     verify_stark_proof_with_challenges(
@@ -306,8 +306,8 @@ where
     let StarkOpeningSet {
         local_values,
         next_values,
-        permutation_ctl_zs,
-        permutation_ctl_zs_next,
+        auxiliary_polys,
+        auxiliary_polys_next,
         ctl_zs_last,
         quotient_polys,
     } = &proof.openings;
@@ -330,17 +330,18 @@ where
         l_0,
         l_last,
     );
-    let num_permutation_zs = stark.num_permutation_batches(config);
-    let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckVars {
-        local_zs: permutation_ctl_zs[..num_permutation_zs].to_vec(),
-        next_zs: permutation_ctl_zs_next[..num_permutation_zs].to_vec(),
-        permutation_challenge_sets: challenges.permutation_challenge_sets.clone().unwrap(),
+    let num_lookup_columns = stark.num_lookup_helper_columns(config);
+    let lookup_vars = stark.uses_lookups().then(|| LookupCheckVars {
+        local_values: auxiliary_polys[..num_lookup_columns].to_vec(),
+        next_values: auxiliary_polys_next[..num_lookup_columns].to_vec(),
+        challenges: challenges.lookup_challenges.clone().unwrap(),
     });
+    let lookups = stark.lookups();
     eval_vanishing_poly::<F, F::Extension, F::Extension, S, D, D>(
         stark,
-        config,
         vars,
-        permutation_data,
+        &lookups,
+        lookup_vars,
         ctl_vars,
         &mut consumer,
     );
@@ -366,7 +367,7 @@ where
 
     let merkle_caps = vec![
         proof.trace_cap.clone(),
-        proof.permutation_ctl_zs_cap.clone(),
+        proof.auxiliary_polys_cap.clone(),
         proof.quotient_polys_cap.clone(),
     ];
 
@@ -402,7 +403,7 @@ where
 {
     let StarkProof {
         trace_cap,
-        permutation_ctl_zs_cap,
+        auxiliary_polys_cap,
         quotient_polys_cap,
         openings,
         // The shape of the opening proof will be checked in the FRI verifier (see
@@ -413,8 +414,8 @@ where
     let StarkOpeningSet {
         local_values,
         next_values,
-        permutation_ctl_zs,
-        permutation_ctl_zs_next,
+        auxiliary_polys,
+        auxiliary_polys_next,
         ctl_zs_last,
         quotient_polys,
     } = openings;
 
     let degree_bits = proof.recover_degree_bits(config);
     let fri_params = config.fri_params(degree_bits);
     let cap_height = fri_params.config.cap_height;
-    let num_zs = num_ctl_zs + stark.num_permutation_batches(config);
+    let num_auxiliary = num_ctl_zs + stark.num_lookup_helper_columns(config);
 
     ensure!(trace_cap.height() == cap_height);
-    ensure!(permutation_ctl_zs_cap.height() == cap_height);
+    ensure!(auxiliary_polys_cap.height() == cap_height);
     ensure!(quotient_polys_cap.height() == cap_height);
 
     ensure!(local_values.len() == S::COLUMNS);
     ensure!(next_values.len() == S::COLUMNS);
-    ensure!(permutation_ctl_zs.len() == num_zs);
-    ensure!(permutation_ctl_zs_next.len() == num_zs);
+    ensure!(auxiliary_polys.len() == num_auxiliary);
+    ensure!(auxiliary_polys_next.len() == num_auxiliary);
     ensure!(ctl_zs_last.len() == num_ctl_zs);
     ensure!(quotient_polys.len() == stark.num_quotient_polys(config));