From 3ec1bfddb32d51494bb8013d4bd3ad332d30bf87 Mon Sep 17 00:00:00 2001 From: Robin Salen <30937548+Nashtare@users.noreply.github.com> Date: Tue, 13 Feb 2024 11:47:54 -0500 Subject: [PATCH] Update `starky` and leverage it as dependency for `plonky2_evm` (#1503) * Update prover logic * Add helper method for CTL data * Some cleanup * Update some methods * Fix * Some more fixes * More tweaks * Final * Leverage starky crate * Additional tweaks * Cleanup * More cleanup * Fix * Cleanup imports * Fix * Final tweaks * Cleanup and hide behind debug_assertions attribute * Clippy * Fix no-std * Make wasm compatible * Doc and remove todo * API cleanup and remove TODO * Add Debug impls * Add documentation for public items * Feature-gate alloc imports * Import method from starky instead * Add simple crate and module documentation * Apply comments * Add lib level documentation * Add test without lookups * Fix starks without logup * Cleanup * Some more cleanup * Fix get_challenges for non-lookup STARKs * Add additional config methods and tests * Apply comments * More comments --- evm/Cargo.toml | 9 +- evm/src/all_stark.rs | 9 +- evm/src/arithmetic/addcy.rs | 16 +- evm/src/arithmetic/arithmetic_stark.rs | 23 +- evm/src/arithmetic/byte.rs | 8 +- evm/src/arithmetic/divmod.rs | 101 ++- evm/src/arithmetic/modular.rs | 105 +-- evm/src/arithmetic/mul.rs | 8 +- evm/src/arithmetic/shift.rs | 10 +- evm/src/byte_packing/byte_packing_stark.rs | 19 +- evm/src/config.rs | 43 - evm/src/constraint_consumer.rs | 162 ---- evm/src/cpu/byte_unpacking.rs | 2 +- evm/src/cpu/clock.rs | 2 +- evm/src/cpu/contextops.rs | 2 +- evm/src/cpu/control_flow.rs | 2 +- evm/src/cpu/cpu_stark.rs | 23 +- evm/src/cpu/decode.rs | 2 +- evm/src/cpu/dup_swap.rs | 2 +- evm/src/cpu/gas.rs | 2 +- evm/src/cpu/halt.rs | 2 +- evm/src/cpu/jumps.rs | 2 +- evm/src/cpu/membus.rs | 2 +- evm/src/cpu/memio.rs | 2 +- evm/src/cpu/modfp254.rs | 2 +- evm/src/cpu/pc.rs | 2 +- evm/src/cpu/push0.rs | 2 +- evm/src/cpu/shift.rs | 2 +- evm/src/cpu/simple_logic/eq_iszero.rs | 2 +- evm/src/cpu/simple_logic/mod.rs | 2 +- evm/src/cpu/simple_logic/not.rs | 2 +- evm/src/cpu/stack.rs | 2 +- evm/src/cpu/syscalls_exceptions.rs | 2 +- evm/src/evaluation_frame.rs | 47 - evm/src/fixed_recursive_verifier.rs | 24 +- evm/src/generation/mod.rs | 2 +- evm/src/get_challenges.rs | 120 +-- evm/src/keccak/columns.rs | 2 +- evm/src/keccak/keccak_stark.rs | 41 +- evm/src/keccak/round_flags.rs | 9 +- evm/src/keccak_sponge/keccak_sponge_stark.rs | 20 +- evm/src/lib.rs | 31 +- evm/src/logic.rs | 22 +- evm/src/lookup.rs | 895 ------------------- evm/src/memory/memory_stark.rs | 19 +- evm/src/proof.rs | 335 +------ evm/src/prover.rs | 563 +----------- evm/src/recursive_verifier.rs | 275 +----- evm/src/stark.rs | 228 ----- evm/src/stark_testing.rs | 157 ---- evm/src/util.rs | 12 - evm/src/vanishing_poly.rs | 81 -- evm/src/verifier.rs | 294 +----- evm/src/witness/traces.rs | 4 +- evm/tests/add11_yml.rs | 4 +- evm/tests/basic_smart_contract.rs | 4 +- evm/tests/empty_txn_list.rs | 5 +- evm/tests/erc20.rs | 4 +- evm/tests/erc721.rs | 4 +- evm/tests/log_opcode.rs | 5 +- evm/tests/self_balance_gas_cost.rs | 4 +- evm/tests/selfdestruct.rs | 4 +- evm/tests/simple_transfer.rs | 4 +- evm/tests/withdrawals.rs | 4 +- plonky2/src/fri/proof.rs | 2 + starky/Cargo.toml | 2 + starky/src/config.rs | 115 ++- starky/src/constraint_consumer.rs | 24 +- {evm => starky}/src/cross_table_lookup.rs | 319 ++++--- starky/src/evaluation_frame.rs | 7 + starky/src/fibonacci_stark.rs | 216 ++++- starky/src/get_challenges.rs | 180 
+++- starky/src/lib.rs | 322 ++++++- starky/src/lookup.rs | 70 +- starky/src/proof.rs | 277 +++++- starky/src/prover.rs | 272 +++++- starky/src/recursive_verifier.rs | 163 ++-- starky/src/stark.rs | 145 ++- starky/src/stark_testing.rs | 6 +- starky/src/util.rs | 3 + starky/src/vanishing_poly.rs | 32 + starky/src/verifier.rs | 158 +++- 82 files changed, 2292 insertions(+), 3820 deletions(-) delete mode 100644 evm/src/config.rs delete mode 100644 evm/src/constraint_consumer.rs delete mode 100644 evm/src/evaluation_frame.rs delete mode 100644 evm/src/lookup.rs delete mode 100644 evm/src/stark.rs delete mode 100644 evm/src/stark_testing.rs delete mode 100644 evm/src/vanishing_poly.rs rename {evm => starky}/src/cross_table_lookup.rs (84%) diff --git a/evm/Cargo.toml b/evm/Cargo.toml index 24c560a0..df8401b0 100644 --- a/evm/Cargo.toml +++ b/evm/Cargo.toml @@ -27,8 +27,9 @@ num-bigint = "0.4.3" once_cell = "1.13.0" pest = "2.1.3" pest_derive = "2.1.0" -plonky2 = { path = "../plonky2", default-features = false, features = ["timing"] } +plonky2 = { path = "../plonky2", features = ["timing"] } plonky2_util = { path = "../util" } +starky = { path = "../starky" } rand = "0.8.5" rand_chacha = "0.3.1" rlp = "0.5.1" @@ -51,7 +52,11 @@ sha2 = "0.10.6" [features] default = ["parallel"] asmtools = ["hex"] -parallel = ["plonky2/parallel", "plonky2_maybe_rayon/parallel"] +parallel = [ + "plonky2/parallel", + "plonky2_maybe_rayon/parallel", + "starky/parallel" +] [[bin]] name = "assemble" diff --git a/evm/src/all_stark.rs b/evm/src/all_stark.rs index cd7a2d3c..ec218ef8 100644 --- a/evm/src/all_stark.rs +++ b/evm/src/all_stark.rs @@ -3,15 +3,17 @@ use core::ops::Deref; use plonky2::field::extension::Extendable; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; +use starky::config::StarkConfig; +use starky::cross_table_lookup::{CrossTableLookup, TableIdx, TableWithColumns}; +use starky::evaluation_frame::StarkFrame; +use starky::stark::Stark; use crate::arithmetic::arithmetic_stark; use crate::arithmetic::arithmetic_stark::ArithmeticStark; use crate::byte_packing::byte_packing_stark::{self, BytePackingStark}; -use crate::config::StarkConfig; use crate::cpu::cpu_stark; use crate::cpu::cpu_stark::CpuStark; use crate::cpu::membus::NUM_GP_CHANNELS; -use crate::cross_table_lookup::{CrossTableLookup, TableIdx, TableWithColumns}; use crate::keccak::keccak_stark; use crate::keccak::keccak_stark::KeccakStark; use crate::keccak_sponge::columns::KECCAK_RATE_BYTES; @@ -21,7 +23,6 @@ use crate::logic; use crate::logic::LogicStark; use crate::memory::memory_stark; use crate::memory::memory_stark::MemoryStark; -use crate::stark::Stark; /// Structure containing all STARKs and the cross-table lookups. #[derive(Clone)] @@ -66,6 +67,8 @@ impl, const D: usize> AllStark { } } +pub type EvmStarkFrame = StarkFrame; + /// Associates STARK tables with a unique index. 
#[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum Table { diff --git a/evm/src/arithmetic/addcy.rs b/evm/src/arithmetic/addcy.rs index 4f343b45..94d2bd16 100644 --- a/evm/src/arithmetic/addcy.rs +++ b/evm/src/arithmetic/addcy.rs @@ -22,10 +22,10 @@ use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::arithmetic::columns::*; use crate::arithmetic::utils::u256_to_array; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; /// Generate row for ADD, SUB, GT and LT operations. pub(crate) fn generate( @@ -263,10 +263,10 @@ mod tests { use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; + use starky::constraint_consumer::ConstraintConsumer; use super::*; use crate::arithmetic::columns::NUM_ARITH_COLUMNS; - use crate::constraint_consumer::ConstraintConsumer; // TODO: Should be able to refactor this test to apply to all operations. #[test] @@ -284,14 +284,14 @@ mod tests { lv[IS_LT] = F::ZERO; lv[IS_GT] = F::ZERO; - let mut constrant_consumer = ConstraintConsumer::new( + let mut constraint_consumer = ConstraintConsumer::new( vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], F::ONE, F::ONE, F::ONE, ); - eval_packed_generic(&lv, &mut constrant_consumer); - for &acc in &constrant_consumer.constraint_accs { + eval_packed_generic(&lv, &mut constraint_consumer); + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, F::ZERO); } } @@ -324,14 +324,14 @@ mod tests { generate(&mut lv, op_filter, left_in, right_in); - let mut constrant_consumer = ConstraintConsumer::new( + let mut constraint_consumer = ConstraintConsumer::new( vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], F::ONE, F::ONE, F::ONE, ); - eval_packed_generic(&lv, &mut constrant_consumer); - for &acc in &constrant_consumer.constraint_accs { + eval_packed_generic(&lv, &mut constraint_consumer); + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, F::ZERO); } diff --git a/evm/src/arithmetic/arithmetic_stark.rs b/evm/src/arithmetic/arithmetic_stark.rs index 5e3f039c..75fd9fe2 100644 --- a/evm/src/arithmetic/arithmetic_stark.rs +++ b/evm/src/arithmetic/arithmetic_stark.rs @@ -9,18 +9,18 @@ use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::util::transpose; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::cross_table_lookup::TableWithColumns; +use starky::evaluation_frame::StarkEvaluationFrame; +use starky::lookup::{Column, Filter, Lookup}; +use starky::stark::Stark; use static_assertions::const_assert; use super::columns::{op_flags, NUM_ARITH_COLUMNS}; use super::shift; -use crate::all_stark::Table; +use crate::all_stark::{EvmStarkFrame, Table}; use crate::arithmetic::columns::{NUM_SHARED_COLS, RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS}; use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::cross_table_lookup::TableWithColumns; -use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; -use crate::lookup::{Column, Filter, Lookup}; -use crate::stark::Stark; /// Creates a vector of `Columns` to link 
the 16-bit columns of the arithmetic table, /// split into groups of N_LIMBS at a time in `regs`, with the corresponding 32-bit @@ -190,12 +190,13 @@ impl ArithmeticStark { } impl, const D: usize> Stark for ArithmeticStark { - type EvaluationFrame = StarkFrame + type EvaluationFrame = EvmStarkFrame where FE: FieldExtension, P: PackedField; - type EvaluationFrameTarget = StarkFrame, NUM_ARITH_COLUMNS>; + type EvaluationFrameTarget = + EvmStarkFrame, ExtensionTarget, NUM_ARITH_COLUMNS>; fn eval_packed_generic( &self, @@ -320,6 +321,10 @@ impl, const D: usize> Stark for ArithmeticSta filter_columns: vec![None; NUM_SHARED_COLS], }] } + + fn requires_ctls(&self) -> bool { + true + } } #[cfg(test)] @@ -330,11 +335,11 @@ mod tests { use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; + use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use super::{columns, ArithmeticStark}; use crate::arithmetic::columns::OUTPUT_REGISTER; use crate::arithmetic::*; - use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; #[test] fn degree() -> Result<()> { diff --git a/evm/src/arithmetic/byte.rs b/evm/src/arithmetic/byte.rs index f7581efa..272a7843 100644 --- a/evm/src/arithmetic/byte.rs +++ b/evm/src/arithmetic/byte.rs @@ -69,11 +69,11 @@ use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use static_assertions::const_assert; use crate::arithmetic::columns::*; use crate::arithmetic::utils::u256_to_array; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; // Give meaningful names to the columns of AUX_INPUT_REGISTER_0 that // we're using @@ -480,14 +480,14 @@ mod tests { let out_byte = val.byte(31 - i) as u64; verify_output(&lv, out_byte); - let mut constrant_consumer = ConstraintConsumer::new( + let mut constraint_consumer = ConstraintConsumer::new( vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], F::ONE, F::ONE, F::ONE, ); - eval_packed(&lv, &mut constrant_consumer); - for &acc in &constrant_consumer.constraint_accs { + eval_packed(&lv, &mut constraint_consumer); + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, F::ZERO); } } diff --git a/evm/src/arithmetic/divmod.rs b/evm/src/arithmetic/divmod.rs index a4599dc7..d27fbc2e 100644 --- a/evm/src/arithmetic/divmod.rs +++ b/evm/src/arithmetic/divmod.rs @@ -11,13 +11,13 @@ use plonky2::field::types::PrimeField64; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::arithmetic::columns::*; use crate::arithmetic::modular::{ generate_modular_op, modular_constr_poly, modular_constr_poly_ext_circuit, }; use crate::arithmetic::utils::*; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; /// Generates the output and auxiliary values for modular operations, /// assuming the input, modular and output limbs are already set. 
@@ -215,10 +215,10 @@ mod tests { use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; + use starky::constraint_consumer::ConstraintConsumer; use super::*; use crate::arithmetic::columns::NUM_ARITH_COLUMNS; - use crate::constraint_consumer::ConstraintConsumer; const N_RND_TESTS: usize = 1000; const MODULAR_OPS: [usize; 2] = [IS_MOD, IS_DIV]; @@ -247,7 +247,7 @@ mod tests { GoldilocksField::ONE, ); eval_packed(&lv, &nv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } @@ -306,7 +306,7 @@ mod tests { GoldilocksField::ZERO, ); eval_packed(&lv, &nv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } @@ -321,52 +321,57 @@ mod tests { for op_filter in MODULAR_OPS { for _i in 0..N_RND_TESTS { - // set inputs to random values and the modulus to zero; - // the output is defined to be zero when modulus is zero. - let mut lv = [F::default(); NUM_ARITH_COLUMNS] - .map(|_| F::from_canonical_u16(rng.gen::())); - let mut nv = [F::default(); NUM_ARITH_COLUMNS] - .map(|_| F::from_canonical_u16(rng.gen::())); + for corrupt_constraints in [false, true] { + // set inputs to random values and the modulus to zero; + // the output is defined to be zero when modulus is zero. + let mut lv = [F::default(); NUM_ARITH_COLUMNS] + .map(|_| F::from_canonical_u16(rng.gen::())); + let mut nv = [F::default(); NUM_ARITH_COLUMNS] + .map(|_| F::from_canonical_u16(rng.gen::())); - // Reset operation columns, then select one - for op in MODULAR_OPS { - lv[op] = F::ZERO; + // Reset operation columns, then select one + for op in MODULAR_OPS { + lv[op] = F::ZERO; + } + // Since SHR uses the logic for DIV, `IS_SHR` should also be set to 0 here. + lv[IS_SHR] = F::ZERO; + lv[op_filter] = F::ONE; + + let input0 = U256::from(rng.gen::<[u8; 32]>()); + let input1 = U256::zero(); + + generate(&mut lv, &mut nv, op_filter, input0, input1, U256::zero()); + + // check that the correct output was generated + assert!(lv[OUTPUT_REGISTER].iter().all(|&c| c == F::ZERO)); + + let mut constraint_consumer = ConstraintConsumer::new( + vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], + GoldilocksField::ONE, + GoldilocksField::ZERO, + GoldilocksField::ZERO, + ); + eval_packed(&lv, &nv, &mut constraint_consumer); + + if corrupt_constraints { + // Corrupt one output limb by setting it to a non-zero value. + let random_oi = OUTPUT_REGISTER.start + rng.gen::() % N_LIMBS; + lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX)); + + eval_packed(&lv, &nv, &mut constraint_consumer); + + // Check that at least one of the constraints was non-zero. + assert!(constraint_consumer + .accumulators() + .iter() + .any(|&acc| acc != F::ZERO)); + } else { + assert!(constraint_consumer + .accumulators() + .iter() + .all(|&acc| acc == F::ZERO)); + } } - // Since SHR uses the logic for DIV, `IS_SHR` should also be set to 0 here. 
- lv[IS_SHR] = F::ZERO; - lv[op_filter] = F::ONE; - - let input0 = U256::from(rng.gen::<[u8; 32]>()); - let input1 = U256::zero(); - - generate(&mut lv, &mut nv, op_filter, input0, input1, U256::zero()); - - // check that the correct output was generated - assert!(lv[OUTPUT_REGISTER].iter().all(|&c| c == F::ZERO)); - - let mut constraint_consumer = ConstraintConsumer::new( - vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], - GoldilocksField::ONE, - GoldilocksField::ZERO, - GoldilocksField::ZERO, - ); - eval_packed(&lv, &nv, &mut constraint_consumer); - assert!(constraint_consumer - .constraint_accs - .iter() - .all(|&acc| acc == F::ZERO)); - - // Corrupt one output limb by setting it to a non-zero value - let random_oi = OUTPUT_REGISTER.start + rng.gen::() % N_LIMBS; - lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX)); - - eval_packed(&lv, &nv, &mut constraint_consumer); - - // Check that at least one of the constraints was non-zero - assert!(constraint_consumer - .constraint_accs - .iter() - .any(|&acc| acc != F::ZERO)); } } } diff --git a/evm/src/arithmetic/modular.rs b/evm/src/arithmetic/modular.rs index 5a1df5c7..a3806862 100644 --- a/evm/src/arithmetic/modular.rs +++ b/evm/src/arithmetic/modular.rs @@ -119,13 +119,13 @@ use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use static_assertions::const_assert; use super::columns; use crate::arithmetic::addcy::{eval_ext_circuit_addcy, eval_packed_generic_addcy}; use crate::arithmetic::columns::*; use crate::arithmetic::utils::*; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::extension_tower::BN_BASE; const fn bn254_modulus_limbs() -> [u16; N_LIMBS] { @@ -832,10 +832,10 @@ mod tests { use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; + use starky::constraint_consumer::ConstraintConsumer; use super::*; use crate::arithmetic::columns::NUM_ARITH_COLUMNS; - use crate::constraint_consumer::ConstraintConsumer; use crate::extension_tower::BN_BASE; const N_RND_TESTS: usize = 1000; @@ -873,7 +873,7 @@ mod tests { GoldilocksField::ONE, ); eval_packed(&lv, &nv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } @@ -930,7 +930,7 @@ mod tests { GoldilocksField::ZERO, ); eval_packed(&lv, &nv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } @@ -945,54 +945,59 @@ mod tests { for op_filter in [IS_ADDMOD, IS_SUBMOD, IS_MULMOD] { for _i in 0..N_RND_TESTS { - // set inputs to random values and the modulus to zero; - // the output is defined to be zero when modulus is zero. - let mut lv = [F::default(); NUM_ARITH_COLUMNS] - .map(|_| F::from_canonical_u16(rng.gen::())); - let mut nv = [F::default(); NUM_ARITH_COLUMNS] - .map(|_| F::from_canonical_u16(rng.gen::())); + for corrupt_constraints in [false, true] { + // set inputs to random values and the modulus to zero; + // the output is defined to be zero when modulus is zero. 
+ let mut lv = [F::default(); NUM_ARITH_COLUMNS] + .map(|_| F::from_canonical_u16(rng.gen::())); + let mut nv = [F::default(); NUM_ARITH_COLUMNS] + .map(|_| F::from_canonical_u16(rng.gen::())); - // Reset operation columns, then select one - for op in MODULAR_OPS { - lv[op] = F::ZERO; + // Reset operation columns, then select one + for op in MODULAR_OPS { + lv[op] = F::ZERO; + } + lv[IS_SHR] = F::ZERO; + lv[IS_DIV] = F::ZERO; + lv[IS_MOD] = F::ZERO; + lv[op_filter] = F::ONE; + + let input0 = U256::from(rng.gen::<[u8; 32]>()); + let input1 = U256::from(rng.gen::<[u8; 32]>()); + let modulus = U256::zero(); + + generate(&mut lv, &mut nv, op_filter, input0, input1, modulus); + + // check that the correct output was generated + assert!(lv[MODULAR_OUTPUT].iter().all(|&c| c == F::ZERO)); + + let mut constraint_consumer = ConstraintConsumer::new( + vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], + GoldilocksField::ONE, + GoldilocksField::ZERO, + GoldilocksField::ZERO, + ); + eval_packed(&lv, &nv, &mut constraint_consumer); + + if corrupt_constraints { + // Corrupt one output limb by setting it to a non-zero value. + let random_oi = MODULAR_OUTPUT.start + rng.gen::() % N_LIMBS; + lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX)); + + eval_packed(&lv, &nv, &mut constraint_consumer); + + // Check that at least one of the constraints was non-zero. + assert!(constraint_consumer + .accumulators() + .iter() + .any(|&acc| acc != F::ZERO)); + } else { + assert!(constraint_consumer + .accumulators() + .iter() + .all(|&acc| acc == F::ZERO)); + } } - lv[IS_SHR] = F::ZERO; - lv[IS_DIV] = F::ZERO; - lv[IS_MOD] = F::ZERO; - lv[op_filter] = F::ONE; - - let input0 = U256::from(rng.gen::<[u8; 32]>()); - let input1 = U256::from(rng.gen::<[u8; 32]>()); - let modulus = U256::zero(); - - generate(&mut lv, &mut nv, op_filter, input0, input1, modulus); - - // check that the correct output was generated - assert!(lv[MODULAR_OUTPUT].iter().all(|&c| c == F::ZERO)); - - let mut constraint_consumer = ConstraintConsumer::new( - vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], - GoldilocksField::ONE, - GoldilocksField::ZERO, - GoldilocksField::ZERO, - ); - eval_packed(&lv, &nv, &mut constraint_consumer); - assert!(constraint_consumer - .constraint_accs - .iter() - .all(|&acc| acc == F::ZERO)); - - // Corrupt one output limb by setting it to a non-zero value - let random_oi = MODULAR_OUTPUT.start + rng.gen::() % N_LIMBS; - lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX)); - - eval_packed(&lv, &nv, &mut constraint_consumer); - - // Check that at least one of the constraints was non-zero - assert!(constraint_consumer - .constraint_accs - .iter() - .any(|&acc| acc != F::ZERO)); } } } diff --git a/evm/src/arithmetic/mul.rs b/evm/src/arithmetic/mul.rs index 01c9d5c1..112ef7eb 100644 --- a/evm/src/arithmetic/mul.rs +++ b/evm/src/arithmetic/mul.rs @@ -62,10 +62,10 @@ use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::arithmetic::columns::*; use crate::arithmetic::utils::*; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; /// Given the two limbs of `left_in` and `right_in`, computes `left_in * right_in`. 
pub(crate) fn generate_mul(lv: &mut [F], left_in: [i64; 16], right_in: [i64; 16]) { @@ -253,10 +253,10 @@ mod tests { use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; + use starky::constraint_consumer::ConstraintConsumer; use super::*; use crate::arithmetic::columns::NUM_ARITH_COLUMNS; - use crate::constraint_consumer::ConstraintConsumer; const N_RND_TESTS: usize = 1000; @@ -279,7 +279,7 @@ mod tests { GoldilocksField::ONE, ); eval_packed_generic(&lv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } @@ -312,7 +312,7 @@ mod tests { GoldilocksField::ONE, ); eval_packed_generic(&lv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } diff --git a/evm/src/arithmetic/shift.rs b/evm/src/arithmetic/shift.rs index bb837984..bc6276b1 100644 --- a/evm/src/arithmetic/shift.rs +++ b/evm/src/arithmetic/shift.rs @@ -27,11 +27,11 @@ use plonky2::field::types::PrimeField64; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use super::{divmod, mul}; use crate::arithmetic::columns::*; use crate::arithmetic::utils::*; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; /// Generates a shift operation (either SHL or SHR). /// The inputs are stored in the form `(shift, input, 1 << shift)`. @@ -184,10 +184,10 @@ mod tests { use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; + use starky::constraint_consumer::ConstraintConsumer; use super::*; use crate::arithmetic::columns::NUM_ARITH_COLUMNS; - use crate::constraint_consumer::ConstraintConsumer; const N_RND_TESTS: usize = 1000; @@ -212,7 +212,7 @@ mod tests { GoldilocksField::ONE, ); eval_packed_generic(&lv, &nv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } @@ -261,7 +261,7 @@ mod tests { GoldilocksField::ZERO, ); eval_packed_generic(&lv, &nv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } @@ -320,7 +320,7 @@ mod tests { GoldilocksField::ZERO, ); eval_packed_generic(&lv, &nv, &mut constraint_consumer); - for &acc in &constraint_consumer.constraint_accs { + for &acc in &constraint_consumer.accumulators() { assert_eq!(acc, GoldilocksField::ZERO); } } diff --git a/evm/src/byte_packing/byte_packing_stark.rs b/evm/src/byte_packing/byte_packing_stark.rs index ff7a18c0..14cf61d5 100644 --- a/evm/src/byte_packing/byte_packing_stark.rs +++ b/evm/src/byte_packing/byte_packing_stark.rs @@ -37,16 +37,17 @@ use plonky2::iop::ext_target::ExtensionTarget; use plonky2::timed; use plonky2::util::timing::TimingTree; use plonky2::util::transpose; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::evaluation_frame::StarkEvaluationFrame; +use starky::lookup::{Column, Filter, Lookup}; +use starky::stark::Stark; use super::NUM_BYTES; +use crate::all_stark::EvmStarkFrame; use crate::byte_packing::columns::{ index_len, 
value_bytes, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, IS_READ, LEN_INDICES_COLS, NUM_COLUMNS, RANGE_COUNTER, RC_FREQUENCIES, TIMESTAMP, }; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; -use crate::lookup::{Column, Filter, Lookup}; -use crate::stark::Stark; use crate::witness::memory::MemoryAddress; /// Strict upper bound for the individual bytes range-check. @@ -258,12 +259,12 @@ impl, const D: usize> BytePackingStark { } impl, const D: usize> Stark for BytePackingStark { - type EvaluationFrame = StarkFrame + type EvaluationFrame = EvmStarkFrame where FE: FieldExtension, P: PackedField; - type EvaluationFrameTarget = StarkFrame, NUM_COLUMNS>; + type EvaluationFrameTarget = EvmStarkFrame, ExtensionTarget, NUM_COLUMNS>; fn eval_packed_generic( &self, @@ -397,15 +398,19 @@ impl, const D: usize> Stark for BytePackingSt filter_columns: vec![None; NUM_BYTES], }] } + + fn requires_ctls(&self) -> bool { + true + } } #[cfg(test)] pub(crate) mod tests { use anyhow::Result; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use crate::byte_packing::byte_packing_stark::BytePackingStark; - use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; #[test] fn test_stark_degree() -> Result<()> { diff --git a/evm/src/config.rs b/evm/src/config.rs deleted file mode 100644 index 3f88d99f..00000000 --- a/evm/src/config.rs +++ /dev/null @@ -1,43 +0,0 @@ -use plonky2::fri::reduction_strategies::FriReductionStrategy; -use plonky2::fri::{FriConfig, FriParams}; - -/// A configuration containing the different parameters to be used by the STARK prover. -pub struct StarkConfig { - /// The targeted security level for the proofs generated with this configuration. - pub security_bits: usize, - - /// The number of challenge points to generate, for IOPs that have soundness errors of (roughly) - /// `degree / |F|`. - pub num_challenges: usize, - - /// The configuration of the FRI sub-protocol. - pub fri_config: FriConfig, -} - -impl Default for StarkConfig { - fn default() -> Self { - Self::standard_fast_config() - } -} - -impl StarkConfig { - /// A typical configuration with a rate of 2, resulting in fast but large proofs. - /// Targets ~100 bit conjectured security. - pub const fn standard_fast_config() -> Self { - Self { - security_bits: 100, - num_challenges: 2, - fri_config: FriConfig { - rate_bits: 1, - cap_height: 4, - proof_of_work_bits: 16, - reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5), - num_query_rounds: 84, - }, - } - } - - pub(crate) fn fri_params(&self, degree_bits: usize) -> FriParams { - self.fri_config.fri_params(degree_bits, false) - } -} diff --git a/evm/src/constraint_consumer.rs b/evm/src/constraint_consumer.rs deleted file mode 100644 index 919b5163..00000000 --- a/evm/src/constraint_consumer.rs +++ /dev/null @@ -1,162 +0,0 @@ -use core::marker::PhantomData; - -use plonky2::field::extension::Extendable; -use plonky2::field::packed::PackedField; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::ext_target::ExtensionTarget; -use plonky2::iop::target::Target; -use plonky2::plonk::circuit_builder::CircuitBuilder; - -pub struct ConstraintConsumer { - /// Random values used to combine multiple constraints into one. - pub alphas: Vec, - - /// Running sums of constraints that have been emitted so far, scaled by powers of alpha. 
- // TODO(JN): This is pub so it can be used in a test. Once we have an API for accessing this - // result, it should be made private. - pub constraint_accs: Vec<P>, - /// The evaluation of `X - g^(n-1)`. - z_last: P, - /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated - /// with the first trace row, and zero at other points in the subgroup. - lagrange_basis_first: P, - /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated - /// with the last trace row, and zero at other points in the subgroup. - lagrange_basis_last: P, -} - -impl<P: PackedField> ConstraintConsumer<P> { - pub(crate) fn new( - alphas: Vec<P::Scalar>, - z_last: P, - lagrange_basis_first: P, - lagrange_basis_last: P, - ) -> Self { - Self { - constraint_accs: vec![P::ZEROS; alphas.len()], - alphas, - z_last, - lagrange_basis_first, - lagrange_basis_last, - } - } - - pub(crate) fn accumulators(self) -> Vec<P> { - self.constraint_accs - } - - /// Add one constraint valid on all rows except the last. - pub(crate) fn constraint_transition(&mut self, constraint: P) { - self.constraint(constraint * self.z_last); - } - - /// Add one constraint on all rows. - pub(crate) fn constraint(&mut self, constraint: P) { - for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) { - *acc *= alpha; - *acc += constraint; - } - } - - /// Add one constraint, but first multiply it by a filter such that it will only apply to the - /// first row of the trace. - pub(crate) fn constraint_first_row(&mut self, constraint: P) { - self.constraint(constraint * self.lagrange_basis_first); - } - - /// Add one constraint, but first multiply it by a filter such that it will only apply to the - /// last row of the trace. - pub(crate) fn constraint_last_row(&mut self, constraint: P) { - self.constraint(constraint * self.lagrange_basis_last); - } -} - -pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> { - /// A random value used to combine multiple constraints into one. - alphas: Vec<Target>, - /// A running sum of constraints that have been emitted so far, scaled by powers of alpha. - constraint_accs: Vec<ExtensionTarget<D>>, - /// The evaluation of `X - g^(n-1)`. - z_last: ExtensionTarget<D>, - /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated - /// with the first trace row, and zero at other points in the subgroup. - lagrange_basis_first: ExtensionTarget<D>, - /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated - /// with the last trace row, and zero at other points in the subgroup. - lagrange_basis_last: ExtensionTarget<D>, - - _phantom: PhantomData<F>, -} - -impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> { - pub(crate) fn new( - zero: ExtensionTarget<D>, - alphas: Vec<Target>, - z_last: ExtensionTarget<D>, - lagrange_basis_first: ExtensionTarget<D>, - lagrange_basis_last: ExtensionTarget<D>, - ) -> Self { - Self { - constraint_accs: vec![zero; alphas.len()], - alphas, - z_last, - lagrange_basis_first, - lagrange_basis_last, - _phantom: Default::default(), - } - } - - pub(crate) fn accumulators(self) -> Vec<ExtensionTarget<D>> { - self.constraint_accs - } - - /// Add one constraint valid on all rows except the last. - pub(crate) fn constraint_transition( - &mut self, - builder: &mut CircuitBuilder<F, D>, - constraint: ExtensionTarget<D>, - ) { - let filtered_constraint = builder.mul_extension(constraint, self.z_last); - self.constraint(builder, filtered_constraint); - } - - /// Add one constraint valid on all rows. - pub(crate) fn constraint( - &mut self, - builder: &mut CircuitBuilder<F, D>, - constraint: ExtensionTarget<D>, - ) { - for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) { - *acc = builder.scalar_mul_add_extension(alpha, *acc, constraint); - } - } - - /// Add one constraint, but first multiply it by a filter such that it will only apply to the - /// first row of the trace. - pub(crate) fn constraint_first_row( - &mut self, - builder: &mut CircuitBuilder<F, D>, - constraint: ExtensionTarget<D>, - ) { - let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first); - self.constraint(builder, filtered_constraint); - } - - /// Add one constraint, but first multiply it by a filter such that it will only apply to the - /// last row of the trace. 
- pub(crate) fn constraint_last_row( - &mut self, - builder: &mut CircuitBuilder, - constraint: ExtensionTarget, - ) { - let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last); - self.constraint(builder, filtered_constraint); - } -} diff --git a/evm/src/cpu/byte_unpacking.rs b/evm/src/cpu/byte_unpacking.rs index 39053141..4de1855d 100644 --- a/evm/src/cpu/byte_unpacking.rs +++ b/evm/src/cpu/byte_unpacking.rs @@ -4,8 +4,8 @@ use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; pub(crate) fn eval_packed( diff --git a/evm/src/cpu/clock.rs b/evm/src/cpu/clock.rs index cd7b17d8..4fa917a2 100644 --- a/evm/src/cpu/clock.rs +++ b/evm/src/cpu/clock.rs @@ -2,8 +2,8 @@ use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; /// Check the correct updating of `clock`. diff --git a/evm/src/cpu/contextops.rs b/evm/src/cpu/contextops.rs index ec4e5e5e..9a0bb748 100644 --- a/evm/src/cpu/contextops.rs +++ b/evm/src/cpu/contextops.rs @@ -5,10 +5,10 @@ use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use super::columns::ops::OpsColumnsView; use super::cpu_stark::{disable_unused_channels, disable_unused_channels_circuit}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; use crate::memory::segments::Segment; diff --git a/evm/src/cpu/control_flow.rs b/evm/src/cpu/control_flow.rs index bde59305..a2887462 100644 --- a/evm/src/cpu/control_flow.rs +++ b/evm/src/cpu/control_flow.rs @@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::{CpuColumnsView, COL_MAP}; use crate::cpu::kernel::aggregator::KERNEL; diff --git a/evm/src/cpu/cpu_stark.rs b/evm/src/cpu/cpu_stark.rs index 8bcada2f..340eede5 100644 --- a/evm/src/cpu/cpu_stark.rs +++ b/evm/src/cpu/cpu_stark.rs @@ -8,24 +8,24 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::cross_table_lookup::TableWithColumns; +use starky::evaluation_frame::StarkEvaluationFrame; +use starky::lookup::{Column, Filter}; +use starky::stark::Stark; use super::columns::CpuColumnsView; use super::halt; use super::kernel::constants::context_metadata::ContextMetadata; use super::membus::NUM_GP_CHANNELS; 
-use crate::all_stark::Table; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::all_stark::{EvmStarkFrame, Table}; use crate::cpu::columns::{COL_MAP, NUM_CPU_COLUMNS}; use crate::cpu::{ byte_unpacking, clock, contextops, control_flow, decode, dup_swap, gas, jumps, membus, memio, modfp254, pc, push0, shift, simple_logic, stack, syscalls_exceptions, }; -use crate::cross_table_lookup::TableWithColumns; -use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; -use crate::lookup::{Column, Filter}; use crate::memory::segments::Segment; use crate::memory::{NUM_CHANNELS, VALUE_LIMBS}; -use crate::stark::Stark; /// Creates the vector of `Columns` corresponding to the General Purpose channels when calling the Keccak sponge: /// the CPU reads the output of the sponge directly from the `KeccakSpongeStark` table. @@ -452,12 +452,13 @@ pub(crate) struct CpuStark { } impl, const D: usize> Stark for CpuStark { - type EvaluationFrame = StarkFrame + type EvaluationFrame = EvmStarkFrame where FE: FieldExtension, P: PackedField; - type EvaluationFrameTarget = StarkFrame, NUM_CPU_COLUMNS>; + type EvaluationFrameTarget = + EvmStarkFrame, ExtensionTarget, NUM_CPU_COLUMNS>; /// Evaluates all CPU constraints. fn eval_packed_generic( @@ -531,15 +532,19 @@ impl, const D: usize> Stark for CpuStark usize { 3 } + + fn requires_ctls(&self) -> bool { + true + } } #[cfg(test)] mod tests { use anyhow::Result; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use crate::cpu::cpu_stark::CpuStark; - use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; #[test] fn test_stark_degree() -> Result<()> { diff --git a/evm/src/cpu/decode.rs b/evm/src/cpu/decode.rs index 4c2c4322..83980239 100644 --- a/evm/src/cpu/decode.rs +++ b/evm/src/cpu/decode.rs @@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::{CpuColumnsView, COL_MAP}; /// List of opcode blocks diff --git a/evm/src/cpu/dup_swap.rs b/evm/src/cpu/dup_swap.rs index 1abec5fc..e67eaa62 100644 --- a/evm/src/cpu/dup_swap.rs +++ b/evm/src/cpu/dup_swap.rs @@ -5,8 +5,8 @@ use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::{CpuColumnsView, MemoryChannelView}; use crate::memory::segments::Segment; diff --git a/evm/src/cpu/gas.rs b/evm/src/cpu/gas.rs index be033c3c..37097adc 100644 --- a/evm/src/cpu/gas.rs +++ b/evm/src/cpu/gas.rs @@ -4,9 +4,9 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use super::columns::COL_MAP; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::ops::OpsColumnsView; use 
crate::cpu::columns::CpuColumnsView; diff --git a/evm/src/cpu/halt.rs b/evm/src/cpu/halt.rs index 80ac3285..a0412860 100644 --- a/evm/src/cpu/halt.rs +++ b/evm/src/cpu/halt.rs @@ -5,9 +5,9 @@ use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use super::control_flow::get_halt_pc; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::{CpuColumnsView, COL_MAP}; use crate::cpu::membus::NUM_GP_CHANNELS; diff --git a/evm/src/cpu/jumps.rs b/evm/src/cpu/jumps.rs index fd7fcfd9..f3413b0f 100644 --- a/evm/src/cpu/jumps.rs +++ b/evm/src/cpu/jumps.rs @@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; use crate::cpu::membus::NUM_GP_CHANNELS; use crate::memory::segments::Segment; diff --git a/evm/src/cpu/membus.rs b/evm/src/cpu/membus.rs index 6ce84561..b50ab5cc 100644 --- a/evm/src/cpu/membus.rs +++ b/evm/src/cpu/membus.rs @@ -2,8 +2,8 @@ use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; /// General-purpose memory channels; they can read and write to all contexts/segments/addresses. 
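The hunks above are the mechanical part of the migration: every constraint module swaps `crate::constraint_consumer` for `starky::constraint_consumer`, with no change to the evaluation style. A minimal sketch of what a constraint module looks like after the swap; the column indices and the constraints below are hypothetical, only the imports and the consumer calls mirror this patch:

use plonky2::field::packed::PackedField;
use starky::constraint_consumer::ConstraintConsumer;

// Hypothetical two-column layout, purely for illustration.
const IS_ACTIVE: usize = 0;
const COUNTER: usize = 1;

// Post-migration idiom: the consumer type now comes from `starky`,
// while constraints are emitted exactly as before.
pub(crate) fn eval_packed<P: PackedField>(
    lv: &[P], // current row
    nv: &[P], // next row
    yield_constr: &mut ConstraintConsumer<P>,
) {
    // The activity flag must be boolean on every row.
    let flag = lv[IS_ACTIVE];
    yield_constr.constraint(flag * (flag - P::ONES));

    // While active, the counter increases by one per row; transition
    // constraints are skipped on the last row.
    yield_constr.constraint_transition(flag * (nv[COUNTER] - lv[COUNTER] - P::ONES));
}
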
diff --git a/evm/src/cpu/memio.rs b/evm/src/cpu/memio.rs index 924f030f..ac32253d 100644 --- a/evm/src/cpu/memio.rs +++ b/evm/src/cpu/memio.rs @@ -4,9 +4,9 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use super::cpu_stark::get_addr; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; use crate::cpu::stack; use crate::memory::segments::Segment; diff --git a/evm/src/cpu/modfp254.rs b/evm/src/cpu/modfp254.rs index 95bab8d6..a3b40f59 100644 --- a/evm/src/cpu/modfp254.rs +++ b/evm/src/cpu/modfp254.rs @@ -4,8 +4,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; // Python: diff --git a/evm/src/cpu/pc.rs b/evm/src/cpu/pc.rs index 9635534e..4294dbaf 100644 --- a/evm/src/cpu/pc.rs +++ b/evm/src/cpu/pc.rs @@ -2,8 +2,8 @@ use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; /// Evaluates constraints to check that we are storing the correct PC. diff --git a/evm/src/cpu/push0.rs b/evm/src/cpu/push0.rs index ed9f6c10..4f37a55e 100644 --- a/evm/src/cpu/push0.rs +++ b/evm/src/cpu/push0.rs @@ -2,8 +2,8 @@ use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; /// Evaluates constraints to check that we are not pushing anything. 
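The test-side counterpart of the same migration recurs throughout this patch: the misspelled `constrant_consumer` locals are renamed, and direct reads of the formerly public `constraint_accs` field become calls to the new `accumulators()` accessor. A sketch of the resulting test idiom, reusing the dummy challenges (2, 3, 5) from the tests above; `eval_packed` here is a stand-in for any table's packed evaluator:

use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use starky::constraint_consumer::ConstraintConsumer;

type F = GoldilocksField;

// Evaluate a row against a packed constraint function and check that
// every combined constraint accumulator vanishes.
fn assert_constraints_vanish(lv: &[F], eval_packed: impl Fn(&[F], &mut ConstraintConsumer<F>)) {
    let mut constraint_consumer = ConstraintConsumer::new(
        vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
        F::ONE, // z_last
        F::ONE, // lagrange_basis_first
        F::ONE, // lagrange_basis_last
    );
    eval_packed(lv, &mut constraint_consumer);
    // `accumulators()` consumes the consumer; it replaces the direct
    // field access `constraint_consumer.constraint_accs` used before.
    for &acc in &constraint_consumer.accumulators() {
        assert_eq!(acc, F::ZERO);
    }
}
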
diff --git a/evm/src/cpu/shift.rs b/evm/src/cpu/shift.rs index 9e751421..12ed18b9 100644 --- a/evm/src/cpu/shift.rs +++ b/evm/src/cpu/shift.rs @@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; use crate::cpu::membus::NUM_GP_CHANNELS; use crate::memory::segments::Segment; diff --git a/evm/src/cpu/simple_logic/eq_iszero.rs b/evm/src/cpu/simple_logic/eq_iszero.rs index fd811ae7..43333fd9 100644 --- a/evm/src/cpu/simple_logic/eq_iszero.rs +++ b/evm/src/cpu/simple_logic/eq_iszero.rs @@ -5,8 +5,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; use crate::cpu::stack::{self, EQ_STACK_BEHAVIOR, IS_ZERO_STACK_BEHAVIOR}; diff --git a/evm/src/cpu/simple_logic/mod.rs b/evm/src/cpu/simple_logic/mod.rs index 04f8bcc2..748930f2 100644 --- a/evm/src/cpu/simple_logic/mod.rs +++ b/evm/src/cpu/simple_logic/mod.rs @@ -5,8 +5,8 @@ use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; /// Evaluates constraints for NOT, EQ and ISZERO. 
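One more knock-on change worth spelling out: with `evm/src/config.rs` deleted above, prover settings are now taken from `starky::config::StarkConfig`. A small usage sketch; the parameter values are reproduced from the deleted file's `standard_fast_config`:

use starky::config::StarkConfig;

fn main() {
    // Same fast profile that `evm/src/config.rs` used to hard-code:
    // ~100 bits of conjectured security, two challenge points, a rate
    // of 1/2 (one rate bit), 16 proof-of-work bits, 84 query rounds.
    let config = StarkConfig::standard_fast_config();
    assert_eq!(config.security_bits, 100);
    assert_eq!(config.num_challenges, 2);
    assert_eq!(config.fri_config.rate_bits, 1);
    assert_eq!(config.fri_config.num_query_rounds, 84);
}
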
diff --git a/evm/src/cpu/simple_logic/not.rs b/evm/src/cpu/simple_logic/not.rs index 3798606d..92b11568 100644 --- a/evm/src/cpu/simple_logic/not.rs +++ b/evm/src/cpu/simple_logic/not.rs @@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; use crate::cpu::stack; diff --git a/evm/src/cpu/stack.rs b/evm/src/cpu/stack.rs index 87ca7ee1..e135e391 100644 --- a/evm/src/cpu/stack.rs +++ b/evm/src/cpu/stack.rs @@ -6,8 +6,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::ops::OpsColumnsView; use crate::cpu::columns::CpuColumnsView; use crate::cpu::membus::NUM_GP_CHANNELS; diff --git a/evm/src/cpu/syscalls_exceptions.rs b/evm/src/cpu/syscalls_exceptions.rs index 1dfdb8fa..cf7aa72e 100644 --- a/evm/src/cpu/syscalls_exceptions.rs +++ b/evm/src/cpu/syscalls_exceptions.rs @@ -7,8 +7,8 @@ use plonky2::field::packed::PackedField; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::CpuColumnsView; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::membus::NUM_GP_CHANNELS; diff --git a/evm/src/evaluation_frame.rs b/evm/src/evaluation_frame.rs deleted file mode 100644 index 0f6bbe2c..00000000 --- a/evm/src/evaluation_frame.rs +++ /dev/null @@ -1,47 +0,0 @@ -/// A trait for viewing an evaluation frame of a STARK table. -/// -/// It allows to access the current and next rows at a given step -/// and can be used to implement constraint evaluation both natively -/// and recursively. -pub trait StarkEvaluationFrame: Sized { - /// The number of columns for the STARK table this evaluation frame views. - const COLUMNS: usize; - - /// Returns the local values (i.e. current row) for this evaluation frame. - fn get_local_values(&self) -> &[T]; - /// Returns the next values (i.e. next row) for this evaluation frame. - fn get_next_values(&self) -> &[T]; - - /// Outputs a new evaluation frame from the provided local and next values. - /// - /// **NOTE**: Concrete implementations of this method SHOULD ensure that - /// the provided slices lengths match the `Self::COLUMNS` value. 
- fn from_values(lv: &[T], nv: &[T]) -> Self; -} - -pub struct StarkFrame { - local_values: [T; N], - next_values: [T; N], -} - -impl StarkEvaluationFrame for StarkFrame { - const COLUMNS: usize = N; - - fn get_local_values(&self) -> &[T] { - &self.local_values - } - - fn get_next_values(&self) -> &[T] { - &self.next_values - } - - fn from_values(lv: &[T], nv: &[T]) -> Self { - assert_eq!(lv.len(), Self::COLUMNS); - assert_eq!(nv.len(), Self::COLUMNS); - - Self { - local_values: lv.try_into().unwrap(), - next_values: nv.try_into().unwrap(), - } - } -} diff --git a/evm/src/fixed_recursive_verifier.rs b/evm/src/fixed_recursive_verifier.rs index 2df85b03..f12a485e 100644 --- a/evm/src/fixed_recursive_verifier.rs +++ b/evm/src/fixed_recursive_verifier.rs @@ -29,18 +29,18 @@ use plonky2::util::serialization::{ }; use plonky2::util::timing::TimingTree; use plonky2_util::log2_ceil; +use starky::config::StarkConfig; +use starky::cross_table_lookup::{verify_cross_table_lookups_circuit, CrossTableLookup}; +use starky::lookup::{get_grand_product_challenge_set_target, GrandProductChallengeSet}; +use starky::proof::StarkProofWithMetadata; +use starky::stark::Stark; use crate::all_stark::{all_cross_table_lookups, AllStark, Table, NUM_TABLES}; -use crate::config::StarkConfig; -use crate::cross_table_lookup::{ - get_grand_product_challenge_set_target, verify_cross_table_lookups_circuit, CrossTableLookup, - GrandProductChallengeSet, -}; use crate::generation::GenerationInputs; use crate::get_challenges::observe_public_values_target; use crate::proof::{ AllProof, BlockHashesTarget, BlockMetadataTarget, ExtraBlockData, ExtraBlockDataTarget, - PublicValues, PublicValuesTarget, StarkProofWithMetadata, TrieRoots, TrieRootsTarget, + PublicValues, PublicValuesTarget, TrieRoots, TrieRootsTarget, }; use crate::prover::{check_abort_signal, prove}; use crate::recursive_verifier::{ @@ -48,7 +48,6 @@ use crate::recursive_verifier::{ recursive_stark_circuit, set_public_value_targets, PlonkWrapperCircuit, PublicInputs, StarkWrapperCircuit, }; -use crate::stark::Stark; use crate::util::h256_limbs; /// The recursion threshold. We end a chain of recursive proofs once we reach this size. @@ -587,7 +586,7 @@ where &mut builder, all_cross_table_lookups(), pis.map(|p| p.ctl_zs_first), - extra_looking_sums, + Some(&extra_looking_sums), stark_config, ); @@ -1002,7 +1001,7 @@ where let mut root_inputs = PartialWitness::new(); for table in 0..NUM_TABLES { - let stark_proof = &all_proof.stark_proofs[table]; + let stark_proof = &all_proof.multi_proof.stark_proofs[table]; let original_degree_bits = stark_proof.proof.recover_degree_bits(config); let table_circuits = &self.by_table[table]; let shrunk_proof = table_circuits @@ -1015,7 +1014,7 @@ where original_degree_bits, )) })? 
- .shrink(stark_proof, &all_proof.ctl_challenges)?; + .shrink(stark_proof, &all_proof.multi_proof.ctl_challenges)?; let index_verifier_data = table_circuits .by_stark_size .keys() @@ -1107,9 +1106,10 @@ where for table in 0..NUM_TABLES { let (table_circuit, index_verifier_data) = &table_circuits[table]; - let stark_proof = &all_proof.stark_proofs[table]; + let stark_proof = &all_proof.multi_proof.stark_proofs[table]; - let shrunk_proof = table_circuit.shrink(stark_proof, &all_proof.ctl_challenges)?; + let shrunk_proof = + table_circuit.shrink(stark_proof, &all_proof.multi_proof.ctl_challenges)?; root_inputs.set_target( self.root.index_verifier_data[table], F::from_canonical_u8(*index_verifier_data), diff --git a/evm/src/generation/mod.rs b/evm/src/generation/mod.rs index b63f48a1..6da0e38b 100644 --- a/evm/src/generation/mod.rs +++ b/evm/src/generation/mod.rs @@ -10,13 +10,13 @@ use plonky2::hash::hash_types::RichField; use plonky2::timed; use plonky2::util::timing::TimingTree; use serde::{Deserialize, Serialize}; +use starky::config::StarkConfig; use GlobalMetadata::{ ReceiptTrieRootDigestAfter, ReceiptTrieRootDigestBefore, StateTrieRootDigestAfter, StateTrieRootDigestBefore, TransactionTrieRootDigestAfter, TransactionTrieRootDigestBefore, }; use crate::all_stark::{AllStark, NUM_TABLES}; -use crate::config::StarkConfig; use crate::cpu::columns::CpuColumnsView; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; diff --git a/evm/src/get_challenges.rs b/evm/src/get_challenges.rs index 756b0650..2a783b94 100644 --- a/evm/src/get_challenges.rs +++ b/evm/src/get_challenges.rs @@ -1,13 +1,11 @@ use ethereum_types::{BigEndianHash, H256, U256}; use plonky2::field::extension::Extendable; -use plonky2::fri::proof::{FriProof, FriProofTarget}; use plonky2::hash::hash_types::RichField; use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; -use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; +use starky::config::StarkConfig; +use starky::lookup::get_grand_product_challenge_set; -use crate::config::StarkConfig; -use crate::cross_table_lookup::get_grand_product_challenge_set; use crate::proof::*; use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64}; use crate::witness::errors::ProgramError; @@ -198,7 +196,9 @@ impl, C: GenericConfig, const D: usize> A ) -> Result, ProgramError> { let mut challenger = Challenger::::new(); - for proof in &self.stark_proofs { + let stark_proofs = &self.multi_proof.stark_proofs; + + for proof in stark_proofs { challenger.observe_cap(&proof.proof.trace_cap); } @@ -210,112 +210,14 @@ impl, C: GenericConfig, const D: usize> A Ok(AllProofChallenges { stark_challenges: core::array::from_fn(|i| { challenger.compact(); - self.stark_proofs[i] - .proof - .get_challenges(&mut challenger, config) + stark_proofs[i].proof.get_challenges( + &mut challenger, + Some(&ctl_challenges), + true, + config, + ) }), ctl_challenges, }) } } - -impl StarkProof -where - F: RichField + Extendable, - C: GenericConfig, -{ - /// Computes all Fiat-Shamir challenges used in the STARK proof. - pub(crate) fn get_challenges( - &self, - challenger: &mut Challenger, - config: &StarkConfig, - ) -> StarkProofChallenges { - let degree_bits = self.recover_degree_bits(config); - - let StarkProof { - auxiliary_polys_cap, - quotient_polys_cap, - openings, - opening_proof: - FriProof { - commit_phase_merkle_caps, - final_poly, - pow_witness, - .. - }, - .. 
- } = &self; - - let num_challenges = config.num_challenges; - - challenger.observe_cap(auxiliary_polys_cap); - - let stark_alphas = challenger.get_n_challenges(num_challenges); - - challenger.observe_cap(quotient_polys_cap); - let stark_zeta = challenger.get_extension_challenge::(); - - challenger.observe_openings(&openings.to_fri_openings()); - - StarkProofChallenges { - stark_alphas, - stark_zeta, - fri_challenges: challenger.fri_challenges::( - commit_phase_merkle_caps, - final_poly, - *pow_witness, - degree_bits, - &config.fri_config, - ), - } - } -} - -impl StarkProofTarget { - pub(crate) fn get_challenges, C: GenericConfig>( - &self, - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, - config: &StarkConfig, - ) -> StarkProofChallengesTarget - where - C::Hasher: AlgebraicHasher, - { - let StarkProofTarget { - auxiliary_polys_cap: auxiliary_polys, - quotient_polys_cap, - openings, - opening_proof: - FriProofTarget { - commit_phase_merkle_caps, - final_poly, - pow_witness, - .. - }, - .. - } = &self; - - let num_challenges = config.num_challenges; - - challenger.observe_cap(auxiliary_polys); - - let stark_alphas = challenger.get_n_challenges(builder, num_challenges); - - challenger.observe_cap(quotient_polys_cap); - let stark_zeta = challenger.get_extension_challenge(builder); - - challenger.observe_openings(&openings.to_fri_openings(builder.zero())); - - StarkProofChallengesTarget { - stark_alphas, - stark_zeta, - fri_challenges: challenger.fri_challenges( - builder, - commit_phase_merkle_caps, - final_poly, - *pow_witness, - &config.fri_config, - ), - } - } -} diff --git a/evm/src/keccak/columns.rs b/evm/src/keccak/columns.rs index eedba41c..bbd96a74 100644 --- a/evm/src/keccak/columns.rs +++ b/evm/src/keccak/columns.rs @@ -1,7 +1,7 @@ use plonky2::field::types::Field; +use starky::lookup::Column; use crate::keccak::keccak_stark::{NUM_INPUTS, NUM_ROUNDS}; -use crate::lookup::Column; /// A register which is set to 1 if we are in the `i`th round, otherwise 0. pub(crate) const fn reg_step(i: usize) -> usize { diff --git a/evm/src/keccak/keccak_stark.rs b/evm/src/keccak/keccak_stark.rs index 771c9b43..fc27086a 100644 --- a/evm/src/keccak/keccak_stark.rs +++ b/evm/src/keccak/keccak_stark.rs @@ -10,10 +10,14 @@ use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::plonk_common::reduce_with_powers_ext_circuit; use plonky2::timed; use plonky2::util::timing::TimingTree; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::evaluation_frame::StarkEvaluationFrame; +use starky::lookup::{Column, Filter}; +use starky::stark::Stark; +use starky::util::trace_rows_to_poly_values; use super::columns::reg_input_limb; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; +use crate::all_stark::EvmStarkFrame; use crate::keccak::columns::{ reg_a, reg_a_prime, reg_a_prime_prime, reg_a_prime_prime_0_0_bit, reg_a_prime_prime_prime, reg_b, reg_c, reg_c_prime, reg_output_limb, reg_step, NUM_COLUMNS, TIMESTAMP, @@ -23,9 +27,6 @@ use crate::keccak::logic::{ andn, andn_gen, andn_gen_circuit, xor, xor3_gen, xor3_gen_circuit, xor_gen, xor_gen_circuit, }; use crate::keccak::round_flags::{eval_round_flags, eval_round_flags_recursively}; -use crate::lookup::{Column, Filter}; -use crate::stark::Stark; -use crate::util::trace_rows_to_poly_values; /// Number of rounds in a Keccak permutation. 
pub(crate) const NUM_ROUNDS: usize = 24; @@ -253,12 +254,12 @@ impl, const D: usize> KeccakStark { } impl, const D: usize> Stark for KeccakStark { - type EvaluationFrame = StarkFrame + type EvaluationFrame = EvmStarkFrame where FE: FieldExtension, P: PackedField; - type EvaluationFrameTarget = StarkFrame, NUM_COLUMNS>; + type EvaluationFrameTarget = EvmStarkFrame, ExtensionTarget, NUM_COLUMNS>; fn eval_packed_generic( &self, @@ -616,6 +617,10 @@ impl, const D: usize> Stark for KeccakStark usize { 3 } + + fn requires_ctls(&self) -> bool { + true + } } #[cfg(test)] @@ -626,14 +631,14 @@ mod tests { use plonky2::fri::oracle::PolynomialBatch; use plonky2::iop::challenger::Challenger; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use starky::config::StarkConfig; + use starky::cross_table_lookup::{CtlData, CtlZData}; + use starky::lookup::{GrandProductChallenge, GrandProductChallengeSet}; + use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use tiny_keccak::keccakf; use super::*; - use crate::config::StarkConfig; - use crate::cross_table_lookup::{CtlData, CtlZData, GrandProductChallengeSet}; - use crate::lookup::GrandProductChallenge; use crate::prover::prove_single_table; - use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; #[test] fn test_stark_degree() -> Result<()> { @@ -734,16 +739,16 @@ mod tests { let degree = 1 << trace_commitments.degree_log; // Fake CTL data. - let ctl_z_data = CtlZData { - helper_columns: vec![PolynomialValues::zero(degree)], - z: PolynomialValues::zero(degree), - challenge: GrandProductChallenge { + let ctl_z_data = CtlZData::new( + vec![PolynomialValues::zero(degree)], + PolynomialValues::zero(degree), + GrandProductChallenge { beta: F::ZERO, gamma: F::ZERO, }, - columns: vec![], - filter: vec![Some(Filter::new_simple(Column::constant(F::ZERO)))], - }; + vec![], + vec![Some(Filter::new_simple(Column::constant(F::ZERO)))], + ); let ctl_data = CtlData { zs_columns: vec![ctl_z_data.clone(); config.num_challenges], }; diff --git a/evm/src/keccak/round_flags.rs b/evm/src/keccak/round_flags.rs index 9ad144f7..5e76b2ec 100644 --- a/evm/src/keccak/round_flags.rs +++ b/evm/src/keccak/round_flags.rs @@ -4,14 +4,15 @@ use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::evaluation_frame::StarkEvaluationFrame; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; +use crate::all_stark::EvmStarkFrame; use crate::keccak::columns::{reg_step, NUM_COLUMNS}; use crate::keccak::keccak_stark::NUM_ROUNDS; pub(crate) fn eval_round_flags>( - vars: &StarkFrame, + vars: &EvmStarkFrame, yield_constr: &mut ConstraintConsumer
<P>
, ) { let local_values = vars.get_local_values(); @@ -40,7 +41,7 @@ pub(crate) fn eval_round_flags>( pub(crate) fn eval_round_flags_recursively, const D: usize>( builder: &mut CircuitBuilder, - vars: &StarkFrame, NUM_COLUMNS>, + vars: &EvmStarkFrame, ExtensionTarget, NUM_COLUMNS>, yield_constr: &mut RecursiveConstraintConsumer, ) { let one = builder.one_extension(); diff --git a/evm/src/keccak_sponge/keccak_sponge_stark.rs b/evm/src/keccak_sponge/keccak_sponge_stark.rs index ddf2bca0..04b1bca6 100644 --- a/evm/src/keccak_sponge/keccak_sponge_stark.rs +++ b/evm/src/keccak_sponge/keccak_sponge_stark.rs @@ -14,13 +14,14 @@ use plonky2::timed; use plonky2::util::timing::TimingTree; use plonky2::util::transpose; use plonky2_util::ceil_div_usize; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::evaluation_frame::StarkEvaluationFrame; +use starky::lookup::{Column, Filter, Lookup}; +use starky::stark::Stark; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::all_stark::EvmStarkFrame; use crate::cpu::kernel::keccak_util::keccakf_u32s; -use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; use crate::keccak_sponge::columns::*; -use crate::lookup::{Column, Filter, Lookup}; -use crate::stark::Stark; use crate::witness::memory::MemoryAddress; /// Strict upper bound for the individual bytes range-check. @@ -520,12 +521,13 @@ impl, const D: usize> KeccakSpongeStark { } impl, const D: usize> Stark for KeccakSpongeStark { - type EvaluationFrame = StarkFrame + type EvaluationFrame = EvmStarkFrame where FE: FieldExtension, P: PackedField; - type EvaluationFrameTarget = StarkFrame, NUM_KECCAK_SPONGE_COLUMNS>; + type EvaluationFrameTarget = + EvmStarkFrame, ExtensionTarget, NUM_KECCAK_SPONGE_COLUMNS>; fn eval_packed_generic( &self, @@ -807,6 +809,10 @@ impl, const D: usize> Stark for KeccakSpongeS filter_columns: vec![None; KECCAK_RATE_BYTES], }] } + + fn requires_ctls(&self) -> bool { + true + } } #[cfg(test)] @@ -816,10 +822,10 @@ mod tests { use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::field::types::PrimeField64; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use super::*; use crate::memory::segments::Segment; - use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; #[test] fn test_stark_degree() -> Result<()> { diff --git a/evm/src/lib.rs b/evm/src/lib.rs index 025fc8e6..5741c441 100644 --- a/evm/src/lib.rs +++ b/evm/src/lib.rs @@ -165,35 +165,32 @@ #![allow(unused)] #![feature(let_chains)] -pub mod all_stark; +// Individual STARK processing units pub mod arithmetic; pub mod byte_packing; -pub mod config; -pub mod constraint_consumer; pub mod cpu; -pub mod cross_table_lookup; -pub mod curve_pairings; -pub mod evaluation_frame; -pub mod extension_tower; -pub mod fixed_recursive_verifier; -pub mod generation; -mod get_challenges; pub mod keccak; pub mod keccak_sponge; pub mod logic; -pub mod lookup; pub mod memory; + +// Proving system components +pub mod all_stark; +pub mod fixed_recursive_verifier; +mod get_challenges; pub mod proof; pub mod prover; pub mod recursive_verifier; -pub mod stark; -pub mod util; -pub mod vanishing_poly; pub mod verifier; + +// Witness generation +pub mod generation; pub mod witness; -#[cfg(test)] -mod stark_testing; +// Utility modules +pub mod curve_pairings; +pub mod extension_tower; +pub mod util; use 
eth_trie_utils::partial_trie::HashedPartialTrie; // Set up Jemalloc @@ -209,6 +206,6 @@ static GLOBAL: Jemalloc = Jemalloc; pub type Node = eth_trie_utils::partial_trie::Node; pub use all_stark::AllStark; -pub use config::StarkConfig; pub use fixed_recursive_verifier::AllRecursiveCircuits; pub use generation::GenerationInputs; +pub use starky::config::StarkConfig; diff --git a/evm/src/logic.rs b/evm/src/logic.rs index 7300c6af..d07a6e3d 100644 --- a/evm/src/logic.rs +++ b/evm/src/logic.rs @@ -11,13 +11,15 @@ use plonky2::iop::ext_target::ExtensionTarget; use plonky2::timed; use plonky2::util::timing::TimingTree; use plonky2_util::ceil_div_usize; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::evaluation_frame::StarkEvaluationFrame; +use starky::lookup::{Column, Filter}; +use starky::stark::Stark; +use starky::util::trace_rows_to_poly_values; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; +use crate::all_stark::EvmStarkFrame; use crate::logic::columns::NUM_COLUMNS; -use crate::lookup::{Column, Filter}; -use crate::stark::Stark; -use crate::util::{limb_from_bits_le, limb_from_bits_le_recursive, trace_rows_to_poly_values}; +use crate::util::{limb_from_bits_le, limb_from_bits_le_recursive}; /// Total number of bits per input/output. const VAL_BITS: usize = 256; @@ -210,12 +212,12 @@ impl LogicStark { } impl, const D: usize> Stark for LogicStark { - type EvaluationFrame = StarkFrame + type EvaluationFrame = EvmStarkFrame where FE: FieldExtension, P: PackedField; - type EvaluationFrameTarget = StarkFrame, NUM_COLUMNS>; + type EvaluationFrameTarget = EvmStarkFrame, ExtensionTarget, NUM_COLUMNS>; fn eval_packed_generic( &self, @@ -354,15 +356,19 @@ impl, const D: usize> Stark for LogicStark usize { 3 } + + fn requires_ctls(&self) -> bool { + true + } } #[cfg(test)] mod tests { use anyhow::Result; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use crate::logic::LogicStark; - use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; #[test] fn test_stark_degree() -> Result<()> { diff --git a/evm/src/lookup.rs b/evm/src/lookup.rs deleted file mode 100644 index f98814f9..00000000 --- a/evm/src/lookup.rs +++ /dev/null @@ -1,895 +0,0 @@ -use core::borrow::Borrow; -use core::fmt::Debug; -use core::iter::repeat; - -use itertools::Itertools; -use num_bigint::BigUint; -use plonky2::field::batch_util::batch_add_inplace; -use plonky2::field::extension::{Extendable, FieldExtension}; -use plonky2::field::packed::PackedField; -use plonky2::field::polynomial::PolynomialValues; -use plonky2::field::types::Field; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::ext_target::ExtensionTarget; -use plonky2::iop::target::Target; -use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::plonk_common::{ - reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit, -}; -use plonky2_util::ceil_div_usize; - -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::StarkEvaluationFrame; -use crate::stark::Stark; - -/// Represents a filter, which evaluates to 1 if the row must be considered and 0 if it should be ignored. 
-/// It's an arbitrary degree 2 combination of columns: `products` are the degree 2 terms, and `constants` are -/// the degree 1 terms. -#[derive(Clone, Debug)] -pub(crate) struct Filter { - products: Vec<(Column, Column)>, - constants: Vec>, -} - -impl Filter { - pub(crate) fn new(products: Vec<(Column, Column)>, constants: Vec>) -> Self { - Self { - products, - constants, - } - } - - /// Returns a filter made of a single column. - pub(crate) fn new_simple(col: Column) -> Self { - Self { - products: vec![], - constants: vec![col], - } - } - - /// Given the column values for the current and next rows, evaluates the filter. - pub(crate) fn eval_filter(&self, v: &[P], next_v: &[P]) -> P - where - FE: FieldExtension, - P: PackedField, - { - self.products - .iter() - .map(|(col1, col2)| col1.eval_with_next(v, next_v) * col2.eval_with_next(v, next_v)) - .sum::
<P>
() - + self - .constants - .iter() - .map(|col| col.eval_with_next(v, next_v)) - .sum::
<P>
() - } - - /// Circuit version of `eval_filter`: - /// Given the column values for the current and next rows, evaluates the filter. - pub(crate) fn eval_filter_circuit( - &self, - builder: &mut CircuitBuilder, - v: &[ExtensionTarget], - next_v: &[ExtensionTarget], - ) -> ExtensionTarget - where - F: RichField + Extendable, - { - let prods = self - .products - .iter() - .map(|(col1, col2)| { - let col1_eval = col1.eval_with_next_circuit(builder, v, next_v); - let col2_eval = col2.eval_with_next_circuit(builder, v, next_v); - builder.mul_extension(col1_eval, col2_eval) - }) - .collect::>(); - - let consts = self - .constants - .iter() - .map(|col| col.eval_with_next_circuit(builder, v, next_v)) - .collect::>(); - - let prods = builder.add_many_extension(prods); - let consts = builder.add_many_extension(consts); - builder.add_extension(prods, consts) - } - - /// Evaluate on a row of a table given in column-major form. - pub(crate) fn eval_table(&self, table: &[PolynomialValues], row: usize) -> F { - self.products - .iter() - .map(|(col1, col2)| col1.eval_table(table, row) * col2.eval_table(table, row)) - .sum::() - + self - .constants - .iter() - .map(|col| col.eval_table(table, row)) - .sum() - } -} - -/// Represent two linear combination of columns, corresponding to the current and next row values. -/// Each linear combination is represented as: -/// - a vector of `(usize, F)` corresponding to the column number and the associated multiplicand -/// - the constant of the linear combination. -#[derive(Clone, Debug)] -pub(crate) struct Column { - linear_combination: Vec<(usize, F)>, - next_row_linear_combination: Vec<(usize, F)>, - constant: F, -} - -impl Column { - /// Returns the representation of a single column in the current row. - pub(crate) fn single(c: usize) -> Self { - Self { - linear_combination: vec![(c, F::ONE)], - next_row_linear_combination: vec![], - constant: F::ZERO, - } - } - - /// Returns multiple single columns in the current row. - pub(crate) fn singles>>( - cs: I, - ) -> impl Iterator { - cs.into_iter().map(|c| Self::single(*c.borrow())) - } - - /// Returns the representation of a single column in the next row. - pub(crate) fn single_next_row(c: usize) -> Self { - Self { - linear_combination: vec![], - next_row_linear_combination: vec![(c, F::ONE)], - constant: F::ZERO, - } - } - - /// Returns multiple single columns for the next row. - pub(crate) fn singles_next_row>>( - cs: I, - ) -> impl Iterator { - cs.into_iter().map(|c| Self::single_next_row(*c.borrow())) - } - - /// Returns a linear combination corresponding to a constant. - pub(crate) fn constant(constant: F) -> Self { - Self { - linear_combination: vec![], - next_row_linear_combination: vec![], - constant, - } - } - - /// Returns a linear combination corresponding to 0. - pub(crate) fn zero() -> Self { - Self::constant(F::ZERO) - } - - /// Returns a linear combination corresponding to 1. - pub(crate) fn one() -> Self { - Self::constant(F::ONE) - } - - /// Given an iterator of `(usize, F)` and a constant, returns the association linear combination of columns for the current row. - pub(crate) fn linear_combination_with_constant>( - iter: I, - constant: F, - ) -> Self { - let v = iter.into_iter().collect::>(); - assert!(!v.is_empty()); - debug_assert_eq!( - v.iter().map(|(c, _)| c).unique().count(), - v.len(), - "Duplicate columns." 
- ); - Self { - linear_combination: v, - next_row_linear_combination: vec![], - constant, - } - } - - /// Given an iterator of `(usize, F)` and a constant, returns the associated linear combination of columns for the current and the next rows. - pub(crate) fn linear_combination_and_next_row_with_constant< - I: IntoIterator, - >( - iter: I, - next_row_iter: I, - constant: F, - ) -> Self { - let v = iter.into_iter().collect::>(); - let next_row_v = next_row_iter.into_iter().collect::>(); - - assert!(!v.is_empty() || !next_row_v.is_empty()); - debug_assert_eq!( - v.iter().map(|(c, _)| c).unique().count(), - v.len(), - "Duplicate columns." - ); - debug_assert_eq!( - next_row_v.iter().map(|(c, _)| c).unique().count(), - next_row_v.len(), - "Duplicate columns." - ); - - Self { - linear_combination: v, - next_row_linear_combination: next_row_v, - constant, - } - } - - /// Returns a linear combination of columns, with no additional constant. - pub(crate) fn linear_combination>(iter: I) -> Self { - Self::linear_combination_with_constant(iter, F::ZERO) - } - - /// Given an iterator of columns (c_0, ..., c_n) containing bits in little endian order: - /// returns the representation of c_0 + 2 * c_1 + ... + 2^n * c_n. - pub(crate) fn le_bits>>(cs: I) -> Self { - Self::linear_combination(cs.into_iter().map(|c| *c.borrow()).zip(F::TWO.powers())) - } - - /// Given an iterator of columns (c_0, ..., c_n) containing bits in little endian order: - /// returns the representation of c_0 + 2 * c_1 + ... + 2^n * c_n + k where `k` is an - /// additional constant. - pub(crate) fn le_bits_with_constant>>( - cs: I, - constant: F, - ) -> Self { - Self::linear_combination_with_constant( - cs.into_iter().map(|c| *c.borrow()).zip(F::TWO.powers()), - constant, - ) - } - - /// Given an iterator of columns (c_0, ..., c_n) containing bytes in little endian order: - /// returns the representation of c_0 + 256 * c_1 + ... + 256^n * c_n. - pub(crate) fn le_bytes>>(cs: I) -> Self { - Self::linear_combination( - cs.into_iter() - .map(|c| *c.borrow()) - .zip(F::from_canonical_u16(256).powers()), - ) - } - - /// Given an iterator of columns, returns the representation of their sum. - pub(crate) fn sum>>(cs: I) -> Self { - Self::linear_combination(cs.into_iter().map(|c| *c.borrow()).zip(repeat(F::ONE))) - } - - /// Given the column values for the current row, returns the evaluation of the linear combination. - pub(crate) fn eval(&self, v: &[P]) -> P - where - FE: FieldExtension, - P: PackedField, - { - self.linear_combination - .iter() - .map(|&(c, f)| v[c] * FE::from_basefield(f)) - .sum::
<P>
() - + FE::from_basefield(self.constant) - } - - /// Given the column values for the current and next rows, evaluates the current and next linear combinations and returns their sum. - pub(crate) fn eval_with_next(&self, v: &[P], next_v: &[P]) -> P - where - FE: FieldExtension, - P: PackedField, - { - self.linear_combination - .iter() - .map(|&(c, f)| v[c] * FE::from_basefield(f)) - .sum::
<P>
() - + self - .next_row_linear_combination - .iter() - .map(|&(c, f)| next_v[c] * FE::from_basefield(f)) - .sum::
<P>
() - + FE::from_basefield(self.constant) - } - - /// Evaluate on a row of a table given in column-major form. - pub(crate) fn eval_table(&self, table: &[PolynomialValues], row: usize) -> F { - let mut res = self - .linear_combination - .iter() - .map(|&(c, f)| table[c].values[row] * f) - .sum::() - + self.constant; - - // If we access the next row at the last row, for sanity, we consider the next row's values to be 0. - // If the lookups are correctly written, the filter should be 0 in that case anyway. - if !self.next_row_linear_combination.is_empty() && row < table[0].values.len() - 1 { - res += self - .next_row_linear_combination - .iter() - .map(|&(c, f)| table[c].values[row + 1] * f) - .sum::(); - } - - res - } - - /// Evaluates the column on all rows. - pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues]) -> Vec { - let length = table[0].len(); - (0..length) - .map(|row| self.eval_table(table, row)) - .collect::>() - } - - /// Circuit version of `eval`: Given a row's targets, returns their linear combination. - pub(crate) fn eval_circuit( - &self, - builder: &mut CircuitBuilder, - v: &[ExtensionTarget], - ) -> ExtensionTarget - where - F: RichField + Extendable, - { - let pairs = self - .linear_combination - .iter() - .map(|&(c, f)| { - ( - v[c], - builder.constant_extension(F::Extension::from_basefield(f)), - ) - }) - .collect::>(); - let constant = builder.constant_extension(F::Extension::from_basefield(self.constant)); - builder.inner_product_extension(F::ONE, constant, pairs) - } - - /// Circuit version of `eval_with_next`: - /// Given the targets of the current and next row, returns the sum of their linear combinations. - pub(crate) fn eval_with_next_circuit( - &self, - builder: &mut CircuitBuilder, - v: &[ExtensionTarget], - next_v: &[ExtensionTarget], - ) -> ExtensionTarget - where - F: RichField + Extendable, - { - let mut pairs = self - .linear_combination - .iter() - .map(|&(c, f)| { - ( - v[c], - builder.constant_extension(F::Extension::from_basefield(f)), - ) - }) - .collect::>(); - let next_row_pairs = self.next_row_linear_combination.iter().map(|&(c, f)| { - ( - next_v[c], - builder.constant_extension(F::Extension::from_basefield(f)), - ) - }); - pairs.extend(next_row_pairs); - let constant = builder.constant_extension(F::Extension::from_basefield(self.constant)); - builder.inner_product_extension(F::ONE, constant, pairs) - } -} - -pub(crate) type ColumnFilter<'a, F> = (&'a [Column], &'a Option>); - -pub struct Lookup { - /// Columns whose values should be contained in the lookup table. - /// These are the f_i(x) polynomials in the logUp paper. - pub(crate) columns: Vec>, - /// Column containing the lookup table. - /// This is the t(x) polynomial in the paper. - pub(crate) table_column: Column, - /// Column containing the frequencies of `columns` in `table_column`. - /// This is the m(x) polynomial in the paper. - pub(crate) frequencies_column: Column, - - /// Columns to filter some elements. There is at most one filter - /// column per column to range-check. - pub(crate) filter_columns: Vec>>, -} - -impl Lookup { - pub(crate) fn num_helper_columns(&self, constraint_degree: usize) -> usize { - // One helper column for each column batch of size `constraint_degree-1`, - // then one column for the inverse of `table + challenge` and one for the `Z` polynomial. - ceil_div_usize(self.columns.len(), constraint_degree - 1) + 1 - } -} - -/// Randomness for a single instance of a permutation check protocol. 
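// (Hedged illustration, not part of the original patch.) For terms (t0, t1, t2),
// `combine` returns t0 + beta*t1 + beta^2*t2 + gamma: the tuple is reduced with
// powers of `beta`, then offset by `gamma`, collapsing a multi-column entry into
// a single field element before the logUp / grand-product checks.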
-#[derive(Copy, Clone, Eq, PartialEq, Debug)] -pub(crate) struct GrandProductChallenge { - /// Randomness used to combine multiple columns into one. - pub(crate) beta: T, - /// Random offset that's added to the beta-reduced column values. - pub(crate) gamma: T, -} - -impl GrandProductChallenge { - pub(crate) fn combine<'a, FE, P, T: IntoIterator, const D2: usize>( - &self, - terms: T, - ) -> P - where - FE: FieldExtension, - P: PackedField, - T::IntoIter: DoubleEndedIterator, - { - reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma) - } -} - -impl GrandProductChallenge { - pub(crate) fn combine_circuit, const D: usize>( - &self, - builder: &mut CircuitBuilder, - terms: &[ExtensionTarget], - ) -> ExtensionTarget { - let reduced = reduce_with_powers_ext_circuit(builder, terms, self.beta); - let gamma = builder.convert_to_ext(self.gamma); - builder.add_extension(reduced, gamma) - } -} - -impl GrandProductChallenge { - pub(crate) fn combine_base_circuit, const D: usize>( - &self, - builder: &mut CircuitBuilder, - terms: &[Target], - ) -> Target { - let reduced = reduce_with_powers_circuit(builder, terms, self.beta); - builder.add(reduced, self.gamma) - } -} - -/// logUp protocol from -/// Compute the helper columns for the lookup argument. -/// Given columns `f0,...,fk` and a column `t`, such that `∪fi ⊆ t`, and challenges `x`, -/// this computes the helper columns `h_i = 1/(x+f_2i) + 1/(x+f_2i+1)`, `g = 1/(x+t)`, -/// and `Z(gx) = Z(x) + sum h_i(x) - m(x)g(x)` where `m` is the frequencies column. -pub(crate) fn lookup_helper_columns( - lookup: &Lookup, - trace_poly_values: &[PolynomialValues], - challenge: F, - constraint_degree: usize, -) -> Vec> { - assert_eq!( - constraint_degree, 3, - "TODO: Allow other constraint degrees." - ); - - assert_eq!(lookup.columns.len(), lookup.filter_columns.len()); - - let num_total_logup_entries = trace_poly_values[0].values.len() * lookup.columns.len(); - assert!(BigUint::from(num_total_logup_entries) < F::characteristic()); - - let num_helper_columns = lookup.num_helper_columns(constraint_degree); - - let looking_cols = lookup - .columns - .iter() - .map(|col| vec![col.clone()]) - .collect::>>>(); - - let grand_challenge = GrandProductChallenge { - beta: F::ONE, - gamma: challenge, - }; - - let columns_filters = looking_cols - .iter() - .zip(lookup.filter_columns.iter()) - .map(|(col, filter)| (&col[..], filter)) - .collect::>(); - // For each batch of `constraint_degree-1` columns `fi`, compute `sum 1/(f_i+challenge)` and - // add it to the helper columns. - // Note: these are the h_k(x) polynomials in the paper, with a few differences: - // * Here, the first ratio m_0(x)/phi_0(x) is not included with the columns batched up to create the - // h_k polynomials; instead there's a separate helper column for it (see below). - // * Here, we use 1 instead of -1 as the numerator (and subtract later). - // * Here, for now, the batch size (l) is always constraint_degree - 1 = 2. - // * Here, there are filters for the columns, to only select some rows - // in a given column. - let mut helper_columns = get_helper_cols( - trace_poly_values, - trace_poly_values[0].len(), - &columns_filters, - grand_challenge, - constraint_degree, - ); - - // Add `1/(table+challenge)` to the helper columns. - // This is 1/phi_0(x) = 1/(x + t(x)) from the paper. - // Here, we don't include m(x) in the numerator, instead multiplying it with this column later. 
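// (Worked sketch, not part of the original patch.) The running sum built below
// satisfies z[0] = 0 and
//   z[i+1] = z[i] + sum_k h_k[i] - m[i] / (challenge + t[i]),
// so requiring the sum to wrap around to 0 enforces the logUp identity
//   sum_i sum_k h_k[i] = sum_i m[i] / (challenge + t[i]).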
- let mut table = lookup.table_column.eval_all_rows(trace_poly_values); - for x in table.iter_mut() { - *x = challenge + *x; - } - let table_inverse: Vec = F::batch_multiplicative_inverse(&table); - - // Compute the `Z` polynomial with `Z(1)=0` and `Z(gx) = Z(x) + sum h_i(x) - frequencies(x)g(x)`. - // This enforces the check from the paper, that the sum of the h_k(x) polynomials is 0 over H. - // In the paper, that sum includes m(x)/(x + t(x)) = frequencies(x)/g(x), because that was bundled - // into the h_k(x) polynomials. - let frequencies = &lookup.frequencies_column.eval_all_rows(trace_poly_values); - let mut z = Vec::with_capacity(frequencies.len()); - z.push(F::ZERO); - for i in 0..frequencies.len() - 1 { - let x = helper_columns[..num_helper_columns - 1] - .iter() - .map(|col| col.values[i]) - .sum::() - - frequencies[i] * table_inverse[i]; - z.push(z[i] + x); - } - helper_columns.push(z.into()); - - helper_columns -} - -/// Given data associated to a lookup, check the associated helper polynomials. -pub(crate) fn eval_helper_columns( - filter: &[Option>], - columns: &[Vec
<P>
], - local_values: &[P], - next_values: &[P], - helper_columns: &[P], - constraint_degree: usize, - challenges: &GrandProductChallenge, - consumer: &mut ConstraintConsumer
<P>
, -) where - F: RichField + Extendable, - FE: FieldExtension, - P: PackedField, -{ - if !helper_columns.is_empty() { - for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() { - let fs = - &filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()]; - let h = helper_columns[j]; - - match chunk.len() { - 2 => { - let combin0 = challenges.combine(&chunk[0]); - let combin1 = challenges.combine(chunk[1].iter()); - - let f0 = if let Some(filter0) = &fs[0] { - filter0.eval_filter(local_values, next_values) - } else { - P::ONES - }; - let f1 = if let Some(filter1) = &fs[1] { - filter1.eval_filter(local_values, next_values) - } else { - P::ONES - }; - - consumer.constraint(combin1 * combin0 * h - f0 * combin1 - f1 * combin0); - } - 1 => { - let combin = challenges.combine(&chunk[0]); - let f0 = if let Some(filter1) = &fs[0] { - filter1.eval_filter(local_values, next_values) - } else { - P::ONES - }; - consumer.constraint(combin * h - f0); - } - - _ => todo!("Allow other constraint degrees"), - } - } - } -} - -/// Circuit version of `eval_helper_columns`. -/// Given data associated to a lookup (either a CTL or a range-check), check the associated helper polynomials. -pub(crate) fn eval_helper_columns_circuit, const D: usize>( - builder: &mut CircuitBuilder, - filter: &[Option>], - columns: &[Vec>], - local_values: &[ExtensionTarget], - next_values: &[ExtensionTarget], - helper_columns: &[ExtensionTarget], - constraint_degree: usize, - challenges: &GrandProductChallenge, - consumer: &mut RecursiveConstraintConsumer, -) { - if !helper_columns.is_empty() { - for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() { - let fs = - &filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()]; - let h = helper_columns[j]; - - let one = builder.one_extension(); - match chunk.len() { - 2 => { - let combin0 = challenges.combine_circuit(builder, &chunk[0]); - let combin1 = challenges.combine_circuit(builder, &chunk[1]); - - let f0 = if let Some(filter0) = &fs[0] { - filter0.eval_filter_circuit(builder, local_values, next_values) - } else { - one - }; - let f1 = if let Some(filter1) = &fs[1] { - filter1.eval_filter_circuit(builder, local_values, next_values) - } else { - one - }; - - let constr = builder.mul_sub_extension(combin0, h, f0); - let constr = builder.mul_extension(constr, combin1); - let f1_constr = builder.mul_extension(f1, combin0); - let constr = builder.sub_extension(constr, f1_constr); - - consumer.constraint(builder, constr); - } - 1 => { - let combin = challenges.combine_circuit(builder, &chunk[0]); - let f0 = if let Some(filter1) = &fs[0] { - filter1.eval_filter_circuit(builder, local_values, next_values) - } else { - one - }; - let constr = builder.mul_sub_extension(combin, h, f0); - consumer.constraint(builder, constr); - } - - _ => todo!("Allow other constraint degrees"), - } - } - } -} - -/// Given a STARK's trace, and the data associated to one lookup (either CTL or range check), -/// returns the associated helper polynomials. 
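// (Hedged sketch of the chunking below, not part of the original patch.) With
// constraint degree 3, the (column, filter) pairs are processed two at a time;
// for a chunk {(f0, phi0), (f1, phi1)} the helper column evaluates row-wise to
//   h(d) = phi0(d) / challenge.combine(f0(d)) + phi1(d) / challenge.combine(f1(d)),
// where each reciprocal comes from `batch_multiplicative_inverse` and rows whose
// filter is 0 contribute nothing.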
-pub(crate) fn get_helper_cols( - trace: &[PolynomialValues], - degree: usize, - columns_filters: &[ColumnFilter], - challenge: GrandProductChallenge, - constraint_degree: usize, -) -> Vec> { - let num_helper_columns = ceil_div_usize(columns_filters.len(), constraint_degree - 1); - - let mut helper_columns = Vec::with_capacity(num_helper_columns); - - for mut cols_filts in &columns_filters.iter().chunks(constraint_degree - 1) { - let (first_col, first_filter) = cols_filts.next().unwrap(); - - let mut filter_col = Vec::with_capacity(degree); - let first_combined = (0..degree) - .map(|d| { - let f = if let Some(filter) = first_filter { - let f = filter.eval_table(trace, d); - filter_col.push(f); - f - } else { - filter_col.push(F::ONE); - F::ONE - }; - if f.is_one() { - let evals = first_col - .iter() - .map(|c| c.eval_table(trace, d)) - .collect::>(); - challenge.combine(evals.iter()) - } else { - assert_eq!(f, F::ZERO, "Non-binary filter?"); - // Dummy value. Cannot be zero since it will be batch-inverted. - F::ONE - } - }) - .collect::>(); - - let mut acc = F::batch_multiplicative_inverse(&first_combined); - for d in 0..degree { - if filter_col[d].is_zero() { - acc[d] = F::ZERO; - } - } - - for (col, filt) in cols_filts { - let mut filter_col = Vec::with_capacity(degree); - let mut combined = (0..degree) - .map(|d| { - let f = if let Some(filter) = filt { - let f = filter.eval_table(trace, d); - filter_col.push(f); - f - } else { - filter_col.push(F::ONE); - F::ONE - }; - if f.is_one() { - let evals = col - .iter() - .map(|c| c.eval_table(trace, d)) - .collect::>(); - challenge.combine(evals.iter()) - } else { - assert_eq!(f, F::ZERO, "Non-binary filter?"); - // Dummy value. Cannot be zero since it will be batch-inverted. - F::ONE - } - }) - .collect::>(); - - combined = F::batch_multiplicative_inverse(&combined); - - for d in 0..degree { - if filter_col[d].is_zero() { - combined[d] = F::ZERO; - } - } - - batch_add_inplace(&mut acc, &combined); - } - - helper_columns.push(acc.into()); - } - assert_eq!(helper_columns.len(), num_helper_columns); - - helper_columns -} - -pub(crate) struct LookupCheckVars -where - F: Field, - FE: FieldExtension, - P: PackedField, -{ - pub(crate) local_values: Vec
<P>
, - pub(crate) next_values: Vec
<P>
, - pub(crate) challenges: Vec, -} - -/// Constraints for the logUp lookup argument. -pub(crate) fn eval_packed_lookups_generic( - stark: &S, - lookups: &[Lookup], - vars: &S::EvaluationFrame, - lookup_vars: LookupCheckVars, - yield_constr: &mut ConstraintConsumer
<P>
, -) where - F: RichField + Extendable, - FE: FieldExtension, - P: PackedField, - S: Stark, -{ - let local_values = vars.get_local_values(); - let next_values = vars.get_next_values(); - let degree = stark.constraint_degree(); - assert_eq!(degree, 3, "TODO: Allow other constraint degrees."); - let mut start = 0; - for lookup in lookups { - let num_helper_columns = lookup.num_helper_columns(degree); - for &challenge in &lookup_vars.challenges { - let grand_challenge = GrandProductChallenge { - beta: F::ONE, - gamma: challenge, - }; - let lookup_columns = lookup - .columns - .iter() - .map(|col| vec![col.eval_with_next(local_values, next_values)]) - .collect::>>(); - - // For each chunk, check that `h_i (x+f_2i) (x+f_{2i+1}) = (x+f_2i) * filter_{2i+1} + (x+f_{2i+1}) * filter_2i` if the chunk has length 2 - // or if it has length 1, check that `h_i * (x+f_2i) = filter_2i`, where x is the challenge - eval_helper_columns( - &lookup.filter_columns, - &lookup_columns, - local_values, - next_values, - &lookup_vars.local_values[start..start + num_helper_columns - 1], - degree, - &grand_challenge, - yield_constr, - ); - - let challenge = FE::from_basefield(challenge); - - // Check the `Z` polynomial. - let z = lookup_vars.local_values[start + num_helper_columns - 1]; - let next_z = lookup_vars.next_values[start + num_helper_columns - 1]; - let table_with_challenge = lookup.table_column.eval(local_values) + challenge; - let y = lookup_vars.local_values[start..start + num_helper_columns - 1] - .iter() - .fold(P::ZEROS, |acc, x| acc + *x) - * table_with_challenge - - lookup.frequencies_column.eval(local_values); - // Check that in the first row, z = 0; - yield_constr.constraint_first_row(z); - yield_constr.constraint((next_z - z) * table_with_challenge - y); - start += num_helper_columns; - } - } -} - -pub(crate) struct LookupCheckVarsTarget { - pub(crate) local_values: Vec>, - pub(crate) next_values: Vec>, - pub(crate) challenges: Vec, -} - -pub(crate) fn eval_ext_lookups_circuit< - F: RichField + Extendable, - S: Stark, - const D: usize, ->( - builder: &mut CircuitBuilder, - stark: &S, - vars: &S::EvaluationFrameTarget, - lookup_vars: LookupCheckVarsTarget, - yield_constr: &mut RecursiveConstraintConsumer, -) { - let degree = stark.constraint_degree(); - let lookups = stark.lookups(); - - let local_values = vars.get_local_values(); - let next_values = vars.get_next_values(); - assert_eq!(degree, 3, "TODO: Allow other constraint degrees."); - let mut start = 0; - for lookup in lookups { - let num_helper_columns = lookup.num_helper_columns(degree); - let col_values = lookup - .columns - .iter() - .map(|col| vec![col.eval_with_next_circuit(builder, local_values, next_values)]) - .collect::>(); - - for &challenge in &lookup_vars.challenges { - let grand_challenge = GrandProductChallenge { - beta: builder.one(), - gamma: challenge, - }; - - eval_helper_columns_circuit( - builder, - &lookup.filter_columns, - &col_values, - local_values, - next_values, - &lookup_vars.local_values[start..start + num_helper_columns - 1], - degree, - &grand_challenge, - yield_constr, - ); - let challenge = builder.convert_to_ext(challenge); - - let z = lookup_vars.local_values[start + num_helper_columns - 1]; - let next_z = lookup_vars.next_values[start + num_helper_columns - 1]; - let table_column = lookup - .table_column - .eval_circuit(builder, vars.get_local_values()); - let table_with_challenge = builder.add_extension(table_column, challenge); - let mut y = builder.add_many_extension( - 
&lookup_vars.local_values[start..start + num_helper_columns - 1], - ); - - let frequencies_column = lookup - .frequencies_column - .eval_circuit(builder, vars.get_local_values()); - y = builder.mul_extension(y, table_with_challenge); - y = builder.sub_extension(y, frequencies_column); - - // Check that in the first row, z = 0; - yield_constr.constraint_first_row(builder, z); - let mut constraint = builder.sub_extension(next_z, z); - constraint = builder.mul_extension(constraint, table_with_challenge); - constraint = builder.sub_extension(constraint, y); - yield_constr.constraint(builder, constraint); - start += num_helper_columns; - } - } -} diff --git a/evm/src/memory/memory_stark.rs b/evm/src/memory/memory_stark.rs index 44d2af6a..d8a818ff 100644 --- a/evm/src/memory/memory_stark.rs +++ b/evm/src/memory/memory_stark.rs @@ -12,18 +12,19 @@ use plonky2::timed; use plonky2::util::timing::TimingTree; use plonky2::util::transpose; use plonky2_maybe_rayon::*; +use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use starky::evaluation_frame::StarkEvaluationFrame; +use starky::lookup::{Column, Filter, Lookup}; +use starky::stark::Stark; use super::segments::Segment; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; -use crate::lookup::{Column, Filter, Lookup}; +use crate::all_stark::EvmStarkFrame; use crate::memory::columns::{ value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER, FILTER, FREQUENCIES, INITIALIZE_AUX, IS_READ, NUM_COLUMNS, RANGE_CHECK, SEGMENT_FIRST_CHANGE, TIMESTAMP, VIRTUAL_FIRST_CHANGE, }; use crate::memory::VALUE_LIMBS; -use crate::stark::Stark; use crate::witness::memory::MemoryOpKind::Read; use crate::witness::memory::{MemoryAddress, MemoryOp}; @@ -268,12 +269,12 @@ impl, const D: usize> MemoryStark { } impl, const D: usize> Stark for MemoryStark { - type EvaluationFrame = StarkFrame + type EvaluationFrame = EvmStarkFrame where FE: FieldExtension, P: PackedField; - type EvaluationFrameTarget = StarkFrame, NUM_COLUMNS>; + type EvaluationFrameTarget = EvmStarkFrame, ExtensionTarget, NUM_COLUMNS>; fn eval_packed_generic( &self, @@ -569,15 +570,19 @@ impl, const D: usize> Stark for MemoryStark bool { + true + } } #[cfg(test)] pub(crate) mod tests { use anyhow::Result; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use crate::memory::memory_stark::MemoryStark; - use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; #[test] fn test_stark_degree() -> Result<()> { diff --git a/evm/src/proof.rs b/evm/src/proof.rs index 33640458..bc70dbb8 100644 --- a/evm/src/proof.rs +++ b/evm/src/proof.rs @@ -1,33 +1,24 @@ use ethereum_types::{Address, H256, U256}; -use itertools::Itertools; -use plonky2::field::extension::{Extendable, FieldExtension}; -use plonky2::fri::oracle::PolynomialBatch; -use plonky2::fri::proof::{FriChallenges, FriChallengesTarget, FriProof, FriProofTarget}; -use plonky2::fri::structure::{ - FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget, -}; -use plonky2::hash::hash_types::{MerkleCapTarget, RichField}; -use plonky2::hash::merkle_tree::MerkleCap; -use plonky2::iop::ext_target::ExtensionTarget; +use plonky2::field::extension::Extendable; +use plonky2::hash::hash_types::RichField; use plonky2::iop::target::{BoolTarget, Target}; use 
plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::config::{GenericConfig, Hasher}; +use plonky2::plonk::config::GenericConfig; use plonky2::util::serialization::{Buffer, IoResult, Read, Write}; -use plonky2_maybe_rayon::*; use serde::{Deserialize, Serialize}; +use starky::config::StarkConfig; +use starky::lookup::GrandProductChallengeSet; +use starky::proof::{MultiProof, StarkProofChallenges}; use crate::all_stark::NUM_TABLES; -use crate::config::StarkConfig; -use crate::cross_table_lookup::GrandProductChallengeSet; use crate::util::{get_h160, get_h256, h2u}; /// A STARK proof for each table, plus some metadata used to create recursive wrapper proofs. #[derive(Debug, Clone)] pub struct AllProof, C: GenericConfig, const D: usize> { - /// Proofs for all the different STARK modules. - pub stark_proofs: [StarkProofWithMetadata; NUM_TABLES], - /// Cross-table lookup challenges. - pub(crate) ctl_challenges: GrandProductChallengeSet, + /// A multi-proof containing all proofs for the different STARK modules and their + /// cross-table lookup challenges. + pub multi_proof: MultiProof, /// Public memory values used for the recursive proofs. pub public_values: PublicValues, } @@ -35,7 +26,7 @@ pub struct AllProof, C: GenericConfig, co impl, C: GenericConfig, const D: usize> AllProof { /// Returns the degree (i.e. the trace length) of each STARK. pub fn degree_bits(&self, config: &StarkConfig) -> [usize; NUM_TABLES] { - core::array::from_fn(|i| self.stark_proofs[i].proof.recover_degree_bits(config)) + self.multi_proof.recover_degree_bits(config) } } @@ -821,309 +812,3 @@ impl ExtraBlockDataTarget { builder.connect(ed0.gas_used_after, ed1.gas_used_after); } } - -/// Merkle caps and openings that form the proof of a single STARK. -#[derive(Debug, Clone)] -pub struct StarkProof, C: GenericConfig, const D: usize> { - /// Merkle cap of LDEs of trace values. - pub trace_cap: MerkleCap, - /// Merkle cap of LDEs of lookup helper and CTL columns. - pub auxiliary_polys_cap: MerkleCap, - /// Merkle cap of LDEs of quotient polynomial evaluations. - pub quotient_polys_cap: MerkleCap, - /// Purported values of each polynomial at the challenge point. - pub openings: StarkOpeningSet, - /// A batch FRI argument for all openings. - pub opening_proof: FriProof, -} - -/// A `StarkProof` along with some metadata about the initial Fiat-Shamir state, which is used when -/// creating a recursive wrapper proof around a STARK proof. -#[derive(Debug, Clone)] -pub struct StarkProofWithMetadata -where - F: RichField + Extendable, - C: GenericConfig, -{ - /// Initial Fiat-Shamir state. - pub(crate) init_challenger_state: >::Permutation, - /// Proof for a single STARK. - pub(crate) proof: StarkProof, -} - -impl, C: GenericConfig, const D: usize> StarkProof { - /// Recover the length of the trace from a STARK proof and a STARK config. - pub fn recover_degree_bits(&self, config: &StarkConfig) -> usize { - let initial_merkle_proof = &self.opening_proof.query_round_proofs[0] - .initial_trees_proof - .evals_proofs[0] - .1; - let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len(); - lde_bits - config.fri_config.rate_bits - } - - /// Returns the number of cross-table lookup polynomials computed for the current STARK. - pub fn num_ctl_zs(&self) -> usize { - self.openings.ctl_zs_first.len() - } -} - -/// Circuit version of `StarkProof`. -/// Merkle caps and openings that form the proof of a single STARK. 
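// (Worked example with assumed parameters, not part of the original patch.) In
// `recover_degree_bits`, the length of a query's initial Merkle path fixes the
// LDE size: with cap_height = 4, siblings.len() = 16 and rate_bits = 2, we get
// lde_bits = 4 + 16 = 20 and degree_bits = 20 - 2 = 18, i.e. a 2^18-row trace.
// The circuit version below recovers the degree the same way.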
-#[derive(Eq, PartialEq, Debug)] -pub(crate) struct StarkProofTarget { - /// `Target` for the Merkle cap if LDEs of trace values. - pub trace_cap: MerkleCapTarget, - /// `Target` for the Merkle cap of LDEs of lookup helper and CTL columns. - pub auxiliary_polys_cap: MerkleCapTarget, - /// `Target` for the Merkle cap of LDEs of quotient polynomial evaluations. - pub quotient_polys_cap: MerkleCapTarget, - /// `Target`s for the purported values of each polynomial at the challenge point. - pub openings: StarkOpeningSetTarget, - /// `Target`s for the batch FRI argument for all openings. - pub opening_proof: FriProofTarget, -} - -impl StarkProofTarget { - /// Serializes a STARK proof. - pub(crate) fn to_buffer(&self, buffer: &mut Vec) -> IoResult<()> { - buffer.write_target_merkle_cap(&self.trace_cap)?; - buffer.write_target_merkle_cap(&self.auxiliary_polys_cap)?; - buffer.write_target_merkle_cap(&self.quotient_polys_cap)?; - buffer.write_target_fri_proof(&self.opening_proof)?; - self.openings.to_buffer(buffer)?; - Ok(()) - } - - /// Deserializes a STARK proof. - pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult { - let trace_cap = buffer.read_target_merkle_cap()?; - let auxiliary_polys_cap = buffer.read_target_merkle_cap()?; - let quotient_polys_cap = buffer.read_target_merkle_cap()?; - let opening_proof = buffer.read_target_fri_proof()?; - let openings = StarkOpeningSetTarget::from_buffer(buffer)?; - - Ok(Self { - trace_cap, - auxiliary_polys_cap, - quotient_polys_cap, - openings, - opening_proof, - }) - } - - /// Recover the length of the trace from a STARK proof and a STARK config. - pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize { - let initial_merkle_proof = &self.opening_proof.query_round_proofs[0] - .initial_trees_proof - .evals_proofs[0] - .1; - let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len(); - lde_bits - config.fri_config.rate_bits - } -} - -/// Randomness used for a STARK proof. -pub(crate) struct StarkProofChallenges, const D: usize> { - /// Random values used to combine STARK constraints. - pub stark_alphas: Vec, - - /// Point at which the STARK polynomials are opened. - pub stark_zeta: F::Extension, - - /// Randomness used in FRI. - pub fri_challenges: FriChallenges, -} - -/// Circuit version of `StarkProofChallenges`. -pub(crate) struct StarkProofChallengesTarget { - /// `Target`s for the random values used to combine STARK constraints. - pub stark_alphas: Vec, - /// `ExtensionTarget` for the point at which the STARK polynomials are opened. - pub stark_zeta: ExtensionTarget, - /// `Target`s for the randomness used in FRI. - pub fri_challenges: FriChallengesTarget, -} - -/// Purported values of each polynomial at the challenge point. -#[derive(Debug, Clone)] -pub struct StarkOpeningSet, const D: usize> { - /// Openings of trace polynomials at `zeta`. - pub local_values: Vec, - /// Openings of trace polynomials at `g * zeta`. - pub next_values: Vec, - /// Openings of lookups and cross-table lookups `Z` polynomials at `zeta`. - pub auxiliary_polys: Vec, - /// Openings of lookups and cross-table lookups `Z` polynomials at `g * zeta`. - pub auxiliary_polys_next: Vec, - /// Openings of cross-table lookups `Z` polynomials at `1`. - pub ctl_zs_first: Vec, - /// Openings of quotient polynomials at `zeta`. 
- pub quotient_polys: Vec, -} - -impl, const D: usize> StarkOpeningSet { - /// Returns a `StarkOpeningSet` given all the polynomial commitments, the number of permutation `Z`polynomials, - /// the evaluation point and a generator `g`. - /// Polynomials are evaluated at point `zeta` and, if necessary, at `g * zeta`. - pub fn new>( - zeta: F::Extension, - g: F, - trace_commitment: &PolynomialBatch, - auxiliary_polys_commitment: &PolynomialBatch, - quotient_commitment: &PolynomialBatch, - num_lookup_columns: usize, - num_ctl_polys: &[usize], - ) -> Self { - let total_num_helper_cols: usize = num_ctl_polys.iter().sum(); - - // Batch evaluates polynomials on the LDE, at a point `z`. - let eval_commitment = |z: F::Extension, c: &PolynomialBatch| { - c.polynomials - .par_iter() - .map(|p| p.to_extension().eval(z)) - .collect::>() - }; - // Batch evaluates polynomials at a base field point `z`. - let eval_commitment_base = |z: F, c: &PolynomialBatch| { - c.polynomials - .par_iter() - .map(|p| p.eval(z)) - .collect::>() - }; - - let auxiliary_first = eval_commitment_base(F::ONE, auxiliary_polys_commitment); - let ctl_zs_first = auxiliary_first[num_lookup_columns + total_num_helper_cols..].to_vec(); - // `g * zeta`. - let zeta_next = zeta.scalar_mul(g); - Self { - local_values: eval_commitment(zeta, trace_commitment), - next_values: eval_commitment(zeta_next, trace_commitment), - auxiliary_polys: eval_commitment(zeta, auxiliary_polys_commitment), - auxiliary_polys_next: eval_commitment(zeta_next, auxiliary_polys_commitment), - ctl_zs_first, - quotient_polys: eval_commitment(zeta, quotient_commitment), - } - } - - /// Constructs the openings required by FRI. - /// All openings but `ctl_zs_first` are grouped together. - pub(crate) fn to_fri_openings(&self) -> FriOpenings { - let zeta_batch = FriOpeningBatch { - values: self - .local_values - .iter() - .chain(&self.auxiliary_polys) - .chain(&self.quotient_polys) - .copied() - .collect_vec(), - }; - let zeta_next_batch = FriOpeningBatch { - values: self - .next_values - .iter() - .chain(&self.auxiliary_polys_next) - .copied() - .collect_vec(), - }; - debug_assert!(!self.ctl_zs_first.is_empty()); - let ctl_first_batch = FriOpeningBatch { - values: self - .ctl_zs_first - .iter() - .copied() - .map(F::Extension::from_basefield) - .collect(), - }; - - FriOpenings { - batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch], - } - } -} - -/// Circuit version of `StarkOpeningSet`. -/// `Target`s for the purported values of each polynomial at the challenge point. -#[derive(Eq, PartialEq, Debug)] -pub(crate) struct StarkOpeningSetTarget { - /// `ExtensionTarget`s for the openings of trace polynomials at `zeta`. - pub local_values: Vec>, - /// `ExtensionTarget`s for the opening of trace polynomials at `g * zeta`. - pub next_values: Vec>, - /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `zeta`. - pub auxiliary_polys: Vec>, - /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `g * zeta`. - pub auxiliary_polys_next: Vec>, - /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at 1. - pub ctl_zs_first: Vec, - /// `ExtensionTarget`s for the opening of quotient polynomials at `zeta`. - pub quotient_polys: Vec>, -} - -impl StarkOpeningSetTarget { - /// Serializes a STARK's opening set. 
- pub(crate) fn to_buffer(&self, buffer: &mut Vec) -> IoResult<()> { - buffer.write_target_ext_vec(&self.local_values)?; - buffer.write_target_ext_vec(&self.next_values)?; - buffer.write_target_ext_vec(&self.auxiliary_polys)?; - buffer.write_target_ext_vec(&self.auxiliary_polys_next)?; - buffer.write_target_vec(&self.ctl_zs_first)?; - buffer.write_target_ext_vec(&self.quotient_polys)?; - Ok(()) - } - - /// Deserializes a STARK's opening set. - pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult { - let local_values = buffer.read_target_ext_vec::()?; - let next_values = buffer.read_target_ext_vec::()?; - let auxiliary_polys = buffer.read_target_ext_vec::()?; - let auxiliary_polys_next = buffer.read_target_ext_vec::()?; - let ctl_zs_first = buffer.read_target_vec()?; - let quotient_polys = buffer.read_target_ext_vec::()?; - - Ok(Self { - local_values, - next_values, - auxiliary_polys, - auxiliary_polys_next, - ctl_zs_first, - quotient_polys, - }) - } - - /// Circuit version of `to_fri_openings`for `FriOpenings`. - /// Constructs the `Target`s the circuit version of FRI. - /// All openings but `ctl_zs_first` are grouped together. - pub(crate) fn to_fri_openings(&self, zero: Target) -> FriOpeningsTarget { - let zeta_batch = FriOpeningBatchTarget { - values: self - .local_values - .iter() - .chain(&self.auxiliary_polys) - .chain(&self.quotient_polys) - .copied() - .collect_vec(), - }; - let zeta_next_batch = FriOpeningBatchTarget { - values: self - .next_values - .iter() - .chain(&self.auxiliary_polys_next) - .copied() - .collect_vec(), - }; - debug_assert!(!self.ctl_zs_first.is_empty()); - let ctl_first_batch = FriOpeningBatchTarget { - values: self - .ctl_zs_first - .iter() - .copied() - .map(|t| t.to_ext_target(zero)) - .collect(), - }; - - FriOpeningsTarget { - batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch], - } - } -} diff --git a/evm/src/prover.rs b/evm/src/prover.rs index f376b8cd..8f11c112 100644 --- a/evm/src/prover.rs +++ b/evm/src/prover.rs @@ -1,44 +1,34 @@ use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; -use anyhow::{anyhow, ensure, Result}; +use anyhow::{anyhow, Result}; +use hashbrown::HashMap; use itertools::Itertools; use once_cell::sync::Lazy; use plonky2::field::extension::Extendable; -use plonky2::field::packable::Packable; -use plonky2::field::packed::PackedField; -use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues}; -use plonky2::field::types::Field; -use plonky2::field::zero_poly_coset::ZeroPolyOnCoset; +use plonky2::field::polynomial::PolynomialValues; use plonky2::fri::oracle::PolynomialBatch; use plonky2::hash::hash_types::RichField; use plonky2::iop::challenger::Challenger; use plonky2::plonk::config::GenericConfig; use plonky2::timed; use plonky2::util::timing::TimingTree; -use plonky2::util::transpose; -use plonky2_maybe_rayon::*; -use plonky2_util::{log2_ceil, log2_strict}; +use starky::config::StarkConfig; +#[cfg(debug_assertions)] +use starky::cross_table_lookup::debug_utils::check_ctls; +use starky::cross_table_lookup::{get_ctl_data, CtlData}; +use starky::lookup::GrandProductChallengeSet; +use starky::proof::{MultiProof, StarkProofWithMetadata}; +use starky::prover::prove_with_commitment; +use starky::stark::Stark; use crate::all_stark::{AllStark, Table, NUM_TABLES}; -use crate::config::StarkConfig; -use crate::constraint_consumer::ConstraintConsumer; use crate::cpu::kernel::aggregator::KERNEL; -use crate::cross_table_lookup::{ - cross_table_lookup_data, get_grand_product_challenge_set, CtlCheckVars, 
CtlData, - GrandProductChallengeSet, -}; -use crate::evaluation_frame::StarkEvaluationFrame; use crate::generation::{generate_traces, GenerationInputs}; use crate::get_challenges::observe_public_values; -use crate::lookup::{lookup_helper_columns, Lookup, LookupCheckVars}; -use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof, StarkProofWithMetadata}; -use crate::stark::Stark; -use crate::vanishing_poly::eval_vanishing_poly; -#[cfg(test)] -use crate::{ - cross_table_lookup::testutils::check_ctls, verifier::testutils::get_memory_extra_looking_values, -}; +use crate::proof::{AllProof, PublicValues}; +#[cfg(debug_assertions)] +use crate::verifier::debug_utils::get_memory_extra_looking_values; /// Generate traces, then create all STARK proofs. pub fn prove( @@ -124,16 +114,15 @@ where observe_public_values::(&mut challenger, &public_values) .map_err(|_| anyhow::Error::msg("Invalid conversion of public values."))?; - // Get challenges for the cross-table lookups. - let ctl_challenges = get_grand_product_challenge_set(&mut challenger, config.num_challenges); // For each STARK, compute its cross-table lookup Z polynomials and get the associated `CtlData`. - let ctl_data_per_table = timed!( + let (ctl_challenges, ctl_data_per_table) = timed!( timing, "compute CTL data", - cross_table_lookup_data::( + get_ctl_data::( + config, &trace_poly_values, &all_stark.cross_table_lookups, - &ctl_challenges, + &mut challenger, all_stark.arithmetic_stark.constraint_degree() ) ); @@ -154,18 +143,26 @@ where )? ); - #[cfg(test)] + // This is an expensive check, hence is only run when `debug_assertions` are enabled. + #[cfg(debug_assertions)] { + let mut extra_values = HashMap::new(); + extra_values.insert( + *Table::Memory, + get_memory_extra_looking_values(&public_values), + ); check_ctls( &trace_poly_values, &all_stark.cross_table_lookups, - &get_memory_extra_looking_values(&public_values), + &extra_values, ); } Ok(AllProof { - stark_proofs, - ctl_challenges, + multi_proof: MultiProof { + stark_proofs, + ctl_challenges, + }, public_values, }) } @@ -331,371 +328,26 @@ where { check_abort_signal(abort_signal.clone())?; - let degree = trace_poly_values[0].len(); - let degree_bits = log2_strict(degree); - let fri_params = config.fri_params(degree_bits); - let rate_bits = config.fri_config.rate_bits; - let cap_height = config.fri_config.cap_height; - assert!( - fri_params.total_arities() <= degree_bits + rate_bits - cap_height, - "FRI total reduction arity is too large.", - ); - + // Clear buffered outputs. let init_challenger_state = challenger.compact(); - let constraint_degree = stark.constraint_degree(); - let lookup_challenges = stark.uses_lookups().then(|| { - ctl_challenges - .challenges - .iter() - .map(|ch| ch.beta) - .collect::>() - }); - let lookups = stark.lookups(); - let lookup_helper_columns = timed!( - timing, - "compute lookup helper columns", - lookup_challenges.as_ref().map(|challenges| { - let mut columns = Vec::new(); - for lookup in &lookups { - for &challenge in challenges { - columns.extend(lookup_helper_columns( - lookup, - trace_poly_values, - challenge, - constraint_degree, - )); - } - } - columns - }) - ); - let num_lookup_columns = lookup_helper_columns.as_ref().map(|v| v.len()).unwrap_or(0); - - // We add CTLs to the permutation arguments so that we can batch commit to - // all auxiliary polynomials. 
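// (Sketch, inferred from the match below; not part of the original patch.) The
// batched auxiliary commitment is laid out as
//   [ lookup helper columns | CTL helper columns | CTL Z columns ],
// with the lookup block absent for STARKs without lookups; the opening logic
// later indexes into this layout via `num_lookup_columns` and `num_ctl_polys`.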
- let auxiliary_polys = match lookup_helper_columns { - None => { - let mut ctl_polys = ctl_data.ctl_helper_polys(); - ctl_polys.extend(ctl_data.ctl_z_polys()); - ctl_polys - } - Some(mut lookup_columns) => { - lookup_columns.extend(ctl_data.ctl_helper_polys()); - lookup_columns.extend(ctl_data.ctl_z_polys()); - lookup_columns - } - }; - assert!(!auxiliary_polys.is_empty(), "No CTL?"); - - // Get the polynomial commitments for all auxiliary polynomials. - let auxiliary_polys_commitment = timed!( - timing, - "compute auxiliary polynomials commitment", - PolynomialBatch::from_values( - auxiliary_polys, - rate_bits, - false, - config.fri_config.cap_height, - timing, - None, - ) - ); - - let auxiliary_polys_cap = auxiliary_polys_commitment.merkle_tree.cap.clone(); - challenger.observe_cap(&auxiliary_polys_cap); - - let alphas = challenger.get_n_challenges(config.num_challenges); - - let num_ctl_polys = ctl_data.num_ctl_helper_polys(); - - #[cfg(test)] - { - check_constraints( - stark, - trace_commitment, - &auxiliary_polys_commitment, - lookup_challenges.as_ref(), - &lookups, - ctl_data, - alphas.clone(), - degree_bits, - num_lookup_columns, - &num_ctl_polys, - ); - } - - check_abort_signal(abort_signal.clone())?; - - let quotient_polys = timed!( - timing, - "compute quotient polys", - compute_quotient_polys::::Packing, C, S, D>( - stark, - trace_commitment, - &auxiliary_polys_commitment, - lookup_challenges.as_ref(), - &lookups, - ctl_data, - alphas, - degree_bits, - num_lookup_columns, - &num_ctl_polys, - config, - ) - ); - let all_quotient_chunks = timed!( - timing, - "split quotient polys", - quotient_polys - .into_par_iter() - .flat_map(|mut quotient_poly| { - quotient_poly - .trim_to_len(degree * stark.quotient_degree_factor()) - .expect( - "Quotient has failed, the vanishing polynomial is not divisible by Z_H", - ); - // Split quotient into degree-n chunks. - quotient_poly.chunks(degree) - }) - .collect() - ); - // Commit to the quotient polynomials. - let quotient_commitment = timed!( - timing, - "compute quotient commitment", - PolynomialBatch::from_coeffs( - all_quotient_chunks, - rate_bits, - false, - config.fri_config.cap_height, - timing, - None, - ) - ); - // Observe the quotient polynomials Merkle cap. - let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone(); - challenger.observe_cap("ient_polys_cap); - - let zeta = challenger.get_extension_challenge::(); - // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and - // `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since - // `(g * zeta)^n = zeta^n`, where `n` is the order of `g`. - let g = F::primitive_root_of_unity(degree_bits); - ensure!( - zeta.exp_power_of_2(degree_bits) != F::Extension::ONE, - "Opening point is in the subgroup." - ); - - // Compute all openings: evaluate all committed polynomials at `zeta` and, when necessary, at `g * zeta`. - let openings = StarkOpeningSet::new( - zeta, - g, + prove_with_commitment( + stark, + config, + trace_poly_values, trace_commitment, - &auxiliary_polys_commitment, - "ient_commitment, - stark.num_lookup_helper_columns(config), - &num_ctl_polys, - ); - // Get the FRI openings and observe them. 
- challenger.observe_openings(&openings.to_fri_openings()); - - let initial_merkle_trees = vec![ - trace_commitment, - &auxiliary_polys_commitment, - "ient_commitment, - ]; - - check_abort_signal(abort_signal.clone())?; - - let opening_proof = timed!( + Some(ctl_data), + Some(ctl_challenges), + challenger, + &[], timing, - "compute openings proof", - PolynomialBatch::prove_openings( - &stark.fri_instance(zeta, g, num_ctl_polys.iter().sum(), num_ctl_polys, config), - &initial_merkle_trees, - challenger, - &fri_params, - timing, - ) - ); - - let proof = StarkProof { - trace_cap: trace_commitment.merkle_tree.cap.clone(), - auxiliary_polys_cap, - quotient_polys_cap, - openings, - opening_proof, - }; - Ok(StarkProofWithMetadata { + ) + .map(|proof_with_pis| StarkProofWithMetadata { + proof: proof_with_pis.proof, init_challenger_state, - proof, }) } -/// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`, -/// where the `C_i`s are the Stark constraints. -fn compute_quotient_polys<'a, F, P, C, S, const D: usize>( - stark: &S, - trace_commitment: &'a PolynomialBatch, - auxiliary_polys_commitment: &'a PolynomialBatch, - lookup_challenges: Option<&'a Vec>, - lookups: &[Lookup], - ctl_data: &CtlData, - alphas: Vec, - degree_bits: usize, - num_lookup_columns: usize, - num_ctl_columns: &[usize], - config: &StarkConfig, -) -> Vec> -where - F: RichField + Extendable, - P: PackedField, - C: GenericConfig, - S: Stark, -{ - let degree = 1 << degree_bits; - let rate_bits = config.fri_config.rate_bits; - let total_num_helper_cols: usize = num_ctl_columns.iter().sum(); - - let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor()); - assert!( - quotient_degree_bits <= rate_bits, - "Having constraints of degree higher than the rate is not supported yet." - ); - let step = 1 << (rate_bits - quotient_degree_bits); - // When opening the `Z`s polys at the "next" point, need to look at the point `next_step` steps away. - let next_step = 1 << quotient_degree_bits; - - // Evaluation of the first Lagrange polynomial on the LDE domain. - let lagrange_first = PolynomialValues::selector(degree, 0).lde_onto_coset(quotient_degree_bits); - // Evaluation of the last Lagrange polynomial on the LDE domain. - let lagrange_last = - PolynomialValues::selector(degree, degree - 1).lde_onto_coset(quotient_degree_bits); - - let z_h_on_coset = ZeroPolyOnCoset::::new(degree_bits, quotient_degree_bits); - - // Retrieve the LDE values at index `i`. - let get_trace_values_packed = - |i_start| -> Vec
<P>
{ trace_commitment.get_lde_values_packed(i_start, step) }; - - // Last element of the subgroup. - let last = F::primitive_root_of_unity(degree_bits).inverse(); - let size = degree << quotient_degree_bits; - let coset = F::cyclic_subgroup_coset_known_order( - F::primitive_root_of_unity(degree_bits + quotient_degree_bits), - F::coset_shift(), - size, - ); - - // We will step by `P::WIDTH`, and in each iteration, evaluate the quotient polynomial at - // a batch of `P::WIDTH` points. - let quotient_values = (0..size) - .into_par_iter() - .step_by(P::WIDTH) - .flat_map_iter(|i_start| { - let i_next_start = (i_start + next_step) % size; - let i_range = i_start..i_start + P::WIDTH; - - let x = *P::from_slice(&coset[i_range.clone()]); - let z_last = x - last; - let lagrange_basis_first = *P::from_slice(&lagrange_first.values[i_range.clone()]); - let lagrange_basis_last = *P::from_slice(&lagrange_last.values[i_range]); - - let mut consumer = ConstraintConsumer::new( - alphas.clone(), - z_last, - lagrange_basis_first, - lagrange_basis_last, - ); - // Get the local and next row evaluations for the current STARK. - let vars = S::EvaluationFrame::from_values( - &get_trace_values_packed(i_start), - &get_trace_values_packed(i_next_start), - ); - // Get the local and next row evaluations for the permutation argument, as well as the associated challenges. - let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars { - local_values: auxiliary_polys_commitment.get_lde_values_packed(i_start, step) - [..num_lookup_columns] - .to_vec(), - next_values: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step) - [..num_lookup_columns] - .to_vec(), - challenges: challenges.to_vec(), - }); - - // Get all the data for this STARK's CTLs: - // - the local and next row evaluations for the CTL Z polynomials - // - the associated challenges. - // - for each CTL: - // - the filter `Column` - // - the `Column`s that form the looking/looked table. - - let mut start_index = 0; - let ctl_vars = ctl_data - .zs_columns - .iter() - .enumerate() - .map(|(i, zs_columns)| { - let num_ctl_helper_cols = num_ctl_columns[i]; - let helper_columns = auxiliary_polys_commitment - .get_lde_values_packed(i_start, step)[num_lookup_columns - + start_index - ..num_lookup_columns + start_index + num_ctl_helper_cols] - .to_vec(); - - let ctl_vars = CtlCheckVars:: { - helper_columns, - local_z: auxiliary_polys_commitment.get_lde_values_packed(i_start, step) - [num_lookup_columns + total_num_helper_cols + i], - next_z: auxiliary_polys_commitment - .get_lde_values_packed(i_next_start, step) - [num_lookup_columns + total_num_helper_cols + i], - challenges: zs_columns.challenge, - columns: zs_columns.columns.clone(), - filter: zs_columns.filter.clone(), - }; - - start_index += num_ctl_helper_cols; - - ctl_vars - }) - .collect::>(); - - // Evaluate the polynomial combining all constraints, including those associated - // to the permutation and CTL arguments. - eval_vanishing_poly::( - stark, - &vars, - lookups, - lookup_vars, - &ctl_vars, - &mut consumer, - ); - let mut constraints_evals = consumer.accumulators(); - // We divide the constraints evaluations by `Z_H(x)`. 
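// Why this division is safe: the quotient is evaluated on the coset s·H'
// (with s = F::coset_shift()), where Z_H(x) = x^n - 1 never vanishes, so
// C(x) / Z_H(x) can be computed pointwise as C(x) · Z_H(x)^{-1}. A minimal
// sketch of that pointwise step, assuming a precomputed inverse (the
// helper name is illustrative):
fn divide_by_z_h<P: plonky2::field::packed::PackedField>(
    constraint_evals: &mut [P],
    z_h_inverse: P,
) {
    for eval in constraint_evals.iter_mut() {
        // C(x) / Z_H(x) = C(x) · Z_H(x)^{-1}
        *eval *= z_h_inverse;
    }
}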
- let denominator_inv: P = z_h_on_coset.eval_inverse_packed(i_start); - for eval in &mut constraints_evals { - *eval *= denominator_inv; - } - - let num_challenges = alphas.len(); - - (0..P::WIDTH).map(move |i| { - (0..num_challenges) - .map(|j| constraints_evals[j].as_slice()[i]) - .collect() - }) - }) - .collect::>(); - - transpose("ient_values) - .into_par_iter() - .map(PolynomialValues::new) - .map(|values| values.coset_ifft(F::coset_shift())) - .collect() -} - /// Utility method that checks whether a kill signal has been emitted by one of the workers, /// which will result in an early abort for all the other processes involved in the same set /// of transactions. @@ -708,134 +360,3 @@ pub fn check_abort_signal(abort_signal: Option>) -> Result<()> { Ok(()) } - -#[cfg(test)] -/// Check that all constraints evaluate to zero on `H`. -/// Can also be used to check the degree of the constraints by evaluating on a larger subgroup. -fn check_constraints<'a, F, C, S, const D: usize>( - stark: &S, - trace_commitment: &'a PolynomialBatch, - auxiliary_commitment: &'a PolynomialBatch, - lookup_challenges: Option<&'a Vec>, - lookups: &[Lookup], - ctl_data: &CtlData, - alphas: Vec, - degree_bits: usize, - num_lookup_columns: usize, - num_ctl_helper_cols: &[usize], -) where - F: RichField + Extendable, - C: GenericConfig, - S: Stark, -{ - let degree = 1 << degree_bits; - let rate_bits = 0; // Set this to higher value to check constraint degree. - - let total_num_helper_cols: usize = num_ctl_helper_cols.iter().sum(); - - let size = degree << rate_bits; - let step = 1 << rate_bits; - - // Evaluation of the first Lagrange polynomial. - let lagrange_first = PolynomialValues::selector(degree, 0).lde(rate_bits); - // Evaluation of the last Lagrange polynomial. - let lagrange_last = PolynomialValues::selector(degree, degree - 1).lde(rate_bits); - - let subgroup = F::two_adic_subgroup(degree_bits + rate_bits); - - // Get the evaluations of a batch of polynomials over our subgroup. - let get_subgroup_evals = |comm: &PolynomialBatch| -> Vec> { - let values = comm - .polynomials - .par_iter() - .map(|coeffs| coeffs.clone().fft().values) - .collect::>(); - transpose(&values) - }; - - // Get batch evaluations of the trace, permutation and CTL polynomials over our subgroup. - let trace_subgroup_evals = get_subgroup_evals(trace_commitment); - let auxiliary_subgroup_evals = get_subgroup_evals(auxiliary_commitment); - - // Last element of the subgroup. - let last = F::primitive_root_of_unity(degree_bits).inverse(); - - let constraint_values = (0..size) - .map(|i| { - let i_next = (i + step) % size; - - let x = subgroup[i]; - let z_last = x - last; - let lagrange_basis_first = lagrange_first.values[i]; - let lagrange_basis_last = lagrange_last.values[i]; - - let mut consumer = ConstraintConsumer::new( - alphas.clone(), - z_last, - lagrange_basis_first, - lagrange_basis_last, - ); - // Get the local and next row evaluations for the current STARK's trace. - let vars = S::EvaluationFrame::from_values( - &trace_subgroup_evals[i], - &trace_subgroup_evals[i_next], - ); - // Get the local and next row evaluations for the current STARK's permutation argument. - let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars { - local_values: auxiliary_subgroup_evals[i][..num_lookup_columns].to_vec(), - next_values: auxiliary_subgroup_evals[i_next][..num_lookup_columns].to_vec(), - challenges: challenges.to_vec(), - }); - - // Get the local and next row evaluations for the current STARK's CTL Z polynomials. 
- let mut start_index = 0; - let ctl_vars = ctl_data - .zs_columns - .iter() - .enumerate() - .map(|(iii, zs_columns)| { - let num_helper_cols = num_ctl_helper_cols[iii]; - let helper_columns = auxiliary_subgroup_evals[i][num_lookup_columns - + start_index - ..num_lookup_columns + start_index + num_helper_cols] - .to_vec(); - let ctl_vars = CtlCheckVars:: { - helper_columns, - local_z: auxiliary_subgroup_evals[i] - [num_lookup_columns + total_num_helper_cols + iii], - next_z: auxiliary_subgroup_evals[i_next] - [num_lookup_columns + total_num_helper_cols + iii], - challenges: zs_columns.challenge, - columns: zs_columns.columns.clone(), - filter: zs_columns.filter.clone(), - }; - - start_index += num_helper_cols; - - ctl_vars - }) - .collect::>(); - - // Evaluate the polynomial combining all constraints, including those associated - // to the permutation and CTL arguments. - eval_vanishing_poly::( - stark, - &vars, - lookups, - lookup_vars, - &ctl_vars, - &mut consumer, - ); - consumer.accumulators() - }) - .collect::>(); - - // Assert that all constraints evaluate to 0 over our subgroup. - for v in constraint_values { - assert!( - v.iter().all(|x| x.is_zero()), - "Constraint failed in {}", - std::any::type_name::() - ); - } -} diff --git a/evm/src/recursive_verifier.rs b/evm/src/recursive_verifier.rs index 5220ba32..f3a8e1db 100644 --- a/evm/src/recursive_verifier.rs +++ b/evm/src/recursive_verifier.rs @@ -4,47 +4,41 @@ use core::fmt::Debug; use anyhow::Result; use ethereum_types::{BigEndianHash, U256}; use plonky2::field::extension::Extendable; -use plonky2::field::types::Field; -use plonky2::fri::witness_util::set_fri_proof_target; use plonky2::gates::exponentiation::ExponentiationGate; use plonky2::gates::gate::GateRef; use plonky2::gates::noop::NoopGate; use plonky2::hash::hash_types::RichField; use plonky2::hash::hashing::PlonkyPermutation; use plonky2::iop::challenger::RecursiveChallenger; -use plonky2::iop::ext_target::ExtensionTarget; use plonky2::iop::target::Target; use plonky2::iop::witness::{PartialWitness, Witness, WitnessWrite}; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData}; use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget}; -use plonky2::util::reducing::ReducingFactorTarget; use plonky2::util::serialization::{ Buffer, GateSerializer, IoResult, Read, WitnessGeneratorSerializer, Write, }; -use plonky2::with_context; use plonky2_util::log2_ceil; +use starky::config::StarkConfig; +use starky::cross_table_lookup::{CrossTableLookup, CtlCheckVarsTarget}; +use starky::lookup::{GrandProductChallenge, GrandProductChallengeSet}; +use starky::proof::{StarkProofTarget, StarkProofWithMetadata}; +use starky::recursive_verifier::{ + add_virtual_stark_proof, set_stark_proof_target, verify_stark_proof_with_challenges_circuit, +}; +use starky::stark::Stark; use crate::all_stark::Table; -use crate::config::StarkConfig; -use crate::constraint_consumer::RecursiveConstraintConsumer; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; -use crate::cross_table_lookup::{CrossTableLookup, CtlCheckVarsTarget, GrandProductChallengeSet}; -use crate::evaluation_frame::StarkEvaluationFrame; -use crate::lookup::{GrandProductChallenge, LookupCheckVarsTarget}; use crate::memory::segments::Segment; use crate::memory::VALUE_LIMBS; use crate::proof::{ BlockHashes, BlockHashesTarget, 
BlockMetadata, BlockMetadataTarget, ExtraBlockData, - ExtraBlockDataTarget, PublicValues, PublicValuesTarget, StarkOpeningSetTarget, StarkProof, - StarkProofChallengesTarget, StarkProofTarget, StarkProofWithMetadata, TrieRoots, - TrieRootsTarget, + ExtraBlockDataTarget, PublicValues, PublicValuesTarget, TrieRoots, TrieRootsTarget, }; -use crate::stark::Stark; use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64}; -use crate::vanishing_poly::eval_vanishing_poly_circuit; use crate::witness::errors::ProgramError; pub(crate) struct PublicInputs> @@ -205,7 +199,7 @@ where } } -/// Returns the recursive Stark circuit. +/// Returns the recursive STARK circuit. pub(crate) fn recursive_stark_circuit< F: RichField + Extendable, C: GenericConfig, @@ -236,7 +230,7 @@ where ); let num_ctl_helper_zs = num_ctl_zs + total_num_helpers; - let proof_target = add_virtual_stark_proof( + let stark_proof_target = add_virtual_stark_proof( &mut builder, stark, inner_config, @@ -246,7 +240,7 @@ where ); builder.register_public_inputs( - &proof_target + &stark_proof_target .trace_cap .0 .iter() @@ -265,7 +259,7 @@ where let ctl_vars = CtlCheckVarsTarget::from_proof( *table, - &proof_target, + &stark_proof_target, cross_table_lookups, &ctl_challenges_target, num_lookup_columns, @@ -279,20 +273,25 @@ where })); let mut challenger = RecursiveChallenger::::from_state(init_challenger_state_target); - let challenges = - proof_target.get_challenges::(&mut builder, &mut challenger, inner_config); + let challenges = stark_proof_target.get_challenges::( + &mut builder, + &mut challenger, + Some(&ctl_challenges_target), + true, + inner_config, + ); let challenger_state = challenger.compact(&mut builder); builder.register_public_inputs(challenger_state.as_ref()); - builder.register_public_inputs(&proof_target.openings.ctl_zs_first); + builder.register_public_inputs(stark_proof_target.openings.ctl_zs_first.as_ref().unwrap()); verify_stark_proof_with_challenges_circuit::( &mut builder, stark, - &proof_target, - &challenges, - &ctl_vars, - &ctl_challenges_target, + &stark_proof_target, + &[], // public inputs + challenges, + Some(&ctl_vars), inner_config, ); @@ -306,7 +305,7 @@ where let circuit = builder.build::(); StarkWrapperCircuit { circuit, - stark_proof_target: proof_target, + stark_proof_target, ctl_challenges_target, init_challenger_state_target, zero_target, @@ -324,122 +323,6 @@ pub(crate) fn add_common_recursion_gates, const D: ))); } -/// Recursively verifies an inner proof. 
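// Note: the function removed below was the in-crate predecessor of
// starky's `verify_stark_proof_with_challenges_circuit`, which
// `recursive_stark_circuit` above now calls with `Some(&ctl_vars)` for the
// cross-table lookups and an empty `&[]` slice of public inputs.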
-fn verify_stark_proof_with_challenges_circuit< - F: RichField + Extendable, - C: GenericConfig, - S: Stark, - const D: usize, ->( - builder: &mut CircuitBuilder, - stark: &S, - proof: &StarkProofTarget, - challenges: &StarkProofChallengesTarget, - ctl_vars: &[CtlCheckVarsTarget], - ctl_challenges: &GrandProductChallengeSet, - inner_config: &StarkConfig, -) where - C::Hasher: AlgebraicHasher, -{ - let zero = builder.zero(); - let one = builder.one_extension(); - - let num_ctl_polys = ctl_vars - .iter() - .map(|ctl| ctl.helper_columns.len()) - .sum::(); - - let StarkOpeningSetTarget { - local_values, - next_values, - auxiliary_polys, - auxiliary_polys_next, - ctl_zs_first, - quotient_polys, - } = &proof.openings; - let vars = S::EvaluationFrameTarget::from_values(local_values, next_values); - - let degree_bits = proof.recover_degree_bits(inner_config); - let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits); - let z_h_zeta = builder.sub_extension(zeta_pow_deg, one); - let (l_0, l_last) = - eval_l_0_and_l_last_circuit(builder, degree_bits, challenges.stark_zeta, z_h_zeta); - let last = - builder.constant_extension(F::Extension::primitive_root_of_unity(degree_bits).inverse()); - let z_last = builder.sub_extension(challenges.stark_zeta, last); - - let mut consumer = RecursiveConstraintConsumer::::new( - builder.zero_extension(), - challenges.stark_alphas.clone(), - z_last, - l_0, - l_last, - ); - - let num_lookup_columns = stark.num_lookup_helper_columns(inner_config); - let lookup_challenges = (num_lookup_columns > 0).then(|| { - ctl_challenges - .challenges - .iter() - .map(|ch| ch.beta) - .collect::>() - }); - - let lookup_vars = stark.uses_lookups().then(|| LookupCheckVarsTarget { - local_values: auxiliary_polys[..num_lookup_columns].to_vec(), - next_values: auxiliary_polys_next[..num_lookup_columns].to_vec(), - challenges: lookup_challenges.unwrap(), - }); - - with_context!( - builder, - "evaluate vanishing polynomial", - eval_vanishing_poly_circuit::( - builder, - stark, - &vars, - lookup_vars, - ctl_vars, - &mut consumer, - ) - ); - let vanishing_polys_zeta = consumer.accumulators(); - - // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta. - let mut scale = ReducingFactorTarget::new(zeta_pow_deg); - for (i, chunk) in quotient_polys - .chunks(stark.quotient_degree_factor()) - .enumerate() - { - let recombined_quotient = scale.reduce(chunk, builder); - let computed_vanishing_poly = builder.mul_extension(z_h_zeta, recombined_quotient); - builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly); - } - - let merkle_caps = vec![ - proof.trace_cap.clone(), - proof.auxiliary_polys_cap.clone(), - proof.quotient_polys_cap.clone(), - ]; - - let fri_instance = stark.fri_instance_target( - builder, - challenges.stark_zeta, - F::primitive_root_of_unity(degree_bits), - num_ctl_polys, - ctl_zs_first.len(), - inner_config, - ); - builder.verify_fri_proof::( - &fri_instance, - &proof.openings.to_fri_openings(zero), - &challenges.fri_challenges, - &merkle_caps, - &proof.opening_proof, - &inner_config.fri_params(degree_bits), - ); -} - /// Recursive version of `get_memory_extra_looking_sum`. 
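// The "extra looking sum" below is logUp-style: each extra memory row is
// combined into a single field element with the CTL challenge (β, γ) as
// combine(row) = γ + Σ_i row[i]·β^i, and the sum accumulates the inverses
// of those combined values. A minimal native sketch, assuming that combine
// formula and nonzero combined values (the helper is hypothetical):
fn extra_looking_sum<F: plonky2::field::types::Field>(rows: &[Vec<F>], beta: F, gamma: F) -> F {
    rows.iter().fold(F::ZERO, |sum, row| {
        // Horner evaluation of Σ_i row[i]·β^i, shifted by γ.
        let combined = gamma + row.iter().rev().fold(F::ZERO, |acc, &v| acc * beta + v);
        sum + combined.inverse()
    })
}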
pub(crate) fn get_memory_extra_looking_sum_circuit, const D: usize>( builder: &mut CircuitBuilder, @@ -667,25 +550,6 @@ fn add_data_write, const D: usize>( builder.add(running_sum, inverse) } -fn eval_l_0_and_l_last_circuit, const D: usize>( - builder: &mut CircuitBuilder, - log_n: usize, - x: ExtensionTarget, - z_x: ExtensionTarget, -) -> (ExtensionTarget, ExtensionTarget) { - let n = builder.constant_extension(F::Extension::from_canonical_usize(1 << log_n)); - let g = builder.constant_extension(F::Extension::primitive_root_of_unity(log_n)); - let one = builder.one_extension(); - let l_0_deno = builder.mul_sub_extension(n, x, n); - let l_last_deno = builder.mul_sub_extension(g, x, one); - let l_last_deno = builder.mul_extension(n, l_last_deno); - - ( - builder.div_extension(z_x, l_0_deno), - builder.div_extension(z_x, l_last_deno), - ) -} - pub(crate) fn add_virtual_public_values, const D: usize>( builder: &mut CircuitBuilder, ) -> PublicValuesTarget { @@ -770,93 +634,6 @@ pub(crate) fn add_virtual_extra_block_data, const D } } -pub(crate) fn add_virtual_stark_proof< - F: RichField + Extendable, - S: Stark, - const D: usize, ->( - builder: &mut CircuitBuilder, - stark: &S, - config: &StarkConfig, - degree_bits: usize, - num_ctl_helper_zs: usize, - num_ctl_zs: usize, -) -> StarkProofTarget { - let fri_params = config.fri_params(degree_bits); - let cap_height = fri_params.config.cap_height; - - let num_leaves_per_oracle = vec![ - S::COLUMNS, - stark.num_lookup_helper_columns(config) + num_ctl_helper_zs, - stark.quotient_degree_factor() * config.num_challenges, - ]; - - let auxiliary_polys_cap = builder.add_virtual_cap(cap_height); - - StarkProofTarget { - trace_cap: builder.add_virtual_cap(cap_height), - auxiliary_polys_cap, - quotient_polys_cap: builder.add_virtual_cap(cap_height), - openings: add_virtual_stark_opening_set::( - builder, - stark, - num_ctl_helper_zs, - num_ctl_zs, - config, - ), - opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params), - } -} - -fn add_virtual_stark_opening_set, S: Stark, const D: usize>( - builder: &mut CircuitBuilder, - stark: &S, - num_ctl_helper_zs: usize, - num_ctl_zs: usize, - config: &StarkConfig, -) -> StarkOpeningSetTarget { - let num_challenges = config.num_challenges; - StarkOpeningSetTarget { - local_values: builder.add_virtual_extension_targets(S::COLUMNS), - next_values: builder.add_virtual_extension_targets(S::COLUMNS), - auxiliary_polys: builder.add_virtual_extension_targets( - stark.num_lookup_helper_columns(config) + num_ctl_helper_zs, - ), - auxiliary_polys_next: builder.add_virtual_extension_targets( - stark.num_lookup_helper_columns(config) + num_ctl_helper_zs, - ), - ctl_zs_first: builder.add_virtual_targets(num_ctl_zs), - quotient_polys: builder - .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges), - } -} - -pub(crate) fn set_stark_proof_target, W, const D: usize>( - witness: &mut W, - proof_target: &StarkProofTarget, - proof: &StarkProof, - zero: Target, -) where - F: RichField + Extendable, - C::Hasher: AlgebraicHasher, - W: Witness, -{ - witness.set_cap_target(&proof_target.trace_cap, &proof.trace_cap); - witness.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap); - - witness.set_fri_openings( - &proof_target.openings.to_fri_openings(zero), - &proof.openings.to_fri_openings(), - ); - - witness.set_cap_target( - &proof_target.auxiliary_polys_cap, - &proof.auxiliary_polys_cap, - ); - - set_fri_proof_target(witness, &proof_target.opening_proof, 
&proof.opening_proof); -} - pub fn set_public_value_targets( witness: &mut W, public_values_target: &PublicValuesTarget, diff --git a/evm/src/stark.rs b/evm/src/stark.rs deleted file mode 100644 index 5ff578f9..00000000 --- a/evm/src/stark.rs +++ /dev/null @@ -1,228 +0,0 @@ -use plonky2::field::extension::{Extendable, FieldExtension}; -use plonky2::field::packed::PackedField; -use plonky2::field::types::Field; -use plonky2::fri::structure::{ - FriBatchInfo, FriBatchInfoTarget, FriInstanceInfo, FriInstanceInfoTarget, FriOracleInfo, - FriPolynomialInfo, -}; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::ext_target::ExtensionTarget; -use plonky2::plonk::circuit_builder::CircuitBuilder; - -use crate::config::StarkConfig; -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::StarkEvaluationFrame; -use crate::lookup::Lookup; - -const TRACE_ORACLE_INDEX: usize = 0; -const AUXILIARY_ORACLE_INDEX: usize = 1; -const QUOTIENT_ORACLE_INDEX: usize = 2; - -/// Represents a STARK system. -pub trait Stark, const D: usize>: Sync { - /// The total number of columns in the trace. - const COLUMNS: usize = Self::EvaluationFrameTarget::COLUMNS; - - /// This is used to evaluate constraints natively. - type EvaluationFrame: StarkEvaluationFrame
<P>
- where - FE: FieldExtension, - P: PackedField; - - /// The `Target` version of `Self::EvaluationFrame`, used to evaluate constraints recursively. - type EvaluationFrameTarget: StarkEvaluationFrame>; - - /// Evaluate constraints at a vector of points. - /// - /// The points are elements of a field `FE`, a degree `D2` extension of `F`. This lets us - /// evaluate constraints over a larger domain if desired. This can also be called with `FE = F` - /// and `D2 = 1`, in which case we are using the trivial extension, i.e. just evaluating - /// constraints over `F`. - fn eval_packed_generic( - &self, - vars: &Self::EvaluationFrame, - yield_constr: &mut ConstraintConsumer
<P>
, - ) where - FE: FieldExtension, - P: PackedField; - - /// Evaluate constraints at a vector of points from the base field `F`. - fn eval_packed_base>( - &self, - vars: &Self::EvaluationFrame, - yield_constr: &mut ConstraintConsumer
<P>
, - ) { - self.eval_packed_generic(vars, yield_constr) - } - - /// Evaluate constraints at a single point from the degree `D` extension field. - fn eval_ext( - &self, - vars: &Self::EvaluationFrame, - yield_constr: &mut ConstraintConsumer, - ) { - self.eval_packed_generic(vars, yield_constr) - } - - /// Evaluate constraints at a vector of points from the degree `D` extension field. This is like - /// `eval_ext`, except in the context of a recursive circuit. - /// Note: constraints must be added through`yield_constr.constraint(builder, constraint)` in the - /// same order as they are given in `eval_packed_generic`. - fn eval_ext_circuit( - &self, - builder: &mut CircuitBuilder, - vars: &Self::EvaluationFrameTarget, - yield_constr: &mut RecursiveConstraintConsumer, - ); - - /// The maximum constraint degree. - fn constraint_degree(&self) -> usize; - - /// The maximum constraint degree. - fn quotient_degree_factor(&self) -> usize { - 1.max(self.constraint_degree() - 1) - } - - fn num_quotient_polys(&self, config: &StarkConfig) -> usize { - self.quotient_degree_factor() * config.num_challenges - } - - /// Computes the FRI instance used to prove this Stark. - fn fri_instance( - &self, - zeta: F::Extension, - g: F, - num_ctl_helpers: usize, - num_ctl_zs: Vec, - config: &StarkConfig, - ) -> FriInstanceInfo { - let trace_oracle = FriOracleInfo { - num_polys: Self::COLUMNS, - blinding: false, - }; - let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS); - - let num_lookup_columns = self.num_lookup_helper_columns(config); - let num_auxiliary_polys = num_lookup_columns + num_ctl_helpers + num_ctl_zs.len(); - let auxiliary_oracle = FriOracleInfo { - num_polys: num_auxiliary_polys, - blinding: false, - }; - let auxiliary_polys_info = - FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys); - - let ctl_zs_info = FriPolynomialInfo::from_range( - AUXILIARY_ORACLE_INDEX, - num_lookup_columns + num_ctl_helpers..num_auxiliary_polys, - ); - - let num_quotient_polys = self.num_quotient_polys(config); - let quotient_oracle = FriOracleInfo { - num_polys: num_quotient_polys, - blinding: false, - }; - let quotient_info = - FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys); - - let zeta_batch = FriBatchInfo { - point: zeta, - polynomials: [ - trace_info.clone(), - auxiliary_polys_info.clone(), - quotient_info, - ] - .concat(), - }; - let zeta_next_batch = FriBatchInfo { - point: zeta.scalar_mul(g), - polynomials: [trace_info, auxiliary_polys_info].concat(), - }; - let ctl_first_batch = FriBatchInfo { - point: F::Extension::ONE, - polynomials: ctl_zs_info, - }; - FriInstanceInfo { - oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle], - batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch], - } - } - - /// Computes the FRI instance used to prove this Stark. 
- fn fri_instance_target( - &self, - builder: &mut CircuitBuilder, - zeta: ExtensionTarget, - g: F, - num_ctl_helper_polys: usize, - num_ctl_zs: usize, - inner_config: &StarkConfig, - ) -> FriInstanceInfoTarget { - let trace_oracle = FriOracleInfo { - num_polys: Self::COLUMNS, - blinding: false, - }; - let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS); - - let num_lookup_columns = self.num_lookup_helper_columns(inner_config); - let num_auxiliary_polys = num_lookup_columns + num_ctl_helper_polys + num_ctl_zs; - let auxiliary_oracle = FriOracleInfo { - num_polys: num_auxiliary_polys, - blinding: false, - }; - let auxiliary_polys_info = - FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys); - - let ctl_zs_info = FriPolynomialInfo::from_range( - AUXILIARY_ORACLE_INDEX, - num_lookup_columns + num_ctl_helper_polys - ..num_lookup_columns + num_ctl_helper_polys + num_ctl_zs, - ); - - let num_quotient_polys = self.num_quotient_polys(inner_config); - let quotient_oracle = FriOracleInfo { - num_polys: num_quotient_polys, - blinding: false, - }; - let quotient_info = - FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys); - - let zeta_batch = FriBatchInfoTarget { - point: zeta, - polynomials: [ - trace_info.clone(), - auxiliary_polys_info.clone(), - quotient_info, - ] - .concat(), - }; - let zeta_next = builder.mul_const_extension(g, zeta); - let zeta_next_batch = FriBatchInfoTarget { - point: zeta_next, - polynomials: [trace_info, auxiliary_polys_info].concat(), - }; - let ctl_first_batch = FriBatchInfoTarget { - point: builder.one_extension(), - polynomials: ctl_zs_info, - }; - FriInstanceInfoTarget { - oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle], - batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch], - } - } - - fn lookups(&self) -> Vec> { - vec![] - } - - fn num_lookup_helper_columns(&self, config: &StarkConfig) -> usize { - self.lookups() - .iter() - .map(|lookup| lookup.num_helper_columns(self.constraint_degree())) - .sum::() - * config.num_challenges - } - - fn uses_lookups(&self) -> bool { - !self.lookups().is_empty() - } -} diff --git a/evm/src/stark_testing.rs b/evm/src/stark_testing.rs deleted file mode 100644 index 3568f004..00000000 --- a/evm/src/stark_testing.rs +++ /dev/null @@ -1,157 +0,0 @@ -use anyhow::{ensure, Result}; -use plonky2::field::extension::{Extendable, FieldExtension}; -use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues}; -use plonky2::field::types::{Field, Sample}; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::witness::{PartialWitness, WitnessWrite}; -use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::circuit_data::CircuitConfig; -use plonky2::plonk::config::GenericConfig; -use plonky2::util::transpose; -use plonky2_util::{log2_ceil, log2_strict}; - -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::evaluation_frame::StarkEvaluationFrame; -use crate::stark::Stark; - -const WITNESS_SIZE: usize = 1 << 5; - -/// Tests that the constraints imposed by the given STARK are low-degree by applying them to random -/// low-degree witness polynomials. 
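// Degree-bound reasoning behind the test below: trace columns have degree
// < WITNESS_SIZE, so a constraint of total degree d evaluates to a
// polynomial of degree at most WITNESS_SIZE·d - 1, and an LDE with
// rate_bits = ⌈log2(d + 1)⌉ supplies enough evaluation points to determine
// that polynomial exactly. A sketch of the bound the test checks against:
fn max_constraint_eval_degree(witness_size: usize, constraint_degree: usize) -> usize {
    // Matches `WITNESS_SIZE * stark.constraint_degree() - 1` below.
    witness_size * constraint_degree - 1
}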
-pub(crate) fn test_stark_low_degree< - F: RichField + Extendable, - S: Stark, - const D: usize, ->( - stark: S, -) -> Result<()> { - let rate_bits = log2_ceil(stark.constraint_degree() + 1); - - let trace_ldes = random_low_degree_matrix::(S::COLUMNS, rate_bits); - let size = trace_ldes.len(); - - let lagrange_first = PolynomialValues::selector(WITNESS_SIZE, 0).lde(rate_bits); - let lagrange_last = PolynomialValues::selector(WITNESS_SIZE, WITNESS_SIZE - 1).lde(rate_bits); - - let last = F::primitive_root_of_unity(log2_strict(WITNESS_SIZE)).inverse(); - let subgroup = - F::cyclic_subgroup_known_order(F::primitive_root_of_unity(log2_strict(size)), size); - let alpha = F::rand(); - let constraint_evals = (0..size) - .map(|i| { - let vars = S::EvaluationFrame::from_values( - &trace_ldes[i], - &trace_ldes[(i + (1 << rate_bits)) % size], - ); - - let mut consumer = ConstraintConsumer::::new( - vec![alpha], - subgroup[i] - last, - lagrange_first.values[i], - lagrange_last.values[i], - ); - stark.eval_packed_base(&vars, &mut consumer); - consumer.accumulators()[0] - }) - .collect::>(); - - let constraint_poly_values = PolynomialValues::new(constraint_evals); - if !constraint_poly_values.is_zero() { - let constraint_eval_degree = constraint_poly_values.degree(); - let maximum_degree = WITNESS_SIZE * stark.constraint_degree() - 1; - - ensure!( - constraint_eval_degree <= maximum_degree, - "Expected degrees at most {} * {} - 1 = {}, actual {:?}", - WITNESS_SIZE, - stark.constraint_degree(), - maximum_degree, - constraint_eval_degree - ); - } - - Ok(()) -} - -/// Tests that the circuit constraints imposed by the given STARK are coherent with the native constraints. -pub(crate) fn test_stark_circuit_constraints< - F: RichField + Extendable, - C: GenericConfig, - S: Stark, - const D: usize, ->( - stark: S, -) -> Result<()> { - // Compute native constraint evaluation on random values. - let vars = S::EvaluationFrame::from_values( - &F::Extension::rand_vec(S::COLUMNS), - &F::Extension::rand_vec(S::COLUMNS), - ); - - let alphas = F::rand_vec(1); - let z_last = F::Extension::rand(); - let lagrange_first = F::Extension::rand(); - let lagrange_last = F::Extension::rand(); - let mut consumer = ConstraintConsumer::::new( - alphas - .iter() - .copied() - .map(F::Extension::from_basefield) - .collect(), - z_last, - lagrange_first, - lagrange_last, - ); - stark.eval_ext(&vars, &mut consumer); - let native_eval = consumer.accumulators()[0]; - - // Compute circuit constraint evaluation on same random values. 
- let circuit_config = CircuitConfig::standard_recursion_config(); - let mut builder = CircuitBuilder::::new(circuit_config); - let mut pw = PartialWitness::::new(); - - let locals_t = builder.add_virtual_extension_targets(S::COLUMNS); - pw.set_extension_targets(&locals_t, vars.get_local_values()); - let nexts_t = builder.add_virtual_extension_targets(S::COLUMNS); - pw.set_extension_targets(&nexts_t, vars.get_next_values()); - let alphas_t = builder.add_virtual_targets(1); - pw.set_target(alphas_t[0], alphas[0]); - let z_last_t = builder.add_virtual_extension_target(); - pw.set_extension_target(z_last_t, z_last); - let lagrange_first_t = builder.add_virtual_extension_target(); - pw.set_extension_target(lagrange_first_t, lagrange_first); - let lagrange_last_t = builder.add_virtual_extension_target(); - pw.set_extension_target(lagrange_last_t, lagrange_last); - - let vars = S::EvaluationFrameTarget::from_values(&locals_t, &nexts_t); - let mut consumer = RecursiveConstraintConsumer::::new( - builder.zero_extension(), - alphas_t, - z_last_t, - lagrange_first_t, - lagrange_last_t, - ); - stark.eval_ext_circuit(&mut builder, &vars, &mut consumer); - let circuit_eval = consumer.accumulators()[0]; - let native_eval_t = builder.constant_extension(native_eval); - builder.connect_extension(circuit_eval, native_eval_t); - - let data = builder.build::(); - let proof = data.prove(pw)?; - data.verify(proof) -} - -fn random_low_degree_matrix(num_polys: usize, rate_bits: usize) -> Vec> { - let polys = (0..num_polys) - .map(|_| random_low_degree_values(rate_bits)) - .collect::>(); - - transpose(&polys) -} - -fn random_low_degree_values(rate_bits: usize) -> Vec { - PolynomialCoeffs::new(F::rand_vec(WITNESS_SIZE)) - .lde(rate_bits) - .fft() - .values -} diff --git a/evm/src/util.rs b/evm/src/util.rs index aec2e63e..fdb5a98c 100644 --- a/evm/src/util.rs +++ b/evm/src/util.rs @@ -35,18 +35,6 @@ pub(crate) fn limb_from_bits_le_recursive, const D: }) } -/// A helper function to transpose a row-wise trace and put it in the format that `prove` expects. -pub(crate) fn trace_rows_to_poly_values( - trace_rows: Vec<[F; COLUMNS]>, -) -> Vec> { - let trace_row_vecs = trace_rows.into_iter().map(|row| row.to_vec()).collect_vec(); - let trace_col_vecs: Vec> = transpose(&trace_row_vecs); - trace_col_vecs - .into_iter() - .map(|column| PolynomialValues::new(column)) - .collect() -} - /// Returns the lowest LE 32-bit limb of a `U256` as a field element, /// and errors if the integer is actually greater. pub(crate) fn u256_to_u32(u256: U256) -> Result { diff --git a/evm/src/vanishing_poly.rs b/evm/src/vanishing_poly.rs deleted file mode 100644 index c1f2d0f9..00000000 --- a/evm/src/vanishing_poly.rs +++ /dev/null @@ -1,81 +0,0 @@ -use plonky2::field::extension::{Extendable, FieldExtension}; -use plonky2::field::packed::PackedField; -use plonky2::hash::hash_types::RichField; -use plonky2::plonk::circuit_builder::CircuitBuilder; - -use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::cross_table_lookup::{ - eval_cross_table_lookup_checks, eval_cross_table_lookup_checks_circuit, CtlCheckVars, - CtlCheckVarsTarget, -}; -use crate::lookup::{ - eval_ext_lookups_circuit, eval_packed_lookups_generic, Lookup, LookupCheckVars, - LookupCheckVarsTarget, -}; -use crate::stark::Stark; - -/// Evaluates all constraint, permutation and cross-table lookup polynomials -/// of the current STARK at the local and next values. 
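// `ConstraintConsumer` folds every constraint c_i into one accumulator per
// challenge α via acc ← acc·α + c_i, i.e. acc = Σ_i α^{k-1-i}·c_i for k
// constraints. A minimal sketch of that folding at a single evaluation
// point (an illustrative helper, not part of this patch):
fn combine_constraints<F: plonky2::field::types::Field>(alpha: F, constraints: &[F]) -> F {
    constraints.iter().fold(F::ZERO, |acc, &c| acc * alpha + c)
}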
-pub(crate) fn eval_vanishing_poly( - stark: &S, - vars: &S::EvaluationFrame, - lookups: &[Lookup], - lookup_vars: Option>, - ctl_vars: &[CtlCheckVars], - consumer: &mut ConstraintConsumer
<P>
, -) where - F: RichField + Extendable, - FE: FieldExtension, - P: PackedField, - S: Stark, -{ - // Evaluate all of the STARK's table constraints. - stark.eval_packed_generic(vars, consumer); - if let Some(lookup_vars) = lookup_vars { - // Evaluate the STARK constraints related to the permutation arguments. - eval_packed_lookups_generic::( - stark, - lookups, - vars, - lookup_vars, - consumer, - ); - } - // Evaluate the STARK constraints related to the cross-table lookups. - eval_cross_table_lookup_checks::( - vars, - ctl_vars, - consumer, - stark.constraint_degree(), - ); -} - -/// Circuit version of `eval_vanishing_poly`. -/// Evaluates all constraint, permutation and cross-table lookup polynomials -/// of the current STARK at the local and next values. -pub(crate) fn eval_vanishing_poly_circuit( - builder: &mut CircuitBuilder, - stark: &S, - vars: &S::EvaluationFrameTarget, - lookup_vars: Option>, - ctl_vars: &[CtlCheckVarsTarget], - consumer: &mut RecursiveConstraintConsumer, -) where - F: RichField + Extendable, - S: Stark, -{ - // Evaluate all of the STARK's table constraints. - stark.eval_ext_circuit(builder, vars, consumer); - if let Some(lookup_vars) = lookup_vars { - // Evaluate all of the STARK's constraints related to the permutation argument. - eval_ext_lookups_circuit::(builder, stark, vars, lookup_vars, consumer); - } - // Evaluate all of the STARK's constraints related to the cross-table lookups. - eval_cross_table_lookup_checks_circuit::( - builder, - vars, - ctl_vars, - consumer, - stark.constraint_degree(), - ); -} diff --git a/evm/src/verifier.rs b/evm/src/verifier.rs index 3e284c7f..fd2af863 100644 --- a/evm/src/verifier.rs +++ b/evm/src/verifier.rs @@ -1,34 +1,22 @@ -use core::any::type_name; - -use anyhow::{ensure, Result}; +use anyhow::Result; use ethereum_types::{BigEndianHash, U256}; use itertools::Itertools; -use plonky2::field::extension::{Extendable, FieldExtension}; -use plonky2::field::types::Field; -use plonky2::fri::verifier::verify_fri_proof; +use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; use plonky2::plonk::config::GenericConfig; -use plonky2::plonk::plonk_common::reduce_with_powers; +use starky::config::StarkConfig; +use starky::cross_table_lookup::{get_ctl_vars_from_proofs, verify_cross_table_lookups}; +use starky::lookup::GrandProductChallenge; +use starky::stark::Stark; +use starky::verifier::verify_stark_proof_with_challenges; use crate::all_stark::{AllStark, Table, NUM_TABLES}; -use crate::config::StarkConfig; -use crate::constraint_consumer::ConstraintConsumer; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::constants::global_metadata::GlobalMetadata; -use crate::cross_table_lookup::{ - num_ctl_helper_columns_by_table, verify_cross_table_lookups, CtlCheckVars, - GrandProductChallengeSet, -}; -use crate::evaluation_frame::StarkEvaluationFrame; -use crate::lookup::{GrandProductChallenge, LookupCheckVars}; use crate::memory::segments::Segment; use crate::memory::VALUE_LIMBS; -use crate::proof::{ - AllProof, AllProofChallenges, PublicValues, StarkOpeningSet, StarkProof, StarkProofChallenges, -}; -use crate::stark::Stark; +use crate::proof::{AllProof, AllProofChallenges, PublicValues}; use crate::util::h2u; -use crate::vanishing_poly::eval_vanishing_poly; pub fn verify_proof, C: GenericConfig, const D: usize>( all_stark: &AllStark, @@ -57,73 +45,71 @@ where cross_table_lookups, } = all_stark; - let num_ctl_helper_cols = num_ctl_helper_columns_by_table( - cross_table_lookups, - 
all_stark.arithmetic_stark.constraint_degree(), - ); - - let ctl_vars_per_table = CtlCheckVars::from_proofs( - &all_proof.stark_proofs, + let ctl_vars_per_table = get_ctl_vars_from_proofs( + &all_proof.multi_proof, cross_table_lookups, &ctl_challenges, &num_lookup_columns, - &num_ctl_helper_cols, + all_stark.arithmetic_stark.constraint_degree(), ); + let stark_proofs = &all_proof.multi_proof.stark_proofs; + verify_stark_proof_with_challenges( arithmetic_stark, - &all_proof.stark_proofs[Table::Arithmetic as usize].proof, + &stark_proofs[Table::Arithmetic as usize].proof, &stark_challenges[Table::Arithmetic as usize], - &ctl_vars_per_table[Table::Arithmetic as usize], - &ctl_challenges, + Some(&ctl_vars_per_table[Table::Arithmetic as usize]), + &[], config, )?; + verify_stark_proof_with_challenges( byte_packing_stark, - &all_proof.stark_proofs[Table::BytePacking as usize].proof, + &stark_proofs[Table::BytePacking as usize].proof, &stark_challenges[Table::BytePacking as usize], - &ctl_vars_per_table[Table::BytePacking as usize], - &ctl_challenges, + Some(&ctl_vars_per_table[Table::BytePacking as usize]), + &[], config, )?; verify_stark_proof_with_challenges( cpu_stark, - &all_proof.stark_proofs[Table::Cpu as usize].proof, + &stark_proofs[Table::Cpu as usize].proof, &stark_challenges[Table::Cpu as usize], - &ctl_vars_per_table[Table::Cpu as usize], - &ctl_challenges, + Some(&ctl_vars_per_table[Table::Cpu as usize]), + &[], config, )?; verify_stark_proof_with_challenges( keccak_stark, - &all_proof.stark_proofs[Table::Keccak as usize].proof, + &stark_proofs[Table::Keccak as usize].proof, &stark_challenges[Table::Keccak as usize], - &ctl_vars_per_table[Table::Keccak as usize], - &ctl_challenges, + Some(&ctl_vars_per_table[Table::Keccak as usize]), + &[], config, )?; verify_stark_proof_with_challenges( keccak_sponge_stark, - &all_proof.stark_proofs[Table::KeccakSponge as usize].proof, + &stark_proofs[Table::KeccakSponge as usize].proof, &stark_challenges[Table::KeccakSponge as usize], - &ctl_vars_per_table[Table::KeccakSponge as usize], - &ctl_challenges, + Some(&ctl_vars_per_table[Table::KeccakSponge as usize]), + &[], config, )?; verify_stark_proof_with_challenges( logic_stark, - &all_proof.stark_proofs[Table::Logic as usize].proof, + &stark_proofs[Table::Logic as usize].proof, &stark_challenges[Table::Logic as usize], - &ctl_vars_per_table[Table::Logic as usize], - &ctl_challenges, + Some(&ctl_vars_per_table[Table::Logic as usize]), + &[], config, )?; verify_stark_proof_with_challenges( memory_stark, - &all_proof.stark_proofs[Table::Memory as usize].proof, + &stark_proofs[Table::Memory as usize].proof, &stark_challenges[Table::Memory as usize], - &ctl_vars_per_table[Table::Memory as usize], - &ctl_challenges, + Some(&ctl_vars_per_table[Table::Memory as usize]), + &[], config, )?; @@ -141,9 +127,10 @@ where verify_cross_table_lookups::( cross_table_lookups, all_proof + .multi_proof .stark_proofs - .map(|p| p.proof.openings.ctl_zs_first), - extra_looking_sums, + .map(|p| p.proof.openings.ctl_zs_first.unwrap()), + Some(&extra_looking_sums), config, ) } @@ -293,186 +280,8 @@ where running_sum + challenge.combine(row.iter()).inverse() } -pub(crate) fn verify_stark_proof_with_challenges< - F: RichField + Extendable, - C: GenericConfig, - S: Stark, - const D: usize, ->( - stark: &S, - proof: &StarkProof, - challenges: &StarkProofChallenges, - ctl_vars: &[CtlCheckVars], - ctl_challenges: &GrandProductChallengeSet, - config: &StarkConfig, -) -> Result<()> { - log::debug!("Checking proof: {}", 
type_name::()); - let num_ctl_polys = ctl_vars - .iter() - .map(|ctl| ctl.helper_columns.len()) - .sum::(); - let num_ctl_z_polys = ctl_vars.len(); - validate_proof_shape(stark, proof, config, num_ctl_polys, num_ctl_z_polys)?; - let StarkOpeningSet { - local_values, - next_values, - auxiliary_polys, - auxiliary_polys_next, - ctl_zs_first: _, - quotient_polys, - } = &proof.openings; - let vars = S::EvaluationFrame::from_values(local_values, next_values); - - let degree_bits = proof.recover_degree_bits(config); - let (l_0, l_last) = eval_l_0_and_l_last(degree_bits, challenges.stark_zeta); - let last = F::primitive_root_of_unity(degree_bits).inverse(); - let z_last = challenges.stark_zeta - last.into(); - let mut consumer = ConstraintConsumer::::new( - challenges - .stark_alphas - .iter() - .map(|&alpha| F::Extension::from_basefield(alpha)) - .collect::>(), - z_last, - l_0, - l_last, - ); - let num_lookup_columns = stark.num_lookup_helper_columns(config); - let lookup_challenges = (num_lookup_columns > 0).then(|| { - ctl_challenges - .challenges - .iter() - .map(|ch| ch.beta) - .collect::>() - }); - - let lookup_vars = stark.uses_lookups().then(|| LookupCheckVars { - local_values: auxiliary_polys[..num_lookup_columns].to_vec(), - next_values: auxiliary_polys_next[..num_lookup_columns].to_vec(), - challenges: lookup_challenges.unwrap(), - }); - let lookups = stark.lookups(); - eval_vanishing_poly::( - stark, - &vars, - &lookups, - lookup_vars, - ctl_vars, - &mut consumer, - ); - let vanishing_polys_zeta = consumer.accumulators(); - - // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta. - let zeta_pow_deg = challenges.stark_zeta.exp_power_of_2(degree_bits); - let z_h_zeta = zeta_pow_deg - F::Extension::ONE; - // `quotient_polys_zeta` holds `num_challenges * quotient_degree_factor` evaluations. - // Each chunk of `quotient_degree_factor` holds the evaluations of `t_0(zeta),...,t_{quotient_degree_factor-1}(zeta)` - // where the "real" quotient polynomial is `t(X) = t_0(X) + t_1(X)*X^n + t_2(X)*X^{2n} + ...`. - // So to reconstruct `t(zeta)` we can compute `reduce_with_powers(chunk, zeta^n)` for each - // `quotient_degree_factor`-sized chunk of the original evaluations. - for (i, chunk) in quotient_polys - .chunks(stark.quotient_degree_factor()) - .enumerate() - { - ensure!( - vanishing_polys_zeta[i] == z_h_zeta * reduce_with_powers(chunk, zeta_pow_deg), - "Mismatch between evaluation and opening of quotient polynomial" - ); - } - - let merkle_caps = vec![ - proof.trace_cap.clone(), - proof.auxiliary_polys_cap.clone(), - proof.quotient_polys_cap.clone(), - ]; - - let num_ctl_zs = ctl_vars - .iter() - .map(|ctl| ctl.helper_columns.len()) - .collect::>(); - verify_fri_proof::( - &stark.fri_instance( - challenges.stark_zeta, - F::primitive_root_of_unity(degree_bits), - num_ctl_polys, - num_ctl_zs, - config, - ), - &proof.openings.to_fri_openings(), - &challenges.fri_challenges, - &merkle_caps, - &proof.opening_proof, - &config.fri_params(degree_bits), - )?; - - Ok(()) -} - -fn validate_proof_shape( - stark: &S, - proof: &StarkProof, - config: &StarkConfig, - num_ctl_helpers: usize, - num_ctl_zs: usize, -) -> anyhow::Result<()> -where - F: RichField + Extendable, - C: GenericConfig, - S: Stark, -{ - let StarkProof { - trace_cap, - auxiliary_polys_cap, - quotient_polys_cap, - openings, - // The shape of the opening proof will be checked in the FRI verifier (see - // validate_fri_proof_shape), so we ignore it here. 
- opening_proof: _, - } = proof; - - let StarkOpeningSet { - local_values, - next_values, - auxiliary_polys, - auxiliary_polys_next, - ctl_zs_first, - quotient_polys, - } = openings; - - let degree_bits = proof.recover_degree_bits(config); - let fri_params = config.fri_params(degree_bits); - let cap_height = fri_params.config.cap_height; - - let num_auxiliary = num_ctl_helpers + stark.num_lookup_helper_columns(config) + num_ctl_zs; - - ensure!(trace_cap.height() == cap_height); - ensure!(auxiliary_polys_cap.height() == cap_height); - ensure!(quotient_polys_cap.height() == cap_height); - - ensure!(local_values.len() == S::COLUMNS); - ensure!(next_values.len() == S::COLUMNS); - ensure!(auxiliary_polys.len() == num_auxiliary); - ensure!(auxiliary_polys_next.len() == num_auxiliary); - ensure!(ctl_zs_first.len() == num_ctl_zs); - ensure!(quotient_polys.len() == stark.num_quotient_polys(config)); - - Ok(()) -} - -/// Evaluate the Lagrange polynomials `L_0` and `L_(n-1)` at a point `x`. -/// `L_0(x) = (x^n - 1)/(n * (x - 1))` -/// `L_(n-1)(x) = (x^n - 1)/(n * (g * x - 1))`, with `g` the first element of the subgroup. -fn eval_l_0_and_l_last(log_n: usize, x: F) -> (F, F) { - let n = F::from_canonical_usize(1 << log_n); - let g = F::primitive_root_of_unity(log_n); - let z_x = x.exp_power_of_2(log_n) - F::ONE; - let invs = F::batch_multiplicative_inverse(&[n * (x - F::ONE), n * (g * x - F::ONE)]); - - (z_x * invs[0], z_x * invs[1]) -} - -#[cfg(test)] -pub(crate) mod testutils { +#[cfg(debug_assertions)] +pub(crate) mod debug_utils { use super::*; /// Output all the extra memory rows that don't appear in the CPU trace but are @@ -610,26 +419,3 @@ pub(crate) mod testutils { row } } -#[cfg(test)] -mod tests { - use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::polynomial::PolynomialValues; - use plonky2::field::types::Sample; - - use crate::verifier::eval_l_0_and_l_last; - - #[test] - fn test_eval_l_0_and_l_last() { - type F = GoldilocksField; - let log_n = 5; - let n = 1 << log_n; - - let x = F::rand(); // challenge point - let expected_l_first_x = PolynomialValues::selector(n, 0).ifft().eval(x); - let expected_l_last_x = PolynomialValues::selector(n, n - 1).ifft().eval(x); - - let (l_first_x, l_last_x) = eval_l_0_and_l_last(log_n, x); - assert_eq!(l_first_x, expected_l_first_x); - assert_eq!(l_last_x, expected_l_last_x); - } -} diff --git a/evm/src/witness/traces.rs b/evm/src/witness/traces.rs index f7f5c9d3..76267a0a 100644 --- a/evm/src/witness/traces.rs +++ b/evm/src/witness/traces.rs @@ -6,15 +6,15 @@ use plonky2::field::polynomial::PolynomialValues; use plonky2::hash::hash_types::RichField; use plonky2::timed; use plonky2::util::timing::TimingTree; +use starky::config::StarkConfig; +use starky::util::trace_rows_to_poly_values; use crate::all_stark::{AllStark, NUM_TABLES}; use crate::arithmetic::{BinaryOperator, Operation}; use crate::byte_packing::byte_packing_stark::BytePackingOp; -use crate::config::StarkConfig; use crate::cpu::columns::CpuColumnsView; use crate::keccak_sponge::columns::KECCAK_WIDTH_BYTES; use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeOp; -use crate::util::trace_rows_to_poly_values; use crate::witness::memory::MemoryOp; use crate::{arithmetic, keccak, keccak_sponge, logic}; diff --git a/evm/tests/add11_yml.rs b/evm/tests/add11_yml.rs index 6a15dfc0..51da107c 100644 --- a/evm/tests/add11_yml.rs +++ b/evm/tests/add11_yml.rs @@ -11,14 +11,12 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use 
plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/basic_smart_contract.rs b/evm/tests/basic_smart_contract.rs index 7d07ca19..69c90988 100644 --- a/evm/tests/basic_smart_contract.rs +++ b/evm/tests/basic_smart_contract.rs @@ -11,15 +11,13 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; use plonky2_evm::cpu::kernel::opcodes::{get_opcode, get_push_opcode}; use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/empty_txn_list.rs b/evm/tests/empty_txn_list.rs index 15416c8c..8f482f72 100644 --- a/evm/tests/empty_txn_list.rs +++ b/evm/tests/empty_txn_list.rs @@ -11,12 +11,9 @@ use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::serialization::{DefaultGateSerializer, DefaultGeneratorSerializer}; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; -use plonky2_evm::fixed_recursive_verifier::AllRecursiveCircuits; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, PublicValues, TrieRoots}; -use plonky2_evm::Node; +use plonky2_evm::{AllRecursiveCircuits, AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/erc20.rs b/evm/tests/erc20.rs index 48d0d753..430da14d 100644 --- a/evm/tests/erc20.rs +++ b/evm/tests/erc20.rs @@ -10,14 +10,12 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp}; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/erc721.rs b/evm/tests/erc721.rs index 0c6d50d8..4dfed249 100644 --- a/evm/tests/erc721.rs +++ b/evm/tests/erc721.rs @@ -10,14 +10,12 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; use 
plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp}; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/log_opcode.rs b/evm/tests/log_opcode.rs index 37d874cd..a95473fc 100644 --- a/evm/tests/log_opcode.rs +++ b/evm/tests/log_opcode.rs @@ -12,16 +12,13 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; -use plonky2_evm::fixed_recursive_verifier::AllRecursiveCircuits; use plonky2_evm::generation::mpt::transaction_testing::{AddressOption, LegacyTransactionRlp}; use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp}; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllRecursiveCircuits, AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/self_balance_gas_cost.rs b/evm/tests/self_balance_gas_cost.rs index 538f2aa7..d759387c 100644 --- a/evm/tests/self_balance_gas_cost.rs +++ b/evm/tests/self_balance_gas_cost.rs @@ -11,14 +11,12 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/selfdestruct.rs b/evm/tests/selfdestruct.rs index 829e0b21..87b39e30 100644 --- a/evm/tests/selfdestruct.rs +++ b/evm/tests/selfdestruct.rs @@ -10,14 +10,12 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp}; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/simple_transfer.rs b/evm/tests/simple_transfer.rs index 5fd252df..cd17fdae 100644 --- a/evm/tests/simple_transfer.rs +++ b/evm/tests/simple_transfer.rs @@ -11,14 +11,12 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::KeccakGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; use plonky2_evm::generation::mpt::{AccountRlp, 
LegacyReceiptRlp}; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllStark, Node, StarkConfig}; type F = GoldilocksField; const D: usize = 2; diff --git a/evm/tests/withdrawals.rs b/evm/tests/withdrawals.rs index ef2d19b0..ef40b529 100644 --- a/evm/tests/withdrawals.rs +++ b/evm/tests/withdrawals.rs @@ -9,14 +9,12 @@ use keccak_hash::keccak; use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::timing::TimingTree; -use plonky2_evm::all_stark::AllStark; -use plonky2_evm::config::StarkConfig; use plonky2_evm::generation::mpt::AccountRlp; use plonky2_evm::generation::{GenerationInputs, TrieInputs}; use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots}; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; -use plonky2_evm::Node; +use plonky2_evm::{AllStark, Node, StarkConfig}; use rand::random; type F = GoldilocksField; diff --git a/plonky2/src/fri/proof.rs b/plonky2/src/fri/proof.rs index edff1bea..6c8145ec 100644 --- a/plonky2/src/fri/proof.rs +++ b/plonky2/src/fri/proof.rs @@ -360,6 +360,7 @@ impl, H: Hasher, const D: usize> CompressedFriPr } } +#[derive(Debug)] pub struct FriChallenges, const D: usize> { // Scaling factor to combine polynomials. pub fri_alpha: F::Extension, @@ -373,6 +374,7 @@ pub struct FriChallenges, const D: usize> { pub fri_query_indices: Vec, } +#[derive(Debug)] pub struct FriChallengesTarget { pub fri_alpha: ExtensionTarget, pub fri_betas: Vec>, diff --git a/starky/Cargo.toml b/starky/Cargo.toml index 0efae5fc..fe64413f 100644 --- a/starky/Cargo.toml +++ b/starky/Cargo.toml @@ -17,7 +17,9 @@ std = ["anyhow/std", "plonky2/std"] timing = ["plonky2/timing"] [dependencies] +ahash = { version = "0.8.3", default-features = false, features = ["compile-time-rng"] } # NOTE: Be sure to keep this version the same as the dependency in `hashbrown`. anyhow = { version = "1.0.40", default-features = false } +hashbrown = { version = "0.14.0", default-features = false, features = ["ahash", "serde"] } # NOTE: When upgrading, see `ahash` dependency. itertools = { version = "0.11.0", default-features = false } log = { version = "0.4.14", default-features = false } num-bigint = { version = "0.4.3", default-features = false } diff --git a/starky/src/config.rs b/starky/src/config.rs index 24ddb6a7..8f95c0ea 100644 --- a/starky/src/config.rs +++ b/starky/src/config.rs @@ -1,17 +1,49 @@ +//! A [`StarkConfig`] defines all the parameters to be used when proving a +//! [`Stark`][crate::stark::Stark]. +//! +//! The default configuration is aimed for speed, yielding fast but large +//! proofs, with a targeted security level of 100 bits. + +#[cfg(not(feature = "std"))] +use alloc::format; + +use anyhow::{anyhow, Result}; +use plonky2::field::extension::Extendable; +use plonky2::field::types::Field; use plonky2::fri::reduction_strategies::FriReductionStrategy; use plonky2::fri::{FriConfig, FriParams}; +use plonky2::hash::hash_types::RichField; +/// A configuration containing the different parameters used by the STARK prover. +#[derive(Clone, Debug)] pub struct StarkConfig { + /// The targeted security level for the proofs generated with this configuration. pub security_bits: usize, /// The number of challenge points to generate, for IOPs that have soundness errors of (roughly) /// `degree / |F|`. 
pub num_challenges: usize, + /// The configuration of the FRI sub-protocol. pub fri_config: FriConfig, } +impl Default for StarkConfig { + fn default() -> Self { + Self::standard_fast_config() + } +} + impl StarkConfig { + /// Returns a custom STARK configuration. + pub const fn new(security_bits: usize, num_challenges: usize, fri_config: FriConfig) -> Self { + Self { + security_bits, + num_challenges, + fri_config, + } + } + /// A typical configuration with a rate of 2, resulting in fast but large proofs. /// Targets ~100 bit conjectured security. pub const fn standard_fast_config() -> Self { @@ -28,7 +60,88 @@ impl StarkConfig { } } - pub(crate) fn fri_params(&self, degree_bits: usize) -> FriParams { + /// Outputs the [`FriParams`] used during the FRI sub-protocol by this [`StarkConfig`]. + pub fn fri_params(&self, degree_bits: usize) -> FriParams { self.fri_config.fri_params(degree_bits, false) } + + /// Checks that this STARK configuration is consistent, i.e. that the different + /// parameters meet the targeted security level. + pub fn check_config, const D: usize>(&self) -> Result<()> { + let StarkConfig { + security_bits, + fri_config: + FriConfig { + rate_bits, + proof_of_work_bits, + num_query_rounds, + .. + }, + .. + } = &self; + + // Conjectured FRI security; see the ethSTARK paper. + let fri_field_bits = F::Extension::order().bits() as usize; + let fri_query_security_bits = num_query_rounds * rate_bits + *proof_of_work_bits as usize; + let fri_security_bits = fri_field_bits.min(fri_query_security_bits); + + if fri_security_bits < *security_bits { + Err(anyhow!(format!( + "FRI params fall short of target security {}, reaching only {}", + security_bits, fri_security_bits + ))) + } else { + Ok(()) + } + } +} + +#[cfg(test)] +mod tests { + use plonky2::field::goldilocks_field::GoldilocksField; + + use super::*; + + #[test] + fn test_valid_config() { + type F = GoldilocksField; + const D: usize = 2; + + let config = StarkConfig::standard_fast_config(); + assert!(config.check_config::().is_ok()); + + let high_rate_config = StarkConfig::new( + 100, + 2, + FriConfig { + rate_bits: 3, + cap_height: 4, + proof_of_work_bits: 16, + reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5), + num_query_rounds: 28, + }, + ); + assert!(high_rate_config.check_config::().is_ok()); + } + + #[test] + fn test_invalid_config() { + type F = GoldilocksField; + const D: usize = 2; + + let too_few_queries_config = StarkConfig::new( + 100, + 2, + FriConfig { + rate_bits: 1, + cap_height: 4, + proof_of_work_bits: 16, + reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5), + num_query_rounds: 50, + }, + ); + // The conjectured security yields `rate_bits` * `num_query_rounds` + `proof_of_work_bits` = 66 + // bits of security for FRI, which falls short of the 100 bits of security target. + assert!(too_few_queries_config.check_config::().is_err()); + } } diff --git a/starky/src/constraint_consumer.rs b/starky/src/constraint_consumer.rs index 03548935..02eff4b1 100644 --- a/starky/src/constraint_consumer.rs +++ b/starky/src/constraint_consumer.rs @@ -1,5 +1,10 @@ -use alloc::vec; -use alloc::vec::Vec; +//! Implementation of the constraint consumer. +//! +//! The [`ConstraintConsumer`], and its circuit counterpart, allow a +//! prover to evaluate all polynomials of a [`Stark`][crate::stark::Stark]. 
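To make the `check_config` arithmetic in the hunk above concrete: the conjectured FRI security is `min(bits(|F^D|), num_query_rounds * rate_bits + proof_of_work_bits)`, and the check passes only when that value reaches `security_bits`. A minimal sketch, with illustrative FRI parameters that are not taken from this patch:

```rust
use anyhow::Result;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::fri::reduction_strategies::FriReductionStrategy;
use plonky2::fri::FriConfig;
use starky::config::StarkConfig;

fn main() -> Result<()> {
    const D: usize = 2;
    type F = GoldilocksField;

    // 2 * 42 + 16 = 100 bits of conjectured query security, exactly
    // meeting the targeted 100-bit level over the degree-2 extension.
    let config = StarkConfig::new(
        100,
        2,
        FriConfig {
            rate_bits: 2,
            cap_height: 4,
            proof_of_work_bits: 16,
            reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5),
            num_query_rounds: 42,
        },
    );
    config.check_config::<F, D>()
}
```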
+ +#[cfg(not(feature = "std"))] +use alloc::{vec, vec::Vec}; use core::marker::PhantomData; use plonky2::field::extension::Extendable; @@ -9,14 +14,15 @@ use plonky2::iop::ext_target::ExtensionTarget; use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; +/// A [`ConstraintConsumer`] evaluates all constraint, permutation and cross-table +/// lookup polynomials of a [`Stark`][crate::stark::Stark]. +#[derive(Debug)] pub struct ConstraintConsumer { /// Random values used to combine multiple constraints into one. alphas: Vec, /// Running sums of constraints that have been emitted so far, scaled by powers of alpha. - // TODO(JN): This is pub so it can be used in a test. Once we have an API for accessing this - // result, it should be made private. - pub constraint_accs: Vec
<P>,
+    constraint_accs: Vec<P>,

     /// The evaluation of `X - g^(n-1)`.
     z_last: P,
@@ -31,6 +37,7 @@ pub struct ConstraintConsumer<P: PackedField> {
 }

 impl<P: PackedField> ConstraintConsumer<P> {
+    /// Creates a new instance of [`ConstraintConsumer`].
     pub fn new(
         alphas: Vec<P::Scalar>,
         z_last: P,
@@ -46,6 +53,8 @@ impl<P: PackedField> ConstraintConsumer<P> {
         }
     }

+    /// Consumes this [`ConstraintConsumer`] and outputs its sum of accumulated
+    /// constraints scaled by powers of `alpha`.
     pub fn accumulators(self) -> Vec<P> {
         self.constraint_accs
     }
@@ -76,6 +85,8 @@ impl<P: PackedField> ConstraintConsumer<P>
{ } } +/// Circuit version of [`ConstraintConsumer`]. +#[derive(Debug)] pub struct RecursiveConstraintConsumer, const D: usize> { /// A random value used to combine multiple constraints into one. alphas: Vec, @@ -98,6 +109,7 @@ pub struct RecursiveConstraintConsumer, const D: us } impl, const D: usize> RecursiveConstraintConsumer { + /// Creates a new instance of [`RecursiveConstraintConsumer`]. pub fn new( zero: ExtensionTarget, alphas: Vec, @@ -115,6 +127,8 @@ impl, const D: usize> RecursiveConstraintConsumer Vec> { self.constraint_accs } diff --git a/evm/src/cross_table_lookup.rs b/starky/src/cross_table_lookup.rs similarity index 84% rename from evm/src/cross_table_lookup.rs rename to starky/src/cross_table_lookup.rs index 359b5309..f6f958a2 100644 --- a/evm/src/cross_table_lookup.rs +++ b/starky/src/cross_table_lookup.rs @@ -27,8 +27,11 @@ //! is similar, but we provide not only `local_values` but also `next_values` -- corresponding to //! the current and next row values -- when computing the linear combinations. +#[cfg(not(feature = "std"))] +use alloc::{vec, vec::Vec}; use core::cmp::min; use core::fmt::Debug; +use core::iter::once; use anyhow::{ensure, Result}; use itertools::Itertools; @@ -37,40 +40,39 @@ use plonky2::field::packed::PackedField; use plonky2::field::polynomial::PolynomialValues; use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; -use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; +use plonky2::iop::challenger::Challenger; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; +use plonky2::plonk::config::GenericConfig; use plonky2::util::ceil_div_usize; -use plonky2::util::serialization::{Buffer, IoResult, Read, Write}; use crate::config::StarkConfig; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::evaluation_frame::StarkEvaluationFrame; use crate::lookup::{ - eval_helper_columns, eval_helper_columns_circuit, get_helper_cols, Column, ColumnFilter, - Filter, GrandProductChallenge, + eval_helper_columns, eval_helper_columns_circuit, get_grand_product_challenge_set, + get_helper_cols, Column, ColumnFilter, Filter, GrandProductChallenge, GrandProductChallengeSet, }; -use crate::proof::{StarkProofTarget, StarkProofWithMetadata}; +use crate::proof::{MultiProof, StarkProofTarget, StarkProofWithMetadata}; use crate::stark::Stark; /// An alias for `usize`, to represent the index of a STARK table in a multi-STARK setting. -pub(crate) type TableIdx = usize; +pub type TableIdx = usize; /// A `table` index with a linear combination of columns and a filter. /// `filter` is used to determine the rows to select in `table`. /// `columns` represents linear combinations of the columns of `table`. #[derive(Clone, Debug)] -pub(crate) struct TableWithColumns { +pub struct TableWithColumns { table: TableIdx, columns: Vec>, - pub(crate) filter: Option>, + filter: Option>, } impl TableWithColumns { /// Generates a new `TableWithColumns` given a `table` index, a linear combination of columns `columns` and a `filter`. 
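Since `TableWithColumns::new` and `CrossTableLookup::new` become part of the public API in this hunk, here is a hypothetical sketch of wiring a two-table lookup; the table indices and column positions are made up for illustration, and `Column::single` is assumed from the `starky::lookup` module this file imports from:

```rust
use plonky2::field::goldilocks_field::GoldilocksField;
use starky::cross_table_lookup::{CrossTableLookup, TableWithColumns};
use starky::lookup::Column;

type F = GoldilocksField;

fn two_table_ctl() -> CrossTableLookup<F> {
    // Every row of table 0 looks up the pair (col 0, col 1) of its trace
    // into the pair (col 3, col 4) of table 1. `None` means no filter,
    // i.e. all rows participate in the lookup.
    let looking = TableWithColumns::new(
        0,
        vec![Column::single(0), Column::single(1)],
        None,
    );
    let looked = TableWithColumns::new(1, vec![Column::single(3), Column::single(4)], None);
    CrossTableLookup::new(vec![looking], looked)
}
```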
- pub(crate) fn new(table: TableIdx, columns: Vec>, filter: Option>) -> Self { + pub fn new(table: TableIdx, columns: Vec>, filter: Option>) -> Self { Self { table, columns, @@ -81,7 +83,7 @@ impl TableWithColumns { /// Cross-table lookup data consisting in the lookup table (`looked_table`) and all the tables that look into `looked_table` (`looking_tables`). /// Each `looking_table` corresponds to a STARK's table whose rows have been filtered out and whose columns have been through a linear combination (see `eval_table`). The concatenation of those smaller tables should result in the `looked_table`. -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct CrossTableLookup { /// Column linear combinations for all tables that are looking into the current table. pub(crate) looking_tables: Vec>, @@ -92,7 +94,7 @@ pub struct CrossTableLookup { impl CrossTableLookup { /// Creates a new `CrossTableLookup` given some looking tables and a looked table. /// All tables should have the same width. - pub(crate) fn new( + pub fn new( looking_tables: Vec>, looked_table: TableWithColumns, ) -> Self { @@ -109,7 +111,7 @@ impl CrossTableLookup { /// - the total number of helper columns for this table, over all Cross-table lookups, /// - the total number of z polynomials for this table, over all Cross-table lookups, /// - the number of helper columns for this table, for each Cross-table lookup. - pub(crate) fn num_ctl_helpers_zs_all( + pub fn num_ctl_helpers_zs_all( ctls: &[Self], table: TableIdx, num_challenges: usize, @@ -119,7 +121,7 @@ impl CrossTableLookup { let mut num_ctls = 0; let mut num_helpers_by_ctl = vec![0; ctls.len()]; for (i, ctl) in ctls.iter().enumerate() { - let all_tables = std::iter::once(&ctl.looked_table).chain(&ctl.looking_tables); + let all_tables = once(&ctl.looked_table).chain(&ctl.looking_tables); let num_appearances = all_tables.filter(|twc| twc.table == table).count(); let is_helpers = num_appearances > 2; if is_helpers { @@ -140,23 +142,23 @@ impl CrossTableLookup { } /// Cross-table lookup data for one table. -#[derive(Clone, Default)] -pub(crate) struct CtlData<'a, F: Field> { +#[derive(Clone, Default, Debug)] +pub struct CtlData<'a, F: Field> { /// Data associated with all Z(x) polynomials for one table. - pub(crate) zs_columns: Vec>, + pub zs_columns: Vec>, } /// Cross-table lookup data associated with one Z(x) polynomial. /// One Z(x) polynomial can be associated to multiple tables, /// built from the same STARK. -#[derive(Clone)] -pub(crate) struct CtlZData<'a, F: Field> { +#[derive(Clone, Debug)] +pub struct CtlZData<'a, F: Field> { /// Helper columns to verify the Z polynomial values. pub(crate) helper_columns: Vec>, /// Z polynomial values. pub(crate) z: PolynomialValues, /// Cross-table lookup challenge. - pub(crate) challenge: GrandProductChallenge, + pub challenge: GrandProductChallenge, /// Vector of column linear combinations for the current tables. pub(crate) columns: Vec<&'a [Column]>, /// Vector of filter columns for the current table. @@ -164,17 +166,26 @@ pub(crate) struct CtlZData<'a, F: Field> { pub(crate) filter: Vec>>, } +impl<'a, F: Field> CtlZData<'a, F> { + /// Returs new CTL data from the provided arguments. + pub fn new( + helper_columns: Vec>, + z: PolynomialValues, + challenge: GrandProductChallenge, + columns: Vec<&'a [Column]>, + filter: Vec>>, + ) -> Self { + Self { + helper_columns, + z, + challenge, + columns, + filter, + } + } +} + impl<'a, F: Field> CtlData<'a, F> { - /// Returns the number of cross-table lookup polynomials. 
- pub(crate) fn len(&self) -> usize { - self.zs_columns.len() - } - - /// Returns whether there are no cross-table lookups. - pub(crate) fn is_empty(&self) -> bool { - self.zs_columns.is_empty() - } - /// Returns all the cross-table lookup helper polynomials. pub(crate) fn ctl_helper_polys(&self) -> Vec> { let num_polys = self @@ -210,82 +221,58 @@ impl<'a, F: Field> CtlData<'a, F> { } } -/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness. -#[derive(Clone, Eq, PartialEq, Debug)] -pub struct GrandProductChallengeSet { - pub(crate) challenges: Vec>, -} - -impl GrandProductChallengeSet { - pub(crate) fn to_buffer(&self, buffer: &mut Vec) -> IoResult<()> { - buffer.write_usize(self.challenges.len())?; - for challenge in &self.challenges { - buffer.write_target(challenge.beta)?; - buffer.write_target(challenge.gamma)?; - } - Ok(()) - } - - pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult { - let length = buffer.read_usize()?; - let mut challenges = Vec::with_capacity(length); - for _ in 0..length { - challenges.push(GrandProductChallenge { - beta: buffer.read_target()?, - gamma: buffer.read_target()?, - }); - } - - Ok(GrandProductChallengeSet { challenges }) - } -} - -fn get_grand_product_challenge>( - challenger: &mut Challenger, -) -> GrandProductChallenge { - let beta = challenger.get_challenge(); - let gamma = challenger.get_challenge(); - GrandProductChallenge { beta, gamma } -} - -pub(crate) fn get_grand_product_challenge_set>( - challenger: &mut Challenger, - num_challenges: usize, -) -> GrandProductChallengeSet { - let challenges = (0..num_challenges) - .map(|_| get_grand_product_challenge(challenger)) - .collect(); - GrandProductChallengeSet { challenges } -} - -fn get_grand_product_challenge_target< +/// Outputs a tuple of (challenges, data) of CTL challenges and all +/// the CTL data necessary to prove a multi-STARK system. +pub fn get_ctl_data<'a, F, C, const D: usize, const N: usize>( + config: &StarkConfig, + trace_poly_values: &[Vec>; N], + all_cross_table_lookups: &'a [CrossTableLookup], + challenger: &mut Challenger, + max_constraint_degree: usize, +) -> (GrandProductChallengeSet, [CtlData<'a, F>; N]) +where F: RichField + Extendable, - H: AlgebraicHasher, - const D: usize, ->( - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, -) -> GrandProductChallenge { - let beta = challenger.get_challenge(builder); - let gamma = challenger.get_challenge(builder); - GrandProductChallenge { beta, gamma } + C: GenericConfig, +{ + // Get challenges for the cross-table lookups. + let ctl_challenges = get_grand_product_challenge_set(challenger, config.num_challenges); + + // For each STARK, compute its cross-table lookup Z polynomials + // and get the associated `CtlData`. + let ctl_data = cross_table_lookup_data::( + trace_poly_values, + all_cross_table_lookups, + &ctl_challenges, + max_constraint_degree, + ); + + (ctl_challenges, ctl_data) } -pub(crate) fn get_grand_product_challenge_set_target< +/// Outputs all the CTL data necessary to prove a multi-STARK system. 
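`get_ctl_data` bundles the challenge sampling and Z-polynomial construction into a single prover-side call. A hedged sketch of its usage, where `traces`, `ctls`, `NUM_TABLES` and the generic parameters `F`, `C`, `D` stand in for a concrete multi-STARK system:

```rust
use plonky2::iop::challenger::Challenger;
use starky::config::StarkConfig;
use starky::cross_table_lookup::get_ctl_data;

let config = StarkConfig::standard_fast_config();
let mut challenger = Challenger::<F, <C as GenericConfig<D>>::Hasher>::new();

// The trace caps of all tables should have been observed by `challenger`
// before this point, so that the CTL challenges bind to the traces.
let (ctl_challenges, ctl_data_per_table) = get_ctl_data::<F, C, D, NUM_TABLES>(
    &config,
    &traces, // &[Vec<PolynomialValues<F>>; NUM_TABLES]
    &ctls,   // &[CrossTableLookup<F>]
    &mut challenger,
    3,       // max constraint degree of the system
);
```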
+pub fn get_ctl_vars_from_proofs<'a, F, C, const D: usize, const N: usize>( + multi_proof: &MultiProof, + all_cross_table_lookups: &'a [CrossTableLookup], + ctl_challenges: &'a GrandProductChallengeSet, + num_lookup_columns: &'a [usize; N], + max_constraint_degree: usize, +) -> [Vec>::Extension, >::Extension, D>>; + N] +where F: RichField + Extendable, - H: AlgebraicHasher, - const D: usize, ->( - builder: &mut CircuitBuilder, - challenger: &mut RecursiveChallenger, - num_challenges: usize, -) -> GrandProductChallengeSet { - let challenges = (0..num_challenges) - .map(|_| get_grand_product_challenge_target(builder, challenger)) - .collect(); - GrandProductChallengeSet { challenges } -} + C: GenericConfig, +{ + let num_ctl_helper_cols = + num_ctl_helper_columns_by_table(all_cross_table_lookups, max_constraint_degree); + CtlCheckVars::from_proofs( + &multi_proof.stark_proofs, + all_cross_table_lookups, + ctl_challenges, + num_lookup_columns, + &num_ctl_helper_cols, + ) +} /// Returns the number of helper columns for each `Table`. pub(crate) fn num_ctl_helper_columns_by_table( ctls: &[CrossTableLookup], @@ -314,6 +301,17 @@ pub(crate) fn num_ctl_helper_columns_by_table( res } +/// Gets the auxiliary polynomials associated to these CTL data. +pub(crate) fn get_ctl_auxiliary_polys( + ctl_data: Option<&CtlData>, +) -> Option>> { + ctl_data.map(|data| { + let mut ctl_polys = data.ctl_helper_polys(); + ctl_polys.extend(data.ctl_z_polys()); + ctl_polys + }) +} + /// Generates all the cross-table lookup data, for all tables. /// - `trace_poly_values` corresponds to the trace values for all tables. /// - `cross_table_lookups` corresponds to all the cross-table lookups, i.e. the looked and looking tables, as described in `CrossTableLookup`. @@ -467,8 +465,8 @@ fn partial_sums( } /// Data necessary to check the cross-table lookups of a given table. -#[derive(Clone)] -pub(crate) struct CtlCheckVars<'a, F, FE, P, const D2: usize> +#[derive(Clone, Debug)] +pub struct CtlCheckVars<'a, F, FE, P, const D2: usize> where F: Field, FE: FieldExtension, @@ -493,13 +491,24 @@ impl<'a, F: RichField + Extendable, const D: usize> CtlCheckVars<'a, F, F::Extension, F::Extension, D> { /// Extracts the `CtlCheckVars` for each STARK. - pub(crate) fn from_proofs, const N: usize>( + pub fn from_proofs, const N: usize>( proofs: &[StarkProofWithMetadata; N], cross_table_lookups: &'a [CrossTableLookup], ctl_challenges: &'a GrandProductChallengeSet, num_lookup_columns: &[usize; N], num_helper_ctl_columns: &Vec<[usize; N]>, ) -> [Vec; N] { + let mut ctl_vars_per_table = [0; N].map(|_| vec![]); + // If there are no auxiliary polys in the proofs `openings`, + // return early. The verifier will reject the proofs when + // calling `validate_proof_shape`. 
+ if proofs + .iter() + .any(|p| p.proof.openings.auxiliary_polys.is_none()) + { + return ctl_vars_per_table; + } + let mut total_num_helper_cols_by_table = [0; N]; for p_ctls in num_helper_ctl_columns { for j in 0..N { @@ -514,8 +523,14 @@ impl<'a, F: RichField + Extendable, const D: usize> .map(|(p, &num_lookup)| { let openings = &p.proof.openings; - let ctl_zs = &openings.auxiliary_polys[num_lookup..]; - let ctl_zs_next = &openings.auxiliary_polys_next[num_lookup..]; + let ctl_zs = &openings + .auxiliary_polys + .as_ref() + .expect("We cannot have CTls without auxiliary polynomials.")[num_lookup..]; + let ctl_zs_next = &openings + .auxiliary_polys_next + .as_ref() + .expect("We cannot have CTls without auxiliary polynomials.")[num_lookup..]; ctl_zs.iter().zip(ctl_zs_next).collect::>() }) .collect::>(); @@ -523,7 +538,6 @@ impl<'a, F: RichField + Extendable, const D: usize> // Put each cross-table lookup polynomial into the correct table data: if a CTL polynomial is extracted from looking/looked table t, then we add it to the `CtlCheckVars` of table t. let mut start_indices = [0; N]; let mut z_indices = [0; N]; - let mut ctl_vars_per_table = [0; N].map(|_| vec![]); for ( CrossTableLookup { looking_tables, @@ -698,8 +712,8 @@ pub(crate) fn eval_cross_table_lookup_checks { +#[derive(Clone, Debug)] +pub struct CtlCheckVarsTarget { ///Evaluation of the helper columns to check that the Z polyomial /// was constructed correctly. pub(crate) helper_columns: Vec>, @@ -716,8 +730,8 @@ pub(crate) struct CtlCheckVarsTarget { } impl<'a, F: Field, const D: usize> CtlCheckVarsTarget { - /// Circuit version of `from_proofs`. Extracts the `CtlCheckVarsTarget` for each STARK. - pub(crate) fn from_proof( + /// Circuit version of `from_proofs`, for a single STARK. + pub fn from_proof( table: TableIdx, proof: &StarkProofTarget, cross_table_lookups: &'a [CrossTableLookup], @@ -729,15 +743,24 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget { // Get all cross-table lookup polynomial openings for each STARK proof. let ctl_zs = { let openings = &proof.openings; - let ctl_zs = openings.auxiliary_polys.iter().skip(num_lookup_columns); + let ctl_zs = openings + .auxiliary_polys + .as_ref() + .expect("We cannot have CTls without auxiliary polynomials.") + .iter() + .skip(num_lookup_columns); let ctl_zs_next = openings .auxiliary_polys_next + .as_ref() + .expect("We cannot have CTls without auxiliary polynomials.") .iter() .skip(num_lookup_columns); ctl_zs.zip(ctl_zs_next).collect::>() }; - // Put each cross-table lookup polynomial into the correct table data: if a CTL polynomial is extracted from looking/looked table t, then we add it to the `CtlCheckVars` of table t. + // Put each cross-table lookup polynomial into the correct table's data. + // If a CTL polynomial is extracted from the looking/looked table `t``, + // then we add it to the `CtlCheckVars` of table `t``. let mut z_index = 0; let mut start_index = 0; let mut ctl_vars = vec![]; @@ -750,7 +773,8 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget { ) in cross_table_lookups.iter().enumerate() { for &challenges in &ctl_challenges.challenges { - // Group looking tables by `Table`, since we bundle the looking tables taken from the same `Table` together thanks to helper columns. + // Group looking tables by `Table`, since we bundle the looking tables + // taken from the same `Table` together thanks to helper columns. 
let count = looking_tables .iter() @@ -779,8 +803,6 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget { start_index += num_helper_ctl_columns[i]; z_index += 1; - // let columns = group.0.clone(); - // let filter = group.1.clone(); ctl_vars.push(Self { helper_columns, local_z: *looking_z, @@ -921,14 +943,10 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit< } /// Verifies all cross-table lookups. -pub(crate) fn verify_cross_table_lookups< - F: RichField + Extendable, - const D: usize, - const N: usize, ->( +pub fn verify_cross_table_lookups, const D: usize, const N: usize>( cross_table_lookups: &[CrossTableLookup], ctl_zs_first: [Vec; N], - ctl_extra_looking_sums: Vec>, + ctl_extra_looking_sums: Option<&[Vec]>, config: &StarkConfig, ) -> Result<()> { let mut ctl_zs_openings = ctl_zs_first.iter().map(|v| v.iter()).collect::>(); @@ -941,7 +959,9 @@ pub(crate) fn verify_cross_table_lookups< ) in cross_table_lookups.iter().enumerate() { // Get elements looking into `looked_table` that are not associated to any STARK. - let extra_sum_vec = &ctl_extra_looking_sums[looked_table.table]; + let extra_sum_vec: &[F] = ctl_extra_looking_sums + .map(|v| v[looked_table.table].as_ref()) + .unwrap_or_default(); // We want to iterate on each looking table only once. let mut filtered_looking_tables = vec![]; for table in looking_tables { @@ -974,7 +994,7 @@ pub(crate) fn verify_cross_table_lookups< } /// Circuit version of `verify_cross_table_lookups`. Verifies all cross-table lookups. -pub(crate) fn verify_cross_table_lookups_circuit< +pub fn verify_cross_table_lookups_circuit< F: RichField + Extendable, const D: usize, const N: usize, @@ -982,7 +1002,7 @@ pub(crate) fn verify_cross_table_lookups_circuit< builder: &mut CircuitBuilder, cross_table_lookups: Vec>, ctl_zs_first: [Vec; N], - ctl_extra_looking_sums: Vec>, + ctl_extra_looking_sums: Option<&[Vec]>, inner_config: &StarkConfig, ) { let mut ctl_zs_openings = ctl_zs_first.iter().map(|v| v.iter()).collect::>(); @@ -992,7 +1012,9 @@ pub(crate) fn verify_cross_table_lookups_circuit< } in cross_table_lookups.into_iter() { // Get elements looking into `looked_table` that are not associated to any STARK. - let extra_sum_vec = &ctl_extra_looking_sums[looked_table.table]; + let extra_sum_vec: &[Target] = ctl_extra_looking_sums + .map(|v| v[looked_table.table].as_ref()) + .unwrap_or_default(); // We want to iterate on each looking table only once. let mut filtered_looking_tables = vec![]; for table in looking_tables { @@ -1019,26 +1041,32 @@ pub(crate) fn verify_cross_table_lookups_circuit< debug_assert!(ctl_zs_openings.iter_mut().all(|iter| iter.next().is_none())); } -#[cfg(test)] -pub(crate) mod testutils { - use std::collections::HashMap; +/// Debugging module, to assert correctness of the different CTLs of a multi-STARK system, +/// that can be used during the proof generation process. +/// +/// **Note**: this is an expensive check, hence is only available when the `debug_assertions` +/// flag is activated, to not hinder performances with regular `release` build. 
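A sketch of how a prover can invoke this debug check right after trace generation; `trace_poly_values` and `all_cross_table_lookups` are assumed to come from the surrounding proving code:

```rust
#[cfg(debug_assertions)]
{
    use hashbrown::HashMap;
    use starky::cross_table_lookup::debug_utils::check_ctls;

    // Keyed by CTL index; left empty when every looking row comes from an
    // actual table rather than from out-of-band extra values.
    let extra_looking_values = HashMap::new();
    check_ctls(
        &trace_poly_values,
        &all_cross_table_lookups,
        &extra_looking_values,
    );
}
```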
+#[cfg(debug_assertions)] +pub mod debug_utils { + #[cfg(not(feature = "std"))] + use alloc::{vec, vec::Vec}; + use hashbrown::HashMap; use plonky2::field::polynomial::PolynomialValues; use plonky2::field::types::Field; - use crate::all_stark::Table; - use crate::cross_table_lookup::{CrossTableLookup, TableWithColumns}; + use super::{CrossTableLookup, TableIdx, TableWithColumns}; - type MultiSet = HashMap, Vec<(Table, usize)>>; + type MultiSet = HashMap, Vec<(TableIdx, usize)>>; /// Check that the provided traces and cross-table lookups are consistent. - pub(crate) fn check_ctls( + pub fn check_ctls( trace_poly_values: &[Vec>], cross_table_lookups: &[CrossTableLookup], - extra_memory_looking_values: &[Vec], + extra_looking_values: &HashMap>>, ) { for (i, ctl) in cross_table_lookups.iter().enumerate() { - check_ctl(trace_poly_values, ctl, i, extra_memory_looking_values); + check_ctl(trace_poly_values, ctl, i, extra_looking_values.get(&i)); } } @@ -1046,7 +1074,7 @@ pub(crate) mod testutils { trace_poly_values: &[Vec>], ctl: &CrossTableLookup, ctl_index: usize, - extra_memory_looking_values: &[Vec], + extra_looking_values: Option<&Vec>>, ) { let CrossTableLookup { looking_tables, @@ -1063,15 +1091,15 @@ pub(crate) mod testutils { } process_table(trace_poly_values, looked_table, &mut looked_multiset); - // Extra looking values for memory - if ctl_index == Table::Memory as usize { - for row in extra_memory_looking_values.iter() { + // Include extra looking values if any for this `ctl_index`. + if let Some(values) = extra_looking_values { + for row in values.iter() { // The table and the row index don't matter here, as we just want to enforce - // that the special extra values do appear when looking against the Memory table. + // that the special extra values do appear when looking against the specified table. looking_multiset .entry(row.to_vec()) .or_default() - .push((Table::Cpu, 0)); + .push((0, 0)); } } @@ -1106,10 +1134,7 @@ pub(crate) mod testutils { .iter() .map(|c| c.eval_table(trace, i)) .collect::>(); - multiset - .entry(row) - .or_default() - .push((Table::all()[table.table], i)); + multiset.entry(row).or_default().push((table.table, i)); } else { assert_eq!(filter, F::ZERO, "Non-binary filter?") } @@ -1117,8 +1142,8 @@ pub(crate) mod testutils { } fn check_locations( - looking_locations: &[(Table, usize)], - looked_locations: &[(Table, usize)], + looking_locations: &[(TableIdx, usize)], + looked_locations: &[(TableIdx, usize)], ctl_index: usize, row: &[F], ) { diff --git a/starky/src/evaluation_frame.rs b/starky/src/evaluation_frame.rs index e2dcf2db..fbfaf71e 100644 --- a/starky/src/evaluation_frame.rs +++ b/starky/src/evaluation_frame.rs @@ -1,3 +1,5 @@ +//! Implementation of constraint evaluation frames for STARKs. + /// A trait for viewing an evaluation frame of a STARK table. /// /// It allows to access the current and next rows at a given step @@ -8,6 +10,7 @@ pub trait StarkEvaluationFrame &[T]; + /// Returns the public inputs for this evaluation frame. fn get_public_inputs(&self) -> &[U]; /// Outputs a new evaluation frame from the provided local and next values. @@ -24,6 +28,9 @@ pub trait StarkEvaluationFrame Self; } +/// An evaluation frame to be used when defining constraints of a STARK system, that +/// implements the [`StarkEvaluationFrame`] trait. 
+#[derive(Debug)] pub struct StarkFrame< T: Copy + Clone + Default, U: Copy + Clone + Default, diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs index 903c0abf..4bfbf404 100644 --- a/starky/src/fibonacci_stark.rs +++ b/starky/src/fibonacci_stark.rs @@ -1,5 +1,9 @@ -use alloc::vec; -use alloc::vec::Vec; +//! An example of generating and verifying STARK proofs for the Fibonacci sequence. +//! The toy STARK system also includes two columns that are a permutation of the other, +//! to highlight the use of the permutation argument with logUp. + +#[cfg(not(feature = "std"))] +use alloc::{vec, vec::Vec}; use core::marker::PhantomData; use plonky2::field::extension::{Extendable, FieldExtension}; @@ -16,9 +20,8 @@ use crate::stark::Stark; use crate::util::trace_rows_to_poly_values; /// Toy STARK system used for testing. -/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition -/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`. -/// Note: The `i, j` columns are only used to test the permutation argument. +/// Computes a Fibonacci sequence with state `[x0, x1]` using the state transition +/// `x0' <- x1, x1' <- x0 + x1. #[derive(Copy, Clone)] struct FibonacciStark, const D: usize> { num_rows: usize, @@ -41,6 +44,120 @@ impl, const D: usize> FibonacciStark { } } + /// Generate the trace using `x0, x1` as initial state values. + fn generate_trace(&self, x0: F, x1: F) -> Vec> { + let trace_rows = (0..self.num_rows) + .scan([x0, x1], |acc, _| { + let tmp = *acc; + acc[0] = tmp[1]; + acc[1] = tmp[0] + tmp[1]; + Some(tmp) + }) + .collect::>(); + trace_rows_to_poly_values(trace_rows) + } +} + +const FIBONACCI_COLUMNS: usize = 2; +const FIBONACCI_PUBLIC_INPUTS: usize = 3; + +impl, const D: usize> Stark for FibonacciStark { + type EvaluationFrame = StarkFrame + where + FE: FieldExtension, + P: PackedField; + + type EvaluationFrameTarget = StarkFrame< + ExtensionTarget, + ExtensionTarget, + FIBONACCI_COLUMNS, + FIBONACCI_PUBLIC_INPUTS, + >; + + fn eval_packed_generic( + &self, + vars: &Self::EvaluationFrame, + yield_constr: &mut ConstraintConsumer
<P>
, + ) where + FE: FieldExtension, + P: PackedField, + { + let local_values = vars.get_local_values(); + let next_values = vars.get_next_values(); + let public_inputs = vars.get_public_inputs(); + + // Check public inputs. + yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]); + yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]); + yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]); + + // x0' <- x1 + yield_constr.constraint_transition(next_values[0] - local_values[1]); + // x1' <- x0 + x1 + yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]); + } + + fn eval_ext_circuit( + &self, + builder: &mut CircuitBuilder, + vars: &Self::EvaluationFrameTarget, + yield_constr: &mut RecursiveConstraintConsumer, + ) { + let local_values = vars.get_local_values(); + let next_values = vars.get_next_values(); + let public_inputs = vars.get_public_inputs(); + // Check public inputs. + let pis_constraints = [ + builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]), + builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]), + builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]), + ]; + yield_constr.constraint_first_row(builder, pis_constraints[0]); + yield_constr.constraint_first_row(builder, pis_constraints[1]); + yield_constr.constraint_last_row(builder, pis_constraints[2]); + + // x0' <- x1 + let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]); + yield_constr.constraint_transition(builder, first_col_constraint); + // x1' <- x0 + x1 + let second_col_constraint = { + let tmp = builder.sub_extension(next_values[1], local_values[0]); + builder.sub_extension(tmp, local_values[1]) + }; + yield_constr.constraint_transition(builder, second_col_constraint); + } + + fn constraint_degree(&self) -> usize { + 2 + } +} + +/// Similar system than above, but with extra columns to illustrate the permutation argument. +/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition +/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`. +/// Note: The `i, j` columns are the columns used to test the permutation argument. +#[derive(Copy, Clone)] +struct FibonacciWithPermutationStark, const D: usize> { + num_rows: usize, + _phantom: PhantomData, +} + +impl, const D: usize> FibonacciWithPermutationStark { + // The first public input is `x0`. + const PI_INDEX_X0: usize = 0; + // The second public input is `x1`. + const PI_INDEX_X1: usize = 1; + // The third public input is the second element of the last row, which should be equal to the + // `num_rows`-th Fibonacci number. + const PI_INDEX_RES: usize = 2; + + const fn new(num_rows: usize) -> Self { + Self { + num_rows, + _phantom: PhantomData, + } + } + /// Generate the trace using `x0, x1, 0, 1, 1` as initial state values. 
fn generate_trace(&self, x0: F, x1: F) -> Vec> { let mut trace_rows = (0..self.num_rows) @@ -59,17 +176,23 @@ impl, const D: usize> FibonacciStark { } } -const COLUMNS: usize = 5; -const PUBLIC_INPUTS: usize = 3; +const FIBONACCI_PERM_COLUMNS: usize = 5; +const FIBONACCI_PERM_PUBLIC_INPUTS: usize = 3; -impl, const D: usize> Stark for FibonacciStark { - type EvaluationFrame = StarkFrame +impl, const D: usize> Stark + for FibonacciWithPermutationStark +{ + type EvaluationFrame = StarkFrame where FE: FieldExtension, P: PackedField; - type EvaluationFrameTarget = - StarkFrame, ExtensionTarget, COLUMNS, PUBLIC_INPUTS>; + type EvaluationFrameTarget = StarkFrame< + ExtensionTarget, + ExtensionTarget, + FIBONACCI_PERM_COLUMNS, + FIBONACCI_PERM_PUBLIC_INPUTS, + >; fn eval_packed_generic( &self, @@ -151,7 +274,7 @@ mod tests { use plonky2::util::timing::TimingTree; use crate::config::StarkConfig; - use crate::fibonacci_stark::FibonacciStark; + use crate::fibonacci_stark::{FibonacciStark, FibonacciWithPermutationStark}; use crate::proof::StarkProofWithPublicInputs; use crate::prover::prove; use crate::recursive_verifier::{ @@ -171,14 +294,30 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - type S = FibonacciStark; + type S1 = FibonacciStark; + type S2 = FibonacciWithPermutationStark; let config = StarkConfig::standard_fast_config(); let num_rows = 1 << 5; let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)]; - let stark = S::new(num_rows); + + // Test first STARK + let stark = S1::new(num_rows); let trace = stark.generate_trace(public_inputs[0], public_inputs[1]); - let proof = prove::( + let proof = prove::( + stark, + &config, + trace, + &public_inputs, + &mut TimingTree::default(), + )?; + + verify_stark_proof(stark, proof, &config)?; + + // Test second STARK + let stark = S2::new(num_rows); + let trace = stark.generate_trace(public_inputs[0], public_inputs[1]); + let proof = prove::( stark, &config, trace, @@ -194,10 +333,14 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - type S = FibonacciStark; + type S1 = FibonacciStark; + type S2 = FibonacciWithPermutationStark; let num_rows = 1 << 5; - let stark = S::new(num_rows); + let stark = S1::new(num_rows); + test_stark_low_degree(stark)?; + + let stark = S2::new(num_rows); test_stark_low_degree(stark) } @@ -206,11 +349,14 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - type S = FibonacciStark; + type S1 = FibonacciStark; + type S2 = FibonacciWithPermutationStark; let num_rows = 1 << 5; - let stark = S::new(num_rows); - test_stark_circuit_constraints::(stark) + let stark = S1::new(num_rows); + test_stark_circuit_constraints::(stark)?; + let stark = S2::new(num_rows); + test_stark_circuit_constraints::(stark) } #[test] @@ -219,14 +365,17 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - type S = FibonacciStark; + type S1 = FibonacciStark; + type S2 = FibonacciWithPermutationStark; let config = StarkConfig::standard_fast_config(); let num_rows = 1 << 5; let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)]; - let stark = S::new(num_rows); + + // Test first STARK + let stark = S1::new(num_rows); let trace = stark.generate_trace(public_inputs[0], public_inputs[1]); - let proof = prove::( + let proof = prove::( stark, &config, trace, @@ -235,7 +384,21 @@ mod tests { )?; verify_stark_proof(stark, proof.clone(), &config)?; - recursive_proof::(stark, proof, 
&config, true) + recursive_proof::(stark, proof, &config, true)?; + + // Test second STARK + let stark = S2::new(num_rows); + let trace = stark.generate_trace(public_inputs[0], public_inputs[1]); + let proof = prove::( + stark, + &config, + trace, + &public_inputs, + &mut TimingTree::default(), + )?; + verify_stark_proof(stark, proof.clone(), &config)?; + + recursive_proof::(stark, proof, &config, true) } fn recursive_proof< @@ -257,8 +420,9 @@ mod tests { let mut builder = CircuitBuilder::::new(circuit_config); let mut pw = PartialWitness::new(); let degree_bits = inner_proof.proof.recover_degree_bits(inner_config); - let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits); - set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof); + let pt = + add_virtual_stark_proof_with_pis(&mut builder, &stark, inner_config, degree_bits, 0, 0); + set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof, builder.zero()); verify_stark_proof_circuit::(&mut builder, stark, pt, inner_config); diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs index 5f9beddc..be75b0e0 100644 --- a/starky/src/get_challenges.rs +++ b/starky/src/get_challenges.rs @@ -1,5 +1,3 @@ -use alloc::vec::Vec; - use plonky2::field::extension::Extendable; use plonky2::field::polynomial::PolynomialCoeffs; use plonky2::fri::proof::{FriProof, FriProofTarget}; @@ -12,12 +10,23 @@ use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; use crate::config::StarkConfig; -use crate::lookup::{get_grand_product_challenge_set, get_grand_product_challenge_set_target}; +use crate::lookup::{ + get_grand_product_challenge_set, get_grand_product_challenge_set_target, + GrandProductChallengeSet, +}; use crate::proof::*; -use crate::stark::Stark; +/// Generates challenges for a STARK proof from a challenger and given +/// all the arguments needed to update the challenger state. +/// +/// Note: `trace_cap` is passed as `Option` to signify whether to observe it +/// or not by the challenger. Observing it here could be redundant in a +/// multi-STARK system where trace caps would have already been observed +/// before proving individually each STARK. fn get_challenges( - trace_cap: &MerkleCap, + challenger: &mut Challenger, + challenges: Option<&GrandProductChallengeSet>, + trace_cap: Option<&MerkleCap>, auxiliary_polys_cap: Option<&MerkleCap>, quotient_polys_cap: &MerkleCap, openings: &StarkOpeningSet, @@ -33,15 +42,21 @@ where { let num_challenges = config.num_challenges; - let mut challenger = Challenger::::new(); + if let Some(cap) = &trace_cap { + challenger.observe_cap(cap); + } - challenger.observe_cap(trace_cap); + let lookup_challenge_set = if let Some(&challenges) = challenges.as_ref() { + Some(challenges.clone()) + } else { + auxiliary_polys_cap + .is_some() + .then(|| get_grand_product_challenge_set(challenger, num_challenges)) + }; - let lookup_challenge_set = auxiliary_polys_cap.map(|auxiliary_polys_cap| { - let tmp = get_grand_product_challenge_set(&mut challenger, num_challenges); - challenger.observe_cap(auxiliary_polys_cap); - tmp - }); + if let Some(cap) = &auxiliary_polys_cap { + challenger.observe_cap(cap); + } let stark_alphas = challenger.get_n_challenges(num_challenges); @@ -64,25 +79,27 @@ where } } -impl StarkProofWithPublicInputs +impl StarkProof where F: RichField + Extendable, C: GenericConfig, { - // TODO: Should be used later in compression? 
- #![allow(dead_code)] - pub(crate) fn fri_query_indices(&self, config: &StarkConfig, degree_bits: usize) -> Vec { - self.get_challenges(config, degree_bits) - .fri_challenges - .fri_query_indices - } - /// Computes all Fiat-Shamir challenges used in the STARK proof. - pub(crate) fn get_challenges( + /// For a single STARK system, the `ignore_trace_cap` boolean should + /// always be set to `false`. + /// + /// Multi-STARK systems may already observe individual trace caps + /// ahead of proving each table, and hence may ignore observing + /// again the cap when generating individual challenges. + pub fn get_challenges( &self, + challenger: &mut Challenger, + challenges: Option<&GrandProductChallengeSet>, + ignore_trace_cap: bool, config: &StarkConfig, - degree_bits: usize, ) -> StarkProofChallenges { + let degree_bits = self.recover_degree_bits(config); + let StarkProof { trace_cap, auxiliary_polys_cap, @@ -95,9 +112,17 @@ where pow_witness, .. }, - } = &self.proof; + } = &self; + + let trace_cap = if ignore_trace_cap { + None + } else { + Some(trace_cap) + }; get_challenges::( + challenger, + challenges, trace_cap, auxiliary_polys_cap.as_ref(), quotient_polys_cap, @@ -111,14 +136,37 @@ where } } -#[allow(clippy::too_many_arguments)] -pub(crate) fn get_challenges_target< +impl StarkProofWithPublicInputs +where F: RichField + Extendable, C: GenericConfig, - const D: usize, ->( +{ + /// Computes all Fiat-Shamir challenges used in the STARK proof. + /// For a single STARK system, the `ignore_trace_cap` boolean should + /// always be set to `false`. + /// + /// Multi-STARK systems may already observe individual trace caps + /// ahead of proving each table, and hence may ignore observing + /// again the cap when generating individual challenges. + pub fn get_challenges( + &self, + challenger: &mut Challenger, + challenges: Option<&GrandProductChallengeSet>, + ignore_trace_cap: bool, + config: &StarkConfig, + ) -> StarkProofChallenges { + self.proof + .get_challenges(challenger, challenges, ignore_trace_cap, config) + } +} + +/// Circuit version of `get_challenges`, with the same flexibility around +/// `trace_cap` being passed as an `Option`. 
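In the single-STARK flow, the two new arguments of `get_challenges` are simply `None` and `false`. A minimal sketch, with `proof_with_pis`, `config` and the generic parameters assumed from an earlier proving run:

```rust
use plonky2::iop::challenger::Challenger;

// No pre-drawn lookup challenges, and the trace cap has not been observed
// yet, so it must not be ignored.
let mut challenger = Challenger::<F, <C as GenericConfig<D>>::Hasher>::new();
let challenges = proof_with_pis.get_challenges(&mut challenger, None, false, &config);
```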
+fn get_challenges_target( builder: &mut CircuitBuilder, - trace_cap: &MerkleCapTarget, + challenger: &mut RecursiveChallenger, + challenges: Option<&GrandProductChallengeSet>, + trace_cap: Option<&MerkleCapTarget>, auxiliary_polys_cap: Option<&MerkleCapTarget>, quotient_polys_cap: &MerkleCapTarget, openings: &StarkOpeningSetTarget, @@ -128,26 +176,34 @@ pub(crate) fn get_challenges_target< config: &StarkConfig, ) -> StarkProofChallengesTarget where + F: RichField + Extendable, + C: GenericConfig, C::Hasher: AlgebraicHasher, { let num_challenges = config.num_challenges; - let mut challenger = RecursiveChallenger::::new(builder); + if let Some(trace_cap) = trace_cap { + challenger.observe_cap(trace_cap); + } - challenger.observe_cap(trace_cap); + let lookup_challenge_set = if let Some(&challenges) = challenges.as_ref() { + Some(challenges.clone()) + } else { + auxiliary_polys_cap + .is_some() + .then(|| get_grand_product_challenge_set_target(builder, challenger, num_challenges)) + }; - let lookup_challenge_set = auxiliary_polys_cap.map(|permutation_zs_cap| { - let tmp = get_grand_product_challenge_set_target(builder, &mut challenger, num_challenges); - challenger.observe_cap(permutation_zs_cap); - tmp - }); + if let Some(cap) = auxiliary_polys_cap { + challenger.observe_cap(cap); + } let stark_alphas = challenger.get_n_challenges(builder, num_challenges); challenger.observe_cap(quotient_polys_cap); let stark_zeta = challenger.get_extension_challenge(builder); - challenger.observe_openings(&openings.to_fri_openings()); + challenger.observe_openings(&openings.to_fri_openings(builder.zero())); StarkProofChallengesTarget { lookup_challenge_set, @@ -163,10 +219,20 @@ where } } -impl StarkProofWithPublicInputsTarget { - pub(crate) fn get_challenges( +impl StarkProofTarget { + /// Creates all Fiat-Shamir `Target` challenges used in the STARK proof. + /// For a single STARK system, the `ignore_trace_cap` boolean should + /// always be set to `false`. + /// + /// Multi-STARK systems may already observe individual trace caps + /// ahead of proving each table, and hence may ignore observing + /// again the cap when generating individual challenges. + pub fn get_challenges( &self, builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, + challenges: Option<&GrandProductChallengeSet>, + ignore_trace_cap: bool, config: &StarkConfig, ) -> StarkProofChallengesTarget where @@ -186,10 +252,18 @@ impl StarkProofWithPublicInputsTarget { pow_witness, .. }, - } = &self.proof; + } = self; + + let trace_cap = if ignore_trace_cap { + None + } else { + Some(trace_cap) + }; get_challenges_target::( builder, + challenger, + challenges, trace_cap, auxiliary_polys_cap.as_ref(), quotient_polys_cap, @@ -202,6 +276,32 @@ impl StarkProofWithPublicInputsTarget { } } +impl StarkProofWithPublicInputsTarget { + /// Creates all Fiat-Shamir `Target` challenges used in the STARK proof. + /// For a single STARK system, the `ignore_trace_cap` boolean should + /// always be set to `false`. + /// + /// Multi-STARK systems may already observe individual trace caps + /// ahead of proving each table, and hence may ignore observing + /// again the cap when generating individual challenges. 
+ pub fn get_challenges( + &self, + builder: &mut CircuitBuilder, + challenger: &mut RecursiveChallenger, + challenges: Option<&GrandProductChallengeSet>, + ignore_trace_cap: bool, + config: &StarkConfig, + ) -> StarkProofChallengesTarget + where + F: RichField + Extendable, + C: GenericConfig, + C::Hasher: AlgebraicHasher, + { + self.proof + .get_challenges::(builder, challenger, challenges, ignore_trace_cap, config) + } +} + // TODO: Deal with the compressed stuff. // impl, C: GenericConfig, const D: usize> // CompressedProofWithPublicInputs diff --git a/starky/src/lib.rs b/starky/src/lib.rs index f6b4f5e0..63777fba 100644 --- a/starky/src/lib.rs +++ b/starky/src/lib.rs @@ -1,14 +1,332 @@ +//! A FRI-based STARK implementation over the Goldilocks field, with support +//! for recursive proof verification through the plonky2 SNARK backend. +//! +//! This library is intended to provide all the necessary tools to prove, +//! verify, and recursively verify STARK statements. While the library +//! is tailored for a system with a single STARK, it also is flexible +//! enough to support a multi-STARK system, i.e. a system of independent +//! STARK statements possibly sharing common values. See section below for +//! more information on how to define such a system. +//! +//! +//! # Defining a STARK statement +//! +//! A STARK system is configured by a [`StarkConfig`][crate::config::StarkConfig] +//! defining all the parameters to be used when generating proofs associated +//! to the statement. How constraints should be defined over the STARK trace is +//! defined through the [`Stark`][crate::stark::Stark] trait, that takes a +//! [`StarkEvaluationFrame`][crate::evaluation_frame::StarkEvaluationFrame] of +//! two consecutive rows and a list of public inputs. +//! +//! ### Example: Fibonacci sequence +//! +//! To build a STARK for the modified Fibonacci sequence starting with two +//! user-provided values `x0` and `x1`, one can do the following: +//! +//! ```rust +//! # use core::marker::PhantomData; +//! // Imports all basic types. +//! use plonky2::field::extension::{Extendable, FieldExtension}; +//! use plonky2::field::packed::PackedField; +//! use plonky2::field::polynomial::PolynomialValues; +//! use plonky2::hash::hash_types::RichField; +//! # use starky::util::trace_rows_to_poly_values; +//! +//! // Imports to define the constraints of our STARK. +//! use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +//! use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame}; +//! use starky::stark::Stark; +//! +//! // Imports to define the recursive constraints of our STARK. +//! use plonky2::iop::ext_target::ExtensionTarget; +//! use plonky2::plonk::circuit_builder::CircuitBuilder; +//! +//! pub struct FibonacciStark, const D: usize> { +//! num_rows: usize, +//! _phantom: PhantomData, +//! } +//! +//! // Define witness generation. +//! impl, const D: usize> FibonacciStark { +//! // The first public input is `x0`. +//! const PI_INDEX_X0: usize = 0; +//! // The second public input is `x1`. +//! const PI_INDEX_X1: usize = 1; +//! // The third public input is the second element of the last row, +//! // which should be equal to the `num_rows`-th Fibonacci number. +//! const PI_INDEX_RES: usize = 2; +//! +//! /// Generate the trace using `x0, x1, 0` as initial state values. +//! fn generate_trace(&self, x0: F, x1: F) -> Vec> { +//! let mut trace_rows = (0..self.num_rows) +//! .scan([x0, x1, F::ZERO], |acc, _| { +//! let tmp = *acc; +//! acc[0] = tmp[1]; +//! 
acc[1] = tmp[0] + tmp[1]; +//! acc[2] = tmp[2] + F::ONE; +//! Some(tmp) +//! }) +//! .collect::>(); +//! +//! // Transpose the row-wise trace for the prover. +//! trace_rows_to_poly_values(trace_rows) +//! } +//! } +//! +//! // Define constraints. +//! const COLUMNS: usize = 3; +//! const PUBLIC_INPUTS: usize = 3; +//! +//! impl, const D: usize> Stark for FibonacciStark { +//! type EvaluationFrame = StarkFrame +//! where +//! FE: FieldExtension, +//! P: PackedField; +//! +//! type EvaluationFrameTarget = +//! StarkFrame, ExtensionTarget, COLUMNS, PUBLIC_INPUTS>; +//! +//! // Define this STARK's constraints. +//! fn eval_packed_generic( +//! &self, +//! vars: &Self::EvaluationFrame, +//! yield_constr: &mut ConstraintConsumer
<P>
<P>,
+//!     ) where
+//!         FE: FieldExtension<D2, BaseField = F>,
+//!         P: PackedField<Scalar = FE>,
+//!     {
+//!         let local_values = vars.get_local_values();
+//!         let next_values = vars.get_next_values();
+//!         let public_inputs = vars.get_public_inputs();
+//!
+//!         // Check public inputs.
+//!         yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
+//!         yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
+//!         yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);
+//!
+//!         // Enforce the Fibonacci transition constraints.
+//!         // x0' <- x1
+//!         yield_constr.constraint_transition(next_values[0] - local_values[1]);
+//!         // x1' <- x0 + x1
+//!         yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
+//!     }
+//!
+//!     // Define the constraints to recursively verify this STARK.
+//!     fn eval_ext_circuit(
+//!         &self,
+//!         builder: &mut CircuitBuilder<F, D>,
+//!         vars: &Self::EvaluationFrameTarget,
+//!         yield_constr: &mut RecursiveConstraintConsumer<F, D>,
+//!     ) {
+//!         let local_values = vars.get_local_values();
+//!         let next_values = vars.get_next_values();
+//!         let public_inputs = vars.get_public_inputs();
+//!
+//!         // Check public inputs.
+//!         let pis_constraints = [
+//!             builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
+//!             builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
+//!             builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
+//!         ];
+//!
+//!         yield_constr.constraint_first_row(builder, pis_constraints[0]);
+//!         yield_constr.constraint_first_row(builder, pis_constraints[1]);
+//!         yield_constr.constraint_last_row(builder, pis_constraints[2]);
+//!
+//!         // Enforce the Fibonacci transition constraints.
+//!         // x0' <- x1
+//!         let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
+//!         yield_constr.constraint_transition(builder, first_col_constraint);
+//!         // x1' <- x0 + x1
+//!         let second_col_constraint = {
+//!             let tmp = builder.sub_extension(next_values[1], local_values[0]);
+//!             builder.sub_extension(tmp, local_values[1])
+//!         };
+//!         yield_constr.constraint_transition(builder, second_col_constraint);
+//!     }
+//!
+//!     fn constraint_degree(&self) -> usize {
+//!         2
+//!     }
+//! }
+//! ```
+//!
+//! One can then instantiate a new `FibonacciStark` instance, generate an associated
+//! STARK trace, and generate a proof for it.
+//!
+//! ```rust
+//! # use anyhow::Result;
+//! # use core::marker::PhantomData;
+//! # // Imports all basic types.
+//! # use plonky2::field::extension::{Extendable, FieldExtension};
+//! # use plonky2::field::types::Field;
+//! # use plonky2::field::packed::PackedField;
+//! # use plonky2::field::polynomial::PolynomialValues;
+//! # use plonky2::hash::hash_types::RichField;
+//! # use starky::util::trace_rows_to_poly_values;
+//! # // Imports to define the constraints of our STARK.
+//! # use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+//! # use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
+//! # use starky::stark::Stark;
+//! # // Imports to define the recursive constraints of our STARK.
+//! # use plonky2::iop::ext_target::ExtensionTarget;
+//! # use plonky2::plonk::circuit_builder::CircuitBuilder;
+//! # use plonky2::util::timing::TimingTree;
+//! # use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
+//! # use starky::prover::prove;
+//! # use starky::verifier::verify_stark_proof;
+//! # use starky::config::StarkConfig;
+//! #
+//! # #[derive(Copy, Clone)]
+//! # pub struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
+//! #     num_rows: usize,
+//! #     _phantom: PhantomData<F>,
+//! # }
+//! # // Define witness generation.
+//! # impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
+//! #     // The first public input is `x0`.
+//! #     const PI_INDEX_X0: usize = 0;
+//! #     // The second public input is `x1`.
+//! #     const PI_INDEX_X1: usize = 1;
+//! #     // The third public input is the second element of the last row,
+//! #     // which should be equal to the `num_rows`-th Fibonacci number.
+//! #     const PI_INDEX_RES: usize = 2;
+//! #     /// Generate the trace using `x0, x1, 0` as initial state values.
+//! #     fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
+//! #         let mut trace_rows = (0..self.num_rows)
+//! #             .scan([x0, x1, F::ZERO], |acc, _| {
+//! #                 let tmp = *acc;
+//! #                 acc[0] = tmp[1];
+//! #                 acc[1] = tmp[0] + tmp[1];
+//! #                 acc[2] = tmp[2] + F::ONE;
+//! #                 Some(tmp)
+//! #             })
+//! #             .collect::<Vec<_>>();
+//! #         // Transpose the row-wise trace for the prover.
+//! #         trace_rows_to_poly_values(trace_rows)
+//! #     }
+//! #     const fn new(num_rows: usize) -> Self {
+//! #         Self {
+//! #             num_rows,
+//! #             _phantom: PhantomData,
+//! #         }
+//! #     }
+//! # }
+//! # // Define constraints.
+//! # const COLUMNS: usize = 3;
+//! # const PUBLIC_INPUTS: usize = 3;
+//! # impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
+//! #     type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, COLUMNS, PUBLIC_INPUTS>
+//! #     where
+//! #         FE: FieldExtension<D2, BaseField = F>,
+//! #         P: PackedField<Scalar = FE>;
+//! #     type EvaluationFrameTarget =
+//! #         StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, COLUMNS, PUBLIC_INPUTS>;
+//! #     // Define this STARK's constraints.
+//! #     fn eval_packed_generic<FE, P, const D2: usize>(
+//! #         &self,
+//! #         vars: &Self::EvaluationFrame<FE, P, D2>,
+//! #         yield_constr: &mut ConstraintConsumer<P>,
+//! #     ) where
+//! #         FE: FieldExtension<D2, BaseField = F>,
+//! #         P: PackedField<Scalar = FE>,
+//! #     {
+//! #         let local_values = vars.get_local_values();
+//! #         let next_values = vars.get_next_values();
+//! #         let public_inputs = vars.get_public_inputs();
+//! #         // Check public inputs.
+//! #         yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
+//! #         yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
+//! #         yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);
+//! #         // Enforce the Fibonacci transition constraints.
+//! #         // x0' <- x1
+//! #         yield_constr.constraint_transition(next_values[0] - local_values[1]);
+//! #         // x1' <- x0 + x1
+//! #         yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
+//! #     }
+//! #     // Define the constraints to recursively verify this STARK.
+//! #     fn eval_ext_circuit(
+//! #         &self,
+//! #         builder: &mut CircuitBuilder<F, D>,
+//! #         vars: &Self::EvaluationFrameTarget,
+//! #         yield_constr: &mut RecursiveConstraintConsumer<F, D>,
+//! #     ) {
+//! #         let local_values = vars.get_local_values();
+//! #         let next_values = vars.get_next_values();
+//! #         let public_inputs = vars.get_public_inputs();
+//! #         // Check public inputs.
+//! #         let pis_constraints = [
+//! #             builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
+//! #             builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
+//! #             builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
+//! #         ];
+//! #         yield_constr.constraint_first_row(builder, pis_constraints[0]);
+//! #         yield_constr.constraint_first_row(builder, pis_constraints[1]);
+//! #         yield_constr.constraint_last_row(builder, pis_constraints[2]);
+//! #         // Enforce the Fibonacci transition constraints.
+//! #         // x0' <- x1
+//! #         let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
+//! #         yield_constr.constraint_transition(builder, first_col_constraint);
+//! #         // x1' <- x0 + x1
+//! #         let second_col_constraint = {
+//! #             let tmp = builder.sub_extension(next_values[1], local_values[0]);
+//! #             builder.sub_extension(tmp, local_values[1])
+//! #         };
+//! #         yield_constr.constraint_transition(builder, second_col_constraint);
+//! #     }
+//! #     fn constraint_degree(&self) -> usize {
+//! #         2
+//! #     }
+//! # }
+//! # fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
+//! #     (0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
+//! # }
+//! #
+//! const D: usize = 2;
+//! const CONFIG: StarkConfig = StarkConfig::standard_fast_config();
+//! type C = PoseidonGoldilocksConfig;
+//! type F = <C as GenericConfig<D>>::F;
+//! type S = FibonacciStark<F, D>;
+//!
+//! fn main() {
+//!     let num_rows = 1 << 10;
+//!     let x0 = F::from_canonical_u32(2);
+//!     let x1 = F::from_canonical_u32(7);
+//!
+//!     let public_inputs = [x0, x1, fibonacci(num_rows - 1, x0, x1)];
+//!     let stark = FibonacciStark::<F, D>::new(num_rows);
+//!     let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
+//!
+//!     let proof = prove::<F, C, S, D>(
+//!         stark,
+//!         &CONFIG,
+//!         trace,
+//!         &public_inputs,
+//!         &mut TimingTree::default(),
+//!     ).expect("We should have a valid proof!");
+//!
+//!     verify_stark_proof(stark, proof, &CONFIG)
+//!         .expect("We should be able to verify this proof!")
+//! }
+//! ```
+//!
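The documentation example above stops at native verification. As an editorial sketch (not part of the patch), the same proof could also be wrapped in a plonky2 recursive circuit through the `recursive_verifier` API reworked by this change; `FibonacciStark`, `CONFIG` and the `F`/`C`/`S`/`D` aliases are assumed to be the ones from the example, and the two trailing zeros passed to `add_virtual_stark_proof_with_pis` reflect a single-table STARK with no cross-table lookups:

```rust
use anyhow::Result;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use starky::proof::StarkProofWithPublicInputs;
use starky::recursive_verifier::{
    add_virtual_stark_proof_with_pis, set_stark_proof_with_pis_target, verify_stark_proof_circuit,
};

// Hypothetical wrapper; relies on `FibonacciStark` being `Copy`.
fn wrap_in_plonky2_circuit(stark: S, proof: StarkProofWithPublicInputs<F, C, D>) -> Result<()> {
    let mut builder = CircuitBuilder::<F, D>::new(CircuitConfig::standard_recursion_config());
    let zero = builder.zero();

    // Allocate targets for the STARK proof; no CTL helper polynomials and
    // no CTL `Z` openings are needed for a single table.
    let degree_bits = proof.proof.recover_degree_bits(&CONFIG);
    let proof_target =
        add_virtual_stark_proof_with_pis(&mut builder, &stark, &CONFIG, degree_bits, 0, 0);
    verify_stark_proof_circuit::<F, C, S, D>(&mut builder, stark, proof_target.clone(), &CONFIG);

    // Feed the concrete proof into the witness and prove the wrapper circuit.
    let mut pw = PartialWitness::new();
    set_stark_proof_with_pis_target(&mut pw, &proof_target, &proof, zero);
    let data = builder.build::<C>();
    let wrapper_proof = data.prove(pw)?;
    data.verify(wrapper_proof)
}
```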
+
 #![allow(clippy::too_many_arguments)]
+#![allow(clippy::needless_range_loop)]
 #![allow(clippy::type_complexity)]
-#![allow(unused)] // TODO: Remove post code migration
+#![deny(rustdoc::broken_intra_doc_links)]
+#![deny(missing_debug_implementations)]
+#![deny(missing_docs)]
 #![cfg_attr(not(feature = "std"), no_std)]
 
+#[cfg(not(feature = "std"))]
 extern crate alloc;
 
 mod get_challenges;
 
 pub mod config;
 pub mod constraint_consumer;
+pub mod cross_table_lookup;
 pub mod evaluation_frame;
 pub mod lookup;
 pub mod proof;
@@ -17,7 +335,7 @@ pub mod recursive_verifier;
 pub mod stark;
 pub mod stark_testing;
 pub mod util;
-pub mod vanishing_poly;
+mod vanishing_poly;
 pub mod verifier;
 
 #[cfg(test)]
diff --git a/starky/src/lookup.rs b/starky/src/lookup.rs
index 19f20424..80a01b08 100644
--- a/starky/src/lookup.rs
+++ b/starky/src/lookup.rs
@@ -1,5 +1,8 @@
-use alloc::vec;
-use alloc::vec::Vec;
+//! A Lookup protocol leveraging logarithmic derivatives,
+//! introduced in <https://eprint.iacr.org/2022/1530.pdf>.
+
+#[cfg(not(feature = "std"))]
+use alloc::{vec, vec::Vec};
 use core::borrow::Borrow;
 use core::fmt::Debug;
 use core::iter::repeat;
@@ -37,6 +40,7 @@ pub struct Filter<F: Field> {
 }
 
 impl<F: Field> Filter<F> {
+    /// Returns a filter from the provided `products` and `constants` vectors.
     pub fn new(products: Vec<(Column<F>, Column<F>)>, constants: Vec<Column<F>>) -> Self {
         Self {
             products,
@@ -113,14 +117,6 @@ impl<F: Field> Filter<F> {
             .map(|col| col.eval_table(table, row))
             .sum()
     }
-
-    pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues<F>]) -> Vec<F> {
-        let length = table[0].len();
-
-        (0..length)
-            .map(|row| self.eval_table(table, row))
-            .collect::<Vec<F>>()
-    }
 }
 
 /// Represents two linear combinations of columns, corresponding to the current and next row values.
@@ -402,12 +398,24 @@ impl<F: Field> Column<F> {
 
 pub(crate) type ColumnFilter<'a, F> = (&'a [Column<F>], &'a Option<Filter<F>>);
 
+/// A [`Lookup`] defines a set of `columns` whose values should appear in a
+/// `table_column` (i.e. the lookup table associated to these looking columns),
+/// along with a `frequencies_column` indicating the frequency of each looking
+/// column in the looked table.
+///
+/// It also features a `filter_columns` vector, optionally adding at most one
+/// filter per looking column.
+///
+/// The lookup argument implemented here is based on logarithmic derivatives,
+/// a technique described with the whole lookup protocol in
+/// <https://eprint.iacr.org/2022/1530.pdf>.
+#[derive(Debug)]
 pub struct Lookup<F: Field> {
     /// Columns whose values should be contained in the lookup table.
     /// These are the f_i(x) polynomials in the logUp paper.
     pub columns: Vec<Column<F>>,
     /// Column containing the lookup table.
-    /// This is the t(x) polynomial in the paper.
+    /// This is the t(x) polynomial in the logUp paper.
     pub table_column: Column<F>,
     /// Column containing the frequencies of `columns` in `table_column`.
     /// This is the m(x) polynomial in the paper.
@@ -419,6 +427,7 @@ pub struct Lookup<F: Field> {
 }
 
 impl<F: Field> Lookup<F> {
+    /// Outputs the number of helper columns needed by this [`Lookup`].
    pub fn num_helper_columns(&self, constraint_degree: usize) -> usize {
        // One helper column for each column batch of size `constraint_degree-1`,
        // then one column for the inverse of `table + challenge` and one for the `Z` polynomial.
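For orientation (an editorial illustration, not part of the patch): with looking values `f_i`, table values `t` and multiplicities `m`, the logUp argument checks, for a random challenge `α`, that `Σ_i 1/(α + f_i(x))` summed over the trace equals `Σ_x m(x)/(α + t(x))`. A minimal sketch of declaring such a [`Lookup`] over three trace columns, using the public `Column::single` constructor from this module:

```rust
use plonky2::field::goldilocks_field::GoldilocksField;
use starky::lookup::{Column, Lookup};

type F = GoldilocksField;

// Column 0 is the looking column, column 1 holds the table t(x), and
// column 2 holds the frequencies m(x); the single looking column is unfiltered.
let lookup: Lookup<F> = Lookup {
    columns: vec![Column::single(0)],
    table_column: Column::single(1),
    frequencies_column: Column::single(2),
    filter_columns: vec![None],
};
// The helper-column count depends on the STARK's constraint degree.
let num_helpers = lookup.num_helper_columns(3);
```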
@@ -428,18 +437,18 @@ impl<F: Field> Lookup<F> {
 
 /// Randomness for a single instance of a permutation check protocol.
 #[derive(Copy, Clone, Eq, PartialEq, Debug)]
-pub(crate) struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
+pub struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
     /// Randomness used to combine multiple columns into one.
-    pub(crate) beta: T,
+    pub beta: T,
     /// Random offset that's added to the beta-reduced column values.
-    pub(crate) gamma: T,
+    pub gamma: T,
 }
 
 impl<F: Field> GrandProductChallenge<F> {
-    pub(crate) fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(
-        &self,
-        terms: T,
-    ) -> P
+    /// Combines a series of values `t_i` with these challenge random values.
+    /// In particular, given `beta` and `gamma` challenges, this will compute
+    /// `(Σ t_i * beta^i) + gamma`.
+    pub fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(&self, terms: T) -> P
     where
         FE: FieldExtension<D2, BaseField = F>,
         P: PackedField<Scalar = FE>,
@@ -462,7 +471,8 @@ impl<F: Field> GrandProductChallenge<F> {
 }
 
 impl GrandProductChallenge<Target> {
-    pub(crate) fn combine_base_circuit<F: RichField + Extendable<D>, const D: usize>(
+    /// Circuit version of `combine`.
+    pub fn combine_base_circuit<F: RichField + Extendable<D>, const D: usize>(
         &self,
         builder: &mut CircuitBuilder<F, D>,
         terms: &[Target],
@@ -475,11 +485,14 @@ impl GrandProductChallenge<Target> {
 
 /// Like `GrandProductChallenge`, but with `num_challenges` copies to boost soundness.
 #[derive(Clone, Eq, PartialEq, Debug)]
 pub struct GrandProductChallengeSet<T: Copy + Eq + PartialEq + Debug> {
-    pub(crate) challenges: Vec<GrandProductChallenge<T>>,
+    /// A sequence of `num_challenges` challenge pairs, where `num_challenges`
+    /// is defined in [`StarkConfig`][crate::config::StarkConfig].
+    pub challenges: Vec<GrandProductChallenge<T>>,
 }
 
 impl GrandProductChallengeSet<Target> {
-    pub(crate) fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
+    /// Serializes this `GrandProductChallengeSet` of `Target`s.
+    pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
         buffer.write_usize(self.challenges.len())?;
         for challenge in &self.challenges {
             buffer.write_target(challenge.beta)?;
@@ -488,7 +501,8 @@ impl GrandProductChallengeSet<Target> {
         Ok(())
     }
 
-    pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
+    /// Deserializes a `GrandProductChallengeSet` of `Target`s from the provided buffer.
+    pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
         let length = buffer.read_usize()?;
         let mut challenges = Vec::with_capacity(length);
         for _ in 0..length {
@@ -510,7 +524,9 @@ fn get_grand_product_challenge<F: RichField, H: Hasher<F>>(
     GrandProductChallenge { beta, gamma }
 }
 
-pub(crate) fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
+/// Generates a new `GrandProductChallengeSet` containing `num_challenges`
+/// pairs of challenges from the current `challenger` state.
+pub fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
     challenger: &mut Challenger<F, H>,
     num_challenges: usize,
 ) -> GrandProductChallengeSet<F> {
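As an editorial illustration of the `combine` formula documented above (not part of the patch; type inference for the base field case is an assumption here), with `beta = 5` and `gamma = 3`, the terms `[1, 2, 4]` reduce to `1·5⁰ + 2·5¹ + 4·5² + 3 = 114`:

```rust
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Field;
use starky::lookup::GrandProductChallenge;

// `beta` and `gamma` are now public fields, so the struct can be built directly.
let challenge = GrandProductChallenge {
    beta: F::from_canonical_u64(5),
    gamma: F::from_canonical_u64(3),
};
let terms = [F::ONE, F::TWO, F::from_canonical_u64(4)];
// (1 * 5^0) + (2 * 5^1) + (4 * 5^2) + 3 = 114
let combined: F = challenge.combine(terms.iter());
assert_eq!(combined, F::from_canonical_u64(114));
```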
@@ -533,7 +549,8 @@ fn get_grand_product_challenge_target<
     GrandProductChallenge { beta, gamma }
 }
 
-pub(crate) fn get_grand_product_challenge_set_target<
+/// Circuit version of `get_grand_product_challenge_set`.
+pub fn get_grand_product_challenge_set_target<
     F: RichField + Extendable<D>,
     H: AlgebraicHasher<F>,
     const D: usize,
 >(
@@ -570,7 +587,6 @@ pub(crate) fn lookup_helper_columns<F: Field>(
     assert!(BigUint::from(num_total_logup_entries) < F::characteristic());
 
     let num_helper_columns = lookup.num_helper_columns(constraint_degree);
-
     let mut helper_columns: Vec<PolynomialValues<F>> = Vec::with_capacity(num_helper_columns);
 
     let looking_cols = lookup
         .columns
@@ -762,7 +778,6 @@ pub(crate) fn get_helper_cols<F: Field>(
 
     let mut helper_columns = Vec::with_capacity(num_helper_columns);
 
-    let mut filter_index = 0;
     for mut cols_filts in &columns_filters.iter().chunks(constraint_degree - 1) {
         let (first_col, first_filter) = cols_filts.next().unwrap();
@@ -842,6 +857,7 @@ pub(crate) fn get_helper_cols<F: Field>(
     helper_columns
 }
 
+#[derive(Debug)]
 pub(crate) struct LookupCheckVars<F, FE, P, const D2: usize>
 where
     F: Field,
@@ -919,6 +935,7 @@ pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2
     }
 }
 
+#[derive(Debug)]
 pub(crate) struct LookupCheckVarsTarget<const D: usize> {
     pub(crate) local_values: Vec<ExtensionTarget<D>>,
     pub(crate) next_values: Vec<ExtensionTarget<D>>,
@@ -936,7 +953,6 @@ pub(crate) fn eval_ext_lookups_circuit<
     lookup_vars: LookupCheckVarsTarget<D>,
     yield_constr: &mut RecursiveConstraintConsumer<F, D>,
 ) {
-    let one = builder.one_extension();
     let degree = stark.constraint_degree();
     let lookups = stark.lookups();
 
diff --git a/starky/src/proof.rs b/starky/src/proof.rs
index e2239928..b6ea53ef 100644
--- a/starky/src/proof.rs
+++ b/starky/src/proof.rs
@@ -1,5 +1,9 @@
-use alloc::vec;
-use alloc::vec::Vec;
+//! All the different proof types and their associated `circuit` versions
+//! to be used when proving (recursive) [`Stark`][crate::stark::Stark]
+//! statements.
+
+#[cfg(not(feature = "std"))]
+use alloc::{vec, vec::Vec};
 
 use itertools::Itertools;
 use plonky2::field::extension::{Extendable, FieldExtension};
@@ -14,17 +18,19 @@ use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
 use plonky2::hash::merkle_tree::MerkleCap;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::iop::target::Target;
-use plonky2::plonk::config::GenericConfig;
+use plonky2::plonk::config::{GenericConfig, Hasher};
+use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
 use plonky2_maybe_rayon::*;
 
 use crate::config::StarkConfig;
 use crate::lookup::GrandProductChallengeSet;
 
+/// Merkle caps and openings that form the proof of a single STARK.
 #[derive(Debug, Clone)]
 pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
     /// Merkle cap of LDEs of trace values.
     pub trace_cap: MerkleCap<F, C::Hasher>,
-    /// Merkle cap of LDEs of permutation Z values.
+    /// Merkle cap of LDEs of permutation Z values, if any.
     pub auxiliary_polys_cap: Option<MerkleCap<F, C::Hasher>>,
     /// Merkle cap of LDEs of quotient polynomial evaluations.
     pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
@@ -46,15 +52,57 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> S
     }
 }
 
+/// Circuit version of [`StarkProof`].
+/// Merkle caps and openings that form the proof of a single STARK.
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct StarkProofTarget<const D: usize> {
+    /// `Target` for the Merkle cap of trace values LDEs.
     pub trace_cap: MerkleCapTarget,
+    /// Optional `Target` for the Merkle cap of lookup helper and CTL columns LDEs, if any.
     pub auxiliary_polys_cap: Option<MerkleCapTarget>,
+    /// `Target` for the Merkle cap of quotient polynomial evaluations LDEs.
     pub quotient_polys_cap: MerkleCapTarget,
+    /// `Target`s for the purported values of each polynomial at the challenge point.
     pub openings: StarkOpeningSetTarget<D>,
+    /// `Target`s for the batch FRI argument for all openings.
     pub opening_proof: FriProofTarget<D>,
 }
 
 impl<const D: usize> StarkProofTarget<D> {
+    /// Serializes a STARK proof.
+    pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
+        buffer.write_target_merkle_cap(&self.trace_cap)?;
+        buffer.write_bool(self.auxiliary_polys_cap.is_some())?;
+        if let Some(poly) = &self.auxiliary_polys_cap {
+            buffer.write_target_merkle_cap(poly)?;
+        }
+        buffer.write_target_merkle_cap(&self.quotient_polys_cap)?;
+        buffer.write_target_fri_proof(&self.opening_proof)?;
+        self.openings.to_buffer(buffer)?;
+        Ok(())
+    }
+
+    /// Deserializes a STARK proof.
+    pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
+        let trace_cap = buffer.read_target_merkle_cap()?;
+        let auxiliary_polys_cap = if buffer.read_bool()? {
+            Some(buffer.read_target_merkle_cap()?)
+        } else {
+            None
+        };
+        let quotient_polys_cap = buffer.read_target_merkle_cap()?;
+        let opening_proof = buffer.read_target_fri_proof()?;
+        let openings = StarkOpeningSetTarget::from_buffer(buffer)?;
+
+        Ok(Self {
+            trace_cap,
+            auxiliary_polys_cap,
+            quotient_polys_cap,
+            openings,
+            opening_proof,
+        })
+    }
+
     /// Recover the length of the trace from a STARK proof and a STARK config.
     pub fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
         let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
@@ -66,22 +114,31 @@ impl<const D: usize> StarkProofTarget<D> {
     }
 }
 
+/// Merkle caps and openings that form the proof of a single STARK, along with its public inputs.
 #[derive(Debug, Clone)]
 pub struct StarkProofWithPublicInputs<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     const D: usize,
 > {
+    /// A STARK proof.
     pub proof: StarkProof<F, C, D>,
+    /// Public inputs associated to this STARK proof.
     // TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`.
     pub public_inputs: Vec<F>,
 }
 
+/// Circuit version of [`StarkProofWithPublicInputs`].
+#[derive(Debug, Clone)]
 pub struct StarkProofWithPublicInputsTarget<const D: usize> {
+    /// `Target` STARK proof.
     pub proof: StarkProofTarget<D>,
+    /// `Target` public inputs for this STARK proof.
     pub public_inputs: Vec<Target>,
 }
 
+/// A compressed proof format of a single STARK.
+#[derive(Debug, Clone)]
 pub struct CompressedStarkProof<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     const D: usize,
 > {
@@ -95,69 +152,158 @@ pub struct CompressedStarkProof<
     pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
 }
 
+/// A compressed [`StarkProof`] format of a single STARK with its public inputs.
+#[derive(Debug, Clone)]
 pub struct CompressedStarkProofWithPublicInputs<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     const D: usize,
 > {
+    /// A compressed STARK proof.
     pub proof: CompressedStarkProof<F, C, D>,
+    /// Public inputs for this compressed STARK proof.
     pub public_inputs: Vec<F>,
 }
 
-pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
-    /// Randomness used in any permutation arguments.
-    pub lookup_challenge_set: Option<GrandProductChallengeSet<F>>,
+/// A [`StarkProof`] along with metadata about the initial Fiat-Shamir state, which is used when
+/// creating a recursive wrapper proof around a STARK proof.
+#[derive(Debug, Clone)]
+pub struct StarkProofWithMetadata<F, C, const D: usize>
+where
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+{
+    /// Initial Fiat-Shamir state.
+    pub init_challenger_state: <C::Hasher as Hasher<F>>::Permutation,
+    /// Proof for a single STARK.
+    pub proof: StarkProof<F, C, D>,
+}
+
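The `to_buffer`/`from_buffer` pair above is symmetric, so a round-trip through a byte buffer should be the identity. An editorial sketch (not part of the patch), assuming a hypothetical `proof_target: StarkProofTarget<D>` previously built with `add_virtual_stark_proof`:

```rust
use plonky2::util::serialization::Buffer;

// Serialize the proof targets into a fresh byte vector.
let mut bytes = Vec::new();
proof_target.to_buffer(&mut bytes)?;

// Read them back from the same bytes.
let mut buffer = Buffer::new(&bytes);
let recovered = StarkProofTarget::<D>::from_buffer(&mut buffer)?;

// `StarkProofTarget` now derives `PartialEq`, so the round-trip can be checked directly.
assert_eq!(proof_target, recovered);
```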
+/// A combination of STARK proofs for independent statements operating on possibly shared variables,
+/// along with Cross-Table Lookup (CTL) challenges to assert consistency of common variables across tables.
+#[derive(Debug, Clone)]
+pub struct MultiProof<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    const D: usize,
+    const N: usize,
+> {
+    /// Proofs for all the different STARK modules.
+    pub stark_proofs: [StarkProofWithMetadata<F, C, D>; N],
+    /// Cross-table lookup challenges.
+    pub ctl_challenges: GrandProductChallengeSet<F>,
+}
+
+impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize, const N: usize>
+    MultiProof<F, C, D, N>
+{
+    /// Returns the degree (i.e. the trace length) of each STARK proof,
+    /// from their common [`StarkConfig`].
+    pub fn recover_degree_bits(&self, config: &StarkConfig) -> [usize; N] {
+        core::array::from_fn(|i| self.stark_proofs[i].proof.recover_degree_bits(config))
+    }
+}
+
+/// Randomness used for a STARK proof.
+#[derive(Debug)]
+pub struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
+    /// Optional randomness used in any permutation argument.
+    pub lookup_challenge_set: Option<GrandProductChallengeSet<F>>,
     /// Random values used to combine STARK constraints.
     pub stark_alphas: Vec<F>,
-
     /// Point at which the STARK polynomials are opened.
     pub stark_zeta: F::Extension,
-
+    /// Randomness used in FRI.
     pub fri_challenges: FriChallenges<F, D>,
 }
 
-pub(crate) struct StarkProofChallengesTarget<const D: usize> {
+/// Circuit version of [`StarkProofChallenges`].
+#[derive(Debug)]
+pub struct StarkProofChallengesTarget<const D: usize> {
+    /// Optional `Target`s for the randomness used in any permutation argument.
     pub lookup_challenge_set: Option<GrandProductChallengeSet<Target>>,
+    /// `Target`s for the random values used to combine STARK constraints.
     pub stark_alphas: Vec<Target>,
+    /// `ExtensionTarget` for the point at which the STARK polynomials are opened.
     pub stark_zeta: ExtensionTarget<D>,
+    /// `Target`s for the randomness used in FRI.
     pub fri_challenges: FriChallengesTarget<D>,
 }
 
+/// Randomness for all STARK proofs contained in a [`MultiProof`].
+#[derive(Debug)]
+pub struct MultiProofChallenges<F: RichField + Extendable<D>, const D: usize, const N: usize> {
+    /// Randomness used in each STARK proof.
+    pub stark_challenges: [StarkProofChallenges<F, D>; N],
+    /// Randomness used for cross-table lookups. It is shared by all STARKs.
+    pub ctl_challenges: GrandProductChallengeSet<F>,
+}
+
 /// Purported values of each polynomial at the challenge point.
 #[derive(Debug, Clone)]
 pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
+    /// Openings of trace polynomials at `zeta`.
     pub local_values: Vec<F::Extension>,
+    /// Openings of trace polynomials at `g * zeta`.
     pub next_values: Vec<F::Extension>,
+    /// Openings of lookups and cross-table lookups `Z` polynomials at `zeta`.
     pub auxiliary_polys: Option<Vec<F::Extension>>,
+    /// Openings of lookups and cross-table lookups `Z` polynomials at `g * zeta`.
     pub auxiliary_polys_next: Option<Vec<F::Extension>>,
+    /// Openings of cross-table lookups `Z` polynomials at `1`.
+    pub ctl_zs_first: Option<Vec<F>>,
+    /// Openings of quotient polynomials at `zeta`.
     pub quotient_polys: Vec<F::Extension>,
 }
 
 impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
+    /// Returns a `StarkOpeningSet` given all the polynomial commitments, the number
+    /// of permutation `Z` polynomials, the evaluation point and a generator `g`.
+    ///
+    /// Polynomials are evaluated at point `zeta` and, if necessary, at `g * zeta`.
     pub fn new<C: GenericConfig<D, F = F>>(
         zeta: F::Extension,
         g: F,
         trace_commitment: &PolynomialBatch<F, C, D>,
         auxiliary_polys_commitment: Option<&PolynomialBatch<F, C, D>>,
         quotient_commitment: &PolynomialBatch<F, C, D>,
+        num_lookup_columns: usize,
+        requires_ctl: bool,
+        num_ctl_polys: &[usize],
     ) -> Self {
+        // Batch evaluates polynomials on the LDE, at a point `z`.
         let eval_commitment = |z: F::Extension, c: &PolynomialBatch<F, C, D>| {
             c.polynomials
                 .par_iter()
                 .map(|p| p.to_extension().eval(z))
                 .collect::<Vec<_>>()
         };
+        // Batch evaluates polynomials at a base field point `z`.
+ let eval_commitment_base = |z: F, c: &PolynomialBatch| { + c.polynomials + .par_iter() + .map(|p| p.eval(z)) + .collect::>() + }; + + let auxiliary_first = auxiliary_polys_commitment.map(|c| eval_commitment_base(F::ONE, c)); + // `g * zeta`. let zeta_next = zeta.scalar_mul(g); Self { local_values: eval_commitment(zeta, trace_commitment), next_values: eval_commitment(zeta_next, trace_commitment), auxiliary_polys: auxiliary_polys_commitment.map(|c| eval_commitment(zeta, c)), auxiliary_polys_next: auxiliary_polys_commitment.map(|c| eval_commitment(zeta_next, c)), + ctl_zs_first: requires_ctl.then(|| { + let total_num_helper_cols: usize = num_ctl_polys.iter().sum(); + auxiliary_first.unwrap()[num_lookup_columns + total_num_helper_cols..].to_vec() + }), quotient_polys: eval_commitment(zeta, quotient_commitment), } } + /// Constructs the openings required by FRI. + /// All openings but `ctl_zs_first` are grouped together. pub(crate) fn to_fri_openings(&self) -> FriOpenings { let zeta_batch = FriOpeningBatch { values: self @@ -176,22 +322,107 @@ impl, const D: usize> StarkOpeningSet { .copied() .collect_vec(), }; - FriOpenings { - batches: vec![zeta_batch, zeta_next_batch], + + let mut batches = vec![zeta_batch, zeta_next_batch]; + + if let Some(ctl_zs_first) = self.ctl_zs_first.as_ref() { + debug_assert!(!ctl_zs_first.is_empty()); + debug_assert!(self.auxiliary_polys.is_some()); + debug_assert!(self.auxiliary_polys_next.is_some()); + + let ctl_first_batch = FriOpeningBatch { + values: ctl_zs_first + .iter() + .copied() + .map(F::Extension::from_basefield) + .collect(), + }; + + batches.push(ctl_first_batch); } + + FriOpenings { batches } } } +/// Circuit version of [`StarkOpeningSet`]. +/// `Target`s for the purported values of each polynomial at the challenge point. +#[derive(Clone, Debug, PartialEq, Eq)] pub struct StarkOpeningSetTarget { + /// `ExtensionTarget`s for the openings of trace polynomials at `zeta`. pub local_values: Vec>, + /// `ExtensionTarget`s for the opening of trace polynomials at `g * zeta`. pub next_values: Vec>, + /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `zeta`. pub auxiliary_polys: Option>>, + /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `g * zeta`. pub auxiliary_polys_next: Option>>, + /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at 1. + pub ctl_zs_first: Option>, + /// `ExtensionTarget`s for the opening of quotient polynomials at `zeta`. pub quotient_polys: Vec>, } impl StarkOpeningSetTarget { - pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget { + /// Serializes a STARK's opening set. + pub(crate) fn to_buffer(&self, buffer: &mut Vec) -> IoResult<()> { + buffer.write_target_ext_vec(&self.local_values)?; + buffer.write_target_ext_vec(&self.next_values)?; + if let Some(poly) = &self.auxiliary_polys { + buffer.write_bool(true)?; + buffer.write_target_ext_vec(poly)?; + } else { + buffer.write_bool(false)?; + } + if let Some(poly_next) = &self.auxiliary_polys_next { + buffer.write_bool(true)?; + buffer.write_target_ext_vec(poly_next)?; + } else { + buffer.write_bool(false)?; + } + if let Some(ctl_zs_first) = &self.ctl_zs_first { + buffer.write_bool(true)?; + buffer.write_target_vec(ctl_zs_first)?; + } else { + buffer.write_bool(false)?; + } + buffer.write_target_ext_vec(&self.quotient_polys)?; + Ok(()) + } + + /// Deserializes a STARK's opening set. 
+ pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult { + let local_values = buffer.read_target_ext_vec::()?; + let next_values = buffer.read_target_ext_vec::()?; + let auxiliary_polys = if buffer.read_bool()? { + Some(buffer.read_target_ext_vec::()?) + } else { + None + }; + let auxiliary_polys_next = if buffer.read_bool()? { + Some(buffer.read_target_ext_vec::()?) + } else { + None + }; + let ctl_zs_first = if buffer.read_bool()? { + Some(buffer.read_target_vec()?) + } else { + None + }; + let quotient_polys = buffer.read_target_ext_vec::()?; + + Ok(Self { + local_values, + next_values, + auxiliary_polys, + auxiliary_polys_next, + ctl_zs_first, + quotient_polys, + }) + } + + /// Circuit version of `to_fri_openings`for [`FriOpeningsTarget`]. + pub(crate) fn to_fri_openings(&self, zero: Target) -> FriOpeningsTarget { let zeta_batch = FriOpeningBatchTarget { values: self .local_values @@ -209,8 +440,24 @@ impl StarkOpeningSetTarget { .copied() .collect_vec(), }; - FriOpeningsTarget { - batches: vec![zeta_batch, zeta_next_batch], + + let mut batches = vec![zeta_batch, zeta_next_batch]; + + if let Some(ctl_zs_first) = self.ctl_zs_first.as_ref() { + debug_assert!(!ctl_zs_first.is_empty()); + debug_assert!(self.auxiliary_polys.is_some()); + debug_assert!(self.auxiliary_polys_next.is_some()); + + let ctl_first_batch = FriOpeningBatchTarget { + values: ctl_zs_first + .iter() + .copied() + .map(|t| t.to_ext_target(zero)) + .collect(), + }; + + batches.push(ctl_first_batch); } + FriOpeningsTarget { batches } } } diff --git a/starky/src/prover.rs b/starky/src/prover.rs index f9b40217..7014bdd3 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -1,3 +1,6 @@ +//! Implementation of the STARK prover. + +#[cfg(not(feature = "std"))] use alloc::vec::Vec; use core::iter::once; @@ -20,15 +23,17 @@ use plonky2_maybe_rayon::*; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; +use crate::cross_table_lookup::{get_ctl_auxiliary_polys, CtlCheckVars, CtlData}; use crate::evaluation_frame::StarkEvaluationFrame; use crate::lookup::{ - get_grand_product_challenge_set, lookup_helper_columns, Lookup, LookupCheckVars, + get_grand_product_challenge_set, lookup_helper_columns, GrandProductChallengeSet, Lookup, + LookupCheckVars, }; use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs}; use crate::stark::Stark; use crate::vanishing_poly::eval_vanishing_poly; -#[allow(clippy::useless_asref)] +/// From a STARK trace, computes a STARK proof to attest its correctness. pub fn prove( stark: S, config: &StarkConfig, @@ -68,54 +73,120 @@ where let mut challenger = Challenger::new(); challenger.observe_cap(&trace_cap); - // Lookup argument. + prove_with_commitment( + &stark, + config, + &trace_poly_values, + &trace_commitment, + None, + None, + &mut challenger, + public_inputs, + timing, + ) +} + +/// Generates a proof for a single STARK table, including: +/// +/// - the initial state of the challenger, +/// - all the required Merkle caps, +/// - all the required polynomial and FRI argument openings. +/// - individual `ctl_data` and common `ctl_challenges` if the STARK is part +/// of a multi-STARK system. 
+pub fn prove_with_commitment( + stark: &S, + config: &StarkConfig, + trace_poly_values: &[PolynomialValues], + trace_commitment: &PolynomialBatch, + ctl_data: Option<&CtlData>, + ctl_challenges: Option<&GrandProductChallengeSet>, + challenger: &mut Challenger, + public_inputs: &[F], + timing: &mut TimingTree, +) -> Result> +where + F: RichField + Extendable, + C: GenericConfig, + S: Stark, +{ + let degree = trace_poly_values[0].len(); + let degree_bits = log2_strict(degree); + let fri_params = config.fri_params(degree_bits); + let rate_bits = config.fri_config.rate_bits; + let cap_height = config.fri_config.cap_height; + assert!( + fri_params.total_arities() <= degree_bits + rate_bits - cap_height, + "FRI total reduction arity is too large.", + ); + + // Permutation arguments. + let constraint_degree = stark.constraint_degree(); - let lookups = stark.lookups(); let lookup_challenges = stark.uses_lookups().then(|| { - get_grand_product_challenge_set(&mut challenger, config.num_challenges) - .challenges - .iter() - .map(|ch| ch.beta) - .collect::>() + if let Some(c) = ctl_challenges { + c.challenges.iter().map(|ch| ch.beta).collect::>() + } else { + get_grand_product_challenge_set(challenger, config.num_challenges) + .challenges + .iter() + .map(|ch| ch.beta) + .collect::>() + } }); - let num_lookup_columns = lookups - .iter() - .map(|l| l.num_helper_columns(constraint_degree)) - .sum(); - - let auxiliary_polys_commitment = stark.uses_lookups().then(|| { - let lookup_helper_columns = timed!(timing, "compute lookup helper columns", { - let challenges = lookup_challenges.as_ref().expect("We do have challenges."); - let mut columns = Vec::with_capacity(num_lookup_columns); + let lookups = stark.lookups(); + let lookup_helper_columns = timed!( + timing, + "compute lookup helper columns", + lookup_challenges.as_ref().map(|challenges| { + let mut columns = Vec::new(); for lookup in &lookups { for &challenge in challenges { columns.extend(lookup_helper_columns( lookup, - &trace_poly_values, + trace_poly_values, challenge, constraint_degree, )); } } columns - }); + }) + ); + let num_lookup_columns = lookup_helper_columns.as_ref().map_or(0, |v| v.len()); - // Get the polynomial commitments for all auxiliary polynomials. - let auxiliary_polys_commitment = timed!( + // We add CTLs, if there are any, to the permutation arguments so that + // we can batch commit to all auxiliary polynomials. + let auxiliary_polys = match lookup_helper_columns { + None => get_ctl_auxiliary_polys(ctl_data), + Some(mut lookup_columns) => { + if let Some(p) = get_ctl_auxiliary_polys(ctl_data) { + lookup_columns.extend(p) + }; + + Some(lookup_columns) + } + }; + + debug_assert!( + (stark.uses_lookups() || stark.requires_ctls()) || auxiliary_polys.is_none(), + "There should be auxiliary polynomials if and only if we have either lookups or require cross-table lookups." + ); + + // Get the polynomial commitments for all auxiliary polynomials. 
+ let auxiliary_polys_commitment = auxiliary_polys.map(|aux_polys| { + timed!( timing, - "compute permutation Z commitments", + "compute auxiliary polynomials commitment", PolynomialBatch::from_values( - lookup_helper_columns, + aux_polys, rate_bits, false, config.fri_config.cap_height, timing, None, ) - ); - - auxiliary_polys_commitment + ) }); let auxiliary_polys_cap = auxiliary_polys_commitment @@ -127,18 +198,25 @@ where let alphas = challenger.get_n_challenges(config.num_challenges); - #[cfg(test)] + let num_ctl_polys = ctl_data + .map(|data| data.num_ctl_helper_polys()) + .unwrap_or_default(); + + // This is an expensive check, hence is only run when `debug_assertions` are enabled. + #[cfg(debug_assertions)] { check_constraints( - &stark, - &trace_commitment, + stark, + trace_commitment, public_inputs, &auxiliary_polys_commitment, lookup_challenges.as_ref(), &lookups, + ctl_data, alphas.clone(), degree_bits, num_lookup_columns, + &num_ctl_polys, ); } @@ -146,19 +224,20 @@ where timing, "compute quotient polys", compute_quotient_polys::::Packing, C, S, D>( - &stark, - &trace_commitment, + stark, + trace_commitment, &auxiliary_polys_commitment, lookup_challenges.as_ref(), &lookups, + ctl_data, public_inputs, - alphas, + alphas.clone(), degree_bits, num_lookup_columns, + &num_ctl_polys, config, ) ); - let all_quotient_chunks = timed!( timing, "split quotient polys", @@ -175,7 +254,7 @@ where }) .collect() ); - + // Commit to the quotient polynomials. let quotient_commitment = timed!( timing, "compute quotient commitment", @@ -188,12 +267,12 @@ where None, ) ); - // Observe the quotient polynomials Merkle cap. let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone(); challenger.observe_cap("ient_polys_cap); let zeta = challenger.get_extension_challenge::(); + // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and // `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since // `(g * zeta)^n = zeta^n`, where `n` is the order of `g`. @@ -207,15 +286,17 @@ where let openings = StarkOpeningSet::new( zeta, g, - &trace_commitment, + trace_commitment, auxiliary_polys_commitment.as_ref(), "ient_commitment, + stark.num_lookup_helper_columns(config), + stark.requires_ctls(), + &num_ctl_polys, ); - // Get the FRI openings and observe them. challenger.observe_openings(&openings.to_fri_openings()); - let initial_merkle_trees = once(&trace_commitment) + let initial_merkle_trees = once(trace_commitment) .chain(&auxiliary_polys_commitment) .chain(once("ient_commitment)) .collect_vec(); @@ -224,15 +305,16 @@ where timing, "compute openings proof", PolynomialBatch::prove_openings( - &stark.fri_instance(zeta, g, config), + &stark.fri_instance(zeta, g, num_ctl_polys.iter().sum(), num_ctl_polys, config), &initial_merkle_trees, - &mut challenger, + challenger, &fri_params, timing, ) ); + let proof = StarkProof { - trace_cap, + trace_cap: trace_commitment.merkle_tree.cap.clone(), auxiliary_polys_cap, quotient_polys_cap, openings, @@ -246,17 +328,19 @@ where } /// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`, -/// where the `C_i`s are the Stark constraints. +/// where the `C_i`s are the STARK constraints. 
fn compute_quotient_polys<'a, F, P, C, S, const D: usize>( stark: &S, trace_commitment: &'a PolynomialBatch, auxiliary_polys_commitment: &'a Option>, lookup_challenges: Option<&'a Vec>, lookups: &[Lookup], + ctl_data: Option<&CtlData>, public_inputs: &[F], alphas: Vec, degree_bits: usize, num_lookup_columns: usize, + num_ctl_columns: &[usize], config: &StarkConfig, ) -> Vec> where @@ -267,6 +351,7 @@ where { let degree = 1 << degree_bits; let rate_bits = config.fri_config.rate_bits; + let total_num_helper_cols: usize = num_ctl_columns.iter().sum(); let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor()); assert!( @@ -331,15 +416,62 @@ where local_values: auxiliary_polys_commitment .as_ref() .unwrap() - .get_lde_values_packed(i_start, step) + .get_lde_values_packed(i_start, step)[..num_lookup_columns] .to_vec(), next_values: auxiliary_polys_commitment .as_ref() .unwrap() - .get_lde_values_packed(i_next_start, step), + .get_lde_values_packed(i_next_start, step)[..num_lookup_columns] + .to_vec(), challenges: challenges.to_vec(), }); + // Get all the data for this STARK's CTLs, if any: + // - the local and next row evaluations for the CTL Z polynomials + // - the associated challenges. + // - for each CTL: + // - the filter `Column` + // - the `Column`s that form the looking/looked table. + + let ctl_vars = ctl_data.map(|data| { + let mut start_index = 0; + data.zs_columns + .iter() + .enumerate() + .map(|(i, zs_columns)| { + let num_ctl_helper_cols = num_ctl_columns[i]; + let helper_columns = auxiliary_polys_commitment + .as_ref() + .unwrap() + .get_lde_values_packed(i_start, step) + [num_lookup_columns + start_index + ..num_lookup_columns + start_index + num_ctl_helper_cols] + .to_vec(); + + let ctl_vars = CtlCheckVars:: { + helper_columns, + local_z: auxiliary_polys_commitment + .as_ref() + .unwrap() + .get_lde_values_packed(i_start, step) + [num_lookup_columns + total_num_helper_cols + i], + next_z: auxiliary_polys_commitment + .as_ref() + .unwrap() + .get_lde_values_packed(i_next_start, step) + [num_lookup_columns + total_num_helper_cols + i], + challenges: zs_columns.challenge, + columns: zs_columns.columns.clone(), + filter: zs_columns.filter.clone(), + }; + + start_index += num_ctl_helper_cols; + + ctl_vars + }) + .collect::>() + }); + // Evaluate the polynomial combining all constraints, including // those associated to the permutation arguments. eval_vanishing_poly::( @@ -347,6 +479,7 @@ where &vars, lookups, lookup_vars, + ctl_vars.as_deref(), &mut consumer, ); @@ -375,9 +508,15 @@ where .collect() } -#[cfg(test)] /// Check that all constraints evaluate to zero on `H`. /// Can also be used to check the degree of the constraints by evaluating on a larger subgroup. +/// +/// Debugging module, to assert that all constraints evaluate to zero on `H`. +/// It can also be used to check the degree of the constraints by evaluating on a larger subgroup. +/// +/// **Note**: this is an expensive check, hence is only available when the `debug_assertions` +/// flag is activated, to not hinder performances with regular `release` build. 
+#[cfg(debug_assertions)] fn check_constraints<'a, F, C, S, const D: usize>( stark: &S, trace_commitment: &'a PolynomialBatch, @@ -385,9 +524,11 @@ fn check_constraints<'a, F, C, S, const D: usize>( auxiliary_commitment: &'a Option>, lookup_challenges: Option<&'a Vec>, lookups: &[Lookup], + ctl_data: Option<&CtlData>, alphas: Vec, degree_bits: usize, num_lookup_columns: usize, + num_ctl_helper_cols: &[usize], ) where F: RichField + Extendable, C: GenericConfig, @@ -395,6 +536,7 @@ fn check_constraints<'a, F, C, S, const D: usize>( { let degree = 1 << degree_bits; let rate_bits = 0; // Set this to higher value to check constraint degree. + let total_num_helper_cols: usize = num_ctl_helper_cols.iter().sum(); let size = degree << rate_bits; let step = 1 << rate_bits; @@ -446,11 +588,44 @@ fn check_constraints<'a, F, C, S, const D: usize>( ); // Get the local and next row evaluations for the current STARK's permutation argument. let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars { - local_values: auxiliary_subgroup_evals.as_ref().unwrap()[i].clone(), - next_values: auxiliary_subgroup_evals.as_ref().unwrap()[i_next].clone(), + local_values: auxiliary_subgroup_evals.as_ref().unwrap()[i][..num_lookup_columns] + .to_vec(), + next_values: auxiliary_subgroup_evals.as_ref().unwrap()[i_next] + [..num_lookup_columns] + .to_vec(), challenges: challenges.to_vec(), }); + // Get the local and next row evaluations for the current STARK's CTL Z polynomials. + let mut start_index = 0; + let ctl_vars = ctl_data.map(|data| { + data.zs_columns + .iter() + .enumerate() + .map(|(iii, zs_columns)| { + let num_helper_cols = num_ctl_helper_cols[iii]; + let helper_columns = auxiliary_subgroup_evals.as_ref().unwrap()[i] + [num_lookup_columns + start_index + ..num_lookup_columns + start_index + num_helper_cols] + .to_vec(); + let ctl_vars = CtlCheckVars:: { + helper_columns, + local_z: auxiliary_subgroup_evals.as_ref().unwrap()[i] + [num_lookup_columns + total_num_helper_cols + iii], + next_z: auxiliary_subgroup_evals.as_ref().unwrap()[i_next] + [num_lookup_columns + total_num_helper_cols + iii], + challenges: zs_columns.challenge, + columns: zs_columns.columns.clone(), + filter: zs_columns.filter.clone(), + }; + + start_index += num_helper_cols; + + ctl_vars + }) + .collect::>() + }); + // Evaluate the polynomial combining all constraints, including those associated // to the permutation arguments. eval_vanishing_poly::( @@ -458,6 +633,7 @@ fn check_constraints<'a, F, C, S, const D: usize>( &vars, lookups, lookup_vars, + ctl_vars.as_deref(), &mut consumer, ); consumer.accumulators() diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index e91583f1..9bc62e6b 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -1,4 +1,7 @@ -use alloc::vec; +//! Implementation of the STARK recursive verifier, i.e. where proof +//! verification if encoded in a plonky2 circuit. 
+ +#[cfg(not(feature = "std"))] use alloc::vec::Vec; use core::iter::once; @@ -8,7 +11,9 @@ use plonky2::field::extension::Extendable; use plonky2::field::types::Field; use plonky2::fri::witness_util::set_fri_proof_target; use plonky2::hash::hash_types::RichField; +use plonky2::iop::challenger::RecursiveChallenger; use plonky2::iop::ext_target::ExtensionTarget; +use plonky2::iop::target::Target; use plonky2::iop::witness::Witness; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; @@ -17,6 +22,7 @@ use plonky2::with_context; use crate::config::StarkConfig; use crate::constraint_consumer::RecursiveConstraintConsumer; +use crate::cross_table_lookup::CtlCheckVarsTarget; use crate::evaluation_frame::StarkEvaluationFrame; use crate::lookup::LookupCheckVarsTarget; use crate::proof::{ @@ -26,6 +32,8 @@ use crate::proof::{ use crate::stark::Stark; use crate::vanishing_poly::eval_vanishing_poly_circuit; +/// Encodes the verification of a [`StarkProofWithPublicInputsTarget`] +/// for some statement in a circuit. pub fn verify_stark_proof_circuit< F: RichField + Extendable, C: GenericConfig, @@ -40,51 +48,57 @@ pub fn verify_stark_proof_circuit< C::Hasher: AlgebraicHasher, { assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS); - let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config); + + let mut challenger = RecursiveChallenger::::new(builder); let challenges = with_context!( builder, "compute challenges", - proof_with_pis.get_challenges::(builder, inner_config) + proof_with_pis.get_challenges::(builder, &mut challenger, None, false, inner_config) ); verify_stark_proof_with_challenges_circuit::( builder, - stark, - proof_with_pis, + &stark, + &proof_with_pis.proof, + &proof_with_pis.public_inputs, challenges, + None, inner_config, - degree_bits, ); } -/// Recursively verifies an inner proof. -fn verify_stark_proof_with_challenges_circuit< +/// Recursively verifies an inner STARK proof. 
+pub fn verify_stark_proof_with_challenges_circuit< F: RichField + Extendable, C: GenericConfig, S: Stark, const D: usize, >( builder: &mut CircuitBuilder, - stark: S, - proof_with_pis: StarkProofWithPublicInputsTarget, + stark: &S, + proof: &StarkProofTarget, + public_inputs: &[Target], challenges: StarkProofChallengesTarget, + ctl_vars: Option<&[CtlCheckVarsTarget]>, inner_config: &StarkConfig, - degree_bits: usize, ) where C::Hasher: AlgebraicHasher, { - check_lookup_options(&stark, &proof_with_pis, &challenges).unwrap(); + check_lookup_options(stark, proof, &challenges).unwrap(); + + let zero = builder.zero(); let one = builder.one_extension(); - let StarkProofWithPublicInputsTarget { - proof, - public_inputs, - } = proof_with_pis; + let num_ctl_polys = ctl_vars + .map(|v| v.iter().map(|ctl| ctl.helper_columns.len()).sum::()) + .unwrap_or_default(); + let StarkOpeningSetTarget { local_values, next_values, auxiliary_polys, auxiliary_polys_next, + ctl_zs_first, quotient_polys, } = &proof.openings; @@ -92,11 +106,12 @@ fn verify_stark_proof_with_challenges_circuit< local_values, next_values, &public_inputs - .into_iter() - .map(|t| builder.convert_to_ext(t)) + .iter() + .map(|&t| builder.convert_to_ext(t)) .collect::>(), ); + let degree_bits = proof.recover_degree_bits(inner_config); let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits); let z_h_zeta = builder.sub_extension(zeta_pow_deg, one); let (l_0, l_last) = @@ -117,6 +132,7 @@ fn verify_stark_proof_with_challenges_circuit< let lookup_challenges = stark.uses_lookups().then(|| { challenges .lookup_challenge_set + .as_ref() .unwrap() .challenges .iter() @@ -133,7 +149,14 @@ fn verify_stark_proof_with_challenges_circuit< with_context!( builder, "evaluate vanishing polynomial", - eval_vanishing_poly_circuit::(builder, &stark, &vars, lookup_vars, &mut consumer) + eval_vanishing_poly_circuit::( + builder, + stark, + &vars, + lookup_vars, + ctl_vars, + &mut consumer + ) ); let vanishing_polys_zeta = consumer.accumulators(); @@ -148,20 +171,22 @@ fn verify_stark_proof_with_challenges_circuit< builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly); } - let merkle_caps = once(proof.trace_cap) - .chain(proof.auxiliary_polys_cap) - .chain(once(proof.quotient_polys_cap)) + let merkle_caps = once(proof.trace_cap.clone()) + .chain(proof.auxiliary_polys_cap.clone()) + .chain(once(proof.quotient_polys_cap.clone())) .collect_vec(); let fri_instance = stark.fri_instance_target( builder, challenges.stark_zeta, F::primitive_root_of_unity(degree_bits), + num_ctl_polys, + ctl_zs_first.as_ref().map_or(0, |c| c.len()), inner_config, ); builder.verify_fri_proof::( &fri_instance, - &proof.openings.to_fri_openings(), + &proof.openings.to_fri_openings(zero), &challenges.fri_challenges, &merkle_caps, &proof.opening_proof, @@ -188,17 +213,27 @@ fn eval_l_0_and_l_last_circuit, const D: usize>( ) } +/// Adds a new `StarkProofWithPublicInputsTarget` to this circuit. 
pub fn add_virtual_stark_proof_with_pis< F: RichField + Extendable, S: Stark, const D: usize, >( builder: &mut CircuitBuilder, - stark: S, + stark: &S, config: &StarkConfig, degree_bits: usize, + num_ctl_helper_zs: usize, + num_ctl_zs: usize, ) -> StarkProofWithPublicInputsTarget { - let proof = add_virtual_stark_proof::(builder, stark, config, degree_bits); + let proof = add_virtual_stark_proof::( + builder, + stark, + config, + degree_bits, + num_ctl_helper_zs, + num_ctl_zs, + ); let public_inputs = builder.add_virtual_targets(S::PUBLIC_INPUTS); StarkProofWithPublicInputsTarget { proof, @@ -206,58 +241,79 @@ pub fn add_virtual_stark_proof_with_pis< } } +/// Adds a new `StarkProofTarget` to this circuit. pub fn add_virtual_stark_proof, S: Stark, const D: usize>( builder: &mut CircuitBuilder, - stark: S, + stark: &S, config: &StarkConfig, degree_bits: usize, + num_ctl_helper_zs: usize, + num_ctl_zs: usize, ) -> StarkProofTarget { let fri_params = config.fri_params(degree_bits); let cap_height = fri_params.config.cap_height; - let num_leaves_per_oracle = vec![ - S::COLUMNS, - stark.num_lookup_helper_columns(config), - stark.quotient_degree_factor() * config.num_challenges, - ]; + let num_leaves_per_oracle = once(S::COLUMNS) + .chain( + (stark.uses_lookups() || stark.requires_ctls()) + .then(|| stark.num_lookup_helper_columns(config) + num_ctl_helper_zs), + ) + .chain(once(stark.quotient_degree_factor() * config.num_challenges)) + .collect_vec(); - let auxiliary_polys_cap = stark - .uses_lookups() + let auxiliary_polys_cap = (stark.uses_lookups() || stark.requires_ctls()) .then(|| builder.add_virtual_cap(cap_height)); StarkProofTarget { trace_cap: builder.add_virtual_cap(cap_height), auxiliary_polys_cap, quotient_polys_cap: builder.add_virtual_cap(cap_height), - openings: add_stark_opening_set_target::(builder, stark, config), + openings: add_virtual_stark_opening_set::( + builder, + stark, + num_ctl_helper_zs, + num_ctl_zs, + config, + ), opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params), } } -fn add_stark_opening_set_target, S: Stark, const D: usize>( +fn add_virtual_stark_opening_set, S: Stark, const D: usize>( builder: &mut CircuitBuilder, - stark: S, + stark: &S, + num_ctl_helper_zs: usize, + num_ctl_zs: usize, config: &StarkConfig, ) -> StarkOpeningSetTarget { - let num_challenges = config.num_challenges; StarkOpeningSetTarget { local_values: builder.add_virtual_extension_targets(S::COLUMNS), next_values: builder.add_virtual_extension_targets(S::COLUMNS), - auxiliary_polys: stark.uses_lookups().then(|| { - builder.add_virtual_extension_targets(stark.num_lookup_helper_columns(config)) + auxiliary_polys: (stark.uses_lookups() || stark.requires_ctls()).then(|| { + builder.add_virtual_extension_targets( + stark.num_lookup_helper_columns(config) + num_ctl_helper_zs, + ) }), - auxiliary_polys_next: stark.uses_lookups().then(|| { - builder.add_virtual_extension_targets(stark.num_lookup_helper_columns(config)) + auxiliary_polys_next: (stark.uses_lookups() || stark.requires_ctls()).then(|| { + builder.add_virtual_extension_targets( + stark.num_lookup_helper_columns(config) + num_ctl_helper_zs, + ) }), + ctl_zs_first: stark + .requires_ctls() + .then(|| builder.add_virtual_targets(num_ctl_zs)), quotient_polys: builder - .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges), + .add_virtual_extension_targets(stark.quotient_degree_factor() * config.num_challenges), } } +/// Set the targets in a `StarkProofWithPublicInputsTarget` to +/// 
their corresponding values in a `StarkProofWithPublicInputs`. pub fn set_stark_proof_with_pis_target, W, const D: usize>( witness: &mut W, stark_proof_with_pis_target: &StarkProofWithPublicInputsTarget, stark_proof_with_pis: &StarkProofWithPublicInputs, + zero: Target, ) where F: RichField + Extendable, C::Hasher: AlgebraicHasher, @@ -277,13 +333,16 @@ pub fn set_stark_proof_with_pis_target, W, const D witness.set_target(pi_t, pi); } - set_stark_proof_target(witness, pt, proof); + set_stark_proof_target(witness, pt, proof, zero); } +/// Set the targets in a [`StarkProofTarget`] to their corresponding values in a +/// [`StarkProof`]. pub fn set_stark_proof_target, W, const D: usize>( witness: &mut W, proof_target: &StarkProofTarget, proof: &StarkProof, + zero: Target, ) where F: RichField + Extendable, C::Hasher: AlgebraicHasher, @@ -293,7 +352,7 @@ pub fn set_stark_proof_target, W, const D: usize>( witness.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap); witness.set_fri_openings( - &proof_target.openings.to_fri_openings(), + &proof_target.openings.to_fri_openings(zero), &proof.openings.to_fri_openings(), ); @@ -308,23 +367,23 @@ pub fn set_stark_proof_target, W, const D: usize>( } /// Utility function to check that all lookups data wrapped in `Option`s are `Some` iff -/// the Stark uses a permutation argument. +/// the STARK uses a permutation argument. fn check_lookup_options, S: Stark, const D: usize>( stark: &S, - proof_with_pis: &StarkProofWithPublicInputsTarget, + proof: &StarkProofTarget, challenges: &StarkProofChallengesTarget, ) -> Result<()> { let options_is_some = [ - proof_with_pis.proof.auxiliary_polys_cap.is_some(), - proof_with_pis.proof.openings.auxiliary_polys.is_some(), - proof_with_pis.proof.openings.auxiliary_polys_next.is_some(), + proof.auxiliary_polys_cap.is_some(), + proof.openings.auxiliary_polys.is_some(), + proof.openings.auxiliary_polys_next.is_some(), challenges.lookup_challenge_set.is_some(), ]; ensure!( options_is_some - .into_iter() - .all(|b| b == stark.uses_lookups()), - "Lookups data doesn't match with Stark configuration." + .iter() + .all(|&b| b == stark.uses_lookups() || stark.requires_ctls()), + "Lookups data doesn't match with STARK configuration." ); Ok(()) } diff --git a/starky/src/stark.rs b/starky/src/stark.rs index a9f2b260..0e2b3bd7 100644 --- a/starky/src/stark.rs +++ b/starky/src/stark.rs @@ -1,5 +1,8 @@ -use alloc::vec; -use alloc::vec::Vec; +//! Implementation of the [`Stark`] trait that defines the set of constraints +//! related to a statement. + +#[cfg(not(feature = "std"))] +use alloc::{vec, vec::Vec}; use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::packed::PackedField; @@ -17,14 +20,11 @@ use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer use crate::evaluation_frame::StarkEvaluationFrame; use crate::lookup::Lookup; -const TRACE_ORACLE_INDEX: usize = 0; -const AUXILIARY_ORACLE_INDEX: usize = 1; -const QUOTIENT_ORACLE_INDEX: usize = 2; - /// Represents a STARK system. pub trait Stark, const D: usize>: Sync { /// The total number of columns in the trace. const COLUMNS: usize = Self::EvaluationFrameTarget::COLUMNS; + /// The total number of public inputs. const PUBLIC_INPUTS: usize = Self::EvaluationFrameTarget::PUBLIC_INPUTS; /// This is used to evaluate constraints natively. @@ -36,7 +36,7 @@ pub trait Stark, const D: usize>: Sync { /// The `Target` version of `Self::EvaluationFrame`, used to evaluate constraints recursively. 
type EvaluationFrameTarget: StarkEvaluationFrame, ExtensionTarget>; - /// Evaluate constraints at a vector of points. + /// Evaluates constraints at a vector of points. /// /// The points are elements of a field `FE`, a degree `D2` extension of `F`. This lets us /// evaluate constraints over a larger domain if desired. This can also be called with `FE = F` @@ -50,7 +50,7 @@ pub trait Stark, const D: usize>: Sync { FE: FieldExtension, P: PackedField; - /// Evaluate constraints at a vector of points from the base field `F`. + /// Evaluates constraints at a vector of points from the base field `F`. fn eval_packed_base>( &self, vars: &Self::EvaluationFrame, @@ -59,7 +59,7 @@ pub trait Stark, const D: usize>: Sync { self.eval_packed_generic(vars, yield_constr) } - /// Evaluate constraints at a single point from the degree `D` extension field. + /// Evaluates constraints at a single point from the degree `D` extension field. fn eval_ext( &self, vars: &Self::EvaluationFrame, @@ -68,10 +68,10 @@ pub trait Stark, const D: usize>: Sync { self.eval_packed_generic(vars, yield_constr) } - /// Evaluate constraints at a vector of points from the degree `D` extension field. This is like - /// `eval_ext`, except in the context of a recursive circuit. - /// Note: constraints must be added through`yield_constr.constraint(builder, constraint)` in the - /// same order as they are given in `eval_packed_generic`. + /// Evaluates constraints at a vector of points from the degree `D` extension field. + /// This is like `eval_ext`, except in the context of a recursive circuit. + /// Note: constraints must be added through`yield_constr.constraint(builder, constraint)` + /// in the same order as they are given in `eval_packed_generic`. fn eval_ext_circuit( &self, builder: &mut CircuitBuilder, @@ -79,14 +79,16 @@ pub trait Stark, const D: usize>: Sync { yield_constr: &mut RecursiveConstraintConsumer, ); - /// The maximum constraint degree. + /// Outputs the maximum constraint degree of this [`Stark`]. fn constraint_degree(&self) -> usize; - /// The maximum constraint degree. + /// Outputs the maximum quotient polynomial's degree factor of this [`Stark`]. 
fn quotient_degree_factor(&self) -> usize { 1.max(self.constraint_degree() - 1) } + /// Outputs the number of quotient polynomials this [`Stark`] would require with + /// the provided [`StarkConfig`] fn num_quotient_polys(&self, config: &StarkConfig) -> usize { self.quotient_degree_factor() * config.num_challenges } @@ -96,30 +98,36 @@ pub trait Stark, const D: usize>: Sync { &self, zeta: F::Extension, g: F, + num_ctl_helpers: usize, + num_ctl_zs: Vec, config: &StarkConfig, ) -> FriInstanceInfo { - let trace_oracle = FriOracleInfo { + let mut oracles = vec![]; + let trace_info = FriPolynomialInfo::from_range(oracles.len(), 0..Self::COLUMNS); + oracles.push(FriOracleInfo { num_polys: Self::COLUMNS, blinding: false, - }; - let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS); + }); let num_lookup_columns = self.num_lookup_helper_columns(config); - let num_auxiliary_polys = num_lookup_columns; - let auxiliary_oracle = FriOracleInfo { - num_polys: num_auxiliary_polys, - blinding: false, + let num_auxiliary_polys = num_lookup_columns + num_ctl_helpers + num_ctl_zs.len(); + let auxiliary_polys_info = if self.uses_lookups() || self.requires_ctls() { + let aux_polys = FriPolynomialInfo::from_range(oracles.len(), 0..num_auxiliary_polys); + oracles.push(FriOracleInfo { + num_polys: num_auxiliary_polys, + blinding: false, + }); + aux_polys + } else { + vec![] }; - let auxiliary_polys_info = - FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys); let num_quotient_polys = self.num_quotient_polys(config); - let quotient_oracle = FriOracleInfo { + let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys); + oracles.push(FriOracleInfo { num_polys: num_quotient_polys, blinding: false, - }; - let quotient_info = - FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys); + }); let zeta_batch = FriBatchInfo { point: zeta, @@ -135,10 +143,22 @@ pub trait Stark, const D: usize>: Sync { polynomials: [trace_info, auxiliary_polys_info].concat(), }; - FriInstanceInfo { - oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle], - batches: vec![zeta_batch, zeta_next_batch], + let mut batches = vec![zeta_batch, zeta_next_batch]; + + if self.requires_ctls() { + let ctl_zs_info = FriPolynomialInfo::from_range( + 1, // auxiliary oracle index + num_lookup_columns + num_ctl_helpers..num_auxiliary_polys, + ); + let ctl_first_batch = FriBatchInfo { + point: F::Extension::ONE, + polynomials: ctl_zs_info, + }; + + batches.push(ctl_first_batch); } + + FriInstanceInfo { oracles, batches } } /// Computes the FRI instance used to prove this Stark. 
@@ -147,30 +167,36 @@ pub trait Stark, const D: usize>: Sync {
         builder: &mut CircuitBuilder,
         zeta: ExtensionTarget,
         g: F,
+        num_ctl_helper_polys: usize,
+        num_ctl_zs: usize,
         config: &StarkConfig,
     ) -> FriInstanceInfoTarget {
-        let trace_oracle = FriOracleInfo {
+        let mut oracles = vec![];
+        let trace_info = FriPolynomialInfo::from_range(oracles.len(), 0..Self::COLUMNS);
+        oracles.push(FriOracleInfo {
             num_polys: Self::COLUMNS,
             blinding: false,
-        };
-        let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);
+        });
 
         let num_lookup_columns = self.num_lookup_helper_columns(config);
-        let num_auxiliary_polys = num_lookup_columns;
-        let auxiliary_oracle = FriOracleInfo {
-            num_polys: num_auxiliary_polys,
-            blinding: false,
+        let num_auxiliary_polys = num_lookup_columns + num_ctl_helper_polys + num_ctl_zs;
+        let auxiliary_polys_info = if self.uses_lookups() || self.requires_ctls() {
+            let aux_polys = FriPolynomialInfo::from_range(oracles.len(), 0..num_auxiliary_polys);
+            oracles.push(FriOracleInfo {
+                num_polys: num_auxiliary_polys,
+                blinding: false,
+            });
+            aux_polys
+        } else {
+            vec![]
         };
-        let auxiliary_polys_info =
-            FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);
 
         let num_quotient_polys = self.num_quotient_polys(config);
-        let quotient_oracle = FriOracleInfo {
+        let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
+        oracles.push(FriOracleInfo {
             num_polys: num_quotient_polys,
             blinding: false,
-        };
-        let quotient_info =
-            FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys);
+        });
 
         let zeta_batch = FriBatchInfoTarget {
             point: zeta,
@@ -187,16 +213,31 @@ pub trait Stark, const D: usize>: Sync {
             polynomials: [trace_info, auxiliary_polys_info].concat(),
         };
 
-        FriInstanceInfoTarget {
-            oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
-            batches: vec![zeta_batch, zeta_next_batch],
+        let mut batches = vec![zeta_batch, zeta_next_batch];
+
+        if self.requires_ctls() {
+            let ctl_zs_info = FriPolynomialInfo::from_range(
+                1, // auxiliary oracle index
+                num_lookup_columns + num_ctl_helper_polys..num_auxiliary_polys,
+            );
+            let ctl_first_batch = FriBatchInfoTarget {
+                point: builder.one_extension(),
+                polynomials: ctl_zs_info,
+            };
+
+            batches.push(ctl_first_batch);
         }
+
+        FriInstanceInfoTarget { oracles, batches }
     }
 
+    /// Outputs all the [`Lookup`]s this STARK table needs to perform across its columns.
     fn lookups(&self) -> Vec> {
         vec![]
     }
 
+    /// Outputs the total number of lookup helper columns, based on this STARK's vector
+    /// of [`Lookup`]s and the number of challenges used by this [`StarkConfig`].
     fn num_lookup_helper_columns(&self, config: &StarkConfig) -> usize {
         self.lookups()
             .iter()
@@ -205,7 +246,17 @@ pub trait Stark, const D: usize>: Sync {
             * config.num_challenges
     }
 
+    /// Indicates whether this STARK uses lookups over some of its columns, and as such requires
+    /// additional steps during proof generation to handle auxiliary polynomials.
     fn uses_lookups(&self) -> bool {
         !self.lookups().is_empty()
     }
+
+    /// Indicates whether this STARK belongs to a multi-STARK system, and as such may require
+    /// cross-table lookups to connect shared values across different traces.
+    ///
+    /// It defaults to `false`, i.e. for simple uni-STARK systems.
+    fn requires_ctls(&self) -> bool {
+        false
+    }
 }
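
To make the new hooks concrete, a hedged sketch of how a table in a multi-STARK system might override the defaults. `MyTableStark` and its `byte_range_check` field are hypothetical, and the frame types and `eval_*` methods are elided:

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MyTableStark<F, D> {
    // ... evaluation frame types and eval_* methods as usual ...

    fn lookups(&self) -> Vec<Lookup<F>> {
        // One lookup argument over this table's columns.
        vec![self.byte_range_check.clone()]
    }

    fn requires_ctls(&self) -> bool {
        // This table shares values with other tables, so its proofs carry
        // CTL helper and Z polynomials in the auxiliary oracle.
        true
    }
}

A standalone uni-STARK overrides neither method and keeps the lookup- and CTL-free proof shape.
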
diff --git a/starky/src/stark_testing.rs b/starky/src/stark_testing.rs
index a454a29c..cc732844 100644
--- a/starky/src/stark_testing.rs
+++ b/starky/src/stark_testing.rs
@@ -1,5 +1,7 @@
-use alloc::vec;
-use alloc::vec::Vec;
+//! Utility module for testing [`Stark`] implementations.
+
+#[cfg(not(feature = "std"))]
+use alloc::{vec, vec::Vec};
 
 use anyhow::{ensure, Result};
 use plonky2::field::extension::{Extendable, FieldExtension};
diff --git a/starky/src/util.rs b/starky/src/util.rs
index 1adee000..08b2c702 100644
--- a/starky/src/util.rs
+++ b/starky/src/util.rs
@@ -1,3 +1,6 @@
+//! Utility module providing some helper functions.
+
+#[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
 
 use itertools::Itertools;
diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs
index 6a179fe2..c5ea5c10 100644
--- a/starky/src/vanishing_poly.rs
+++ b/starky/src/vanishing_poly.rs
@@ -4,17 +4,24 @@
 use plonky2::hash::hash_types::RichField;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
 
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use crate::cross_table_lookup::{
+    eval_cross_table_lookup_checks, eval_cross_table_lookup_checks_circuit, CtlCheckVars,
+    CtlCheckVarsTarget,
+};
 use crate::lookup::{
     eval_ext_lookups_circuit, eval_packed_lookups_generic, Lookup, LookupCheckVars,
     LookupCheckVarsTarget,
 };
 use crate::stark::Stark;
 
+/// Evaluates all constraint, permutation and cross-table lookup polynomials
+/// of the current STARK at the local and next values.
 pub(crate) fn eval_vanishing_poly(
     stark: &S,
     vars: &S::EvaluationFrame,
     lookups: &[Lookup],
     lookup_vars: Option>,
+    ctl_vars: Option<&[CtlCheckVars]>,
     consumer: &mut ConstraintConsumer,
 ) where
     F: RichField + Extendable,
@@ -22,6 +29,7 @@ pub(crate) fn eval_vanishing_poly(
     P: PackedField,
     S: Stark,
 {
+    // Evaluate all of the STARK's table constraints.
     stark.eval_packed_generic(vars, consumer);
     if let Some(lookup_vars) = lookup_vars {
         // Evaluate the STARK constraints related to the permutation arguments.
@@ -33,21 +41,45 @@ pub(crate) fn eval_vanishing_poly(
             consumer,
         );
     }
+    if let Some(ctl_vars) = ctl_vars {
+        // Evaluate the STARK constraints related to the CTLs.
+        eval_cross_table_lookup_checks::(
+            vars,
+            ctl_vars,
+            consumer,
+            stark.constraint_degree(),
+        );
+    }
 }
 
+/// Circuit version of `eval_vanishing_poly`.
+/// Evaluates all constraint, permutation and cross-table lookup polynomials
+/// of the current STARK at the local and next values.
 pub(crate) fn eval_vanishing_poly_circuit(
     builder: &mut CircuitBuilder,
     stark: &S,
     vars: &S::EvaluationFrameTarget,
     lookup_vars: Option>,
+    ctl_vars: Option<&[CtlCheckVarsTarget]>,
     consumer: &mut RecursiveConstraintConsumer,
 ) where
     F: RichField + Extendable,
     S: Stark,
 {
+    // Evaluate all of the STARK's table constraints.
     stark.eval_ext_circuit(builder, vars, consumer);
     if let Some(lookup_vars) = lookup_vars {
         // Evaluate all of the STARK's constraints related to the permutation argument.
         eval_ext_lookups_circuit::(builder, stark, vars, lookup_vars, consumer);
     }
+    if let Some(ctl_vars) = ctl_vars {
+        // Evaluate all of the STARK's constraints related to the CTLs.
+        eval_cross_table_lookup_checks_circuit::(
+            builder,
+            vars,
+            ctl_vars,
+            consumer,
+            stark.constraint_degree(),
+        );
+    }
 }
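
As a reminder of what `consumer` does while these evaluators run: each constraint value is folded into one running accumulator per challenge, Horner-style, acc <- acc * alpha + constraint. A standalone arithmetic analogue over plain integers modulo a toy prime (the real code works over packed field and extension-target values; names here are illustrative):

fn fold_constraints(alphas: &[u64], constraints: &[u64], p: u64) -> Vec<u64> {
    alphas
        .iter()
        .map(|&alpha| {
            // Fold every constraint into this challenge's accumulator.
            constraints.iter().fold(0u64, |acc, &c| {
                ((acc as u128 * alpha as u128 + c as u128) % p as u128) as u64
            })
        })
        .collect()
}

fn main() {
    // Two challenges, three constraint evaluations.
    let accs = fold_constraints(&[5, 7], &[3, 0, 4], (1 << 61) - 1);
    assert_eq!(accs, vec![79, 151]);
}
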
diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs
index 577405ef..7959ae0f 100644
--- a/starky/src/verifier.rs
+++ b/starky/src/verifier.rs
@@ -1,4 +1,8 @@
+//! Implementation of the STARK verifier.
+
+#[cfg(not(feature = "std"))]
 use alloc::vec::Vec;
+use core::any::type_name;
 use core::iter::once;
 
 use anyhow::{anyhow, ensure, Result};
@@ -8,17 +12,20 @@
 use plonky2::field::types::Field;
 use plonky2::fri::verifier::verify_fri_proof;
 use plonky2::hash::hash_types::RichField;
 use plonky2::hash::merkle_tree::MerkleCap;
+use plonky2::iop::challenger::Challenger;
 use plonky2::plonk::config::GenericConfig;
 use plonky2::plonk::plonk_common::reduce_with_powers;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::ConstraintConsumer;
+use crate::cross_table_lookup::CtlCheckVars;
 use crate::evaluation_frame::StarkEvaluationFrame;
 use crate::lookup::LookupCheckVars;
 use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
 use crate::stark::Stark;
 use crate::vanishing_poly::eval_vanishing_poly;
 
+/// Verifies a [`StarkProofWithPublicInputs`] against a STARK statement.
 pub fn verify_stark_proof<
     F: RichField + Extendable,
     C: GenericConfig,
@@ -30,36 +37,66 @@ pub fn verify_stark_proof<
     config: &StarkConfig,
 ) -> Result<()> {
     ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS);
-    let degree_bits = proof_with_pis.proof.recover_degree_bits(config);
-    let challenges = proof_with_pis.get_challenges(config, degree_bits);
-    verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, degree_bits, config)
+    let mut challenger = Challenger::::new();
+
+    let challenges = proof_with_pis.get_challenges(&mut challenger, None, false, config);
+
+    verify_stark_proof_with_challenges(
+        &stark,
+        &proof_with_pis.proof,
+        &challenges,
+        None,
+        &proof_with_pis.public_inputs,
+        config,
+    )
 }
 
-pub(crate) fn verify_stark_proof_with_challenges<
+/// Verifies a [`StarkProofWithPublicInputs`] against a STARK statement,
+/// with the provided [`StarkProofChallenges`].
+/// It also supports optional cross-table lookup data and challenges,
+/// in case this proof is part of a multi-STARK system.
+pub fn verify_stark_proof_with_challenges(
+    stark: &S,
+    proof: &StarkProof,
+    challenges: &StarkProofChallenges,
+    ctl_vars: Option<&[CtlCheckVars]>,
+    public_inputs: &[F],
+    config: &StarkConfig,
+) -> Result<()>
+where
     F: RichField + Extendable,
     C: GenericConfig,
     S: Stark,
-    const D: usize,
->(
-    stark: S,
-    proof_with_pis: StarkProofWithPublicInputs,
-    challenges: StarkProofChallenges,
-    degree_bits: usize,
-    config: &StarkConfig,
-) -> Result<()> {
-    validate_proof_shape(&stark, &proof_with_pis, config)?;
+{
+    log::debug!("Checking proof: {}", type_name::());
 
-    let StarkProofWithPublicInputs {
+    let (num_ctl_z_polys, num_ctl_polys) = ctl_vars
+        .map(|ctls| {
+            (
+                ctls.len(),
+                ctls.iter().map(|ctl| ctl.helper_columns.len()).sum(),
+            )
+        })
+        .unwrap_or_default();
+
+    validate_proof_shape(
+        stark,
         proof,
         public_inputs,
-    } = proof_with_pis;
+        config,
+        num_ctl_polys,
+        num_ctl_z_polys,
+    )?;
+
     let StarkOpeningSet {
         local_values,
         next_values,
         auxiliary_polys,
         auxiliary_polys_next,
+        ctl_zs_first: _,
         quotient_polys,
     } = &proof.openings;
+
     let vars = S::EvaluationFrame::from_values(
         local_values,
         next_values,
@@ -69,9 +106,12 @@ pub(crate) fn verify_stark_proof_with_challenges<
             .map(F::Extension::from_basefield)
             .collect::>(),
     );
+
+    let degree_bits = proof.recover_degree_bits(config);
     let (l_0, l_last) = eval_l_0_and_l_last(degree_bits, challenges.stark_zeta);
     let last = F::primitive_root_of_unity(degree_bits).inverse();
     let z_last = challenges.stark_zeta - last.into();
+
     let mut consumer = ConstraintConsumer::::new(
         challenges
             .stark_alphas
@@ -84,28 +124,34 @@ pub(crate) fn verify_stark_proof_with_challenges<
     );
 
     let num_lookup_columns = stark.num_lookup_helper_columns(config);
-    let lookup_challenges = (num_lookup_columns > 0).then(|| {
-        challenges
-            .lookup_challenge_set
-            .unwrap()
-            .challenges
-            .iter()
-            .map(|ch| ch.beta)
-            .collect::>()
-    });
+    let lookup_challenges = if stark.uses_lookups() {
+        Some(
+            challenges
+                .lookup_challenge_set
+                .as_ref()
+                .unwrap()
+                .challenges
+                .iter()
+                .map(|ch| ch.beta)
+                .collect::>(),
+        )
+    } else {
+        None
+    };
 
     let lookup_vars = stark.uses_lookups().then(|| LookupCheckVars {
-        local_values: auxiliary_polys.as_ref().unwrap().clone(),
-        next_values: auxiliary_polys_next.as_ref().unwrap().clone(),
+        local_values: auxiliary_polys.as_ref().unwrap()[..num_lookup_columns].to_vec(),
+        next_values: auxiliary_polys_next.as_ref().unwrap()[..num_lookup_columns].to_vec(),
         challenges: lookup_challenges.unwrap(),
     });
 
     let lookups = stark.lookups();
     eval_vanishing_poly::(
-        &stark,
+        stark,
         &vars,
         &lookups,
         lookup_vars,
+        ctl_vars,
         &mut consumer,
     );
     let vanishing_polys_zeta = consumer.accumulators();
@@ -128,15 +174,25 @@ pub(crate) fn verify_stark_proof_with_challenges<
         );
     }
 
-    let merkle_caps = once(proof.trace_cap)
-        .chain(proof.auxiliary_polys_cap)
-        .chain(once(proof.quotient_polys_cap))
+    let merkle_caps = once(proof.trace_cap.clone())
+        .chain(proof.auxiliary_polys_cap.clone())
+        .chain(once(proof.quotient_polys_cap.clone()))
         .collect_vec();
 
+    let num_ctl_zs = ctl_vars
+        .map(|vars| {
+            vars.iter()
+                .map(|ctl| ctl.helper_columns.len())
+                .collect::>()
+        })
+        .unwrap_or_default();
+
     verify_fri_proof::(
         &stark.fri_instance(
             challenges.stark_zeta,
             F::primitive_root_of_unity(degree_bits),
+            num_ctl_polys,
+            num_ctl_zs,
             config,
         ),
         &proof.openings.to_fri_openings(),
@@ -151,18 +207,17 @@ fn validate_proof_shape(
     stark: &S,
-    proof_with_pis: &StarkProofWithPublicInputs,
+    proof: &StarkProof,
+    public_inputs: &[F],
     config: &StarkConfig,
+    num_ctl_helpers: usize,
+    num_ctl_zs: usize,
 ) -> anyhow::Result<()>
 where
     F: RichField + Extendable,
     C: GenericConfig,
     S: Stark,
 {
-    let StarkProofWithPublicInputs {
-        proof,
-        public_inputs,
-    } = proof_with_pis;
     let degree_bits = proof.recover_degree_bits(config);
 
     let StarkProof {
@@ -180,6 +235,7 @@ where
         next_values,
         auxiliary_polys,
         auxiliary_polys_next,
+        ctl_zs_first,
         quotient_polys,
     } = openings;
 
@@ -188,8 +244,6 @@ where
     let fri_params = config.fri_params(degree_bits);
     let cap_height = fri_params.config.cap_height;
 
-    let num_auxiliary = stark.num_lookup_helper_columns(config);
-
     ensure!(trace_cap.height() == cap_height);
     ensure!(quotient_polys_cap.height() == cap_height);
 
@@ -202,6 +256,9 @@ where
         auxiliary_polys_cap,
         auxiliary_polys,
         auxiliary_polys_next,
+        num_ctl_helpers,
+        num_ctl_zs,
+        ctl_zs_first,
         config,
     )?;
 
@@ -221,21 +278,24 @@ fn eval_l_0_and_l_last(log_n: usize, x: F) -> (F, F) {
 }
 
 /// Utility function to check that all lookups data wrapped in `Option`s are `Some` iff
-/// the Stark uses a permutation argument.
-fn check_lookup_options<
-    F: RichField + Extendable,
-    C: GenericConfig,
-    S: Stark,
-    const D: usize,
->(
+/// the STARK uses a permutation argument.
+fn check_lookup_options(
     stark: &S,
     auxiliary_polys_cap: &Option>::Hasher>>,
     auxiliary_polys: &Option>::Extension>>,
     auxiliary_polys_next: &Option>::Extension>>,
+    num_ctl_helpers: usize,
+    num_ctl_zs: usize,
+    ctl_zs_first: &Option>,
     config: &StarkConfig,
-) -> Result<()> {
-    if stark.uses_lookups() {
-        let num_auxiliary = stark.num_lookup_helper_columns(config);
+) -> Result<()>
+where
+    F: RichField + Extendable,
+    C: GenericConfig,
+    S: Stark,
+{
+    if stark.uses_lookups() || stark.requires_ctls() {
+        let num_auxiliary = stark.num_lookup_helper_columns(config) + num_ctl_helpers + num_ctl_zs;
         let cap_height = config.fri_config.cap_height;
 
         let auxiliary_polys_cap = auxiliary_polys_cap
             .as_ref()
@@ -248,6 +308,10 @@ fn check_lookup_options<
             .as_ref()
             .ok_or_else(|| anyhow!("Missing auxiliary_polys_next"))?;
 
+        if let Some(ctl_zs_first) = ctl_zs_first {
+            ensure!(ctl_zs_first.len() == num_ctl_zs);
+        }
+
         ensure!(auxiliary_polys_cap.height() == cap_height);
         ensure!(auxiliary_polys.len() == num_auxiliary);
         ensure!(auxiliary_polys_next.len() == num_auxiliary);
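
Downstream, the uni-STARK entry point keeps a simple shape even though its internals changed. A usage sketch under the post-patch API, assuming `stark` and `proof` come from a matching `prove` call with the same `StarkConfig`:

use anyhow::Result;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use starky::config::StarkConfig;
use starky::proof::StarkProofWithPublicInputs;
use starky::stark::Stark;
use starky::verifier::verify_stark_proof;

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;

fn check<S: Stark<F, D>>(
    stark: S,
    proof: StarkProofWithPublicInputs<F, C, D>,
) -> Result<()> {
    let config = StarkConfig::standard_fast_config();
    // For a standalone STARK the verifier passes `None` for CTL data
    // internally; multi-STARK systems drive
    // `verify_stark_proof_with_challenges` directly instead.
    verify_stark_proof(stark, proof, &config)
}
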