Mirror of https://github.com/logos-storage/plonky2.git
Synced 2026-01-02 13:53:07 +00:00
Update starky and leverage it as dependency for plonky2_evm (#1503)
* Update prover logic
* Add helper method for CTL data
* Some cleanup
* Update some methods
* Fix
* Some more fixes
* More tweaks
* Final
* Leverage starky crate
* Additional tweaks
* Cleanup
* More cleanup
* Fix
* Cleanup imports
* Fix
* Final tweaks
* Cleanup and hide behind debug_assertions attribute
* Clippy
* Fix no-std
* Make wasm compatible
* Doc and remove todo
* API cleanup and remove TODO
* Add Debug impls
* Add documentation for public items
* Feature-gate alloc imports
* Import method from starky instead
* Add simple crate and module documentation
* Apply comments
* Add lib level documentation
* Add test without lookups
* Fix starks without logup
* Cleanup
* Some more cleanup
* Fix get_challenges for non-lookup STARKs
* Add additional config methods and tests
* Apply comments
* More comments
This commit is contained in:
parent b6fec06c38
commit 3ec1bfddb3
@@ -27,8 +27,9 @@ num-bigint = "0.4.3"
 once_cell = "1.13.0"
 pest = "2.1.3"
 pest_derive = "2.1.0"
-plonky2 = { path = "../plonky2", default-features = false, features = ["timing"] }
+plonky2 = { path = "../plonky2", features = ["timing"] }
 plonky2_util = { path = "../util" }
+starky = { path = "../starky" }
 rand = "0.8.5"
 rand_chacha = "0.3.1"
 rlp = "0.5.1"
@@ -51,7 +52,11 @@ sha2 = "0.10.6"
 [features]
 default = ["parallel"]
 asmtools = ["hex"]
-parallel = ["plonky2/parallel", "plonky2_maybe_rayon/parallel"]
+parallel = [
+    "plonky2/parallel",
+    "plonky2_maybe_rayon/parallel",
+    "starky/parallel"
+]
 
 [[bin]]
 name = "assemble"
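Note on the features change: `parallel` now also forwards to `starky/parallel`, so enabling parallelism on the EVM crate turns it on for the whole proving stack. A minimal sketch of the pattern this feature typically gates in this workspace (the `plonky2_maybe_rayon` usage below is our assumption about convention, not a line from this diff):

    // Sketch: plonky2-family crates write iterator code once; with the
    // `parallel` feature the `par_iter()` below is a rayon parallel iterator,
    // and without it the same call resolves to a plain serial iterator.
    use plonky2_maybe_rayon::*;

    fn sum_of_squares(values: &[u64]) -> u64 {
        values.par_iter().map(|&v| v * v).sum()
    }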
@@ -3,15 +3,17 @@ use core::ops::Deref;
 use plonky2::field::extension::Extendable;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
+use starky::config::StarkConfig;
+use starky::cross_table_lookup::{CrossTableLookup, TableIdx, TableWithColumns};
+use starky::evaluation_frame::StarkFrame;
+use starky::stark::Stark;
 
 use crate::arithmetic::arithmetic_stark;
 use crate::arithmetic::arithmetic_stark::ArithmeticStark;
 use crate::byte_packing::byte_packing_stark::{self, BytePackingStark};
-use crate::config::StarkConfig;
 use crate::cpu::cpu_stark;
 use crate::cpu::cpu_stark::CpuStark;
 use crate::cpu::membus::NUM_GP_CHANNELS;
-use crate::cross_table_lookup::{CrossTableLookup, TableIdx, TableWithColumns};
 use crate::keccak::keccak_stark;
 use crate::keccak::keccak_stark::KeccakStark;
 use crate::keccak_sponge::columns::KECCAK_RATE_BYTES;
@@ -21,7 +23,6 @@ use crate::logic;
 use crate::logic::LogicStark;
 use crate::memory::memory_stark;
 use crate::memory::memory_stark::MemoryStark;
-use crate::stark::Stark;
 
 /// Structure containing all STARKs and the cross-table lookups.
 #[derive(Clone)]
@@ -66,6 +67,8 @@ impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
     }
 }
 
+pub type EvmStarkFrame<T, U, const N: usize> = StarkFrame<T, U, N, 0>;
+
 /// Associates STARK tables with a unique index.
 #[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub enum Table {
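The new `EvmStarkFrame` alias is the pivot of this migration: starky's `StarkFrame` gained a const parameter for the number of public inputs, and the EVM tables expose none at the STARK level, so the alias pins that parameter to 0. A short reading of the alias (the meaning of the parameters is inferred from this diff, not quoted from starky's docs):

    // Assumed shape: StarkFrame<T, U, const N: usize, const N2: usize>, where
    //   T  is the value type of the trace columns (packed field or target),
    //   U  is the value type of the public inputs,
    //   N  is the number of columns, and
    //   N2 is the number of public inputs; the EVM STARKs use N2 = 0.
    pub type EvmStarkFrame<T, U, const N: usize> = StarkFrame<T, U, N, 0>;

Every `type EvaluationFrame` in the hunks below is rewritten in terms of this alias, e.g. `EvmStarkFrame<P, FE, NUM_ARITH_COLUMNS>`.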
@@ -22,10 +22,10 @@ use plonky2::field::types::{Field, PrimeField64};
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 use crate::arithmetic::columns::*;
 use crate::arithmetic::utils::u256_to_array;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 /// Generate row for ADD, SUB, GT and LT operations.
 pub(crate) fn generate<F: PrimeField64>(
@@ -263,10 +263,10 @@ mod tests {
     use plonky2::field::types::{Field, Sample};
     use rand::{Rng, SeedableRng};
     use rand_chacha::ChaCha8Rng;
+    use starky::constraint_consumer::ConstraintConsumer;
 
     use super::*;
     use crate::arithmetic::columns::NUM_ARITH_COLUMNS;
-    use crate::constraint_consumer::ConstraintConsumer;
 
     // TODO: Should be able to refactor this test to apply to all operations.
     #[test]
@@ -284,14 +284,14 @@ mod tests {
         lv[IS_LT] = F::ZERO;
         lv[IS_GT] = F::ZERO;
 
-        let mut constrant_consumer = ConstraintConsumer::new(
+        let mut constraint_consumer = ConstraintConsumer::new(
             vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
             F::ONE,
             F::ONE,
             F::ONE,
         );
-        eval_packed_generic(&lv, &mut constrant_consumer);
-        for &acc in &constrant_consumer.constraint_accs {
+        eval_packed_generic(&lv, &mut constraint_consumer);
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, F::ZERO);
         }
     }
@@ -324,14 +324,14 @@ mod tests {
 
         generate(&mut lv, op_filter, left_in, right_in);
 
-        let mut constrant_consumer = ConstraintConsumer::new(
+        let mut constraint_consumer = ConstraintConsumer::new(
             vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
             F::ONE,
             F::ONE,
             F::ONE,
         );
-        eval_packed_generic(&lv, &mut constrant_consumer);
-        for &acc in &constrant_consumer.constraint_accs {
+        eval_packed_generic(&lv, &mut constraint_consumer);
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, F::ZERO);
         }
 
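Two things change in each of these test hunks: the `constrant_consumer` typo is fixed, and the tests stop reading the `constraint_accs` field directly, going through starky's `accumulators()` accessor instead (the field was only public as a stopgap, per the TODO in the deleted consumer further down). The resulting idiom, extracted from the hunks above:

    // Post-migration test idiom: build a consumer with dummy challenges, run
    // the packed evaluator, then require every accumulator to be zero on a
    // valid row. The three trailing arguments are z_last and the two Lagrange
    // selectors, per the constructor in the deleted constraint_consumer.rs.
    let mut constraint_consumer = ConstraintConsumer::new(
        vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
        F::ONE,
        F::ONE,
        F::ONE,
    );
    eval_packed_generic(&lv, &mut constraint_consumer);
    for &acc in &constraint_consumer.accumulators() {
        assert_eq!(acc, F::ZERO);
    }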
@@ -9,18 +9,18 @@ use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
 use plonky2::util::transpose;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use starky::cross_table_lookup::TableWithColumns;
+use starky::evaluation_frame::StarkEvaluationFrame;
+use starky::lookup::{Column, Filter, Lookup};
+use starky::stark::Stark;
 use static_assertions::const_assert;
 
 use super::columns::{op_flags, NUM_ARITH_COLUMNS};
 use super::shift;
-use crate::all_stark::Table;
+use crate::all_stark::{EvmStarkFrame, Table};
 use crate::arithmetic::columns::{NUM_SHARED_COLS, RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
 use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation};
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::cross_table_lookup::TableWithColumns;
-use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
-use crate::lookup::{Column, Filter, Lookup};
-use crate::stark::Stark;
 
 /// Creates a vector of `Columns` to link the 16-bit columns of the arithmetic table,
 /// split into groups of N_LIMBS at a time in `regs`, with the corresponding 32-bit
@@ -190,12 +190,13 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
 }
 
 impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticStark<F, D> {
-    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_ARITH_COLUMNS>
+    type EvaluationFrame<FE, P, const D2: usize> = EvmStarkFrame<P, FE, NUM_ARITH_COLUMNS>
     where
         FE: FieldExtension<D2, BaseField = F>,
         P: PackedField<Scalar = FE>;
 
-    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_ARITH_COLUMNS>;
+    type EvaluationFrameTarget =
+        EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_ARITH_COLUMNS>;
 
     fn eval_packed_generic<FE, P, const D2: usize>(
         &self,
@@ -320,6 +321,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
             filter_columns: vec![None; NUM_SHARED_COLS],
         }]
     }
+
+    fn requires_ctls(&self) -> bool {
+        true
+    }
 }
 
 #[cfg(test)]
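`requires_ctls` is new surface area: now that the generic prover lives in starky, which also supports standalone STARKs without cross-table lookups, each EVM table opts in explicitly. A self-contained sketch of the opt-in pattern (hypothetical trait, not starky's actual one; only the method name and the `true` return come from this diff):

    trait TableLike {
        // Assumed default: a standalone STARK needs no CTL data.
        fn requires_ctls(&self) -> bool {
            false
        }
    }

    struct ArithmeticTable;

    impl TableLike for ArithmeticTable {
        fn requires_ctls(&self) -> bool {
            true // this table is linked to the others via CTLs
        }
    }

    fn main() {
        assert!(ArithmeticTable.requires_ctls());
    }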
@@ -330,11 +335,11 @@ mod tests {
     use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
     use rand::{Rng, SeedableRng};
     use rand_chacha::ChaCha8Rng;
+    use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
 
     use super::{columns, ArithmeticStark};
     use crate::arithmetic::columns::OUTPUT_REGISTER;
     use crate::arithmetic::*;
-    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
 
     #[test]
     fn degree() -> Result<()> {
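The degree and recursive-constraint smoke tests are now driven by `starky::stark_testing`. A sketch of a test module wired up this way (the helper signatures are assumed from their call sites in this commit, and `Default` on the stark type is an assumption too):

    #[cfg(test)]
    mod tests {
        use anyhow::Result;
        use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
        use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

        use crate::arithmetic::arithmetic_stark::ArithmeticStark;

        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;

        #[test]
        fn degree() -> Result<()> {
            test_stark_low_degree(ArithmeticStark::<F, D>::default())
        }

        #[test]
        fn circuit() -> Result<()> {
            test_stark_circuit_constraints::<F, C, _, D>(ArithmeticStark::<F, D>::default())
        }
    }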
@@ -69,11 +69,11 @@ use plonky2::field::types::{Field, PrimeField64};
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use static_assertions::const_assert;
 
 use crate::arithmetic::columns::*;
 use crate::arithmetic::utils::u256_to_array;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 // Give meaningful names to the columns of AUX_INPUT_REGISTER_0 that
 // we're using
@@ -480,14 +480,14 @@ mod tests {
             let out_byte = val.byte(31 - i) as u64;
             verify_output(&lv, out_byte);
 
-            let mut constrant_consumer = ConstraintConsumer::new(
+            let mut constraint_consumer = ConstraintConsumer::new(
                 vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
                 F::ONE,
                 F::ONE,
                 F::ONE,
             );
-            eval_packed(&lv, &mut constrant_consumer);
-            for &acc in &constrant_consumer.constraint_accs {
+            eval_packed(&lv, &mut constraint_consumer);
+            for &acc in &constraint_consumer.accumulators() {
                 assert_eq!(acc, F::ZERO);
             }
         }
@@ -11,13 +11,13 @@ use plonky2::field::types::PrimeField64;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 use crate::arithmetic::columns::*;
 use crate::arithmetic::modular::{
     generate_modular_op, modular_constr_poly, modular_constr_poly_ext_circuit,
 };
 use crate::arithmetic::utils::*;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 /// Generates the output and auxiliary values for modular operations,
 /// assuming the input, modular and output limbs are already set.
@@ -215,10 +215,10 @@ mod tests {
     use plonky2::field::types::{Field, Sample};
     use rand::{Rng, SeedableRng};
     use rand_chacha::ChaCha8Rng;
+    use starky::constraint_consumer::ConstraintConsumer;
 
     use super::*;
     use crate::arithmetic::columns::NUM_ARITH_COLUMNS;
-    use crate::constraint_consumer::ConstraintConsumer;
 
     const N_RND_TESTS: usize = 1000;
     const MODULAR_OPS: [usize; 2] = [IS_MOD, IS_DIV];
@@ -247,7 +247,7 @@ mod tests {
             GoldilocksField::ONE,
         );
         eval_packed(&lv, &nv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -306,7 +306,7 @@ mod tests {
             GoldilocksField::ZERO,
         );
         eval_packed(&lv, &nv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -321,52 +321,57 @@ mod tests {
 
        for op_filter in MODULAR_OPS {
            for _i in 0..N_RND_TESTS {
-                // set inputs to random values and the modulus to zero;
-                // the output is defined to be zero when modulus is zero.
-                let mut lv = [F::default(); NUM_ARITH_COLUMNS]
-                    .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
-                let mut nv = [F::default(); NUM_ARITH_COLUMNS]
-                    .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
+                for corrupt_constraints in [false, true] {
+                    // set inputs to random values and the modulus to zero;
+                    // the output is defined to be zero when modulus is zero.
+                    let mut lv = [F::default(); NUM_ARITH_COLUMNS]
+                        .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
+                    let mut nv = [F::default(); NUM_ARITH_COLUMNS]
+                        .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
 
-                // Reset operation columns, then select one
-                for op in MODULAR_OPS {
-                    lv[op] = F::ZERO;
-                }
-                // Since SHR uses the logic for DIV, `IS_SHR` should also be set to 0 here.
-                lv[IS_SHR] = F::ZERO;
-                lv[op_filter] = F::ONE;
-
-                let input0 = U256::from(rng.gen::<[u8; 32]>());
-                let input1 = U256::zero();
-
-                generate(&mut lv, &mut nv, op_filter, input0, input1, U256::zero());
-
-                // check that the correct output was generated
-                assert!(lv[OUTPUT_REGISTER].iter().all(|&c| c == F::ZERO));
-
-                let mut constraint_consumer = ConstraintConsumer::new(
-                    vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
-                    GoldilocksField::ONE,
-                    GoldilocksField::ZERO,
-                    GoldilocksField::ZERO,
-                );
-                eval_packed(&lv, &nv, &mut constraint_consumer);
-                assert!(constraint_consumer
-                    .constraint_accs
-                    .iter()
-                    .all(|&acc| acc == F::ZERO));
-
-                // Corrupt one output limb by setting it to a non-zero value
-                let random_oi = OUTPUT_REGISTER.start + rng.gen::<usize>() % N_LIMBS;
-                lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX));
-
-                eval_packed(&lv, &nv, &mut constraint_consumer);
-
-                // Check that at least one of the constraints was non-zero
-                assert!(constraint_consumer
-                    .constraint_accs
-                    .iter()
-                    .any(|&acc| acc != F::ZERO));
+                    // Reset operation columns, then select one
+                    for op in MODULAR_OPS {
+                        lv[op] = F::ZERO;
+                    }
+                    // Since SHR uses the logic for DIV, `IS_SHR` should also be set to 0 here.
+                    lv[IS_SHR] = F::ZERO;
+                    lv[op_filter] = F::ONE;
+
+                    let input0 = U256::from(rng.gen::<[u8; 32]>());
+                    let input1 = U256::zero();
+
+                    generate(&mut lv, &mut nv, op_filter, input0, input1, U256::zero());
+
+                    // check that the correct output was generated
+                    assert!(lv[OUTPUT_REGISTER].iter().all(|&c| c == F::ZERO));
+
+                    let mut constraint_consumer = ConstraintConsumer::new(
+                        vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
+                        GoldilocksField::ONE,
+                        GoldilocksField::ZERO,
+                        GoldilocksField::ZERO,
+                    );
+                    eval_packed(&lv, &nv, &mut constraint_consumer);
+
+                    if corrupt_constraints {
+                        // Corrupt one output limb by setting it to a non-zero value.
+                        let random_oi = OUTPUT_REGISTER.start + rng.gen::<usize>() % N_LIMBS;
+                        lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX));

+                        eval_packed(&lv, &nv, &mut constraint_consumer);
+
+                        // Check that at least one of the constraints was non-zero.
+                        assert!(constraint_consumer
+                            .accumulators()
+                            .iter()
+                            .any(|&acc| acc != F::ZERO));
+                    } else {
+                        assert!(constraint_consumer
+                            .accumulators()
+                            .iter()
+                            .all(|&acc| acc == F::ZERO));
+                    }
+                }
             }
         }
     }
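The restructured test folds both polarities into one body: with `corrupt_constraints = false` it checks that an honestly generated row satisfies every constraint, with `true` it checks that tampering with an output limb is actually caught. The same reshaping is applied to modular.rs just below. The skeleton of the pattern, with a toy constraint standing in for the real evaluator:

    // Toy stand-in for eval_packed: the single constraint is "row[0] == 0".
    fn eval(row: &[i64], accumulators: &mut Vec<i64>) {
        accumulators.push(row[0]);
    }

    fn main() {
        for corrupt_constraints in [false, true] {
            let mut row = vec![0i64; 4];
            let mut accs = Vec::new();
            eval(&row, &mut accs);

            if corrupt_constraints {
                row[0] = 7; // corrupt the "output"
                eval(&row, &mut accs);
                assert!(accs.iter().any(|&a| a != 0)); // violation is caught
            } else {
                assert!(accs.iter().all(|&a| a == 0)); // honest row passes
            }
        }
    }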
@@ -119,13 +119,13 @@ use plonky2::field::types::{Field, PrimeField64};
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use static_assertions::const_assert;
 
 use super::columns;
 use crate::arithmetic::addcy::{eval_ext_circuit_addcy, eval_packed_generic_addcy};
 use crate::arithmetic::columns::*;
 use crate::arithmetic::utils::*;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::extension_tower::BN_BASE;
 
 const fn bn254_modulus_limbs() -> [u16; N_LIMBS] {
@@ -832,10 +832,10 @@ mod tests {
     use plonky2::field::types::{Field, Sample};
     use rand::{Rng, SeedableRng};
     use rand_chacha::ChaCha8Rng;
+    use starky::constraint_consumer::ConstraintConsumer;
 
     use super::*;
     use crate::arithmetic::columns::NUM_ARITH_COLUMNS;
-    use crate::constraint_consumer::ConstraintConsumer;
     use crate::extension_tower::BN_BASE;
 
     const N_RND_TESTS: usize = 1000;
@@ -873,7 +873,7 @@ mod tests {
             GoldilocksField::ONE,
         );
         eval_packed(&lv, &nv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -930,7 +930,7 @@ mod tests {
             GoldilocksField::ZERO,
         );
         eval_packed(&lv, &nv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -945,54 +945,59 @@ mod tests {
 
        for op_filter in [IS_ADDMOD, IS_SUBMOD, IS_MULMOD] {
            for _i in 0..N_RND_TESTS {
-                // set inputs to random values and the modulus to zero;
-                // the output is defined to be zero when modulus is zero.
-                let mut lv = [F::default(); NUM_ARITH_COLUMNS]
-                    .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
-                let mut nv = [F::default(); NUM_ARITH_COLUMNS]
-                    .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
+                for corrupt_constraints in [false, true] {
+                    // set inputs to random values and the modulus to zero;
+                    // the output is defined to be zero when modulus is zero.
+                    let mut lv = [F::default(); NUM_ARITH_COLUMNS]
+                        .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
+                    let mut nv = [F::default(); NUM_ARITH_COLUMNS]
+                        .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
 
-                // Reset operation columns, then select one
-                for op in MODULAR_OPS {
-                    lv[op] = F::ZERO;
-                }
-                lv[IS_SHR] = F::ZERO;
-                lv[IS_DIV] = F::ZERO;
-                lv[IS_MOD] = F::ZERO;
-                lv[op_filter] = F::ONE;
-
-                let input0 = U256::from(rng.gen::<[u8; 32]>());
-                let input1 = U256::from(rng.gen::<[u8; 32]>());
-                let modulus = U256::zero();
-
-                generate(&mut lv, &mut nv, op_filter, input0, input1, modulus);
-
-                // check that the correct output was generated
-                assert!(lv[MODULAR_OUTPUT].iter().all(|&c| c == F::ZERO));
-
-                let mut constraint_consumer = ConstraintConsumer::new(
-                    vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
-                    GoldilocksField::ONE,
-                    GoldilocksField::ZERO,
-                    GoldilocksField::ZERO,
-                );
-                eval_packed(&lv, &nv, &mut constraint_consumer);
-                assert!(constraint_consumer
-                    .constraint_accs
-                    .iter()
-                    .all(|&acc| acc == F::ZERO));
-
-                // Corrupt one output limb by setting it to a non-zero value
-                let random_oi = MODULAR_OUTPUT.start + rng.gen::<usize>() % N_LIMBS;
-                lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX));
-
-                eval_packed(&lv, &nv, &mut constraint_consumer);
-
-                // Check that at least one of the constraints was non-zero
-                assert!(constraint_consumer
-                    .constraint_accs
-                    .iter()
-                    .any(|&acc| acc != F::ZERO));
+                    // Reset operation columns, then select one
+                    for op in MODULAR_OPS {
+                        lv[op] = F::ZERO;
+                    }
+                    lv[IS_SHR] = F::ZERO;
+                    lv[IS_DIV] = F::ZERO;
+                    lv[IS_MOD] = F::ZERO;
+                    lv[op_filter] = F::ONE;
+
+                    let input0 = U256::from(rng.gen::<[u8; 32]>());
+                    let input1 = U256::from(rng.gen::<[u8; 32]>());
+                    let modulus = U256::zero();
+
+                    generate(&mut lv, &mut nv, op_filter, input0, input1, modulus);
+
+                    // check that the correct output was generated
+                    assert!(lv[MODULAR_OUTPUT].iter().all(|&c| c == F::ZERO));
+
+                    let mut constraint_consumer = ConstraintConsumer::new(
+                        vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
+                        GoldilocksField::ONE,
+                        GoldilocksField::ZERO,
+                        GoldilocksField::ZERO,
+                    );
+                    eval_packed(&lv, &nv, &mut constraint_consumer);
+
+                    if corrupt_constraints {
+                        // Corrupt one output limb by setting it to a non-zero value.
+                        let random_oi = MODULAR_OUTPUT.start + rng.gen::<usize>() % N_LIMBS;
+                        lv[random_oi] = F::from_canonical_u16(rng.gen_range(1..u16::MAX));
+
+                        eval_packed(&lv, &nv, &mut constraint_consumer);
+
+                        // Check that at least one of the constraints was non-zero.
+                        assert!(constraint_consumer
+                            .accumulators()
+                            .iter()
+                            .any(|&acc| acc != F::ZERO));
+                    } else {
+                        assert!(constraint_consumer
+                            .accumulators()
+                            .iter()
+                            .all(|&acc| acc == F::ZERO));
+                    }
+                }
             }
         }
     }
@@ -62,10 +62,10 @@ use plonky2::field::types::{Field, PrimeField64};
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 use crate::arithmetic::columns::*;
 use crate::arithmetic::utils::*;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 /// Given the two limbs of `left_in` and `right_in`, computes `left_in * right_in`.
 pub(crate) fn generate_mul<F: PrimeField64>(lv: &mut [F], left_in: [i64; 16], right_in: [i64; 16]) {
@@ -253,10 +253,10 @@ mod tests {
     use plonky2::field::types::{Field, Sample};
     use rand::{Rng, SeedableRng};
     use rand_chacha::ChaCha8Rng;
+    use starky::constraint_consumer::ConstraintConsumer;
 
     use super::*;
     use crate::arithmetic::columns::NUM_ARITH_COLUMNS;
-    use crate::constraint_consumer::ConstraintConsumer;
 
     const N_RND_TESTS: usize = 1000;
 
@@ -279,7 +279,7 @@ mod tests {
             GoldilocksField::ONE,
         );
         eval_packed_generic(&lv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -312,7 +312,7 @@ mod tests {
             GoldilocksField::ONE,
         );
         eval_packed_generic(&lv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -27,11 +27,11 @@ use plonky2::field::types::PrimeField64;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 use super::{divmod, mul};
 use crate::arithmetic::columns::*;
 use crate::arithmetic::utils::*;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 /// Generates a shift operation (either SHL or SHR).
 /// The inputs are stored in the form `(shift, input, 1 << shift)`.
@@ -184,10 +184,10 @@ mod tests {
     use plonky2::field::types::{Field, Sample};
     use rand::{Rng, SeedableRng};
     use rand_chacha::ChaCha8Rng;
+    use starky::constraint_consumer::ConstraintConsumer;
 
     use super::*;
     use crate::arithmetic::columns::NUM_ARITH_COLUMNS;
-    use crate::constraint_consumer::ConstraintConsumer;
 
     const N_RND_TESTS: usize = 1000;
 
@@ -212,7 +212,7 @@ mod tests {
             GoldilocksField::ONE,
         );
         eval_packed_generic(&lv, &nv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -261,7 +261,7 @@ mod tests {
             GoldilocksField::ZERO,
        );
         eval_packed_generic(&lv, &nv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -320,7 +320,7 @@ mod tests {
             GoldilocksField::ZERO,
        );
         eval_packed_generic(&lv, &nv, &mut constraint_consumer);
-        for &acc in &constraint_consumer.constraint_accs {
+        for &acc in &constraint_consumer.accumulators() {
             assert_eq!(acc, GoldilocksField::ZERO);
         }
     }
@@ -37,16 +37,17 @@ use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::timed;
 use plonky2::util::timing::TimingTree;
 use plonky2::util::transpose;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use starky::evaluation_frame::StarkEvaluationFrame;
+use starky::lookup::{Column, Filter, Lookup};
+use starky::stark::Stark;
 
 use super::NUM_BYTES;
+use crate::all_stark::EvmStarkFrame;
 use crate::byte_packing::columns::{
     index_len, value_bytes, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, IS_READ, LEN_INDICES_COLS,
     NUM_COLUMNS, RANGE_COUNTER, RC_FREQUENCIES, TIMESTAMP,
 };
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
-use crate::lookup::{Column, Filter, Lookup};
-use crate::stark::Stark;
 use crate::witness::memory::MemoryAddress;
 
 /// Strict upper bound for the individual bytes range-check.
@@ -258,12 +259,12 @@ impl<F: RichField + Extendable<D>, const D: usize> BytePackingStark<F, D> {
 }
 
 impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingStark<F, D> {
-    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
+    type EvaluationFrame<FE, P, const D2: usize> = EvmStarkFrame<P, FE, NUM_COLUMNS>
     where
         FE: FieldExtension<D2, BaseField = F>,
         P: PackedField<Scalar = FE>;
 
-    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;
+    type EvaluationFrameTarget = EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_COLUMNS>;
 
     fn eval_packed_generic<FE, P, const D2: usize>(
         &self,
@@ -397,15 +398,19 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
             filter_columns: vec![None; NUM_BYTES],
         }]
     }
+
+    fn requires_ctls(&self) -> bool {
+        true
+    }
 }
 
 #[cfg(test)]
 pub(crate) mod tests {
     use anyhow::Result;
     use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
+    use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
 
     use crate::byte_packing::byte_packing_stark::BytePackingStark;
-    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
 
     #[test]
     fn test_stark_degree() -> Result<()> {
@@ -1,43 +0,0 @@
-use plonky2::fri::reduction_strategies::FriReductionStrategy;
-use plonky2::fri::{FriConfig, FriParams};
-
-/// A configuration containing the different parameters to be used by the STARK prover.
-pub struct StarkConfig {
-    /// The targeted security level for the proofs generated with this configuration.
-    pub security_bits: usize,
-
-    /// The number of challenge points to generate, for IOPs that have soundness errors of (roughly)
-    /// `degree / |F|`.
-    pub num_challenges: usize,
-
-    /// The configuration of the FRI sub-protocol.
-    pub fri_config: FriConfig,
-}
-
-impl Default for StarkConfig {
-    fn default() -> Self {
-        Self::standard_fast_config()
-    }
-}
-
-impl StarkConfig {
-    /// A typical configuration with a rate of 2, resulting in fast but large proofs.
-    /// Targets ~100 bit conjectured security.
-    pub const fn standard_fast_config() -> Self {
-        Self {
-            security_bits: 100,
-            num_challenges: 2,
-            fri_config: FriConfig {
-                rate_bits: 1,
-                cap_height: 4,
-                proof_of_work_bits: 16,
-                reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5),
-                num_query_rounds: 84,
-            },
-        }
-    }
-
-    pub(crate) fn fri_params(&self, degree_bits: usize) -> FriParams {
-        self.fri_config.fri_params(degree_bits, false)
-    }
-}
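The crate-local `StarkConfig` above is deleted outright; call sites now use `starky::config::StarkConfig`, imported in all_stark.rs at the top of this diff. A minimal usage sketch, assuming starky's version keeps the `standard_fast_config` constructor and public fields that this file defined (the unchanged call sites in this commit suggest it does):

    use starky::config::StarkConfig;

    fn main() {
        // The same defaults the deleted file hard-coded: ~100-bit conjectured
        // security, 2 challenge points, rate_bits = 1 for FRI.
        let config = StarkConfig::standard_fast_config();
        assert_eq!(config.security_bits, 100);
        assert_eq!(config.num_challenges, 2);
    }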
@@ -1,162 +0,0 @@
-use core::marker::PhantomData;
-
-use plonky2::field::extension::Extendable;
-use plonky2::field::packed::PackedField;
-use plonky2::hash::hash_types::RichField;
-use plonky2::iop::ext_target::ExtensionTarget;
-use plonky2::iop::target::Target;
-use plonky2::plonk::circuit_builder::CircuitBuilder;
-
-pub struct ConstraintConsumer<P: PackedField> {
-    /// Random values used to combine multiple constraints into one.
-    pub alphas: Vec<P::Scalar>,
-
-    /// Running sums of constraints that have been emitted so far, scaled by powers of alpha.
-    // TODO(JN): This is pub so it can be used in a test. Once we have an API for accessing this
-    // result, it should be made private.
-    pub constraint_accs: Vec<P>,
-
-    /// The evaluation of `X - g^(n-1)`.
-    z_last: P,
-
-    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
-    /// with the first trace row, and zero at other points in the subgroup.
-    lagrange_basis_first: P,
-
-    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
-    /// with the last trace row, and zero at other points in the subgroup.
-    lagrange_basis_last: P,
-}
-
-impl<P: PackedField> ConstraintConsumer<P> {
-    pub(crate) fn new(
-        alphas: Vec<P::Scalar>,
-        z_last: P,
-        lagrange_basis_first: P,
-        lagrange_basis_last: P,
-    ) -> Self {
-        Self {
-            constraint_accs: vec![P::ZEROS; alphas.len()],
-            alphas,
-            z_last,
-            lagrange_basis_first,
-            lagrange_basis_last,
-        }
-    }
-
-    pub(crate) fn accumulators(self) -> Vec<P> {
-        self.constraint_accs
-    }
-
-    /// Add one constraint valid on all rows except the last.
-    pub(crate) fn constraint_transition(&mut self, constraint: P) {
-        self.constraint(constraint * self.z_last);
-    }
-
-    /// Add one constraint on all rows.
-    pub(crate) fn constraint(&mut self, constraint: P) {
-        for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) {
-            *acc *= alpha;
-            *acc += constraint;
-        }
-    }
-
-    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
-    /// first row of the trace.
-    pub(crate) fn constraint_first_row(&mut self, constraint: P) {
-        self.constraint(constraint * self.lagrange_basis_first);
-    }
-
-    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
-    /// last row of the trace.
-    pub(crate) fn constraint_last_row(&mut self, constraint: P) {
-        self.constraint(constraint * self.lagrange_basis_last);
-    }
-}
-
-pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> {
-    /// A random value used to combine multiple constraints into one.
-    alphas: Vec<Target>,
-
-    /// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
-    constraint_accs: Vec<ExtensionTarget<D>>,
-
-    /// The evaluation of `X - g^(n-1)`.
-    z_last: ExtensionTarget<D>,
-
-    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
-    /// with the first trace row, and zero at other points in the subgroup.
-    lagrange_basis_first: ExtensionTarget<D>,
-
-    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
-    /// with the last trace row, and zero at other points in the subgroup.
-    lagrange_basis_last: ExtensionTarget<D>,
-
-    _phantom: PhantomData<F>,
-}
-
-impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> {
-    pub(crate) fn new(
-        zero: ExtensionTarget<D>,
-        alphas: Vec<Target>,
-        z_last: ExtensionTarget<D>,
-        lagrange_basis_first: ExtensionTarget<D>,
-        lagrange_basis_last: ExtensionTarget<D>,
-    ) -> Self {
-        Self {
-            constraint_accs: vec![zero; alphas.len()],
-            alphas,
-            z_last,
-            lagrange_basis_first,
-            lagrange_basis_last,
-            _phantom: Default::default(),
-        }
-    }
-
-    pub(crate) fn accumulators(self) -> Vec<ExtensionTarget<D>> {
-        self.constraint_accs
-    }
-
-    /// Add one constraint valid on all rows except the last.
-    pub(crate) fn constraint_transition(
-        &mut self,
-        builder: &mut CircuitBuilder<F, D>,
-        constraint: ExtensionTarget<D>,
-    ) {
-        let filtered_constraint = builder.mul_extension(constraint, self.z_last);
-        self.constraint(builder, filtered_constraint);
-    }
-
-    /// Add one constraint valid on all rows.
-    pub(crate) fn constraint(
-        &mut self,
-        builder: &mut CircuitBuilder<F, D>,
-        constraint: ExtensionTarget<D>,
-    ) {
-        for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) {
-            *acc = builder.scalar_mul_add_extension(alpha, *acc, constraint);
-        }
-    }
-
-    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
-    /// first row of the trace.
-    pub(crate) fn constraint_first_row(
-        &mut self,
-        builder: &mut CircuitBuilder<F, D>,
-        constraint: ExtensionTarget<D>,
-    ) {
-        let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first);
-        self.constraint(builder, filtered_constraint);
-    }
-
-    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
-    /// last row of the trace.
-    pub(crate) fn constraint_last_row(
-        &mut self,
-        builder: &mut CircuitBuilder<F, D>,
-        constraint: ExtensionTarget<D>,
-    ) {
-        let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last);
-        self.constraint(builder, filtered_constraint);
-    }
-}
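For reference, the fold in `constraint()` above (now provided by starky's consumer) is a Horner-style random linear combination. With challenge $\alpha_i$ and emitted constraints $c_1, \dots, c_m$, each accumulator ends up holding

    \mathrm{acc}_i \;=\; \sum_{j=1}^{m} \alpha_i^{\,m-j} \, c_j ,

which vanishes for every $i$ exactly when all $c_j$ are zero, up to a soundness error over the random $\alpha_i$. The transition, first-row and last-row variants pre-multiply each $c_j$ by the `z_last` or Lagrange-selector factor before folding it in; this is read directly off the deleted code above.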
@@ -4,8 +4,8 @@ use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 
 pub(crate) fn eval_packed<P: PackedField>(
@@ -2,8 +2,8 @@ use plonky2::field::extension::Extendable;
 use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 
 /// Check the correct updating of `clock`.
@@ -5,10 +5,10 @@ use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 use super::columns::ops::OpsColumnsView;
 use super::cpu_stark::{disable_unused_channels, disable_unused_channels_circuit};
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 use crate::memory::segments::Segment;
 
@@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::{CpuColumnsView, COL_MAP};
 use crate::cpu::kernel::aggregator::KERNEL;
 
@@ -8,24 +8,24 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use starky::cross_table_lookup::TableWithColumns;
+use starky::evaluation_frame::StarkEvaluationFrame;
+use starky::lookup::{Column, Filter};
+use starky::stark::Stark;
 
 use super::columns::CpuColumnsView;
 use super::halt;
 use super::kernel::constants::context_metadata::ContextMetadata;
 use super::membus::NUM_GP_CHANNELS;
-use crate::all_stark::Table;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use crate::all_stark::{EvmStarkFrame, Table};
 use crate::cpu::columns::{COL_MAP, NUM_CPU_COLUMNS};
 use crate::cpu::{
     byte_unpacking, clock, contextops, control_flow, decode, dup_swap, gas, jumps, membus, memio,
     modfp254, pc, push0, shift, simple_logic, stack, syscalls_exceptions,
 };
-use crate::cross_table_lookup::TableWithColumns;
-use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
-use crate::lookup::{Column, Filter};
 use crate::memory::segments::Segment;
 use crate::memory::{NUM_CHANNELS, VALUE_LIMBS};
-use crate::stark::Stark;
 
 /// Creates the vector of `Columns` corresponding to the General Purpose channels when calling the Keccak sponge:
 /// the CPU reads the output of the sponge directly from the `KeccakSpongeStark` table.
@@ -452,12 +452,13 @@ pub(crate) struct CpuStark<F, const D: usize> {
 }
 
 impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
-    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_CPU_COLUMNS>
+    type EvaluationFrame<FE, P, const D2: usize> = EvmStarkFrame<P, FE, NUM_CPU_COLUMNS>
     where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;
 
-    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_CPU_COLUMNS>;
+    type EvaluationFrameTarget =
+        EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_CPU_COLUMNS>;
 
     /// Evaluates all CPU constraints.
     fn eval_packed_generic<FE, P, const D2: usize>(
@@ -531,15 +532,19 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
     fn constraint_degree(&self) -> usize {
         3
     }
+
+    fn requires_ctls(&self) -> bool {
+        true
+    }
 }
 
 #[cfg(test)]
 mod tests {
     use anyhow::Result;
     use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
+    use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
 
     use crate::cpu::cpu_stark::CpuStark;
-    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
 
     #[test]
     fn test_stark_degree() -> Result<()> {
@@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::{CpuColumnsView, COL_MAP};
 
 /// List of opcode blocks
@@ -5,8 +5,8 @@ use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::{CpuColumnsView, MemoryChannelView};
 use crate::memory::segments::Segment;
 
@@ -4,9 +4,9 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 use super::columns::COL_MAP;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::ops::OpsColumnsView;
 use crate::cpu::columns::CpuColumnsView;
 
@@ -5,9 +5,9 @@ use plonky2::field::extension::Extendable;
 use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 use super::control_flow::get_halt_pc;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::{CpuColumnsView, COL_MAP};
 use crate::cpu::membus::NUM_GP_CHANNELS;
 
@@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::membus::NUM_GP_CHANNELS;
 use crate::memory::segments::Segment;
@@ -2,8 +2,8 @@ use plonky2::field::extension::Extendable;
 use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 
 /// General-purpose memory channels; they can read and write to all contexts/segments/addresses.
@@ -4,9 +4,9 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
 use super::cpu_stark::get_addr;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::stack;
 use crate::memory::segments::Segment;
@@ -4,8 +4,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 
 // Python:
@@ -2,8 +2,8 @@ use plonky2::field::extension::Extendable;
 use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 
 /// Evaluates constraints to check that we are storing the correct PC.
@@ -2,8 +2,8 @@ use plonky2::field::extension::Extendable;
 use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 
 /// Evaluates constraints to check that we are not pushing anything.
@@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::membus::NUM_GP_CHANNELS;
 use crate::memory::segments::Segment;
@@ -5,8 +5,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::stack::{self, EQ_STACK_BEHAVIOR, IS_ZERO_STACK_BEHAVIOR};
 
@@ -5,8 +5,8 @@ use plonky2::field::extension::Extendable;
 use plonky2::field::packed::PackedField;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 
 /// Evaluates constraints for NOT, EQ and ISZERO.
@@ -3,8 +3,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::stack;
 
@@ -6,8 +6,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::ops::OpsColumnsView;
 use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::membus::NUM_GP_CHANNELS;
@@ -7,8 +7,8 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
 use plonky2::iop::ext_target::ExtensionTarget;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::kernel::aggregator::KERNEL;
 use crate::cpu::membus::NUM_GP_CHANNELS;
@@ -1,47 +0,0 @@
-/// A trait for viewing an evaluation frame of a STARK table.
-///
-/// It allows to access the current and next rows at a given step
-/// and can be used to implement constraint evaluation both natively
-/// and recursively.
-pub trait StarkEvaluationFrame<T: Copy + Clone + Default>: Sized {
-    /// The number of columns for the STARK table this evaluation frame views.
-    const COLUMNS: usize;
-
-    /// Returns the local values (i.e. current row) for this evaluation frame.
-    fn get_local_values(&self) -> &[T];
-    /// Returns the next values (i.e. next row) for this evaluation frame.
-    fn get_next_values(&self) -> &[T];
-
-    /// Outputs a new evaluation frame from the provided local and next values.
-    ///
-    /// **NOTE**: Concrete implementations of this method SHOULD ensure that
-    /// the provided slices lengths match the `Self::COLUMNS` value.
-    fn from_values(lv: &[T], nv: &[T]) -> Self;
-}
-
-pub struct StarkFrame<T: Copy + Clone + Default, const N: usize> {
-    local_values: [T; N],
-    next_values: [T; N],
-}
-
-impl<T: Copy + Clone + Default, const N: usize> StarkEvaluationFrame<T> for StarkFrame<T, N> {
-    const COLUMNS: usize = N;
-
-    fn get_local_values(&self) -> &[T] {
-        &self.local_values
-    }
-
-    fn get_next_values(&self) -> &[T] {
-        &self.next_values
-    }
-
-    fn from_values(lv: &[T], nv: &[T]) -> Self {
-        assert_eq!(lv.len(), Self::COLUMNS);
-        assert_eq!(nv.len(), Self::COLUMNS);
-
-        Self {
-            local_values: lv.try_into().unwrap(),
-            next_values: nv.try_into().unwrap(),
-        }
-    }
-}
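The deleted trait is superseded by `starky::evaluation_frame::StarkEvaluationFrame`, which additionally threads public inputs through the frame (the extra const parameter hidden behind `EvmStarkFrame`). A self-contained sketch of the frame-viewing idea, independent of either crate (hypothetical names, simplified bounds):

    trait EvaluationFrame<T: Copy + Default>: Sized {
        const COLUMNS: usize;
        fn local(&self) -> &[T];
        fn next(&self) -> &[T];
        fn from_values(lv: &[T], nv: &[T]) -> Self;
    }

    struct Frame<T: Copy + Default, const N: usize> {
        local_values: [T; N],
        next_values: [T; N],
    }

    impl<T: Copy + Default, const N: usize> EvaluationFrame<T> for Frame<T, N> {
        const COLUMNS: usize = N;
        fn local(&self) -> &[T] {
            &self.local_values
        }
        fn next(&self) -> &[T] {
            &self.next_values
        }
        fn from_values(lv: &[T], nv: &[T]) -> Self {
            // As the deleted NOTE demands: lengths must match COLUMNS.
            Self {
                local_values: lv.try_into().unwrap(),
                next_values: nv.try_into().unwrap(),
            }
        }
    }

    fn main() {
        // A transition constraint only ever sees one row and its successor.
        let f = Frame::<u64, 2>::from_values(&[1, 2], &[3, 4]);
        assert_eq!(f.local()[1] + 1, f.next()[0]);
    }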
@@ -29,18 +29,18 @@ use plonky2::util::serialization::{
 };
 use plonky2::util::timing::TimingTree;
 use plonky2_util::log2_ceil;
+use starky::config::StarkConfig;
+use starky::cross_table_lookup::{verify_cross_table_lookups_circuit, CrossTableLookup};
+use starky::lookup::{get_grand_product_challenge_set_target, GrandProductChallengeSet};
+use starky::proof::StarkProofWithMetadata;
+use starky::stark::Stark;
 
 use crate::all_stark::{all_cross_table_lookups, AllStark, Table, NUM_TABLES};
-use crate::config::StarkConfig;
-use crate::cross_table_lookup::{
-    get_grand_product_challenge_set_target, verify_cross_table_lookups_circuit, CrossTableLookup,
-    GrandProductChallengeSet,
-};
 use crate::generation::GenerationInputs;
 use crate::get_challenges::observe_public_values_target;
 use crate::proof::{
     AllProof, BlockHashesTarget, BlockMetadataTarget, ExtraBlockData, ExtraBlockDataTarget,
-    PublicValues, PublicValuesTarget, StarkProofWithMetadata, TrieRoots, TrieRootsTarget,
+    PublicValues, PublicValuesTarget, TrieRoots, TrieRootsTarget,
 };
 use crate::prover::{check_abort_signal, prove};
 use crate::recursive_verifier::{
@@ -48,7 +48,6 @@ use crate::recursive_verifier::{
     recursive_stark_circuit, set_public_value_targets, PlonkWrapperCircuit, PublicInputs,
     StarkWrapperCircuit,
 };
-use crate::stark::Stark;
 use crate::util::h256_limbs;
 
 /// The recursion threshold. We end a chain of recursive proofs once we reach this size.
@@ -587,7 +586,7 @@ where
         &mut builder,
         all_cross_table_lookups(),
         pis.map(|p| p.ctl_zs_first),
-        extra_looking_sums,
+        Some(&extra_looking_sums),
        stark_config,
     );
 
@@ -1002,7 +1001,7 @@ where
        let mut root_inputs = PartialWitness::new();
 
        for table in 0..NUM_TABLES {
-            let stark_proof = &all_proof.stark_proofs[table];
+            let stark_proof = &all_proof.multi_proof.stark_proofs[table];
            let original_degree_bits = stark_proof.proof.recover_degree_bits(config);
            let table_circuits = &self.by_table[table];
            let shrunk_proof = table_circuits
@@ -1015,7 +1014,7 @@ where
                        original_degree_bits,
                    ))
                })?
-                .shrink(stark_proof, &all_proof.ctl_challenges)?;
+                .shrink(stark_proof, &all_proof.multi_proof.ctl_challenges)?;
            let index_verifier_data = table_circuits
                .by_stark_size
                .keys()
@@ -1107,9 +1106,10 @@ where
        for table in 0..NUM_TABLES {
            let (table_circuit, index_verifier_data) = &table_circuits[table];
 
-            let stark_proof = &all_proof.stark_proofs[table];
+            let stark_proof = &all_proof.multi_proof.stark_proofs[table];
 
-            let shrunk_proof = table_circuit.shrink(stark_proof, &all_proof.ctl_challenges)?;
+            let shrunk_proof =
+                table_circuit.shrink(stark_proof, &all_proof.multi_proof.ctl_challenges)?;
            root_inputs.set_target(
                self.root.index_verifier_data[table],
                F::from_canonical_u8(*index_verifier_data),
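The recurring rewrite in this file, `all_proof.stark_proofs` becoming `all_proof.multi_proof.stark_proofs` (and likewise for `ctl_challenges`), indicates that `AllProof` now wraps starky's multi-table proof bundle instead of carrying the pieces itself. A sketch of the shape these call sites imply (every name beyond the two field paths visible in the diff is an assumption):

    // Shape implied by the call sites, not the actual definitions:
    struct MultiProof<P, C> {
        stark_proofs: Vec<P>, // one StarkProofWithMetadata per table
        ctl_challenges: C,    // the shared GrandProductChallengeSet
    }

    struct AllProof<P, C> {
        multi_proof: MultiProof<P, C>,
    }

    fn main() {
        let all_proof = AllProof {
            multi_proof: MultiProof {
                stark_proofs: vec!["arithmetic", "byte_packing", "cpu"],
                ctl_challenges: (),
            },
        };
        // The access pattern used throughout fixed_recursive_verifier.rs:
        let stark_proof = &all_proof.multi_proof.stark_proofs[2];
        assert_eq!(*stark_proof, "cpu");
    }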
@@ -10,13 +10,13 @@ use plonky2::hash::hash_types::RichField;
 use plonky2::timed;
 use plonky2::util::timing::TimingTree;
 use serde::{Deserialize, Serialize};
+use starky::config::StarkConfig;
 use GlobalMetadata::{
     ReceiptTrieRootDigestAfter, ReceiptTrieRootDigestBefore, StateTrieRootDigestAfter,
     StateTrieRootDigestBefore, TransactionTrieRootDigestAfter, TransactionTrieRootDigestBefore,
 };
 
 use crate::all_stark::{AllStark, NUM_TABLES};
-use crate::config::StarkConfig;
 use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::kernel::aggregator::KERNEL;
 use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
@ -1,13 +1,11 @@
|
||||
use ethereum_types::{BigEndianHash, H256, U256};
|
||||
use plonky2::field::extension::Extendable;
|
||||
use plonky2::fri::proof::{FriProof, FriProofTarget};
|
||||
use plonky2::hash::hash_types::RichField;
|
||||
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
|
||||
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
|
||||
use starky::config::StarkConfig;
|
||||
use starky::lookup::get_grand_product_challenge_set;
|
||||
|
||||
use crate::config::StarkConfig;
|
||||
use crate::cross_table_lookup::get_grand_product_challenge_set;
|
||||
use crate::proof::*;
|
||||
use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64};
|
||||
use crate::witness::errors::ProgramError;
|
||||
@ -198,7 +196,9 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
) -> Result<AllProofChallenges<F, D>, ProgramError> {
let mut challenger = Challenger::<F, C::Hasher>::new();

for proof in &self.stark_proofs {
let stark_proofs = &self.multi_proof.stark_proofs;

for proof in stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
}

@ -210,112 +210,14 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
Ok(AllProofChallenges {
stark_challenges: core::array::from_fn(|i| {
challenger.compact();
self.stark_proofs[i]
.proof
.get_challenges(&mut challenger, config)
stark_proofs[i].proof.get_challenges(
&mut challenger,
Some(&ctl_challenges),
true,
config,
)
}),
ctl_challenges,
})
}
}
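Note the new shape of starky's `get_challenges`: besides the challenger and config, it takes the optional CTL challenge set and a boolean controlling whether the trace cap is skipped (it was already observed in the loop above, hence `true` here). A hedged sketch of the standalone case, assuming that argument order and that the boolean means "skip observing the trace cap":

// For a STARK proven on its own, with no cross-table lookups,
// one would presumably pass `None` and `false` (assumption):
let challenges = proof.get_challenges(&mut challenger, None, false, config);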
impl<F, C, const D: usize> StarkProof<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
{
/// Computes all Fiat-Shamir challenges used in the STARK proof.
pub(crate) fn get_challenges(
&self,
challenger: &mut Challenger<F, C::Hasher>,
config: &StarkConfig,
) -> StarkProofChallenges<F, D> {
let degree_bits = self.recover_degree_bits(config);

let StarkProof {
auxiliary_polys_cap,
quotient_polys_cap,
openings,
opening_proof:
FriProof {
commit_phase_merkle_caps,
final_poly,
pow_witness,
..
},
..
} = &self;

let num_challenges = config.num_challenges;

challenger.observe_cap(auxiliary_polys_cap);

let stark_alphas = challenger.get_n_challenges(num_challenges);

challenger.observe_cap(quotient_polys_cap);
let stark_zeta = challenger.get_extension_challenge::<D>();

challenger.observe_openings(&openings.to_fri_openings());

StarkProofChallenges {
stark_alphas,
stark_zeta,
fri_challenges: challenger.fri_challenges::<C, D>(
commit_phase_merkle_caps,
final_poly,
*pow_witness,
degree_bits,
&config.fri_config,
),
}
}
}

impl<const D: usize> StarkProofTarget<D> {
pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
&self,
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
where
C::Hasher: AlgebraicHasher<F>,
{
let StarkProofTarget {
auxiliary_polys_cap: auxiliary_polys,
quotient_polys_cap,
openings,
opening_proof:
FriProofTarget {
commit_phase_merkle_caps,
final_poly,
pow_witness,
..
},
..
} = &self;

let num_challenges = config.num_challenges;

challenger.observe_cap(auxiliary_polys);

let stark_alphas = challenger.get_n_challenges(builder, num_challenges);

challenger.observe_cap(quotient_polys_cap);
let stark_zeta = challenger.get_extension_challenge(builder);

challenger.observe_openings(&openings.to_fri_openings(builder.zero()));

StarkProofChallengesTarget {
stark_alphas,
stark_zeta,
fri_challenges: challenger.fri_challenges(
builder,
commit_phase_merkle_caps,
final_poly,
*pow_witness,
&config.fri_config,
),
}
}
}
@ -1,7 +1,7 @@
use plonky2::field::types::Field;
use starky::lookup::Column;

use crate::keccak::keccak_stark::{NUM_INPUTS, NUM_ROUNDS};
use crate::lookup::Column;

/// A register which is set to 1 if we are in the `i`th round, otherwise 0.
pub(crate) const fn reg_step(i: usize) -> usize {
@ -10,10 +10,14 @@ use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::plonk_common::reduce_with_powers_ext_circuit;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::evaluation_frame::StarkEvaluationFrame;
use starky::lookup::{Column, Filter};
use starky::stark::Stark;
use starky::util::trace_rows_to_poly_values;

use super::columns::reg_input_limb;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::all_stark::EvmStarkFrame;
use crate::keccak::columns::{
reg_a, reg_a_prime, reg_a_prime_prime, reg_a_prime_prime_0_0_bit, reg_a_prime_prime_prime,
reg_b, reg_c, reg_c_prime, reg_output_limb, reg_step, NUM_COLUMNS, TIMESTAMP,
@ -23,9 +27,6 @@ use crate::keccak::logic::{
andn, andn_gen, andn_gen_circuit, xor, xor3_gen, xor3_gen_circuit, xor_gen, xor_gen_circuit,
};
use crate::keccak::round_flags::{eval_round_flags, eval_round_flags_recursively};
use crate::lookup::{Column, Filter};
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;

/// Number of rounds in a Keccak permutation.
pub(crate) const NUM_ROUNDS: usize = 24;
@ -253,12 +254,12 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
type EvaluationFrame<FE, P, const D2: usize> = EvmStarkFrame<P, FE, NUM_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;
type EvaluationFrameTarget = EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
@ -616,6 +617,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
fn constraint_degree(&self) -> usize {
3
}

fn requires_ctls(&self) -> bool {
true
}
}
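`requires_ctls` is one of the new `Stark` trait hooks: tables that take part in cross-table lookups return `true` so the prover and verifier expect CTL data for them. A sketch for a standalone table, assuming the trait ships a `false` default (in which case the override below would normally be unnecessary):

// A STARK outside any cross-table lookup reports `false`.
fn requires_ctls(&self) -> bool {
    false
}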
#[cfg(test)]
@ -626,14 +631,14 @@ mod tests {
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use starky::config::StarkConfig;
use starky::cross_table_lookup::{CtlData, CtlZData};
use starky::lookup::{GrandProductChallenge, GrandProductChallengeSet};
use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
use tiny_keccak::keccakf;

use super::*;
use crate::config::StarkConfig;
use crate::cross_table_lookup::{CtlData, CtlZData, GrandProductChallengeSet};
use crate::lookup::GrandProductChallenge;
use crate::prover::prove_single_table;
use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

#[test]
fn test_stark_degree() -> Result<()> {
@ -734,16 +739,16 @@ mod tests {
let degree = 1 << trace_commitments.degree_log;

// Fake CTL data.
let ctl_z_data = CtlZData {
helper_columns: vec![PolynomialValues::zero(degree)],
z: PolynomialValues::zero(degree),
challenge: GrandProductChallenge {
let ctl_z_data = CtlZData::new(
vec![PolynomialValues::zero(degree)],
PolynomialValues::zero(degree),
GrandProductChallenge {
beta: F::ZERO,
gamma: F::ZERO,
},
columns: vec![],
filter: vec![Some(Filter::new_simple(Column::constant(F::ZERO)))],
};
vec![],
vec![Some(Filter::new_simple(Column::constant(F::ZERO)))],
);
let ctl_data = CtlData {
zs_columns: vec![ctl_z_data.clone(); config.num_challenges],
};
@ -4,14 +4,15 @@ use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::evaluation_frame::StarkEvaluationFrame;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::all_stark::EvmStarkFrame;
use crate::keccak::columns::{reg_step, NUM_COLUMNS};
use crate::keccak::keccak_stark::NUM_ROUNDS;

pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
vars: &StarkFrame<P, NUM_COLUMNS>,
vars: &EvmStarkFrame<P, F, NUM_COLUMNS>,
yield_constr: &mut ConstraintConsumer<P>,
) {
let local_values = vars.get_local_values();
@ -40,7 +41,7 @@ pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(

pub(crate) fn eval_round_flags_recursively<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: &StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>,
vars: &EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_COLUMNS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();
@ -14,13 +14,14 @@ use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
use plonky2_util::ceil_div_usize;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::evaluation_frame::StarkEvaluationFrame;
use starky::lookup::{Column, Filter, Lookup};
use starky::stark::Stark;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::all_stark::EvmStarkFrame;
use crate::cpu::kernel::keccak_util::keccakf_u32s;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak_sponge::columns::*;
use crate::lookup::{Column, Filter, Lookup};
use crate::stark::Stark;
use crate::witness::memory::MemoryAddress;

/// Strict upper bound for the individual bytes range-check.
@ -520,12 +521,13 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakSpongeStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeStark<F, D> {
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_KECCAK_SPONGE_COLUMNS>
type EvaluationFrame<FE, P, const D2: usize> = EvmStarkFrame<P, FE, NUM_KECCAK_SPONGE_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_KECCAK_SPONGE_COLUMNS>;
type EvaluationFrameTarget =
EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_KECCAK_SPONGE_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
@ -807,6 +809,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeS
filter_columns: vec![None; KECCAK_RATE_BYTES],
}]
}

fn requires_ctls(&self) -> bool {
true
}
}

#[cfg(test)]
@ -816,10 +822,10 @@ mod tests {
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::PrimeField64;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

use super::*;
use crate::memory::segments::Segment;
use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

#[test]
fn test_stark_degree() -> Result<()> {
@ -165,35 +165,32 @@
#![allow(unused)]
#![feature(let_chains)]

pub mod all_stark;
// Individual STARK processing units
pub mod arithmetic;
pub mod byte_packing;
pub mod config;
pub mod constraint_consumer;
pub mod cpu;
pub mod cross_table_lookup;
pub mod curve_pairings;
pub mod evaluation_frame;
pub mod extension_tower;
pub mod fixed_recursive_verifier;
pub mod generation;
mod get_challenges;
pub mod keccak;
pub mod keccak_sponge;
pub mod logic;
pub mod lookup;
pub mod memory;

// Proving system components
pub mod all_stark;
pub mod fixed_recursive_verifier;
mod get_challenges;
pub mod proof;
pub mod prover;
pub mod recursive_verifier;
pub mod stark;
pub mod util;
pub mod vanishing_poly;
pub mod verifier;

// Witness generation
pub mod generation;
pub mod witness;

#[cfg(test)]
mod stark_testing;
// Utility modules
pub mod curve_pairings;
pub mod extension_tower;
pub mod util;

use eth_trie_utils::partial_trie::HashedPartialTrie;
// Set up Jemalloc
@ -209,6 +206,6 @@ static GLOBAL: Jemalloc = Jemalloc;
pub type Node = eth_trie_utils::partial_trie::Node<HashedPartialTrie>;

pub use all_stark::AllStark;
pub use config::StarkConfig;
pub use fixed_recursive_verifier::AllRecursiveCircuits;
pub use generation::GenerationInputs;
pub use starky::config::StarkConfig;
@ -11,13 +11,15 @@ use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2_util::ceil_div_usize;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::evaluation_frame::StarkEvaluationFrame;
use starky::lookup::{Column, Filter};
use starky::stark::Stark;
use starky::util::trace_rows_to_poly_values;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::all_stark::EvmStarkFrame;
use crate::logic::columns::NUM_COLUMNS;
use crate::lookup::{Column, Filter};
use crate::stark::Stark;
use crate::util::{limb_from_bits_le, limb_from_bits_le_recursive, trace_rows_to_poly_values};
use crate::util::{limb_from_bits_le, limb_from_bits_le_recursive};

/// Total number of bits per input/output.
const VAL_BITS: usize = 256;
@ -210,12 +212,12 @@ impl<F: RichField, const D: usize> LogicStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F, D> {
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
type EvaluationFrame<FE, P, const D2: usize> = EvmStarkFrame<P, FE, NUM_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;
type EvaluationFrameTarget = EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
@ -354,15 +356,19 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F,
fn constraint_degree(&self) -> usize {
3
}

fn requires_ctls(&self) -> bool {
true
}
}

#[cfg(test)]
mod tests {
use anyhow::Result;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

use crate::logic::LogicStark;
use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

#[test]
fn test_stark_degree() -> Result<()> {
@ -1,895 +0,0 @@
use core::borrow::Borrow;
use core::fmt::Debug;
use core::iter::repeat;

use itertools::Itertools;
use num_bigint::BigUint;
use plonky2::field::batch_util::batch_add_inplace;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::plonk_common::{
reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit,
};
use plonky2_util::ceil_div_usize;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::stark::Stark;
/// Represents a filter, which evaluates to 1 if the row must be considered and 0 if it should be ignored.
/// It's an arbitrary degree 2 combination of columns: `products` are the degree 2 terms, and `constants` are
/// the degree 1 terms.
#[derive(Clone, Debug)]
pub(crate) struct Filter<F: Field> {
products: Vec<(Column<F>, Column<F>)>,
constants: Vec<Column<F>>,
}

impl<F: Field> Filter<F> {
pub(crate) fn new(products: Vec<(Column<F>, Column<F>)>, constants: Vec<Column<F>>) -> Self {
Self {
products,
constants,
}
}

/// Returns a filter made of a single column.
pub(crate) fn new_simple(col: Column<F>) -> Self {
Self {
products: vec![],
constants: vec![col],
}
}

/// Given the column values for the current and next rows, evaluates the filter.
pub(crate) fn eval_filter<FE, P, const D: usize>(&self, v: &[P], next_v: &[P]) -> P
where
FE: FieldExtension<D, BaseField = F>,
P: PackedField<Scalar = FE>,
{
self.products
.iter()
.map(|(col1, col2)| col1.eval_with_next(v, next_v) * col2.eval_with_next(v, next_v))
.sum::<P>()
+ self
.constants
.iter()
.map(|col| col.eval_with_next(v, next_v))
.sum::<P>()
}

/// Circuit version of `eval_filter`:
/// Given the column values for the current and next rows, evaluates the filter.
pub(crate) fn eval_filter_circuit<const D: usize>(
&self,
builder: &mut CircuitBuilder<F, D>,
v: &[ExtensionTarget<D>],
next_v: &[ExtensionTarget<D>],
) -> ExtensionTarget<D>
where
F: RichField + Extendable<D>,
{
let prods = self
.products
.iter()
.map(|(col1, col2)| {
let col1_eval = col1.eval_with_next_circuit(builder, v, next_v);
let col2_eval = col2.eval_with_next_circuit(builder, v, next_v);
builder.mul_extension(col1_eval, col2_eval)
})
.collect::<Vec<_>>();

let consts = self
.constants
.iter()
.map(|col| col.eval_with_next_circuit(builder, v, next_v))
.collect::<Vec<_>>();

let prods = builder.add_many_extension(prods);
let consts = builder.add_many_extension(consts);
builder.add_extension(prods, consts)
}

/// Evaluate on a row of a table given in column-major form.
pub(crate) fn eval_table(&self, table: &[PolynomialValues<F>], row: usize) -> F {
self.products
.iter()
.map(|(col1, col2)| col1.eval_table(table, row) * col2.eval_table(table, row))
.sum::<F>()
+ self
.constants
.iter()
.map(|col| col.eval_table(table, row))
.sum()
}
}
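For intuition, here is how the two constructors above combine, with hypothetical column indices: a degree-1 filter keeps rows where one binary flag column is set, and a degree-2 filter keeps rows where two flag columns are both set.

// Keep rows where column 3 equals 1.
let simple = Filter::new_simple(Column::single(3));
// Keep rows where columns 3 and 4 are both 1; the product term
// contributes col3 * col4 to `eval_filter`.
let both = Filter::new(vec![(Column::single(3), Column::single(4))], vec![]);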
/// Represents two linear combinations of columns, corresponding to the current and next row values.
/// Each linear combination is represented as:
/// - a vector of `(usize, F)` corresponding to the column number and the associated multiplicand
/// - the constant of the linear combination.
#[derive(Clone, Debug)]
pub(crate) struct Column<F: Field> {
linear_combination: Vec<(usize, F)>,
next_row_linear_combination: Vec<(usize, F)>,
constant: F,
}

impl<F: Field> Column<F> {
/// Returns the representation of a single column in the current row.
pub(crate) fn single(c: usize) -> Self {
Self {
linear_combination: vec![(c, F::ONE)],
next_row_linear_combination: vec![],
constant: F::ZERO,
}
}

/// Returns multiple single columns in the current row.
pub(crate) fn singles<I: IntoIterator<Item = impl Borrow<usize>>>(
cs: I,
) -> impl Iterator<Item = Self> {
cs.into_iter().map(|c| Self::single(*c.borrow()))
}

/// Returns the representation of a single column in the next row.
pub(crate) fn single_next_row(c: usize) -> Self {
Self {
linear_combination: vec![],
next_row_linear_combination: vec![(c, F::ONE)],
constant: F::ZERO,
}
}

/// Returns multiple single columns for the next row.
pub(crate) fn singles_next_row<I: IntoIterator<Item = impl Borrow<usize>>>(
cs: I,
) -> impl Iterator<Item = Self> {
cs.into_iter().map(|c| Self::single_next_row(*c.borrow()))
}

/// Returns a linear combination corresponding to a constant.
pub(crate) fn constant(constant: F) -> Self {
Self {
linear_combination: vec![],
next_row_linear_combination: vec![],
constant,
}
}

/// Returns a linear combination corresponding to 0.
pub(crate) fn zero() -> Self {
Self::constant(F::ZERO)
}

/// Returns a linear combination corresponding to 1.
pub(crate) fn one() -> Self {
Self::constant(F::ONE)
}

/// Given an iterator of `(usize, F)` and a constant, returns the associated linear combination of columns for the current row.
pub(crate) fn linear_combination_with_constant<I: IntoIterator<Item = (usize, F)>>(
iter: I,
constant: F,
) -> Self {
let v = iter.into_iter().collect::<Vec<_>>();
assert!(!v.is_empty());
debug_assert_eq!(
v.iter().map(|(c, _)| c).unique().count(),
v.len(),
"Duplicate columns."
);
Self {
linear_combination: v,
next_row_linear_combination: vec![],
constant,
}
}

/// Given an iterator of `(usize, F)` and a constant, returns the associated linear combination of columns for the current and the next rows.
pub(crate) fn linear_combination_and_next_row_with_constant<
I: IntoIterator<Item = (usize, F)>,
>(
iter: I,
next_row_iter: I,
constant: F,
) -> Self {
let v = iter.into_iter().collect::<Vec<_>>();
let next_row_v = next_row_iter.into_iter().collect::<Vec<_>>();

assert!(!v.is_empty() || !next_row_v.is_empty());
debug_assert_eq!(
v.iter().map(|(c, _)| c).unique().count(),
v.len(),
"Duplicate columns."
);
debug_assert_eq!(
next_row_v.iter().map(|(c, _)| c).unique().count(),
next_row_v.len(),
"Duplicate columns."
);

Self {
linear_combination: v,
next_row_linear_combination: next_row_v,
constant,
}
}

/// Returns a linear combination of columns, with no additional constant.
pub(crate) fn linear_combination<I: IntoIterator<Item = (usize, F)>>(iter: I) -> Self {
Self::linear_combination_with_constant(iter, F::ZERO)
}

/// Given an iterator of columns (c_0, ..., c_n) containing bits in little endian order:
/// returns the representation of c_0 + 2 * c_1 + ... + 2^n * c_n.
pub(crate) fn le_bits<I: IntoIterator<Item = impl Borrow<usize>>>(cs: I) -> Self {
Self::linear_combination(cs.into_iter().map(|c| *c.borrow()).zip(F::TWO.powers()))
}

/// Given an iterator of columns (c_0, ..., c_n) containing bits in little endian order:
/// returns the representation of c_0 + 2 * c_1 + ... + 2^n * c_n + k where `k` is an
/// additional constant.
pub(crate) fn le_bits_with_constant<I: IntoIterator<Item = impl Borrow<usize>>>(
cs: I,
constant: F,
) -> Self {
Self::linear_combination_with_constant(
cs.into_iter().map(|c| *c.borrow()).zip(F::TWO.powers()),
constant,
)
}

/// Given an iterator of columns (c_0, ..., c_n) containing bytes in little endian order:
/// returns the representation of c_0 + 256 * c_1 + ... + 256^n * c_n.
pub(crate) fn le_bytes<I: IntoIterator<Item = impl Borrow<usize>>>(cs: I) -> Self {
Self::linear_combination(
cs.into_iter()
.map(|c| *c.borrow())
.zip(F::from_canonical_u16(256).powers()),
)
}

/// Given an iterator of columns, returns the representation of their sum.
pub(crate) fn sum<I: IntoIterator<Item = impl Borrow<usize>>>(cs: I) -> Self {
Self::linear_combination(cs.into_iter().map(|c| *c.borrow()).zip(repeat(F::ONE)))
}

/// Given the column values for the current row, returns the evaluation of the linear combination.
pub(crate) fn eval<FE, P, const D: usize>(&self, v: &[P]) -> P
where
FE: FieldExtension<D, BaseField = F>,
P: PackedField<Scalar = FE>,
{
self.linear_combination
.iter()
.map(|&(c, f)| v[c] * FE::from_basefield(f))
.sum::<P>()
+ FE::from_basefield(self.constant)
}

/// Given the column values for the current and next rows, evaluates the current and next linear combinations and returns their sum.
pub(crate) fn eval_with_next<FE, P, const D: usize>(&self, v: &[P], next_v: &[P]) -> P
where
FE: FieldExtension<D, BaseField = F>,
P: PackedField<Scalar = FE>,
{
self.linear_combination
.iter()
.map(|&(c, f)| v[c] * FE::from_basefield(f))
.sum::<P>()
+ self
.next_row_linear_combination
.iter()
.map(|&(c, f)| next_v[c] * FE::from_basefield(f))
.sum::<P>()
+ FE::from_basefield(self.constant)
}

/// Evaluate on a row of a table given in column-major form.
pub(crate) fn eval_table(&self, table: &[PolynomialValues<F>], row: usize) -> F {
let mut res = self
.linear_combination
.iter()
.map(|&(c, f)| table[c].values[row] * f)
.sum::<F>()
+ self.constant;

// If we access the next row at the last row, for sanity, we consider the next row's values to be 0.
// If the lookups are correctly written, the filter should be 0 in that case anyway.
if !self.next_row_linear_combination.is_empty() && row < table[0].values.len() - 1 {
res += self
.next_row_linear_combination
.iter()
.map(|&(c, f)| table[c].values[row + 1] * f)
.sum::<F>();
}

res
}

/// Evaluates the column on all rows.
pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues<F>]) -> Vec<F> {
let length = table[0].len();
(0..length)
.map(|row| self.eval_table(table, row))
.collect::<Vec<F>>()
}

/// Circuit version of `eval`: Given a row's targets, returns their linear combination.
pub(crate) fn eval_circuit<const D: usize>(
&self,
builder: &mut CircuitBuilder<F, D>,
v: &[ExtensionTarget<D>],
) -> ExtensionTarget<D>
where
F: RichField + Extendable<D>,
{
let pairs = self
.linear_combination
.iter()
.map(|&(c, f)| {
(
v[c],
builder.constant_extension(F::Extension::from_basefield(f)),
)
})
.collect::<Vec<_>>();
let constant = builder.constant_extension(F::Extension::from_basefield(self.constant));
builder.inner_product_extension(F::ONE, constant, pairs)
}

/// Circuit version of `eval_with_next`:
/// Given the targets of the current and next row, returns the sum of their linear combinations.
pub(crate) fn eval_with_next_circuit<const D: usize>(
&self,
builder: &mut CircuitBuilder<F, D>,
v: &[ExtensionTarget<D>],
next_v: &[ExtensionTarget<D>],
) -> ExtensionTarget<D>
where
F: RichField + Extendable<D>,
{
let mut pairs = self
.linear_combination
.iter()
.map(|&(c, f)| {
(
v[c],
builder.constant_extension(F::Extension::from_basefield(f)),
)
})
.collect::<Vec<_>>();
let next_row_pairs = self.next_row_linear_combination.iter().map(|&(c, f)| {
(
next_v[c],
builder.constant_extension(F::Extension::from_basefield(f)),
)
});
pairs.extend(next_row_pairs);
let constant = builder.constant_extension(F::Extension::from_basefield(self.constant));
builder.inner_product_extension(F::ONE, constant, pairs)
}
}
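A quick worked check of the little-endian helpers above, with illustrative indices: if bit columns 0, 1, 2 hold the row values 1, 0, 1, then `le_bits` evaluates to 1 + 2*0 + 4*1 = 5.

// c_0 + 2 * c_1 + 4 * c_2, per the `le_bits` doc comment.
let limb = Column::<F>::le_bits([0usize, 1, 2]);
// The equivalent explicit linear combination with powers of two.
let limb_explicit =
    Column::<F>::linear_combination([(0, F::ONE), (1, F::TWO), (2, F::TWO * F::TWO)]);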
pub(crate) type ColumnFilter<'a, F> = (&'a [Column<F>], &'a Option<Filter<F>>);

pub struct Lookup<F: Field> {
/// Columns whose values should be contained in the lookup table.
/// These are the f_i(x) polynomials in the logUp paper.
pub(crate) columns: Vec<Column<F>>,
/// Column containing the lookup table.
/// This is the t(x) polynomial in the paper.
pub(crate) table_column: Column<F>,
/// Column containing the frequencies of `columns` in `table_column`.
/// This is the m(x) polynomial in the paper.
pub(crate) frequencies_column: Column<F>,

/// Columns to filter some elements. There is at most one filter
/// column per column to range-check.
pub(crate) filter_columns: Vec<Option<Filter<F>>>,
}

impl<F: Field> Lookup<F> {
pub(crate) fn num_helper_columns(&self, constraint_degree: usize) -> usize {
// One helper column for each column batch of size `constraint_degree-1`,
// then one column for the inverse of `table + challenge` and one for the `Z` polynomial.
ceil_div_usize(self.columns.len(), constraint_degree - 1) + 1
}
}
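Worked example: with `constraint_degree = 3`, looking columns are batched in pairs, so a lookup over 5 columns needs ceil(5 / 2) + 1 = 4 helper columns, i.e. three batched 1/(x+f) columns plus the running-sum `Z` column:

// ceil_div_usize(5, 3 - 1) + 1 == 3 + 1 == 4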
/// Randomness for a single instance of a permutation check protocol.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
/// Randomness used to combine multiple columns into one.
pub(crate) beta: T,
/// Random offset that's added to the beta-reduced column values.
pub(crate) gamma: T,
}

impl<F: Field> GrandProductChallenge<F> {
pub(crate) fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(
&self,
terms: T,
) -> P
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
T::IntoIter: DoubleEndedIterator,
{
reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma)
}
}

impl GrandProductChallenge<Target> {
pub(crate) fn combine_circuit<F: RichField + Extendable<D>, const D: usize>(
&self,
builder: &mut CircuitBuilder<F, D>,
terms: &[ExtensionTarget<D>],
) -> ExtensionTarget<D> {
let reduced = reduce_with_powers_ext_circuit(builder, terms, self.beta);
let gamma = builder.convert_to_ext(self.gamma);
builder.add_extension(reduced, gamma)
}
}

impl GrandProductChallenge<Target> {
pub(crate) fn combine_base_circuit<F: RichField + Extendable<D>, const D: usize>(
&self,
builder: &mut CircuitBuilder<F, D>,
terms: &[Target],
) -> Target {
let reduced = reduce_with_powers_circuit(builder, terms, self.beta);
builder.add(reduced, self.gamma)
}
}
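`combine` reduces its terms Horner-style with powers of `beta` (so term `t_i` is weighted by `beta^i`) and then adds `gamma`: for terms `[t0, t1, t2]` it returns `t0 + beta * t1 + beta^2 * t2 + gamma`. With `beta = 1`, as the logUp code below uses, this degenerates to the plain sum `t0 + t1 + t2 + gamma`, i.e. the column values shifted by the challenge.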
/// logUp protocol from <https://ia.cr/2022/1530>
/// Compute the helper columns for the lookup argument.
/// Given columns `f0,...,fk` and a column `t`, such that `∪fi ⊆ t`, and challenges `x`,
/// this computes the helper columns `h_i = 1/(x+f_2i) + 1/(x+f_2i+1)`, `g = 1/(x+t)`,
/// and `Z(gx) = Z(x) + sum h_i(x) - m(x)g(x)` where `m` is the frequencies column.
pub(crate) fn lookup_helper_columns<F: Field>(
lookup: &Lookup<F>,
trace_poly_values: &[PolynomialValues<F>],
challenge: F,
constraint_degree: usize,
) -> Vec<PolynomialValues<F>> {
assert_eq!(
constraint_degree, 3,
"TODO: Allow other constraint degrees."
);

assert_eq!(lookup.columns.len(), lookup.filter_columns.len());

let num_total_logup_entries = trace_poly_values[0].values.len() * lookup.columns.len();
assert!(BigUint::from(num_total_logup_entries) < F::characteristic());

let num_helper_columns = lookup.num_helper_columns(constraint_degree);

let looking_cols = lookup
.columns
.iter()
.map(|col| vec![col.clone()])
.collect::<Vec<Vec<Column<F>>>>();

let grand_challenge = GrandProductChallenge {
beta: F::ONE,
gamma: challenge,
};

let columns_filters = looking_cols
.iter()
.zip(lookup.filter_columns.iter())
.map(|(col, filter)| (&col[..], filter))
.collect::<Vec<_>>();
// For each batch of `constraint_degree-1` columns `fi`, compute `sum 1/(f_i+challenge)` and
// add it to the helper columns.
// Note: these are the h_k(x) polynomials in the paper, with a few differences:
// * Here, the first ratio m_0(x)/phi_0(x) is not included with the columns batched up to create the
// h_k polynomials; instead there's a separate helper column for it (see below).
// * Here, we use 1 instead of -1 as the numerator (and subtract later).
// * Here, for now, the batch size (l) is always constraint_degree - 1 = 2.
// * Here, there are filters for the columns, to only select some rows
// in a given column.
let mut helper_columns = get_helper_cols(
trace_poly_values,
trace_poly_values[0].len(),
&columns_filters,
grand_challenge,
constraint_degree,
);

// Add `1/(table+challenge)` to the helper columns.
// This is 1/phi_0(x) = 1/(x + t(x)) from the paper.
// Here, we don't include m(x) in the numerator, instead multiplying it with this column later.
let mut table = lookup.table_column.eval_all_rows(trace_poly_values);
for x in table.iter_mut() {
*x = challenge + *x;
}
let table_inverse: Vec<F> = F::batch_multiplicative_inverse(&table);

// Compute the `Z` polynomial with `Z(1)=0` and `Z(gx) = Z(x) + sum h_i(x) - frequencies(x)g(x)`.
// This enforces the check from the paper, that the sum of the h_k(x) polynomials is 0 over H.
// In the paper, that sum includes m(x)/(x + t(x)) = frequencies(x) * g(x), because that was bundled
// into the h_k(x) polynomials.
let frequencies = &lookup.frequencies_column.eval_all_rows(trace_poly_values);
let mut z = Vec::with_capacity(frequencies.len());
z.push(F::ZERO);
for i in 0..frequencies.len() - 1 {
let x = helper_columns[..num_helper_columns - 1]
.iter()
.map(|col| col.values[i])
.sum::<F>()
- frequencies[i] * table_inverse[i];
z.push(z[i] + x);
}
helper_columns.push(z.into());

helper_columns
}
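A tiny numeric instance of the identity this function materializes: take a table column `t = [1, 2, 3]`, looking values `f = [1, 2, 2]`, and frequencies `m = [1, 2, 0]`. For any challenge `x`,

1/(x+1) + 1/(x+2) + 1/(x+2) = 1/(x+1) + 2/(x+2) + 0/(x+3),

so the running total of helper terms minus `m(x) * 1/(x+t(x))` returns to zero after the last row, which is exactly what `Z(1) = 0` plus the recurrence `Z(gx) = Z(x) + sum h_i(x) - m(x)g(x)` enforces.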
/// Given data associated to a lookup, check the associated helper polynomials.
pub(crate) fn eval_helper_columns<F, FE, P, const D: usize, const D2: usize>(
filter: &[Option<Filter<F>>],
columns: &[Vec<P>],
local_values: &[P],
next_values: &[P],
helper_columns: &[P],
constraint_degree: usize,
challenges: &GrandProductChallenge<F>,
consumer: &mut ConstraintConsumer<P>,
) where
F: RichField + Extendable<D>,
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
if !helper_columns.is_empty() {
for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() {
let fs =
&filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()];
let h = helper_columns[j];

match chunk.len() {
2 => {
let combin0 = challenges.combine(&chunk[0]);
let combin1 = challenges.combine(chunk[1].iter());

let f0 = if let Some(filter0) = &fs[0] {
filter0.eval_filter(local_values, next_values)
} else {
P::ONES
};
let f1 = if let Some(filter1) = &fs[1] {
filter1.eval_filter(local_values, next_values)
} else {
P::ONES
};

consumer.constraint(combin1 * combin0 * h - f0 * combin1 - f1 * combin0);
}
1 => {
let combin = challenges.combine(&chunk[0]);
let f0 = if let Some(filter1) = &fs[0] {
filter1.eval_filter(local_values, next_values)
} else {
P::ONES
};
consumer.constraint(combin * h - f0);
}

_ => todo!("Allow other constraint degrees"),
}
}
}
}
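The degree-2 branch above is the cleared-denominator form of `h = f0/(x+c0) + f1/(x+c1)`: multiplying through by `(x+c0)(x+c1)` yields

h * (x+c0) * (x+c1) - f0 * (x+c1) - f1 * (x+c0) = 0,

which is the expression handed to `consumer.constraint`, with `combin0` and `combin1` being the challenge-combined values `x+c0` and `x+c1`. The length-1 branch is the same identity with a single term.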
/// Circuit version of `eval_helper_columns`.
/// Given data associated to a lookup (either a CTL or a range-check), check the associated helper polynomials.
pub(crate) fn eval_helper_columns_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
filter: &[Option<Filter<F>>],
columns: &[Vec<ExtensionTarget<D>>],
local_values: &[ExtensionTarget<D>],
next_values: &[ExtensionTarget<D>],
helper_columns: &[ExtensionTarget<D>],
constraint_degree: usize,
challenges: &GrandProductChallenge<Target>,
consumer: &mut RecursiveConstraintConsumer<F, D>,
) {
if !helper_columns.is_empty() {
for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() {
let fs =
&filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()];
let h = helper_columns[j];

let one = builder.one_extension();
match chunk.len() {
2 => {
let combin0 = challenges.combine_circuit(builder, &chunk[0]);
let combin1 = challenges.combine_circuit(builder, &chunk[1]);

let f0 = if let Some(filter0) = &fs[0] {
filter0.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};
let f1 = if let Some(filter1) = &fs[1] {
filter1.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};

let constr = builder.mul_sub_extension(combin0, h, f0);
let constr = builder.mul_extension(constr, combin1);
let f1_constr = builder.mul_extension(f1, combin0);
let constr = builder.sub_extension(constr, f1_constr);

consumer.constraint(builder, constr);
}
1 => {
let combin = challenges.combine_circuit(builder, &chunk[0]);
let f0 = if let Some(filter1) = &fs[0] {
filter1.eval_filter_circuit(builder, local_values, next_values)
} else {
one
};
let constr = builder.mul_sub_extension(combin, h, f0);
consumer.constraint(builder, constr);
}

_ => todo!("Allow other constraint degrees"),
}
}
}
}
/// Given a STARK's trace, and the data associated to one lookup (either CTL or range check),
/// returns the associated helper polynomials.
pub(crate) fn get_helper_cols<F: Field>(
trace: &[PolynomialValues<F>],
degree: usize,
columns_filters: &[ColumnFilter<F>],
challenge: GrandProductChallenge<F>,
constraint_degree: usize,
) -> Vec<PolynomialValues<F>> {
let num_helper_columns = ceil_div_usize(columns_filters.len(), constraint_degree - 1);

let mut helper_columns = Vec::with_capacity(num_helper_columns);

for mut cols_filts in &columns_filters.iter().chunks(constraint_degree - 1) {
let (first_col, first_filter) = cols_filts.next().unwrap();

let mut filter_col = Vec::with_capacity(degree);
let first_combined = (0..degree)
.map(|d| {
let f = if let Some(filter) = first_filter {
let f = filter.eval_table(trace, d);
filter_col.push(f);
f
} else {
filter_col.push(F::ONE);
F::ONE
};
if f.is_one() {
let evals = first_col
.iter()
.map(|c| c.eval_table(trace, d))
.collect::<Vec<F>>();
challenge.combine(evals.iter())
} else {
assert_eq!(f, F::ZERO, "Non-binary filter?");
// Dummy value. Cannot be zero since it will be batch-inverted.
F::ONE
}
})
.collect::<Vec<F>>();

let mut acc = F::batch_multiplicative_inverse(&first_combined);
for d in 0..degree {
if filter_col[d].is_zero() {
acc[d] = F::ZERO;
}
}

for (col, filt) in cols_filts {
let mut filter_col = Vec::with_capacity(degree);
let mut combined = (0..degree)
.map(|d| {
let f = if let Some(filter) = filt {
let f = filter.eval_table(trace, d);
filter_col.push(f);
f
} else {
filter_col.push(F::ONE);
F::ONE
};
if f.is_one() {
let evals = col
.iter()
.map(|c| c.eval_table(trace, d))
.collect::<Vec<F>>();
challenge.combine(evals.iter())
} else {
assert_eq!(f, F::ZERO, "Non-binary filter?");
// Dummy value. Cannot be zero since it will be batch-inverted.
F::ONE
}
})
.collect::<Vec<F>>();

combined = F::batch_multiplicative_inverse(&combined);

for d in 0..degree {
if filter_col[d].is_zero() {
combined[d] = F::ZERO;
}
}

batch_add_inplace(&mut acc, &combined);
}

helper_columns.push(acc.into());
}
assert_eq!(helper_columns.len(), num_helper_columns);

helper_columns
}
pub(crate) struct LookupCheckVars<F, FE, P, const D2: usize>
where
F: Field,
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
pub(crate) local_values: Vec<P>,
pub(crate) next_values: Vec<P>,
pub(crate) challenges: Vec<F>,
}

/// Constraints for the logUp lookup argument.
pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2: usize>(
stark: &S,
lookups: &[Lookup<F>],
vars: &S::EvaluationFrame<FE, P, D2>,
lookup_vars: LookupCheckVars<F, FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
F: RichField + Extendable<D>,
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
S: Stark<F, D>,
{
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();
let degree = stark.constraint_degree();
assert_eq!(degree, 3, "TODO: Allow other constraint degrees.");
let mut start = 0;
for lookup in lookups {
let num_helper_columns = lookup.num_helper_columns(degree);
for &challenge in &lookup_vars.challenges {
let grand_challenge = GrandProductChallenge {
beta: F::ONE,
gamma: challenge,
};
let lookup_columns = lookup
.columns
.iter()
.map(|col| vec![col.eval_with_next(local_values, next_values)])
.collect::<Vec<Vec<P>>>();

// For each chunk, check that `h_i (x+f_2i) (x+f_{2i+1}) = (x+f_2i) * filter_{2i+1} + (x+f_{2i+1}) * filter_2i` if the chunk has length 2,
// or if it has length 1, check that `h_i * (x+f_2i) = filter_2i`, where x is the challenge.
eval_helper_columns(
&lookup.filter_columns,
&lookup_columns,
local_values,
next_values,
&lookup_vars.local_values[start..start + num_helper_columns - 1],
degree,
&grand_challenge,
yield_constr,
);

let challenge = FE::from_basefield(challenge);

// Check the `Z` polynomial.
let z = lookup_vars.local_values[start + num_helper_columns - 1];
let next_z = lookup_vars.next_values[start + num_helper_columns - 1];
let table_with_challenge = lookup.table_column.eval(local_values) + challenge;
let y = lookup_vars.local_values[start..start + num_helper_columns - 1]
.iter()
.fold(P::ZEROS, |acc, x| acc + *x)
* table_with_challenge
- lookup.frequencies_column.eval(local_values);
// Check that in the first row, z = 0.
yield_constr.constraint_first_row(z);
yield_constr.constraint((next_z - z) * table_with_challenge - y);
start += num_helper_columns;
}
}
}
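The final transition constraint is the `Z` recurrence from `lookup_helper_columns`, cleared of its denominator: since `y = (sum h_i) * (t + x) - m`, the check `(next_z - z) * (t + x) - y = 0` is equivalent to `next_z = z + sum h_i - m/(t + x)` whenever `t + x` is nonzero, and `constraint_first_row(z)` pins `Z` to 0 at the start of the trace.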
pub(crate) struct LookupCheckVarsTarget<const D: usize> {
pub(crate) local_values: Vec<ExtensionTarget<D>>,
pub(crate) next_values: Vec<ExtensionTarget<D>>,
pub(crate) challenges: Vec<Target>,
}

pub(crate) fn eval_ext_lookups_circuit<
F: RichField + Extendable<D>,
S: Stark<F, D>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
vars: &S::EvaluationFrameTarget,
lookup_vars: LookupCheckVarsTarget<D>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let degree = stark.constraint_degree();
let lookups = stark.lookups();

let local_values = vars.get_local_values();
let next_values = vars.get_next_values();
assert_eq!(degree, 3, "TODO: Allow other constraint degrees.");
let mut start = 0;
for lookup in lookups {
let num_helper_columns = lookup.num_helper_columns(degree);
let col_values = lookup
.columns
.iter()
.map(|col| vec![col.eval_with_next_circuit(builder, local_values, next_values)])
.collect::<Vec<_>>();

for &challenge in &lookup_vars.challenges {
let grand_challenge = GrandProductChallenge {
beta: builder.one(),
gamma: challenge,
};

eval_helper_columns_circuit(
builder,
&lookup.filter_columns,
&col_values,
local_values,
next_values,
&lookup_vars.local_values[start..start + num_helper_columns - 1],
degree,
&grand_challenge,
yield_constr,
);
let challenge = builder.convert_to_ext(challenge);

let z = lookup_vars.local_values[start + num_helper_columns - 1];
let next_z = lookup_vars.next_values[start + num_helper_columns - 1];
let table_column = lookup
.table_column
.eval_circuit(builder, vars.get_local_values());
let table_with_challenge = builder.add_extension(table_column, challenge);
let mut y = builder.add_many_extension(
&lookup_vars.local_values[start..start + num_helper_columns - 1],
);

let frequencies_column = lookup
.frequencies_column
.eval_circuit(builder, vars.get_local_values());
y = builder.mul_extension(y, table_with_challenge);
y = builder.sub_extension(y, frequencies_column);

// Check that in the first row, z = 0.
yield_constr.constraint_first_row(builder, z);
let mut constraint = builder.sub_extension(next_z, z);
constraint = builder.mul_extension(constraint, table_with_challenge);
constraint = builder.sub_extension(constraint, y);
yield_constr.constraint(builder, constraint);
start += num_helper_columns;
}
}
}
@ -12,18 +12,19 @@ use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
use plonky2_maybe_rayon::*;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::evaluation_frame::StarkEvaluationFrame;
use starky::lookup::{Column, Filter, Lookup};
use starky::stark::Stark;

use super::segments::Segment;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::{Column, Filter, Lookup};
use crate::all_stark::EvmStarkFrame;
use crate::memory::columns::{
value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER, FILTER,
FREQUENCIES, INITIALIZE_AUX, IS_READ, NUM_COLUMNS, RANGE_CHECK, SEGMENT_FIRST_CHANGE,
TIMESTAMP, VIRTUAL_FIRST_CHANGE,
};
use crate::memory::VALUE_LIMBS;
use crate::stark::Stark;
use crate::witness::memory::MemoryOpKind::Read;
use crate::witness::memory::{MemoryAddress, MemoryOp};
@ -268,12 +269,12 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F, D> {
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
type EvaluationFrame<FE, P, const D2: usize> = EvmStarkFrame<P, FE, NUM_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;
type EvaluationFrameTarget = EvmStarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, NUM_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
@ -569,15 +570,19 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
],
}]
}

fn requires_ctls(&self) -> bool {
true
}
}

#[cfg(test)]
pub(crate) mod tests {
use anyhow::Result;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

use crate::memory::memory_stark::MemoryStark;
use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

#[test]
fn test_stark_degree() -> Result<()> {
evm/src/proof.rs
@ -1,33 +1,24 @@
use ethereum_types::{Address, H256, U256};
use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::fri::proof::{FriChallenges, FriChallengesTarget, FriProof, FriProofTarget};
use plonky2::fri::structure::{
FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget,
};
use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::target::{BoolTarget, Target};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::plonk::config::GenericConfig;
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
use plonky2_maybe_rayon::*;
use serde::{Deserialize, Serialize};
use starky::config::StarkConfig;
use starky::lookup::GrandProductChallengeSet;
use starky::proof::{MultiProof, StarkProofChallenges};

use crate::all_stark::NUM_TABLES;
use crate::config::StarkConfig;
use crate::cross_table_lookup::GrandProductChallengeSet;
use crate::util::{get_h160, get_h256, h2u};

/// A STARK proof for each table, plus some metadata used to create recursive wrapper proofs.
#[derive(Debug, Clone)]
pub struct AllProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Proofs for all the different STARK modules.
pub stark_proofs: [StarkProofWithMetadata<F, C, D>; NUM_TABLES],
/// Cross-table lookup challenges.
pub(crate) ctl_challenges: GrandProductChallengeSet<F>,
/// A multi-proof containing all proofs for the different STARK modules and their
/// cross-table lookup challenges.
pub multi_proof: MultiProof<F, C, D, NUM_TABLES>,
/// Public memory values used for the recursive proofs.
pub public_values: PublicValues,
}
@ -35,7 +26,7 @@ pub struct AllProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, co
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
/// Returns the degree (i.e. the trace length) of each STARK.
pub fn degree_bits(&self, config: &StarkConfig) -> [usize; NUM_TABLES] {
core::array::from_fn(|i| self.stark_proofs[i].proof.recover_degree_bits(config))
self.multi_proof.recover_degree_bits(config)
}
}
@ -821,309 +812,3 @@ impl ExtraBlockDataTarget {
builder.connect(ed0.gas_used_after, ed1.gas_used_after);
}
}

/// Merkle caps and openings that form the proof of a single STARK.
#[derive(Debug, Clone)]
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of lookup helper and CTL columns.
pub auxiliary_polys_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of quotient polynomial evaluations.
pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: FriProof<F, C::Hasher, D>,
}

/// A `StarkProof` along with some metadata about the initial Fiat-Shamir state, which is used when
/// creating a recursive wrapper proof around a STARK proof.
#[derive(Debug, Clone)]
pub struct StarkProofWithMetadata<F, C, const D: usize>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
{
/// Initial Fiat-Shamir state.
pub(crate) init_challenger_state: <C::Hasher as Hasher<F>>::Permutation,
/// Proof for a single STARK.
pub(crate) proof: StarkProof<F, C, D>,
}

impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> StarkProof<F, C, D> {
/// Recover the length of the trace from a STARK proof and a STARK config.
pub fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
.initial_trees_proof
.evals_proofs[0]
.1;
let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
lde_bits - config.fri_config.rate_bits
}

/// Returns the number of cross-table lookup polynomials computed for the current STARK.
pub fn num_ctl_zs(&self) -> usize {
self.openings.ctl_zs_first.len()
}
}
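Worked example of the recovery arithmetic: each FRI query path carries one Merkle sibling per tree level above the cap, so `cap_height + siblings.len()` is the log2 size of the LDE domain. With `cap_height = 4`, 9 siblings, and `rate_bits = 1`, the LDE has 2^13 points and `recover_degree_bits` returns 13 - 1 = 12, i.e. a trace of 2^12 rows.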
/// Circuit version of `StarkProof`.
/// Merkle caps and openings that form the proof of a single STARK.
#[derive(Eq, PartialEq, Debug)]
pub(crate) struct StarkProofTarget<const D: usize> {
/// `Target` for the Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCapTarget,
/// `Target` for the Merkle cap of LDEs of lookup helper and CTL columns.
pub auxiliary_polys_cap: MerkleCapTarget,
/// `Target` for the Merkle cap of LDEs of quotient polynomial evaluations.
pub quotient_polys_cap: MerkleCapTarget,
/// `Target`s for the purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSetTarget<D>,
/// `Target`s for the batch FRI argument for all openings.
pub opening_proof: FriProofTarget<D>,
}

impl<const D: usize> StarkProofTarget<D> {
/// Serializes a STARK proof.
pub(crate) fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
buffer.write_target_merkle_cap(&self.trace_cap)?;
buffer.write_target_merkle_cap(&self.auxiliary_polys_cap)?;
buffer.write_target_merkle_cap(&self.quotient_polys_cap)?;
buffer.write_target_fri_proof(&self.opening_proof)?;
self.openings.to_buffer(buffer)?;
Ok(())
}

/// Deserializes a STARK proof.
pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
let trace_cap = buffer.read_target_merkle_cap()?;
let auxiliary_polys_cap = buffer.read_target_merkle_cap()?;
let quotient_polys_cap = buffer.read_target_merkle_cap()?;
let opening_proof = buffer.read_target_fri_proof()?;
let openings = StarkOpeningSetTarget::from_buffer(buffer)?;

Ok(Self {
trace_cap,
auxiliary_polys_cap,
quotient_polys_cap,
openings,
opening_proof,
})
}

/// Recover the length of the trace from a STARK proof and a STARK config.
pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
.initial_trees_proof
.evals_proofs[0]
.1;
let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
lde_bits - config.fri_config.rate_bits
}
}

/// Randomness used for a STARK proof.
pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
/// Random values used to combine STARK constraints.
pub stark_alphas: Vec<F>,

/// Point at which the STARK polynomials are opened.
pub stark_zeta: F::Extension,

/// Randomness used in FRI.
pub fri_challenges: FriChallenges<F, D>,
}

/// Circuit version of `StarkProofChallenges`.
pub(crate) struct StarkProofChallengesTarget<const D: usize> {
/// `Target`s for the random values used to combine STARK constraints.
pub stark_alphas: Vec<Target>,
/// `ExtensionTarget` for the point at which the STARK polynomials are opened.
pub stark_zeta: ExtensionTarget<D>,
/// `Target`s for the randomness used in FRI.
pub fri_challenges: FriChallengesTarget<D>,
}

/// Purported values of each polynomial at the challenge point.
#[derive(Debug, Clone)]
pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
/// Openings of trace polynomials at `zeta`.
pub local_values: Vec<F::Extension>,
/// Openings of trace polynomials at `g * zeta`.
pub next_values: Vec<F::Extension>,
/// Openings of lookups and cross-table lookups `Z` polynomials at `zeta`.
pub auxiliary_polys: Vec<F::Extension>,
/// Openings of lookups and cross-table lookups `Z` polynomials at `g * zeta`.
pub auxiliary_polys_next: Vec<F::Extension>,
/// Openings of cross-table lookups `Z` polynomials at `1`.
pub ctl_zs_first: Vec<F>,
/// Openings of quotient polynomials at `zeta`.
pub quotient_polys: Vec<F::Extension>,
}

impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
|
||||
/// Returns a `StarkOpeningSet` given all the polynomial commitments, the number of permutation `Z`polynomials,
|
||||
/// the evaluation point and a generator `g`.
|
||||
/// Polynomials are evaluated at point `zeta` and, if necessary, at `g * zeta`.
|
||||
pub fn new<C: GenericConfig<D, F = F>>(
|
||||
zeta: F::Extension,
|
||||
g: F,
|
||||
trace_commitment: &PolynomialBatch<F, C, D>,
|
||||
auxiliary_polys_commitment: &PolynomialBatch<F, C, D>,
|
||||
quotient_commitment: &PolynomialBatch<F, C, D>,
|
||||
num_lookup_columns: usize,
|
||||
num_ctl_polys: &[usize],
|
||||
) -> Self {
|
||||
let total_num_helper_cols: usize = num_ctl_polys.iter().sum();
|
||||
|
||||
// Batch evaluates polynomials on the LDE, at a point `z`.
|
||||
let eval_commitment = |z: F::Extension, c: &PolynomialBatch<F, C, D>| {
|
||||
c.polynomials
|
||||
.par_iter()
|
||||
.map(|p| p.to_extension().eval(z))
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
// Batch evaluates polynomials at a base field point `z`.
|
||||
let eval_commitment_base = |z: F, c: &PolynomialBatch<F, C, D>| {
|
||||
c.polynomials
|
||||
.par_iter()
|
||||
.map(|p| p.eval(z))
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
let auxiliary_first = eval_commitment_base(F::ONE, auxiliary_polys_commitment);
|
||||
let ctl_zs_first = auxiliary_first[num_lookup_columns + total_num_helper_cols..].to_vec();
|
||||
// `g * zeta`.
|
||||
let zeta_next = zeta.scalar_mul(g);
|
||||
Self {
|
||||
local_values: eval_commitment(zeta, trace_commitment),
|
||||
next_values: eval_commitment(zeta_next, trace_commitment),
|
||||
auxiliary_polys: eval_commitment(zeta, auxiliary_polys_commitment),
|
||||
auxiliary_polys_next: eval_commitment(zeta_next, auxiliary_polys_commitment),
|
||||
ctl_zs_first,
|
||||
quotient_polys: eval_commitment(zeta, quotient_commitment),
|
||||
}
|
||||
}
|
||||
|
||||
/// Constructs the openings required by FRI.
|
||||
/// All openings but `ctl_zs_first` are grouped together.
|
||||
pub(crate) fn to_fri_openings(&self) -> FriOpenings<F, D> {
|
||||
let zeta_batch = FriOpeningBatch {
|
||||
values: self
|
||||
.local_values
|
||||
.iter()
|
||||
.chain(&self.auxiliary_polys)
|
||||
.chain(&self.quotient_polys)
|
||||
.copied()
|
||||
.collect_vec(),
|
||||
};
|
||||
let zeta_next_batch = FriOpeningBatch {
|
||||
values: self
|
||||
.next_values
|
||||
.iter()
|
||||
.chain(&self.auxiliary_polys_next)
|
||||
.copied()
|
||||
.collect_vec(),
|
||||
};
|
||||
debug_assert!(!self.ctl_zs_first.is_empty());
|
||||
let ctl_first_batch = FriOpeningBatch {
|
||||
values: self
|
||||
.ctl_zs_first
|
||||
.iter()
|
||||
.copied()
|
||||
.map(F::Extension::from_basefield)
|
||||
.collect(),
|
||||
};
|
||||
|
||||
FriOpenings {
|
||||
batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch],
|
||||
}
|
||||
}
|
||||
}
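
A toy illustration of the three-batch layout that `to_fri_openings` produces; the strings below are placeholders for the actual extension-field openings, and the batch order must match the FRI instance the verifier reconstructs:

fn main() {
    // Placeholder names only; the real batches hold field elements.
    let batches = [
        ("zeta", vec!["local_values", "auxiliary_polys", "quotient_polys"]),
        ("g * zeta", vec!["next_values", "auxiliary_polys_next"]),
        ("1", vec!["ctl_zs_first (lifted to the extension field)"]),
    ];
    for (point, polys) in &batches {
        println!("opened at {point}: {polys:?}");
    }
}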

/// Circuit version of `StarkOpeningSet`.
/// `Target`s for the purported values of each polynomial at the challenge point.
#[derive(Eq, PartialEq, Debug)]
pub(crate) struct StarkOpeningSetTarget<const D: usize> {
/// `ExtensionTarget`s for the openings of trace polynomials at `zeta`.
pub local_values: Vec<ExtensionTarget<D>>,
/// `ExtensionTarget`s for the opening of trace polynomials at `g * zeta`.
pub next_values: Vec<ExtensionTarget<D>>,
/// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `zeta`.
pub auxiliary_polys: Vec<ExtensionTarget<D>>,
/// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `g * zeta`.
pub auxiliary_polys_next: Vec<ExtensionTarget<D>>,
/// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at 1.
pub ctl_zs_first: Vec<Target>,
/// `ExtensionTarget`s for the opening of quotient polynomials at `zeta`.
pub quotient_polys: Vec<ExtensionTarget<D>>,
}

impl<const D: usize> StarkOpeningSetTarget<D> {
/// Serializes a STARK's opening set.
pub(crate) fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
buffer.write_target_ext_vec(&self.local_values)?;
buffer.write_target_ext_vec(&self.next_values)?;
buffer.write_target_ext_vec(&self.auxiliary_polys)?;
buffer.write_target_ext_vec(&self.auxiliary_polys_next)?;
buffer.write_target_vec(&self.ctl_zs_first)?;
buffer.write_target_ext_vec(&self.quotient_polys)?;
Ok(())
}

/// Deserializes a STARK's opening set.
pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
let local_values = buffer.read_target_ext_vec::<D>()?;
let next_values = buffer.read_target_ext_vec::<D>()?;
let auxiliary_polys = buffer.read_target_ext_vec::<D>()?;
let auxiliary_polys_next = buffer.read_target_ext_vec::<D>()?;
let ctl_zs_first = buffer.read_target_vec()?;
let quotient_polys = buffer.read_target_ext_vec::<D>()?;

Ok(Self {
local_values,
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first,
quotient_polys,
})
}

/// Circuit version of `to_fri_openings` for `FriOpenings`.
/// Constructs the `Target`s for the circuit version of FRI.
/// All openings but `ctl_zs_first` are grouped together.
pub(crate) fn to_fri_openings(&self, zero: Target) -> FriOpeningsTarget<D> {
let zeta_batch = FriOpeningBatchTarget {
values: self
.local_values
.iter()
.chain(&self.auxiliary_polys)
.chain(&self.quotient_polys)
.copied()
.collect_vec(),
};
let zeta_next_batch = FriOpeningBatchTarget {
values: self
.next_values
.iter()
.chain(&self.auxiliary_polys_next)
.copied()
.collect_vec(),
};
debug_assert!(!self.ctl_zs_first.is_empty());
let ctl_first_batch = FriOpeningBatchTarget {
values: self
.ctl_zs_first
.iter()
.copied()
.map(|t| t.to_ext_target(zero))
.collect(),
};

FriOpeningsTarget {
batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch],
}
}
}

@ -1,44 +1,34 @@
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

use anyhow::{anyhow, ensure, Result};
use anyhow::{anyhow, Result};
use hashbrown::HashMap;
use itertools::Itertools;
use once_cell::sync::Lazy;
use plonky2::field::extension::Extendable;
use plonky2::field::packable::Packable;
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::field::types::Field;
use plonky2::field::zero_poly_coset::ZeroPolyOnCoset;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericConfig;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
use plonky2_maybe_rayon::*;
use plonky2_util::{log2_ceil, log2_strict};
use starky::config::StarkConfig;
#[cfg(debug_assertions)]
use starky::cross_table_lookup::debug_utils::check_ctls;
use starky::cross_table_lookup::{get_ctl_data, CtlData};
use starky::lookup::GrandProductChallengeSet;
use starky::proof::{MultiProof, StarkProofWithMetadata};
use starky::prover::prove_with_commitment;
use starky::stark::Stark;

use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cross_table_lookup::{
cross_table_lookup_data, get_grand_product_challenge_set, CtlCheckVars, CtlData,
GrandProductChallengeSet,
};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::generation::{generate_traces, GenerationInputs};
use crate::get_challenges::observe_public_values;
use crate::lookup::{lookup_helper_columns, Lookup, LookupCheckVars};
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof, StarkProofWithMetadata};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
#[cfg(test)]
use crate::{
cross_table_lookup::testutils::check_ctls, verifier::testutils::get_memory_extra_looking_values,
};
use crate::proof::{AllProof, PublicValues};
#[cfg(debug_assertions)]
use crate::verifier::debug_utils::get_memory_extra_looking_values;

/// Generate traces, then create all STARK proofs.
pub fn prove<F, C, const D: usize>(
@ -124,16 +114,15 @@ where
observe_public_values::<F, C, D>(&mut challenger, &public_values)
.map_err(|_| anyhow::Error::msg("Invalid conversion of public values."))?;

// Get challenges for the cross-table lookups.
let ctl_challenges = get_grand_product_challenge_set(&mut challenger, config.num_challenges);
// For each STARK, compute its cross-table lookup Z polynomials and get the associated `CtlData`.
let ctl_data_per_table = timed!(
let (ctl_challenges, ctl_data_per_table) = timed!(
timing,
"compute CTL data",
cross_table_lookup_data::<F, D, NUM_TABLES>(
get_ctl_data::<F, C, D, NUM_TABLES>(
config,
&trace_poly_values,
&all_stark.cross_table_lookups,
&ctl_challenges,
&mut challenger,
all_stark.arithmetic_stark.constraint_degree()
)
);
@ -154,18 +143,26 @@ where
)?
);

#[cfg(test)]
// This is an expensive check, hence it is only run when `debug_assertions` are enabled.
#[cfg(debug_assertions)]
{
let mut extra_values = HashMap::new();
extra_values.insert(
*Table::Memory,
get_memory_extra_looking_values(&public_values),
);
check_ctls(
&trace_poly_values,
&all_stark.cross_table_lookups,
&get_memory_extra_looking_values(&public_values),
&extra_values,
);
}

Ok(AllProof {
stark_proofs,
ctl_challenges,
multi_proof: MultiProof {
stark_proofs,
ctl_challenges,
},
public_values,
})
}
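
A minimal call sketch for the prover entry point above, assuming the usual `prove(all_stark, config, inputs, timing, abort_signal)` signature and a `GenerationInputs` value built elsewhere; illustrative only, not a test from the repository:

use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::util::timing::TimingTree;
use starky::config::StarkConfig;

use crate::all_stark::AllStark;
use crate::generation::GenerationInputs;

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;

fn run(inputs: GenerationInputs) -> anyhow::Result<()> {
    let all_stark = AllStark::<F, D>::default();
    let config = StarkConfig::standard_fast_config();
    let mut timing = TimingTree::new("prove", log::Level::Debug);
    // `None` disables cancellation; pass `Some(std::sync::Arc::new(AtomicBool::new(false)))`
    // to be able to abort the job from another thread.
    let _proof = prove::<F, C, D>(&all_stark, &config, inputs, &mut timing, None)?;
    Ok(())
}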
@ -331,371 +328,26 @@ where
{
check_abort_signal(abort_signal.clone())?;

let degree = trace_poly_values[0].len();
let degree_bits = log2_strict(degree);
let fri_params = config.fri_params(degree_bits);
let rate_bits = config.fri_config.rate_bits;
let cap_height = config.fri_config.cap_height;
assert!(
fri_params.total_arities() <= degree_bits + rate_bits - cap_height,
"FRI total reduction arity is too large.",
);

// Clear buffered outputs.
let init_challenger_state = challenger.compact();

let constraint_degree = stark.constraint_degree();
let lookup_challenges = stark.uses_lookups().then(|| {
ctl_challenges
.challenges
.iter()
.map(|ch| ch.beta)
.collect::<Vec<_>>()
});
let lookups = stark.lookups();
let lookup_helper_columns = timed!(
timing,
"compute lookup helper columns",
lookup_challenges.as_ref().map(|challenges| {
let mut columns = Vec::new();
for lookup in &lookups {
for &challenge in challenges {
columns.extend(lookup_helper_columns(
lookup,
trace_poly_values,
challenge,
constraint_degree,
));
}
}
columns
})
);
let num_lookup_columns = lookup_helper_columns.as_ref().map(|v| v.len()).unwrap_or(0);

// We add CTLs to the permutation arguments so that we can batch commit to
// all auxiliary polynomials.
let auxiliary_polys = match lookup_helper_columns {
None => {
let mut ctl_polys = ctl_data.ctl_helper_polys();
ctl_polys.extend(ctl_data.ctl_z_polys());
ctl_polys
}
Some(mut lookup_columns) => {
lookup_columns.extend(ctl_data.ctl_helper_polys());
lookup_columns.extend(ctl_data.ctl_z_polys());
lookup_columns
}
};
assert!(!auxiliary_polys.is_empty(), "No CTL?");

// Get the polynomial commitments for all auxiliary polynomials.
let auxiliary_polys_commitment = timed!(
timing,
"compute auxiliary polynomials commitment",
PolynomialBatch::from_values(
auxiliary_polys,
rate_bits,
false,
config.fri_config.cap_height,
timing,
None,
)
);

let auxiliary_polys_cap = auxiliary_polys_commitment.merkle_tree.cap.clone();
challenger.observe_cap(&auxiliary_polys_cap);

let alphas = challenger.get_n_challenges(config.num_challenges);

let num_ctl_polys = ctl_data.num_ctl_helper_polys();

#[cfg(test)]
{
check_constraints(
stark,
trace_commitment,
&auxiliary_polys_commitment,
lookup_challenges.as_ref(),
&lookups,
ctl_data,
alphas.clone(),
degree_bits,
num_lookup_columns,
&num_ctl_polys,
);
}

check_abort_signal(abort_signal.clone())?;

let quotient_polys = timed!(
timing,
"compute quotient polys",
compute_quotient_polys::<F, <F as Packable>::Packing, C, S, D>(
stark,
trace_commitment,
&auxiliary_polys_commitment,
lookup_challenges.as_ref(),
&lookups,
ctl_data,
alphas,
degree_bits,
num_lookup_columns,
&num_ctl_polys,
config,
)
);
let all_quotient_chunks = timed!(
timing,
"split quotient polys",
quotient_polys
.into_par_iter()
.flat_map(|mut quotient_poly| {
quotient_poly
.trim_to_len(degree * stark.quotient_degree_factor())
.expect(
"Quotient has failed, the vanishing polynomial is not divisible by Z_H",
);
// Split quotient into degree-n chunks.
quotient_poly.chunks(degree)
})
.collect()
);
// Commit to the quotient polynomials.
let quotient_commitment = timed!(
timing,
"compute quotient commitment",
PolynomialBatch::from_coeffs(
all_quotient_chunks,
rate_bits,
false,
config.fri_config.cap_height,
timing,
None,
)
);
// Observe the quotient polynomials Merkle cap.
let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
challenger.observe_cap(&quotient_polys_cap);

let zeta = challenger.get_extension_challenge::<D>();
// To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
// `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since
// `(g * zeta)^n = zeta^n`, where `n` is the order of `g`.
let g = F::primitive_root_of_unity(degree_bits);
ensure!(
zeta.exp_power_of_2(degree_bits) != F::Extension::ONE,
"Opening point is in the subgroup."
);

// Compute all openings: evaluate all committed polynomials at `zeta` and, when necessary, at `g * zeta`.
let openings = StarkOpeningSet::new(
zeta,
g,
prove_with_commitment(
stark,
config,
trace_poly_values,
trace_commitment,
&auxiliary_polys_commitment,
&quotient_commitment,
stark.num_lookup_helper_columns(config),
&num_ctl_polys,
);
// Get the FRI openings and observe them.
challenger.observe_openings(&openings.to_fri_openings());

let initial_merkle_trees = vec![
trace_commitment,
&auxiliary_polys_commitment,
&quotient_commitment,
];

check_abort_signal(abort_signal.clone())?;

let opening_proof = timed!(
Some(ctl_data),
Some(ctl_challenges),
challenger,
&[],
timing,
"compute openings proof",
PolynomialBatch::prove_openings(
&stark.fri_instance(zeta, g, num_ctl_polys.iter().sum(), num_ctl_polys, config),
&initial_merkle_trees,
challenger,
&fri_params,
timing,
)
);

let proof = StarkProof {
trace_cap: trace_commitment.merkle_tree.cap.clone(),
auxiliary_polys_cap,
quotient_polys_cap,
openings,
opening_proof,
};
Ok(StarkProofWithMetadata {
)
.map(|proof_with_pis| StarkProofWithMetadata {
proof: proof_with_pis.proof,
init_challenger_state,
proof,
})
}

/// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`,
/// where the `C_i`s are the Stark constraints.
fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
stark: &S,
trace_commitment: &'a PolynomialBatch<F, C, D>,
auxiliary_polys_commitment: &'a PolynomialBatch<F, C, D>,
lookup_challenges: Option<&'a Vec<F>>,
lookups: &[Lookup<F>],
ctl_data: &CtlData<F>,
alphas: Vec<F>,
degree_bits: usize,
num_lookup_columns: usize,
num_ctl_columns: &[usize],
config: &StarkConfig,
) -> Vec<PolynomialCoeffs<F>>
where
F: RichField + Extendable<D>,
P: PackedField<Scalar = F>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
{
let degree = 1 << degree_bits;
let rate_bits = config.fri_config.rate_bits;
let total_num_helper_cols: usize = num_ctl_columns.iter().sum();

let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor());
assert!(
quotient_degree_bits <= rate_bits,
"Having constraints of degree higher than the rate is not supported yet."
);
let step = 1 << (rate_bits - quotient_degree_bits);
// When opening the `Z` polys at the "next" point, we need to look at the point `next_step` steps away.
let next_step = 1 << quotient_degree_bits;

// Evaluation of the first Lagrange polynomial on the LDE domain.
let lagrange_first = PolynomialValues::selector(degree, 0).lde_onto_coset(quotient_degree_bits);
// Evaluation of the last Lagrange polynomial on the LDE domain.
let lagrange_last =
PolynomialValues::selector(degree, degree - 1).lde_onto_coset(quotient_degree_bits);

let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, quotient_degree_bits);

// Retrieve the LDE values at index `i`.
let get_trace_values_packed =
|i_start| -> Vec<P> { trace_commitment.get_lde_values_packed(i_start, step) };

// Last element of the subgroup.
let last = F::primitive_root_of_unity(degree_bits).inverse();
let size = degree << quotient_degree_bits;
let coset = F::cyclic_subgroup_coset_known_order(
F::primitive_root_of_unity(degree_bits + quotient_degree_bits),
F::coset_shift(),
size,
);

// We will step by `P::WIDTH`, and in each iteration, evaluate the quotient polynomial at
// a batch of `P::WIDTH` points.
let quotient_values = (0..size)
.into_par_iter()
.step_by(P::WIDTH)
.flat_map_iter(|i_start| {
let i_next_start = (i_start + next_step) % size;
let i_range = i_start..i_start + P::WIDTH;

let x = *P::from_slice(&coset[i_range.clone()]);
let z_last = x - last;
let lagrange_basis_first = *P::from_slice(&lagrange_first.values[i_range.clone()]);
let lagrange_basis_last = *P::from_slice(&lagrange_last.values[i_range]);

let mut consumer = ConstraintConsumer::new(
alphas.clone(),
z_last,
lagrange_basis_first,
lagrange_basis_last,
);
// Get the local and next row evaluations for the current STARK.
let vars = S::EvaluationFrame::from_values(
&get_trace_values_packed(i_start),
&get_trace_values_packed(i_next_start),
);
// Get the local and next row evaluations for the permutation argument, as well as the associated challenges.
let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
local_values: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
[..num_lookup_columns]
.to_vec(),
next_values: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step)
[..num_lookup_columns]
.to_vec(),
challenges: challenges.to_vec(),
});

// Get all the data for this STARK's CTLs:
// - the local and next row evaluations for the CTL Z polynomials
// - the associated challenges.
// - for each CTL:
// - the filter `Column`
// - the `Column`s that form the looking/looked table.

let mut start_index = 0;
let ctl_vars = ctl_data
.zs_columns
.iter()
.enumerate()
.map(|(i, zs_columns)| {
let num_ctl_helper_cols = num_ctl_columns[i];
let helper_columns = auxiliary_polys_commitment
.get_lde_values_packed(i_start, step)[num_lookup_columns
+ start_index
..num_lookup_columns + start_index + num_ctl_helper_cols]
.to_vec();

let ctl_vars = CtlCheckVars::<F, F, P, 1> {
helper_columns,
local_z: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
[num_lookup_columns + total_num_helper_cols + i],
next_z: auxiliary_polys_commitment
.get_lde_values_packed(i_next_start, step)
[num_lookup_columns + total_num_helper_cols + i],
challenges: zs_columns.challenge,
columns: zs_columns.columns.clone(),
filter: zs_columns.filter.clone(),
};

start_index += num_ctl_helper_cols;

ctl_vars
})
.collect::<Vec<_>>();

// Evaluate the polynomial combining all constraints, including those associated
// to the permutation and CTL arguments.
eval_vanishing_poly::<F, F, P, S, D, 1>(
stark,
&vars,
lookups,
lookup_vars,
&ctl_vars,
&mut consumer,
);
let mut constraints_evals = consumer.accumulators();
// We divide the constraints evaluations by `Z_H(x)`.
let denominator_inv: P = z_h_on_coset.eval_inverse_packed(i_start);
for eval in &mut constraints_evals {
*eval *= denominator_inv;
}

let num_challenges = alphas.len();

(0..P::WIDTH).map(move |i| {
(0..num_challenges)
.map(|j| constraints_evals[j].as_slice()[i])
.collect()
})
})
.collect::<Vec<_>>();

transpose(&quotient_values)
.into_par_iter()
.map(PolynomialValues::new)
.map(|values| values.coset_ifft(F::coset_shift()))
.collect()
}
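
Restating the identity `compute_quotient_polys` evaluates (informal; the notation follows the doc comment above):

// For x ranging over the coset s * H_ext (disjoint from H, so Z_H(x) != 0):
//   vanishing(x) = sum_i alpha^i * C_i(x)      // all constraints, combined
//   Z_H(x)       = x^n - 1                     // vanishes exactly on H
//   quotient(x)  = vanishing(x) * Z_H(x)^{-1}  // exact because the C_i vanish on H
// The verifier later checks vanishing(zeta) == Z_H(zeta) * quotient(zeta).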

/// Utility method that checks whether a kill signal has been emitted by one of the workers,
/// which will result in an early abort for all the other processes involved in the same set
/// of transactions.
@ -708,134 +360,3 @@ pub fn check_abort_signal(abort_signal: Option<Arc<AtomicBool>>) -> Result<()> {

Ok(())
}
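
A small usage sketch for the abort mechanism (hypothetical driver code; `prove` threads the same `Option<Arc<AtomicBool>>` through to this check):

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

fn abort_sketch() {
    let abort = Arc::new(AtomicBool::new(false));
    // Hand a clone to the prover (e.g. as the `abort_signal` argument of `prove`).
    let for_prover = Some(abort.clone());
    // A flag that is still unset lets the prover continue...
    assert!(check_abort_signal(for_prover.clone()).is_ok());
    // ...while setting it from any thread makes the next check return an error.
    abort.store(true, Ordering::Relaxed);
    assert!(check_abort_signal(for_prover).is_err());
}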

#[cfg(test)]
/// Check that all constraints evaluate to zero on `H`.
/// Can also be used to check the degree of the constraints by evaluating on a larger subgroup.
fn check_constraints<'a, F, C, S, const D: usize>(
stark: &S,
trace_commitment: &'a PolynomialBatch<F, C, D>,
auxiliary_commitment: &'a PolynomialBatch<F, C, D>,
lookup_challenges: Option<&'a Vec<F>>,
lookups: &[Lookup<F>],
ctl_data: &CtlData<F>,
alphas: Vec<F>,
degree_bits: usize,
num_lookup_columns: usize,
num_ctl_helper_cols: &[usize],
) where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
{
let degree = 1 << degree_bits;
let rate_bits = 0; // Set this to a higher value to check the constraint degree.

let total_num_helper_cols: usize = num_ctl_helper_cols.iter().sum();

let size = degree << rate_bits;
let step = 1 << rate_bits;

// Evaluation of the first Lagrange polynomial.
let lagrange_first = PolynomialValues::selector(degree, 0).lde(rate_bits);
// Evaluation of the last Lagrange polynomial.
let lagrange_last = PolynomialValues::selector(degree, degree - 1).lde(rate_bits);

let subgroup = F::two_adic_subgroup(degree_bits + rate_bits);

// Get the evaluations of a batch of polynomials over our subgroup.
let get_subgroup_evals = |comm: &PolynomialBatch<F, C, D>| -> Vec<Vec<F>> {
let values = comm
.polynomials
.par_iter()
.map(|coeffs| coeffs.clone().fft().values)
.collect::<Vec<_>>();
transpose(&values)
};

// Get batch evaluations of the trace, permutation and CTL polynomials over our subgroup.
let trace_subgroup_evals = get_subgroup_evals(trace_commitment);
let auxiliary_subgroup_evals = get_subgroup_evals(auxiliary_commitment);

// Last element of the subgroup.
let last = F::primitive_root_of_unity(degree_bits).inverse();

let constraint_values = (0..size)
.map(|i| {
let i_next = (i + step) % size;

let x = subgroup[i];
let z_last = x - last;
let lagrange_basis_first = lagrange_first.values[i];
let lagrange_basis_last = lagrange_last.values[i];

let mut consumer = ConstraintConsumer::new(
alphas.clone(),
z_last,
lagrange_basis_first,
lagrange_basis_last,
);
// Get the local and next row evaluations for the current STARK's trace.
let vars = S::EvaluationFrame::from_values(
&trace_subgroup_evals[i],
&trace_subgroup_evals[i_next],
);
// Get the local and next row evaluations for the current STARK's permutation argument.
let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
local_values: auxiliary_subgroup_evals[i][..num_lookup_columns].to_vec(),
next_values: auxiliary_subgroup_evals[i_next][..num_lookup_columns].to_vec(),
challenges: challenges.to_vec(),
});

// Get the local and next row evaluations for the current STARK's CTL Z polynomials.
let mut start_index = 0;
let ctl_vars = ctl_data
.zs_columns
.iter()
.enumerate()
.map(|(iii, zs_columns)| {
let num_helper_cols = num_ctl_helper_cols[iii];
let helper_columns = auxiliary_subgroup_evals[i][num_lookup_columns
+ start_index
..num_lookup_columns + start_index + num_helper_cols]
.to_vec();
let ctl_vars = CtlCheckVars::<F, F, F, 1> {
helper_columns,
local_z: auxiliary_subgroup_evals[i]
[num_lookup_columns + total_num_helper_cols + iii],
next_z: auxiliary_subgroup_evals[i_next]
[num_lookup_columns + total_num_helper_cols + iii],
challenges: zs_columns.challenge,
columns: zs_columns.columns.clone(),
filter: zs_columns.filter.clone(),
};

start_index += num_helper_cols;

ctl_vars
})
.collect::<Vec<_>>();

// Evaluate the polynomial combining all constraints, including those associated
// to the permutation and CTL arguments.
eval_vanishing_poly::<F, F, F, S, D, 1>(
stark,
&vars,
lookups,
lookup_vars,
&ctl_vars,
&mut consumer,
);
consumer.accumulators()
})
.collect::<Vec<_>>();

// Assert that all constraints evaluate to 0 over our subgroup.
for v in constraint_values {
assert!(
v.iter().all(|x| x.is_zero()),
"Constraint failed in {}",
std::any::type_name::<S>()
);
}
}

@ -4,47 +4,41 @@ use core::fmt::Debug;
use anyhow::Result;
use ethereum_types::{BigEndianHash, U256};
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::fri::witness_util::set_fri_proof_target;
use plonky2::gates::exponentiation::ExponentiationGate;
use plonky2::gates::gate::GateRef;
use plonky2::gates::noop::NoopGate;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::iop::challenger::RecursiveChallenger;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::iop::witness::{PartialWitness, Witness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget};
use plonky2::util::reducing::ReducingFactorTarget;
use plonky2::util::serialization::{
Buffer, GateSerializer, IoResult, Read, WitnessGeneratorSerializer, Write,
};
use plonky2::with_context;
use plonky2_util::log2_ceil;
use starky::config::StarkConfig;
use starky::cross_table_lookup::{CrossTableLookup, CtlCheckVarsTarget};
use starky::lookup::{GrandProductChallenge, GrandProductChallengeSet};
use starky::proof::{StarkProofTarget, StarkProofWithMetadata};
use starky::recursive_verifier::{
add_virtual_stark_proof, set_stark_proof_target, verify_stark_proof_with_challenges_circuit,
};
use starky::stark::Stark;

use crate::all_stark::Table;
use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{CrossTableLookup, CtlCheckVarsTarget, GrandProductChallengeSet};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::{GrandProductChallenge, LookupCheckVarsTarget};
use crate::memory::segments::Segment;
use crate::memory::VALUE_LIMBS;
use crate::proof::{
BlockHashes, BlockHashesTarget, BlockMetadata, BlockMetadataTarget, ExtraBlockData,
ExtraBlockDataTarget, PublicValues, PublicValuesTarget, StarkOpeningSetTarget, StarkProof,
StarkProofChallengesTarget, StarkProofTarget, StarkProofWithMetadata, TrieRoots,
TrieRootsTarget,
ExtraBlockDataTarget, PublicValues, PublicValuesTarget, TrieRoots, TrieRootsTarget,
};
use crate::stark::Stark;
use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64};
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::witness::errors::ProgramError;

pub(crate) struct PublicInputs<T: Copy + Default + Eq + PartialEq + Debug, P: PlonkyPermutation<T>>
@ -205,7 +199,7 @@ where
}
}

/// Returns the recursive Stark circuit.
/// Returns the recursive STARK circuit.
pub(crate) fn recursive_stark_circuit<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -236,7 +230,7 @@ where
);
let num_ctl_helper_zs = num_ctl_zs + total_num_helpers;

let proof_target = add_virtual_stark_proof(
let stark_proof_target = add_virtual_stark_proof(
&mut builder,
stark,
inner_config,
@ -246,7 +240,7 @@ where
);

builder.register_public_inputs(
&proof_target
&stark_proof_target
.trace_cap
.0
.iter()
@ -265,7 +259,7 @@ where

let ctl_vars = CtlCheckVarsTarget::from_proof(
*table,
&proof_target,
&stark_proof_target,
cross_table_lookups,
&ctl_challenges_target,
num_lookup_columns,
@ -279,20 +273,25 @@ where
}));
let mut challenger =
RecursiveChallenger::<F, C::Hasher, D>::from_state(init_challenger_state_target);
let challenges =
proof_target.get_challenges::<F, C>(&mut builder, &mut challenger, inner_config);
let challenges = stark_proof_target.get_challenges::<F, C>(
&mut builder,
&mut challenger,
Some(&ctl_challenges_target),
true,
inner_config,
);
let challenger_state = challenger.compact(&mut builder);
builder.register_public_inputs(challenger_state.as_ref());

builder.register_public_inputs(&proof_target.openings.ctl_zs_first);
builder.register_public_inputs(stark_proof_target.openings.ctl_zs_first.as_ref().unwrap());

verify_stark_proof_with_challenges_circuit::<F, C, _, D>(
&mut builder,
stark,
&proof_target,
&challenges,
&ctl_vars,
&ctl_challenges_target,
&stark_proof_target,
&[], // public inputs
challenges,
Some(&ctl_vars),
inner_config,
);

@ -306,7 +305,7 @@ where
let circuit = builder.build::<C>();
StarkWrapperCircuit {
circuit,
stark_proof_target: proof_target,
stark_proof_target,
ctl_challenges_target,
init_challenger_state_target,
zero_target,
@ -324,122 +323,6 @@ pub(crate) fn add_common_recursion_gates<F: RichField + Extendable<D>, const D:
)));
}

/// Recursively verifies an inner proof.
fn verify_stark_proof_with_challenges_circuit<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
proof: &StarkProofTarget<D>,
challenges: &StarkProofChallengesTarget<D>,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
ctl_challenges: &GrandProductChallengeSet<Target>,
inner_config: &StarkConfig,
) where
C::Hasher: AlgebraicHasher<F>,
{
let zero = builder.zero();
let one = builder.one_extension();

let num_ctl_polys = ctl_vars
.iter()
.map(|ctl| ctl.helper_columns.len())
.sum::<usize>();

let StarkOpeningSetTarget {
local_values,
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first,
quotient_polys,
} = &proof.openings;
let vars = S::EvaluationFrameTarget::from_values(local_values, next_values);

let degree_bits = proof.recover_degree_bits(inner_config);
let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits);
let z_h_zeta = builder.sub_extension(zeta_pow_deg, one);
let (l_0, l_last) =
eval_l_0_and_l_last_circuit(builder, degree_bits, challenges.stark_zeta, z_h_zeta);
let last =
builder.constant_extension(F::Extension::primitive_root_of_unity(degree_bits).inverse());
let z_last = builder.sub_extension(challenges.stark_zeta, last);

let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
builder.zero_extension(),
challenges.stark_alphas.clone(),
z_last,
l_0,
l_last,
);

let num_lookup_columns = stark.num_lookup_helper_columns(inner_config);
let lookup_challenges = (num_lookup_columns > 0).then(|| {
ctl_challenges
.challenges
.iter()
.map(|ch| ch.beta)
.collect::<Vec<_>>()
});

let lookup_vars = stark.uses_lookups().then(|| LookupCheckVarsTarget {
local_values: auxiliary_polys[..num_lookup_columns].to_vec(),
next_values: auxiliary_polys_next[..num_lookup_columns].to_vec(),
challenges: lookup_challenges.unwrap(),
});

with_context!(
builder,
"evaluate vanishing polynomial",
eval_vanishing_poly_circuit::<F, S, D>(
builder,
stark,
&vars,
lookup_vars,
ctl_vars,
&mut consumer,
)
);
let vanishing_polys_zeta = consumer.accumulators();

// Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
for (i, chunk) in quotient_polys
.chunks(stark.quotient_degree_factor())
.enumerate()
{
let recombined_quotient = scale.reduce(chunk, builder);
let computed_vanishing_poly = builder.mul_extension(z_h_zeta, recombined_quotient);
builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly);
}

let merkle_caps = vec![
proof.trace_cap.clone(),
proof.auxiliary_polys_cap.clone(),
proof.quotient_polys_cap.clone(),
];

let fri_instance = stark.fri_instance_target(
builder,
challenges.stark_zeta,
F::primitive_root_of_unity(degree_bits),
num_ctl_polys,
ctl_zs_first.len(),
inner_config,
);
builder.verify_fri_proof::<C>(
&fri_instance,
&proof.openings.to_fri_openings(zero),
&challenges.fri_challenges,
&merkle_caps,
&proof.opening_proof,
&inner_config.fri_params(degree_bits),
);
}

/// Recursive version of `get_memory_extra_looking_sum`.
pub(crate) fn get_memory_extra_looking_sum_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
@ -667,25 +550,6 @@ fn add_data_write<F: RichField + Extendable<D>, const D: usize>(
builder.add(running_sum, inverse)
}

fn eval_l_0_and_l_last_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
log_n: usize,
x: ExtensionTarget<D>,
z_x: ExtensionTarget<D>,
) -> (ExtensionTarget<D>, ExtensionTarget<D>) {
let n = builder.constant_extension(F::Extension::from_canonical_usize(1 << log_n));
let g = builder.constant_extension(F::Extension::primitive_root_of_unity(log_n));
let one = builder.one_extension();
let l_0_deno = builder.mul_sub_extension(n, x, n);
let l_last_deno = builder.mul_sub_extension(g, x, one);
let l_last_deno = builder.mul_extension(n, l_last_deno);

(
builder.div_extension(z_x, l_0_deno),
builder.div_extension(z_x, l_last_deno),
)
}
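
For reference, the Lagrange-basis identities this gadget encodes, with `n = 2^log_n`, `g` a generator of the order-`n` subgroup, and `z_x = Z_H(x) = x^n - 1` supplied by the caller:

// L_0(x)     = (x^n - 1) / (n * (x - 1))      =>  denominator n*x - n
// L_{n-1}(x) = (x^n - 1) / (n * (g*x - 1))    =>  denominator n * (g*x - 1)
// (the latter follows from L_{n-1}(x) = g^{-1} * (x^n - 1) / (n * (x - g^{-1})),
// multiplying numerator and denominator by g).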

pub(crate) fn add_virtual_public_values<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
) -> PublicValuesTarget {
@ -770,93 +634,6 @@ pub(crate) fn add_virtual_extra_block_data<F: RichField + Extendable<D>, const D
}
}

pub(crate) fn add_virtual_stark_proof<
F: RichField + Extendable<D>,
S: Stark<F, D>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
degree_bits: usize,
num_ctl_helper_zs: usize,
num_ctl_zs: usize,
) -> StarkProofTarget<D> {
let fri_params = config.fri_params(degree_bits);
let cap_height = fri_params.config.cap_height;

let num_leaves_per_oracle = vec![
S::COLUMNS,
stark.num_lookup_helper_columns(config) + num_ctl_helper_zs,
stark.quotient_degree_factor() * config.num_challenges,
];

let auxiliary_polys_cap = builder.add_virtual_cap(cap_height);

StarkProofTarget {
trace_cap: builder.add_virtual_cap(cap_height),
auxiliary_polys_cap,
quotient_polys_cap: builder.add_virtual_cap(cap_height),
openings: add_virtual_stark_opening_set::<F, S, D>(
builder,
stark,
num_ctl_helper_zs,
num_ctl_zs,
config,
),
opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params),
}
}

fn add_virtual_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
num_ctl_helper_zs: usize,
num_ctl_zs: usize,
config: &StarkConfig,
) -> StarkOpeningSetTarget<D> {
let num_challenges = config.num_challenges;
StarkOpeningSetTarget {
local_values: builder.add_virtual_extension_targets(S::COLUMNS),
next_values: builder.add_virtual_extension_targets(S::COLUMNS),
auxiliary_polys: builder.add_virtual_extension_targets(
stark.num_lookup_helper_columns(config) + num_ctl_helper_zs,
),
auxiliary_polys_next: builder.add_virtual_extension_targets(
stark.num_lookup_helper_columns(config) + num_ctl_helper_zs,
),
ctl_zs_first: builder.add_virtual_targets(num_ctl_zs),
quotient_polys: builder
.add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
}
}

pub(crate) fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
witness: &mut W,
proof_target: &StarkProofTarget<D>,
proof: &StarkProof<F, C, D>,
zero: Target,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F>,
W: Witness<F>,
{
witness.set_cap_target(&proof_target.trace_cap, &proof.trace_cap);
witness.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap);

witness.set_fri_openings(
&proof_target.openings.to_fri_openings(zero),
&proof.openings.to_fri_openings(),
);

witness.set_cap_target(
&proof_target.auxiliary_polys_cap,
&proof.auxiliary_polys_cap,
);

set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);
}

pub fn set_public_value_targets<F, W, const D: usize>(
witness: &mut W,
public_values_target: &PublicValuesTarget,

evm/src/stark.rs
@ -1,228 +0,0 @@
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::fri::structure::{
FriBatchInfo, FriBatchInfoTarget, FriInstanceInfo, FriInstanceInfoTarget, FriOracleInfo,
FriPolynomialInfo,
};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::Lookup;

const TRACE_ORACLE_INDEX: usize = 0;
const AUXILIARY_ORACLE_INDEX: usize = 1;
const QUOTIENT_ORACLE_INDEX: usize = 2;

/// Represents a STARK system.
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// The total number of columns in the trace.
const COLUMNS: usize = Self::EvaluationFrameTarget::COLUMNS;

/// This is used to evaluate constraints natively.
type EvaluationFrame<FE, P, const D2: usize>: StarkEvaluationFrame<P>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

/// The `Target` version of `Self::EvaluationFrame`, used to evaluate constraints recursively.
type EvaluationFrameTarget: StarkEvaluationFrame<ExtensionTarget<D>>;

/// Evaluate constraints at a vector of points.
///
/// The points are elements of a field `FE`, a degree `D2` extension of `F`. This lets us
/// evaluate constraints over a larger domain if desired. This can also be called with `FE = F`
/// and `D2 = 1`, in which case we are using the trivial extension, i.e. just evaluating
/// constraints over `F`.
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

/// Evaluate constraints at a vector of points from the base field `F`.
fn eval_packed_base<P: PackedField<Scalar = F>>(
&self,
vars: &Self::EvaluationFrame<F, P, 1>,
yield_constr: &mut ConstraintConsumer<P>,
) {
self.eval_packed_generic(vars, yield_constr)
}

/// Evaluate constraints at a single point from the degree `D` extension field.
fn eval_ext(
&self,
vars: &Self::EvaluationFrame<F::Extension, F::Extension, D>,
yield_constr: &mut ConstraintConsumer<F::Extension>,
) {
self.eval_packed_generic(vars, yield_constr)
}

/// Evaluate constraints at a vector of points from the degree `D` extension field. This is like
/// `eval_ext`, except in the context of a recursive circuit.
/// Note: constraints must be added through `yield_constr.constraint(builder, constraint)` in the
/// same order as they are given in `eval_packed_generic`.
fn eval_ext_circuit(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: &Self::EvaluationFrameTarget,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
);

/// The maximum constraint degree.
fn constraint_degree(&self) -> usize;

/// The maximum quotient polynomial degree factor.
fn quotient_degree_factor(&self) -> usize {
1.max(self.constraint_degree() - 1)
}

fn num_quotient_polys(&self, config: &StarkConfig) -> usize {
self.quotient_degree_factor() * config.num_challenges
}
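
// Illustrative numbers (an assumption, not taken from the codebase): with
// degree-3 constraints, quotient_degree_factor() == 2, i.e. the quotient has
// degree under 2n and is committed as 2 chunks of length n; with
// config.num_challenges == 2 this makes num_quotient_polys() == 2 * 2 = 4.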

/// Computes the FRI instance used to prove this Stark.
fn fri_instance(
&self,
zeta: F::Extension,
g: F,
num_ctl_helpers: usize,
num_ctl_zs: Vec<usize>,
config: &StarkConfig,
) -> FriInstanceInfo<F, D> {
let trace_oracle = FriOracleInfo {
num_polys: Self::COLUMNS,
blinding: false,
};
let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);

let num_lookup_columns = self.num_lookup_helper_columns(config);
let num_auxiliary_polys = num_lookup_columns + num_ctl_helpers + num_ctl_zs.len();
let auxiliary_oracle = FriOracleInfo {
num_polys: num_auxiliary_polys,
blinding: false,
};
let auxiliary_polys_info =
FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);

let ctl_zs_info = FriPolynomialInfo::from_range(
AUXILIARY_ORACLE_INDEX,
num_lookup_columns + num_ctl_helpers..num_auxiliary_polys,
);

let num_quotient_polys = self.num_quotient_polys(config);
let quotient_oracle = FriOracleInfo {
num_polys: num_quotient_polys,
blinding: false,
};
let quotient_info =
FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys);

let zeta_batch = FriBatchInfo {
point: zeta,
polynomials: [
trace_info.clone(),
auxiliary_polys_info.clone(),
quotient_info,
]
.concat(),
};
let zeta_next_batch = FriBatchInfo {
point: zeta.scalar_mul(g),
polynomials: [trace_info, auxiliary_polys_info].concat(),
};
let ctl_first_batch = FriBatchInfo {
point: F::Extension::ONE,
polynomials: ctl_zs_info,
};
FriInstanceInfo {
oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch],
}
}

/// Circuit version of `fri_instance`, computing the FRI instance used to prove this Stark.
fn fri_instance_target(
&self,
builder: &mut CircuitBuilder<F, D>,
zeta: ExtensionTarget<D>,
g: F,
num_ctl_helper_polys: usize,
num_ctl_zs: usize,
inner_config: &StarkConfig,
) -> FriInstanceInfoTarget<D> {
let trace_oracle = FriOracleInfo {
num_polys: Self::COLUMNS,
blinding: false,
};
let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);

let num_lookup_columns = self.num_lookup_helper_columns(inner_config);
let num_auxiliary_polys = num_lookup_columns + num_ctl_helper_polys + num_ctl_zs;
let auxiliary_oracle = FriOracleInfo {
num_polys: num_auxiliary_polys,
blinding: false,
};
let auxiliary_polys_info =
FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);

let ctl_zs_info = FriPolynomialInfo::from_range(
AUXILIARY_ORACLE_INDEX,
num_lookup_columns + num_ctl_helper_polys
..num_lookup_columns + num_ctl_helper_polys + num_ctl_zs,
);

let num_quotient_polys = self.num_quotient_polys(inner_config);
let quotient_oracle = FriOracleInfo {
num_polys: num_quotient_polys,
blinding: false,
};
let quotient_info =
FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys);

let zeta_batch = FriBatchInfoTarget {
point: zeta,
polynomials: [
trace_info.clone(),
auxiliary_polys_info.clone(),
quotient_info,
]
.concat(),
};
let zeta_next = builder.mul_const_extension(g, zeta);
let zeta_next_batch = FriBatchInfoTarget {
point: zeta_next,
polynomials: [trace_info, auxiliary_polys_info].concat(),
};
let ctl_first_batch = FriBatchInfoTarget {
point: builder.one_extension(),
polynomials: ctl_zs_info,
};
FriInstanceInfoTarget {
oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch],
}
}

fn lookups(&self) -> Vec<Lookup<F>> {
vec![]
}

fn num_lookup_helper_columns(&self, config: &StarkConfig) -> usize {
self.lookups()
.iter()
.map(|lookup| lookup.num_helper_columns(self.constraint_degree()))
.sum::<usize>()
* config.num_challenges
}

fn uses_lookups(&self) -> bool {
!self.lookups().is_empty()
}
}
@ -1,157 +0,0 @@
use anyhow::{ensure, Result};
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::field::types::{Field, Sample};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::GenericConfig;
use plonky2::util::transpose;
use plonky2_util::{log2_ceil, log2_strict};

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::stark::Stark;

const WITNESS_SIZE: usize = 1 << 5;

/// Tests that the constraints imposed by the given STARK are low-degree by applying them to random
/// low-degree witness polynomials.
pub(crate) fn test_stark_low_degree<
F: RichField + Extendable<D>,
S: Stark<F, D>,
const D: usize,
>(
stark: S,
) -> Result<()> {
let rate_bits = log2_ceil(stark.constraint_degree() + 1);

let trace_ldes = random_low_degree_matrix::<F>(S::COLUMNS, rate_bits);
let size = trace_ldes.len();

let lagrange_first = PolynomialValues::selector(WITNESS_SIZE, 0).lde(rate_bits);
let lagrange_last = PolynomialValues::selector(WITNESS_SIZE, WITNESS_SIZE - 1).lde(rate_bits);

let last = F::primitive_root_of_unity(log2_strict(WITNESS_SIZE)).inverse();
let subgroup =
F::cyclic_subgroup_known_order(F::primitive_root_of_unity(log2_strict(size)), size);
let alpha = F::rand();
let constraint_evals = (0..size)
.map(|i| {
let vars = S::EvaluationFrame::from_values(
&trace_ldes[i],
&trace_ldes[(i + (1 << rate_bits)) % size],
);

let mut consumer = ConstraintConsumer::<F>::new(
vec![alpha],
subgroup[i] - last,
lagrange_first.values[i],
lagrange_last.values[i],
);
stark.eval_packed_base(&vars, &mut consumer);
consumer.accumulators()[0]
})
.collect::<Vec<_>>();

let constraint_poly_values = PolynomialValues::new(constraint_evals);
if !constraint_poly_values.is_zero() {
let constraint_eval_degree = constraint_poly_values.degree();
let maximum_degree = WITNESS_SIZE * stark.constraint_degree() - 1;

ensure!(
constraint_eval_degree <= maximum_degree,
"Expected degrees at most {} * {} - 1 = {}, actual {:?}",
WITNESS_SIZE,
stark.constraint_degree(),
maximum_degree,
constraint_eval_degree
);
}

Ok(())
}
|
||||
|
||||
/// Tests that the circuit constraints imposed by the given STARK are coherent with the native constraints.
|
||||
pub(crate) fn test_stark_circuit_constraints<
|
||||
F: RichField + Extendable<D>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
S: Stark<F, D>,
|
||||
const D: usize,
|
||||
>(
|
||||
stark: S,
|
||||
) -> Result<()> {
|
||||
// Compute native constraint evaluation on random values.
|
||||
let vars = S::EvaluationFrame::from_values(
|
||||
&F::Extension::rand_vec(S::COLUMNS),
|
||||
&F::Extension::rand_vec(S::COLUMNS),
|
||||
);
|
||||
|
||||
let alphas = F::rand_vec(1);
|
||||
let z_last = F::Extension::rand();
|
||||
let lagrange_first = F::Extension::rand();
|
||||
let lagrange_last = F::Extension::rand();
|
||||
let mut consumer = ConstraintConsumer::<F::Extension>::new(
|
||||
alphas
|
||||
.iter()
|
||||
.copied()
|
||||
.map(F::Extension::from_basefield)
|
||||
.collect(),
|
||||
z_last,
|
||||
lagrange_first,
|
||||
lagrange_last,
|
||||
);
|
||||
stark.eval_ext(&vars, &mut consumer);
|
||||
let native_eval = consumer.accumulators()[0];
|
||||
|
||||
// Compute circuit constraint evaluation on same random values.
|
||||
let circuit_config = CircuitConfig::standard_recursion_config();
|
||||
let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
|
||||
let mut pw = PartialWitness::<F>::new();
|
||||
|
||||
let locals_t = builder.add_virtual_extension_targets(S::COLUMNS);
|
||||
pw.set_extension_targets(&locals_t, vars.get_local_values());
|
||||
let nexts_t = builder.add_virtual_extension_targets(S::COLUMNS);
|
||||
pw.set_extension_targets(&nexts_t, vars.get_next_values());
|
||||
let alphas_t = builder.add_virtual_targets(1);
|
||||
pw.set_target(alphas_t[0], alphas[0]);
|
||||
let z_last_t = builder.add_virtual_extension_target();
|
||||
pw.set_extension_target(z_last_t, z_last);
|
||||
let lagrange_first_t = builder.add_virtual_extension_target();
|
||||
pw.set_extension_target(lagrange_first_t, lagrange_first);
|
||||
let lagrange_last_t = builder.add_virtual_extension_target();
|
||||
pw.set_extension_target(lagrange_last_t, lagrange_last);
|
||||
|
||||
let vars = S::EvaluationFrameTarget::from_values(&locals_t, &nexts_t);
|
||||
let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
|
||||
builder.zero_extension(),
|
||||
alphas_t,
|
||||
z_last_t,
|
||||
lagrange_first_t,
|
||||
lagrange_last_t,
|
||||
);
|
||||
stark.eval_ext_circuit(&mut builder, &vars, &mut consumer);
|
||||
let circuit_eval = consumer.accumulators()[0];
|
||||
let native_eval_t = builder.constant_extension(native_eval);
|
||||
builder.connect_extension(circuit_eval, native_eval_t);
|
||||
|
||||
let data = builder.build::<C>();
|
||||
let proof = data.prove(pw)?;
|
||||
data.verify(proof)
|
||||
}
|
||||
|
||||
fn random_low_degree_matrix<F: Field>(num_polys: usize, rate_bits: usize) -> Vec<Vec<F>> {
|
||||
let polys = (0..num_polys)
|
||||
.map(|_| random_low_degree_values(rate_bits))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
transpose(&polys)
|
||||
}
|
||||
|
||||
fn random_low_degree_values<F: Field>(rate_bits: usize) -> Vec<F> {
|
||||
PolynomialCoeffs::new(F::rand_vec(WITNESS_SIZE))
|
||||
.lde(rate_bits)
|
||||
.fft()
|
||||
.values
|
||||
}
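
// Illustrative sketch, not part of this diff: with this refactor these harnesses live
// in the `starky` crate, so a downstream STARK can exercise both checks roughly as
// below. `MyStark` is a hypothetical STARK type and the module path is an assumption.
//
// use starky::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
//
// fn sanity_check_my_stark() -> anyhow::Result<()> {
//     const D: usize = 2;
//     type C = plonky2::plonk::config::PoseidonGoldilocksConfig;
//     type F = <C as plonky2::plonk::config::GenericConfig<D>>::F;
//     test_stark_low_degree::<F, _, D>(MyStark::<F, D>::default())?;
//     test_stark_circuit_constraints::<F, C, MyStark<F, D>, D>(MyStark::default())
// }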

@ -35,18 +35,6 @@ pub(crate) fn limb_from_bits_le_recursive<F: RichField + Extendable<D>, const D:
})
}

/// A helper function to transpose a row-wise trace and put it in the format that `prove` expects.
pub(crate) fn trace_rows_to_poly_values<F: Field, const COLUMNS: usize>(
trace_rows: Vec<[F; COLUMNS]>,
) -> Vec<PolynomialValues<F>> {
let trace_row_vecs = trace_rows.into_iter().map(|row| row.to_vec()).collect_vec();
let trace_col_vecs: Vec<Vec<F>> = transpose(&trace_row_vecs);
trace_col_vecs
.into_iter()
.map(|column| PolynomialValues::new(column))
.collect()
}
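
// Illustrative sketch, not part of this diff: `trace_rows_to_poly_values` transposes
// a row-major trace into the column-major `PolynomialValues` layout that `prove`
// expects. With two rows and three columns, column 0 becomes the values [1, 4]:
//
// use plonky2::field::goldilocks_field::GoldilocksField as F;
// use plonky2::field::types::Field;
//
// let rows: Vec<[F; 3]> = vec![
//     [F::ONE, F::TWO, F::from_canonical_u64(3)],
//     [F::from_canonical_u64(4), F::from_canonical_u64(5), F::from_canonical_u64(6)],
// ];
// let columns = trace_rows_to_poly_values(rows);
// assert_eq!(columns.len(), 3);
// assert_eq!(columns[0].values, vec![F::ONE, F::from_canonical_u64(4)]);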

/// Returns the lowest LE 32-bit limb of a `U256` as a field element,
/// and errors if the integer is actually greater.
pub(crate) fn u256_to_u32<F: Field>(u256: U256) -> Result<F, ProgramError> {

@ -1,81 +0,0 @@
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{
eval_cross_table_lookup_checks, eval_cross_table_lookup_checks_circuit, CtlCheckVars,
CtlCheckVarsTarget,
};
use crate::lookup::{
eval_ext_lookups_circuit, eval_packed_lookups_generic, Lookup, LookupCheckVars,
LookupCheckVarsTarget,
};
use crate::stark::Stark;

/// Evaluates all constraint, permutation and cross-table lookup polynomials
/// of the current STARK at the local and next values.
pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
stark: &S,
vars: &S::EvaluationFrame<FE, P, D2>,
lookups: &[Lookup<F>],
lookup_vars: Option<LookupCheckVars<F, FE, P, D2>>,
ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
consumer: &mut ConstraintConsumer<P>,
) where
F: RichField + Extendable<D>,
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
S: Stark<F, D>,
{
// Evaluate all of the STARK's table constraints.
stark.eval_packed_generic(vars, consumer);
if let Some(lookup_vars) = lookup_vars {
// Evaluate the STARK constraints related to the permutation arguments.
eval_packed_lookups_generic::<F, FE, P, S, D, D2>(
stark,
lookups,
vars,
lookup_vars,
consumer,
);
}
// Evaluate the STARK constraints related to the cross-table lookups.
eval_cross_table_lookup_checks::<F, FE, P, S, D, D2>(
vars,
ctl_vars,
consumer,
stark.constraint_degree(),
);
}

/// Circuit version of `eval_vanishing_poly`.
/// Evaluates all constraint, permutation and cross-table lookup polynomials
/// of the current STARK at the local and next values.
pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
vars: &S::EvaluationFrameTarget,
lookup_vars: Option<LookupCheckVarsTarget<D>>,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
F: RichField + Extendable<D>,
S: Stark<F, D>,
{
// Evaluate all of the STARK's table constraints.
stark.eval_ext_circuit(builder, vars, consumer);
if let Some(lookup_vars) = lookup_vars {
// Evaluate all of the STARK's constraints related to the permutation argument.
eval_ext_lookups_circuit::<F, S, D>(builder, stark, vars, lookup_vars, consumer);
}
// Evaluate all of the STARK's constraints related to the cross-table lookups.
eval_cross_table_lookup_checks_circuit::<S, F, D>(
builder,
vars,
ctl_vars,
consumer,
stark.constraint_degree(),
);
}

@ -1,34 +1,22 @@
use core::any::type_name;

use anyhow::{ensure, Result};
use anyhow::Result;
use ethereum_types::{BigEndianHash, U256};
use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::types::Field;
use plonky2::fri::verifier::verify_fri_proof;
use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::plonk_common::reduce_with_powers;
use starky::config::StarkConfig;
use starky::cross_table_lookup::{get_ctl_vars_from_proofs, verify_cross_table_lookups};
use starky::lookup::GrandProductChallenge;
use starky::stark::Stark;
use starky::verifier::verify_stark_proof_with_challenges;

use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{
num_ctl_helper_columns_by_table, verify_cross_table_lookups, CtlCheckVars,
GrandProductChallengeSet,
};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::{GrandProductChallenge, LookupCheckVars};
use crate::memory::segments::Segment;
use crate::memory::VALUE_LIMBS;
use crate::proof::{
AllProof, AllProofChallenges, PublicValues, StarkOpeningSet, StarkProof, StarkProofChallenges,
};
use crate::stark::Stark;
use crate::proof::{AllProof, AllProofChallenges, PublicValues};
use crate::util::h2u;
use crate::vanishing_poly::eval_vanishing_poly;

pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
all_stark: &AllStark<F, D>,
@ -57,73 +45,71 @@ where
cross_table_lookups,
} = all_stark;

let num_ctl_helper_cols = num_ctl_helper_columns_by_table(
cross_table_lookups,
all_stark.arithmetic_stark.constraint_degree(),
);

let ctl_vars_per_table = CtlCheckVars::from_proofs(
&all_proof.stark_proofs,
let ctl_vars_per_table = get_ctl_vars_from_proofs(
&all_proof.multi_proof,
cross_table_lookups,
&ctl_challenges,
&num_lookup_columns,
&num_ctl_helper_cols,
all_stark.arithmetic_stark.constraint_degree(),
);

let stark_proofs = &all_proof.multi_proof.stark_proofs;

verify_stark_proof_with_challenges(
arithmetic_stark,
&all_proof.stark_proofs[Table::Arithmetic as usize].proof,
&stark_proofs[Table::Arithmetic as usize].proof,
&stark_challenges[Table::Arithmetic as usize],
&ctl_vars_per_table[Table::Arithmetic as usize],
&ctl_challenges,
Some(&ctl_vars_per_table[Table::Arithmetic as usize]),
&[],
config,
)?;

verify_stark_proof_with_challenges(
byte_packing_stark,
&all_proof.stark_proofs[Table::BytePacking as usize].proof,
&stark_proofs[Table::BytePacking as usize].proof,
&stark_challenges[Table::BytePacking as usize],
&ctl_vars_per_table[Table::BytePacking as usize],
&ctl_challenges,
Some(&ctl_vars_per_table[Table::BytePacking as usize]),
&[],
config,
)?;
verify_stark_proof_with_challenges(
cpu_stark,
&all_proof.stark_proofs[Table::Cpu as usize].proof,
&stark_proofs[Table::Cpu as usize].proof,
&stark_challenges[Table::Cpu as usize],
&ctl_vars_per_table[Table::Cpu as usize],
&ctl_challenges,
Some(&ctl_vars_per_table[Table::Cpu as usize]),
&[],
config,
)?;
verify_stark_proof_with_challenges(
keccak_stark,
&all_proof.stark_proofs[Table::Keccak as usize].proof,
&stark_proofs[Table::Keccak as usize].proof,
&stark_challenges[Table::Keccak as usize],
&ctl_vars_per_table[Table::Keccak as usize],
&ctl_challenges,
Some(&ctl_vars_per_table[Table::Keccak as usize]),
&[],
config,
)?;
verify_stark_proof_with_challenges(
keccak_sponge_stark,
&all_proof.stark_proofs[Table::KeccakSponge as usize].proof,
&stark_proofs[Table::KeccakSponge as usize].proof,
&stark_challenges[Table::KeccakSponge as usize],
&ctl_vars_per_table[Table::KeccakSponge as usize],
&ctl_challenges,
Some(&ctl_vars_per_table[Table::KeccakSponge as usize]),
&[],
config,
)?;
verify_stark_proof_with_challenges(
logic_stark,
&all_proof.stark_proofs[Table::Logic as usize].proof,
&stark_proofs[Table::Logic as usize].proof,
&stark_challenges[Table::Logic as usize],
&ctl_vars_per_table[Table::Logic as usize],
&ctl_challenges,
Some(&ctl_vars_per_table[Table::Logic as usize]),
&[],
config,
)?;
verify_stark_proof_with_challenges(
memory_stark,
&all_proof.stark_proofs[Table::Memory as usize].proof,
&stark_proofs[Table::Memory as usize].proof,
&stark_challenges[Table::Memory as usize],
&ctl_vars_per_table[Table::Memory as usize],
&ctl_challenges,
Some(&ctl_vars_per_table[Table::Memory as usize]),
&[],
config,
)?;

@ -141,9 +127,10 @@ where
verify_cross_table_lookups::<F, D, NUM_TABLES>(
cross_table_lookups,
all_proof
.multi_proof
.stark_proofs
.map(|p| p.proof.openings.ctl_zs_first),
extra_looking_sums,
.map(|p| p.proof.openings.ctl_zs_first.unwrap()),
Some(&extra_looking_sums),
config,
)
}
@ -293,186 +280,8 @@ where
running_sum + challenge.combine(row.iter()).inverse()
}

pub(crate) fn verify_stark_proof_with_challenges<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
const D: usize,
>(
stark: &S,
proof: &StarkProof<F, C, D>,
challenges: &StarkProofChallenges<F, D>,
ctl_vars: &[CtlCheckVars<F, F::Extension, F::Extension, D>],
ctl_challenges: &GrandProductChallengeSet<F>,
config: &StarkConfig,
) -> Result<()> {
log::debug!("Checking proof: {}", type_name::<S>());
let num_ctl_polys = ctl_vars
.iter()
.map(|ctl| ctl.helper_columns.len())
.sum::<usize>();
let num_ctl_z_polys = ctl_vars.len();
validate_proof_shape(stark, proof, config, num_ctl_polys, num_ctl_z_polys)?;
let StarkOpeningSet {
local_values,
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first: _,
quotient_polys,
} = &proof.openings;
let vars = S::EvaluationFrame::from_values(local_values, next_values);

let degree_bits = proof.recover_degree_bits(config);
let (l_0, l_last) = eval_l_0_and_l_last(degree_bits, challenges.stark_zeta);
let last = F::primitive_root_of_unity(degree_bits).inverse();
let z_last = challenges.stark_zeta - last.into();
let mut consumer = ConstraintConsumer::<F::Extension>::new(
challenges
.stark_alphas
.iter()
.map(|&alpha| F::Extension::from_basefield(alpha))
.collect::<Vec<_>>(),
z_last,
l_0,
l_last,
);
let num_lookup_columns = stark.num_lookup_helper_columns(config);
let lookup_challenges = (num_lookup_columns > 0).then(|| {
ctl_challenges
.challenges
.iter()
.map(|ch| ch.beta)
.collect::<Vec<_>>()
});

let lookup_vars = stark.uses_lookups().then(|| LookupCheckVars {
local_values: auxiliary_polys[..num_lookup_columns].to_vec(),
next_values: auxiliary_polys_next[..num_lookup_columns].to_vec(),
challenges: lookup_challenges.unwrap(),
});
let lookups = stark.lookups();
eval_vanishing_poly::<F, F::Extension, F::Extension, S, D, D>(
stark,
&vars,
&lookups,
lookup_vars,
ctl_vars,
&mut consumer,
);
let vanishing_polys_zeta = consumer.accumulators();

// Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
let zeta_pow_deg = challenges.stark_zeta.exp_power_of_2(degree_bits);
let z_h_zeta = zeta_pow_deg - F::Extension::ONE;
// `quotient_polys_zeta` holds `num_challenges * quotient_degree_factor` evaluations.
// Each chunk of `quotient_degree_factor` holds the evaluations of `t_0(zeta),...,t_{quotient_degree_factor-1}(zeta)`
// where the "real" quotient polynomial is `t(X) = t_0(X) + t_1(X)*X^n + t_2(X)*X^{2n} + ...`.
// So to reconstruct `t(zeta)` we can compute `reduce_with_powers(chunk, zeta^n)` for each
// `quotient_degree_factor`-sized chunk of the original evaluations.
for (i, chunk) in quotient_polys
.chunks(stark.quotient_degree_factor())
.enumerate()
{
ensure!(
vanishing_polys_zeta[i] == z_h_zeta * reduce_with_powers(chunk, zeta_pow_deg),
"Mismatch between evaluation and opening of quotient polynomial"
);
}

let merkle_caps = vec![
proof.trace_cap.clone(),
proof.auxiliary_polys_cap.clone(),
proof.quotient_polys_cap.clone(),
];

let num_ctl_zs = ctl_vars
.iter()
.map(|ctl| ctl.helper_columns.len())
.collect::<Vec<_>>();
verify_fri_proof::<F, C, D>(
&stark.fri_instance(
challenges.stark_zeta,
F::primitive_root_of_unity(degree_bits),
num_ctl_polys,
num_ctl_zs,
config,
),
&proof.openings.to_fri_openings(),
&challenges.fri_challenges,
&merkle_caps,
&proof.opening_proof,
&config.fri_params(degree_bits),
)?;

Ok(())
}
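
// Illustrative sketch, not part of this diff: the identity checked above splits the
// quotient `t(X)`, of degree < `quotient_degree_factor * n`, into chunks `t_i(X)` of
// degree < `n`, so evaluating `t(zeta)` is a base-`zeta^n` recombination. This is what
// `reduce_with_powers(chunk, zeta_pow_deg)` computes, equivalent to the Horner fold:
fn recombine_quotient_chunks<F: plonky2::field::types::Field>(chunk: &[F], zeta_pow_deg: F) -> F {
    // t_0 + zeta^n * (t_1 + zeta^n * (t_2 + ...)).
    chunk.iter().rev().fold(F::ZERO, |acc, &t_i| acc * zeta_pow_deg + t_i)
}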

fn validate_proof_shape<F, C, S, const D: usize>(
stark: &S,
proof: &StarkProof<F, C, D>,
config: &StarkConfig,
num_ctl_helpers: usize,
num_ctl_zs: usize,
) -> anyhow::Result<()>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
{
let StarkProof {
trace_cap,
auxiliary_polys_cap,
quotient_polys_cap,
openings,
// The shape of the opening proof will be checked in the FRI verifier (see
// validate_fri_proof_shape), so we ignore it here.
opening_proof: _,
} = proof;

let StarkOpeningSet {
local_values,
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first,
quotient_polys,
} = openings;

let degree_bits = proof.recover_degree_bits(config);
let fri_params = config.fri_params(degree_bits);
let cap_height = fri_params.config.cap_height;

let num_auxiliary = num_ctl_helpers + stark.num_lookup_helper_columns(config) + num_ctl_zs;

ensure!(trace_cap.height() == cap_height);
ensure!(auxiliary_polys_cap.height() == cap_height);
ensure!(quotient_polys_cap.height() == cap_height);

ensure!(local_values.len() == S::COLUMNS);
ensure!(next_values.len() == S::COLUMNS);
ensure!(auxiliary_polys.len() == num_auxiliary);
ensure!(auxiliary_polys_next.len() == num_auxiliary);
ensure!(ctl_zs_first.len() == num_ctl_zs);
ensure!(quotient_polys.len() == stark.num_quotient_polys(config));

Ok(())
}

/// Evaluate the Lagrange polynomials `L_0` and `L_(n-1)` at a point `x`.
/// `L_0(x) = (x^n - 1)/(n * (x - 1))`
/// `L_(n-1)(x) = (x^n - 1)/(n * (g * x - 1))`, with `g` the first element of the subgroup.
fn eval_l_0_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
let n = F::from_canonical_usize(1 << log_n);
let g = F::primitive_root_of_unity(log_n);
let z_x = x.exp_power_of_2(log_n) - F::ONE;
let invs = F::batch_multiplicative_inverse(&[n * (x - F::ONE), n * (g * x - F::ONE)]);

(z_x * invs[0], z_x * invs[1])
}

#[cfg(test)]
pub(crate) mod testutils {
#[cfg(debug_assertions)]
pub(crate) mod debug_utils {
use super::*;

/// Output all the extra memory rows that don't appear in the CPU trace but are
@ -610,26 +419,3 @@ pub(crate) mod testutils {
row
}
}
#[cfg(test)]
mod tests {
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Sample;

use crate::verifier::eval_l_0_and_l_last;

#[test]
fn test_eval_l_0_and_l_last() {
type F = GoldilocksField;
let log_n = 5;
let n = 1 << log_n;

let x = F::rand(); // challenge point
let expected_l_first_x = PolynomialValues::selector(n, 0).ifft().eval(x);
let expected_l_last_x = PolynomialValues::selector(n, n - 1).ifft().eval(x);

let (l_first_x, l_last_x) = eval_l_0_and_l_last(log_n, x);
assert_eq!(l_first_x, expected_l_first_x);
assert_eq!(l_last_x, expected_l_last_x);
}
}

@ -6,15 +6,15 @@ use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use starky::config::StarkConfig;
use starky::util::trace_rows_to_poly_values;

use crate::all_stark::{AllStark, NUM_TABLES};
use crate::arithmetic::{BinaryOperator, Operation};
use crate::byte_packing::byte_packing_stark::BytePackingOp;
use crate::config::StarkConfig;
use crate::cpu::columns::CpuColumnsView;
use crate::keccak_sponge::columns::KECCAK_WIDTH_BYTES;
use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeOp;
use crate::util::trace_rows_to_poly_values;
use crate::witness::memory::MemoryOp;
use crate::{arithmetic, keccak, keccak_sponge, logic};

@ -11,14 +11,12 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -11,15 +11,13 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::cpu::kernel::opcodes::{get_opcode, get_push_opcode};
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -11,12 +11,9 @@ use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::util::serialization::{DefaultGateSerializer, DefaultGeneratorSerializer};
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::fixed_recursive_verifier::AllRecursiveCircuits;
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, PublicValues, TrieRoots};
use plonky2_evm::Node;
use plonky2_evm::{AllRecursiveCircuits, AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -10,14 +10,12 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -10,14 +10,12 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -12,16 +12,13 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::fixed_recursive_verifier::AllRecursiveCircuits;
use plonky2_evm::generation::mpt::transaction_testing::{AddressOption, LegacyTransactionRlp};
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp, LogRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllRecursiveCircuits, AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -11,14 +11,12 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -10,14 +10,12 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -11,14 +11,12 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllStark, Node, StarkConfig};

type F = GoldilocksField;
const D: usize = 2;

@ -9,14 +9,12 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::AccountRlp;
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
use plonky2_evm::{AllStark, Node, StarkConfig};
use rand::random;

type F = GoldilocksField;

@ -360,6 +360,7 @@ impl<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> CompressedFriPr
}
}

#[derive(Debug)]
pub struct FriChallenges<F: RichField + Extendable<D>, const D: usize> {
// Scaling factor to combine polynomials.
pub fri_alpha: F::Extension,
@ -373,6 +374,7 @@ pub struct FriChallenges<F: RichField + Extendable<D>, const D: usize> {
pub fri_query_indices: Vec<usize>,
}

#[derive(Debug)]
pub struct FriChallengesTarget<const D: usize> {
pub fri_alpha: ExtensionTarget<D>,
pub fri_betas: Vec<ExtensionTarget<D>>,

@ -17,7 +17,9 @@ std = ["anyhow/std", "plonky2/std"]
timing = ["plonky2/timing"]

[dependencies]
ahash = { version = "0.8.3", default-features = false, features = ["compile-time-rng"] } # NOTE: Be sure to keep this version the same as the dependency in `hashbrown`.
anyhow = { version = "1.0.40", default-features = false }
hashbrown = { version = "0.14.0", default-features = false, features = ["ahash", "serde"] } # NOTE: When upgrading, see `ahash` dependency.
itertools = { version = "0.11.0", default-features = false }
log = { version = "0.4.14", default-features = false }
num-bigint = { version = "0.4.3", default-features = false }

@ -1,17 +1,49 @@
//! A [`StarkConfig`] defines all the parameters to be used when proving a
//! [`Stark`][crate::stark::Stark].
//!
//! The default configuration is aimed for speed, yielding fast but large
//! proofs, with a targeted security level of 100 bits.

#[cfg(not(feature = "std"))]
use alloc::format;

use anyhow::{anyhow, Result};
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::fri::reduction_strategies::FriReductionStrategy;
use plonky2::fri::{FriConfig, FriParams};
use plonky2::hash::hash_types::RichField;

/// A configuration containing the different parameters used by the STARK prover.
#[derive(Clone, Debug)]
pub struct StarkConfig {
/// The targeted security level for the proofs generated with this configuration.
pub security_bits: usize,

/// The number of challenge points to generate, for IOPs that have soundness errors of (roughly)
/// `degree / |F|`.
pub num_challenges: usize,

/// The configuration of the FRI sub-protocol.
pub fri_config: FriConfig,
}

impl Default for StarkConfig {
fn default() -> Self {
Self::standard_fast_config()
}
}

impl StarkConfig {
/// Returns a custom STARK configuration.
pub const fn new(security_bits: usize, num_challenges: usize, fri_config: FriConfig) -> Self {
Self {
security_bits,
num_challenges,
fri_config,
}
}

/// A typical configuration with a rate of 2, resulting in fast but large proofs.
/// Targets ~100 bit conjectured security.
pub const fn standard_fast_config() -> Self {
@ -28,7 +60,88 @@ impl StarkConfig {
}
}

pub(crate) fn fri_params(&self, degree_bits: usize) -> FriParams {
/// Outputs the [`FriParams`] used during the FRI sub-protocol by this [`StarkConfig`].
pub fn fri_params(&self, degree_bits: usize) -> FriParams {
self.fri_config.fri_params(degree_bits, false)
}

/// Checks that this STARK configuration is consistent, i.e. that the different
/// parameters meet the targeted security level.
pub fn check_config<F: RichField + Extendable<D>, const D: usize>(&self) -> Result<()> {
let StarkConfig {
security_bits,
fri_config:
FriConfig {
rate_bits,
proof_of_work_bits,
num_query_rounds,
..
},
..
} = &self;

// Conjectured FRI security; see the ethSTARK paper.
let fri_field_bits = F::Extension::order().bits() as usize;
let fri_query_security_bits = num_query_rounds * rate_bits + *proof_of_work_bits as usize;
let fri_security_bits = fri_field_bits.min(fri_query_security_bits);

if fri_security_bits < *security_bits {
Err(anyhow!(format!(
"FRI params fall short of target security {}, reaching only {}",
security_bits, fri_security_bits
)))
} else {
Ok(())
}
}
}

#[cfg(test)]
mod tests {
use plonky2::field::goldilocks_field::GoldilocksField;

use super::*;

#[test]
fn test_valid_config() {
type F = GoldilocksField;
const D: usize = 2;

let config = StarkConfig::standard_fast_config();
assert!(config.check_config::<F, D>().is_ok());

let high_rate_config = StarkConfig::new(
100,
2,
FriConfig {
rate_bits: 3,
cap_height: 4,
proof_of_work_bits: 16,
reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5),
num_query_rounds: 28,
},
);
assert!(high_rate_config.check_config::<F, D>().is_ok());
}

#[test]
fn test_invalid_config() {
type F = GoldilocksField;
const D: usize = 2;

let too_few_queries_config = StarkConfig::new(
100,
2,
FriConfig {
rate_bits: 1,
cap_height: 4,
proof_of_work_bits: 16,
reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5),
num_query_rounds: 50,
},
);
// The conjectured security yields `rate_bits` * `num_query_rounds` + `proof_of_work_bits` = 66
// bits of security for FRI, which falls short of the 100 bits of security target.
assert!(too_few_queries_config.check_config::<F, D>().is_err());
}
}
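
// Worked sketch of the arithmetic behind `check_config` (not part of this diff):
// conjectured FRI security is `num_query_rounds * rate_bits + proof_of_work_bits`,
// capped by the extension field size. The high-rate config above reaches
// 28 * 3 + 16 = 100 bits, while the invalid one only reaches 50 * 1 + 16 = 66 bits.
fn conjectured_fri_query_security(rate_bits: usize, num_query_rounds: usize, proof_of_work_bits: usize) -> usize {
    num_query_rounds * rate_bits + proof_of_work_bits
}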

@ -1,5 +1,10 @@
use alloc::vec;
use alloc::vec::Vec;
//! Implementation of the constraint consumer.
//!
//! The [`ConstraintConsumer`], and its circuit counterpart, allow a
//! prover to evaluate all polynomials of a [`Stark`][crate::stark::Stark].

#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::marker::PhantomData;

use plonky2::field::extension::Extendable;
@ -9,14 +14,15 @@ use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;

/// A [`ConstraintConsumer`] evaluates all constraint, permutation and cross-table
/// lookup polynomials of a [`Stark`][crate::stark::Stark].
#[derive(Debug)]
pub struct ConstraintConsumer<P: PackedField> {
/// Random values used to combine multiple constraints into one.
alphas: Vec<P::Scalar>,

/// Running sums of constraints that have been emitted so far, scaled by powers of alpha.
// TODO(JN): This is pub so it can be used in a test. Once we have an API for accessing this
// result, it should be made private.
pub constraint_accs: Vec<P>,
constraint_accs: Vec<P>,

/// The evaluation of `X - g^(n-1)`.
z_last: P,
@ -31,6 +37,7 @@ pub struct ConstraintConsumer<P: PackedField> {
}

impl<P: PackedField> ConstraintConsumer<P> {
/// Creates a new instance of [`ConstraintConsumer`].
pub fn new(
alphas: Vec<P::Scalar>,
z_last: P,
@ -46,6 +53,8 @@ impl<P: PackedField> ConstraintConsumer<P> {
}
}

/// Consumes this [`ConstraintConsumer`] and outputs its sum of accumulated
/// constraints scaled by powers of `alpha`.
pub fn accumulators(self) -> Vec<P> {
self.constraint_accs
}
@ -76,6 +85,8 @@ impl<P: PackedField> ConstraintConsumer<P> {
}
}

/// Circuit version of [`ConstraintConsumer`].
#[derive(Debug)]
pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> {
/// A random value used to combine multiple constraints into one.
alphas: Vec<Target>,
@ -98,6 +109,7 @@ pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: us
}

impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> {
/// Creates a new instance of [`RecursiveConstraintConsumer`].
pub fn new(
zero: ExtensionTarget<D>,
alphas: Vec<Target>,
@ -115,6 +127,8 @@ impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F
}
}

/// Consumes this [`RecursiveConstraintConsumer`] and outputs its sum of accumulated
/// `Target` constraints scaled by powers of `alpha`.
pub fn accumulators(self) -> Vec<ExtensionTarget<D>> {
self.constraint_accs
}
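
// Illustrative sketch, not part of this diff: a STARK's evaluation method emits
// constraints into the consumer, and the prover/verifier later reads them back via
// `accumulators`. The boolean-column constraint below is a made-up example.
fn eval_boolean_column<P: plonky2::field::packed::PackedField>(
    local_values: &[P],
    consumer: &mut ConstraintConsumer<P>,
) {
    let b = local_values[0];
    // Enforce b * (b - 1) == 0 on every row, i.e. b is 0 or 1.
    consumer.constraint(b * (b - P::ONES));
}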

@ -27,8 +27,11 @@
//! is similar, but we provide not only `local_values` but also `next_values` -- corresponding to
//! the current and next row values -- when computing the linear combinations.

#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::cmp::min;
use core::fmt::Debug;
use core::iter::once;

use anyhow::{ensure, Result};
use itertools::Itertools;
@ -37,40 +40,39 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::challenger::Challenger;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::config::GenericConfig;
use plonky2::util::ceil_div_usize;
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::{
eval_helper_columns, eval_helper_columns_circuit, get_helper_cols, Column, ColumnFilter,
Filter, GrandProductChallenge,
eval_helper_columns, eval_helper_columns_circuit, get_grand_product_challenge_set,
get_helper_cols, Column, ColumnFilter, Filter, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::proof::{StarkProofTarget, StarkProofWithMetadata};
use crate::proof::{MultiProof, StarkProofTarget, StarkProofWithMetadata};
use crate::stark::Stark;

/// An alias for `usize`, to represent the index of a STARK table in a multi-STARK setting.
pub(crate) type TableIdx = usize;
pub type TableIdx = usize;

/// A `table` index with a linear combination of columns and a filter.
/// `filter` is used to determine the rows to select in `table`.
/// `columns` represents linear combinations of the columns of `table`.
#[derive(Clone, Debug)]
pub(crate) struct TableWithColumns<F: Field> {
pub struct TableWithColumns<F: Field> {
table: TableIdx,
columns: Vec<Column<F>>,
pub(crate) filter: Option<Filter<F>>,
filter: Option<Filter<F>>,
}

impl<F: Field> TableWithColumns<F> {
/// Generates a new `TableWithColumns` given a `table` index, a linear combination of columns `columns` and a `filter`.
pub(crate) fn new(table: TableIdx, columns: Vec<Column<F>>, filter: Option<Filter<F>>) -> Self {
pub fn new(table: TableIdx, columns: Vec<Column<F>>, filter: Option<Filter<F>>) -> Self {
Self {
table,
columns,
@ -81,7 +83,7 @@ impl<F: Field> TableWithColumns<F> {

/// Cross-table lookup data consisting in the lookup table (`looked_table`) and all the tables that look into `looked_table` (`looking_tables`).
/// Each `looking_table` corresponds to a STARK's table whose rows have been filtered out and whose columns have been through a linear combination (see `eval_table`). The concatenation of those smaller tables should result in the `looked_table`.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct CrossTableLookup<F: Field> {
/// Column linear combinations for all tables that are looking into the current table.
pub(crate) looking_tables: Vec<TableWithColumns<F>>,
@ -92,7 +94,7 @@ pub struct CrossTableLookup<F: Field> {
impl<F: Field> CrossTableLookup<F> {
/// Creates a new `CrossTableLookup` given some looking tables and a looked table.
/// All tables should have the same width.
pub(crate) fn new(
pub fn new(
looking_tables: Vec<TableWithColumns<F>>,
looked_table: TableWithColumns<F>,
) -> Self {
@ -109,7 +111,7 @@ impl<F: Field> CrossTableLookup<F> {
/// - the total number of helper columns for this table, over all Cross-table lookups,
/// - the total number of z polynomials for this table, over all Cross-table lookups,
/// - the number of helper columns for this table, for each Cross-table lookup.
pub(crate) fn num_ctl_helpers_zs_all(
pub fn num_ctl_helpers_zs_all(
ctls: &[Self],
table: TableIdx,
num_challenges: usize,
@ -119,7 +121,7 @@ impl<F: Field> CrossTableLookup<F> {
let mut num_ctls = 0;
let mut num_helpers_by_ctl = vec![0; ctls.len()];
for (i, ctl) in ctls.iter().enumerate() {
let all_tables = std::iter::once(&ctl.looked_table).chain(&ctl.looking_tables);
let all_tables = once(&ctl.looked_table).chain(&ctl.looking_tables);
let num_appearances = all_tables.filter(|twc| twc.table == table).count();
let is_helpers = num_appearances > 2;
if is_helpers {
@ -140,23 +142,23 @@ impl<F: Field> CrossTableLookup<F> {
}

/// Cross-table lookup data for one table.
#[derive(Clone, Default)]
pub(crate) struct CtlData<'a, F: Field> {
#[derive(Clone, Default, Debug)]
pub struct CtlData<'a, F: Field> {
/// Data associated with all Z(x) polynomials for one table.
pub(crate) zs_columns: Vec<CtlZData<'a, F>>,
pub zs_columns: Vec<CtlZData<'a, F>>,
}

/// Cross-table lookup data associated with one Z(x) polynomial.
/// One Z(x) polynomial can be associated to multiple tables,
/// built from the same STARK.
#[derive(Clone)]
pub(crate) struct CtlZData<'a, F: Field> {
#[derive(Clone, Debug)]
pub struct CtlZData<'a, F: Field> {
/// Helper columns to verify the Z polynomial values.
pub(crate) helper_columns: Vec<PolynomialValues<F>>,
/// Z polynomial values.
pub(crate) z: PolynomialValues<F>,
/// Cross-table lookup challenge.
pub(crate) challenge: GrandProductChallenge<F>,
pub challenge: GrandProductChallenge<F>,
/// Vector of column linear combinations for the current tables.
pub(crate) columns: Vec<&'a [Column<F>]>,
/// Vector of filter columns for the current table.
@ -164,17 +166,26 @@ pub(crate) struct CtlZData<'a, F: Field> {
pub(crate) filter: Vec<Option<Filter<F>>>,
}

impl<'a, F: Field> CtlZData<'a, F> {
/// Returns new CTL data from the provided arguments.
pub fn new(
helper_columns: Vec<PolynomialValues<F>>,
z: PolynomialValues<F>,
challenge: GrandProductChallenge<F>,
columns: Vec<&'a [Column<F>]>,
filter: Vec<Option<Filter<F>>>,
) -> Self {
Self {
helper_columns,
z,
challenge,
columns,
filter,
}
}
}

impl<'a, F: Field> CtlData<'a, F> {
/// Returns the number of cross-table lookup polynomials.
pub(crate) fn len(&self) -> usize {
self.zs_columns.len()
}

/// Returns whether there are no cross-table lookups.
pub(crate) fn is_empty(&self) -> bool {
self.zs_columns.is_empty()
}

/// Returns all the cross-table lookup helper polynomials.
pub(crate) fn ctl_helper_polys(&self) -> Vec<PolynomialValues<F>> {
let num_polys = self
@ -210,82 +221,58 @@ impl<'a, F: Field> CtlData<'a, F> {
}
}

/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct GrandProductChallengeSet<T: Copy + Eq + PartialEq + Debug> {
pub(crate) challenges: Vec<GrandProductChallenge<T>>,
}

impl GrandProductChallengeSet<Target> {
pub(crate) fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
buffer.write_usize(self.challenges.len())?;
for challenge in &self.challenges {
buffer.write_target(challenge.beta)?;
buffer.write_target(challenge.gamma)?;
}
Ok(())
}

pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
let length = buffer.read_usize()?;
let mut challenges = Vec::with_capacity(length);
for _ in 0..length {
challenges.push(GrandProductChallenge {
beta: buffer.read_target()?,
gamma: buffer.read_target()?,
});
}

Ok(GrandProductChallengeSet { challenges })
}
}

fn get_grand_product_challenge<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
) -> GrandProductChallenge<F> {
let beta = challenger.get_challenge();
let gamma = challenger.get_challenge();
GrandProductChallenge { beta, gamma }
}
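
// Illustrative sketch, not part of this diff: a `GrandProductChallenge { beta, gamma }`
// compresses a row of field elements into a single one, roughly mirroring
// `GrandProductChallenge::combine` (a reduction by powers of `beta`, shifted by `gamma`):
fn combine_row<F: plonky2::field::types::Field>(row: &[F], beta: F, gamma: F) -> F {
    row.iter().rev().fold(F::ZERO, |acc, &v| acc * beta + v) + gamma
}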

pub(crate) fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
num_challenges: usize,
) -> GrandProductChallengeSet<F> {
let challenges = (0..num_challenges)
.map(|_| get_grand_product_challenge(challenger))
.collect();
GrandProductChallengeSet { challenges }
}

fn get_grand_product_challenge_target<
/// Outputs a tuple of (challenges, data) of CTL challenges and all
/// the CTL data necessary to prove a multi-STARK system.
pub fn get_ctl_data<'a, F, C, const D: usize, const N: usize>(
config: &StarkConfig,
trace_poly_values: &[Vec<PolynomialValues<F>>; N],
all_cross_table_lookups: &'a [CrossTableLookup<F>],
challenger: &mut Challenger<F, C::Hasher>,
max_constraint_degree: usize,
) -> (GrandProductChallengeSet<F>, [CtlData<'a, F>; N])
where
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
) -> GrandProductChallenge<Target> {
let beta = challenger.get_challenge(builder);
let gamma = challenger.get_challenge(builder);
GrandProductChallenge { beta, gamma }
C: GenericConfig<D, F = F>,
{
// Get challenges for the cross-table lookups.
let ctl_challenges = get_grand_product_challenge_set(challenger, config.num_challenges);

// For each STARK, compute its cross-table lookup Z polynomials
// and get the associated `CtlData`.
let ctl_data = cross_table_lookup_data::<F, D, N>(
trace_poly_values,
all_cross_table_lookups,
&ctl_challenges,
max_constraint_degree,
);

(ctl_challenges, ctl_data)
}
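
// Illustrative sketch, not part of this diff: a multi-STARK prover would call
// `get_ctl_data` once, after observing all trace caps, and hand each table's
// `CtlData` to that table's prover. The surrounding variable names are assumptions.
//
// let (ctl_challenges, ctl_data_per_table) = get_ctl_data::<F, C, D, NUM_TABLES>(
//     config,
//     &trace_poly_values,
//     &all_stark.cross_table_lookups,
//     &mut challenger,
//     max_constraint_degree,
// );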

pub(crate) fn get_grand_product_challenge_set_target<
/// Outputs all the CTL data necessary to prove a multi-STARK system.
pub fn get_ctl_vars_from_proofs<'a, F, C, const D: usize, const N: usize>(
multi_proof: &MultiProof<F, C, D, N>,
all_cross_table_lookups: &'a [CrossTableLookup<F>],
ctl_challenges: &'a GrandProductChallengeSet<F>,
num_lookup_columns: &'a [usize; N],
max_constraint_degree: usize,
) -> [Vec<CtlCheckVars<'a, F, <F as Extendable<D>>::Extension, <F as Extendable<D>>::Extension, D>>;
N]
where
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
) -> GrandProductChallengeSet<Target> {
let challenges = (0..num_challenges)
.map(|_| get_grand_product_challenge_target(builder, challenger))
.collect();
GrandProductChallengeSet { challenges }
}
C: GenericConfig<D, F = F>,
{
let num_ctl_helper_cols =
num_ctl_helper_columns_by_table(all_cross_table_lookups, max_constraint_degree);

CtlCheckVars::from_proofs(
&multi_proof.stark_proofs,
all_cross_table_lookups,
ctl_challenges,
num_lookup_columns,
&num_ctl_helper_cols,
)
}
/// Returns the number of helper columns for each `Table`.
pub(crate) fn num_ctl_helper_columns_by_table<F: Field, const N: usize>(
ctls: &[CrossTableLookup<F>],
@ -314,6 +301,17 @@ pub(crate) fn num_ctl_helper_columns_by_table<F: Field, const N: usize>(
res
}

/// Gets the auxiliary polynomials associated to these CTL data.
pub(crate) fn get_ctl_auxiliary_polys<F: Field>(
ctl_data: Option<&CtlData<F>>,
) -> Option<Vec<PolynomialValues<F>>> {
ctl_data.map(|data| {
let mut ctl_polys = data.ctl_helper_polys();
ctl_polys.extend(data.ctl_z_polys());
ctl_polys
})
}

/// Generates all the cross-table lookup data, for all tables.
/// - `trace_poly_values` corresponds to the trace values for all tables.
/// - `cross_table_lookups` corresponds to all the cross-table lookups, i.e. the looked and looking tables, as described in `CrossTableLookup`.
@ -467,8 +465,8 @@ fn partial_sums<F: Field>(
}

/// Data necessary to check the cross-table lookups of a given table.
#[derive(Clone)]
pub(crate) struct CtlCheckVars<'a, F, FE, P, const D2: usize>
#[derive(Clone, Debug)]
pub struct CtlCheckVars<'a, F, FE, P, const D2: usize>
where
F: Field,
FE: FieldExtension<D2, BaseField = F>,
@ -493,13 +491,24 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
CtlCheckVars<'a, F, F::Extension, F::Extension, D>
{
/// Extracts the `CtlCheckVars` for each STARK.
pub(crate) fn from_proofs<C: GenericConfig<D, F = F>, const N: usize>(
pub fn from_proofs<C: GenericConfig<D, F = F>, const N: usize>(
proofs: &[StarkProofWithMetadata<F, C, D>; N],
cross_table_lookups: &'a [CrossTableLookup<F>],
ctl_challenges: &'a GrandProductChallengeSet<F>,
num_lookup_columns: &[usize; N],
num_helper_ctl_columns: &Vec<[usize; N]>,
) -> [Vec<Self>; N] {
let mut ctl_vars_per_table = [0; N].map(|_| vec![]);
// If there are no auxiliary polys in the proofs `openings`,
// return early. The verifier will reject the proofs when
// calling `validate_proof_shape`.
if proofs
.iter()
.any(|p| p.proof.openings.auxiliary_polys.is_none())
{
return ctl_vars_per_table;
}

let mut total_num_helper_cols_by_table = [0; N];
for p_ctls in num_helper_ctl_columns {
for j in 0..N {
@ -514,8 +523,14 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
.map(|(p, &num_lookup)| {
let openings = &p.proof.openings;

let ctl_zs = &openings.auxiliary_polys[num_lookup..];
let ctl_zs_next = &openings.auxiliary_polys_next[num_lookup..];
let ctl_zs = &openings
.auxiliary_polys
.as_ref()
.expect("We cannot have CTLs without auxiliary polynomials.")[num_lookup..];
let ctl_zs_next = &openings
.auxiliary_polys_next
.as_ref()
.expect("We cannot have CTLs without auxiliary polynomials.")[num_lookup..];
ctl_zs.iter().zip(ctl_zs_next).collect::<Vec<_>>()
})
.collect::<Vec<_>>();
@ -523,7 +538,6 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
// Put each cross-table lookup polynomial into the correct table data: if a CTL polynomial is extracted from looking/looked table t, then we add it to the `CtlCheckVars` of table t.
let mut start_indices = [0; N];
let mut z_indices = [0; N];
let mut ctl_vars_per_table = [0; N].map(|_| vec![]);
for (
CrossTableLookup {
looking_tables,
@ -698,8 +712,8 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
}

/// Circuit version of `CtlCheckVars`. Data necessary to check the cross-table lookups of a given table.
#[derive(Clone)]
pub(crate) struct CtlCheckVarsTarget<F: Field, const D: usize> {
#[derive(Clone, Debug)]
pub struct CtlCheckVarsTarget<F: Field, const D: usize> {
/// Evaluation of the helper columns to check that the Z polynomial
/// was constructed correctly.
pub(crate) helper_columns: Vec<ExtensionTarget<D>>,
@ -716,8 +730,8 @@ pub(crate) struct CtlCheckVarsTarget<F: Field, const D: usize> {
}

impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<F, D> {
/// Circuit version of `from_proofs`. Extracts the `CtlCheckVarsTarget` for each STARK.
pub(crate) fn from_proof(
/// Circuit version of `from_proofs`, for a single STARK.
pub fn from_proof(
table: TableIdx,
proof: &StarkProofTarget<D>,
cross_table_lookups: &'a [CrossTableLookup<F>],
@ -729,15 +743,24 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<F, D> {
// Get all cross-table lookup polynomial openings for each STARK proof.
let ctl_zs = {
let openings = &proof.openings;
let ctl_zs = openings.auxiliary_polys.iter().skip(num_lookup_columns);
let ctl_zs = openings
.auxiliary_polys
.as_ref()
.expect("We cannot have CTLs without auxiliary polynomials.")
.iter()
.skip(num_lookup_columns);
let ctl_zs_next = openings
.auxiliary_polys_next
.as_ref()
.expect("We cannot have CTLs without auxiliary polynomials.")
.iter()
.skip(num_lookup_columns);
ctl_zs.zip(ctl_zs_next).collect::<Vec<_>>()
};

// Put each cross-table lookup polynomial into the correct table data: if a CTL polynomial is extracted from looking/looked table t, then we add it to the `CtlCheckVars` of table t.
// Put each cross-table lookup polynomial into the correct table's data.
// If a CTL polynomial is extracted from the looking/looked table `t`,
// then we add it to the `CtlCheckVars` of table `t`.
let mut z_index = 0;
let mut start_index = 0;
let mut ctl_vars = vec![];
@ -750,7 +773,8 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<F, D> {
) in cross_table_lookups.iter().enumerate()
{
for &challenges in &ctl_challenges.challenges {
// Group looking tables by `Table`, since we bundle the looking tables taken from the same `Table` together thanks to helper columns.
// Group looking tables by `Table`, since we bundle the looking tables
// taken from the same `Table` together thanks to helper columns.

let count = looking_tables
.iter()
@ -779,8 +803,6 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<F, D> {

start_index += num_helper_ctl_columns[i];
z_index += 1;
// let columns = group.0.clone();
// let filter = group.1.clone();
ctl_vars.push(Self {
helper_columns,
local_z: *looking_z,
@ -921,14 +943,10 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
}

/// Verifies all cross-table lookups.
pub(crate) fn verify_cross_table_lookups<
F: RichField + Extendable<D>,
const D: usize,
const N: usize,
>(
pub fn verify_cross_table_lookups<F: RichField + Extendable<D>, const D: usize, const N: usize>(
cross_table_lookups: &[CrossTableLookup<F>],
ctl_zs_first: [Vec<F>; N],
ctl_extra_looking_sums: Vec<Vec<F>>,
ctl_extra_looking_sums: Option<&[Vec<F>]>,
config: &StarkConfig,
) -> Result<()> {
let mut ctl_zs_openings = ctl_zs_first.iter().map(|v| v.iter()).collect::<Vec<_>>();
@ -941,7 +959,9 @@ pub(crate) fn verify_cross_table_lookups<
) in cross_table_lookups.iter().enumerate()
{
// Get elements looking into `looked_table` that are not associated to any STARK.
let extra_sum_vec = &ctl_extra_looking_sums[looked_table.table];
let extra_sum_vec: &[F] = ctl_extra_looking_sums
.map(|v| v[looked_table.table].as_ref())
.unwrap_or_default();
// We want to iterate on each looking table only once.
|
||||
let mut filtered_looking_tables = vec![];
|
||||
for table in looking_tables {
|
||||
@ -974,7 +994,7 @@ pub(crate) fn verify_cross_table_lookups<
|
||||
}
|
||||
|
||||
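// Illustrative sketch (not part of this diff; the helper name is hypothetical):
// with `ctl_extra_looking_sums` now an `Option`, a system without extra
// looking sums simply passes `None`.
use anyhow::Result;
use plonky2::field::goldilocks_field::GoldilocksField as F;
use starky::config::StarkConfig;
use starky::cross_table_lookup::{verify_cross_table_lookups, CrossTableLookup};

fn verify_ctls<const N: usize>(
    ctls: &[CrossTableLookup<F>],
    ctl_zs_first: [Vec<F>; N],
    config: &StarkConfig,
) -> Result<()> {
    // No extra values are looked up outside of the STARK tables here.
    verify_cross_table_lookups::<F, 2, N>(ctls, ctl_zs_first, None, config)
}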
/// Circuit version of `verify_cross_table_lookups`. Verifies all cross-table lookups.
pub(crate) fn verify_cross_table_lookups_circuit<
pub fn verify_cross_table_lookups_circuit<
    F: RichField + Extendable<D>,
    const D: usize,
    const N: usize,
@ -982,7 +1002,7 @@ pub(crate) fn verify_cross_table_lookups_circuit<
    builder: &mut CircuitBuilder<F, D>,
    cross_table_lookups: Vec<CrossTableLookup<F>>,
    ctl_zs_first: [Vec<Target>; N],
    ctl_extra_looking_sums: Vec<Vec<Target>>,
    ctl_extra_looking_sums: Option<&[Vec<Target>]>,
    inner_config: &StarkConfig,
) {
    let mut ctl_zs_openings = ctl_zs_first.iter().map(|v| v.iter()).collect::<Vec<_>>();
@ -992,7 +1012,9 @@ pub(crate) fn verify_cross_table_lookups_circuit<
    } in cross_table_lookups.into_iter()
    {
        // Get elements looking into `looked_table` that are not associated to any STARK.
        let extra_sum_vec = &ctl_extra_looking_sums[looked_table.table];
        let extra_sum_vec: &[Target] = ctl_extra_looking_sums
            .map(|v| v[looked_table.table].as_ref())
            .unwrap_or_default();
        // We want to iterate on each looking table only once.
        let mut filtered_looking_tables = vec![];
        for table in looking_tables {
@ -1019,26 +1041,32 @@ pub(crate) fn verify_cross_table_lookups_circuit<
    debug_assert!(ctl_zs_openings.iter_mut().all(|iter| iter.next().is_none()));
}

#[cfg(test)]
pub(crate) mod testutils {
    use std::collections::HashMap;
/// Debugging module to assert the correctness of the different CTLs of a
/// multi-STARK system; it can be used during the proof generation process.
///
/// **Note**: this is an expensive check, and is hence only available when the
/// `debug_assertions` flag is activated, so as not to hinder the performance
/// of regular `release` builds.
#[cfg(debug_assertions)]
pub mod debug_utils {
    #[cfg(not(feature = "std"))]
    use alloc::{vec, vec::Vec};

    use hashbrown::HashMap;
    use plonky2::field::polynomial::PolynomialValues;
    use plonky2::field::types::Field;

    use crate::all_stark::Table;
    use crate::cross_table_lookup::{CrossTableLookup, TableWithColumns};
    use super::{CrossTableLookup, TableIdx, TableWithColumns};

    type MultiSet<F> = HashMap<Vec<F>, Vec<(Table, usize)>>;
    type MultiSet<F> = HashMap<Vec<F>, Vec<(TableIdx, usize)>>;

    /// Check that the provided traces and cross-table lookups are consistent.
    pub(crate) fn check_ctls<F: Field>(
    pub fn check_ctls<F: Field>(
        trace_poly_values: &[Vec<PolynomialValues<F>>],
        cross_table_lookups: &[CrossTableLookup<F>],
        extra_memory_looking_values: &[Vec<F>],
        extra_looking_values: &HashMap<TableIdx, Vec<Vec<F>>>,
    ) {
        for (i, ctl) in cross_table_lookups.iter().enumerate() {
            check_ctl(trace_poly_values, ctl, i, extra_memory_looking_values);
            check_ctl(trace_poly_values, ctl, i, extra_looking_values.get(&i));
        }
    }

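// Illustrative sketch (not part of this diff; the function and argument names
// are hypothetical): a multi-STARK prover can sanity-check its traces against
// the CTLs before committing to them, at no cost in `release` builds.
#[cfg(debug_assertions)]
fn debug_check_ctls<F: plonky2::field::types::Field>(
    trace_poly_values: &[Vec<plonky2::field::polynomial::PolynomialValues<F>>],
    cross_table_lookups: &[starky::cross_table_lookup::CrossTableLookup<F>],
) {
    use hashbrown::HashMap;
    use starky::cross_table_lookup::debug_utils::check_ctls;

    // No extra looking values are injected outside of the STARK traces here.
    let extra_looking_values = HashMap::new();
    check_ctls(trace_poly_values, cross_table_lookups, &extra_looking_values);
}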
@ -1046,7 +1074,7 @@ pub(crate) mod testutils {
        trace_poly_values: &[Vec<PolynomialValues<F>>],
        ctl: &CrossTableLookup<F>,
        ctl_index: usize,
        extra_memory_looking_values: &[Vec<F>],
        extra_looking_values: Option<&Vec<Vec<F>>>,
    ) {
        let CrossTableLookup {
            looking_tables,
@ -1063,15 +1091,15 @@ pub(crate) mod testutils {
        }
        process_table(trace_poly_values, looked_table, &mut looked_multiset);

        // Extra looking values for memory
        if ctl_index == Table::Memory as usize {
            for row in extra_memory_looking_values.iter() {
        // Include extra looking values if any for this `ctl_index`.
        if let Some(values) = extra_looking_values {
            for row in values.iter() {
                // The table and the row index don't matter here, as we just want to enforce
                // that the special extra values do appear when looking against the Memory table.
                // that the special extra values do appear when looking against the specified table.
                looking_multiset
                    .entry(row.to_vec())
                    .or_default()
                    .push((Table::Cpu, 0));
                    .push((0, 0));
            }
        }

@ -1106,10 +1134,7 @@ pub(crate) mod testutils {
                .iter()
                .map(|c| c.eval_table(trace, i))
                .collect::<Vec<_>>();
            multiset
                .entry(row)
                .or_default()
                .push((Table::all()[table.table], i));
            multiset.entry(row).or_default().push((table.table, i));
        } else {
            assert_eq!(filter, F::ZERO, "Non-binary filter?")
        }
@ -1117,8 +1142,8 @@ pub(crate) mod testutils {
    }

    fn check_locations<F: Field>(
        looking_locations: &[(Table, usize)],
        looked_locations: &[(Table, usize)],
        looking_locations: &[(TableIdx, usize)],
        looked_locations: &[(TableIdx, usize)],
        ctl_index: usize,
        row: &[F],
    ) {

@ -1,3 +1,5 @@
//! Implementation of constraint evaluation frames for STARKs.

/// A trait for viewing an evaluation frame of a STARK table.
///
/// It allows access to the current and next rows at a given step
@ -8,6 +10,7 @@ pub trait StarkEvaluationFrame<T: Copy + Clone + Default, U: Copy + Clone + Defa
{
    /// The number of columns for the STARK table this evaluation frame views.
    const COLUMNS: usize;
    /// The number of public inputs for the STARK.
    const PUBLIC_INPUTS: usize;

    /// Returns the local values (i.e. current row) for this evaluation frame.
@ -15,6 +18,7 @@ pub trait StarkEvaluationFrame<T: Copy + Clone + Default, U: Copy + Clone + Defa
    /// Returns the next values (i.e. next row) for this evaluation frame.
    fn get_next_values(&self) -> &[T];

    /// Returns the public inputs for this evaluation frame.
    fn get_public_inputs(&self) -> &[U];

    /// Outputs a new evaluation frame from the provided local and next values.
@ -24,6 +28,9 @@ pub trait StarkEvaluationFrame<T: Copy + Clone + Default, U: Copy + Clone + Defa
    fn from_values(lv: &[T], nv: &[T], pis: &[U]) -> Self;
}

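// Illustrative sketch (not part of this diff): building a 2-column,
// 1-public-input frame from raw values with `StarkEvaluationFrame::from_values`.
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Field;
use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame};

fn main() {
    let lv = [F::ONE, F::TWO]; // current row
    let nv = [F::TWO, F::from_canonical_u64(3)]; // next row
    let pis = [F::ONE]; // public inputs
    let frame: StarkFrame<F, F, 2, 1> = StarkFrame::from_values(&lv, &nv, &pis);
    assert_eq!(frame.get_local_values(), &lv[..]);
    assert_eq!(frame.get_next_values(), &nv[..]);
}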
/// An evaluation frame to be used when defining the constraints of a STARK
/// system; it implements the [`StarkEvaluationFrame`] trait.
#[derive(Debug)]
pub struct StarkFrame<
    T: Copy + Clone + Default,
    U: Copy + Clone + Default,

@ -1,5 +1,9 @@
use alloc::vec;
use alloc::vec::Vec;
//! An example of generating and verifying STARK proofs for the Fibonacci sequence.
//! The toy STARK system also includes two columns that are a permutation of each other,
//! to highlight the use of the permutation argument with logUp.

#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::marker::PhantomData;

use plonky2::field::extension::{Extendable, FieldExtension};
@ -16,9 +20,8 @@ use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;

/// Toy STARK system used for testing.
/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition
/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`.
/// Note: The `i, j` columns are only used to test the permutation argument.
/// Computes a Fibonacci sequence with state `[x0, x1]` using the state transition
/// `x0' <- x1, x1' <- x0 + x1`.
#[derive(Copy, Clone)]
struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
    num_rows: usize,
@ -41,6 +44,120 @@ impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
        }
    }

    /// Generate the trace using `x0, x1` as initial state values.
    fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
        let trace_rows = (0..self.num_rows)
            .scan([x0, x1], |acc, _| {
                let tmp = *acc;
                acc[0] = tmp[1];
                acc[1] = tmp[0] + tmp[1];
                Some(tmp)
            })
            .collect::<Vec<_>>();
        trace_rows_to_poly_values(trace_rows)
    }
}

const FIBONACCI_COLUMNS: usize = 2;
const FIBONACCI_PUBLIC_INPUTS: usize = 3;

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, FIBONACCI_COLUMNS, FIBONACCI_PUBLIC_INPUTS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    type EvaluationFrameTarget = StarkFrame<
        ExtensionTarget<D>,
        ExtensionTarget<D>,
        FIBONACCI_COLUMNS,
        FIBONACCI_PUBLIC_INPUTS,
    >;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: &Self::EvaluationFrame<FE, P, D2>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        let local_values = vars.get_local_values();
        let next_values = vars.get_next_values();
        let public_inputs = vars.get_public_inputs();

        // Check public inputs.
        yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
        yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
        yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);

        // x0' <- x1
        yield_constr.constraint_transition(next_values[0] - local_values[1]);
        // x1' <- x0 + x1
        yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
    }

    fn eval_ext_circuit(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        let local_values = vars.get_local_values();
        let next_values = vars.get_next_values();
        let public_inputs = vars.get_public_inputs();
        // Check public inputs.
        let pis_constraints = [
            builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
            builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
            builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
        ];
        yield_constr.constraint_first_row(builder, pis_constraints[0]);
        yield_constr.constraint_first_row(builder, pis_constraints[1]);
        yield_constr.constraint_last_row(builder, pis_constraints[2]);

        // x0' <- x1
        let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
        yield_constr.constraint_transition(builder, first_col_constraint);
        // x1' <- x0 + x1
        let second_col_constraint = {
            let tmp = builder.sub_extension(next_values[1], local_values[0]);
            builder.sub_extension(tmp, local_values[1])
        };
        yield_constr.constraint_transition(builder, second_col_constraint);
    }

    fn constraint_degree(&self) -> usize {
        2
    }
}

/// Similar system to the one above, but with extra columns to illustrate the permutation argument.
/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition
/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`.
/// Note: The `i, j` columns are the columns used to test the permutation argument.
#[derive(Copy, Clone)]
struct FibonacciWithPermutationStark<F: RichField + Extendable<D>, const D: usize> {
    num_rows: usize,
    _phantom: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> FibonacciWithPermutationStark<F, D> {
    // The first public input is `x0`.
    const PI_INDEX_X0: usize = 0;
    // The second public input is `x1`.
    const PI_INDEX_X1: usize = 1;
    // The third public input is the second element of the last row, which should be equal to the
    // `num_rows`-th Fibonacci number.
    const PI_INDEX_RES: usize = 2;

    const fn new(num_rows: usize) -> Self {
        Self {
            num_rows,
            _phantom: PhantomData,
        }
    }

    /// Generate the trace using `x0, x1, 0, 1, 1` as initial state values.
    fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
        let mut trace_rows = (0..self.num_rows)
@ -59,17 +176,23 @@ impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
    }
}

const COLUMNS: usize = 5;
const PUBLIC_INPUTS: usize = 3;
const FIBONACCI_PERM_COLUMNS: usize = 5;
const FIBONACCI_PERM_PUBLIC_INPUTS: usize = 3;

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, COLUMNS, PUBLIC_INPUTS>
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D>
    for FibonacciWithPermutationStark<F, D>
{
    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, FIBONACCI_PERM_COLUMNS, FIBONACCI_PERM_PUBLIC_INPUTS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    type EvaluationFrameTarget =
        StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, COLUMNS, PUBLIC_INPUTS>;
    type EvaluationFrameTarget = StarkFrame<
        ExtensionTarget<D>,
        ExtensionTarget<D>,
        FIBONACCI_PERM_COLUMNS,
        FIBONACCI_PERM_PUBLIC_INPUTS,
    >;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
@ -151,7 +274,7 @@ mod tests {
    use plonky2::util::timing::TimingTree;

    use crate::config::StarkConfig;
    use crate::fibonacci_stark::FibonacciStark;
    use crate::fibonacci_stark::{FibonacciStark, FibonacciWithPermutationStark};
    use crate::proof::StarkProofWithPublicInputs;
    use crate::prover::prove;
    use crate::recursive_verifier::{
@ -171,14 +294,30 @@ mod tests {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = FibonacciStark<F, D>;
        type S1 = FibonacciStark<F, D>;
        type S2 = FibonacciWithPermutationStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;
        let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
        let stark = S::new(num_rows);

        // Test first STARK
        let stark = S1::new(num_rows);
        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
        let proof = prove::<F, C, S, D>(
        let proof = prove::<F, C, S1, D>(
            stark,
            &config,
            trace,
            &public_inputs,
            &mut TimingTree::default(),
        )?;

        verify_stark_proof(stark, proof, &config)?;

        // Test second STARK
        let stark = S2::new(num_rows);
        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
        let proof = prove::<F, C, S2, D>(
            stark,
            &config,
            trace,
@ -194,10 +333,14 @@ mod tests {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = FibonacciStark<F, D>;
        type S1 = FibonacciStark<F, D>;
        type S2 = FibonacciWithPermutationStark<F, D>;

        let num_rows = 1 << 5;
        let stark = S::new(num_rows);
        let stark = S1::new(num_rows);
        test_stark_low_degree(stark)?;

        let stark = S2::new(num_rows);
        test_stark_low_degree(stark)
    }

@ -206,11 +349,14 @@ mod tests {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = FibonacciStark<F, D>;
        type S1 = FibonacciStark<F, D>;
        type S2 = FibonacciWithPermutationStark<F, D>;

        let num_rows = 1 << 5;
        let stark = S::new(num_rows);
        test_stark_circuit_constraints::<F, C, S, D>(stark)
        let stark = S1::new(num_rows);
        test_stark_circuit_constraints::<F, C, S1, D>(stark)?;
        let stark = S2::new(num_rows);
        test_stark_circuit_constraints::<F, C, S2, D>(stark)
    }

    #[test]
@ -219,14 +365,17 @@ mod tests {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = FibonacciStark<F, D>;
        type S1 = FibonacciStark<F, D>;
        type S2 = FibonacciWithPermutationStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;
        let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
        let stark = S::new(num_rows);

        // Test first STARK
        let stark = S1::new(num_rows);
        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
        let proof = prove::<F, C, S, D>(
        let proof = prove::<F, C, S1, D>(
            stark,
            &config,
            trace,
@ -235,7 +384,21 @@ mod tests {
        )?;
        verify_stark_proof(stark, proof.clone(), &config)?;

        recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
        recursive_proof::<F, C, S1, C, D>(stark, proof, &config, true)?;

        // Test second STARK
        let stark = S2::new(num_rows);
        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
        let proof = prove::<F, C, S2, D>(
            stark,
            &config,
            trace,
            &public_inputs,
            &mut TimingTree::default(),
        )?;
        verify_stark_proof(stark, proof.clone(), &config)?;

        recursive_proof::<F, C, S2, C, D>(stark, proof, &config, true)
    }

    fn recursive_proof<
@ -257,8 +420,9 @@ mod tests {
        let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
        let mut pw = PartialWitness::new();
        let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
        let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits);
        set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof);
        let pt =
            add_virtual_stark_proof_with_pis(&mut builder, &stark, inner_config, degree_bits, 0, 0);
        set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof, builder.zero());

        verify_stark_proof_circuit::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);

@ -1,5 +1,3 @@
use alloc::vec::Vec;

use plonky2::field::extension::Extendable;
use plonky2::field::polynomial::PolynomialCoeffs;
use plonky2::fri::proof::{FriProof, FriProofTarget};
@ -12,12 +10,23 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};

use crate::config::StarkConfig;
use crate::lookup::{get_grand_product_challenge_set, get_grand_product_challenge_set_target};
use crate::lookup::{
    get_grand_product_challenge_set, get_grand_product_challenge_set_target,
    GrandProductChallengeSet,
};
use crate::proof::*;
use crate::stark::Stark;

/// Generates challenges for a STARK proof from a challenger, given
/// all the arguments needed to update the challenger state.
///
/// Note: `trace_cap` is passed as an `Option` to signify whether the challenger
/// should observe it. Observing it here could be redundant in a
/// multi-STARK system where trace caps would have already been observed
/// before proving each STARK individually.
fn get_challenges<F, C, const D: usize>(
    trace_cap: &MerkleCap<F, C::Hasher>,
    challenger: &mut Challenger<F, C::Hasher>,
    challenges: Option<&GrandProductChallengeSet<F>>,
    trace_cap: Option<&MerkleCap<F, C::Hasher>>,
    auxiliary_polys_cap: Option<&MerkleCap<F, C::Hasher>>,
    quotient_polys_cap: &MerkleCap<F, C::Hasher>,
    openings: &StarkOpeningSet<F, D>,
@ -33,15 +42,21 @@ where
{
    let num_challenges = config.num_challenges;

    let mut challenger = Challenger::<F, C::Hasher>::new();
    if let Some(cap) = &trace_cap {
        challenger.observe_cap(cap);
    }

    challenger.observe_cap(trace_cap);
    let lookup_challenge_set = if let Some(&challenges) = challenges.as_ref() {
        Some(challenges.clone())
    } else {
        auxiliary_polys_cap
            .is_some()
            .then(|| get_grand_product_challenge_set(challenger, num_challenges))
    };

    let lookup_challenge_set = auxiliary_polys_cap.map(|auxiliary_polys_cap| {
        let tmp = get_grand_product_challenge_set(&mut challenger, num_challenges);
        challenger.observe_cap(auxiliary_polys_cap);
        tmp
    });
    if let Some(cap) = &auxiliary_polys_cap {
        challenger.observe_cap(cap);
    }

    let stark_alphas = challenger.get_n_challenges(num_challenges);

@ -64,25 +79,27 @@ where
    }
}

impl<F, C, const D: usize> StarkProofWithPublicInputs<F, C, D>
impl<F, C, const D: usize> StarkProof<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    // TODO: Should be used later in compression?
    #![allow(dead_code)]
    pub(crate) fn fri_query_indices(&self, config: &StarkConfig, degree_bits: usize) -> Vec<usize> {
        self.get_challenges(config, degree_bits)
            .fri_challenges
            .fri_query_indices
    }

    /// Computes all Fiat-Shamir challenges used in the STARK proof.
    pub(crate) fn get_challenges(
    /// For a single STARK system, the `ignore_trace_cap` boolean should
    /// always be set to `false`.
    ///
    /// Multi-STARK systems may already observe individual trace caps
    /// ahead of proving each table, and hence may skip observing
    /// the cap again when generating individual challenges.
    pub fn get_challenges(
        &self,
        challenger: &mut Challenger<F, C::Hasher>,
        challenges: Option<&GrandProductChallengeSet<F>>,
        ignore_trace_cap: bool,
        config: &StarkConfig,
        degree_bits: usize,
    ) -> StarkProofChallenges<F, D> {
        let degree_bits = self.recover_degree_bits(config);

        let StarkProof {
            trace_cap,
            auxiliary_polys_cap,
@ -95,9 +112,17 @@ where
                pow_witness,
                ..
            },
        } = &self.proof;
        } = &self;

        let trace_cap = if ignore_trace_cap {
            None
        } else {
            Some(trace_cap)
        };

        get_challenges::<F, C, D>(
            challenger,
            challenges,
            trace_cap,
            auxiliary_polys_cap.as_ref(),
            quotient_polys_cap,
@ -111,14 +136,37 @@ where
    }
}

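// Illustrative sketch (not part of this diff; the helper name is hypothetical):
// recomputing the Fiat-Shamir challenges of a single-STARK proof with the new
// API, so `challenges` is `None` and `ignore_trace_cap` is `false`.
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig as C};
use starky::config::StarkConfig;
use starky::proof::{StarkProof, StarkProofChallenges};

fn recompute_challenges(
    proof: &StarkProof<F, C, 2>,
    config: &StarkConfig,
) -> StarkProofChallenges<F, 2> {
    let mut challenger = Challenger::<F, <C as GenericConfig<2>>::Hasher>::new();
    proof.get_challenges(&mut challenger, None, false, config)
}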
#[allow(clippy::too_many_arguments)]
pub(crate) fn get_challenges_target<
impl<F, C, const D: usize> StarkProofWithPublicInputs<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
{
    /// Computes all Fiat-Shamir challenges used in the STARK proof.
    /// For a single STARK system, the `ignore_trace_cap` boolean should
    /// always be set to `false`.
    ///
    /// Multi-STARK systems may already observe individual trace caps
    /// ahead of proving each table, and hence may skip observing
    /// the cap again when generating individual challenges.
    pub fn get_challenges(
        &self,
        challenger: &mut Challenger<F, C::Hasher>,
        challenges: Option<&GrandProductChallengeSet<F>>,
        ignore_trace_cap: bool,
        config: &StarkConfig,
    ) -> StarkProofChallenges<F, D> {
        self.proof
            .get_challenges(challenger, challenges, ignore_trace_cap, config)
    }
}

/// Circuit version of `get_challenges`, with the same flexibility around
/// `trace_cap` being passed as an `Option`.
fn get_challenges_target<F, C, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    trace_cap: &MerkleCapTarget,
    challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
    challenges: Option<&GrandProductChallengeSet<Target>>,
    trace_cap: Option<&MerkleCapTarget>,
    auxiliary_polys_cap: Option<&MerkleCapTarget>,
    quotient_polys_cap: &MerkleCapTarget,
    openings: &StarkOpeningSetTarget<D>,
@ -128,26 +176,34 @@ pub(crate) fn get_challenges_target<
    config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    C::Hasher: AlgebraicHasher<F>,
{
    let num_challenges = config.num_challenges;

    let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
    if let Some(trace_cap) = trace_cap {
        challenger.observe_cap(trace_cap);
    }

    challenger.observe_cap(trace_cap);
    let lookup_challenge_set = if let Some(&challenges) = challenges.as_ref() {
        Some(challenges.clone())
    } else {
        auxiliary_polys_cap
            .is_some()
            .then(|| get_grand_product_challenge_set_target(builder, challenger, num_challenges))
    };

    let lookup_challenge_set = auxiliary_polys_cap.map(|permutation_zs_cap| {
        let tmp = get_grand_product_challenge_set_target(builder, &mut challenger, num_challenges);
        challenger.observe_cap(permutation_zs_cap);
        tmp
    });
    if let Some(cap) = auxiliary_polys_cap {
        challenger.observe_cap(cap);
    }

    let stark_alphas = challenger.get_n_challenges(builder, num_challenges);

    challenger.observe_cap(quotient_polys_cap);
    let stark_zeta = challenger.get_extension_challenge(builder);

    challenger.observe_openings(&openings.to_fri_openings());
    challenger.observe_openings(&openings.to_fri_openings(builder.zero()));

    StarkProofChallengesTarget {
        lookup_challenge_set,
@ -163,10 +219,20 @@ where
    }
}

impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
    pub(crate) fn get_challenges<F, C>(
impl<const D: usize> StarkProofTarget<D> {
    /// Creates all Fiat-Shamir `Target` challenges used in the STARK proof.
    /// For a single STARK system, the `ignore_trace_cap` boolean should
    /// always be set to `false`.
    ///
    /// Multi-STARK systems may already observe individual trace caps
    /// ahead of proving each table, and hence may skip observing
    /// the cap again when generating individual challenges.
    pub fn get_challenges<F, C>(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
        challenges: Option<&GrandProductChallengeSet<Target>>,
        ignore_trace_cap: bool,
        config: &StarkConfig,
    ) -> StarkProofChallengesTarget<D>
    where
@ -186,10 +252,18 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
                pow_witness,
                ..
            },
        } = &self.proof;
        } = self;

        let trace_cap = if ignore_trace_cap {
            None
        } else {
            Some(trace_cap)
        };

        get_challenges_target::<F, C, D>(
            builder,
            challenger,
            challenges,
            trace_cap,
            auxiliary_polys_cap.as_ref(),
            quotient_polys_cap,
@ -202,6 +276,32 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
    }
}

impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
    /// Creates all Fiat-Shamir `Target` challenges used in the STARK proof.
    /// For a single STARK system, the `ignore_trace_cap` boolean should
    /// always be set to `false`.
    ///
    /// Multi-STARK systems may already observe individual trace caps
    /// ahead of proving each table, and hence may skip observing
    /// the cap again when generating individual challenges.
    pub fn get_challenges<F, C>(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
        challenges: Option<&GrandProductChallengeSet<Target>>,
        ignore_trace_cap: bool,
        config: &StarkConfig,
    ) -> StarkProofChallengesTarget<D>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        C::Hasher: AlgebraicHasher<F>,
    {
        self.proof
            .get_challenges::<F, C>(builder, challenger, challenges, ignore_trace_cap, config)
    }
}

// TODO: Deal with the compressed stuff.
// impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
//     CompressedProofWithPublicInputs<F, C, D>

@ -1,14 +1,332 @@
//! A FRI-based STARK implementation over the Goldilocks field, with support
//! for recursive proof verification through the plonky2 SNARK backend.
//!
//! This library is intended to provide all the necessary tools to prove,
//! verify, and recursively verify STARK statements. While the library
//! is tailored for a system with a single STARK, it is also flexible
//! enough to support a multi-STARK system, i.e. a system of independent
//! STARK statements possibly sharing common values. See the section below
//! for more information on how to define such a system.
//!
//! # Defining a STARK statement
//!
//! A STARK system is configured by a [`StarkConfig`][crate::config::StarkConfig]
//! defining all the parameters to be used when generating proofs associated
//! with the statement. How constraints should be defined over the STARK trace is
//! defined through the [`Stark`][crate::stark::Stark] trait, which takes a
//! [`StarkEvaluationFrame`][crate::evaluation_frame::StarkEvaluationFrame] of
//! two consecutive rows and a list of public inputs.
//!
//! ### Example: Fibonacci sequence
//!
//! To build a STARK for the modified Fibonacci sequence starting with two
//! user-provided values `x0` and `x1`, one can do the following:
//!
//! ```rust
//! # use core::marker::PhantomData;
//! // Imports all basic types.
//! use plonky2::field::extension::{Extendable, FieldExtension};
//! use plonky2::field::packed::PackedField;
//! use plonky2::field::polynomial::PolynomialValues;
//! use plonky2::hash::hash_types::RichField;
//! # use starky::util::trace_rows_to_poly_values;
//!
//! // Imports to define the constraints of our STARK.
//! use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
//! use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
//! use starky::stark::Stark;
//!
//! // Imports to define the recursive constraints of our STARK.
//! use plonky2::iop::ext_target::ExtensionTarget;
//! use plonky2::plonk::circuit_builder::CircuitBuilder;
//!
//! pub struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
//!     num_rows: usize,
//!     _phantom: PhantomData<F>,
//! }
//!
//! // Define witness generation.
//! impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
//!     // The first public input is `x0`.
//!     const PI_INDEX_X0: usize = 0;
//!     // The second public input is `x1`.
//!     const PI_INDEX_X1: usize = 1;
//!     // The third public input is the second element of the last row,
//!     // which should be equal to the `num_rows`-th Fibonacci number.
//!     const PI_INDEX_RES: usize = 2;
//!
//!     /// Generate the trace using `x0, x1, 0` as initial state values.
//!     fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
//!         let mut trace_rows = (0..self.num_rows)
//!             .scan([x0, x1, F::ZERO], |acc, _| {
//!                 let tmp = *acc;
//!                 acc[0] = tmp[1];
//!                 acc[1] = tmp[0] + tmp[1];
//!                 acc[2] = tmp[2] + F::ONE;
//!                 Some(tmp)
//!             })
//!             .collect::<Vec<_>>();
//!
//!         // Transpose the row-wise trace for the prover.
//!         trace_rows_to_poly_values(trace_rows)
//!     }
//! }
//!
//! // Define constraints.
//! const COLUMNS: usize = 3;
//! const PUBLIC_INPUTS: usize = 3;
//!
//! impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
//!     type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, COLUMNS, PUBLIC_INPUTS>
//!     where
//!         FE: FieldExtension<D2, BaseField = F>,
//!         P: PackedField<Scalar = FE>;
//!
//!     type EvaluationFrameTarget =
//!         StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, COLUMNS, PUBLIC_INPUTS>;
//!
//!     // Define this STARK's constraints.
//!     fn eval_packed_generic<FE, P, const D2: usize>(
//!         &self,
//!         vars: &Self::EvaluationFrame<FE, P, D2>,
//!         yield_constr: &mut ConstraintConsumer<P>,
//!     ) where
//!         FE: FieldExtension<D2, BaseField = F>,
//!         P: PackedField<Scalar = FE>,
//!     {
//!         let local_values = vars.get_local_values();
//!         let next_values = vars.get_next_values();
//!         let public_inputs = vars.get_public_inputs();
//!
//!         // Check public inputs.
//!         yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
//!         yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
//!         yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);
//!
//!         // Enforce the Fibonacci transition constraints.
//!         // x0' <- x1
//!         yield_constr.constraint_transition(next_values[0] - local_values[1]);
//!         // x1' <- x0 + x1
//!         yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
//!     }
//!
//!     // Define the constraints to recursively verify this STARK.
//!     fn eval_ext_circuit(
//!         &self,
//!         builder: &mut CircuitBuilder<F, D>,
//!         vars: &Self::EvaluationFrameTarget,
//!         yield_constr: &mut RecursiveConstraintConsumer<F, D>,
//!     ) {
//!         let local_values = vars.get_local_values();
//!         let next_values = vars.get_next_values();
//!         let public_inputs = vars.get_public_inputs();
//!
//!         // Check public inputs.
//!         let pis_constraints = [
//!             builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
//!             builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
//!             builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
//!         ];
//!
//!         yield_constr.constraint_first_row(builder, pis_constraints[0]);
//!         yield_constr.constraint_first_row(builder, pis_constraints[1]);
//!         yield_constr.constraint_last_row(builder, pis_constraints[2]);
//!
//!         // Enforce the Fibonacci transition constraints.
//!         // x0' <- x1
//!         let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
//!         yield_constr.constraint_transition(builder, first_col_constraint);
//!         // x1' <- x0 + x1
//!         let second_col_constraint = {
//!             let tmp = builder.sub_extension(next_values[1], local_values[0]);
//!             builder.sub_extension(tmp, local_values[1])
//!         };
//!         yield_constr.constraint_transition(builder, second_col_constraint);
//!     }
//!
//!     fn constraint_degree(&self) -> usize {
//!         2
//!     }
//! }
//! ```
//!
//! One can then instantiate a `FibonacciStark`, generate an associated
//! STARK trace, and generate a proof for it.
//!
//! ```rust
//! # use anyhow::Result;
//! # use core::marker::PhantomData;
//! # // Imports all basic types.
//! # use plonky2::field::extension::{Extendable, FieldExtension};
//! # use plonky2::field::types::Field;
//! # use plonky2::field::packed::PackedField;
//! # use plonky2::field::polynomial::PolynomialValues;
//! # use plonky2::hash::hash_types::RichField;
//! # use starky::util::trace_rows_to_poly_values;
//! # // Imports to define the constraints of our STARK.
//! # use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
//! # use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
//! # use starky::stark::Stark;
//! # // Imports to define the recursive constraints of our STARK.
//! # use plonky2::iop::ext_target::ExtensionTarget;
//! # use plonky2::plonk::circuit_builder::CircuitBuilder;
//! # use plonky2::util::timing::TimingTree;
//! # use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
//! # use starky::prover::prove;
//! # use starky::verifier::verify_stark_proof;
//! # use starky::config::StarkConfig;
//! #
//! # #[derive(Copy, Clone)]
//! # pub struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
//! #     num_rows: usize,
//! #     _phantom: PhantomData<F>,
//! # }
//! # // Define witness generation.
//! # impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
//! #     // The first public input is `x0`.
//! #     const PI_INDEX_X0: usize = 0;
//! #     // The second public input is `x1`.
//! #     const PI_INDEX_X1: usize = 1;
//! #     // The third public input is the second element of the last row,
//! #     // which should be equal to the `num_rows`-th Fibonacci number.
//! #     const PI_INDEX_RES: usize = 2;
//! #     /// Generate the trace using `x0, x1, 0` as initial state values.
//! #     fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
//! #         let mut trace_rows = (0..self.num_rows)
//! #             .scan([x0, x1, F::ZERO], |acc, _| {
//! #                 let tmp = *acc;
//! #                 acc[0] = tmp[1];
//! #                 acc[1] = tmp[0] + tmp[1];
//! #                 acc[2] = tmp[2] + F::ONE;
//! #                 Some(tmp)
//! #             })
//! #             .collect::<Vec<_>>();
//! #         // Transpose the row-wise trace for the prover.
//! #         trace_rows_to_poly_values(trace_rows)
//! #     }
//! #     const fn new(num_rows: usize) -> Self {
//! #         Self {
//! #             num_rows,
//! #             _phantom: PhantomData,
//! #         }
//! #     }
//! # }
//! # // Define constraints.
//! # const COLUMNS: usize = 3;
//! # const PUBLIC_INPUTS: usize = 3;
//! # impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
//! #     type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, COLUMNS, PUBLIC_INPUTS>
//! #     where
//! #         FE: FieldExtension<D2, BaseField = F>,
//! #         P: PackedField<Scalar = FE>;
//! #     type EvaluationFrameTarget =
//! #         StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, COLUMNS, PUBLIC_INPUTS>;
//! #     // Define this STARK's constraints.
//! #     fn eval_packed_generic<FE, P, const D2: usize>(
//! #         &self,
//! #         vars: &Self::EvaluationFrame<FE, P, D2>,
//! #         yield_constr: &mut ConstraintConsumer<P>,
//! #     ) where
//! #         FE: FieldExtension<D2, BaseField = F>,
//! #         P: PackedField<Scalar = FE>,
//! #     {
//! #         let local_values = vars.get_local_values();
//! #         let next_values = vars.get_next_values();
//! #         let public_inputs = vars.get_public_inputs();
//! #         // Check public inputs.
//! #         yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
//! #         yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
//! #         yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);
//! #         // Enforce the Fibonacci transition constraints.
//! #         // x0' <- x1
//! #         yield_constr.constraint_transition(next_values[0] - local_values[1]);
//! #         // x1' <- x0 + x1
//! #         yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
//! #     }
//! #     // Define the constraints to recursively verify this STARK.
//! #     fn eval_ext_circuit(
//! #         &self,
//! #         builder: &mut CircuitBuilder<F, D>,
//! #         vars: &Self::EvaluationFrameTarget,
//! #         yield_constr: &mut RecursiveConstraintConsumer<F, D>,
//! #     ) {
//! #         let local_values = vars.get_local_values();
//! #         let next_values = vars.get_next_values();
//! #         let public_inputs = vars.get_public_inputs();
//! #         // Check public inputs.
//! #         let pis_constraints = [
//! #             builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
//! #             builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
//! #             builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
//! #         ];
//! #         yield_constr.constraint_first_row(builder, pis_constraints[0]);
//! #         yield_constr.constraint_first_row(builder, pis_constraints[1]);
//! #         yield_constr.constraint_last_row(builder, pis_constraints[2]);
//! #         // Enforce the Fibonacci transition constraints.
//! #         // x0' <- x1
//! #         let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
//! #         yield_constr.constraint_transition(builder, first_col_constraint);
//! #         // x1' <- x0 + x1
//! #         let second_col_constraint = {
//! #             let tmp = builder.sub_extension(next_values[1], local_values[0]);
//! #             builder.sub_extension(tmp, local_values[1])
//! #         };
//! #         yield_constr.constraint_transition(builder, second_col_constraint);
//! #     }
//! #     fn constraint_degree(&self) -> usize {
//! #         2
//! #     }
//! # }
//! # fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
//! #     (0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
//! # }
//! #
//! const D: usize = 2;
//! const CONFIG: StarkConfig = StarkConfig::standard_fast_config();
//! type C = PoseidonGoldilocksConfig;
//! type F = <C as GenericConfig<D>>::F;
//! type S = FibonacciStark<F, D>;
//!
//! fn main() {
//!     let num_rows = 1 << 10;
//!     let x0 = F::from_canonical_u32(2);
//!     let x1 = F::from_canonical_u32(7);
//!
//!     let public_inputs = [x0, x1, fibonacci(num_rows - 1, x0, x1)];
//!     let stark = FibonacciStark::<F, D>::new(num_rows);
//!     let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
//!
//!     let proof = prove::<F, C, S, D>(
//!         stark,
//!         &CONFIG,
//!         trace,
//!         &public_inputs,
//!         &mut TimingTree::default(),
//!     ).expect("We should have a valid proof!");
//!
//!     verify_stark_proof(stark, proof, &CONFIG)
//!         .expect("We should be able to verify this proof!")
//! }
//! ```

#![allow(clippy::too_many_arguments)]
#![allow(clippy::needless_range_loop)]
#![allow(clippy::type_complexity)]
#![allow(unused)] // TODO: Remove post code migration
#![deny(rustdoc::broken_intra_doc_links)]
#![deny(missing_debug_implementations)]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(not(feature = "std"))]
extern crate alloc;

mod get_challenges;

pub mod config;
pub mod constraint_consumer;
pub mod cross_table_lookup;
pub mod evaluation_frame;
pub mod lookup;
pub mod proof;
@ -17,7 +335,7 @@ pub mod recursive_verifier;
pub mod stark;
pub mod stark_testing;
pub mod util;
pub mod vanishing_poly;
mod vanishing_poly;
pub mod verifier;

#[cfg(test)]

@ -1,5 +1,8 @@
use alloc::vec;
use alloc::vec::Vec;
//! A Lookup protocol leveraging logarithmic derivatives,
//! introduced in <https://eprint.iacr.org/2022/1530.pdf>.

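// A minimal sketch (not part of this diff) of the logUp identity this module
// builds on: the looking values {x_i} all appear in the table {t_j} with
// multiplicities {m_j} iff, for a random challenge `a`,
//     Σ_i 1 / (x_i + a)  ==  Σ_j m_j / (t_j + a).
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Field;

fn main() {
    let a = F::from_canonical_u64(123); // stand-in for a random challenge
    let looking = [F::ONE, F::TWO, F::ONE]; // values being looked up
    let (table, freqs) = ([F::ONE, F::TWO], [F::TWO, F::ONE]); // table and multiplicities
    let lhs: F = looking.iter().map(|&x| (x + a).inverse()).sum();
    let rhs: F = table.iter().zip(freqs).map(|(&t, m)| m * (t + a).inverse()).sum();
    assert_eq!(lhs, rhs);
}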
#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::borrow::Borrow;
use core::fmt::Debug;
use core::iter::repeat;
@ -37,6 +40,7 @@ pub struct Filter<F: Field> {
}

impl<F: Field> Filter<F> {
    /// Returns a filter from the provided `products` and `constants` vectors.
    pub fn new(products: Vec<(Column<F>, Column<F>)>, constants: Vec<Column<F>>) -> Self {
        Self {
            products,
@ -113,14 +117,6 @@ impl<F: Field> Filter<F> {
            .map(|col| col.eval_table(table, row))
            .sum()
    }

    pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues<F>]) -> Vec<F> {
        let length = table[0].len();

        (0..length)
            .map(|row| self.eval_table(table, row))
            .collect::<Vec<F>>()
    }
}

/// Represents two linear combinations of columns, corresponding to the current and next row values.
@ -402,12 +398,24 @@ impl<F: Field> Column<F> {

pub(crate) type ColumnFilter<'a, F> = (&'a [Column<F>], &'a Option<Filter<F>>);

/// A [`Lookup`] defines a set of `columns` whose values should appear in a
/// `table_column` (i.e. the lookup table associated to these looking columns),
/// along with a `frequencies_column` indicating the frequency of each looking
/// column in the looked table.
///
/// It also features a `filter_columns` vector, optionally adding at most one
/// filter per looking column.
///
/// The lookup argument implemented here is based on logarithmic derivatives,
/// a technique described, along with the whole lookup protocol, in
/// <https://eprint.iacr.org/2022/1530>.
#[derive(Debug)]
pub struct Lookup<F: Field> {
    /// Columns whose values should be contained in the lookup table.
    /// These are the f_i(x) polynomials in the logUp paper.
    pub columns: Vec<Column<F>>,
    /// Column containing the lookup table.
    /// This is the t(x) polynomial in the paper.
    /// This is the t(x) polynomial in the logUp paper.
    pub table_column: Column<F>,
    /// Column containing the frequencies of `columns` in `table_column`.
    /// This is the m(x) polynomial in the paper.
@ -419,6 +427,7 @@ pub struct Lookup<F: Field> {
}

impl<F: Field> Lookup<F> {
    /// Outputs the number of helper columns needed by this [`Lookup`].
    pub fn num_helper_columns(&self, constraint_degree: usize) -> usize {
        // One helper column for each column batch of size `constraint_degree - 1`,
        // then one column for the inverse of `table + challenge` and one for the `Z` polynomial.
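// Illustrative sketch (not part of this diff; the column indices are made up):
// a `Lookup` whose five looking columns must appear in table column 5, with
// multiplicities in column 6. Per the comment above, with constraint degree 3
// the columns are batched in pairs, giving ceil(5 / 2) + 2 = 5 helper columns.
use plonky2::field::goldilocks_field::GoldilocksField as F;
use starky::lookup::{Column, Lookup};

fn main() {
    let lookup: Lookup<F> = Lookup {
        columns: (0..5).map(Column::single).collect(),
        table_column: Column::single(5),
        frequencies_column: Column::single(6),
        filter_columns: (0..5).map(|_| None).collect(),
    };
    assert_eq!(lookup.num_helper_columns(3), 5);
}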
@ -428,18 +437,18 @@ impl<F: Field> Lookup<F> {

/// Randomness for a single instance of a permutation check protocol.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
pub struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
    /// Randomness used to combine multiple columns into one.
    pub(crate) beta: T,
    pub beta: T,
    /// Random offset that's added to the beta-reduced column values.
    pub(crate) gamma: T,
    pub gamma: T,
}

impl<F: Field> GrandProductChallenge<F> {
    pub(crate) fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(
        &self,
        terms: T,
    ) -> P
    /// Combines a series of values `t_i` with these challenge random values.
    /// In particular, given `beta` and `gamma` challenges, this will compute
    /// `(Σ t_i * beta^i) + gamma`.
    pub fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(&self, terms: T) -> P
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
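// Illustrative sketch (not part of this diff): recomputing by hand what
// `combine` produces for three terms; the challenge values are made up.
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Field;

fn main() {
    let (beta, gamma) = (F::from_canonical_u64(7), F::from_canonical_u64(11));
    let terms = [F::ONE, F::TWO, F::from_canonical_u64(3)];
    // (Σ t_i * beta^i) + gamma, evaluated with Horner's rule on the reversed terms.
    let combined = terms.iter().rev().fold(F::ZERO, |acc, &t| acc * beta + t) + gamma;
    assert_eq!(
        combined,
        terms[0] + terms[1] * beta + terms[2] * beta.square() + gamma
    );
}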
@ -462,7 +471,8 @@ impl GrandProductChallenge<Target> {
|
||||
}
|
||||
|
||||
impl GrandProductChallenge<Target> {
|
||||
pub(crate) fn combine_base_circuit<F: RichField + Extendable<D>, const D: usize>(
|
||||
/// Circuit version of `combine`.
|
||||
pub fn combine_base_circuit<F: RichField + Extendable<D>, const D: usize>(
|
||||
&self,
|
||||
builder: &mut CircuitBuilder<F, D>,
terms: &[Target],
@ -475,11 +485,14 @@ impl GrandProductChallenge<Target> {
/// Like `GrandProductChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct GrandProductChallengeSet<T: Copy + Eq + PartialEq + Debug> {
pub(crate) challenges: Vec<GrandProductChallenge<T>>,
/// A sequence of `num_challenges` challenge pairs, where `num_challenges`
/// is defined in [`StarkConfig`][crate::config::StarkConfig].
pub challenges: Vec<GrandProductChallenge<T>>,
}

impl GrandProductChallengeSet<Target> {
pub(crate) fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
/// Serializes this `GrandProductChallengeSet` of `Target`s.
pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
buffer.write_usize(self.challenges.len())?;
for challenge in &self.challenges {
buffer.write_target(challenge.beta)?;
@ -488,7 +501,8 @@ impl GrandProductChallengeSet<Target> {
Ok(())
}

pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
/// Deserializes a `GrandProductChallengeSet` of `Target`s from the provided buffer.
pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
let length = buffer.read_usize()?;
let mut challenges = Vec::with_capacity(length);
for _ in 0..length {
@ -510,7 +524,9 @@ fn get_grand_product_challenge<F: RichField, H: Hasher<F>>(
GrandProductChallenge { beta, gamma }
}

pub(crate) fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
/// Generates a new `GrandProductChallengeSet` containing `num_challenges`
/// pairs of challenges from the current `challenger` state.
pub fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
num_challenges: usize,
) -> GrandProductChallengeSet<F> {
@ -533,7 +549,8 @@ fn get_grand_product_challenge_target<
GrandProductChallenge { beta, gamma }
}

pub(crate) fn get_grand_product_challenge_set_target<
/// Circuit version of `get_grand_product_challenge_set`.
pub fn get_grand_product_challenge_set_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
@ -570,7 +587,6 @@ pub(crate) fn lookup_helper_columns<F: Field>(
assert!(BigUint::from(num_total_logup_entries) < F::characteristic());

let num_helper_columns = lookup.num_helper_columns(constraint_degree);
let mut helper_columns: Vec<PolynomialValues<F>> = Vec::with_capacity(num_helper_columns);

let looking_cols = lookup
.columns
@ -762,7 +778,6 @@ pub(crate) fn get_helper_cols<F: Field>(

let mut helper_columns = Vec::with_capacity(num_helper_columns);

let mut filter_index = 0;
for mut cols_filts in &columns_filters.iter().chunks(constraint_degree - 1) {
let (first_col, first_filter) = cols_filts.next().unwrap();

@ -842,6 +857,7 @@ pub(crate) fn get_helper_cols<F: Field>(
helper_columns
}

#[derive(Debug)]
pub(crate) struct LookupCheckVars<F, FE, P, const D2: usize>
where
F: Field,
@ -919,6 +935,7 @@ pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2:
}
}

#[derive(Debug)]
pub(crate) struct LookupCheckVarsTarget<const D: usize> {
pub(crate) local_values: Vec<ExtensionTarget<D>>,
pub(crate) next_values: Vec<ExtensionTarget<D>>,
@ -936,7 +953,6 @@ pub(crate) fn eval_ext_lookups_circuit<
lookup_vars: LookupCheckVarsTarget<D>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();
let degree = stark.constraint_degree();
let lookups = stark.lookups();


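// Illustrative sketch (not part of the patch): the now-public
// `to_buffer`/`from_buffer` pair above is intended to round-trip a challenge
// set. `challenge_set` is a hypothetical value built elsewhere; `Buffer` and
// `IoResult` come from `plonky2::util::serialization`.
fn roundtrip_challenge_set(challenge_set: &GrandProductChallengeSet<Target>) -> IoResult<()> {
    let mut bytes = Vec::new();
    challenge_set.to_buffer(&mut bytes)?;
    let mut buffer = Buffer::new(&bytes);
    let recovered = GrandProductChallengeSet::from_buffer(&mut buffer)?;
    // Both `beta` and `gamma` of every copy should survive the round trip.
    debug_assert_eq!(&recovered, challenge_set);
    Ok(())
}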
@ -1,5 +1,9 @@
use alloc::vec;
use alloc::vec::Vec;
//! All the different proof types and their associated `circuit` versions
//! to be used when proving (recursive) [`Stark`][crate::stark::Stark]
//! statements.

#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};

use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
@ -14,17 +18,19 @@ use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
use plonky2_maybe_rayon::*;

use crate::config::StarkConfig;
use crate::lookup::GrandProductChallengeSet;

/// Merkle caps and openings that form the proof of a single STARK.
#[derive(Debug, Clone)]
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of permutation Z values.
/// Optional merkle cap of LDEs of permutation Z values, if any.
pub auxiliary_polys_cap: Option<MerkleCap<F, C::Hasher>>,
/// Merkle cap of LDEs of quotient polynomial evaluations.
pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
@ -46,15 +52,57 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> S
}
}

/// Circuit version of [`StarkProof`].
/// Merkle caps and openings that form the proof of a single STARK.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StarkProofTarget<const D: usize> {
/// `Target` for the Merkle cap of trace values LDEs.
pub trace_cap: MerkleCapTarget,
/// Optional `Target` for the Merkle cap of lookup helper and CTL columns LDEs, if any.
pub auxiliary_polys_cap: Option<MerkleCapTarget>,
/// `Target` for the Merkle cap of quotient polynomial evaluations LDEs.
pub quotient_polys_cap: MerkleCapTarget,
/// `Target`s for the purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSetTarget<D>,
/// `Target`s for the batch FRI argument for all openings.
pub opening_proof: FriProofTarget<D>,
}

impl<const D: usize> StarkProofTarget<D> {
/// Serializes a STARK proof.
pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
buffer.write_target_merkle_cap(&self.trace_cap)?;
buffer.write_bool(self.auxiliary_polys_cap.is_some())?;
if let Some(poly) = &self.auxiliary_polys_cap {
buffer.write_target_merkle_cap(poly)?;
}
buffer.write_target_merkle_cap(&self.quotient_polys_cap)?;
buffer.write_target_fri_proof(&self.opening_proof)?;
self.openings.to_buffer(buffer)?;
Ok(())
}

/// Deserializes a STARK proof.
pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
let trace_cap = buffer.read_target_merkle_cap()?;
let auxiliary_polys_cap = if buffer.read_bool()? {
Some(buffer.read_target_merkle_cap()?)
} else {
None
};
let quotient_polys_cap = buffer.read_target_merkle_cap()?;
let opening_proof = buffer.read_target_fri_proof()?;
let openings = StarkOpeningSetTarget::from_buffer(buffer)?;

Ok(Self {
trace_cap,
auxiliary_polys_cap,
quotient_polys_cap,
openings,
opening_proof,
})
}

/// Recover the length of the trace from a STARK proof and a STARK config.
pub fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
@ -66,22 +114,31 @@ impl<const D: usize> StarkProofTarget<D> {
}
}

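// Illustrative sketch (not part of the patch): `recover_degree_bits` above
// works backwards from the first FRI query round. Assuming the standard
// plonky2 layout, the LDE height is `cap_height + siblings.len()`, so the
// trace degree is that height minus `rate_bits`. For example, 13 siblings,
// `cap_height = 4` and `rate_bits = 1` give `degree_bits = 16`, i.e. a trace
// of 2^16 rows.
fn degree_bits_from_merkle_path(siblings_len: usize, cap_height: usize, rate_bits: usize) -> usize {
    let lde_bits = cap_height + siblings_len;
    lde_bits - rate_bits
}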
/// Merkle caps and openings that form the proof of a single STARK, along with its public inputs.
#[derive(Debug, Clone)]
pub struct StarkProofWithPublicInputs<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
/// A STARK proof.
pub proof: StarkProof<F, C, D>,
/// Public inputs associated to this STARK proof.
// TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`.
pub public_inputs: Vec<F>,
}

/// Circuit version of [`StarkProofWithPublicInputs`].
#[derive(Debug, Clone)]
pub struct StarkProofWithPublicInputsTarget<const D: usize> {
/// `Target` STARK proof.
pub proof: StarkProofTarget<D>,
/// `Target` public inputs for this STARK proof.
pub public_inputs: Vec<Target>,
}

/// A compressed proof format of a single STARK.
#[derive(Debug, Clone)]
pub struct CompressedStarkProof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -95,69 +152,158 @@ pub struct CompressedStarkProof<
pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}

/// A compressed [`StarkProof`] format of a single STARK with its public inputs.
#[derive(Debug, Clone)]
pub struct CompressedStarkProofWithPublicInputs<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
/// A compressed STARK proof.
pub proof: CompressedStarkProof<F, C, D>,
/// Public inputs for this compressed STARK proof.
pub public_inputs: Vec<F>,
}

pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
/// Randomness used in any permutation arguments.
pub lookup_challenge_set: Option<GrandProductChallengeSet<F>>,
/// A [`StarkProof`] along with metadata about the initial Fiat-Shamir state, which is used when
/// creating a recursive wrapper proof around a STARK proof.
#[derive(Debug, Clone)]
pub struct StarkProofWithMetadata<F, C, const D: usize>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
{
/// Initial Fiat-Shamir state.
pub init_challenger_state: <C::Hasher as Hasher<F>>::Permutation,
/// Proof for a single STARK.
pub proof: StarkProof<F, C, D>,
}

/// A combination of STARK proofs for independent statements operating on possibly shared variables,
/// along with Cross-Table Lookup (CTL) challenges to assert consistency of common variables across tables.
#[derive(Debug, Clone)]
pub struct MultiProof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
const N: usize,
> {
/// Proofs for all the different STARK modules.
pub stark_proofs: [StarkProofWithMetadata<F, C, D>; N],
/// Cross-table lookup challenges.
pub ctl_challenges: GrandProductChallengeSet<F>,
}

impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize, const N: usize>
MultiProof<F, C, D, N>
{
/// Returns the degree bits (i.e. the log2 of the trace length) of each STARK proof,
/// from their common [`StarkConfig`].
pub fn recover_degree_bits(&self, config: &StarkConfig) -> [usize; N] {
core::array::from_fn(|i| self.stark_proofs[i].proof.recover_degree_bits(config))
}
}

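// Illustrative sketch (not part of the patch): with a two-table system, the
// per-table trace lengths can be read back from a `MultiProof` without any
// extra bookkeeping; `multi_proof` and `config` are hypothetical values.
fn trace_lengths<F, C, const D: usize>(
    multi_proof: &MultiProof<F, C, D, 2>,
    config: &StarkConfig,
) -> [usize; 2]
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    multi_proof.recover_degree_bits(config).map(|bits| 1 << bits)
}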
/// Randomness used for a STARK proof.
#[derive(Debug)]
pub struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
/// Optional randomness used in any permutation argument.
pub lookup_challenge_set: Option<GrandProductChallengeSet<F>>,
/// Random values used to combine STARK constraints.
pub stark_alphas: Vec<F>,

/// Point at which the STARK polynomials are opened.
pub stark_zeta: F::Extension,

/// Randomness used in FRI.
pub fri_challenges: FriChallenges<F, D>,
}

pub(crate) struct StarkProofChallengesTarget<const D: usize> {
/// Circuit version of [`StarkProofChallenges`].
#[derive(Debug)]
pub struct StarkProofChallengesTarget<const D: usize> {
/// Optional `Target` randomness used in any permutation argument.
pub lookup_challenge_set: Option<GrandProductChallengeSet<Target>>,
/// `Target`s for the random values used to combine STARK constraints.
pub stark_alphas: Vec<Target>,
/// `ExtensionTarget` for the point at which the STARK polynomials are opened.
pub stark_zeta: ExtensionTarget<D>,
/// `Target`s for the randomness used in FRI.
pub fri_challenges: FriChallengesTarget<D>,
}

/// Randomness for all STARK proofs contained in a [`MultiProof`].
#[derive(Debug)]
pub struct MultiProofChallenges<F: RichField + Extendable<D>, const D: usize, const N: usize> {
/// Randomness used in each STARK proof.
pub stark_challenges: [StarkProofChallenges<F, D>; N],
/// Randomness used for cross-table lookups. It is shared by all STARKs.
pub ctl_challenges: GrandProductChallengeSet<F>,
}

/// Purported values of each polynomial at the challenge point.
#[derive(Debug, Clone)]
pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
/// Openings of trace polynomials at `zeta`.
pub local_values: Vec<F::Extension>,
/// Openings of trace polynomials at `g * zeta`.
pub next_values: Vec<F::Extension>,
/// Openings of lookups and cross-table lookups `Z` polynomials at `zeta`.
pub auxiliary_polys: Option<Vec<F::Extension>>,
/// Openings of lookups and cross-table lookups `Z` polynomials at `g * zeta`.
pub auxiliary_polys_next: Option<Vec<F::Extension>>,
/// Openings of cross-table lookups `Z` polynomials at `1`.
pub ctl_zs_first: Option<Vec<F>>,
/// Openings of quotient polynomials at `zeta`.
pub quotient_polys: Vec<F::Extension>,
}

impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
/// Returns a `StarkOpeningSet` given all the polynomial commitments, the number
/// of permutation `Z` polynomials, the evaluation point and a generator `g`.
///
/// Polynomials are evaluated at point `zeta` and, if necessary, at `g * zeta`.
pub fn new<C: GenericConfig<D, F = F>>(
zeta: F::Extension,
g: F,
trace_commitment: &PolynomialBatch<F, C, D>,
auxiliary_polys_commitment: Option<&PolynomialBatch<F, C, D>>,
quotient_commitment: &PolynomialBatch<F, C, D>,
num_lookup_columns: usize,
requires_ctl: bool,
num_ctl_polys: &[usize],
) -> Self {
// Batch evaluates polynomials on the LDE, at a point `z`.
let eval_commitment = |z: F::Extension, c: &PolynomialBatch<F, C, D>| {
c.polynomials
.par_iter()
.map(|p| p.to_extension().eval(z))
.collect::<Vec<_>>()
};
// Batch evaluates polynomials at a base field point `z`.
let eval_commitment_base = |z: F, c: &PolynomialBatch<F, C, D>| {
c.polynomials
.par_iter()
.map(|p| p.eval(z))
.collect::<Vec<_>>()
};

let auxiliary_first = auxiliary_polys_commitment.map(|c| eval_commitment_base(F::ONE, c));
// `g * zeta`.
let zeta_next = zeta.scalar_mul(g);
Self {
local_values: eval_commitment(zeta, trace_commitment),
next_values: eval_commitment(zeta_next, trace_commitment),
auxiliary_polys: auxiliary_polys_commitment.map(|c| eval_commitment(zeta, c)),
auxiliary_polys_next: auxiliary_polys_commitment.map(|c| eval_commitment(zeta_next, c)),
ctl_zs_first: requires_ctl.then(|| {
let total_num_helper_cols: usize = num_ctl_polys.iter().sum();
auxiliary_first.unwrap()[num_lookup_columns + total_num_helper_cols..].to_vec()
}),
quotient_polys: eval_commitment(zeta, quotient_commitment),
}
}

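// Illustrative sketch (not part of the patch): `ctl_zs_first` opens the CTL
// `Z` polynomials at 1 because 1 is the first element of the trace subgroup
// `H`, i.e. evaluating the interpolated polynomial at 1 recovers row 0. A
// self-contained check of that identity over the Goldilocks field:
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;

fn opening_at_one_is_first_row(values: PolynomialValues<GoldilocksField>) {
    let first_row = values.values[0];
    // Interpolating over `H` and evaluating the coefficient form at 1
    // recovers the first trace row.
    assert_eq!(values.ifft().eval(GoldilocksField::ONE), first_row);
}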
/// Constructs the openings required by FRI.
/// All openings but `ctl_zs_first` are grouped together.
pub(crate) fn to_fri_openings(&self) -> FriOpenings<F, D> {
let zeta_batch = FriOpeningBatch {
values: self
@ -176,22 +322,107 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
.copied()
.collect_vec(),
};
FriOpenings {
batches: vec![zeta_batch, zeta_next_batch],

let mut batches = vec![zeta_batch, zeta_next_batch];

if let Some(ctl_zs_first) = self.ctl_zs_first.as_ref() {
debug_assert!(!ctl_zs_first.is_empty());
debug_assert!(self.auxiliary_polys.is_some());
debug_assert!(self.auxiliary_polys_next.is_some());

let ctl_first_batch = FriOpeningBatch {
values: ctl_zs_first
.iter()
.copied()
.map(F::Extension::from_basefield)
.collect(),
};

batches.push(ctl_first_batch);
}

FriOpenings { batches }
}
}

/// Circuit version of [`StarkOpeningSet`].
/// `Target`s for the purported values of each polynomial at the challenge point.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StarkOpeningSetTarget<const D: usize> {
/// `ExtensionTarget`s for the openings of trace polynomials at `zeta`.
pub local_values: Vec<ExtensionTarget<D>>,
/// `ExtensionTarget`s for the opening of trace polynomials at `g * zeta`.
pub next_values: Vec<ExtensionTarget<D>>,
/// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `zeta`.
pub auxiliary_polys: Option<Vec<ExtensionTarget<D>>>,
/// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at `g * zeta`.
pub auxiliary_polys_next: Option<Vec<ExtensionTarget<D>>>,
/// `Target`s for the opening of lookups and cross-table lookups `Z` polynomials at 1.
pub ctl_zs_first: Option<Vec<Target>>,
/// `ExtensionTarget`s for the opening of quotient polynomials at `zeta`.
pub quotient_polys: Vec<ExtensionTarget<D>>,
}

impl<const D: usize> StarkOpeningSetTarget<D> {
pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget<D> {
/// Serializes a STARK's opening set.
pub(crate) fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
buffer.write_target_ext_vec(&self.local_values)?;
buffer.write_target_ext_vec(&self.next_values)?;
if let Some(poly) = &self.auxiliary_polys {
buffer.write_bool(true)?;
buffer.write_target_ext_vec(poly)?;
} else {
buffer.write_bool(false)?;
}
if let Some(poly_next) = &self.auxiliary_polys_next {
buffer.write_bool(true)?;
buffer.write_target_ext_vec(poly_next)?;
} else {
buffer.write_bool(false)?;
}
if let Some(ctl_zs_first) = &self.ctl_zs_first {
buffer.write_bool(true)?;
buffer.write_target_vec(ctl_zs_first)?;
} else {
buffer.write_bool(false)?;
}
buffer.write_target_ext_vec(&self.quotient_polys)?;
Ok(())
}

/// Deserializes a STARK's opening set.
pub(crate) fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
let local_values = buffer.read_target_ext_vec::<D>()?;
let next_values = buffer.read_target_ext_vec::<D>()?;
let auxiliary_polys = if buffer.read_bool()? {
Some(buffer.read_target_ext_vec::<D>()?)
} else {
None
};
let auxiliary_polys_next = if buffer.read_bool()? {
Some(buffer.read_target_ext_vec::<D>()?)
} else {
None
};
let ctl_zs_first = if buffer.read_bool()? {
Some(buffer.read_target_vec()?)
} else {
None
};
let quotient_polys = buffer.read_target_ext_vec::<D>()?;

Ok(Self {
local_values,
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first,
quotient_polys,
})
}

/// Circuit version of `to_fri_openings` for [`FriOpeningsTarget`].
pub(crate) fn to_fri_openings(&self, zero: Target) -> FriOpeningsTarget<D> {
let zeta_batch = FriOpeningBatchTarget {
values: self
.local_values
@ -209,8 +440,24 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
.copied()
.collect_vec(),
};
FriOpeningsTarget {
batches: vec![zeta_batch, zeta_next_batch],

let mut batches = vec![zeta_batch, zeta_next_batch];

if let Some(ctl_zs_first) = self.ctl_zs_first.as_ref() {
debug_assert!(!ctl_zs_first.is_empty());
debug_assert!(self.auxiliary_polys.is_some());
debug_assert!(self.auxiliary_polys_next.is_some());

let ctl_first_batch = FriOpeningBatchTarget {
values: ctl_zs_first
.iter()
.copied()
.map(|t| t.to_ext_target(zero))
.collect(),
};

batches.push(ctl_first_batch);
}
FriOpeningsTarget { batches }
}
}

@ -1,3 +1,6 @@
//! Implementation of the STARK prover.

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::iter::once;

@ -20,15 +23,17 @@ use plonky2_maybe_rayon::*;

use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cross_table_lookup::{get_ctl_auxiliary_polys, CtlCheckVars, CtlData};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::{
get_grand_product_challenge_set, lookup_helper_columns, Lookup, LookupCheckVars,
get_grand_product_challenge_set, lookup_helper_columns, GrandProductChallengeSet, Lookup,
LookupCheckVars,
};
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;

#[allow(clippy::useless_asref)]
/// From a STARK trace, computes a STARK proof to attest to its correctness.
pub fn prove<F, C, S, const D: usize>(
stark: S,
config: &StarkConfig,
@ -68,54 +73,120 @@ where
let mut challenger = Challenger::new();
challenger.observe_cap(&trace_cap);

// Lookup argument.
prove_with_commitment(
&stark,
config,
&trace_poly_values,
&trace_commitment,
None,
None,
&mut challenger,
public_inputs,
timing,
)
}

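// Illustrative sketch (not part of the patch): a minimal call of the `prove`
// entry point above. `MyStark` is a hypothetical `Stark` implementation and
// `trace` its trace in values form; the elided parameters follow the
// signature used by this crate (trace values, public inputs, timing tree).
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::util::timing::TimingTree;

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;

fn prove_single_table(
    stark: MyStark<F, D>,
    trace: Vec<PolynomialValues<F>>,
) -> Result<StarkProofWithPublicInputs<F, C, D>> {
    let config = StarkConfig::standard_fast_config();
    let mut timing = TimingTree::default();
    // No public inputs in this minimal example.
    prove::<F, C, MyStark<F, D>, D>(stark, &config, trace, &[], &mut timing)
}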
/// Generates a proof for a single STARK table, including:
///
/// - the initial state of the challenger,
/// - all the required Merkle caps,
/// - all the required polynomial and FRI argument openings,
/// - individual `ctl_data` and common `ctl_challenges` if the STARK is part
/// of a multi-STARK system.
pub fn prove_with_commitment<F, C, S, const D: usize>(
stark: &S,
config: &StarkConfig,
trace_poly_values: &[PolynomialValues<F>],
trace_commitment: &PolynomialBatch<F, C, D>,
ctl_data: Option<&CtlData<F>>,
ctl_challenges: Option<&GrandProductChallengeSet<F>>,
challenger: &mut Challenger<F, C::Hasher>,
public_inputs: &[F],
timing: &mut TimingTree,
) -> Result<StarkProofWithPublicInputs<F, C, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
{
let degree = trace_poly_values[0].len();
let degree_bits = log2_strict(degree);
let fri_params = config.fri_params(degree_bits);
let rate_bits = config.fri_config.rate_bits;
let cap_height = config.fri_config.cap_height;
assert!(
fri_params.total_arities() <= degree_bits + rate_bits - cap_height,
"FRI total reduction arity is too large.",
);

// Permutation arguments.

let constraint_degree = stark.constraint_degree();
let lookups = stark.lookups();
let lookup_challenges = stark.uses_lookups().then(|| {
get_grand_product_challenge_set(&mut challenger, config.num_challenges)
.challenges
.iter()
.map(|ch| ch.beta)
.collect::<Vec<_>>()
if let Some(c) = ctl_challenges {
c.challenges.iter().map(|ch| ch.beta).collect::<Vec<_>>()
} else {
get_grand_product_challenge_set(challenger, config.num_challenges)
.challenges
.iter()
.map(|ch| ch.beta)
.collect::<Vec<_>>()
}
});

let num_lookup_columns = lookups
.iter()
.map(|l| l.num_helper_columns(constraint_degree))
.sum();

let auxiliary_polys_commitment = stark.uses_lookups().then(|| {
let lookup_helper_columns = timed!(timing, "compute lookup helper columns", {
let challenges = lookup_challenges.as_ref().expect("We do have challenges.");
let mut columns = Vec::with_capacity(num_lookup_columns);
let lookups = stark.lookups();
let lookup_helper_columns = timed!(
timing,
"compute lookup helper columns",
lookup_challenges.as_ref().map(|challenges| {
let mut columns = Vec::new();
for lookup in &lookups {
for &challenge in challenges {
columns.extend(lookup_helper_columns(
lookup,
&trace_poly_values,
trace_poly_values,
challenge,
constraint_degree,
));
}
}
columns
});
})
);
let num_lookup_columns = lookup_helper_columns.as_ref().map_or(0, |v| v.len());

// Get the polynomial commitments for all auxiliary polynomials.
let auxiliary_polys_commitment = timed!(
// We add CTLs, if there are any, to the permutation arguments so that
// we can batch commit to all auxiliary polynomials.
let auxiliary_polys = match lookup_helper_columns {
None => get_ctl_auxiliary_polys(ctl_data),
Some(mut lookup_columns) => {
if let Some(p) = get_ctl_auxiliary_polys(ctl_data) {
lookup_columns.extend(p)
};

Some(lookup_columns)
}
};

debug_assert!(
(stark.uses_lookups() || stark.requires_ctls()) || auxiliary_polys.is_none(),
"There should be auxiliary polynomials if and only if we have either lookups or require cross-table lookups."
);

// Get the polynomial commitments for all auxiliary polynomials.
let auxiliary_polys_commitment = auxiliary_polys.map(|aux_polys| {
timed!(
timing,
"compute permutation Z commitments",
"compute auxiliary polynomials commitment",
PolynomialBatch::from_values(
lookup_helper_columns,
aux_polys,
rate_bits,
false,
config.fri_config.cap_height,
timing,
None,
)
);

auxiliary_polys_commitment
)
});

let auxiliary_polys_cap = auxiliary_polys_commitment
@ -127,18 +198,25 @@ where

let alphas = challenger.get_n_challenges(config.num_challenges);

#[cfg(test)]
let num_ctl_polys = ctl_data
.map(|data| data.num_ctl_helper_polys())
.unwrap_or_default();

// This is an expensive check, hence is only run when `debug_assertions` are enabled.
#[cfg(debug_assertions)]
{
check_constraints(
&stark,
&trace_commitment,
stark,
trace_commitment,
public_inputs,
&auxiliary_polys_commitment,
lookup_challenges.as_ref(),
&lookups,
ctl_data,
alphas.clone(),
degree_bits,
num_lookup_columns,
&num_ctl_polys,
);
}

@ -146,19 +224,20 @@ where
timing,
"compute quotient polys",
compute_quotient_polys::<F, <F as Packable>::Packing, C, S, D>(
&stark,
&trace_commitment,
stark,
trace_commitment,
&auxiliary_polys_commitment,
lookup_challenges.as_ref(),
&lookups,
ctl_data,
public_inputs,
alphas,
alphas.clone(),
degree_bits,
num_lookup_columns,
&num_ctl_polys,
config,
)
);

let all_quotient_chunks = timed!(
timing,
"split quotient polys",
@ -175,7 +254,7 @@ where
})
.collect()
);

// Commit to the quotient polynomials.
let quotient_commitment = timed!(
timing,
"compute quotient commitment",
@ -188,12 +267,12 @@ where
None,
)
);

// Observe the quotient polynomials Merkle cap.
let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
challenger.observe_cap(&quotient_polys_cap);

let zeta = challenger.get_extension_challenge::<D>();

// To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
// `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since
// `(g * zeta)^n = zeta^n`, where `n` is the order of `g`.
@ -207,15 +286,17 @@ where
let openings = StarkOpeningSet::new(
zeta,
g,
&trace_commitment,
trace_commitment,
auxiliary_polys_commitment.as_ref(),
&quotient_commitment,
stark.num_lookup_helper_columns(config),
stark.requires_ctls(),
&num_ctl_polys,
);

// Get the FRI openings and observe them.
challenger.observe_openings(&openings.to_fri_openings());

let initial_merkle_trees = once(&trace_commitment)
let initial_merkle_trees = once(trace_commitment)
.chain(&auxiliary_polys_commitment)
.chain(once(&quotient_commitment))
.collect_vec();
@ -224,15 +305,16 @@ where
timing,
"compute openings proof",
PolynomialBatch::prove_openings(
&stark.fri_instance(zeta, g, config),
&stark.fri_instance(zeta, g, num_ctl_polys.iter().sum(), num_ctl_polys, config),
&initial_merkle_trees,
&mut challenger,
challenger,
&fri_params,
timing,
)
);

let proof = StarkProof {
trace_cap,
trace_cap: trace_commitment.merkle_tree.cap.clone(),
auxiliary_polys_cap,
quotient_polys_cap,
openings,
@ -246,17 +328,19 @@ where
}

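// Illustrative sketch (not part of the patch): the subgroup check above
// relies on (g * zeta)^n = g^n * zeta^n = zeta^n for a generator `g` of
// order n, so testing `zeta^n != 1` covers both opening points at once.
// A base-field version of that test, with n = 2^degree_bits:
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;

fn is_valid_opening_point(zeta: GoldilocksField, degree_bits: usize) -> bool {
    zeta.exp_power_of_2(degree_bits) != GoldilocksField::ONE
}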
/// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`,
/// where the `C_i`s are the Stark constraints.
/// where the `C_i`s are the STARK constraints.
fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
stark: &S,
trace_commitment: &'a PolynomialBatch<F, C, D>,
auxiliary_polys_commitment: &'a Option<PolynomialBatch<F, C, D>>,
lookup_challenges: Option<&'a Vec<F>>,
lookups: &[Lookup<F>],
ctl_data: Option<&CtlData<F>>,
public_inputs: &[F],
alphas: Vec<F>,
degree_bits: usize,
num_lookup_columns: usize,
num_ctl_columns: &[usize],
config: &StarkConfig,
) -> Vec<PolynomialCoeffs<F>>
where
@ -267,6 +351,7 @@ where
{
let degree = 1 << degree_bits;
let rate_bits = config.fri_config.rate_bits;
let total_num_helper_cols: usize = num_ctl_columns.iter().sum();

let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor());
assert!(
@ -331,15 +416,62 @@ where
local_values: auxiliary_polys_commitment
.as_ref()
.unwrap()
.get_lde_values_packed(i_start, step)
.get_lde_values_packed(i_start, step)[..num_lookup_columns]
.to_vec(),
next_values: auxiliary_polys_commitment
.as_ref()
.unwrap()
.get_lde_values_packed(i_next_start, step),
.get_lde_values_packed(i_next_start, step)[..num_lookup_columns]
.to_vec(),
challenges: challenges.to_vec(),
});

// Get all the data for this STARK's CTLs, if any:
// - the local and next row evaluations for the CTL Z polynomials
// - the associated challenges.
// - for each CTL:
// - the filter `Column`
// - the `Column`s that form the looking/looked table.

let ctl_vars = ctl_data.map(|data| {
let mut start_index = 0;
data.zs_columns
.iter()
.enumerate()
.map(|(i, zs_columns)| {
let num_ctl_helper_cols = num_ctl_columns[i];
let helper_columns = auxiliary_polys_commitment
.as_ref()
.unwrap()
.get_lde_values_packed(i_start, step)
[num_lookup_columns + start_index
..num_lookup_columns + start_index + num_ctl_helper_cols]
.to_vec();

let ctl_vars = CtlCheckVars::<F, F, P, 1> {
helper_columns,
local_z: auxiliary_polys_commitment
.as_ref()
.unwrap()
.get_lde_values_packed(i_start, step)
[num_lookup_columns + total_num_helper_cols + i],
next_z: auxiliary_polys_commitment
.as_ref()
.unwrap()
.get_lde_values_packed(i_next_start, step)
[num_lookup_columns + total_num_helper_cols + i],
challenges: zs_columns.challenge,
columns: zs_columns.columns.clone(),
filter: zs_columns.filter.clone(),
};

start_index += num_ctl_helper_cols;

ctl_vars
})
.collect::<Vec<_>>()
});

// Evaluate the polynomial combining all constraints, including
// those associated to the permutation arguments.
eval_vanishing_poly::<F, F, P, S, D, 1>(
@ -347,6 +479,7 @@ where
&vars,
lookups,
lookup_vars,
ctl_vars.as_deref(),
&mut consumer,
);

@ -375,9 +508,15 @@ where
.collect()
}

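// Illustrative sketch (not part of the patch): the denominator above is the
// vanishing polynomial of the trace subgroup, Z_H(x) = x^n - 1 with
// n = 2^degree_bits, which is cheap to evaluate anywhere on the LDE:
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;

fn z_h_at(x: GoldilocksField, degree_bits: usize) -> GoldilocksField {
    x.exp_power_of_2(degree_bits) - GoldilocksField::ONE
}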
#[cfg(test)]
/// Check that all constraints evaluate to zero on `H`.
/// Can also be used to check the degree of the constraints by evaluating on a larger subgroup.
///
/// Debugging function, to assert that all constraints evaluate to zero on `H`.
/// It can also be used to check the degree of the constraints by evaluating on a larger subgroup.
///
/// **Note**: this is an expensive check, hence is only available when the `debug_assertions`
/// flag is activated, so as not to hinder performance in regular `release` builds.
#[cfg(debug_assertions)]
fn check_constraints<'a, F, C, S, const D: usize>(
stark: &S,
trace_commitment: &'a PolynomialBatch<F, C, D>,
@ -385,9 +524,11 @@ fn check_constraints<'a, F, C, S, const D: usize>(
auxiliary_commitment: &'a Option<PolynomialBatch<F, C, D>>,
lookup_challenges: Option<&'a Vec<F>>,
lookups: &[Lookup<F>],
ctl_data: Option<&CtlData<F>>,
alphas: Vec<F>,
degree_bits: usize,
num_lookup_columns: usize,
num_ctl_helper_cols: &[usize],
) where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -395,6 +536,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
{
let degree = 1 << degree_bits;
let rate_bits = 0; // Set this to a higher value to check constraint degree.
let total_num_helper_cols: usize = num_ctl_helper_cols.iter().sum();

let size = degree << rate_bits;
let step = 1 << rate_bits;
@ -446,11 +588,44 @@ fn check_constraints<'a, F, C, S, const D: usize>(
);
// Get the local and next row evaluations for the current STARK's permutation argument.
let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
local_values: auxiliary_subgroup_evals.as_ref().unwrap()[i].clone(),
next_values: auxiliary_subgroup_evals.as_ref().unwrap()[i_next].clone(),
local_values: auxiliary_subgroup_evals.as_ref().unwrap()[i][..num_lookup_columns]
.to_vec(),
next_values: auxiliary_subgroup_evals.as_ref().unwrap()[i_next]
[..num_lookup_columns]
.to_vec(),
challenges: challenges.to_vec(),
});

// Get the local and next row evaluations for the current STARK's CTL Z polynomials.
let mut start_index = 0;
let ctl_vars = ctl_data.map(|data| {
data.zs_columns
.iter()
.enumerate()
.map(|(iii, zs_columns)| {
let num_helper_cols = num_ctl_helper_cols[iii];
let helper_columns = auxiliary_subgroup_evals.as_ref().unwrap()[i]
[num_lookup_columns + start_index
..num_lookup_columns + start_index + num_helper_cols]
.to_vec();
let ctl_vars = CtlCheckVars::<F, F, F, 1> {
helper_columns,
local_z: auxiliary_subgroup_evals.as_ref().unwrap()[i]
[num_lookup_columns + total_num_helper_cols + iii],
next_z: auxiliary_subgroup_evals.as_ref().unwrap()[i_next]
[num_lookup_columns + total_num_helper_cols + iii],
challenges: zs_columns.challenge,
columns: zs_columns.columns.clone(),
filter: zs_columns.filter.clone(),
};

start_index += num_helper_cols;

ctl_vars
})
.collect::<Vec<_>>()
});

// Evaluate the polynomial combining all constraints, including those associated
// to the permutation arguments.
eval_vanishing_poly::<F, F, F, S, D, 1>(
@ -458,6 +633,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
&vars,
lookups,
lookup_vars,
ctl_vars.as_deref(),
&mut consumer,
);
consumer.accumulators()

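// Illustrative sketch (not part of the patch): with `rate_bits = 0`,
// `check_constraints` only confirms that constraints vanish on `H`. Raising
// `rate_bits` evaluates them on a larger domain, where interpolating the
// evaluations exposes a constraint whose true degree exceeds the declared
// factor; a plain form of that check:
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::polynomial::PolynomialValues;

fn assert_constraint_degree(
    evals_on_large_domain: Vec<GoldilocksField>,
    max_degree_factor: usize,
    degree: usize,
) {
    // Interpolate over the larger subgroup and bound the coefficient count.
    let coeffs = PolynomialValues::new(evals_on_large_domain).ifft();
    assert!(coeffs.degree_plus_one() <= max_degree_factor * degree);
}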
@ -1,4 +1,7 @@
use alloc::vec;
//! Implementation of the STARK recursive verifier, i.e. where proof
//! verification is encoded in a plonky2 circuit.

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::iter::once;

@ -8,7 +11,9 @@ use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::fri::witness_util::set_fri_proof_target;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::RecursiveChallenger;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::iop::witness::Witness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
@ -17,6 +22,7 @@ use plonky2::with_context;

use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::cross_table_lookup::CtlCheckVarsTarget;
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::LookupCheckVarsTarget;
use crate::proof::{
@ -26,6 +32,8 @@ use crate::proof::{
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly_circuit;

/// Encodes the verification of a [`StarkProofWithPublicInputsTarget`]
/// for some statement in a circuit.
pub fn verify_stark_proof_circuit<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -40,51 +48,57 @@ pub fn verify_stark_proof_circuit<
C::Hasher: AlgebraicHasher<F>,
{
assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS);
let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config);

let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
let challenges = with_context!(
builder,
"compute challenges",
proof_with_pis.get_challenges::<F, C>(builder, inner_config)
proof_with_pis.get_challenges::<F, C>(builder, &mut challenger, None, false, inner_config)
);

verify_stark_proof_with_challenges_circuit::<F, C, S, D>(
builder,
stark,
proof_with_pis,
&stark,
&proof_with_pis.proof,
&proof_with_pis.public_inputs,
challenges,
None,
inner_config,
degree_bits,
);
}

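// Illustrative sketch (not part of the patch): allocating proof targets and
// encoding verification in one outer circuit. The allocation helpers are
// defined further down in this file; a single-table system passes 0 for both
// CTL counts.
fn encode_stark_verification<F, C, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: S,
    inner_config: &StarkConfig,
    degree_bits: usize,
) -> StarkProofWithPublicInputsTarget<D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    C::Hasher: AlgebraicHasher<F>,
{
    let proof_target =
        add_virtual_stark_proof_with_pis::<F, S, D>(builder, &stark, inner_config, degree_bits, 0, 0);
    verify_stark_proof_circuit::<F, C, S, D>(builder, stark, proof_target.clone(), inner_config);
    proof_target
}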
/// Recursively verifies an inner proof.
fn verify_stark_proof_with_challenges_circuit<
/// Recursively verifies an inner STARK proof.
pub fn verify_stark_proof_with_challenges_circuit<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
proof_with_pis: StarkProofWithPublicInputsTarget<D>,
stark: &S,
proof: &StarkProofTarget<D>,
public_inputs: &[Target],
challenges: StarkProofChallengesTarget<D>,
ctl_vars: Option<&[CtlCheckVarsTarget<F, D>]>,
inner_config: &StarkConfig,
degree_bits: usize,
) where
C::Hasher: AlgebraicHasher<F>,
{
check_lookup_options(&stark, &proof_with_pis, &challenges).unwrap();
check_lookup_options(stark, proof, &challenges).unwrap();

let zero = builder.zero();
let one = builder.one_extension();

let StarkProofWithPublicInputsTarget {
proof,
public_inputs,
} = proof_with_pis;
let num_ctl_polys = ctl_vars
.map(|v| v.iter().map(|ctl| ctl.helper_columns.len()).sum::<usize>())
.unwrap_or_default();

let StarkOpeningSetTarget {
local_values,
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first,
quotient_polys,
} = &proof.openings;

@ -92,11 +106,12 @@ fn verify_stark_proof_with_challenges_circuit<
local_values,
next_values,
&public_inputs
.into_iter()
.map(|t| builder.convert_to_ext(t))
.iter()
.map(|&t| builder.convert_to_ext(t))
.collect::<Vec<_>>(),
);

let degree_bits = proof.recover_degree_bits(inner_config);
let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits);
let z_h_zeta = builder.sub_extension(zeta_pow_deg, one);
let (l_0, l_last) =
@ -117,6 +132,7 @@ fn verify_stark_proof_with_challenges_circuit<
let lookup_challenges = stark.uses_lookups().then(|| {
challenges
.lookup_challenge_set
.as_ref()
.unwrap()
.challenges
.iter()
@ -133,7 +149,14 @@ fn verify_stark_proof_with_challenges_circuit<
with_context!(
builder,
"evaluate vanishing polynomial",
eval_vanishing_poly_circuit::<F, S, D>(builder, &stark, &vars, lookup_vars, &mut consumer)
eval_vanishing_poly_circuit::<F, S, D>(
builder,
stark,
&vars,
lookup_vars,
ctl_vars,
&mut consumer
)
);
let vanishing_polys_zeta = consumer.accumulators();

@ -148,20 +171,22 @@ fn verify_stark_proof_with_challenges_circuit<
builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly);
}

let merkle_caps = once(proof.trace_cap)
.chain(proof.auxiliary_polys_cap)
.chain(once(proof.quotient_polys_cap))
let merkle_caps = once(proof.trace_cap.clone())
.chain(proof.auxiliary_polys_cap.clone())
.chain(once(proof.quotient_polys_cap.clone()))
.collect_vec();

let fri_instance = stark.fri_instance_target(
builder,
challenges.stark_zeta,
F::primitive_root_of_unity(degree_bits),
num_ctl_polys,
ctl_zs_first.as_ref().map_or(0, |c| c.len()),
inner_config,
);
builder.verify_fri_proof::<C>(
&fri_instance,
&proof.openings.to_fri_openings(),
&proof.openings.to_fri_openings(zero),
&challenges.fri_challenges,
&merkle_caps,
&proof.opening_proof,
@ -188,17 +213,27 @@ fn eval_l_0_and_l_last_circuit<F: RichField + Extendable<D>, const D: usize>(
)
}

/// Adds a new `StarkProofWithPublicInputsTarget` to this circuit.
pub fn add_virtual_stark_proof_with_pis<
F: RichField + Extendable<D>,
S: Stark<F, D>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
stark: &S,
config: &StarkConfig,
degree_bits: usize,
num_ctl_helper_zs: usize,
num_ctl_zs: usize,
) -> StarkProofWithPublicInputsTarget<D> {
let proof = add_virtual_stark_proof::<F, S, D>(builder, stark, config, degree_bits);
let proof = add_virtual_stark_proof::<F, S, D>(
builder,
stark,
config,
degree_bits,
num_ctl_helper_zs,
num_ctl_zs,
);
let public_inputs = builder.add_virtual_targets(S::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
@ -206,58 +241,79 @@ pub fn add_virtual_stark_proof_with_pis<
}
}

/// Adds a new `StarkProofTarget` to this circuit.
pub fn add_virtual_stark_proof<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
stark: &S,
config: &StarkConfig,
degree_bits: usize,
num_ctl_helper_zs: usize,
num_ctl_zs: usize,
) -> StarkProofTarget<D> {
let fri_params = config.fri_params(degree_bits);
let cap_height = fri_params.config.cap_height;

let num_leaves_per_oracle = vec![
S::COLUMNS,
stark.num_lookup_helper_columns(config),
stark.quotient_degree_factor() * config.num_challenges,
];
let num_leaves_per_oracle = once(S::COLUMNS)
.chain(
(stark.uses_lookups() || stark.requires_ctls())
.then(|| stark.num_lookup_helper_columns(config) + num_ctl_helper_zs),
)
.chain(once(stark.quotient_degree_factor() * config.num_challenges))
.collect_vec();

let auxiliary_polys_cap = stark
.uses_lookups()
let auxiliary_polys_cap = (stark.uses_lookups() || stark.requires_ctls())
.then(|| builder.add_virtual_cap(cap_height));

StarkProofTarget {
trace_cap: builder.add_virtual_cap(cap_height),
auxiliary_polys_cap,
quotient_polys_cap: builder.add_virtual_cap(cap_height),
openings: add_stark_opening_set_target::<F, S, D>(builder, stark, config),
openings: add_virtual_stark_opening_set::<F, S, D>(
builder,
stark,
num_ctl_helper_zs,
num_ctl_zs,
config,
),
opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params),
}
}

fn add_stark_opening_set_target<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
fn add_virtual_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
stark: &S,
num_ctl_helper_zs: usize,
num_ctl_zs: usize,
config: &StarkConfig,
) -> StarkOpeningSetTarget<D> {
let num_challenges = config.num_challenges;
StarkOpeningSetTarget {
local_values: builder.add_virtual_extension_targets(S::COLUMNS),
next_values: builder.add_virtual_extension_targets(S::COLUMNS),
auxiliary_polys: stark.uses_lookups().then(|| {
builder.add_virtual_extension_targets(stark.num_lookup_helper_columns(config))
auxiliary_polys: (stark.uses_lookups() || stark.requires_ctls()).then(|| {
builder.add_virtual_extension_targets(
stark.num_lookup_helper_columns(config) + num_ctl_helper_zs,
)
}),
auxiliary_polys_next: stark.uses_lookups().then(|| {
builder.add_virtual_extension_targets(stark.num_lookup_helper_columns(config))
auxiliary_polys_next: (stark.uses_lookups() || stark.requires_ctls()).then(|| {
builder.add_virtual_extension_targets(
stark.num_lookup_helper_columns(config) + num_ctl_helper_zs,
)
}),
ctl_zs_first: stark
.requires_ctls()
.then(|| builder.add_virtual_targets(num_ctl_zs)),
quotient_polys: builder
.add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
.add_virtual_extension_targets(stark.quotient_degree_factor() * config.num_challenges),
}
}

/// Set the targets in a `StarkProofWithPublicInputsTarget` to
/// their corresponding values in a `StarkProofWithPublicInputs`.
pub fn set_stark_proof_with_pis_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
witness: &mut W,
stark_proof_with_pis_target: &StarkProofWithPublicInputsTarget<D>,
stark_proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
zero: Target,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F>,
@ -277,13 +333,16 @@ pub fn set_stark_proof_with_pis_target<F, C: GenericConfig<D, F = F>, W, const D
witness.set_target(pi_t, pi);
}

set_stark_proof_target(witness, pt, proof);
set_stark_proof_target(witness, pt, proof, zero);
}

/// Set the targets in a [`StarkProofTarget`] to their corresponding values in a
/// [`StarkProof`].
pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
witness: &mut W,
proof_target: &StarkProofTarget<D>,
proof: &StarkProof<F, C, D>,
zero: Target,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F>,
@ -293,7 +352,7 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
witness.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap);

witness.set_fri_openings(
&proof_target.openings.to_fri_openings(),
&proof_target.openings.to_fri_openings(zero),
&proof.openings.to_fri_openings(),
);

@ -308,23 +367,23 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
}

/// Utility function to check that all lookup data wrapped in `Option`s is `Some` iff
/// the Stark uses a permutation argument.
/// the STARK uses a permutation argument.
fn check_lookup_options<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
stark: &S,
proof_with_pis: &StarkProofWithPublicInputsTarget<D>,
proof: &StarkProofTarget<D>,
challenges: &StarkProofChallengesTarget<D>,
) -> Result<()> {
let options_is_some = [
proof_with_pis.proof.auxiliary_polys_cap.is_some(),
proof_with_pis.proof.openings.auxiliary_polys.is_some(),
proof_with_pis.proof.openings.auxiliary_polys_next.is_some(),
proof.auxiliary_polys_cap.is_some(),
proof.openings.auxiliary_polys.is_some(),
proof.openings.auxiliary_polys_next.is_some(),
challenges.lookup_challenge_set.is_some(),
];
ensure!(
options_is_some
.into_iter()
.all(|b| b == stark.uses_lookups()),
"Lookups data doesn't match with Stark configuration."
.iter()
.all(|&b| b == stark.uses_lookups() || stark.requires_ctls()),
"Lookups data doesn't match with STARK configuration."
);
Ok(())
}

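// Illustrative sketch (not part of the patch): filling the allocated targets
// with a concrete proof before witness generation; `pw` is a hypothetical
// `PartialWitness`, and the extra `zero` target backs the base-field openings
// lifted to extension targets by `to_fri_openings(zero)`.
use plonky2::iop::witness::PartialWitness;

fn fill_stark_proof_targets<F, C, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    pw: &mut PartialWitness<F>,
    proof_target: &StarkProofWithPublicInputsTarget<D>,
    proof: &StarkProofWithPublicInputs<F, C, D>,
) where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    C::Hasher: AlgebraicHasher<F>,
{
    let zero = builder.zero();
    set_stark_proof_with_pis_target(pw, proof_target, proof, zero);
}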
@ -1,5 +1,8 @@
use alloc::vec;
use alloc::vec::Vec;
//! Implementation of the [`Stark`] trait that defines the set of constraints
//! related to a statement.

#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};

use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
@ -17,14 +20,11 @@ use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::Lookup;

const TRACE_ORACLE_INDEX: usize = 0;
const AUXILIARY_ORACLE_INDEX: usize = 1;
const QUOTIENT_ORACLE_INDEX: usize = 2;

/// Represents a STARK system.
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// The total number of columns in the trace.
const COLUMNS: usize = Self::EvaluationFrameTarget::COLUMNS;
/// The total number of public inputs.
const PUBLIC_INPUTS: usize = Self::EvaluationFrameTarget::PUBLIC_INPUTS;

/// This is used to evaluate constraints natively.
@ -36,7 +36,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// The `Target` version of `Self::EvaluationFrame`, used to evaluate constraints recursively.
type EvaluationFrameTarget: StarkEvaluationFrame<ExtensionTarget<D>, ExtensionTarget<D>>;

/// Evaluate constraints at a vector of points.
/// Evaluates constraints at a vector of points.
///
/// The points are elements of a field `FE`, a degree `D2` extension of `F`. This lets us
/// evaluate constraints over a larger domain if desired. This can also be called with `FE = F`
@ -50,7 +50,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

/// Evaluate constraints at a vector of points from the base field `F`.
/// Evaluates constraints at a vector of points from the base field `F`.
fn eval_packed_base<P: PackedField<Scalar = F>>(
&self,
vars: &Self::EvaluationFrame<F, P, 1>,
@ -59,7 +59,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
self.eval_packed_generic(vars, yield_constr)
}

/// Evaluate constraints at a single point from the degree `D` extension field.
/// Evaluates constraints at a single point from the degree `D` extension field.
fn eval_ext(
&self,
vars: &Self::EvaluationFrame<F::Extension, F::Extension, D>,
@ -68,10 +68,10 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
self.eval_packed_generic(vars, yield_constr)
}

/// Evaluate constraints at a vector of points from the degree `D` extension field. This is like
/// `eval_ext`, except in the context of a recursive circuit.
/// Note: constraints must be added through`yield_constr.constraint(builder, constraint)` in the
/// same order as they are given in `eval_packed_generic`.
/// Evaluates constraints at a vector of points from the degree `D` extension field.
/// This is like `eval_ext`, except in the context of a recursive circuit.
/// Note: constraints must be added through `yield_constr.constraint(builder, constraint)`
/// in the same order as they are given in `eval_packed_generic`.
fn eval_ext_circuit(
&self,
builder: &mut CircuitBuilder<F, D>,
@ -79,14 +79,16 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
);

/// The maximum constraint degree.
/// Outputs the maximum constraint degree of this [`Stark`].
fn constraint_degree(&self) -> usize;

/// The maximum constraint degree.
/// Outputs the maximum quotient polynomial's degree factor of this [`Stark`].
fn quotient_degree_factor(&self) -> usize {
1.max(self.constraint_degree() - 1)
}

/// Outputs the number of quotient polynomials this [`Stark`] would require with
/// the provided [`StarkConfig`].
fn num_quotient_polys(&self, config: &StarkConfig) -> usize {
self.quotient_degree_factor() * config.num_challenges
}
@ -96,30 +98,36 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
&self,
zeta: F::Extension,
g: F,
num_ctl_helpers: usize,
num_ctl_zs: Vec<usize>,
config: &StarkConfig,
) -> FriInstanceInfo<F, D> {
let trace_oracle = FriOracleInfo {
let mut oracles = vec![];
let trace_info = FriPolynomialInfo::from_range(oracles.len(), 0..Self::COLUMNS);
oracles.push(FriOracleInfo {
num_polys: Self::COLUMNS,
blinding: false,
};
let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);
});

let num_lookup_columns = self.num_lookup_helper_columns(config);
let num_auxiliary_polys = num_lookup_columns;
let auxiliary_oracle = FriOracleInfo {
num_polys: num_auxiliary_polys,
blinding: false,
let num_auxiliary_polys = num_lookup_columns + num_ctl_helpers + num_ctl_zs.len();
let auxiliary_polys_info = if self.uses_lookups() || self.requires_ctls() {
let aux_polys = FriPolynomialInfo::from_range(oracles.len(), 0..num_auxiliary_polys);
oracles.push(FriOracleInfo {
num_polys: num_auxiliary_polys,
blinding: false,
});
aux_polys
} else {
vec![]
};
let auxiliary_polys_info =
FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);

let num_quotient_polys = self.num_quotient_polys(config);
let quotient_oracle = FriOracleInfo {
let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
oracles.push(FriOracleInfo {
num_polys: num_quotient_polys,
blinding: false,
};
let quotient_info =
FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys);
});

let zeta_batch = FriBatchInfo {
point: zeta,
@ -135,10 +143,22 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
polynomials: [trace_info, auxiliary_polys_info].concat(),
};

FriInstanceInfo {
oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
batches: vec![zeta_batch, zeta_next_batch],
let mut batches = vec![zeta_batch, zeta_next_batch];

if self.requires_ctls() {
let ctl_zs_info = FriPolynomialInfo::from_range(
1, // auxiliary oracle index
num_lookup_columns + num_ctl_helpers..num_auxiliary_polys,
);
let ctl_first_batch = FriBatchInfo {
point: F::Extension::ONE,
polynomials: ctl_zs_info,
};

batches.push(ctl_first_batch);
}

FriInstanceInfo { oracles, batches }
}

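// Illustrative note (not part of the patch): oracle indices are now
// positional (`oracles.len()` at push time) instead of the removed
// TRACE/AUXILIARY/QUOTIENT constants, so a STARK with neither lookups nor
// CTLs simply has two oracles and its quotient polynomials live at index 1.
// The assumed invariant, for any `stark: &S`:
//
//     oracles.len() == if stark.uses_lookups() || stark.requires_ctls() { 3 } else { 2 }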
/// Computes the FRI instance used to prove this Stark.
@ -147,30 +167,36 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
builder: &mut CircuitBuilder<F, D>,
zeta: ExtensionTarget<D>,
g: F,
num_ctl_helper_polys: usize,
num_ctl_zs: usize,
config: &StarkConfig,
) -> FriInstanceInfoTarget<D> {
let trace_oracle = FriOracleInfo {
let mut oracles = vec![];
let trace_info = FriPolynomialInfo::from_range(oracles.len(), 0..Self::COLUMNS);
oracles.push(FriOracleInfo {
num_polys: Self::COLUMNS,
blinding: false,
};
let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);
});

let num_lookup_columns = self.num_lookup_helper_columns(config);
let num_auxiliary_polys = num_lookup_columns;
let auxiliary_oracle = FriOracleInfo {
num_polys: num_auxiliary_polys,
blinding: false,
let num_auxiliary_polys = num_lookup_columns + num_ctl_helper_polys + num_ctl_zs;
let auxiliary_polys_info = if self.uses_lookups() || self.requires_ctls() {
let aux_polys = FriPolynomialInfo::from_range(oracles.len(), 0..num_auxiliary_polys);
oracles.push(FriOracleInfo {
num_polys: num_auxiliary_polys,
blinding: false,
});
aux_polys
} else {
vec![]
};
let auxiliary_polys_info =
FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);

let num_quotient_polys = self.num_quotient_polys(config);
let quotient_oracle = FriOracleInfo {
let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
oracles.push(FriOracleInfo {
num_polys: num_quotient_polys,
blinding: false,
};
let quotient_info =
FriPolynomialInfo::from_range(QUOTIENT_ORACLE_INDEX, 0..num_quotient_polys);
});

let zeta_batch = FriBatchInfoTarget {
point: zeta,
@ -187,16 +213,31 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
polynomials: [trace_info, auxiliary_polys_info].concat(),
};

FriInstanceInfoTarget {
oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
batches: vec![zeta_batch, zeta_next_batch],
let mut batches = vec![zeta_batch, zeta_next_batch];

if self.requires_ctls() {
let ctl_zs_info = FriPolynomialInfo::from_range(
1, // auxiliary oracle index
num_lookup_columns + num_ctl_helper_polys..num_auxiliary_polys,
);
let ctl_first_batch = FriBatchInfoTarget {
point: builder.one_extension(),
polynomials: ctl_zs_info,
};

batches.push(ctl_first_batch);
}

FriInstanceInfoTarget { oracles, batches }
}

/// Outputs all the [`Lookup`] this STARK table needs to perform across its columns.
fn lookups(&self) -> Vec<Lookup<F>> {
vec![]
}

/// Outputs the number of total lookup helper columns, based on this STARK's vector
/// of [`Lookup`] and the number of challenges used by this [`StarkConfig`].
fn num_lookup_helper_columns(&self, config: &StarkConfig) -> usize {
self.lookups()
.iter()
@ -205,7 +246,17 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
* config.num_challenges
}

/// Indicates whether this STARK uses lookups over some of its columns, and as such requires
/// additional steps during proof generation to handle auxiliary polynomials.
fn uses_lookups(&self) -> bool {
!self.lookups().is_empty()
}

/// Indicates whether this STARK belongs to a multi-STARK system, and as such may require
/// cross-table lookups to connect shared values across different traces.
///
/// It defaults to `false`, i.e. for simple uni-STARK systems.
fn requires_ctls(&self) -> bool {
false
}
}

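These defaults let a plain uni-STARK compile with no extra code: `lookups` returns an empty vector, so `uses_lookups` is `false`, and `requires_ctls` stays `false`; a table in a multi-STARK system opts in by overriding them. The helper-column count follows the formula above: each lookup contributes its own helper columns, and the whole set is replicated once per challenge. A tiny self-contained sketch of that arithmetic (the per-lookup counts are hypothetical stand-ins for what `Lookup::num_helper_columns` would return):

/// Sketch: total helper columns = (sum of per-lookup helper counts) * num_challenges.
fn sketch_num_lookup_helper_columns(helpers_per_lookup: &[usize], num_challenges: usize) -> usize {
    helpers_per_lookup.iter().sum::<usize>() * num_challenges
}

#[test]
fn sketch_helper_count() {
    // Two lookups needing 3 and 2 helper columns, with 2 challenges: 10 columns total.
    assert_eq!(sketch_num_lookup_helper_columns(&[3, 2], 2), 10);
}
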
@ -1,5 +1,7 @@
use alloc::vec;
use alloc::vec::Vec;
//! Utility module for testing [`Stark`] implementation.

#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};

use anyhow::{ensure, Result};
use plonky2::field::extension::{Extendable, FieldExtension};

@ -1,3 +1,6 @@
//! Utility module providing some helper functions.

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

use itertools::Itertools;

@ -4,17 +4,24 @@ use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{
eval_cross_table_lookup_checks, eval_cross_table_lookup_checks_circuit, CtlCheckVars,
CtlCheckVarsTarget,
};
use crate::lookup::{
eval_ext_lookups_circuit, eval_packed_lookups_generic, Lookup, LookupCheckVars,
LookupCheckVarsTarget,
};
use crate::stark::Stark;

/// Evaluates all constraint, permutation and cross-table lookup polynomials
/// of the current STARK at the local and next values.
pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
stark: &S,
vars: &S::EvaluationFrame<FE, P, D2>,
lookups: &[Lookup<F>],
lookup_vars: Option<LookupCheckVars<F, FE, P, D2>>,
ctl_vars: Option<&[CtlCheckVars<F, FE, P, D2>]>,
consumer: &mut ConstraintConsumer<P>,
) where
F: RichField + Extendable<D>,
@ -22,6 +29,7 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
P: PackedField<Scalar = FE>,
S: Stark<F, D>,
{
// Evaluate all of the STARK's table constraints.
stark.eval_packed_generic(vars, consumer);
if let Some(lookup_vars) = lookup_vars {
// Evaluate the STARK constraints related to the permutation arguments.
@ -33,21 +41,45 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
consumer,
);
}
if let Some(ctl_vars) = ctl_vars {
// Evaluate the STARK constraints related to the CTLs.
eval_cross_table_lookup_checks::<F, FE, P, S, D, D2>(
vars,
ctl_vars,
consumer,
stark.constraint_degree(),
);
}
}

/// Circuit version of `eval_vanishing_poly`.
/// Evaluates all constraint, permutation and cross-table lookup polynomials
/// of the current STARK at the local and next values.
pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
vars: &S::EvaluationFrameTarget,
lookup_vars: Option<LookupCheckVarsTarget<D>>,
ctl_vars: Option<&[CtlCheckVarsTarget<F, D>]>,
consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
F: RichField + Extendable<D>,
S: Stark<F, D>,
{
// Evaluate all of the STARK's table constraints.
stark.eval_ext_circuit(builder, vars, consumer);
if let Some(lookup_vars) = lookup_vars {
// Evaluate all of the STARK's constraints related to the permutation argument.
eval_ext_lookups_circuit::<F, S, D>(builder, stark, vars, lookup_vars, consumer);
}
if let Some(ctl_vars) = ctl_vars {
// Evaluate all of the STARK's constraints related to the CTLs.
eval_cross_table_lookup_checks_circuit::<S, F, D>(
builder,
vars,
ctl_vars,
consumer,
stark.constraint_degree(),
);
}
}

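Both the packed and the circuit versions share the same gating shape: the table's own constraints are always evaluated, while lookup and CTL constraints run only when their witness data is supplied as `Some(...)`. A minimal standalone sketch of that control flow, with plain integers standing in for the starky constraint types (everything here is an illustrative stand-in):

// Standalone sketch: base constraints always accumulate; lookup (logUp) and
// CTL contributions are added only when present.
struct SketchConsumer {
    acc: u64,
}

fn sketch_eval_vanishing(
    base_constraints: u64,
    lookup_vars: Option<u64>,
    ctl_vars: Option<&[u64]>,
    consumer: &mut SketchConsumer,
) {
    consumer.acc += base_constraints; // table constraints
    if let Some(l) = lookup_vars {
        consumer.acc += l; // lookup constraints
    }
    if let Some(ctls) = ctl_vars {
        consumer.acc += ctls.iter().sum::<u64>(); // cross-table lookup constraints
    }
}
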
@ -1,4 +1,8 @@
//! Implementation of the STARK verifier.

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::any::type_name;
use core::iter::once;

use anyhow::{anyhow, ensure, Result};
@ -8,17 +12,20 @@ use plonky2::field::types::Field;
use plonky2::fri::verifier::verify_fri_proof;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::plonk_common::reduce_with_powers;

use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cross_table_lookup::CtlCheckVars;
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::LookupCheckVars;
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;

/// Verifies a [`StarkProofWithPublicInputs`] against a STARK statement.
pub fn verify_stark_proof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -30,36 +37,66 @@ pub fn verify_stark_proof<
config: &StarkConfig,
) -> Result<()> {
ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS);
let degree_bits = proof_with_pis.proof.recover_degree_bits(config);
let challenges = proof_with_pis.get_challenges(config, degree_bits);
verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, degree_bits, config)
let mut challenger = Challenger::<F, C::Hasher>::new();

let challenges = proof_with_pis.get_challenges(&mut challenger, None, false, config);

verify_stark_proof_with_challenges(
&stark,
&proof_with_pis.proof,
&challenges,
None,
&proof_with_pis.public_inputs,
config,
)
}

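The public entry point now builds a fresh `Challenger`, derives the proof challenges from it, and forwards to `verify_stark_proof_with_challenges` with no CTL data, which is the right shape for a single-table STARK. A hypothetical end-to-end sketch of calling this API (the `FibonacciStark` type, its `generate_trace` return shape, and the public-input values are assumptions for illustration, not part of this diff):

use anyhow::Result;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::util::timing::TimingTree;
use starky::config::StarkConfig;
use starky::prover::prove;
use starky::verifier::verify_stark_proof;

fn sketch_prove_and_verify() -> Result<()> {
    const D: usize = 2;
    type C = PoseidonGoldilocksConfig;
    type F = <C as GenericConfig<D>>::F;

    let config = StarkConfig::standard_fast_config();
    // Hypothetical single-table STARK implementing the `Stark` trait,
    // along with a hypothetical trace/public-input generator.
    let stark = FibonacciStark::<F, D>::new(1 << 10);
    let (trace, public_inputs) = stark.generate_trace();

    let proof = prove::<F, C, _, D>(
        stark.clone(),
        &config,
        trace,
        &public_inputs,
        &mut TimingTree::default(),
    )?;
    verify_stark_proof(stark, proof, &config)
}
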
pub(crate) fn verify_stark_proof_with_challenges<
/// Verifies a [`StarkProofWithPublicInputs`] against a STARK statement,
/// with the provided [`StarkProofChallenges`].
/// It also supports optional cross-table lookups data and challenges,
/// in case this proof is part of a multi-STARK system.
pub fn verify_stark_proof_with_challenges<F, C, S, const D: usize>(
stark: &S,
proof: &StarkProof<F, C, D>,
challenges: &StarkProofChallenges<F, D>,
ctl_vars: Option<&[CtlCheckVars<F, F::Extension, F::Extension, D>]>,
public_inputs: &[F],
config: &StarkConfig,
) -> Result<()>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
const D: usize,
>(
stark: S,
proof_with_pis: StarkProofWithPublicInputs<F, C, D>,
challenges: StarkProofChallenges<F, D>,
degree_bits: usize,
config: &StarkConfig,
) -> Result<()> {
validate_proof_shape(&stark, &proof_with_pis, config)?;
{
log::debug!("Checking proof: {}", type_name::<S>());

let StarkProofWithPublicInputs {
let (num_ctl_z_polys, num_ctl_polys) = ctl_vars
.map(|ctls| {
(
ctls.len(),
ctls.iter().map(|ctl| ctl.helper_columns.len()).sum(),
)
})
.unwrap_or_default();

validate_proof_shape(
stark,
proof,
public_inputs,
} = proof_with_pis;
config,
num_ctl_polys,
num_ctl_z_polys,
)?;

let StarkOpeningSet {
local_values,
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first: _,
quotient_polys,
} = &proof.openings;

let vars = S::EvaluationFrame::from_values(
local_values,
next_values,
@ -69,9 +106,12 @@ pub(crate) fn verify_stark_proof_with_challenges<
.map(F::Extension::from_basefield)
.collect::<Vec<_>>(),
);

let degree_bits = proof.recover_degree_bits(config);
let (l_0, l_last) = eval_l_0_and_l_last(degree_bits, challenges.stark_zeta);
let last = F::primitive_root_of_unity(degree_bits).inverse();
let z_last = challenges.stark_zeta - last.into();

let mut consumer = ConstraintConsumer::<F::Extension>::new(
challenges
.stark_alphas
@ -84,28 +124,34 @@ pub(crate) fn verify_stark_proof_with_challenges<
);

let num_lookup_columns = stark.num_lookup_helper_columns(config);
let lookup_challenges = (num_lookup_columns > 0).then(|| {
challenges
.lookup_challenge_set
.unwrap()
.challenges
.iter()
.map(|ch| ch.beta)
.collect::<Vec<_>>()
});
let lookup_challenges = if stark.uses_lookups() {
Some(
challenges
.lookup_challenge_set
.as_ref()
.unwrap()
.challenges
.iter()
.map(|ch| ch.beta)
.collect::<Vec<_>>(),
)
} else {
None
};

let lookup_vars = stark.uses_lookups().then(|| LookupCheckVars {
local_values: auxiliary_polys.as_ref().unwrap().clone(),
next_values: auxiliary_polys_next.as_ref().unwrap().clone(),
local_values: auxiliary_polys.as_ref().unwrap()[..num_lookup_columns].to_vec(),
next_values: auxiliary_polys_next.as_ref().unwrap()[..num_lookup_columns].to_vec(),
challenges: lookup_challenges.unwrap(),
});
let lookups = stark.lookups();

eval_vanishing_poly::<F, F::Extension, F::Extension, S, D, D>(
&stark,
stark,
&vars,
&lookups,
lookup_vars,
ctl_vars,
&mut consumer,
);
let vanishing_polys_zeta = consumer.accumulators();
@ -128,15 +174,25 @@ pub(crate) fn verify_stark_proof_with_challenges<
);
}

let merkle_caps = once(proof.trace_cap)
.chain(proof.auxiliary_polys_cap)
.chain(once(proof.quotient_polys_cap))
let merkle_caps = once(proof.trace_cap.clone())
.chain(proof.auxiliary_polys_cap.clone())
.chain(once(proof.quotient_polys_cap.clone()))
.collect_vec();

let num_ctl_zs = ctl_vars
.map(|vars| {
vars.iter()
.map(|ctl| ctl.helper_columns.len())
.collect::<Vec<_>>()
})
.unwrap_or_default();

verify_fri_proof::<F, C, D>(
&stark.fri_instance(
challenges.stark_zeta,
F::primitive_root_of_unity(degree_bits),
num_ctl_polys,
num_ctl_zs,
config,
),
&proof.openings.to_fri_openings(),
@ -151,18 +207,17 @@ pub(crate) fn verify_stark_proof_with_challenges<

fn validate_proof_shape<F, C, S, const D: usize>(
stark: &S,
proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
proof: &StarkProof<F, C, D>,
public_inputs: &[F],
config: &StarkConfig,
num_ctl_helpers: usize,
num_ctl_zs: usize,
) -> anyhow::Result<()>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
{
let StarkProofWithPublicInputs {
proof,
public_inputs,
} = proof_with_pis;
let degree_bits = proof.recover_degree_bits(config);

let StarkProof {
@ -180,6 +235,7 @@ where
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first,
quotient_polys,
} = openings;

@ -188,8 +244,6 @@ where
let fri_params = config.fri_params(degree_bits);
let cap_height = fri_params.config.cap_height;

let num_auxiliary = stark.num_lookup_helper_columns(config);

ensure!(trace_cap.height() == cap_height);
ensure!(quotient_polys_cap.height() == cap_height);

@ -202,6 +256,9 @@ where
auxiliary_polys_cap,
auxiliary_polys,
auxiliary_polys_next,
num_ctl_helpers,
num_ctl_zs,
ctl_zs_first,
config,
)?;

@ -221,21 +278,24 @@ fn eval_l_0_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
}

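For context on `eval_l_0_and_l_last`: over the multiplicative subgroup of order n = 2^log_n with generator g, the first and last Lagrange basis polynomials have standard closed forms (stated here for reference, not quoted from the diff):

L_0(x) = (x^n - 1) / (n * (x - 1))
L_last(x) = (x^n - 1) / (n * (g * x - 1))

which is why the verifier above needs only `degree_bits` and the opening point `challenges.stark_zeta` to evaluate both of them.
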
/// Utility function to check that all lookups data wrapped in `Option`s are `Some` iff
/// the Stark uses a permutation argument.
fn check_lookup_options<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
const D: usize,
>(
/// the STARK uses a permutation argument.
fn check_lookup_options<F, C, S, const D: usize>(
stark: &S,
auxiliary_polys_cap: &Option<MerkleCap<F, <C as GenericConfig<D>>::Hasher>>,
auxiliary_polys: &Option<Vec<<F as Extendable<D>>::Extension>>,
auxiliary_polys_next: &Option<Vec<<F as Extendable<D>>::Extension>>,
num_ctl_helpers: usize,
num_ctl_zs: usize,
ctl_zs_first: &Option<Vec<F>>,
config: &StarkConfig,
) -> Result<()> {
if stark.uses_lookups() {
let num_auxiliary = stark.num_lookup_helper_columns(config);
) -> Result<()>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
{
if stark.uses_lookups() || stark.requires_ctls() {
let num_auxiliary = stark.num_lookup_helper_columns(config) + num_ctl_helpers + num_ctl_zs;
let cap_height = config.fri_config.cap_height;

let auxiliary_polys_cap = auxiliary_polys_cap
@ -248,6 +308,10 @@ fn check_lookup_options<
.as_ref()
.ok_or_else(|| anyhow!("Missing auxiliary_polys_next"))?;

if let Some(ctl_zs_first) = ctl_zs_first {
ensure!(ctl_zs_first.len() == num_ctl_zs);
}

ensure!(auxiliary_polys_cap.height() == cap_height);
ensure!(auxiliary_polys.len() == num_auxiliary);
ensure!(auxiliary_polys_next.len() == num_auxiliary);

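The shape check above pins down the auxiliary commitment exactly: when lookups or CTLs are in play, the committed auxiliary polynomials are the lookup helper columns, then the CTL helper columns, then one running-sum polynomial per CTL (the same ordering the `ctl_zs_info` range in `fri_instance` relies on). A tiny self-contained sketch of the expected count, as plain arithmetic mirroring the `num_auxiliary` computation:

/// Sketch: expected number of committed auxiliary polynomials.
fn expected_num_auxiliary(num_lookup_helpers: usize, num_ctl_helpers: usize, num_ctl_zs: usize) -> usize {
    num_lookup_helpers + num_ctl_helpers + num_ctl_zs
}

#[test]
fn sketch_shape_check() {
    // 4 lookup helpers, 3 CTL helpers, 2 CTL running sums => 9 auxiliary polynomials.
    assert_eq!(expected_num_auxiliary(4, 3, 2), 9);
}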