Mirror of https://github.com/logos-storage/plonky2.git (synced 2026-01-22 23:53:11 +00:00)
Merge pull request #1235 from topos-protocol/new-logup
Rebased logUp implementation
Commit 1ff6d4a283
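For context, a sketch of the argument this PR adopts (following the logUp paper https://ia.cr/2022/1530, which the new `lookup.rs` cites): to show that every value of the looked-up columns $f_i$ lies in the table column $t$, the prover commits to a frequencies column $m$ and proves, at a random challenge $x$, the rational identity

$$\sum_i \sum_{\text{rows } r} \frac{1}{x + f_i(r)} \;=\; \sum_{\text{rows } r} \frac{m(r)}{x + t(r)}.$$

This replaces the per-column permutation (grand-product) range checks that the diff below removes.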
@@ -51,27 +51,15 @@ impl<F: RichField + Extendable<D>, const D: usize> Default for AllStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
    pub(crate) fn nums_permutation_zs(&self, config: &StarkConfig) -> [usize; NUM_TABLES] {
    pub(crate) fn num_lookups_helper_columns(&self, config: &StarkConfig) -> [usize; NUM_TABLES] {
        [
            self.arithmetic_stark.num_permutation_batches(config),
            self.byte_packing_stark.num_permutation_batches(config),
            self.cpu_stark.num_permutation_batches(config),
            self.keccak_stark.num_permutation_batches(config),
            self.keccak_sponge_stark.num_permutation_batches(config),
            self.logic_stark.num_permutation_batches(config),
            self.memory_stark.num_permutation_batches(config),
        ]
    }

    pub(crate) fn permutation_batch_sizes(&self) -> [usize; NUM_TABLES] {
        [
            self.arithmetic_stark.permutation_batch_size(),
            self.byte_packing_stark.permutation_batch_size(),
            self.cpu_stark.permutation_batch_size(),
            self.keccak_stark.permutation_batch_size(),
            self.keccak_sponge_stark.permutation_batch_size(),
            self.logic_stark.permutation_batch_size(),
            self.memory_stark.permutation_batch_size(),
            self.arithmetic_stark.num_lookup_helper_columns(config),
            self.byte_packing_stark.num_lookup_helper_columns(config),
            self.cpu_stark.num_lookup_helper_columns(config),
            self.keccak_stark.num_lookup_helper_columns(config),
            self.keccak_sponge_stark.num_lookup_helper_columns(config),
            self.logic_stark.num_lookup_helper_columns(config),
            self.memory_stark.num_lookup_helper_columns(config),
        ]
    }
}

@@ -1,7 +1,6 @@
use std::marker::PhantomData;
use std::ops::Range;

use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;

@@ -14,12 +13,12 @@ use static_assertions::const_assert;

use super::columns::NUM_ARITH_COLUMNS;
use crate::all_stark::Table;
use crate::arithmetic::columns::{RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{Column, TableWithColumns};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols};
use crate::permutation::PermutationPair;
use crate::lookup::Lookup;
use crate::stark::Stark;

/// Link the 16-bit columns of the arithmetic table, split into groups

@@ -124,13 +123,18 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
            cols[columns::RANGE_COUNTER][i] = F::from_canonical_usize(RANGE_MAX - 1);
        }

        // For each column c in cols, generate the range-check
        // permutations and put them in the corresponding range-check
        // columns rc_c and rc_c+1.
        for (c, rc_c) in columns::SHARED_COLS.zip(columns::RC_COLS.step_by(2)) {
            let (col_perm, table_perm) = permuted_cols(&cols[c], &cols[columns::RANGE_COUNTER]);
            cols[rc_c].copy_from_slice(&col_perm);
            cols[rc_c + 1].copy_from_slice(&table_perm);
        // Generate the frequencies column.
        for col in SHARED_COLS {
            for i in 0..n_rows {
                let x = cols[col][i].to_canonical_u64() as usize;
                assert!(
                    x < RANGE_MAX,
                    "column value {} exceeds the max range value {}",
                    x,
                    RANGE_MAX
                );
                cols[RC_FREQUENCIES][x] += F::ONE;
            }
        }
    }
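To illustrate the frequency counting added above, a minimal standalone sketch (a hypothetical helper over plain `u64` values rather than field elements, not part of the diff):

// Count how often each value in 0..range_max occurs across the looked-up columns;
// this mirrors the `cols[RC_FREQUENCIES][x] += F::ONE` accumulation in the trace generator.
fn count_frequencies(shared_cols: &[Vec<u64>], range_max: usize) -> Vec<u64> {
    let mut freq = vec![0u64; range_max];
    for col in shared_cols {
        for &x in col {
            assert!((x as usize) < range_max, "value {} out of range", x);
            freq[x as usize] += 1;
        }
    }
    freq
}

For example, `count_frequencies(&[vec![1, 1, 3]], 4)` returns `[0, 2, 0, 1]`.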

@@ -185,11 +189,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        // Range check all the columns
        for col in columns::RC_COLS.step_by(2) {
            eval_lookups(vars, yield_constr, col, col + 1);
        }

        let lv: &[P; NUM_ARITH_COLUMNS] = vars.get_local_values().try_into().unwrap();
        let nv: &[P; NUM_ARITH_COLUMNS] = vars.get_next_values().try_into().unwrap();

@@ -217,11 +216,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        // Range check all the columns
        for col in columns::RC_COLS.step_by(2) {
            eval_lookups_circuit(builder, vars, yield_constr, col, col + 1);
        }

        let lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =
            vars.get_local_values().try_into().unwrap();
        let nv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =

@@ -249,18 +243,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
        3
    }

    fn permutation_pairs(&self) -> Vec<PermutationPair> {
        const START: usize = columns::START_SHARED_COLS;
        const END: usize = START + columns::NUM_SHARED_COLS;
        let mut pairs = Vec::with_capacity(2 * columns::NUM_SHARED_COLS);
        for (c, c_perm) in (START..END).zip_eq(columns::RC_COLS.step_by(2)) {
            pairs.push(PermutationPair::singletons(c, c_perm));
            pairs.push(PermutationPair::singletons(
                c_perm + 1,
                columns::RANGE_COUNTER,
            ));
        }
        pairs
    fn lookups(&self) -> Vec<Lookup> {
        vec![Lookup {
            columns: SHARED_COLS.collect(),
            table_column: RANGE_COUNTER,
            frequencies_column: RC_FREQUENCIES,
        }]
    }
}

@@ -109,8 +109,7 @@ pub(crate) const MODULAR_DIV_DENOM_IS_ZERO: usize = AUX_REGISTER_2.end;
// of the column and the permutation of the range. The two
// permutations associated to column i will be in columns RC_COLS[2i]
// and RC_COLS[2i+1].
pub(crate) const NUM_RANGE_CHECK_COLS: usize = 1 + 2 * NUM_SHARED_COLS;
pub(crate) const RANGE_COUNTER: usize = START_SHARED_COLS + NUM_SHARED_COLS;
pub(crate) const RC_COLS: Range<usize> = RANGE_COUNTER + 1..RANGE_COUNTER + 1 + 2 * NUM_SHARED_COLS;
pub(crate) const RC_FREQUENCIES: usize = RANGE_COUNTER + 1;

pub const NUM_ARITH_COLUMNS: usize = START_SHARED_COLS + NUM_SHARED_COLS + NUM_RANGE_CHECK_COLS;
pub const NUM_ARITH_COLUMNS: usize = START_SHARED_COLS + NUM_SHARED_COLS + 2;
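The saving shows up directly in the constants: the old scheme reserved NUM_RANGE_CHECK_COLS = 1 + 2 * NUM_SHARED_COLS auxiliary columns (one counter plus a permuted copy of every shared column and of the counter), while logUp needs only 2 (RANGE_COUNTER and RC_FREQUENCIES). NUM_ARITH_COLUMNS therefore shrinks by (1 + 2 * NUM_SHARED_COLS) - 2 = 2 * NUM_SHARED_COLS - 1 columns.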

@@ -47,12 +47,12 @@ use plonky2::util::transpose;
use super::NUM_BYTES;
use crate::byte_packing::columns::{
    index_bytes, value_bytes, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, BYTE_INDICES_COLS, IS_READ,
    NUM_COLUMNS, RANGE_COUNTER, RC_COLS, SEQUENCE_END, TIMESTAMP,
    NUM_COLUMNS, RANGE_COUNTER, RC_FREQUENCIES, SEQUENCE_END, TIMESTAMP,
};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols};
use crate::lookup::Lookup;
use crate::stark::Stark;
use crate::witness::memory::MemoryAddress;

@@ -240,11 +240,18 @@ impl<F: RichField + Extendable<D>, const D: usize> BytePackingStark<F, D> {
        // For each column c in cols, generate the range-check
        // permutations and put them in the corresponding range-check
        // columns rc_c and rc_c+1.
        for (i, rc_c) in (0..NUM_BYTES).zip(RC_COLS.step_by(2)) {
            let c = value_bytes(i);
            let (col_perm, table_perm) = permuted_cols(&cols[c], &cols[RANGE_COUNTER]);
            cols[rc_c].copy_from_slice(&col_perm);
            cols[rc_c + 1].copy_from_slice(&table_perm);
        for col in 0..NUM_BYTES {
            for i in 0..n_rows {
                let c = value_bytes(col);
                let x = cols[c][i].to_canonical_u64() as usize;
                assert!(
                    x < BYTE_RANGE_MAX,
                    "column value {} exceeds the max range value {}",
                    x,
                    BYTE_RANGE_MAX
                );
                cols[RC_FREQUENCIES][x] += F::ONE;
            }
        }
    }

@@ -296,11 +303,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        // Range check all the columns
        for col in RC_COLS.step_by(2) {
            eval_lookups(vars, yield_constr, col, col + 1);
        }

        let local_values: &[P; NUM_COLUMNS] = vars.get_local_values().try_into().unwrap();
        let next_values: &[P; NUM_COLUMNS] = vars.get_next_values().try_into().unwrap();

@@ -409,11 +411,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        // Range check all the columns
        for col in RC_COLS.step_by(2) {
            eval_lookups_circuit(builder, vars, yield_constr, col, col + 1);
        }

        let local_values: &[ExtensionTarget<D>; NUM_COLUMNS] =
            vars.get_local_values().try_into().unwrap();
        let next_values: &[ExtensionTarget<D>; NUM_COLUMNS] =

@@ -556,6 +553,14 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
    fn constraint_degree(&self) -> usize {
        3
    }

    fn lookups(&self) -> Vec<Lookup> {
        vec![Lookup {
            columns: (value_bytes(0)..value_bytes(0) + NUM_BYTES).collect(),
            table_column: RANGE_COUNTER,
            frequencies_column: RC_FREQUENCIES,
        }]
    }
}

#[cfg(test)]

@@ -39,7 +39,6 @@ pub(crate) const fn value_bytes(i: usize) -> usize {
// The two permutations associated to the byte in column i will be in
// columns RC_COLS[2i] and RC_COLS[2i+1].
pub(crate) const RANGE_COUNTER: usize = BYTES_VALUES_START + NUM_BYTES;
pub(crate) const NUM_RANGE_CHECK_COLS: usize = 1 + 2 * NUM_BYTES;
pub(crate) const RC_COLS: Range<usize> = RANGE_COUNTER + 1..RANGE_COUNTER + NUM_RANGE_CHECK_COLS;
pub(crate) const RC_FREQUENCIES: usize = RANGE_COUNTER + 1;

pub(crate) const NUM_COLUMNS: usize = RANGE_COUNTER + NUM_RANGE_CHECK_COLS;
pub(crate) const NUM_COLUMNS: usize = RANGE_COUNTER + 2;

@@ -1,4 +1,5 @@
use std::borrow::Borrow;
use std::fmt::Debug;
use std::iter::repeat;

use anyhow::{ensure, Result};

@@ -8,16 +9,20 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::plonk_common::{
    reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit,
};
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};

use crate::all_stark::{Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::permutation::{GrandProductChallenge, GrandProductChallengeSet};
use crate::proof::{StarkProofTarget, StarkProofWithMetadata};
use crate::stark::Stark;

@@ -322,6 +327,128 @@ impl<F: Field> CtlData<F> {
    }
}

/// Randomness for a single instance of a permutation check protocol.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
    /// Randomness used to combine multiple columns into one.
    pub(crate) beta: T,
    /// Random offset that's added to the beta-reduced column values.
    pub(crate) gamma: T,
}

impl<F: Field> GrandProductChallenge<F> {
    pub(crate) fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(
        &self,
        terms: T,
    ) -> P
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
        T::IntoIter: DoubleEndedIterator,
    {
        reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma)
    }
}
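For reference, `combine` evaluates the random linear combination used throughout the cross-table-lookup argument: given terms $t_0, \dots, t_{k-1}$ it returns $\gamma + \sum_i \beta^i t_i$. The `DoubleEndedIterator` bound is there because `reduce_with_powers` folds from the highest power down, Horner-style.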

impl GrandProductChallenge<Target> {
    pub(crate) fn combine_circuit<F: RichField + Extendable<D>, const D: usize>(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        terms: &[ExtensionTarget<D>],
    ) -> ExtensionTarget<D> {
        let reduced = reduce_with_powers_ext_circuit(builder, terms, self.beta);
        let gamma = builder.convert_to_ext(self.gamma);
        builder.add_extension(reduced, gamma)
    }
}

impl GrandProductChallenge<Target> {
    pub(crate) fn combine_base_circuit<F: RichField + Extendable<D>, const D: usize>(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        terms: &[Target],
    ) -> Target {
        let reduced = reduce_with_powers_circuit(builder, terms, self.beta);
        builder.add(reduced, self.gamma)
    }
}

/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallengeSet<T: Copy + Eq + PartialEq + Debug> {
    pub(crate) challenges: Vec<GrandProductChallenge<T>>,
}

impl GrandProductChallengeSet<Target> {
    pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
        buffer.write_usize(self.challenges.len())?;
        for challenge in &self.challenges {
            buffer.write_target(challenge.beta)?;
            buffer.write_target(challenge.gamma)?;
        }
        Ok(())
    }

    pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
        let length = buffer.read_usize()?;
        let mut challenges = Vec::with_capacity(length);
        for _ in 0..length {
            challenges.push(GrandProductChallenge {
                beta: buffer.read_target()?,
                gamma: buffer.read_target()?,
            });
        }

        Ok(GrandProductChallengeSet { challenges })
    }
}

fn get_grand_product_challenge<F: RichField, H: Hasher<F>>(
    challenger: &mut Challenger<F, H>,
) -> GrandProductChallenge<F> {
    let beta = challenger.get_challenge();
    let gamma = challenger.get_challenge();
    GrandProductChallenge { beta, gamma }
}

pub(crate) fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
    challenger: &mut Challenger<F, H>,
    num_challenges: usize,
) -> GrandProductChallengeSet<F> {
    let challenges = (0..num_challenges)
        .map(|_| get_grand_product_challenge(challenger))
        .collect();
    GrandProductChallengeSet { challenges }
}

fn get_grand_product_challenge_target<
    F: RichField + Extendable<D>,
    H: AlgebraicHasher<F>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, H, D>,
) -> GrandProductChallenge<Target> {
    let beta = challenger.get_challenge(builder);
    let gamma = challenger.get_challenge(builder);
    GrandProductChallenge { beta, gamma }
}

pub(crate) fn get_grand_product_challenge_set_target<
    F: RichField + Extendable<D>,
    H: AlgebraicHasher<F>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, H, D>,
    num_challenges: usize,
) -> GrandProductChallengeSet<Target> {
    let challenges = (0..num_challenges)
        .map(|_| get_grand_product_challenge_target(builder, challenger))
        .collect();
    GrandProductChallengeSet { challenges }
}

pub(crate) fn cross_table_lookup_data<F: RichField, const D: usize>(
    trace_poly_values: &[Vec<PolynomialValues<F>>; NUM_TABLES],
    cross_table_lookups: &[CrossTableLookup<F>],

@@ -423,15 +550,15 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
        proofs: &[StarkProofWithMetadata<F, C, D>; NUM_TABLES],
        cross_table_lookups: &'a [CrossTableLookup<F>],
        ctl_challenges: &'a GrandProductChallengeSet<F>,
        num_permutation_zs: &[usize; NUM_TABLES],
        num_lookup_columns: &[usize; NUM_TABLES],
    ) -> [Vec<Self>; NUM_TABLES] {
        let mut ctl_zs = proofs
            .iter()
            .zip(num_permutation_zs)
            .map(|(p, &num_perms)| {
            .zip(num_lookup_columns)
            .map(|(p, &num_lookup)| {
                let openings = &p.proof.openings;
                let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms);
                let ctl_zs_next = openings.permutation_ctl_zs_next.iter().skip(num_perms);
                let ctl_zs = openings.auxiliary_polys.iter().skip(num_lookup);
                let ctl_zs_next = openings.auxiliary_polys_next.iter().skip(num_lookup);
                ctl_zs.zip(ctl_zs_next)
            })
            .collect::<Vec<_>>();
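The `skip(num_lookup)` above encodes the new layout of each table's auxiliary commitment: the lookup helper columns come first, the cross-table-lookup `Z` columns follow. A hypothetical helper (not part of the diff) making the split explicit:

// Split a table's auxiliary polynomial openings into lookup helpers and CTL Z columns.
// `num_lookup_columns` is the per-table value from `AllStark::num_lookups_helper_columns`.
fn split_auxiliary<T>(auxiliary_polys: &[T], num_lookup_columns: usize) -> (&[T], &[T]) {
    auxiliary_polys.split_at(num_lookup_columns)
}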

@@ -528,15 +655,15 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> {
        proof: &StarkProofTarget<D>,
        cross_table_lookups: &'a [CrossTableLookup<F>],
        ctl_challenges: &'a GrandProductChallengeSet<Target>,
        num_permutation_zs: usize,
        num_lookup_columns: usize,
    ) -> Vec<Self> {
        let mut ctl_zs = {
            let openings = &proof.openings;
            let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_permutation_zs);
            let ctl_zs = openings.auxiliary_polys.iter().skip(num_lookup_columns);
            let ctl_zs_next = openings
                .permutation_ctl_zs_next
                .auxiliary_polys_next
                .iter()
                .skip(num_permutation_zs);
                .skip(num_lookup_columns);
            ctl_zs.zip(ctl_zs_next)
        };

@@ -29,10 +29,12 @@ use plonky2_util::log2_ceil;

use crate::all_stark::{all_cross_table_lookups, AllStark, Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::cross_table_lookup::{verify_cross_table_lookups_circuit, CrossTableLookup};
use crate::cross_table_lookup::{
    get_grand_product_challenge_set_target, verify_cross_table_lookups_circuit, CrossTableLookup,
    GrandProductChallengeSet,
};
use crate::generation::GenerationInputs;
use crate::get_challenges::observe_public_values_target;
use crate::permutation::{get_grand_product_challenge_set_target, GrandProductChallengeSet};
use crate::proof::{
    BlockHashesTarget, BlockMetadataTarget, ExtraBlockDataTarget, PublicValues, PublicValuesTarget,
    StarkProofWithMetadata, TrieRootsTarget,

@@ -8,10 +8,7 @@ use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};

use crate::all_stark::{AllStark, NUM_TABLES};
use crate::config::StarkConfig;
use crate::permutation::{
    get_grand_product_challenge_set, get_n_grand_product_challenge_sets,
    get_n_grand_product_challenge_sets_target,
};
use crate::cross_table_lookup::get_grand_product_challenge_set;
use crate::proof::*;
use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64};
use crate::witness::errors::ProgramError;

@@ -206,7 +203,6 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
    /// Computes all Fiat-Shamir challenges used in the STARK proof.
    pub(crate) fn get_challenges(
        &self,
        all_stark: &AllStark<F, D>,
        config: &StarkConfig,
    ) -> Result<AllProofChallenges<F, D>, ProgramError> {
        let mut challenger = Challenger::<F, C::Hasher>::new();

@@ -220,18 +216,12 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
        let ctl_challenges =
            get_grand_product_challenge_set(&mut challenger, config.num_challenges);

        let num_permutation_zs = all_stark.nums_permutation_zs(config);
        let num_permutation_batch_sizes = all_stark.permutation_batch_sizes();

        Ok(AllProofChallenges {
            stark_challenges: core::array::from_fn(|i| {
                challenger.compact();
                self.stark_proofs[i].proof.get_challenges(
                    &mut challenger,
                    num_permutation_zs[i] > 0,
                    num_permutation_batch_sizes[i],
                    config,
                )
                self.stark_proofs[i]
                    .proof
                    .get_challenges(&mut challenger, config)
            }),
            ctl_challenges,
        })

@@ -254,17 +244,13 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
        let ctl_challenges =
            get_grand_product_challenge_set(&mut challenger, config.num_challenges);

        let num_permutation_zs = all_stark.nums_permutation_zs(config);
        let num_permutation_batch_sizes = all_stark.permutation_batch_sizes();
        let lookups = all_stark.num_lookups_helper_columns(config);

        let mut challenger_states = vec![challenger.compact()];
        for i in 0..NUM_TABLES {
            self.stark_proofs[i].proof.get_challenges(
                &mut challenger,
                num_permutation_zs[i] > 0,
                num_permutation_batch_sizes[i],
                config,
            );
            self.stark_proofs[i]
                .proof
                .get_challenges(&mut challenger, config);
            challenger_states.push(challenger.compact());
        }

@@ -284,14 +270,12 @@ where
    pub(crate) fn get_challenges(
        &self,
        challenger: &mut Challenger<F, C::Hasher>,
        stark_use_permutation: bool,
        stark_permutation_batch_size: usize,
        config: &StarkConfig,
    ) -> StarkProofChallenges<F, D> {
        let degree_bits = self.recover_degree_bits(config);

        let StarkProof {
            permutation_ctl_zs_cap,
            auxiliary_polys_cap,
            quotient_polys_cap,
            openings,
            opening_proof:

@@ -306,15 +290,7 @@ where
        let num_challenges = config.num_challenges;

        let permutation_challenge_sets = stark_use_permutation.then(|| {
            get_n_grand_product_challenge_sets(
                challenger,
                num_challenges,
                stark_permutation_batch_size,
            )
        });

        challenger.observe_cap(permutation_ctl_zs_cap);
        challenger.observe_cap(auxiliary_polys_cap);

        let stark_alphas = challenger.get_n_challenges(num_challenges);

@@ -324,7 +300,6 @@ where
        challenger.observe_openings(&openings.to_fri_openings());

        StarkProofChallenges {
            permutation_challenge_sets,
            stark_alphas,
            stark_zeta,
            fri_challenges: challenger.fri_challenges::<C, D>(

@@ -343,15 +318,13 @@ impl<const D: usize> StarkProofTarget<D> {
        &self,
        builder: &mut CircuitBuilder<F, D>,
        challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
        stark_use_permutation: bool,
        stark_permutation_batch_size: usize,
        config: &StarkConfig,
    ) -> StarkProofChallengesTarget<D>
    where
        C::Hasher: AlgebraicHasher<F>,
    {
        let StarkProofTarget {
            permutation_ctl_zs_cap,
            auxiliary_polys_cap: auxiliary_polys,
            quotient_polys_cap,
            openings,
            opening_proof:

@@ -366,16 +339,7 @@ impl<const D: usize> StarkProofTarget<D> {
        let num_challenges = config.num_challenges;

        let permutation_challenge_sets = stark_use_permutation.then(|| {
            get_n_grand_product_challenge_sets_target(
                builder,
                challenger,
                num_challenges,
                stark_permutation_batch_size,
            )
        });

        challenger.observe_cap(permutation_ctl_zs_cap);
        challenger.observe_cap(auxiliary_polys);

        let stark_alphas = challenger.get_n_challenges(builder, num_challenges);

@@ -385,7 +349,6 @@ impl<const D: usize> StarkProofTarget<D> {
        challenger.observe_openings(&openings.to_fri_openings(builder.zero()));

        StarkProofChallengesTarget {
            permutation_challenge_sets,
            stark_alphas,
            stark_zeta,
            fri_challenges: challenger.fri_challenges(

@@ -652,10 +652,11 @@ mod tests {
    use tiny_keccak::keccakf;

    use crate::config::StarkConfig;
    use crate::cross_table_lookup::{CtlData, CtlZData};
    use crate::cross_table_lookup::{
        CtlData, CtlZData, GrandProductChallenge, GrandProductChallengeSet,
    };
    use crate::keccak::columns::reg_output_limb;
    use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS};
    use crate::permutation::GrandProductChallenge;
    use crate::prover::prove_single_table;
    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

@@ -769,7 +770,7 @@ mod tests {
            filter_column: None,
        };
        let ctl_data = CtlData {
            zs_columns: vec![ctl_z_data; config.num_challenges],
            zs_columns: vec![ctl_z_data.clone(); config.num_challenges],
        };

        prove_single_table(

@@ -778,6 +779,9 @@ mod tests {
            &trace_poly_values,
            &trace_commitments,
            &ctl_data,
            &GrandProductChallengeSet {
                challenges: vec![ctl_z_data.challenge; config.num_challenges],
            },
            &mut Challenger::new(),
            &mut timing,
        )?;

@@ -23,7 +23,6 @@ pub mod keccak_sponge;
pub mod logic;
pub mod lookup;
pub mod memory;
pub mod permutation;
pub mod proof;
pub mod prover;
pub mod recursive_verifier;

@@ -1,134 +1,248 @@
use std::cmp::Ordering;

use itertools::Itertools;
use plonky2::field::extension::Extendable;
use num_bigint::BigUint;
use plonky2::field::batch_util::batch_add_inplace;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::types::{Field, PrimeField64};
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2_util::ceil_div_usize;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::stark::Stark;

pub(crate) fn eval_lookups<F: Field, P: PackedField<Scalar = F>, E: StarkEvaluationFrame<P>>(
    vars: &E,
    yield_constr: &mut ConstraintConsumer<P>,
    col_permuted_input: usize,
    col_permuted_table: usize,
) {
    let local_values = vars.get_local_values();
    let next_values = vars.get_next_values();

    let local_perm_input = local_values[col_permuted_input];
    let next_perm_table = next_values[col_permuted_table];
    let next_perm_input = next_values[col_permuted_input];

    // A "vertical" diff between the local and next permuted inputs.
    let diff_input_prev = next_perm_input - local_perm_input;
    // A "horizontal" diff between the next permuted input and permuted table value.
    let diff_input_table = next_perm_input - next_perm_table;

    yield_constr.constraint(diff_input_prev * diff_input_table);

    // This is actually constraining the first row, as per the spec, since `diff_input_table`
    // is a diff of the next row's values. In the context of `constraint_last_row`, the next
    // row is the first row.
    yield_constr.constraint_last_row(diff_input_table);
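For contrast with logUp, the constraint pair above is the Halo2-style permutation lookup: writing $A'$ for the permuted input column and $S'$ for the permuted table column, it enforces

$$\big(A'(\omega x) - A'(x)\big) \cdot \big(A'(\omega x) - S'(\omega x)\big) = 0,$$

so on every row the permuted input either repeats the previous input or equals the table value next to it; with the wrap-around row handled by `constraint_last_row`, every input value must occur in the table.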

pub struct Lookup {
    /// Columns whose values should be contained in the lookup table.
    /// These are the f_i(x) polynomials in the logUp paper.
    pub(crate) columns: Vec<usize>,
    /// Column containing the lookup table.
    /// This is the t(x) polynomial in the paper.
    pub(crate) table_column: usize,
    /// Column containing the frequencies of `columns` in `table_column`.
    /// This is the m(x) polynomial in the paper.
    pub(crate) frequencies_column: usize,
}

pub(crate) fn eval_lookups_circuit<
impl Lookup {
    pub(crate) fn num_helper_columns(&self, constraint_degree: usize) -> usize {
        // One helper column for each column batch of size `constraint_degree-1`,
        // then one column for the inverse of `table + challenge` and one for the `Z` polynomial.
        ceil_div_usize(self.columns.len(), constraint_degree - 1) + 1
    }
}
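A quick instance of the formula: with `constraint_degree = 3` the batch size is 2, so a `Lookup` over 11 columns uses ceil(11 / 2) + 1 = 7 helper columns, six batched h_i columns plus the running-sum `Z` column that `lookup_helper_columns` below appends last.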

/// logUp protocol from https://ia.cr/2022/1530
/// Compute the helper columns for the lookup argument.
/// Given columns `f0,...,fk` and a column `t`, such that `∪fi ⊆ t`, and challenges `x`,
/// this computes the helper columns `h_i = 1/(x+f_2i) + 1/(x+f_2i+1)`, `g = 1/(x+t)`,
/// and `Z(gx) = Z(x) + sum h_i(x) - m(x)g(x)` where `m` is the frequencies column.
pub(crate) fn lookup_helper_columns<F: Field>(
    lookup: &Lookup,
    trace_poly_values: &[PolynomialValues<F>],
    challenge: F,
    constraint_degree: usize,
) -> Vec<PolynomialValues<F>> {
    assert_eq!(
        constraint_degree, 3,
        "TODO: Allow other constraint degrees."
    );

    let num_total_logup_entries = trace_poly_values[0].values.len() * lookup.columns.len();
    assert!(BigUint::from(num_total_logup_entries) < F::characteristic());

    let num_helper_columns = lookup.num_helper_columns(constraint_degree);
    let mut helper_columns: Vec<PolynomialValues<F>> = Vec::with_capacity(num_helper_columns);

    // For each batch of `constraint_degree-1` columns `fi`, compute `sum 1/(f_i+challenge)` and
    // add it to the helper columns.
    // TODO: This does one batch inversion per column. It would also be possible to do one batch inversion
    // for every group of columns, but that would require building a big vector of all the columns concatenated.
    // Not sure which approach is better.
    // Note: these are the h_k(x) polynomials in the paper, with a few differences:
    // * Here, the first ratio m_0(x)/phi_0(x) is not included with the columns batched up to create the
    //   h_k polynomials; instead there's a separate helper column for it (see below).
    // * Here, we use 1 instead of -1 as the numerator (and subtract later).
    // * Here, for now, the batch size (l) is always constraint_degree - 1 = 2.
    for mut col_inds in &lookup.columns.iter().chunks(constraint_degree - 1) {
        let first = *col_inds.next().unwrap();
        // TODO: The clone could probably be avoided by using a modified version of `batch_multiplicative_inverse`
        // taking `challenge` as an additional argument.
        let mut column = trace_poly_values[first].values.clone();
        for x in column.iter_mut() {
            *x = challenge + *x;
        }
        let mut acc = F::batch_multiplicative_inverse(&column);
        for &ind in col_inds {
            let mut column = trace_poly_values[ind].values.clone();
            for x in column.iter_mut() {
                *x = challenge + *x;
            }
            column = F::batch_multiplicative_inverse(&column);
            batch_add_inplace(&mut acc, &column);
        }
        helper_columns.push(acc.into());
    }

    // Add `1/(table+challenge)` to the helper columns.
    // This is 1/phi_0(x) = 1/(x + t(x)) from the paper.
    // Here, we don't include m(x) in the numerator, instead multiplying it with this column later.
    let mut table = trace_poly_values[lookup.table_column].values.clone();
    for x in table.iter_mut() {
        *x = challenge + *x;
    }
    let table_inverse: Vec<F> = F::batch_multiplicative_inverse(&table);

    // Compute the `Z` polynomial with `Z(1)=0` and `Z(gx) = Z(x) + sum h_i(x) - frequencies(x)g(x)`.
    // This enforces the check from the paper, that the sum of the h_k(x) polynomials is 0 over H.
    // In the paper, that sum includes m(x)/(x + t(x)) = frequencies(x)g(x), because that was bundled
    // into the h_k(x) polynomials.
    let frequencies = &trace_poly_values[lookup.frequencies_column].values;
    let mut z = Vec::with_capacity(frequencies.len());
    z.push(F::ZERO);
    for i in 0..frequencies.len() - 1 {
        let x = helper_columns[..num_helper_columns - 1]
            .iter()
            .map(|col| col.values[i])
            .sum::<F>()
            - frequencies[i] * table_inverse[i];
        z.push(z[i] + x);
    }
    helper_columns.push(z.into());

    helper_columns
}
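A tiny worked example of this construction (one looked-up column, so a single h column): take f_0 = [1, 2, 2, 2], t = [1, 2, 3, 4] and hence m = [1, 3, 0, 0]. At a challenge x the helper values are h_0(r) = 1/(x + f_0(r)) and g(r) = 1/(x + t(r)), and Z accumulates Z(r+1) = Z(r) + h_0(r) - m(r) g(r) from Z(0) = 0. Over all four rows the h_0 values sum to 1/(x+1) + 3/(x+2), which is exactly the sum of the m(r) g(r) values, so the net increment is 0 and Z wraps back to 0: the logUp sum check.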

pub struct LookupCheckVars<F, FE, P, const D2: usize>
where
    F: Field,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
{
    pub(crate) local_values: Vec<P>,
    pub(crate) next_values: Vec<P>,
    pub(crate) challenges: Vec<F>,
}

/// Constraints for the logUp lookup argument.
pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2: usize>(
    stark: &S,
    lookups: &[Lookup],
    vars: &S::EvaluationFrame<FE, P, D2>,
    lookup_vars: LookupCheckVars<F, FE, P, D2>,
    yield_constr: &mut ConstraintConsumer<P>,
) where
    F: RichField + Extendable<D>,
    E: StarkEvaluationFrame<ExtensionTarget<D>>,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
    S: Stark<F, D>,
{
    let degree = stark.constraint_degree();
    assert_eq!(degree, 3, "TODO: Allow other constraint degrees.");
    let mut start = 0;
    for lookup in lookups {
        let num_helper_columns = lookup.num_helper_columns(degree);
        for &challenge in &lookup_vars.challenges {
            let challenge = FE::from_basefield(challenge);
            // For each chunk, check that `h_i (x+f_2i) (x+f_2i+1) = (x+f_2i) + (x+f_2i+1)` if the chunk has length 2,
            // or if it has length 1, check that `h_i * (x+f_2i) = 1`, where x is the challenge.
            for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() {
                let mut x = lookup_vars.local_values[start + j];
                let mut y = P::ZEROS;
                let fs = chunk.iter().map(|&k| vars.get_local_values()[k]);
                for f in fs {
                    x *= f + challenge;
                    y += f + challenge;
                }
                match chunk.len() {
                    2 => yield_constr.constraint(x - y),
                    1 => yield_constr.constraint(x - P::ONES),
                    _ => todo!("Allow other constraint degrees."),
                }
            }

            // Check the `Z` polynomial.
            let z = lookup_vars.local_values[start + num_helper_columns - 1];
            let next_z = lookup_vars.next_values[start + num_helper_columns - 1];
            let table_with_challenge = vars.get_local_values()[lookup.table_column] + challenge;
            let y = lookup_vars.local_values[start..start + num_helper_columns - 1]
                .iter()
                .fold(P::ZEROS, |acc, x| acc + *x)
                * table_with_challenge
                - vars.get_local_values()[lookup.frequencies_column];
            yield_constr.constraint((next_z - z) * table_with_challenge - y);
            start += num_helper_columns;
        }
    }
}
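The batched constraint is just the defining equation of h_i with denominators cleared: for a length-2 chunk,

$$h_i = \frac{1}{x + f_{2i}} + \frac{1}{x + f_{2i+1}} \iff h_i (x + f_{2i})(x + f_{2i+1}) = (x + f_{2i}) + (x + f_{2i+1}),$$

a degree-3 polynomial identity in the trace columns, which is why the batch size is pinned to `constraint_degree - 1 = 2`.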

pub struct LookupCheckVarsTarget<const D: usize> {
    pub(crate) local_values: Vec<ExtensionTarget<D>>,
    pub(crate) next_values: Vec<ExtensionTarget<D>>,
    pub(crate) challenges: Vec<Target>,
}

pub(crate) fn eval_ext_lookups_circuit<
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    vars: &E,
    stark: &S,
    vars: &S::EvaluationFrameTarget,
    lookup_vars: LookupCheckVarsTarget<D>,
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    col_permuted_input: usize,
    col_permuted_table: usize,
) {
    let local_values = vars.get_local_values();
    let next_values = vars.get_next_values();

    let local_perm_input = local_values[col_permuted_input];
    let next_perm_table = next_values[col_permuted_table];
    let next_perm_input = next_values[col_permuted_input];

    // A "vertical" diff between the local and next permuted inputs.
    let diff_input_prev = builder.sub_extension(next_perm_input, local_perm_input);
    // A "horizontal" diff between the next permuted input and permuted table value.
    let diff_input_table = builder.sub_extension(next_perm_input, next_perm_table);

    let diff_product = builder.mul_extension(diff_input_prev, diff_input_table);
    yield_constr.constraint(builder, diff_product);

    // This is actually constraining the first row, as per the spec, since `diff_input_table`
    // is a diff of the next row's values. In the context of `constraint_last_row`, the next
    // row is the first row.
    yield_constr.constraint_last_row(builder, diff_input_table);
}

/// Given an input column and a table column, generate the permuted input and permuted table columns
/// used in the Halo2 permutation argument.
pub fn permuted_cols<F: PrimeField64>(inputs: &[F], table: &[F]) -> (Vec<F>, Vec<F>) {
    let n = inputs.len();

    // The permuted inputs do not have to be ordered, but we found that sorting was faster than
    // hash-based grouping. We also sort the table, as this helps us identify "unused" table
    // elements efficiently.

    // To compare elements, e.g. for sorting, we first need them in canonical form. It would be
    // wasteful to canonicalize in each comparison, as a single element may be involved in many
    // comparisons. So we will canonicalize once upfront, then use `to_noncanonical_u64` when
    // comparing elements.

    let sorted_inputs = inputs
        .iter()
        .map(|x| x.to_canonical())
        .sorted_unstable_by_key(|x| x.to_noncanonical_u64())
        .collect_vec();
    let sorted_table = table
        .iter()
        .map(|x| x.to_canonical())
        .sorted_unstable_by_key(|x| x.to_noncanonical_u64())
        .collect_vec();

    let mut unused_table_inds = Vec::with_capacity(n);
    let mut unused_table_vals = Vec::with_capacity(n);
    let mut permuted_table = vec![F::ZERO; n];
    let mut i = 0;
    let mut j = 0;
    while (j < n) && (i < n) {
        let input_val = sorted_inputs[i].to_noncanonical_u64();
        let table_val = sorted_table[j].to_noncanonical_u64();
        match input_val.cmp(&table_val) {
            Ordering::Greater => {
                unused_table_vals.push(sorted_table[j]);
                j += 1;
            }
            Ordering::Less => {
                if let Some(x) = unused_table_vals.pop() {
                    permuted_table[i] = x;
                } else {
                    unused_table_inds.push(i);
    let one = builder.one_extension();
    let degree = stark.constraint_degree();
    let lookups = stark.lookups();
    assert_eq!(degree, 3, "TODO: Allow other constraint degrees.");
    let mut start = 0;
    for lookup in lookups {
        let num_helper_columns = lookup.num_helper_columns(degree);
        for &challenge in &lookup_vars.challenges {
            let challenge = builder.convert_to_ext(challenge);
            for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() {
                let mut x = lookup_vars.local_values[start + j];
                let mut y = builder.zero_extension();
                let fs = chunk.iter().map(|&k| vars.get_local_values()[k]);
                for f in fs {
                    let tmp = builder.add_extension(f, challenge);
                    x = builder.mul_extension(x, tmp);
                    y = builder.add_extension(y, tmp);
                }
                match chunk.len() {
                    2 => {
                        let tmp = builder.sub_extension(x, y);
                        yield_constr.constraint(builder, tmp)
                    }
                    1 => {
                        let tmp = builder.sub_extension(x, one);
                        yield_constr.constraint(builder, tmp)
                    }
                    _ => todo!("Allow other constraint degrees."),
                }
                i += 1;
            }
            Ordering::Equal => {
                permuted_table[i] = sorted_table[j];
                i += 1;
                j += 1;
            }

            let z = lookup_vars.local_values[start + num_helper_columns - 1];
            let next_z = lookup_vars.next_values[start + num_helper_columns - 1];
            let table_with_challenge =
                builder.add_extension(vars.get_local_values()[lookup.table_column], challenge);
            let mut y = builder.add_many_extension(
                &lookup_vars.local_values[start..start + num_helper_columns - 1],
            );

            y = builder.mul_extension(y, table_with_challenge);
            y = builder.sub_extension(y, vars.get_local_values()[lookup.frequencies_column]);

            let mut constraint = builder.sub_extension(next_z, z);
            constraint = builder.mul_extension(constraint, table_with_challenge);
            constraint = builder.sub_extension(constraint, y);
            yield_constr.constraint(builder, constraint);
            start += num_helper_columns;
        }
    }

    unused_table_vals.extend_from_slice(&sorted_table[j..n]);
    unused_table_inds.extend(i..n);

    for (ind, val) in unused_table_inds.into_iter().zip_eq(unused_table_vals) {
        permuted_table[ind] = val;
    }

    (sorted_inputs, permuted_table)
}

@@ -31,8 +31,7 @@ pub(crate) const VIRTUAL_FIRST_CHANGE: usize = SEGMENT_FIRST_CHANGE + 1;
pub(crate) const RANGE_CHECK: usize = VIRTUAL_FIRST_CHANGE + 1;
// The counter column (used for the range check) starts from 0 and increments.
pub(crate) const COUNTER: usize = RANGE_CHECK + 1;
// Helper columns for the permutation argument used to enforce the range check.
pub(crate) const RANGE_CHECK_PERMUTED: usize = COUNTER + 1;
pub(crate) const COUNTER_PERMUTED: usize = RANGE_CHECK_PERMUTED + 1;
// The frequencies column used in logUp.
pub(crate) const FREQUENCIES: usize = COUNTER + 1;

pub(crate) const NUM_COLUMNS: usize = COUNTER_PERMUTED + 1;
pub(crate) const NUM_COLUMNS: usize = FREQUENCIES + 1;

@@ -16,14 +16,13 @@ use plonky2_maybe_rayon::*;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols};
use crate::lookup::Lookup;
use crate::memory::columns::{
    value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER,
    COUNTER_PERMUTED, FILTER, IS_READ, NUM_COLUMNS, RANGE_CHECK, RANGE_CHECK_PERMUTED,
    SEGMENT_FIRST_CHANGE, TIMESTAMP, VIRTUAL_FIRST_CHANGE,
    value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER, FILTER,
    FREQUENCIES, IS_READ, NUM_COLUMNS, RANGE_CHECK, SEGMENT_FIRST_CHANGE, TIMESTAMP,
    VIRTUAL_FIRST_CHANGE,
};
use crate::memory::VALUE_LIMBS;
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::witness::memory::MemoryOpKind::Read;
use crate::witness::memory::{MemoryAddress, MemoryOp};

@@ -139,16 +138,16 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
        trace_rows
    }

    /// Generates the `COUNTER`, `RANGE_CHECK_PERMUTED` and `COUNTER_PERMUTED` columns, given a
    /// Generates the `COUNTER`, `RANGE_CHECK` and `FREQUENCIES` columns, given a
    /// trace in column-major form.
    fn generate_trace_col_major(trace_col_vecs: &mut [Vec<F>]) {
        let height = trace_col_vecs[0].len();
        trace_col_vecs[COUNTER] = (0..height).map(|i| F::from_canonical_usize(i)).collect();

        let (permuted_inputs, permuted_table) =
            permuted_cols(&trace_col_vecs[RANGE_CHECK], &trace_col_vecs[COUNTER]);
        trace_col_vecs[RANGE_CHECK_PERMUTED] = permuted_inputs;
        trace_col_vecs[COUNTER_PERMUTED] = permuted_table;
        for i in 0..height {
            let x = trace_col_vecs[RANGE_CHECK][i].to_canonical_u64() as usize;
            trace_col_vecs[FREQUENCIES][x] += F::ONE;
        }
    }

    /// This memory STARK orders rows by `(context, segment, virt, timestamp)`. To enforce the

@@ -324,8 +323,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
                next_is_read * address_unchanged * (next_values_limbs[i] - value_limbs[i]),
            );
        }

        eval_lookups(vars, yield_constr, RANGE_CHECK_PERMUTED, COUNTER_PERMUTED)
    }

    fn eval_ext_circuit(

@@ -448,25 +445,18 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
            let read_constraint = builder.mul_extension(next_is_read, zero_if_read);
            yield_constr.constraint_transition(builder, read_constraint);
        }

        eval_lookups_circuit(
            builder,
            vars,
            yield_constr,
            RANGE_CHECK_PERMUTED,
            COUNTER_PERMUTED,
        )
    }

    fn constraint_degree(&self) -> usize {
        3
    }

    fn permutation_pairs(&self) -> Vec<PermutationPair> {
        vec![
            PermutationPair::singletons(RANGE_CHECK, RANGE_CHECK_PERMUTED),
            PermutationPair::singletons(COUNTER, COUNTER_PERMUTED),
        ]
    fn lookups(&self) -> Vec<Lookup> {
        vec![Lookup {
            columns: vec![RANGE_CHECK],
            table_column: COUNTER,
            frequencies_column: FREQUENCIES,
        }]
    }
}

@@ -1,462 +0,0 @@
//! Permutation arguments.

use std::fmt::Debug;

use itertools::Itertools;
use plonky2::field::batch_util::batch_multiply_inplace;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, Hasher};
use plonky2::plonk::plonk_common::{
    reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit,
};
use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget};
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
use plonky2_maybe_rayon::*;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::stark::Stark;

/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another.
/// In particular, there should exist some permutation `pi` such that for any `i`,
/// `trace[lhs[i]] = pi(trace[rhs[i]])`. Here `trace` denotes the trace in column-major form, so
/// `trace[col]` is a column vector.
pub struct PermutationPair {
    /// Each entry contains two column indices, representing two columns which should be
    /// permutations of one another.
    pub column_pairs: Vec<(usize, usize)>,
}

impl PermutationPair {
    pub fn singletons(lhs: usize, rhs: usize) -> Self {
        Self {
            column_pairs: vec![(lhs, rhs)],
        }
    }
}

/// A single instance of a permutation check protocol.
pub(crate) struct PermutationInstance<'a, T: Copy + Eq + PartialEq + Debug> {
    pub(crate) pair: &'a PermutationPair,
    pub(crate) challenge: GrandProductChallenge<T>,
}

/// Randomness for a single instance of a permutation check protocol.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
    /// Randomness used to combine multiple columns into one.
    pub(crate) beta: T,
    /// Random offset that's added to the beta-reduced column values.
    pub(crate) gamma: T,
}

impl<F: Field> GrandProductChallenge<F> {
    pub(crate) fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(
        &self,
        terms: T,
    ) -> P
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
        T::IntoIter: DoubleEndedIterator,
    {
        reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma)
    }
}

impl GrandProductChallenge<Target> {
    pub(crate) fn combine_circuit<F: RichField + Extendable<D>, const D: usize>(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        terms: &[ExtensionTarget<D>],
    ) -> ExtensionTarget<D> {
        let reduced = reduce_with_powers_ext_circuit(builder, terms, self.beta);
        let gamma = builder.convert_to_ext(self.gamma);
        builder.add_extension(reduced, gamma)
    }
}

impl GrandProductChallenge<Target> {
    pub(crate) fn combine_base_circuit<F: RichField + Extendable<D>, const D: usize>(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        terms: &[Target],
    ) -> Target {
        let reduced = reduce_with_powers_circuit(builder, terms, self.beta);
        builder.add(reduced, self.gamma)
    }
}

/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallengeSet<T: Copy + Eq + PartialEq + Debug> {
    pub(crate) challenges: Vec<GrandProductChallenge<T>>,
}

impl GrandProductChallengeSet<Target> {
    pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
        buffer.write_usize(self.challenges.len())?;
        for challenge in &self.challenges {
            buffer.write_target(challenge.beta)?;
            buffer.write_target(challenge.gamma)?;
        }
        Ok(())
    }

    pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
        let length = buffer.read_usize()?;
        let mut challenges = Vec::with_capacity(length);
        for _ in 0..length {
            challenges.push(GrandProductChallenge {
                beta: buffer.read_target()?,
                gamma: buffer.read_target()?,
            });
        }

        Ok(GrandProductChallengeSet { challenges })
    }
}

/// Compute all Z polynomials (for permutation arguments).
pub(crate) fn compute_permutation_z_polys<F, S, const D: usize>(
    stark: &S,
    config: &StarkConfig,
    trace_poly_values: &[PolynomialValues<F>],
    permutation_challenge_sets: &[GrandProductChallengeSet<F>],
) -> Vec<PolynomialValues<F>>
where
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
{
    let permutation_pairs = stark.permutation_pairs();
    let permutation_batches = get_permutation_batches(
        &permutation_pairs,
        permutation_challenge_sets,
        config.num_challenges,
        stark.permutation_batch_size(),
    );

    permutation_batches
        .into_par_iter()
        .map(|instances| compute_permutation_z_poly(&instances, trace_poly_values))
        .collect()
}

/// Compute a single Z polynomial.
fn compute_permutation_z_poly<F: Field>(
    instances: &[PermutationInstance<F>],
    trace_poly_values: &[PolynomialValues<F>],
) -> PolynomialValues<F> {
    let degree = trace_poly_values[0].len();
    let (reduced_lhs_polys, reduced_rhs_polys): (Vec<_>, Vec<_>) = instances
        .iter()
        .map(|instance| permutation_reduced_polys(instance, trace_poly_values, degree))
        .unzip();

    let numerator = poly_product_elementwise(reduced_lhs_polys.into_iter());
    let denominator = poly_product_elementwise(reduced_rhs_polys.into_iter());

    // Compute the quotients.
    let denominator_inverses = F::batch_multiplicative_inverse(&denominator.values);
    let mut quotients = numerator.values;
    batch_multiply_inplace(&mut quotients, &denominator_inverses);

    // Compute Z, which contains partial products of the quotients.
    let mut partial_products = Vec::with_capacity(degree);
    let mut acc = F::ONE;
    for q in quotients {
        partial_products.push(acc);
        acc *= q;
    }
    PolynomialValues::new(partial_products)
}
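For comparison with the additive logUp accumulator, the deleted grand-product `Z` column holds running products of the per-row quotients: with reduced columns $\ell$ and $r$ as computed below,

$$Z(x_0) = 1, \qquad Z(x_k) = \prod_{j < k} \frac{\ell(x_j)}{r(x_j)},$$

and the wrap-around condition that Z returns to 1 after the last row says the two reduced columns have equal products, i.e. are permutations of one another with overwhelming probability over the challenge.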
|
||||
|
||||
/// Computes the reduced polynomial, `\sum beta^i f_i(x) + gamma`, for both the "left" and "right"
|
||||
/// sides of a given `PermutationPair`.
|
||||
fn permutation_reduced_polys<F: Field>(
|
||||
instance: &PermutationInstance<F>,
|
||||
trace_poly_values: &[PolynomialValues<F>],
|
||||
degree: usize,
|
||||
) -> (PolynomialValues<F>, PolynomialValues<F>) {
|
||||
let PermutationInstance {
|
||||
pair: PermutationPair { column_pairs },
|
||||
challenge: GrandProductChallenge { beta, gamma },
|
||||
} = instance;
|
||||
|
||||
let mut reduced_lhs = PolynomialValues::constant(*gamma, degree);
|
||||
let mut reduced_rhs = PolynomialValues::constant(*gamma, degree);
|
||||
for ((lhs, rhs), weight) in column_pairs.iter().zip(beta.powers()) {
|
||||
reduced_lhs.add_assign_scaled(&trace_poly_values[*lhs], weight);
|
||||
reduced_rhs.add_assign_scaled(&trace_poly_values[*rhs], weight);
|
||||
}
|
||||
(reduced_lhs, reduced_rhs)
|
||||
}

/// Computes the elementwise product of a set of polynomials. Assumes that the set is non-empty and
/// that each polynomial has the same length.
fn poly_product_elementwise<F: Field>(
    mut polys: impl Iterator<Item = PolynomialValues<F>>,
) -> PolynomialValues<F> {
    let mut product = polys.next().expect("Expected at least one polynomial");
    for poly in polys {
        batch_multiply_inplace(&mut product.values, &poly.values)
    }
    product
}

fn get_grand_product_challenge<F: RichField, H: Hasher<F>>(
    challenger: &mut Challenger<F, H>,
) -> GrandProductChallenge<F> {
    let beta = challenger.get_challenge();
    let gamma = challenger.get_challenge();
    GrandProductChallenge { beta, gamma }
}

pub(crate) fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
    challenger: &mut Challenger<F, H>,
    num_challenges: usize,
) -> GrandProductChallengeSet<F> {
    let challenges = (0..num_challenges)
        .map(|_| get_grand_product_challenge(challenger))
        .collect();
    GrandProductChallengeSet { challenges }
}

pub(crate) fn get_n_grand_product_challenge_sets<F: RichField, H: Hasher<F>>(
    challenger: &mut Challenger<F, H>,
    num_challenges: usize,
    num_sets: usize,
) -> Vec<GrandProductChallengeSet<F>> {
    (0..num_sets)
        .map(|_| get_grand_product_challenge_set(challenger, num_challenges))
        .collect()
}

fn get_grand_product_challenge_target<
    F: RichField + Extendable<D>,
    H: AlgebraicHasher<F>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, H, D>,
) -> GrandProductChallenge<Target> {
    let beta = challenger.get_challenge(builder);
    let gamma = challenger.get_challenge(builder);
    GrandProductChallenge { beta, gamma }
}

pub(crate) fn get_grand_product_challenge_set_target<
    F: RichField + Extendable<D>,
    H: AlgebraicHasher<F>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, H, D>,
    num_challenges: usize,
) -> GrandProductChallengeSet<Target> {
    let challenges = (0..num_challenges)
        .map(|_| get_grand_product_challenge_target(builder, challenger))
        .collect();
    GrandProductChallengeSet { challenges }
}

pub(crate) fn get_n_grand_product_challenge_sets_target<
    F: RichField + Extendable<D>,
    H: AlgebraicHasher<F>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, H, D>,
    num_challenges: usize,
    num_sets: usize,
) -> Vec<GrandProductChallengeSet<Target>> {
    (0..num_sets)
        .map(|_| get_grand_product_challenge_set_target(builder, challenger, num_challenges))
        .collect()
}

/// Get a list of instances of our batch-permutation argument. These are permutation arguments
/// where the same `Z(x)` polynomial is used to check more than one permutation.
/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we
/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we
/// chunk these arguments based on our batch size.
pub(crate) fn get_permutation_batches<'a, T: Copy + Eq + PartialEq + Debug>(
    permutation_pairs: &'a [PermutationPair],
    permutation_challenge_sets: &[GrandProductChallengeSet<T>],
    num_challenges: usize,
    batch_size: usize,
) -> Vec<Vec<PermutationInstance<'a, T>>> {
    permutation_pairs
        .iter()
        .cartesian_product(0..num_challenges)
        .chunks(batch_size)
        .into_iter()
        .map(|batch| {
            batch
                .enumerate()
                .map(|(i, (pair, chal))| {
                    let challenge = permutation_challenge_sets[i].challenges[chal];
                    PermutationInstance { pair, challenge }
                })
                .collect_vec()
        })
        .collect()
}
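The shape of this batching is easier to see in a dependency-free sketch (hypothetical `usize` stand-ins for pairs and challenges; the real code above carries `PermutationPair` references and challenge values): the cartesian product pairs x challenges is split into chunks of `batch_size`, and each chunk shares a single `Z` polynomial.

// Sketch of the batching shape: 3 pairs x 2 challenges, batch size 2.
fn batches(num_pairs: usize, num_challenges: usize, batch_size: usize) -> Vec<Vec<(usize, usize)>> {
    let instances: Vec<(usize, usize)> = (0..num_pairs)
        .flat_map(|p| (0..num_challenges).map(move |c| (p, c)))
        .collect();
    instances.chunks(batch_size).map(|c| c.to_vec()).collect()
}

fn main() {
    // 6 instances in batches of 2 -> 3 Z polynomials.
    assert_eq!(batches(3, 2, 2).len(), 3);
}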

pub struct PermutationCheckVars<F, FE, P, const D2: usize>
where
    F: Field,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
{
    pub(crate) local_zs: Vec<P>,
    pub(crate) next_zs: Vec<P>,
    pub(crate) permutation_challenge_sets: Vec<GrandProductChallengeSet<F>>,
}

pub(crate) fn eval_permutation_checks<F, FE, P, S, const D: usize, const D2: usize>(
    stark: &S,
    config: &StarkConfig,
    vars: &S::EvaluationFrame<FE, P, D2>,
    permutation_vars: PermutationCheckVars<F, FE, P, D2>,
    consumer: &mut ConstraintConsumer<P>,
) where
    F: RichField + Extendable<D>,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
    S: Stark<F, D>,
{
    let local_values = vars.get_local_values();

    let PermutationCheckVars {
        local_zs,
        next_zs,
        permutation_challenge_sets,
    } = permutation_vars;

    // Check that Z(1) = 1;
    for &z in &local_zs {
        consumer.constraint_first_row(z - FE::ONE);
    }

    let permutation_pairs = stark.permutation_pairs();

    let permutation_batches = get_permutation_batches(
        &permutation_pairs,
        &permutation_challenge_sets,
        config.num_challenges,
        stark.permutation_batch_size(),
    );

    // Each zs value corresponds to a permutation batch.
    for (i, instances) in permutation_batches.iter().enumerate() {
        // Z(gx) * down = Z(x) * up
        let (reduced_lhs, reduced_rhs): (Vec<P>, Vec<P>) = instances
            .iter()
            .map(|instance| {
                let PermutationInstance {
                    pair: PermutationPair { column_pairs },
                    challenge: GrandProductChallenge { beta, gamma },
                } = instance;
                let mut factor = ReducingFactor::new(*beta);
                let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
                    .iter()
                    .map(|&(i, j)| (local_values[i], local_values[j]))
                    .unzip();
                (
                    factor.reduce_ext(lhs.into_iter()) + FE::from_basefield(*gamma),
                    factor.reduce_ext(rhs.into_iter()) + FE::from_basefield(*gamma),
                )
            })
            .unzip();
        let constraint = next_zs[i] * reduced_rhs.into_iter().product::<P>()
            - local_zs[i] * reduced_lhs.into_iter().product::<P>();
        consumer.constraint(constraint);
    }
}
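Concretely, the batched constraint forces Z(gx) * prod(rhs) = Z(x) * prod(lhs) at every row. A toy check over u64 (a sketch with made-up values, assuming a single batch of two instances):

fn main() {
    // Reduced values for two instances sharing one batch (made-up numbers).
    let lhs = [3u64, 8];
    let rhs = [4u64, 6];
    let z_x = 5u64;
    // An honest prover sets Z(gx) = Z(x) * prod(lhs) / prod(rhs);
    // here 3 * 8 = 24 = 4 * 6, so Z stays at 5 and the constraint vanishes.
    let z_gx = z_x * lhs.iter().product::<u64>() / rhs.iter().product::<u64>();
    assert_eq!(
        z_gx * rhs.iter().product::<u64>(),
        z_x * lhs.iter().product::<u64>()
    );
}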

pub struct PermutationCheckDataTarget<const D: usize> {
    pub(crate) local_zs: Vec<ExtensionTarget<D>>,
    pub(crate) next_zs: Vec<ExtensionTarget<D>>,
    pub(crate) permutation_challenge_sets: Vec<GrandProductChallengeSet<Target>>,
}

pub(crate) fn eval_permutation_checks_circuit<F, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    config: &StarkConfig,
    vars: &S::EvaluationFrameTarget,
    permutation_data: PermutationCheckDataTarget<D>,
    consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
{
    let local_values = vars.get_local_values();

    let PermutationCheckDataTarget {
        local_zs,
        next_zs,
        permutation_challenge_sets,
    } = permutation_data;

    let one = builder.one_extension();
    // Check that Z(1) = 1;
    for &z in &local_zs {
        let z_1 = builder.sub_extension(z, one);
        consumer.constraint_first_row(builder, z_1);
    }

    let permutation_pairs = stark.permutation_pairs();

    let permutation_batches = get_permutation_batches(
        &permutation_pairs,
        &permutation_challenge_sets,
        config.num_challenges,
        stark.permutation_batch_size(),
    );

    // Each zs value corresponds to a permutation batch.
    for (i, instances) in permutation_batches.iter().enumerate() {
        let (reduced_lhs, reduced_rhs): (Vec<ExtensionTarget<D>>, Vec<ExtensionTarget<D>>) =
            instances
                .iter()
                .map(|instance| {
                    let PermutationInstance {
                        pair: PermutationPair { column_pairs },
                        challenge: GrandProductChallenge { beta, gamma },
                    } = instance;
                    let beta_ext = builder.convert_to_ext(*beta);
                    let gamma_ext = builder.convert_to_ext(*gamma);
                    let mut factor = ReducingFactorTarget::new(beta_ext);
                    let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
                        .iter()
                        .map(|&(i, j)| (local_values[i], local_values[j]))
                        .unzip();
                    let reduced_lhs = factor.reduce(&lhs, builder);
                    let reduced_rhs = factor.reduce(&rhs, builder);
                    (
                        builder.add_extension(reduced_lhs, gamma_ext),
                        builder.add_extension(reduced_rhs, gamma_ext),
                    )
                })
                .unzip();
        let reduced_lhs_product = builder.mul_many_extension(reduced_lhs);
        let reduced_rhs_product = builder.mul_many_extension(reduced_rhs);
        // constraint = next_zs[i] * reduced_rhs_product - local_zs[i] * reduced_lhs_product
        let constraint = {
            let tmp = builder.mul_extension(local_zs[i], reduced_lhs_product);
            builder.mul_sub_extension(next_zs[i], reduced_rhs_product, tmp)
        };
        consumer.constraint(builder, constraint)
    }
}
@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize};

use crate::all_stark::NUM_TABLES;
use crate::config::StarkConfig;
use crate::permutation::GrandProductChallengeSet;
use crate::cross_table_lookup::GrandProductChallengeSet;

/// A STARK proof for each table, plus some metadata used to create recursive wrapper proofs.
#[derive(Debug, Clone)]
@ -654,8 +654,8 @@ impl ExtraBlockDataTarget {
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
    /// Merkle cap of LDEs of trace values.
    pub trace_cap: MerkleCap<F, C::Hasher>,
    /// Merkle cap of LDEs of permutation Z values.
    pub permutation_ctl_zs_cap: MerkleCap<F, C::Hasher>,
    /// Merkle cap of LDEs of lookup helper and CTL columns.
    pub auxiliary_polys_cap: MerkleCap<F, C::Hasher>,
    /// Merkle cap of LDEs of quotient polynomial evaluations.
    pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
    /// Purported values of each polynomial at the challenge point.
@ -696,7 +696,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> S
#[derive(Eq, PartialEq, Debug)]
pub struct StarkProofTarget<const D: usize> {
    pub trace_cap: MerkleCapTarget,
    pub permutation_ctl_zs_cap: MerkleCapTarget,
    pub auxiliary_polys_cap: MerkleCapTarget,
    pub quotient_polys_cap: MerkleCapTarget,
    pub openings: StarkOpeningSetTarget<D>,
    pub opening_proof: FriProofTarget<D>,
@ -705,7 +705,7 @@ pub struct StarkProofTarget<const D: usize> {
impl<const D: usize> StarkProofTarget<D> {
    pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
        buffer.write_target_merkle_cap(&self.trace_cap)?;
        buffer.write_target_merkle_cap(&self.permutation_ctl_zs_cap)?;
        buffer.write_target_merkle_cap(&self.auxiliary_polys_cap)?;
        buffer.write_target_merkle_cap(&self.quotient_polys_cap)?;
        buffer.write_target_fri_proof(&self.opening_proof)?;
        self.openings.to_buffer(buffer)?;
@ -714,14 +714,14 @@ impl<const D: usize> StarkProofTarget<D> {

    pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
        let trace_cap = buffer.read_target_merkle_cap()?;
        let permutation_ctl_zs_cap = buffer.read_target_merkle_cap()?;
        let auxiliary_polys_cap = buffer.read_target_merkle_cap()?;
        let quotient_polys_cap = buffer.read_target_merkle_cap()?;
        let opening_proof = buffer.read_target_fri_proof()?;
        let openings = StarkOpeningSetTarget::from_buffer(buffer)?;

        Ok(Self {
            trace_cap,
            permutation_ctl_zs_cap,
            auxiliary_polys_cap,
            quotient_polys_cap,
            openings,
            opening_proof,
@ -740,9 +740,6 @@ impl<const D: usize> StarkProofTarget<D> {
}

pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
    /// Randomness used in any permutation arguments.
    pub permutation_challenge_sets: Option<Vec<GrandProductChallengeSet<F>>>,

    /// Random values used to combine STARK constraints.
    pub stark_alphas: Vec<F>,

@ -753,7 +750,6 @@ pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: us
}

pub(crate) struct StarkProofChallengesTarget<const D: usize> {
    pub permutation_challenge_sets: Option<Vec<GrandProductChallengeSet<Target>>>,
    pub stark_alphas: Vec<Target>,
    pub stark_zeta: ExtensionTarget<D>,
    pub fri_challenges: FriChallengesTarget<D>,
@ -766,10 +762,10 @@ pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
    pub local_values: Vec<F::Extension>,
    /// Openings of trace polynomials at `g * zeta`.
    pub next_values: Vec<F::Extension>,
    /// Openings of permutations and cross-table lookups `Z` polynomials at `zeta`.
    pub permutation_ctl_zs: Vec<F::Extension>,
    /// Openings of permutations and cross-table lookups `Z` polynomials at `g * zeta`.
    pub permutation_ctl_zs_next: Vec<F::Extension>,
    /// Openings of lookups and cross-table lookups `Z` polynomials at `zeta`.
    pub auxiliary_polys: Vec<F::Extension>,
    /// Openings of lookups and cross-table lookups `Z` polynomials at `g * zeta`.
    pub auxiliary_polys_next: Vec<F::Extension>,
    /// Openings of cross-table lookups `Z` polynomials at `1`.
    pub ctl_zs_first: Vec<F>,
    /// Openings of quotient polynomials at `zeta`.
@ -781,9 +777,9 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
        zeta: F::Extension,
        g: F,
        trace_commitment: &PolynomialBatch<F, C, D>,
        permutation_ctl_zs_commitment: &PolynomialBatch<F, C, D>,
        auxiliary_polys_commitment: &PolynomialBatch<F, C, D>,
        quotient_commitment: &PolynomialBatch<F, C, D>,
        num_permutation_zs: usize,
        num_lookup_columns: usize,
    ) -> Self {
        let eval_commitment = |z: F::Extension, c: &PolynomialBatch<F, C, D>| {
            c.polynomials
@ -801,10 +797,10 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
        Self {
            local_values: eval_commitment(zeta, trace_commitment),
            next_values: eval_commitment(zeta_next, trace_commitment),
            permutation_ctl_zs: eval_commitment(zeta, permutation_ctl_zs_commitment),
            permutation_ctl_zs_next: eval_commitment(zeta_next, permutation_ctl_zs_commitment),
            ctl_zs_first: eval_commitment_base(F::ONE, permutation_ctl_zs_commitment)
                [num_permutation_zs..]
            auxiliary_polys: eval_commitment(zeta, auxiliary_polys_commitment),
            auxiliary_polys_next: eval_commitment(zeta_next, auxiliary_polys_commitment),
            ctl_zs_first: eval_commitment_base(F::ONE, auxiliary_polys_commitment)
                [num_lookup_columns..]
                .to_vec(),
            quotient_polys: eval_commitment(zeta, quotient_commitment),
        }
@ -815,7 +811,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
            values: self
                .local_values
                .iter()
                .chain(&self.permutation_ctl_zs)
                .chain(&self.auxiliary_polys)
                .chain(&self.quotient_polys)
                .copied()
                .collect_vec(),
@ -824,7 +820,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
            values: self
                .next_values
                .iter()
                .chain(&self.permutation_ctl_zs_next)
                .chain(&self.auxiliary_polys_next)
                .copied()
                .collect_vec(),
        };
@ -848,8 +844,8 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
pub struct StarkOpeningSetTarget<const D: usize> {
    pub local_values: Vec<ExtensionTarget<D>>,
    pub next_values: Vec<ExtensionTarget<D>>,
    pub permutation_ctl_zs: Vec<ExtensionTarget<D>>,
    pub permutation_ctl_zs_next: Vec<ExtensionTarget<D>>,
    pub auxiliary_polys: Vec<ExtensionTarget<D>>,
    pub auxiliary_polys_next: Vec<ExtensionTarget<D>>,
    pub ctl_zs_first: Vec<Target>,
    pub quotient_polys: Vec<ExtensionTarget<D>>,
}
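Throughout these structures the auxiliary vector is a concatenation: the first `num_lookup_columns` entries are lookup helper columns and the rest are CTL `Z` columns, which is why `ctl_zs_first` above is sliced from `[num_lookup_columns..]`. A minimal slicing sketch (hypothetical lengths):

fn main() {
    let num_lookup_columns = 4;
    let num_ctl_zs = 3;
    // auxiliary_polys = [lookup helper columns | CTL Z columns].
    let auxiliary_polys: Vec<usize> = (0..num_lookup_columns + num_ctl_zs).collect();
    let (lookup_helpers, ctl_zs) = auxiliary_polys.split_at(num_lookup_columns);
    assert_eq!(lookup_helpers.len(), num_lookup_columns);
    assert_eq!(ctl_zs.len(), num_ctl_zs);
}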
@ -858,8 +854,8 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
    pub fn to_buffer(&self, buffer: &mut Vec<u8>) -> IoResult<()> {
        buffer.write_target_ext_vec(&self.local_values)?;
        buffer.write_target_ext_vec(&self.next_values)?;
        buffer.write_target_ext_vec(&self.permutation_ctl_zs)?;
        buffer.write_target_ext_vec(&self.permutation_ctl_zs_next)?;
        buffer.write_target_ext_vec(&self.auxiliary_polys)?;
        buffer.write_target_ext_vec(&self.auxiliary_polys_next)?;
        buffer.write_target_vec(&self.ctl_zs_first)?;
        buffer.write_target_ext_vec(&self.quotient_polys)?;
        Ok(())
@ -868,16 +864,16 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
    pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
        let local_values = buffer.read_target_ext_vec::<D>()?;
        let next_values = buffer.read_target_ext_vec::<D>()?;
        let permutation_ctl_zs = buffer.read_target_ext_vec::<D>()?;
        let permutation_ctl_zs_next = buffer.read_target_ext_vec::<D>()?;
        let auxiliary_polys = buffer.read_target_ext_vec::<D>()?;
        let auxiliary_polys_next = buffer.read_target_ext_vec::<D>()?;
        let ctl_zs_first = buffer.read_target_vec()?;
        let quotient_polys = buffer.read_target_ext_vec::<D>()?;

        Ok(Self {
            local_values,
            next_values,
            permutation_ctl_zs,
            permutation_ctl_zs_next,
            auxiliary_polys,
            auxiliary_polys_next,
            ctl_zs_first,
            quotient_polys,
        })
@ -888,7 +884,7 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
            values: self
                .local_values
                .iter()
                .chain(&self.permutation_ctl_zs)
                .chain(&self.auxiliary_polys)
                .chain(&self.quotient_polys)
                .copied()
                .collect_vec(),
@ -897,7 +893,7 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
            values: self
                .next_values
                .iter()
                .chain(&self.permutation_ctl_zs_next)
                .chain(&self.auxiliary_polys_next)
                .copied()
                .collect_vec(),
        };

@ -23,15 +23,15 @@ use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cross_table_lookup::{cross_table_lookup_data, CtlCheckVars, CtlData};
use crate::cross_table_lookup::{
    cross_table_lookup_data, get_grand_product_challenge_set, CtlCheckVars, CtlData,
    GrandProductChallengeSet,
};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::generation::outputs::GenerationOutputs;
use crate::generation::{generate_traces, GenerationInputs};
use crate::get_challenges::observe_public_values;
use crate::permutation::{
    compute_permutation_z_polys, get_grand_product_challenge_set,
    get_n_grand_product_challenge_sets, GrandProductChallengeSet, PermutationCheckVars,
};
use crate::lookup::{lookup_helper_columns, Lookup, LookupCheckVars};
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof, StarkProofWithMetadata};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
@ -146,6 +146,7 @@ where
            trace_commitments,
            ctl_data_per_table,
            &mut challenger,
            &ctl_challenges,
            timing
        )?
    );
@ -164,6 +165,7 @@ fn prove_with_commitments<F, C, const D: usize>(
    trace_commitments: Vec<PolynomialBatch<F, C, D>>,
    ctl_data_per_table: [CtlData<F>; NUM_TABLES],
    challenger: &mut Challenger<F, C::Hasher>,
    ctl_challenges: &GrandProductChallengeSet<F>,
    timing: &mut TimingTree,
) -> Result<[StarkProofWithMetadata<F, C, D>; NUM_TABLES]>
where
@ -179,6 +181,7 @@ where
            &trace_poly_values[Table::Arithmetic as usize],
            &trace_commitments[Table::Arithmetic as usize],
            &ctl_data_per_table[Table::Arithmetic as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
@ -192,6 +195,7 @@ where
            &trace_poly_values[Table::BytePacking as usize],
            &trace_commitments[Table::BytePacking as usize],
            &ctl_data_per_table[Table::BytePacking as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
@ -205,6 +209,7 @@ where
            &trace_poly_values[Table::Cpu as usize],
            &trace_commitments[Table::Cpu as usize],
            &ctl_data_per_table[Table::Cpu as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
@ -218,6 +223,7 @@ where
            &trace_poly_values[Table::Keccak as usize],
            &trace_commitments[Table::Keccak as usize],
            &ctl_data_per_table[Table::Keccak as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
@ -231,6 +237,7 @@ where
            &trace_poly_values[Table::KeccakSponge as usize],
            &trace_commitments[Table::KeccakSponge as usize],
            &ctl_data_per_table[Table::KeccakSponge as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
@ -244,6 +251,7 @@ where
            &trace_poly_values[Table::Logic as usize],
            &trace_commitments[Table::Logic as usize],
            &ctl_data_per_table[Table::Logic as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
@ -257,6 +265,7 @@ where
            &trace_poly_values[Table::Memory as usize],
            &trace_commitments[Table::Memory as usize],
            &ctl_data_per_table[Table::Memory as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
@ -280,6 +289,7 @@ pub(crate) fn prove_single_table<F, C, S, const D: usize>(
    trace_poly_values: &[PolynomialValues<F>],
    trace_commitment: &PolynomialBatch<F, C, D>,
    ctl_data: &CtlData<F>,
    ctl_challenges: &GrandProductChallengeSet<F>,
    challenger: &mut Challenger<F, C::Hasher>,
    timing: &mut TimingTree,
) -> Result<StarkProofWithMetadata<F, C, D>>
@ -300,37 +310,49 @@ where

    let init_challenger_state = challenger.compact();

    // Permutation arguments.
    let permutation_challenges = stark.uses_permutation_args().then(|| {
        get_n_grand_product_challenge_sets(
            challenger,
            config.num_challenges,
            stark.permutation_batch_size(),
        )
    let constraint_degree = stark.constraint_degree();
    let lookup_challenges = stark.uses_lookups().then(|| {
        ctl_challenges
            .challenges
            .iter()
            .map(|ch| ch.beta)
            .collect::<Vec<_>>()
    });
    let permutation_zs = permutation_challenges.as_ref().map(|challenges| {
        timed!(
            timing,
            "compute permutation Z(x) polys",
            compute_permutation_z_polys::<F, S, D>(stark, config, trace_poly_values, challenges)
        )
    });
    let num_permutation_zs = permutation_zs.as_ref().map(|v| v.len()).unwrap_or(0);
    let lookups = stark.lookups();
    let lookup_helper_columns = timed!(
        timing,
        "compute lookup helper columns",
        lookup_challenges.as_ref().map(|challenges| {
            let mut columns = Vec::new();
            for lookup in &lookups {
                for &challenge in challenges {
                    columns.extend(lookup_helper_columns(
                        lookup,
                        trace_poly_values,
                        challenge,
                        constraint_degree,
                    ));
                }
            }
            columns
        })
    );
    let num_lookup_columns = lookup_helper_columns.as_ref().map(|v| v.len()).unwrap_or(0);

    let z_polys = match permutation_zs {
    let auxiliary_polys = match lookup_helper_columns {
        None => ctl_data.z_polys(),
        Some(mut permutation_zs) => {
            permutation_zs.extend(ctl_data.z_polys());
            permutation_zs
        Some(mut lookup_columns) => {
            lookup_columns.extend(ctl_data.z_polys());
            lookup_columns
        }
    };
    assert!(!z_polys.is_empty(), "No CTL?");
    assert!(!auxiliary_polys.is_empty(), "No CTL?");

    let permutation_ctl_zs_commitment = timed!(
    let auxiliary_polys_commitment = timed!(
        timing,
        "compute Zs commitment",
        "compute auxiliary polynomials commitment",
        PolynomialBatch::from_values(
            z_polys,
            auxiliary_polys,
            rate_bits,
            false,
            config.fri_config.cap_height,
@ -339,21 +361,21 @@ where
        )
    );

    let permutation_ctl_zs_cap = permutation_ctl_zs_commitment.merkle_tree.cap.clone();
    challenger.observe_cap(&permutation_ctl_zs_cap);
    let auxiliary_polys_cap = auxiliary_polys_commitment.merkle_tree.cap.clone();
    challenger.observe_cap(&auxiliary_polys_cap);

    let alphas = challenger.get_n_challenges(config.num_challenges);
    if cfg!(test) {
        check_constraints(
            stark,
            trace_commitment,
            &permutation_ctl_zs_commitment,
            permutation_challenges.as_ref(),
            &auxiliary_polys_commitment,
            lookup_challenges.as_ref(),
            &lookups,
            ctl_data,
            alphas.clone(),
            degree_bits,
            num_permutation_zs,
            config,
            num_lookup_columns,
        );
    }
    let quotient_polys = timed!(
@ -362,12 +384,13 @@ where
        compute_quotient_polys::<F, <F as Packable>::Packing, C, S, D>(
            stark,
            trace_commitment,
            &permutation_ctl_zs_commitment,
            permutation_challenges.as_ref(),
            &auxiliary_polys_commitment,
            lookup_challenges.as_ref(),
            &lookups,
            ctl_data,
            alphas,
            degree_bits,
            num_permutation_zs,
            num_lookup_columns,
            config,
        )
    );
@ -416,15 +439,15 @@ where
        zeta,
        g,
        trace_commitment,
        &permutation_ctl_zs_commitment,
        &auxiliary_polys_commitment,
        &quotient_commitment,
        stark.num_permutation_batches(config),
        stark.num_lookup_helper_columns(config),
    );
    challenger.observe_openings(&openings.to_fri_openings());

    let initial_merkle_trees = vec![
        trace_commitment,
        &permutation_ctl_zs_commitment,
        &auxiliary_polys_commitment,
        &quotient_commitment,
    ];

@ -442,7 +465,7 @@ where

    let proof = StarkProof {
        trace_cap: trace_commitment.merkle_tree.cap.clone(),
        permutation_ctl_zs_cap,
        auxiliary_polys_cap,
        quotient_polys_cap,
        openings,
        opening_proof,
@ -458,12 +481,13 @@ where
fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
    stark: &S,
    trace_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
    auxiliary_polys_commitment: &'a PolynomialBatch<F, C, D>,
    lookup_challenges: Option<&'a Vec<F>>,
    lookups: &[Lookup],
    ctl_data: &CtlData<F>,
    alphas: Vec<F>,
    degree_bits: usize,
    num_permutation_zs: usize,
    num_lookup_columns: usize,
    config: &StarkConfig,
) -> Vec<PolynomialCoeffs<F>>
where
@ -529,25 +553,22 @@ where
                &get_trace_values_packed(i_start),
                &get_trace_values_packed(i_next_start),
            );
            let permutation_check_vars =
                permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
                    local_zs: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
                        [..num_permutation_zs]
                        .to_vec(),
                    next_zs: permutation_ctl_zs_commitment
                        .get_lde_values_packed(i_next_start, step)[..num_permutation_zs]
                        .to_vec(),
                    permutation_challenge_sets: permutation_challenge_sets.to_vec(),
                });
            let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
                local_values: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
                    [..num_lookup_columns]
                    .to_vec(),
                next_values: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step),
                challenges: challenges.to_vec(),
            });
            let ctl_vars = ctl_data
                .zs_columns
                .iter()
                .enumerate()
                .map(|(i, zs_columns)| CtlCheckVars::<F, F, P, 1> {
                    local_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
                        [num_permutation_zs + i],
                    next_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_next_start, step)
                        [num_permutation_zs + i],
                    local_z: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
                        [num_lookup_columns + i],
                    next_z: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step)
                        [num_lookup_columns + i],
                    challenges: zs_columns.challenge,
                    columns: &zs_columns.columns,
                    filter_column: &zs_columns.filter_column,
@ -555,9 +576,9 @@ where
                .collect::<Vec<_>>();
            eval_vanishing_poly::<F, F, P, S, D, 1>(
                stark,
                config,
                &vars,
                permutation_check_vars,
                lookups,
                lookup_vars,
                &ctl_vars,
                &mut consumer,
            );
@ -590,13 +611,13 @@ where
fn check_constraints<'a, F, C, S, const D: usize>(
    stark: &S,
    trace_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
    auxiliary_commitment: &'a PolynomialBatch<F, C, D>,
    lookup_challenges: Option<&'a Vec<F>>,
    lookups: &[Lookup],
    ctl_data: &CtlData<F>,
    alphas: Vec<F>,
    degree_bits: usize,
    num_permutation_zs: usize,
    config: &StarkConfig,
    num_lookup_columns: usize,
) where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
@ -626,7 +647,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
    };

    let trace_subgroup_evals = get_subgroup_evals(trace_commitment);
    let permutation_ctl_zs_subgroup_evals = get_subgroup_evals(permutation_ctl_zs_commitment);
    let auxiliary_subgroup_evals = get_subgroup_evals(auxiliary_commitment);

    // Last element of the subgroup.
    let last = F::primitive_root_of_unity(degree_bits).inverse();
@ -650,21 +671,19 @@ fn check_constraints<'a, F, C, S, const D: usize>(
                &trace_subgroup_evals[i],
                &trace_subgroup_evals[i_next],
            );
            let permutation_check_vars =
                permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
                    local_zs: permutation_ctl_zs_subgroup_evals[i][..num_permutation_zs].to_vec(),
                    next_zs: permutation_ctl_zs_subgroup_evals[i_next][..num_permutation_zs]
                        .to_vec(),
                    permutation_challenge_sets: permutation_challenge_sets.to_vec(),
                });
            let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
                local_values: auxiliary_subgroup_evals[i][..num_lookup_columns].to_vec(),
                next_values: auxiliary_subgroup_evals[i_next][..num_lookup_columns].to_vec(),
                challenges: challenges.to_vec(),
            });

            let ctl_vars = ctl_data
                .zs_columns
                .iter()
                .enumerate()
                .map(|(iii, zs_columns)| CtlCheckVars::<F, F, F, 1> {
                    local_z: permutation_ctl_zs_subgroup_evals[i][num_permutation_zs + iii],
                    next_z: permutation_ctl_zs_subgroup_evals[i_next][num_permutation_zs + iii],
                    local_z: auxiliary_subgroup_evals[i][num_lookup_columns + iii],
                    next_z: auxiliary_subgroup_evals[i_next][num_lookup_columns + iii],
                    challenges: zs_columns.challenge,
                    columns: &zs_columns.columns,
                    filter_column: &zs_columns.filter_column,
@ -672,9 +691,9 @@ fn check_constraints<'a, F, C, S, const D: usize>(
                .collect::<Vec<_>>();
            eval_vanishing_poly::<F, F, F, S, D, 1>(
                stark,
                config,
                &vars,
                permutation_check_vars,
                lookups,
                lookup_vars,
                &ctl_vars,
                &mut consumer,
            );

@ -29,14 +29,14 @@ use crate::all_stark::{Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{verify_cross_table_lookups, CrossTableLookup, CtlCheckVarsTarget};
use crate::cross_table_lookup::{
    get_grand_product_challenge_set, verify_cross_table_lookups, CrossTableLookup,
    CtlCheckVarsTarget, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::LookupCheckVarsTarget;
use crate::memory::segments::Segment;
use crate::memory::VALUE_LIMBS;
use crate::permutation::{
    get_grand_product_challenge_set, GrandProductChallenge, GrandProductChallengeSet,
    PermutationCheckDataTarget,
};
use crate::proof::{
    BlockHashes, BlockHashesTarget, BlockMetadata, BlockMetadataTarget, ExtraBlockData,
    ExtraBlockDataTarget, PublicValues, PublicValuesTarget, StarkOpeningSetTarget, StarkProof,
@ -302,8 +302,7 @@ where
    let mut builder = CircuitBuilder::<F, D>::new(circuit_config.clone());
    let zero_target = builder.zero();

    let num_permutation_zs = stark.num_permutation_batches(inner_config);
    let num_permutation_batch_size = stark.permutation_batch_size();
    let num_lookup_columns = stark.num_lookup_helper_columns(inner_config);
    let num_ctl_zs =
        CrossTableLookup::num_ctl_zs(cross_table_lookups, table, inner_config.num_challenges);
    let proof_target =
@ -331,7 +330,7 @@ where
        &proof_target,
        cross_table_lookups,
        &ctl_challenges_target,
        num_permutation_zs,
        num_lookup_columns,
    );

    let init_challenger_state_target =
@ -340,13 +339,8 @@ where
        }));
    let mut challenger =
        RecursiveChallenger::<F, C::Hasher, D>::from_state(init_challenger_state_target);
    let challenges = proof_target.get_challenges::<F, C>(
        &mut builder,
        &mut challenger,
        num_permutation_zs > 0,
        num_permutation_batch_size,
        inner_config,
    );
    let challenges =
        proof_target.get_challenges::<F, C>(&mut builder, &mut challenger, inner_config);
    let challenger_state = challenger.compact(&mut builder);
    builder.register_public_inputs(challenger_state.as_ref());

@ -358,6 +352,7 @@ where
        &proof_target,
        &challenges,
        &ctl_vars,
        &ctl_challenges_target,
        inner_config,
    );

@ -401,6 +396,7 @@ fn verify_stark_proof_with_challenges_circuit<
    proof: &StarkProofTarget<D>,
    challenges: &StarkProofChallengesTarget<D>,
    ctl_vars: &[CtlCheckVarsTarget<F, D>],
    ctl_challenges: &GrandProductChallengeSet<Target>,
    inner_config: &StarkConfig,
) where
    C::Hasher: AlgebraicHasher<F>,
@ -411,8 +407,8 @@ fn verify_stark_proof_with_challenges_circuit<
    let StarkOpeningSetTarget {
        local_values,
        next_values,
        permutation_ctl_zs,
        permutation_ctl_zs_next,
        auxiliary_polys,
        auxiliary_polys_next,
        ctl_zs_first,
        quotient_polys,
    } = &proof.openings;
@ -435,14 +431,20 @@ fn verify_stark_proof_with_challenges_circuit<
        l_last,
    );

    let num_permutation_zs = stark.num_permutation_batches(inner_config);
    let permutation_data = stark
        .uses_permutation_args()
        .then(|| PermutationCheckDataTarget {
            local_zs: permutation_ctl_zs[..num_permutation_zs].to_vec(),
            next_zs: permutation_ctl_zs_next[..num_permutation_zs].to_vec(),
            permutation_challenge_sets: challenges.permutation_challenge_sets.clone().unwrap(),
        });
    let num_lookup_columns = stark.num_lookup_helper_columns(inner_config);
    let lookup_challenges = (num_lookup_columns > 0).then(|| {
        ctl_challenges
            .challenges
            .iter()
            .map(|ch| ch.beta)
            .collect::<Vec<_>>()
    });

    let lookup_vars = stark.uses_lookups().then(|| LookupCheckVarsTarget {
        local_values: auxiliary_polys[..num_lookup_columns].to_vec(),
        next_values: auxiliary_polys_next[..num_lookup_columns].to_vec(),
        challenges: lookup_challenges.unwrap(),
    });

    with_context!(
        builder,
@ -450,9 +452,8 @@ fn verify_stark_proof_with_challenges_circuit<
        eval_vanishing_poly_circuit::<F, S, D>(
            builder,
            stark,
            inner_config,
            &vars,
            permutation_data,
            lookup_vars,
            ctl_vars,
            &mut consumer,
        )
@ -472,7 +473,7 @@ fn verify_stark_proof_with_challenges_circuit<

    let merkle_caps = vec![
        proof.trace_cap.clone(),
        proof.permutation_ctl_zs_cap.clone(),
        proof.auxiliary_polys_cap.clone(),
        proof.quotient_polys_cap.clone(),
    ];

@ -843,15 +844,15 @@ pub(crate) fn add_virtual_stark_proof<

    let num_leaves_per_oracle = vec![
        S::COLUMNS,
        stark.num_permutation_batches(config) + num_ctl_zs,
        stark.num_lookup_helper_columns(config) + num_ctl_zs,
        stark.quotient_degree_factor() * config.num_challenges,
    ];

    let permutation_zs_cap = builder.add_virtual_cap(cap_height);
    let auxiliary_polys_cap = builder.add_virtual_cap(cap_height);

    StarkProofTarget {
        trace_cap: builder.add_virtual_cap(cap_height),
        permutation_ctl_zs_cap: permutation_zs_cap,
        auxiliary_polys_cap,
        quotient_polys_cap: builder.add_virtual_cap(cap_height),
        openings: add_virtual_stark_opening_set::<F, S, D>(builder, stark, num_ctl_zs, config),
        opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params),
@ -868,10 +869,10 @@ fn add_virtual_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, c
    StarkOpeningSetTarget {
        local_values: builder.add_virtual_extension_targets(S::COLUMNS),
        next_values: builder.add_virtual_extension_targets(S::COLUMNS),
        permutation_ctl_zs: builder
            .add_virtual_extension_targets(stark.num_permutation_batches(config) + num_ctl_zs),
        permutation_ctl_zs_next: builder
            .add_virtual_extension_targets(stark.num_permutation_batches(config) + num_ctl_zs),
        auxiliary_polys: builder
            .add_virtual_extension_targets(stark.num_lookup_helper_columns(config) + num_ctl_zs),
        auxiliary_polys_next: builder
            .add_virtual_extension_targets(stark.num_lookup_helper_columns(config) + num_ctl_zs),
        ctl_zs_first: builder.add_virtual_targets(num_ctl_zs),
        quotient_polys: builder
            .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
@ -897,8 +898,8 @@ pub(crate) fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D:
    );

    witness.set_cap_target(
        &proof_target.permutation_ctl_zs_cap,
        &proof.permutation_ctl_zs_cap,
        &proof_target.auxiliary_polys_cap,
        &proof.auxiliary_polys_cap,
    );

    set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);

@ -8,15 +8,14 @@ use plonky2::fri::structure::{
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2_util::ceil_div_usize;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::permutation::PermutationPair;
use crate::lookup::Lookup;

const TRACE_ORACLE_INDEX: usize = 0;
const PERMUTATION_CTL_ORACLE_INDEX: usize = 1;
const AUXILIARY_ORACLE_INDEX: usize = 1;
const QUOTIENT_ORACLE_INDEX: usize = 2;

/// Represents a STARK system.
@ -102,20 +101,18 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
        };
        let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);

        let num_permutation_batches = self.num_permutation_batches(config);
        let num_perutation_ctl_polys = num_permutation_batches + num_ctl_zs;
        let permutation_ctl_oracle = FriOracleInfo {
            num_polys: num_perutation_ctl_polys,
        let num_lookup_columns = self.num_lookup_helper_columns(config);
        let num_auxiliary_polys = num_lookup_columns + num_ctl_zs;
        let auxiliary_oracle = FriOracleInfo {
            num_polys: num_auxiliary_polys,
            blinding: false,
        };
        let permutation_ctl_zs_info = FriPolynomialInfo::from_range(
            PERMUTATION_CTL_ORACLE_INDEX,
            0..num_perutation_ctl_polys,
        );
        let auxiliary_polys_info =
            FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);

        let ctl_zs_info = FriPolynomialInfo::from_range(
            PERMUTATION_CTL_ORACLE_INDEX,
            num_permutation_batches..num_permutation_batches + num_ctl_zs,
            AUXILIARY_ORACLE_INDEX,
            num_lookup_columns..num_lookup_columns + num_ctl_zs,
        );

        let num_quotient_polys = self.num_quotient_polys(config);
@ -130,21 +127,21 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
            point: zeta,
            polynomials: [
                trace_info.clone(),
                permutation_ctl_zs_info.clone(),
                auxiliary_polys_info.clone(),
                quotient_info,
            ]
            .concat(),
        };
        let zeta_next_batch = FriBatchInfo {
            point: zeta.scalar_mul(g),
            polynomials: [trace_info, permutation_ctl_zs_info].concat(),
            polynomials: [trace_info, auxiliary_polys_info].concat(),
        };
        let ctl_first_batch = FriBatchInfo {
            point: F::Extension::ONE,
            polynomials: ctl_zs_info,
        };
        FriInstanceInfo {
            oracles: vec![trace_oracle, permutation_ctl_oracle, quotient_oracle],
            oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
            batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch],
        }
    }
@ -164,20 +161,18 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
        };
        let trace_info = FriPolynomialInfo::from_range(TRACE_ORACLE_INDEX, 0..Self::COLUMNS);

        let num_permutation_batches = self.num_permutation_batches(inner_config);
        let num_perutation_ctl_polys = num_permutation_batches + num_ctl_zs;
        let permutation_ctl_oracle = FriOracleInfo {
            num_polys: num_perutation_ctl_polys,
        let num_lookup_columns = self.num_lookup_helper_columns(inner_config);
        let num_auxiliary_polys = num_lookup_columns + num_ctl_zs;
        let auxiliary_oracle = FriOracleInfo {
            num_polys: num_auxiliary_polys,
            blinding: false,
        };
        let permutation_ctl_zs_info = FriPolynomialInfo::from_range(
            PERMUTATION_CTL_ORACLE_INDEX,
            0..num_perutation_ctl_polys,
        );
        let auxiliary_polys_info =
            FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);

        let ctl_zs_info = FriPolynomialInfo::from_range(
            PERMUTATION_CTL_ORACLE_INDEX,
            num_permutation_batches..num_permutation_batches + num_ctl_zs,
            AUXILIARY_ORACLE_INDEX,
            num_lookup_columns..num_lookup_columns + num_ctl_zs,
        );

        let num_quotient_polys = self.num_quotient_polys(inner_config);
@ -192,7 +187,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
            point: zeta,
            polynomials: [
                trace_info.clone(),
                permutation_ctl_zs_info.clone(),
                auxiliary_polys_info.clone(),
                quotient_info,
            ]
            .concat(),
@ -200,45 +195,31 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
        let zeta_next = builder.mul_const_extension(g, zeta);
        let zeta_next_batch = FriBatchInfoTarget {
            point: zeta_next,
            polynomials: [trace_info, permutation_ctl_zs_info].concat(),
            polynomials: [trace_info, auxiliary_polys_info].concat(),
        };
        let ctl_first_batch = FriBatchInfoTarget {
            point: builder.one_extension(),
            polynomials: ctl_zs_info,
        };
        FriInstanceInfoTarget {
            oracles: vec![trace_oracle, permutation_ctl_oracle, quotient_oracle],
            oracles: vec![trace_oracle, auxiliary_oracle, quotient_oracle],
            batches: vec![zeta_batch, zeta_next_batch, ctl_first_batch],
        }
    }
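To summarize the opening schedule these two methods describe (an informal sketch, not code from the diff): oracle 0 commits to the trace, oracle 1 to the auxiliary polynomials (lookup helper columns followed by CTL `Z` columns), and oracle 2 to the quotient chunks; batch 0 opens all three at `zeta`, batch 1 opens trace and auxiliary polynomials at `g * zeta`, and batch 2 opens only the CTL `Z` sub-range at 1.

// Schematic layout of oracles and FRI opening batches after this change.
const TRACE: usize = 0; // trace columns
const AUXILIARY: usize = 1; // lookup helpers ++ CTL Z columns
const QUOTIENT: usize = 2; // quotient polynomial chunks

fn main() {
    // (opening point, oracles opened), mirroring the batches built above.
    let batches = [
        ("zeta", vec![TRACE, AUXILIARY, QUOTIENT]),
        ("g * zeta", vec![TRACE, AUXILIARY]),
        ("1", vec![AUXILIARY]), // only the CTL Z sub-range of this oracle
    ];
    assert_eq!(batches.len(), 3);
}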

    /// Pairs of lists of columns that should be permutations of one another. A permutation argument
    /// will be used for each such pair. Empty by default.
    fn permutation_pairs(&self) -> Vec<PermutationPair> {
    fn lookups(&self) -> Vec<Lookup> {
        vec![]
    }

    fn uses_permutation_args(&self) -> bool {
        !self.permutation_pairs().is_empty()
    fn num_lookup_helper_columns(&self, config: &StarkConfig) -> usize {
        self.lookups()
            .iter()
            .map(|lookup| lookup.num_helper_columns(self.constraint_degree()))
            .sum::<usize>()
            * config.num_challenges
    }
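As a worked count for the method above (illustrative numbers only; the per-lookup `num_helper_columns` formula lives in `lookup.rs` and is not shown in this diff): with two lookups contributing 5 and 3 helper columns and a config with 2 challenges, the total is (5 + 3) * 2 = 16.

fn main() {
    // Hypothetical per-lookup helper counts and challenge count.
    let helper_counts = [5usize, 3];
    let num_challenges = 2;
    let total: usize = helper_counts.iter().sum::<usize>() * num_challenges;
    assert_eq!(total, 16);
}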

    /// The number of permutation argument instances that can be combined into a single constraint.
    fn permutation_batch_size(&self) -> usize {
        // The permutation argument constraints look like
        // Z(x) \prod(...) = Z(g x) \prod(...)
        // where each product has a number of terms equal to the batch size. So our batch size
        // should be one less than our constraint degree, which happens to be our quotient degree.
        self.quotient_degree_factor()
    }
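Making the degree count in the removed comment explicit: each side of the constraint is Z times a product of b degree-1 reduced factors, so the constraint has degree 1 + b. Requiring

    1 + b <= d  =>  b = d - 1  (with d = 3, b = 2)

for constraint degree d gives a batch size of d - 1, which is exactly the quotient degree factor.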

    fn num_permutation_instances(&self, config: &StarkConfig) -> usize {
        self.permutation_pairs().len() * config.num_challenges
    }

    fn num_permutation_batches(&self, config: &StarkConfig) -> usize {
        ceil_div_usize(
            self.num_permutation_instances(config),
            self.permutation_batch_size(),
        )
    fn uses_lookups(&self) -> bool {
        !self.lookups().is_empty()
    }
}
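For an implementer's view of the new trait surface (a self-contained sketch with hypothetical column indices and a local mirror of the `Lookup` struct, following the range-check pattern the arithmetic table uses earlier in this PR):

// Sketch: a table opting into a logUp range check via `lookups()`.
struct Lookup {
    columns: Vec<usize>,       // columns whose values must appear in the table
    table_column: usize,       // column holding the lookup table itself
    frequencies_column: usize, // multiplicity of each table value
}

struct MyStark;

impl MyStark {
    fn lookups(&self) -> Vec<Lookup> {
        vec![Lookup {
            columns: (0..16).collect(), // hypothetical shared 16-bit columns
            table_column: 16,           // hypothetical running counter column
            frequencies_column: 17,     // hypothetical frequency column
        }]
    }

    fn uses_lookups(&self) -> bool {
        !self.lookups().is_empty()
    }
}

fn main() {
    assert!(MyStark.uses_lookups());
    let lk = &MyStark.lookups()[0];
    assert_eq!(lk.columns.len(), 16);
    assert!(lk.table_column < lk.frequencies_column);
}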

@ -3,23 +3,22 @@ use plonky2::field::packed::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{
    eval_cross_table_lookup_checks, eval_cross_table_lookup_checks_circuit, CtlCheckVars,
    CtlCheckVarsTarget,
};
use crate::permutation::{
    eval_permutation_checks, eval_permutation_checks_circuit, PermutationCheckDataTarget,
    PermutationCheckVars,
use crate::lookup::{
    eval_ext_lookups_circuit, eval_packed_lookups_generic, Lookup, LookupCheckVars,
    LookupCheckVarsTarget,
};
use crate::stark::Stark;

pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
    stark: &S,
    config: &StarkConfig,
    vars: &S::EvaluationFrame<FE, P, D2>,
    permutation_vars: Option<PermutationCheckVars<F, FE, P, D2>>,
    lookups: &[Lookup],
    lookup_vars: Option<LookupCheckVars<F, FE, P, D2>>,
    ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
    consumer: &mut ConstraintConsumer<P>,
) where
@ -29,12 +28,12 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
    S: Stark<F, D>,
{
    stark.eval_packed_generic(vars, consumer);
    if let Some(permutation_vars) = permutation_vars {
        eval_permutation_checks::<F, FE, P, S, D, D2>(
    if let Some(lookup_vars) = lookup_vars {
        eval_packed_lookups_generic::<F, FE, P, S, D, D2>(
            stark,
            config,
            lookups,
            vars,
            permutation_vars,
            lookup_vars,
            consumer,
        );
    }
@ -44,9 +43,8 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    config: &StarkConfig,
    vars: &S::EvaluationFrameTarget,
    permutation_data: Option<PermutationCheckDataTarget<D>>,
    lookup_vars: Option<LookupCheckVarsTarget<D>>,
    ctl_vars: &[CtlCheckVarsTarget<F, D>],
    consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
@ -54,15 +52,8 @@ pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
    S: Stark<F, D>,
{
    stark.eval_ext_circuit(builder, vars, consumer);
    if let Some(permutation_data) = permutation_data {
        eval_permutation_checks_circuit::<F, S, D>(
            builder,
            stark,
            config,
            vars,
            permutation_data,
            consumer,
        );
    if let Some(lookup_vars) = lookup_vars {
        eval_ext_lookups_circuit::<F, S, D>(builder, stark, vars, lookup_vars, consumer);
    }
    eval_cross_table_lookup_checks_circuit::<S, F, D>(builder, vars, ctl_vars, consumer);
}

@ -14,11 +14,13 @@ use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars};
use crate::cross_table_lookup::{
    verify_cross_table_lookups, CtlCheckVars, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::LookupCheckVars;
use crate::memory::segments::Segment;
use crate::memory::VALUE_LIMBS;
use crate::permutation::{GrandProductChallenge, PermutationCheckVars};
use crate::proof::{
    AllProof, AllProofChallenges, PublicValues, StarkOpeningSet, StarkProof, StarkProofChallenges,
};
@ -37,10 +39,10 @@ where
        stark_challenges,
        ctl_challenges,
    } = all_proof
        .get_challenges(all_stark, config)
        .get_challenges(config)
        .map_err(|_| anyhow::Error::msg("Invalid sampling of proof challenges."))?;

    let nums_permutation_zs = all_stark.nums_permutation_zs(config);
    let num_lookup_columns = all_stark.num_lookups_helper_columns(config);

    let AllStark {
        arithmetic_stark,
@ -57,7 +59,7 @@ where
        &all_proof.stark_proofs,
        cross_table_lookups,
        &ctl_challenges,
        &nums_permutation_zs,
        &num_lookup_columns,
    );

    verify_stark_proof_with_challenges(
@ -65,6 +67,7 @@ where
        &all_proof.stark_proofs[Table::Arithmetic as usize].proof,
        &stark_challenges[Table::Arithmetic as usize],
        &ctl_vars_per_table[Table::Arithmetic as usize],
        &ctl_challenges,
        config,
    )?;
    verify_stark_proof_with_challenges(
@ -72,6 +75,7 @@ where
        &all_proof.stark_proofs[Table::BytePacking as usize].proof,
        &stark_challenges[Table::BytePacking as usize],
        &ctl_vars_per_table[Table::BytePacking as usize],
        &ctl_challenges,
        config,
    )?;
    verify_stark_proof_with_challenges(
@ -79,6 +83,7 @@ where
        &all_proof.stark_proofs[Table::Cpu as usize].proof,
        &stark_challenges[Table::Cpu as usize],
        &ctl_vars_per_table[Table::Cpu as usize],
        &ctl_challenges,
        config,
    )?;
    verify_stark_proof_with_challenges(
@ -86,6 +91,7 @@ where
        &all_proof.stark_proofs[Table::Keccak as usize].proof,
        &stark_challenges[Table::Keccak as usize],
        &ctl_vars_per_table[Table::Keccak as usize],
        &ctl_challenges,
        config,
    )?;
    verify_stark_proof_with_challenges(
@ -93,6 +99,7 @@ where
        &all_proof.stark_proofs[Table::KeccakSponge as usize].proof,
        &stark_challenges[Table::KeccakSponge as usize],
        &ctl_vars_per_table[Table::KeccakSponge as usize],
        &ctl_challenges,
        config,
    )?;
    verify_stark_proof_with_challenges(
@ -100,6 +107,7 @@ where
        &all_proof.stark_proofs[Table::Logic as usize].proof,
        &stark_challenges[Table::Logic as usize],
        &ctl_vars_per_table[Table::Logic as usize],
        &ctl_challenges,
        config,
    )?;
    verify_stark_proof_with_challenges(
@ -107,6 +115,7 @@ where
        &all_proof.stark_proofs[Table::Memory as usize].proof,
        &stark_challenges[Table::Memory as usize],
        &ctl_vars_per_table[Table::Memory as usize],
        &ctl_challenges,
        config,
    )?;

@ -290,6 +299,7 @@ pub(crate) fn verify_stark_proof_with_challenges<
    proof: &StarkProof<F, C, D>,
    challenges: &StarkProofChallenges<F, D>,
    ctl_vars: &[CtlCheckVars<F, F::Extension, F::Extension, D>],
    ctl_challenges: &GrandProductChallengeSet<F>,
    config: &StarkConfig,
) -> Result<()> {
    log::debug!("Checking proof: {}", type_name::<S>());
@ -297,8 +307,8 @@ pub(crate) fn verify_stark_proof_with_challenges<
    let StarkOpeningSet {
        local_values,
        next_values,
        permutation_ctl_zs,
        permutation_ctl_zs_next,
        auxiliary_polys,
        auxiliary_polys_next,
        ctl_zs_first,
        quotient_polys,
    } = &proof.openings;
@ -318,17 +328,26 @@ pub(crate) fn verify_stark_proof_with_challenges<
        l_0,
        l_last,
    );
    let num_permutation_zs = stark.num_permutation_batches(config);
    let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckVars {
        local_zs: permutation_ctl_zs[..num_permutation_zs].to_vec(),
        next_zs: permutation_ctl_zs_next[..num_permutation_zs].to_vec(),
        permutation_challenge_sets: challenges.permutation_challenge_sets.clone().unwrap(),
    let num_lookup_columns = stark.num_lookup_helper_columns(config);
    let lookup_challenges = (num_lookup_columns > 0).then(|| {
        ctl_challenges
            .challenges
            .iter()
            .map(|ch| ch.beta)
            .collect::<Vec<_>>()
    });

    let lookup_vars = stark.uses_lookups().then(|| LookupCheckVars {
        local_values: auxiliary_polys[..num_lookup_columns].to_vec(),
        next_values: auxiliary_polys_next[..num_lookup_columns].to_vec(),
        challenges: lookup_challenges.unwrap(),
    });
    let lookups = stark.lookups();
    eval_vanishing_poly::<F, F::Extension, F::Extension, S, D, D>(
        stark,
        config,
        &vars,
        permutation_data,
        &lookups,
        lookup_vars,
        ctl_vars,
        &mut consumer,
    );
@ -354,7 +373,7 @@ pub(crate) fn verify_stark_proof_with_challenges<

    let merkle_caps = vec![
        proof.trace_cap.clone(),
        proof.permutation_ctl_zs_cap.clone(),
        proof.auxiliary_polys_cap.clone(),
        proof.quotient_polys_cap.clone(),
    ];

@ -388,7 +407,7 @@ where
{
    let StarkProof {
        trace_cap,
        permutation_ctl_zs_cap,
        auxiliary_polys_cap,
        quotient_polys_cap,
        openings,
        // The shape of the opening proof will be checked in the FRI verifier (see
@ -399,8 +418,8 @@ where
    let StarkOpeningSet {
        local_values,
        next_values,
        permutation_ctl_zs,
        permutation_ctl_zs_next,
        auxiliary_polys,
        auxiliary_polys_next,
        ctl_zs_first,
        quotient_polys,
    } = openings;
@ -408,16 +427,16 @@ where
    let degree_bits = proof.recover_degree_bits(config);
    let fri_params = config.fri_params(degree_bits);
    let cap_height = fri_params.config.cap_height;
    let num_zs = num_ctl_zs + stark.num_permutation_batches(config);
    let num_auxiliary = num_ctl_zs + stark.num_lookup_helper_columns(config);

    ensure!(trace_cap.height() == cap_height);
    ensure!(permutation_ctl_zs_cap.height() == cap_height);
    ensure!(auxiliary_polys_cap.height() == cap_height);
    ensure!(quotient_polys_cap.height() == cap_height);

    ensure!(local_values.len() == S::COLUMNS);
    ensure!(next_values.len() == S::COLUMNS);
    ensure!(permutation_ctl_zs.len() == num_zs);
    ensure!(permutation_ctl_zs_next.len() == num_zs);
    ensure!(auxiliary_polys.len() == num_auxiliary);
    ensure!(auxiliary_polys_next.len() == num_auxiliary);
    ensure!(ctl_zs_first.len() == num_ctl_zs);
    ensure!(quotient_polys.len() == stark.num_quotient_polys(config));