Filter range checks (#1433)

* Add filtering to range-checks

* Cleanup

* Fix Clippy

* Apply comment
This commit is contained in:
Linda Guiga 2023-12-18 23:27:12 +01:00 committed by GitHub
parent f67ee258a1
commit 18e08f4f61
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 136 additions and 55 deletions

View File

@ -14,7 +14,7 @@ use static_assertions::const_assert;
use super::columns::NUM_ARITH_COLUMNS;
use super::shift;
use crate::all_stark::Table;
use crate::arithmetic::columns::{RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
use crate::arithmetic::columns::{NUM_SHARED_COLS, RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{Column, Filter, TableWithColumns};
@ -290,11 +290,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
3
}
fn lookups(&self) -> Vec<Lookup> {
fn lookups(&self) -> Vec<Lookup<F>> {
vec![Lookup {
columns: SHARED_COLS.collect(),
table_column: RANGE_COUNTER,
frequencies_column: RC_FREQUENCIES,
columns: Column::singles(SHARED_COLS).collect(),
table_column: Column::single(RANGE_COUNTER),
frequencies_column: Column::single(RC_FREQUENCIES),
filter_columns: vec![None; NUM_SHARED_COLS],
}]
}
}

View File

@ -391,11 +391,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
3
}
fn lookups(&self) -> Vec<Lookup> {
fn lookups(&self) -> Vec<Lookup<F>> {
vec![Lookup {
columns: (value_bytes(0)..value_bytes(0) + NUM_BYTES).collect(),
table_column: RANGE_COUNTER,
frequencies_column: RC_FREQUENCIES,
columns: Column::singles(value_bytes(0)..value_bytes(0) + NUM_BYTES).collect(),
table_column: Column::single(RANGE_COUNTER),
frequencies_column: Column::single(RC_FREQUENCIES),
filter_columns: vec![None; NUM_BYTES],
}]
}
}

View File

@ -259,6 +259,14 @@ impl<F: Field> Column<F> {
res
}
/// Evaluates the column on all rows.
/// Returns one field element per row of `table`, obtained by evaluating
/// this column's linear combination at each row index via `eval_table`.
/// Assumes all polynomials in `table` share the length of `table[0]` —
/// TODO confirm this invariant holds for every caller.
pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues<F>]) -> Vec<F> {
let length = table[0].len();
(0..length)
.map(|row| self.eval_table(table, row))
.collect::<Vec<F>>()
}
/// Circuit version of `eval`: Given a row's targets, returns their linear combination.
pub(crate) fn eval_circuit<const D: usize>(
&self,
@ -401,6 +409,14 @@ impl<F: Field> Filter<F> {
.map(|col| col.eval_table(table, row))
.sum()
}
/// Evaluates the filter on all rows.
/// Returns one field element per row of `table`, obtained by evaluating
/// this filter at each row index via `eval_table`.
/// Assumes all polynomials in `table` share the length of `table[0]` —
/// TODO confirm this invariant holds for every caller.
pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues<F>]) -> Vec<F> {
let length = table[0].len();
(0..length)
.map(|row| self.eval_table(table, row))
.collect::<Vec<F>>()
}
}
/// A `Table` with a linear combination of columns and a filter.

View File

@ -800,11 +800,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeS
3
}
fn lookups(&self) -> Vec<Lookup> {
fn lookups(&self) -> Vec<Lookup<F>> {
vec![Lookup {
columns: get_block_bytes_range().collect(),
table_column: RANGE_COUNTER,
frequencies_column: RC_FREQUENCIES,
columns: Column::singles(get_block_bytes_range()).collect(),
table_column: Column::single(RANGE_COUNTER),
frequencies_column: Column::single(RC_FREQUENCIES),
filter_columns: vec![None; KECCAK_RATE_BYTES],
}]
}
}

View File

@ -1,6 +1,6 @@
use itertools::Itertools;
use num_bigint::BigUint;
use plonky2::field::batch_util::batch_add_inplace;
use plonky2::field::batch_util::{batch_add_inplace, batch_multiply_inplace};
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
@ -12,22 +12,27 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2_util::ceil_div_usize;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{Column, Filter};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::stark::Stark;
pub struct Lookup {
pub struct Lookup<F: Field> {
/// Columns whose values should be contained in the lookup table.
/// These are the f_i(x) polynomials in the logUp paper.
pub(crate) columns: Vec<usize>,
pub(crate) columns: Vec<Column<F>>,
/// Column containing the lookup table.
/// This is the t(x) polynomial in the paper.
pub(crate) table_column: usize,
pub(crate) table_column: Column<F>,
/// Column containing the frequencies of `columns` in `table_column`.
/// This is the m(x) polynomial in the paper.
pub(crate) frequencies_column: usize,
pub(crate) frequencies_column: Column<F>,
/// Columns to filter some elements. There is at most one filter
/// column per column to range-check.
pub(crate) filter_columns: Vec<Option<Filter<F>>>,
}
impl Lookup {
impl<F: Field> Lookup<F> {
pub(crate) fn num_helper_columns(&self, constraint_degree: usize) -> usize {
// One helper column for each column batch of size `constraint_degree-1`,
// then one column for the inverse of `table + challenge` and one for the `Z` polynomial.
@ -41,7 +46,7 @@ impl Lookup {
/// this computes the helper columns `h_i = 1/(x+f_{2i}) + 1/(x+f_{2i+1})`, `g = 1/(x+t)`,
/// and `Z(gx) = Z(x) + sum h_i(x) - m(x)g(x)` where `m` is the frequencies column.
pub(crate) fn lookup_helper_columns<F: Field>(
lookup: &Lookup,
lookup: &Lookup<F>,
trace_poly_values: &[PolynomialValues<F>],
challenge: F,
constraint_degree: usize,
@ -51,6 +56,8 @@ pub(crate) fn lookup_helper_columns<F: Field>(
"TODO: Allow other constraint degrees."
);
assert_eq!(lookup.columns.len(), lookup.filter_columns.len());
let num_total_logup_entries = trace_poly_values[0].values.len() * lookup.columns.len();
assert!(BigUint::from(num_total_logup_entries) < F::characteristic());
@ -67,30 +74,43 @@ pub(crate) fn lookup_helper_columns<F: Field>(
// h_k polynomials; instead there's a separate helper column for it (see below).
// * Here, we use 1 instead of -1 as the numerator (and subtract later).
// * Here, for now, the batch size (l) is always constraint_degree - 1 = 2.
for mut col_inds in &lookup.columns.iter().chunks(constraint_degree - 1) {
let first = *col_inds.next().unwrap();
// TODO: The clone could probably be avoided by using a modified version of `batch_multiplicative_inverse`
// taking `challenge` as an additional argument.
let mut column = trace_poly_values[first].values.clone();
for (i, mut col_inds) in (&lookup.columns.iter().chunks(constraint_degree - 1))
.into_iter()
.enumerate()
{
let first = col_inds.next().unwrap();
let mut column = first.eval_all_rows(trace_poly_values);
let length = column.len();
for x in column.iter_mut() {
*x = challenge + *x;
}
let mut acc = F::batch_multiplicative_inverse(&column);
for &ind in col_inds {
let mut column = trace_poly_values[ind].values.clone();
if let Some(filter) = &lookup.filter_columns[0] {
batch_multiply_inplace(&mut acc, &filter.eval_all_rows(trace_poly_values));
}
for (j, ind) in col_inds.enumerate() {
let mut column = ind.eval_all_rows(trace_poly_values);
for x in column.iter_mut() {
*x = challenge + *x;
}
column = F::batch_multiplicative_inverse(&column);
let filter_idx = (constraint_degree - 1) * i + j + 1;
if let Some(filter) = &lookup.filter_columns[filter_idx] {
batch_multiply_inplace(&mut column, &filter.eval_all_rows(trace_poly_values));
}
batch_add_inplace(&mut acc, &column);
}
helper_columns.push(acc.into());
}
// Add `1/(table+challenge)` to the helper columns.
// This is 1/phi_0(x) = 1/(x + t(x)) from the paper.
// Here, we don't include m(x) in the numerator, instead multiplying it with this column later.
let mut table = trace_poly_values[lookup.table_column].values.clone();
let mut table = lookup.table_column.eval_all_rows(trace_poly_values);
for x in table.iter_mut() {
*x = challenge + *x;
}
@ -100,7 +120,7 @@ pub(crate) fn lookup_helper_columns<F: Field>(
// This enforces the check from the paper, that the sum of the h_k(x) polynomials is 0 over H.
// In the paper, that sum includes m(x)/(x + t(x)) = frequencies(x)/g(x), because that was bundled
// into the h_k(x) polynomials.
let frequencies = &trace_poly_values[lookup.frequencies_column].values;
let frequencies = &lookup.frequencies_column.eval_all_rows(trace_poly_values);
let mut z = Vec::with_capacity(frequencies.len());
z.push(F::ZERO);
for i in 0..frequencies.len() - 1 {
@ -130,7 +150,7 @@ where
/// Constraints for the logUp lookup argument.
pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2: usize>(
stark: &S,
lookups: &[Lookup],
lookups: &[Lookup<F>],
vars: &S::EvaluationFrame<FE, P, D2>,
lookup_vars: LookupCheckVars<F, FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
@ -140,6 +160,8 @@ pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2:
P: PackedField<Scalar = FE>,
S: Stark<F, D>,
{
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();
let degree = stark.constraint_degree();
assert_eq!(degree, 3, "TODO: Allow other constraint degrees.");
let mut start = 0;
@ -147,19 +169,34 @@ pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2:
let num_helper_columns = lookup.num_helper_columns(degree);
for &challenge in &lookup_vars.challenges {
let challenge = FE::from_basefield(challenge);
// For each chunk, check that `h_i (x+f_2i) (x+f_2i+1) = (x+f_2i) + (x+f_2i+1)` if the chunk has length 2
// or if it has length 1, check that `h_i * (x+f_2i) = 1`, where x is the challenge
// For each chunk, check that `h_i (x+f_2i) (x+f_{2i+1}) = (x+f_2i) * filter_{2i+1} + (x+f_{2i+1}) * filter_2i` if the chunk has length 2
// or if it has length 1, check that `h_i * (x+f_2i) = filter_2i`, where x is the challenge
for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() {
let mut x = lookup_vars.local_values[start + j];
let mut y = P::ZEROS;
let fs = chunk.iter().map(|&k| vars.get_local_values()[k]);
for f in fs {
x *= f + challenge;
y += f + challenge;
let col_values = chunk
.iter()
.map(|col| col.eval_with_next(local_values, next_values));
let filters = lookup.filter_columns
[(degree - 1) * j..(degree - 1) * j + chunk.len()]
.iter()
.map(|maybe_filter| {
if let Some(filter) = maybe_filter {
filter.eval_filter(local_values, next_values)
} else {
P::ONES
}
})
.rev()
.collect::<Vec<_>>();
let last_filter_value = filters[0];
for (val, f) in col_values.zip_eq(filters) {
x *= val + challenge;
y += (val + challenge) * f;
}
match chunk.len() {
2 => yield_constr.constraint(x - y),
1 => yield_constr.constraint(x - P::ONES),
1 => yield_constr.constraint(x - last_filter_value),
_ => todo!("Allow other constraint degrees."),
}
}
@ -167,12 +204,12 @@ pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2:
// Check the `Z` polynomial.
let z = lookup_vars.local_values[start + num_helper_columns - 1];
let next_z = lookup_vars.next_values[start + num_helper_columns - 1];
let table_with_challenge = vars.get_local_values()[lookup.table_column] + challenge;
let table_with_challenge = lookup.table_column.eval(local_values) + challenge;
let y = lookup_vars.local_values[start..start + num_helper_columns - 1]
.iter()
.fold(P::ZEROS, |acc, x| acc + *x)
* table_with_challenge
- vars.get_local_values()[lookup.frequencies_column];
- lookup.frequencies_column.eval(local_values);
yield_constr.constraint((next_z - z) * table_with_challenge - y);
start += num_helper_columns;
}
@ -199,6 +236,9 @@ pub(crate) fn eval_ext_lookups_circuit<
let one = builder.one_extension();
let degree = stark.constraint_degree();
let lookups = stark.lookups();
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();
assert_eq!(degree, 3, "TODO: Allow other constraint degrees.");
let mut start = 0;
for lookup in lookups {
@ -208,11 +248,27 @@ pub(crate) fn eval_ext_lookups_circuit<
for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() {
let mut x = lookup_vars.local_values[start + j];
let mut y = builder.zero_extension();
let fs = chunk.iter().map(|&k| vars.get_local_values()[k]);
for f in fs {
let tmp = builder.add_extension(f, challenge);
let col_values = chunk
.iter()
.map(|k| k.eval_with_next_circuit(builder, local_values, next_values))
.collect::<Vec<_>>();
let filters = lookup.filter_columns
[(degree - 1) * j..(degree - 1) * j + chunk.len()]
.iter()
.map(|maybe_filter| {
if let Some(filter) = maybe_filter {
filter.eval_filter_circuit(builder, local_values, next_values)
} else {
one
}
})
.rev()
.collect::<Vec<_>>();
let last_filter_value = filters[0];
for (&val, f) in col_values.iter().zip_eq(filters) {
let tmp = builder.add_extension(val, challenge);
x = builder.mul_extension(x, tmp);
y = builder.add_extension(y, tmp);
y = builder.mul_add_extension(f, tmp, y);
}
match chunk.len() {
2 => {
@ -220,7 +276,7 @@ pub(crate) fn eval_ext_lookups_circuit<
yield_constr.constraint(builder, tmp)
}
1 => {
let tmp = builder.sub_extension(x, one);
let tmp = builder.sub_extension(x, last_filter_value);
yield_constr.constraint(builder, tmp)
}
_ => todo!("Allow other constraint degrees."),
@ -229,14 +285,19 @@ pub(crate) fn eval_ext_lookups_circuit<
let z = lookup_vars.local_values[start + num_helper_columns - 1];
let next_z = lookup_vars.next_values[start + num_helper_columns - 1];
let table_with_challenge =
builder.add_extension(vars.get_local_values()[lookup.table_column], challenge);
let table_column = lookup
.table_column
.eval_circuit(builder, vars.get_local_values());
let table_with_challenge = builder.add_extension(table_column, challenge);
let mut y = builder.add_many_extension(
&lookup_vars.local_values[start..start + num_helper_columns - 1],
);
let frequencies_column = lookup
.frequencies_column
.eval_circuit(builder, vars.get_local_values());
y = builder.mul_extension(y, table_with_challenge);
y = builder.sub_extension(y, vars.get_local_values()[lookup.frequencies_column]);
y = builder.sub_extension(y, frequencies_column);
let mut constraint = builder.sub_extension(next_z, z);
constraint = builder.mul_extension(constraint, table_with_challenge);

View File

@ -528,11 +528,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
3
}
fn lookups(&self) -> Vec<Lookup> {
fn lookups(&self) -> Vec<Lookup<F>> {
vec![Lookup {
columns: vec![RANGE_CHECK],
table_column: COUNTER,
frequencies_column: FREQUENCIES,
columns: vec![Column::single(RANGE_CHECK)],
table_column: Column::single(COUNTER),
frequencies_column: Column::single(FREQUENCIES),
filter_columns: vec![None],
}]
}
}

View File

@ -532,7 +532,7 @@ fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
trace_commitment: &'a PolynomialBatch<F, C, D>,
auxiliary_polys_commitment: &'a PolynomialBatch<F, C, D>,
lookup_challenges: Option<&'a Vec<F>>,
lookups: &[Lookup],
lookups: &[Lookup<F>],
ctl_data: &CtlData<F>,
alphas: Vec<F>,
degree_bits: usize,
@ -688,7 +688,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
trace_commitment: &'a PolynomialBatch<F, C, D>,
auxiliary_commitment: &'a PolynomialBatch<F, C, D>,
lookup_challenges: Option<&'a Vec<F>>,
lookups: &[Lookup],
lookups: &[Lookup<F>],
ctl_data: &CtlData<F>,
alphas: Vec<F>,
degree_bits: usize,

View File

@ -207,7 +207,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
}
}
fn lookups(&self) -> Vec<Lookup> {
fn lookups(&self) -> Vec<Lookup<F>> {
vec![]
}

View File

@ -19,7 +19,7 @@ use crate::stark::Stark;
pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
stark: &S,
vars: &S::EvaluationFrame<FE, P, D2>,
lookups: &[Lookup],
lookups: &[Lookup<F>],
lookup_vars: Option<LookupCheckVars<F, FE, P, D2>>,
ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
consumer: &mut ConstraintConsumer<P>,