Mirror of https://github.com/logos-storage/plonky2.git, synced 2026-01-03 06:13:07 +00:00
Implement degree 2 filters (#1404)
* Implement degree 2 filters
* Apply comments
parent 2d36559dad
commit 46b6aa108d
@@ -36,12 +36,14 @@ The various STARK tables carry out independent operations, but on shared values.
 Suppose STARK $S_1$ requires an operation -- say $Op$ -- that is carried out by another STARK $S_2$. Then $S_1$ writes the input and output of $Op$ in its own table, and provides the inputs to $S_2$. $S_2$ also writes the inputs and outputs in its rows, and the table's constraints check that $Op$ is carried out correctly. We then need to ensure that the inputs and outputs are the same in $S_1$ and $S_2$.
 
-In other words, we need to ensure that the rows -- reduced to the input and output columns -- of $S_1$ calling $Op$ are permutations of the rows of $S_2$ that carry out $Op$. Our CTL protocol is also based on logUp and is similar to our range-checks.
+In other words, we need to ensure that the rows -- reduced to the input and output columns -- of $S_1$ calling $Op$ are permutations of the rows of $S_2$ that carry out $Op$. Our CTL protocol is based on logUp and is similar to our range-checks.
 
 To prove this, the first step is to only select the rows of interest in $S_1$ and $S_2$, and filter out the rest. Let $f^1$ be the filter for $S_1$ and $f^2$ the filter for $S_2$. $f^1$ and $f^2$ are constrained to be in $\{0, 1\}$. $f^1 = 1$ (resp. $f^2 = 1$) whenever the row at hand carries out $Op$ in $S_1$ (resp. in $S_2$), and 0 otherwise. Let also $(\alpha, \beta)$ be two random challenges.
 
 The idea is to create subtables $S_1'$ and $S_2'$ of $S_1$ and $S_2$ respectively, such that $f^1 = 1$ and $f^2 = 1$ for all their rows. The columns in the subtables are limited to the ones whose values must be identical (the inputs and outputs of $Op$ in our example).
 
+Note that for design and constraint reasons, filters are limited to (at most) degree 2 combinations of columns.
+
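As an illustrative example (not drawn from the diff): a filter selecting the rows where two binary flag columns $c_a$ and $c_b$ are both set can be written $f = c_a \cdot c_b$, which is degree 2 and therefore allowed, while a filter combining mutually exclusive flags, such as $f = c_a + c_b$, stays at degree 1.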
 Let $\{c^{1, i}\}_{i=1}^m$ be the columns in $S_1'$ and $\{c^{2,i}\}_{i=1}^m$ be the columns in $S_2'$.
 
 The prover defines a ``running sum'' $Z$ for $S_1'$ such that:
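The definition of $Z$ lies beyond this hunk. As a hedged sketch consistent with the filters and challenges introduced above (an assumption, not text from the diff), a logUp-style running sum adds, for each row $n$ of $S_1'$ selected by $f^1$, the inverse of the randomized column combination:

$$Z_{n+1} = Z_n + f^1_n \cdot \left(\alpha + \sum_{i=1}^{m} \beta^{i} \cdot c^{1,i}_n\right)^{-1},$$

and the analogous running sum for $S_2'$ must reach the same final value.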
Binary file not shown.
@@ -17,7 +17,7 @@ use crate::all_stark::Table;
 use crate::arithmetic::columns::{RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
 use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation};
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::cross_table_lookup::{Column, TableWithColumns};
+use crate::cross_table_lookup::{Column, Filter, TableWithColumns};
 use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::lookup::Lookup;
 use crate::stark::Stark;
@@ -98,7 +98,9 @@ pub(crate) fn ctl_arithmetic_rows<F: Field>() -> TableWithColumns<F> {
     let mut filter_cols = COMBINED_OPS.to_vec();
     filter_cols.push((columns::IS_RANGE_CHECK, 0x01));
 
-    let filter_column = Some(Column::sum(filter_cols.iter().map(|(c, _v)| *c)));
+    let filter = Some(Filter::new_simple(Column::sum(
+        filter_cols.iter().map(|(c, _v)| *c),
+    )));
 
     let mut all_combined_cols = COMBINED_OPS.to_vec();
     all_combined_cols.push((columns::OPCODE_COL, 0x01));
@@ -110,7 +112,7 @@ pub(crate) fn ctl_arithmetic_rows<F: Field>() -> TableWithColumns<F> {
     TableWithColumns::new(
         Table::Arithmetic,
         cpu_arith_data_link(&all_combined_cols, &REGISTER_MAP),
-        filter_column,
+        filter,
     )
 }
@@ -45,7 +45,7 @@ use crate::byte_packing::columns::{
     NUM_COLUMNS, RANGE_COUNTER, RC_FREQUENCIES, TIMESTAMP,
 };
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::cross_table_lookup::Column;
+use crate::cross_table_lookup::{Column, Filter};
 use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::lookup::Lookup;
 use crate::stark::Stark;
@@ -84,10 +84,10 @@ pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for the `BytePackingStark` looked table.
-pub(crate) fn ctl_looked_filter<F: Field>() -> Column<F> {
+pub(crate) fn ctl_looked_filter<F: Field>() -> Filter<F> {
     // The CPU table is only interested in our sequence end rows,
     // since those contain the final limbs of our packed int.
-    Column::sum((0..NUM_BYTES).map(index_len))
+    Filter::new_simple(Column::sum((0..NUM_BYTES).map(index_len)))
 }
 
 /// Column linear combination for the `BytePackingStark` table reading/writing the `i`th byte sequence from `MemoryStark`.
@@ -119,8 +119,8 @@ pub(crate) fn ctl_looking_memory<F: Field>(i: usize) -> Vec<Column<F>> {
 }
 
 /// CTL filter for reading/writing the `i`th byte of the byte sequence from/to memory.
-pub(crate) fn ctl_looking_memory_filter<F: Field>(i: usize) -> Column<F> {
-    Column::sum((i..NUM_BYTES).map(index_len))
+pub(crate) fn ctl_looking_memory_filter<F: Field>(i: usize) -> Filter<F> {
+    Filter::new_simple(Column::sum((i..NUM_BYTES).map(index_len)))
 }
 
 /// Information about a byte packing operation needed for witness generation.
@@ -19,7 +19,7 @@ use crate::cpu::{
     byte_unpacking, clock, contextops, control_flow, decode, dup_swap, gas, jumps, membus, memio,
     modfp254, pc, push0, shift, simple_logic, stack, syscalls_exceptions,
 };
-use crate::cross_table_lookup::{Column, TableWithColumns};
+use crate::cross_table_lookup::{Column, Filter, TableWithColumns};
 use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::memory::segments::Segment;
 use crate::memory::{NUM_CHANNELS, VALUE_LIMBS};
@@ -48,8 +48,8 @@ pub(crate) fn ctl_data_keccak_sponge<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for a call to the Keccak sponge.
-pub(crate) fn ctl_filter_keccak_sponge<F: Field>() -> Column<F> {
-    Column::single(COL_MAP.is_keccak_sponge)
+pub(crate) fn ctl_filter_keccak_sponge<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(COL_MAP.is_keccak_sponge))
 }
 
 /// Creates the vector of `Columns` corresponding to the two inputs and
@@ -82,8 +82,8 @@ pub(crate) fn ctl_data_logic<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for logic operations.
-pub(crate) fn ctl_filter_logic<F: Field>() -> Column<F> {
-    Column::single(COL_MAP.op.logic_op)
+pub(crate) fn ctl_filter_logic<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(COL_MAP.op.logic_op))
 }
 
 /// Returns the `TableWithColumns` for the CPU rows calling arithmetic operations.
@@ -99,7 +99,7 @@ pub(crate) fn ctl_arithmetic_base_rows<F: Field>() -> TableWithColumns<F> {
     TableWithColumns::new(
         Table::Cpu,
         columns,
-        Some(Column::sum([
+        Some(Filter::new_simple(Column::sum([
            COL_MAP.op.binary_op,
            COL_MAP.op.fp254_op,
            COL_MAP.op.ternary_op,
@@ -107,7 +107,7 @@ pub(crate) fn ctl_arithmetic_base_rows<F: Field>() -> TableWithColumns<F> {
            COL_MAP.op.prover_input,
            COL_MAP.op.syscall,
            COL_MAP.op.exception,
-        ])),
+        ]))),
     )
 }
@@ -118,8 +118,8 @@ pub(crate) fn ctl_data_byte_packing<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for the `MLOAD_32BYTES` operation.
-pub(crate) fn ctl_filter_byte_packing<F: Field>() -> Column<F> {
-    Column::single(COL_MAP.op.mload_32bytes)
+pub(crate) fn ctl_filter_byte_packing<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(COL_MAP.op.mload_32bytes))
 }
 
 /// Creates the vector of `Columns` corresponding to the contents of General Purpose channels when calling byte unpacking.
@@ -152,8 +152,8 @@ pub(crate) fn ctl_data_byte_unpacking<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for the `MSTORE_32BYTES` operation.
-pub(crate) fn ctl_filter_byte_unpacking<F: Field>() -> Column<F> {
-    Column::single(COL_MAP.op.mstore_32bytes)
+pub(crate) fn ctl_filter_byte_unpacking<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(COL_MAP.op.mstore_32bytes))
 }
 
 /// Creates the vector of `Columns` corresponding to the contents of the CPU registers when performing a `PUSH`.
@@ -179,8 +179,8 @@ pub(crate) fn ctl_data_byte_packing_push<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for the `PUSH` operation.
-pub(crate) fn ctl_filter_byte_packing_push<F: Field>() -> Column<F> {
-    Column::single(COL_MAP.op.push)
+pub(crate) fn ctl_filter_byte_packing_push<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(COL_MAP.op.push))
 }
 
 /// Index of the memory channel storing code.
@@ -254,17 +254,17 @@ pub(crate) fn ctl_data_partial_memory<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for code read and write operations.
-pub(crate) fn ctl_filter_code_memory<F: Field>() -> Column<F> {
-    Column::sum(COL_MAP.op.iter())
+pub(crate) fn ctl_filter_code_memory<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::sum(COL_MAP.op.iter()))
 }
 
 /// CTL filter for General Purpose memory read and write operations.
-pub(crate) fn ctl_filter_gp_memory<F: Field>(channel: usize) -> Column<F> {
-    Column::single(COL_MAP.mem_channels[channel].used)
+pub(crate) fn ctl_filter_gp_memory<F: Field>(channel: usize) -> Filter<F> {
+    Filter::new_simple(Column::single(COL_MAP.mem_channels[channel].used))
 }
 
-pub(crate) fn ctl_filter_partial_memory<F: Field>() -> Column<F> {
-    Column::single(COL_MAP.partial_channel.used)
+pub(crate) fn ctl_filter_partial_memory<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(COL_MAP.partial_channel.used))
 }
 
 /// Structure representing the CPU Stark.
@@ -315,27 +315,111 @@ impl<F: Field> Column<F> {
     }
 }
 
+/// Represents a CTL filter, which evaluates to 1 if the row must be considered for the CTL and 0 otherwise.
+/// It's an arbitrary degree 2 combination of columns: `products` are the degree 2 terms, and `constants` are
+/// the degree 1 terms.
+#[derive(Clone, Debug)]
+pub(crate) struct Filter<F: Field> {
+    products: Vec<(Column<F>, Column<F>)>,
+    constants: Vec<Column<F>>,
+}
+
+impl<F: Field> Filter<F> {
+    pub(crate) fn new(products: Vec<(Column<F>, Column<F>)>, constants: Vec<Column<F>>) -> Self {
+        Self {
+            products,
+            constants,
+        }
+    }
+
+    /// Returns a filter made of a single column.
+    pub(crate) fn new_simple(col: Column<F>) -> Self {
+        Self {
+            products: vec![],
+            constants: vec![col],
+        }
+    }
+
+    /// Given the column values for the current and next rows, evaluates the filter.
+    pub(crate) fn eval_filter<FE, P, const D: usize>(&self, v: &[P], next_v: &[P]) -> P
+    where
+        FE: FieldExtension<D, BaseField = F>,
+        P: PackedField<Scalar = FE>,
+    {
+        self.products
+            .iter()
+            .map(|(col1, col2)| col1.eval_with_next(v, next_v) * col2.eval_with_next(v, next_v))
+            .sum::<P>()
+            + self
+                .constants
+                .iter()
+                .map(|col| col.eval_with_next(v, next_v))
+                .sum::<P>()
+    }
+
+    /// Circuit version of `eval_filter`:
+    /// Given the column values for the current and next rows, evaluates the filter.
+    pub(crate) fn eval_filter_circuit<const D: usize>(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        v: &[ExtensionTarget<D>],
+        next_v: &[ExtensionTarget<D>],
+    ) -> ExtensionTarget<D>
+    where
+        F: RichField + Extendable<D>,
+    {
+        let prods = self
+            .products
+            .iter()
+            .map(|(col1, col2)| {
+                let col1_eval = col1.eval_with_next_circuit(builder, v, next_v);
+                let col2_eval = col2.eval_with_next_circuit(builder, v, next_v);
+                builder.mul_extension(col1_eval, col2_eval)
+            })
+            .collect::<Vec<_>>();
+
+        let consts = self
+            .constants
+            .iter()
+            .map(|col| col.eval_with_next_circuit(builder, v, next_v))
+            .collect::<Vec<_>>();
+
+        let prods = builder.add_many_extension(prods);
+        let consts = builder.add_many_extension(consts);
+        builder.add_extension(prods, consts)
+    }
+
+    /// Evaluate on a row of a table given in column-major form.
+    pub(crate) fn eval_table(&self, table: &[PolynomialValues<F>], row: usize) -> F {
+        self.products
+            .iter()
+            .map(|(col1, col2)| col1.eval_table(table, row) * col2.eval_table(table, row))
+            .sum::<F>()
+            + self
+                .constants
+                .iter()
+                .map(|col| col.eval_table(table, row))
+                .sum()
+    }
+}
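To make the `products`/`constants` split concrete, here is a hedged usage sketch of the new API (the column indices are hypothetical, not code from this commit):

```rust
// Evaluates to col_0 * col_1 + col_2 on each row: one degree 2 product
// term plus one degree 1 term, exactly the shape `eval_filter` sums over.
let degree2_filter = Filter::new(
    vec![(Column::single(0), Column::single(1))],
    vec![Column::single(2)],
);

// The degenerate case built by `new_simple`: no product terms and a single
// degree 1 term, so the filter evaluates to col_3 alone.
let simple_filter = Filter::new_simple(Column::single(3));
```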
 /// A `Table` with a linear combination of columns and a filter.
-/// `filter_column` is used to determine the rows to select in `Table`.
+/// `filter` is used to determine the rows to select in `Table`.
 /// `columns` represents linear combinations of the columns of `Table`.
 #[derive(Clone, Debug)]
 pub(crate) struct TableWithColumns<F: Field> {
     table: Table,
     columns: Vec<Column<F>>,
-    pub(crate) filter_column: Option<Column<F>>,
+    pub(crate) filter: Option<Filter<F>>,
 }
 
 impl<F: Field> TableWithColumns<F> {
-    /// Generates a new `TableWithColumns` given a `Table`, a linear combination of columns `columns` and a `filter_column`.
-    pub(crate) fn new(
-        table: Table,
-        columns: Vec<Column<F>>,
-        filter_column: Option<Column<F>>,
-    ) -> Self {
+    /// Generates a new `TableWithColumns` given a `Table`, a linear combination of columns `columns` and a `filter`.
+    pub(crate) fn new(table: Table, columns: Vec<Column<F>>, filter: Option<Filter<F>>) -> Self {
         Self {
             table,
             columns,
-            filter_column,
+            filter,
         }
     }
 }
@@ -395,7 +479,7 @@ pub(crate) struct CtlZData<F: Field> {
     /// Column linear combination for the current table.
     pub(crate) columns: Vec<Column<F>>,
     /// Filter column for the current table. It evaluates to either 1 or 0.
-    pub(crate) filter_column: Option<Column<F>>,
+    pub(crate) filter: Option<Filter<F>>,
 }
 
 impl<F: Field> CtlData<F> {
@@ -562,14 +646,14 @@ pub(crate) fn cross_table_lookup_data<F: RichField, const D: usize>(
         partial_sums(
             &trace_poly_values[table.table as usize],
             &table.columns,
-            &table.filter_column,
+            &table.filter,
             challenge,
         )
     });
     let z_looked = partial_sums(
         &trace_poly_values[looked_table.table as usize],
         &looked_table.columns,
-        &looked_table.filter_column,
+        &looked_table.filter,
         challenge,
     );
     for (table, z) in looking_tables.iter().zip(zs_looking) {
@@ -579,7 +663,7 @@ pub(crate) fn cross_table_lookup_data<F: RichField, const D: usize>(
             z,
             challenge,
             columns: table.columns.clone(),
-            filter_column: table.filter_column.clone(),
+            filter: table.filter.clone(),
         });
     }
     ctl_data_per_table[looked_table.table as usize]
@@ -588,7 +672,7 @@ pub(crate) fn cross_table_lookup_data<F: RichField, const D: usize>(
             z: z_looked,
             challenge,
             columns: looked_table.columns.clone(),
-            filter_column: looked_table.filter_column.clone(),
+            filter: looked_table.filter.clone(),
         });
     }
 }
@@ -606,7 +690,7 @@ pub(crate) fn cross_table_lookup_data<F: RichField, const D: usize>(
 fn partial_sums<F: Field>(
     trace: &[PolynomialValues<F>],
     columns: &[Column<F>],
-    filter_column: &Option<Column<F>>,
+    filter: &Option<Filter<F>>,
     challenge: GrandProductChallenge<F>,
 ) -> PolynomialValues<F> {
     let mut partial_sum = F::ZERO;
@@ -615,12 +699,12 @@ fn partial_sums<F: Field>(
     let mut res = Vec::with_capacity(degree);
 
     for i in (0..degree).rev() {
-        if let Some(column) = filter_column {
-            let filter = column.eval_table(trace, i);
-            if filter.is_one() {
+        if let Some(filter) = filter {
+            let filter_val = filter.eval_table(trace, i);
+            if filter_val.is_one() {
                 filters.push(true);
             } else {
-                assert_eq!(filter, F::ZERO, "Non-binary filter?");
+                assert_eq!(filter_val, F::ZERO, "Non-binary filter?");
                 filters.push(false);
             }
         } else {
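The accumulation that consumes these filter bits falls outside the hunk. The following standalone sketch shows the idea it feeds, under the assumption (an illustration, not the crate's code) that each selected row contributes the inverse of its challenge-combined columns to the logUp running sum:

```rust
use plonky2::field::types::Field;

// Hedged sketch: given per-row filter bits and the already-combined
// (challenge-randomized) column value of each row, build the running sum
// from the last row upwards, mirroring the `(0..degree).rev()` iteration
// in `partial_sums` above.
fn logup_partial_sums<F: Field>(filters: &[bool], combined: &[F]) -> Vec<F> {
    let mut partial_sum = F::ZERO;
    let mut res = Vec::with_capacity(filters.len());
    for i in (0..filters.len()).rev() {
        if filters[i] {
            // Each selected row contributes 1 / combined[i].
            partial_sum += combined[i].inverse();
        }
        res.push(partial_sum);
    }
    res.reverse();
    res
}
```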
@@ -673,8 +757,8 @@ where
     pub(crate) challenges: GrandProductChallenge<F>,
     /// Column linear combinations of the `CrossTableLookup`s.
     pub(crate) columns: &'a [Column<F>],
-    /// Column linear combination that evaluates to either 1 or 0.
-    pub(crate) filter_column: &'a Option<Column<F>>,
+    /// Filter that evaluates to either 1 or 0.
+    pub(crate) filter: &'a Option<Filter<F>>,
 }
 
 impl<'a, F: RichField + Extendable<D>, const D: usize>
@@ -714,7 +798,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
                     next_z: *looking_z_next,
                     challenges,
                     columns: &table.columns,
-                    filter_column: &table.filter_column,
+                    filter: &table.filter,
                 });
             }
 
@@ -724,7 +808,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
                 next_z: *looked_z_next,
                 challenges,
                 columns: &looked_table.columns,
-                filter_column: &looked_table.filter_column,
+                filter: &looked_table.filter,
             });
         }
     }
@@ -758,7 +842,7 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
         next_z,
         challenges,
         columns,
-        filter_column,
+        filter,
     } = lookup_vars;
 
     // Compute all linear combinations on the current table, and combine them using the challenge.
@@ -767,8 +851,8 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
         .map(|c| c.eval_with_next(local_values, next_values))
         .collect::<Vec<_>>();
     let combined = challenges.combine(evals.iter());
-    let local_filter = if let Some(column) = filter_column {
-        column.eval_with_next(local_values, next_values)
+    let local_filter = if let Some(combin) = filter {
+        combin.eval_filter(local_values, next_values)
     } else {
         P::ONES
     };
@@ -791,8 +875,8 @@ pub(crate) struct CtlCheckVarsTarget<'a, F: Field, const D: usize> {
     pub(crate) challenges: GrandProductChallenge<Target>,
     /// Column linear combinations of the `CrossTableLookup`s.
     pub(crate) columns: &'a [Column<F>],
-    /// Column linear combination that evaluates to either 1 or 0.
-    pub(crate) filter_column: &'a Option<Column<F>>,
+    /// Filter that evaluates to either 1 or 0.
+    pub(crate) filter: &'a Option<Filter<F>>,
 }
 
 impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> {
@@ -831,7 +915,7 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> {
                     next_z: *looking_z_next,
                     challenges,
                     columns: &looking_table.columns,
-                    filter_column: &looking_table.filter_column,
+                    filter: &looking_table.filter,
                 });
             }
         }
@@ -843,7 +927,7 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> {
                 next_z: *looked_z_next,
                 challenges,
                 columns: &looked_table.columns,
-                filter_column: &looked_table.filter_column,
+                filter: &looked_table.filter,
             });
         }
     }
@@ -879,12 +963,12 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
         next_z,
         challenges,
         columns,
-        filter_column,
+        filter,
     } = lookup_vars;
 
     let one = builder.one_extension();
-    let local_filter = if let Some(column) = filter_column {
-        column.eval_circuit(builder, local_values)
+    let local_filter = if let Some(combin) = filter {
+        combin.eval_filter_circuit(builder, local_values, next_values)
     } else {
         one
     };
@@ -1060,8 +1144,8 @@ pub(crate) mod testutils {
     ) {
         let trace = &trace_poly_values[table.table as usize];
         for i in 0..trace[0].len() {
-            let filter = if let Some(column) = &table.filter_column {
-                column.eval_table(trace, i)
+            let filter = if let Some(combin) = &table.filter {
+                combin.eval_table(trace, i)
             } else {
                 F::ONE
             };
@@ -13,7 +13,7 @@ use plonky2::util::timing::TimingTree;
 
 use super::columns::reg_input_limb;
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::cross_table_lookup::Column;
+use crate::cross_table_lookup::{Column, Filter};
 use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::keccak::columns::{
     reg_a, reg_a_prime, reg_a_prime_prime, reg_a_prime_prime_0_0_bit, reg_a_prime_prime_prime,
@@ -48,13 +48,13 @@ pub(crate) fn ctl_data_outputs<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for the first round of the Keccak permutation.
-pub(crate) fn ctl_filter_inputs<F: Field>() -> Column<F> {
-    Column::single(reg_step(0))
+pub(crate) fn ctl_filter_inputs<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(reg_step(0)))
 }
 
 /// CTL filter for the final round of the Keccak permutation.
-pub(crate) fn ctl_filter_outputs<F: Field>() -> Column<F> {
-    Column::single(reg_step(NUM_ROUNDS - 1))
+pub(crate) fn ctl_filter_outputs<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(reg_step(NUM_ROUNDS - 1)))
 }
 
 #[derive(Copy, Clone, Default)]
@@ -633,7 +633,7 @@ mod tests {
 
     use crate::config::StarkConfig;
     use crate::cross_table_lookup::{
-        Column, CtlData, CtlZData, GrandProductChallenge, GrandProductChallengeSet,
+        Column, CtlData, CtlZData, Filter, GrandProductChallenge, GrandProductChallengeSet,
     };
     use crate::keccak::columns::reg_output_limb;
     use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS};
@@ -748,7 +748,7 @@ mod tests {
                 gamma: F::ZERO,
             },
             columns: vec![],
-            filter_column: Some(Column::constant(F::ZERO)),
+            filter: Some(Filter::new_simple(Column::constant(F::ZERO))),
         };
         let ctl_data = CtlData {
             zs_columns: vec![ctl_z_data.clone(); config.num_challenges],
@@ -17,7 +17,7 @@ use plonky2_util::ceil_div_usize;
 
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cpu::kernel::keccak_util::keccakf_u32s;
-use crate::cross_table_lookup::Column;
+use crate::cross_table_lookup::{Column, Filter};
 use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::keccak_sponge::columns::*;
 use crate::lookup::Lookup;
@@ -185,35 +185,41 @@ pub(crate) fn ctl_looking_logic<F: Field>(i: usize) -> Vec<Column<F>> {
 }
 
 /// CTL filter for the final block rows of the `KeccakSponge` table.
-pub(crate) fn ctl_looked_filter<F: Field>() -> Column<F> {
+pub(crate) fn ctl_looked_filter<F: Field>() -> Filter<F> {
     // The CPU table is only interested in our final-block rows, since those contain the final
     // sponge output.
-    Column::sum(KECCAK_SPONGE_COL_MAP.is_final_input_len)
+    Filter::new_simple(Column::sum(KECCAK_SPONGE_COL_MAP.is_final_input_len))
 }
 
 /// CTL filter for reading the `i`th byte of input from memory.
-pub(crate) fn ctl_looking_memory_filter<F: Field>(i: usize) -> Column<F> {
+pub(crate) fn ctl_looking_memory_filter<F: Field>(i: usize) -> Filter<F> {
     // We perform the `i`th read if either
     //  - this is a full input block, or
     //  - this is a final block of length `i` or greater
     let cols = KECCAK_SPONGE_COL_MAP;
     if i == KECCAK_RATE_BYTES - 1 {
-        Column::single(cols.is_full_input_block)
+        Filter::new_simple(Column::single(cols.is_full_input_block))
     } else {
-        Column::sum(once(&cols.is_full_input_block).chain(&cols.is_final_input_len[i + 1..]))
+        Filter::new_simple(Column::sum(
+            once(&cols.is_full_input_block).chain(&cols.is_final_input_len[i + 1..]),
+        ))
     }
 }
 
 /// CTL filter for looking at XORs in the logic table.
-pub(crate) fn ctl_looking_logic_filter<F: Field>() -> Column<F> {
+pub(crate) fn ctl_looking_logic_filter<F: Field>() -> Filter<F> {
     let cols = KECCAK_SPONGE_COL_MAP;
-    Column::sum(once(&cols.is_full_input_block).chain(&cols.is_final_input_len))
+    Filter::new_simple(Column::sum(
+        once(&cols.is_full_input_block).chain(&cols.is_final_input_len),
+    ))
 }
 
 /// CTL filter for looking at the input and output in the Keccak table.
-pub(crate) fn ctl_looking_keccak_filter<F: Field>() -> Column<F> {
+pub(crate) fn ctl_looking_keccak_filter<F: Field>() -> Filter<F> {
     let cols = KECCAK_SPONGE_COL_MAP;
-    Column::sum(once(&cols.is_full_input_block).chain(&cols.is_final_input_len))
+    Filter::new_simple(Column::sum(
+        once(&cols.is_full_input_block).chain(&cols.is_final_input_len),
+    ))
 }
 
 /// Information about a Keccak sponge operation needed for witness generation.
@@ -13,7 +13,7 @@ use plonky2::util::timing::TimingTree;
 use plonky2_util::ceil_div_usize;
 
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::cross_table_lookup::Column;
+use crate::cross_table_lookup::{Column, Filter};
 use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::logic::columns::NUM_COLUMNS;
 use crate::stark::Stark;
@@ -79,8 +79,12 @@ pub(crate) fn ctl_data<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for logic operations.
-pub(crate) fn ctl_filter<F: Field>() -> Column<F> {
-    Column::sum([columns::IS_AND, columns::IS_OR, columns::IS_XOR])
+pub(crate) fn ctl_filter<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::sum([
+        columns::IS_AND,
+        columns::IS_OR,
+        columns::IS_XOR,
+    ]))
 }
 
 /// Structure representing the Logic STARK, which computes all logic operations.
@@ -14,7 +14,7 @@ use plonky2::util::transpose;
 use plonky2_maybe_rayon::*;
 
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::cross_table_lookup::Column;
+use crate::cross_table_lookup::{Column, Filter};
 use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::lookup::Lookup;
 use crate::memory::columns::{
@@ -41,8 +41,8 @@ pub(crate) fn ctl_data<F: Field>() -> Vec<Column<F>> {
 }
 
 /// CTL filter for memory operations.
-pub(crate) fn ctl_filter<F: Field>() -> Column<F> {
-    Column::single(FILTER)
+pub(crate) fn ctl_filter<F: Field>() -> Filter<F> {
+    Filter::new_simple(Column::single(FILTER))
 }
 
 #[derive(Copy, Clone, Default)]
@@ -615,7 +615,7 @@ where
                 [num_lookup_columns + i],
             challenges: zs_columns.challenge,
             columns: &zs_columns.columns,
-            filter_column: &zs_columns.filter_column,
+            filter: &zs_columns.filter,
         })
         .collect::<Vec<_>>();
 
@@ -738,7 +738,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
             next_z: auxiliary_subgroup_evals[i_next][num_lookup_columns + i],
             challenges: zs_columns.challenge,
             columns: &zs_columns.columns,
-            filter_column: &zs_columns.filter_column,
+            filter: &zs_columns.filter,
         })
         .collect::<Vec<_>>();
     // Evaluate the polynomial combining all constraints, including those associated