Merge pull request #558 from mir-protocol/filtered_ctl

Filtered CTL
wborgeaud 2022-06-13 19:21:06 +02:00 committed by GitHub
commit 1356b980c6
5 changed files with 279 additions and 128 deletions
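
Filtered cross-table lookups: instead of padding every lookup with a vector of default values, each side of a `CrossTableLookup` is now a `TableWithColumns`, i.e. the looked-up columns plus an optional list of filter columns whose sum must be 0 or 1 on every row. A row is multiplied into its table's Z-polynomial only when that filter is 1; otherwise it contributes the neutral factor 1 through `select(f, x) = f * x + (1 - f)`, and `default` becomes an `Option` that is only needed when no filters are used. The sketch below is a simplified, out-of-circuit illustration of that running product; the toy `combine`, the explicit `beta`/`gamma` randomness and the name `filtered_z` are stand-ins and not part of the plonky2 API.

/// Goldilocks modulus, the field these STARKs work over.
const P: u128 = 0xffff_ffff_0000_0001;

/// Toy random linear combination of the selected columns
/// (a stand-in for `GrandProductChallenge::combine`):
/// gamma + sum_i beta^i * row[columns[i]]  (mod P).
fn combine(row: &[u128], columns: &[usize], beta: u128, gamma: u128) -> u128 {
    let mut acc = gamma % P;
    let mut pow = 1u128;
    for &c in columns {
        acc = (acc + row[c] % P * pow) % P;
        pow = pow * (beta % P) % P;
    }
    acc
}

/// Z-polynomial values of a filtered lookup: a running product over the rows,
/// where a row is included iff its filter (the sum of its filter columns) is 1.
/// An empty `filter_columns` means every row is included, as before this change.
fn filtered_z(
    rows: &[Vec<u128>],
    columns: &[usize],
    filter_columns: &[usize],
    beta: u128,
    gamma: u128,
) -> Vec<u128> {
    let mut prod = 1u128;
    let mut z = Vec::with_capacity(rows.len());
    for row in rows {
        let filter = if filter_columns.is_empty() {
            1
        } else {
            filter_columns.iter().map(|&c| row[c] % P).sum::<u128>() % P
        };
        assert!(filter == 0 || filter == 1, "non-binary filter");
        if filter == 1 {
            prod = prod * combine(row, columns, beta, gamma) % P;
        }
        z.push(prod);
    }
    z
}

At the last row, the verifier checks that the product of the looking tables' final Z values equals the looked table's final Z value; only the unfiltered case still multiplies in a `combine(default)` padding factor for the height difference between the tables.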

View File

@@ -49,21 +49,20 @@ impl Table {
#[cfg(test)]
mod tests {
use anyhow::Result;
use itertools::Itertools;
use itertools::{izip, Itertools};
use plonky2::field::field_types::Field;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::util::timing::TimingTree;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use rand::{thread_rng, Rng};
use crate::all_stark::{AllStark, Table};
use crate::config::StarkConfig;
use crate::cpu;
use crate::cpu::columns::{KECCAK_INPUT_LIMBS, KECCAK_OUTPUT_LIMBS};
use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::CrossTableLookup;
use crate::cross_table_lookup::{CrossTableLookup, TableWithColumns};
use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS};
use crate::proof::AllProof;
use crate::prover::prove;
@@ -73,6 +72,7 @@ mod tests {
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::verifier::verify_proof;
use crate::{cpu, keccak};
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
@@ -87,40 +87,76 @@ mod tests {
let keccak_stark = KeccakStark::<F, D> {
f: Default::default(),
};
let keccak_rows = (2 * NUM_ROUNDS + 1).next_power_of_two();
let keccak_looked_col = 3;
let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25);
let num_inputs = 2;
let keccak_inputs = (0..num_inputs)
let mut rng = thread_rng();
let num_keccak_perms = 2;
let keccak_inputs = (0..num_keccak_perms)
.map(|_| [0u64; NUM_INPUTS].map(|_| rng.gen()))
.collect_vec();
let keccak_trace = keccak_stark.generate_trace(keccak_inputs);
let column_to_copy: Vec<_> = keccak_trace[keccak_looked_col].values[..].into();
let default = vec![F::ONE; 1];
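// Read the input/output limbs that the Keccak trace exposes on the final round
// of each permutation, i.e. row `(i + 1) * NUM_ROUNDS - 1`.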
let keccak_input_limbs: Vec<[F; 2 * NUM_INPUTS]> = (0..num_keccak_perms)
.map(|i| {
(0..2 * NUM_INPUTS)
.map(|j| {
keccak_trace[keccak::registers::reg_input_limb(j)].values
[(i + 1) * NUM_ROUNDS - 1]
})
.collect::<Vec<_>>()
.try_into()
.unwrap()
})
.collect();
let keccak_output_limbs: Vec<[F; 2 * NUM_INPUTS]> = (0..num_keccak_perms)
.map(|i| {
(0..2 * NUM_INPUTS)
.map(|j| {
keccak_trace[keccak::registers::reg_output_limb(j)].values
[(i + 1) * NUM_ROUNDS - 1]
})
.collect::<Vec<_>>()
.try_into()
.unwrap()
})
.collect();
let mut cpu_trace_rows = vec![];
for i in 0..cpu_rows {
let mut cpu_trace_row = [F::ZERO; CpuStark::<F, D>::COLUMNS];
cpu_trace_row[cpu::columns::IS_CPU_CYCLE] = F::ONE;
if i < keccak_rows {
cpu_trace_row[cpu::columns::OPCODE] = column_to_copy[i];
} else {
cpu_trace_row[cpu::columns::OPCODE] = default[0];
}
cpu_trace_row[cpu::columns::OPCODE] = F::from_canonical_usize(i);
cpu_stark.generate(&mut cpu_trace_row);
cpu_trace_rows.push(cpu_trace_row);
}
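// Mark the first `num_keccak_perms` CPU rows as Keccak rows and copy in the limbs,
// so the filtered lookup has matching entries on both sides.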
for i in 0..num_keccak_perms {
cpu_trace_rows[i][cpu::columns::IS_KECCAK] = F::ONE;
for (j, input, output) in
izip!(0..2 * NUM_INPUTS, KECCAK_INPUT_LIMBS, KECCAK_OUTPUT_LIMBS)
{
cpu_trace_rows[i][input] = keccak_input_limbs[i][j];
cpu_trace_rows[i][output] = keccak_output_limbs[i][j];
}
}
let cpu_trace = trace_rows_to_poly_values(cpu_trace_rows);
// TODO: temporary until cross-table-lookup filters are implemented
let mut cpu_keccak_input_output = cpu::columns::KECCAK_INPUT_LIMBS.collect::<Vec<_>>();
cpu_keccak_input_output.extend(cpu::columns::KECCAK_OUTPUT_LIMBS);
let mut keccak_keccak_input_output = (0..2 * NUM_INPUTS)
.map(keccak::registers::reg_input_limb)
.collect::<Vec<_>>();
keccak_keccak_input_output
.extend((0..2 * NUM_INPUTS).map(keccak::registers::reg_output_limb));
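// Filtered CTL: CPU rows where `IS_KECCAK` is 1 must agree, on the Keccak
// input/output limbs, with the Keccak rows where `reg_step(NUM_ROUNDS - 1)` is 1,
// i.e. the final round of each permutation.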
let cross_table_lookups = vec![CrossTableLookup::new(
vec![Table::Cpu],
vec![vec![cpu::columns::OPCODE]],
Table::Keccak,
vec![keccak_looked_col],
default,
vec![TableWithColumns::new(
Table::Cpu,
cpu_keccak_input_output,
vec![cpu::columns::IS_KECCAK],
)],
TableWithColumns::new(
Table::Keccak,
keccak_keccak_input_output,
vec![keccak::registers::reg_step(NUM_ROUNDS - 1)],
),
None,
)];
let all_stark = AllStark {

View File

@@ -1,3 +1,5 @@
use std::ops::Range;
// Filter. 1 if the row corresponds to a cycle of execution and 0 otherwise.
// Lets us re-use decode columns in non-cycle rows.
pub const IS_CPU_CYCLE: usize = 0;
@@ -132,4 +134,14 @@ pub const OPCODE_BITS: [usize; 8] = [
END_INSTRUCTION_FLAGS + 7,
];
pub const NUM_CPU_COLUMNS: usize = OPCODE_BITS[OPCODE_BITS.len() - 1] + 1;
/// Filter. 1 iff a Keccak permutation is computed on this row.
pub const IS_KECCAK: usize = OPCODE_BITS[OPCODE_BITS.len() - 1] + 1;
pub const START_KECCAK_INPUT: usize = IS_KECCAK + 1;
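// 50 limbs = 25 Keccak state words, each split into two 32-bit halves
// (see `reg_input_limb`/`reg_output_limb`).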
#[allow(dead_code)] // TODO: Remove when used
pub const KECCAK_INPUT_LIMBS: Range<usize> = START_KECCAK_INPUT..START_KECCAK_INPUT + 50;
pub const START_KECCAK_OUTPUT: usize = KECCAK_INPUT_LIMBS.end;
pub const KECCAK_OUTPUT_LIMBS: Range<usize> = START_KECCAK_OUTPUT..START_KECCAK_OUTPUT + 50;
pub const NUM_CPU_COLUMNS: usize = KECCAK_OUTPUT_LIMBS.end;

View File

@@ -1,5 +1,5 @@
use anyhow::{ensure, Result};
use itertools::izip;
use itertools::Itertools;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::field::packed_field::PackedField;
@@ -21,33 +21,60 @@ use crate::proof::{StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget}
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
#[derive(Clone)]
pub struct TableWithColumns {
table: Table,
columns: Vec<usize>,
/// Columns `[c_1, ..., c_k]` whose sum `c_1 + ... + c_k` serves as this table's filter.
/// An empty vector means no filter: every row is included.
filter_columns: Vec<usize>,
}
impl TableWithColumns {
pub fn new(table: Table, columns: Vec<usize>, filter_columns: Vec<usize>) -> Self {
debug_assert_eq!(
filter_columns.iter().unique().count(),
filter_columns.len(),
"Duplicate filter columns."
);
Self {
table,
columns,
filter_columns,
}
}
}
#[derive(Clone)]
pub struct CrossTableLookup<F: Field> {
looking_tables: Vec<Table>,
looking_columns: Vec<Vec<usize>>,
looked_table: Table,
looked_columns: Vec<usize>,
default: Vec<F>,
looking_tables: Vec<TableWithColumns>,
looked_table: TableWithColumns,
/// Default values used to pad the looked table when no filter columns are set.
default: Option<Vec<F>>,
}
impl<F: Field> CrossTableLookup<F> {
pub fn new(
looking_tables: Vec<Table>,
looking_columns: Vec<Vec<usize>>,
looked_table: Table,
looked_columns: Vec<usize>,
default: Vec<F>,
looking_tables: Vec<TableWithColumns>,
looked_table: TableWithColumns,
default: Option<Vec<F>>,
) -> Self {
assert_eq!(looking_tables.len(), looking_columns.len());
assert!(looking_columns
assert!(looking_tables
.iter()
.all(|cols| cols.len() == looked_columns.len()));
assert!(default.len() == looked_columns.len());
.all(|twc| twc.columns.len() == looked_table.columns.len()));
assert!(
looking_tables
.iter()
.all(|twc| twc.filter_columns.is_empty() == default.is_some())
&& default.is_some() == looked_table.filter_columns.is_empty(),
"Default values should be provided iff there are no filter columns."
);
if let Some(default) = &default {
assert_eq!(default.len(), looked_table.columns.len());
}
Self {
looking_tables,
looking_columns,
looked_table,
looked_columns,
default,
}
}
@@ -58,8 +85,9 @@ impl<F: Field> CrossTableLookup<F> {
pub struct CtlData<F: Field> {
/// Challenges used in the argument.
pub(crate) challenges: GrandProductChallengeSet<F>,
/// Vector of `(Z, columns)` where `Z` is a Z-polynomial for a lookup on columns `columns`.
pub zs_columns: Vec<(PolynomialValues<F>, Vec<usize>)>,
/// Vector of `(Z, columns, filter_columns)` where `Z` is a Z-polynomial for a lookup
/// on columns `columns` with filter columns `filter_columns`.
pub zs_columns: Vec<(PolynomialValues<F>, Vec<usize>, Vec<usize>)>,
}
impl<F: Field> CtlData<F> {
@@ -79,7 +107,7 @@ impl<F: Field> CtlData<F> {
}
pub fn z_polys(&self) -> Vec<PolynomialValues<F>> {
self.zs_columns.iter().map(|(p, _)| p.clone()).collect()
self.zs_columns.iter().map(|(p, _, _)| p.clone()).collect()
}
}
@@ -93,22 +121,23 @@ pub fn cross_table_lookup_data<F: RichField, C: GenericConfig<D, F = F>, const D
let mut ctl_data_per_table = vec![CtlData::new(challenges.clone()); trace_poly_values.len()];
for CrossTableLookup {
looking_tables,
looking_columns,
looked_table,
looked_columns,
default,
} in cross_table_lookups
{
for &challenge in &challenges.challenges {
let zs_looking = looking_tables
.iter()
.zip(looking_columns)
.map(|(table, columns)| {
partial_products(&trace_poly_values[*table as usize], columns, challenge)
});
let zs_looking = looking_tables.iter().map(|table| {
partial_products(
&trace_poly_values[table.table as usize],
&table.columns,
&table.filter_columns,
challenge,
)
});
let z_looked = partial_products(
&trace_poly_values[*looked_table as usize],
looked_columns,
&trace_poly_values[looked_table.table as usize],
&looked_table.columns,
&looked_table.filter_columns,
challenge,
);
@@ -118,23 +147,37 @@ pub fn cross_table_lookup_data<F: RichField, C: GenericConfig<D, F = F>, const D
.map(|z| *z.values.last().unwrap())
.product::<F>(),
*z_looked.values.last().unwrap()
* challenge.combine(default).exp_u64(
looking_tables
.iter()
.map(|table| trace_poly_values[*table as usize][0].len() as u64)
.sum::<u64>()
- trace_poly_values[*looked_table as usize][0].len() as u64
)
* default
.as_ref()
.map(|default| {
challenge.combine(default).exp_u64(
looking_tables
.iter()
.map(|table| {
trace_poly_values[table.table as usize][0].len() as u64
})
.sum::<u64>()
- trace_poly_values[looked_table.table as usize][0].len()
as u64,
)
})
.unwrap_or(F::ONE)
);
for (table, columns, z) in izip!(looking_tables, looking_columns, zs_looking) {
ctl_data_per_table[*table as usize]
.zs_columns
.push((z, columns.clone()));
for (table, z) in looking_tables.iter().zip(zs_looking) {
ctl_data_per_table[table.table as usize].zs_columns.push((
z,
table.columns.clone(),
table.filter_columns.clone(),
));
}
ctl_data_per_table[*looked_table as usize]
ctl_data_per_table[looked_table.table as usize]
.zs_columns
.push((z_looked, looked_columns.clone()));
.push((
z_looked,
looked_table.columns.clone(),
looked_table.filter_columns.clone(),
));
}
}
ctl_data_per_table
@@ -143,13 +186,23 @@ pub fn cross_table_lookup_data<F: RichField, C: GenericConfig<D, F = F>, const D
fn partial_products<F: Field>(
trace: &[PolynomialValues<F>],
columns: &[usize],
filter_columns: &[usize],
challenge: GrandProductChallenge<F>,
) -> PolynomialValues<F> {
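// Build Z row by row: a row's combined value is multiplied into the running product
// only if its filter (the sum of the filter columns) is 1; an empty filter list
// includes every row.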
let mut partial_prod = F::ONE;
let degree = trace[0].len();
let mut res = Vec::with_capacity(degree);
for i in 0..degree {
partial_prod *= challenge.combine(columns.iter().map(|&j| &trace[j].values[i]));
let filter = if filter_columns.is_empty() {
F::ONE
} else {
filter_columns.iter().map(|&j| trace[j].values[i]).sum()
};
if filter.is_one() {
partial_prod *= challenge.combine(columns.iter().map(|&j| &trace[j].values[i]));
} else {
assert_eq!(filter, F::ZERO, "Non-binary filter?")
};
res.push(partial_prod);
}
res.into()
@@ -166,6 +219,7 @@ where
pub(crate) next_z: P,
pub(crate) challenges: GrandProductChallenge<F>,
pub(crate) columns: &'a [usize],
pub(crate) filter_columns: &'a [usize],
}
impl<'a, F: RichField + Extendable<D>, const D: usize>
@@ -192,29 +246,29 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
let mut ctl_vars_per_table = vec![vec![]; proofs.len()];
for CrossTableLookup {
looking_tables,
looking_columns,
looked_table,
looked_columns,
..
} in cross_table_lookups
{
for &challenges in &ctl_challenges.challenges {
for (table, columns) in looking_tables.iter().zip(looking_columns) {
let (looking_z, looking_z_next) = ctl_zs[*table as usize].next().unwrap();
ctl_vars_per_table[*table as usize].push(Self {
for table in looking_tables {
let (looking_z, looking_z_next) = ctl_zs[table.table as usize].next().unwrap();
ctl_vars_per_table[table.table as usize].push(Self {
local_z: *looking_z,
next_z: *looking_z_next,
challenges,
columns,
columns: &table.columns,
filter_columns: &table.filter_columns,
});
}
let (looked_z, looked_z_next) = ctl_zs[*looked_table as usize].next().unwrap();
ctl_vars_per_table[*looked_table as usize].push(Self {
let (looked_z, looked_z_next) = ctl_zs[looked_table.table as usize].next().unwrap();
ctl_vars_per_table[looked_table.table as usize].push(Self {
local_z: *looked_z,
next_z: *looked_z_next,
challenges,
columns: looked_columns,
columns: &looked_table.columns,
filter_columns: &looked_table.filter_columns,
});
}
}
@@ -239,13 +293,26 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, C, S, const D: usize, con
next_z,
challenges,
columns,
filter_columns,
} = lookup_vars;
let combine = |v: &[P]| -> P { challenges.combine(columns.iter().map(|&i| &v[i])) };
let filter = |v: &[P]| -> P {
if filter_columns.is_empty() {
P::ONES
} else {
filter_columns.iter().map(|&i| v[i]).sum()
}
};
let local_filter = filter(vars.local_values);
let next_filter = filter(vars.next_values);
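// `select(f, x)` evaluates to `x` when the filter `f` is 1 and to the multiplicative
// identity 1 when `f` is 0, so filtered-out rows leave the running product unchanged.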
let select = |filter, x| filter * x + P::ONES - filter;
// Check value of `Z(1)`
consumer.constraint_first_row(*local_z - combine(vars.local_values));
consumer.constraint_first_row(*local_z - select(local_filter, combine(vars.local_values)));
// Check `Z(gw) = combination * Z(w)`
consumer.constraint_transition(*next_z - *local_z * combine(vars.next_values));
consumer.constraint_transition(
*next_z - *local_z * select(next_filter, combine(vars.next_values)),
);
}
}
@@ -255,6 +322,7 @@ pub struct CtlCheckVarsTarget<'a, const D: usize> {
pub(crate) next_z: ExtensionTarget<D>,
pub(crate) challenges: GrandProductChallenge<Target>,
pub(crate) columns: &'a [usize],
pub(crate) filter_columns: &'a [usize],
}
impl<'a, const D: usize> CtlCheckVarsTarget<'a, D> {
@@ -279,29 +347,29 @@ impl<'a, const D: usize> CtlCheckVarsTarget<'a, D> {
let mut ctl_vars_per_table = vec![vec![]; proofs.len()];
for CrossTableLookup {
looking_tables,
looking_columns,
looked_table,
looked_columns,
..
} in cross_table_lookups
{
for &challenges in &ctl_challenges.challenges {
for (table, columns) in looking_tables.iter().zip(looking_columns) {
let (looking_z, looking_z_next) = ctl_zs[*table as usize].next().unwrap();
ctl_vars_per_table[*table as usize].push(Self {
for table in looking_tables {
let (looking_z, looking_z_next) = ctl_zs[table.table as usize].next().unwrap();
ctl_vars_per_table[table.table as usize].push(Self {
local_z: *looking_z,
next_z: *looking_z_next,
challenges,
columns,
columns: &table.columns,
filter_columns: &table.filter_columns,
});
}
let (looked_z, looked_z_next) = ctl_zs[*looked_table as usize].next().unwrap();
ctl_vars_per_table[*looked_table as usize].push(Self {
let (looked_z, looked_z_next) = ctl_zs[looked_table.table as usize].next().unwrap();
ctl_vars_per_table[looked_table.table as usize].push(Self {
local_z: *looked_z,
next_z: *looked_z_next,
challenges,
columns: looked_columns,
columns: &looked_table.columns,
filter_columns: &looked_table.filter_columns,
});
}
}
@@ -325,8 +393,30 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
next_z,
challenges,
columns,
filter_columns,
} = lookup_vars;
let one = builder.one_extension();
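// Circuit analogue of the native filter: sum the filter columns, or default to the
// constant 1 when there are none.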
let local_filter = if filter_columns.is_empty() {
one
} else {
builder.add_many_extension(filter_columns.iter().map(|&i| vars.local_values[i]))
};
let next_filter = if filter_columns.is_empty() {
one
} else {
builder.add_many_extension(filter_columns.iter().map(|&i| vars.next_values[i]))
};
fn select<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
filter: ExtensionTarget<D>,
x: ExtensionTarget<D>,
) -> ExtensionTarget<D> {
let one = builder.one_extension();
let tmp = builder.sub_extension(one, filter);
builder.mul_add_extension(filter, x, tmp) // filter * x + 1 - filter
}
// Check value of `Z(1)`
let combined_local = challenges.combine_circuit(
builder,
@@ -335,7 +425,8 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
.map(|&i| vars.local_values[i])
.collect::<Vec<_>>(),
);
let first_row = builder.sub_extension(*local_z, combined_local);
let selected_local = select(builder, local_filter, combined_local);
let first_row = builder.sub_extension(*local_z, selected_local);
consumer.constraint_first_row(builder, first_row);
// Check `Z(gw) = combination * Z(w)`
let combined_next = challenges.combine_circuit(
@@ -345,7 +436,8 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
.map(|&i| vars.next_values[i])
.collect::<Vec<_>>(),
);
let mut transition = builder.mul_extension(*local_z, combined_next);
let selected_next = select(builder, next_filter, combined_next);
let mut transition = builder.mul_extension(*local_z, selected_next);
transition = builder.sub_extension(*next_z, transition);
consumer.constraint_transition(builder, transition);
}
@@ -381,16 +473,18 @@ pub(crate) fn verify_cross_table_lookups<
{
let looking_degrees_sum = looking_tables
.iter()
.map(|&table| 1 << degrees_bits[table as usize])
.map(|table| 1 << degrees_bits[table.table as usize])
.sum::<u64>();
let looked_degree = 1 << degrees_bits[looked_table as usize];
let looked_degree = 1 << degrees_bits[looked_table.table as usize];
let looking_zs_prod = looking_tables
.into_iter()
.map(|table| *ctl_zs_openings[table as usize].next().unwrap())
.map(|table| *ctl_zs_openings[table.table as usize].next().unwrap())
.product::<F>();
let looked_z = *ctl_zs_openings[looked_table as usize].next().unwrap();
let looked_z = *ctl_zs_openings[looked_table.table as usize].next().unwrap();
let challenge = challenges.challenges[i % config.num_challenges];
let combined_default = challenge.combine(default.iter());
let combined_default = default
.map(|default| challenge.combine(default.iter()))
.unwrap_or(F::ONE);
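// Without filters, the looked Z is padded by `combined_default` raised to the
// difference between the total looking height and the looked height; with filters,
// `combined_default` is 1 and the two products must match directly.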
ensure!(
looking_zs_prod
@@ -433,24 +527,28 @@ pub(crate) fn verify_cross_table_lookups_circuit<
{
let looking_degrees_sum = looking_tables
.iter()
.map(|&table| 1 << degrees_bits[table as usize])
.map(|table| 1 << degrees_bits[table.table as usize])
.sum::<u64>();
let looked_degree = 1 << degrees_bits[looked_table as usize];
let looked_degree = 1 << degrees_bits[looked_table.table as usize];
let looking_zs_prod = builder.mul_many(
looking_tables
.into_iter()
.map(|table| *ctl_zs_openings[table as usize].next().unwrap()),
.map(|table| *ctl_zs_openings[table.table as usize].next().unwrap()),
);
let looked_z = *ctl_zs_openings[looked_table as usize].next().unwrap();
let looked_z = *ctl_zs_openings[looked_table.table as usize].next().unwrap();
let challenge = challenges.challenges[i % inner_config.num_challenges];
let default = default
.into_iter()
.map(|x| builder.constant(x))
.collect::<Vec<_>>();
let combined_default = challenge.combine_base_circuit(builder, &default);
if let Some(default) = default {
let default = default
.into_iter()
.map(|x| builder.constant(x))
.collect::<Vec<_>>();
let combined_default = challenge.combine_base_circuit(builder, &default);
let pad = builder.exp_u64(combined_default, looking_degrees_sum - looked_degree);
let padded_looked_z = builder.mul(looked_z, pad);
builder.connect(looking_zs_prod, padded_looked_z);
let pad = builder.exp_u64(combined_default, looking_degrees_sum - looked_degree);
let padded_looked_z = builder.mul(looked_z, pad);
builder.connect(looking_zs_prod, padded_looked_z);
} else {
builder.connect(looking_zs_prod, looked_z);
}
}
}

View File

@@ -1,7 +1,7 @@
use crate::keccak::keccak_stark::{NUM_INPUTS, NUM_ROUNDS};
/// A register which is set to 1 if we are in the `i`th round, otherwise 0.
pub(crate) const fn reg_step(i: usize) -> usize {
pub const fn reg_step(i: usize) -> usize {
debug_assert!(i < NUM_ROUNDS);
i
}
@@ -9,7 +9,7 @@ pub(crate) const fn reg_step(i: usize) -> usize {
/// Registers to hold permutation inputs.
/// `reg_input_limb(2*i) -> input[i] as u32`
/// `reg_input_limb(2*i+1) -> input[i] >> 32`
pub(crate) const fn reg_input_limb(i: usize) -> usize {
pub const fn reg_input_limb(i: usize) -> usize {
debug_assert!(i < 2 * NUM_INPUTS);
NUM_ROUNDS + i
}
@@ -17,8 +17,7 @@ pub(crate) const fn reg_input_limb(i: usize) -> usize {
/// Registers to hold permutation outputs.
/// `reg_output_limb(2*i) -> output[i] as u32`
/// `reg_output_limb(2*i+1) -> output[i] >> 32`
#[allow(dead_code)] // TODO: Remove once it is used.
pub(crate) const fn reg_output_limb(i: usize) -> usize {
pub const fn reg_output_limb(i: usize) -> usize {
debug_assert!(i < 2 * NUM_INPUTS);
let ii = i / 2;
let x = ii / 5;

View File

@@ -391,14 +391,17 @@ where
.zs_columns
.iter()
.enumerate()
.map(|(i, (_, columns))| CtlCheckVars::<F, F, P, 1> {
local_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
[num_permutation_zs + i],
next_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_next_start, step)
[num_permutation_zs + i],
challenges: ctl_data.challenges.challenges[i % config.num_challenges],
columns,
})
.map(
|(i, (_, columns, filter_columns))| CtlCheckVars::<F, F, P, 1> {
local_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
[num_permutation_zs + i],
next_z: permutation_ctl_zs_commitment
.get_lde_values_packed(i_next_start, step)[num_permutation_zs + i],
challenges: ctl_data.challenges.challenges[i % config.num_challenges],
columns,
filter_columns,
},
)
.collect::<Vec<_>>();
eval_vanishing_poly::<F, F, P, C, S, D, 1>(
stark,
@@ -506,14 +509,17 @@ fn check_constraints<'a, F, C, S, const D: usize>(
.zs_columns
.iter()
.enumerate()
.map(|(iii, (_, columns))| CtlCheckVars::<F, F, F, 1> {
local_z: get_comm_values(permutation_ctl_zs_commitment, i)
[num_permutation_zs + iii],
next_z: get_comm_values(permutation_ctl_zs_commitment, i_next)
[num_permutation_zs + iii],
challenges: ctl_data.challenges.challenges[iii % config.num_challenges],
columns,
})
.map(
|(iii, (_, columns, filter_columns))| CtlCheckVars::<F, F, F, 1> {
local_z: get_comm_values(permutation_ctl_zs_commitment, i)
[num_permutation_zs + iii],
next_z: get_comm_values(permutation_ctl_zs_commitment, i_next)
[num_permutation_zs + iii],
challenges: ctl_data.challenges.challenges[iii % config.num_challenges],
columns,
filter_columns,
},
)
.collect::<Vec<_>>();
eval_vanishing_poly::<F, F, F, C, S, D, 1>(
stark,