mirror of https://github.com/logos-storage/plonky2.git, synced 2026-01-05 23:33:07 +00:00
Add Boolean constraints for ArithmeticStark (#1453)
* Constrain IS_READ to be boolean
* Constrain arithmetic flags to be boolean
* Comment on memory stark
This commit is contained in:
parent 54a1313588
commit c8430dac39
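Every booleanity check in this commit is the standard degree-2 identity x * (x - 1) = 0, which a prime-field element satisfies exactly when it is 0 or 1. A standalone sketch over the Goldilocks modulus (plain integer arithmetic, not plonky2's field types):

// Booleanity as a polynomial identity: x * (x - 1) mod p is zero exactly
// when x is 0 or 1, so constraining this product to vanish forces a column
// to hold a boolean value.
const P: u128 = 0xFFFF_FFFF_0000_0001; // Goldilocks prime, plonky2's base field

fn booleanity(x: u128) -> u128 {
    (x % P) * ((x % P + P - 1) % P) % P
}

fn main() {
    assert_eq!(booleanity(0), 0);
    assert_eq!(booleanity(1), 0);
    assert_ne!(booleanity(2), 0); // any non-boolean value violates the constraint
}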
@@ -11,7 +11,7 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
 use plonky2::util::transpose;
 use static_assertions::const_assert;
 
-use super::columns::NUM_ARITH_COLUMNS;
+use super::columns::{op_flags, NUM_ARITH_COLUMNS};
 use super::shift;
 use crate::all_stark::Table;
 use crate::arithmetic::columns::{NUM_SHARED_COLS, RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
@@ -208,6 +208,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
         let lv: &[P; NUM_ARITH_COLUMNS] = vars.get_local_values().try_into().unwrap();
         let nv: &[P; NUM_ARITH_COLUMNS] = vars.get_next_values().try_into().unwrap();
 
+        // Flags must be boolean.
+        for flag_idx in op_flags() {
+            let flag = lv[flag_idx];
+            yield_constr.constraint(flag * (flag - P::ONES));
+        }
+
         // Check that `OPCODE_COL` holds 0 if the operation is not a range_check.
         let opcode_constraint = (P::ONES - lv[columns::IS_RANGE_CHECK]) * lv[columns::OPCODE_COL];
         yield_constr.constraint(opcode_constraint);
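The constraint above is symbolic: it is emitted once but must hold on every row of the arithmetic trace. Over a prime field it is equivalent to the following debug-style check (a toy model, not the Stark trait; plain integers stand in for field elements since flags only ever hold 0 or 1):

// Check every row of a (toy) trace for boolean flag columns, which is what
// the packed constraint enforces row by row. The trace representation and
// the `op_flags` argument here are illustrative, not the real layout.
fn flags_are_boolean(trace: &[Vec<u64>], op_flags: std::ops::Range<usize>) -> bool {
    trace
        .iter()
        .all(|row| op_flags.clone().all(|i| row[i] == 0 || row[i] == 1))
}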
@@ -248,6 +254,13 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
         let nv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =
             vars.get_next_values().try_into().unwrap();
 
+        // Flags must be boolean.
+        for flag_idx in op_flags() {
+            let flag = lv[flag_idx];
+            let constraint = builder.mul_sub_extension(flag, flag, flag);
+            yield_constr.constraint(builder, constraint);
+        }
+
         // Check that `OPCODE_COL` holds 0 if the operation is not a range_check.
         let opcode_constraint = builder.arithmetic_extension(
             F::NEG_ONE,
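The recursive version builds the same polynomial with a multiply-subtract gate. Assuming mul_sub_extension(a, b, c) computes a * b - c (as its use here and in the MemoryStark hunks below suggests), the emitted constraint is flag^2 - flag, the same polynomial as the native flag * (flag - P::ONES). A quick check of that identity over the Goldilocks modulus:

// Sanity check that x*x - x == x*(x - 1) mod p for a few values, i.e. the
// circuit form (mul-sub) and the packed form (product) agree.
const P: u128 = 0xFFFF_FFFF_0000_0001; // Goldilocks modulus

fn main() {
    for x in [0u128, 1, 2, 12_345, P - 1] {
        let lhs = (x * x % P + P - x % P) % P;   // x^2 - x
        let rhs = x % P * ((x + P - 1) % P) % P; // x * (x - 1)
        assert_eq!(lhs, rhs);
    }
}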
@@ -43,6 +43,10 @@ pub(crate) const IS_RANGE_CHECK: usize = IS_SHR + 1;
 pub(crate) const OPCODE_COL: usize = IS_RANGE_CHECK + 1;
 pub(crate) const START_SHARED_COLS: usize = OPCODE_COL + 1;
 
+pub(crate) const fn op_flags() -> Range<usize> {
+    IS_ADD..IS_RANGE_CHECK + 1
+}
+
 /// Within the Arithmetic Unit, there are shared columns which can be
 /// used by any arithmetic circuit, depending on which one is active
 /// this cycle.
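The new op_flags() helper relies on the operation-flag columns being assigned consecutive indices, from IS_ADD through IS_RANGE_CHECK, so a single half-open range covers them all. A toy model of that layout (the indices here are hypothetical, not the real values in columns.rs):

use std::ops::Range;

// Hypothetical flag indices: only contiguity matters for op_flags().
const IS_ADD: usize = 0;
// ... the remaining operation flags occupy the indices in between ...
const IS_RANGE_CHECK: usize = 13; // hypothetical index of the last flag

const fn op_flags() -> Range<usize> {
    // Half-open range, hence the +1 to include IS_RANGE_CHECK itself.
    IS_ADD..IS_RANGE_CHECK + 1
}

fn main() {
    // Every flag column, IS_ADD through IS_RANGE_CHECK inclusive, is visited.
    assert_eq!(op_flags().count(), IS_RANGE_CHECK - IS_ADD + 1);
}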
@@ -305,6 +305,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
         let filter = local_values[FILTER];
         yield_constr.constraint(filter * (filter - P::ONES));
 
+        // IS_READ must be 0 or 1.
+        // This is implied by the MemoryStark CTL, where corresponding values are either
+        // hardcoded to 0/1, or boolean-constrained in their respective STARK modules.
+
         // If this is a dummy row (filter is off), it must be a read. This means the prover can
         // insert reads which never appear in the CPU trace (which are harmless), but not writes.
         let is_dummy = P::ONES - filter;
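The comments added here record that IS_READ's booleanity already follows from the cross-table lookup, while the dummy-row rule below it is enforced directly. That rule uses the usual conditional-constraint pattern: multiply the forbidden condition by a selector so the product must vanish. A standalone sketch under toy modular arithmetic (the names and the exact expression are illustrative, not copied from memory_stark.rs):

const P: u128 = 0xFFFF_FFFF_0000_0001;

fn sub(a: u128, b: u128) -> u128 { (a % P + P - b % P) % P }
fn mul(a: u128, b: u128) -> u128 { (a % P) * (b % P) % P }

// Zero unless the row is a dummy row (filter == 0) *and* a write
// (is_read == 0), which is exactly the forbidden combination.
fn dummy_rows_must_be_reads(filter: u128, is_read: u128) -> u128 {
    let is_dummy = sub(1, filter);
    mul(is_dummy, sub(1, is_read))
}

fn main() {
    assert_eq!(dummy_rows_must_be_reads(0, 1), 0); // dummy read: allowed
    assert_eq!(dummy_rows_must_be_reads(1, 0), 0); // real write: allowed
    assert_ne!(dummy_rows_must_be_reads(0, 0), 0); // dummy write: rejected
}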
@@ -411,6 +415,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
         let constraint = builder.mul_sub_extension(filter, filter, filter);
         yield_constr.constraint(builder, constraint);
 
+        // IS_READ must be 0 or 1.
+        // This is implied by the MemoryStark CTL, where corresponding values are either
+        // hardcoded to 0/1, or boolean-constrained in their respective STARK modules.
+
         // If this is a dummy row (filter is off), it must be a read. This means the prover can
         // insert reads which never appear in the CPU trace (which are harmless), but not writes.
         let is_dummy = builder.sub_extension(one, filter);