Mirror of https://github.com/logos-storage/plonky2.git, synced 2026-01-03 06:13:07 +00:00

Commit bbc6fe768f: Merge branch 'main' into 'new-logup'
@@ -6,18 +6,20 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::util::transpose;
use static_assertions::const_assert;

use super::columns::NUM_ARITH_COLUMNS;
use crate::all_stark::Table;
use crate::arithmetic::columns::{RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{Column, TableWithColumns};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::Lookup;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// Link the 16-bit columns of the arithmetic table, split into groups
/// of N_LIMBS at a time in `regs`, with the corresponding 32-bit
@@ -172,18 +174,23 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticStark<F, D> {
const COLUMNS: usize = columns::NUM_ARITH_COLUMNS;
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_ARITH_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_ARITH_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
let lv = vars.local_values;
let nv = vars.next_values;
let lv: &[P; NUM_ARITH_COLUMNS] = vars.get_local_values().try_into().unwrap();
let nv: &[P; NUM_ARITH_COLUMNS] = vars.get_next_values().try_into().unwrap();

// Check the range column: First value must be 0, last row
// must be 2^16-1, and intermediate rows must increment by 0
@@ -206,11 +213,13 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
fn eval_ext_circuit(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
vars: &Self::EvaluationFrameTarget,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let lv = vars.local_values;
let nv = vars.next_values;
let lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =
vars.get_local_values().try_into().unwrap();
let nv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =
vars.get_next_values().try_into().unwrap();

let rc1 = lv[columns::RANGE_COUNTER];
let rc2 = nv[columns::RANGE_COUNTER];
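Note: the comment above is cut off by the hunk boundary; it describes the range-check column (first row 0, last row 2^16 - 1, bounded increments between rows). As a reviewer illustration only, not code copied from this file, constraints of that shape typically look like:

    // Illustration (assumed shape, not taken from this diff): range-counter constraints.
    let rc1 = lv[columns::RANGE_COUNTER];
    let rc2 = nv[columns::RANGE_COUNTER];
    yield_constr.constraint_first_row(rc1); // first row is 0
    let incr = rc2 - rc1;
    yield_constr.constraint_transition(incr * (incr - P::ONES)); // rows increment by 0 or 1
    yield_constr.constraint_last_row(rc1 - FE::from_canonical_u64((1 << 16) - 1)); // last row is 2^16 - 1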

@@ -51,9 +51,9 @@ use crate::byte_packing::columns::{
};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::Lookup;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
use crate::witness::memory::MemoryAddress;

/// Strict upper bound for the individual bytes range-check.
@@ -211,7 +211,7 @@ impl<F: RichField + Extendable<D>, const D: usize> BytePackingStark<F, D> {
row[value_bytes(i)] = F::from_canonical_u8(byte);
row[index_bytes(i)] = F::ONE;

rows.push(row.into());
rows.push(row);
row[index_bytes(i)] = F::ZERO;
row[ADDR_VIRTUAL] -= F::ONE;
}
@@ -255,7 +255,7 @@ impl<F: RichField + Extendable<D>, const D: usize> BytePackingStark<F, D> {
}
}

/// There is only one `i` for which `vars.local_values[index_bytes(i)]` is non-zero,
/// There is only one `i` for which `local_values[index_bytes(i)]` is non-zero,
/// and `i+1` is the current position:
fn get_active_position<FE, P, const D2: usize>(&self, row: &[P; NUM_COLUMNS]) -> P
where
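Note: the body of get_active_position sits outside this hunk; only its doc comment is shown. Given that contract (exactly one index_bytes(i) flag is set and i + 1 is the position), an assumed sketch of such a helper, not the file's actual body, would be:

    // Assumed sketch only; the real body is not part of this diff.
    (0..NUM_BYTES)
        .map(|i| row[index_bytes(i)] * P::Scalar::from_canonical_usize(i + 1))
        .sum()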
@@ -288,78 +288,77 @@ impl<F: RichField + Extendable<D>, const D: usize> BytePackingStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
let local_values: &[P; NUM_COLUMNS] = vars.get_local_values().try_into().unwrap();
let next_values: &[P; NUM_COLUMNS] = vars.get_next_values().try_into().unwrap();

let one = P::ONES;

// We filter active columns by summing all the byte indices.
// Constraining each of them to be boolean is done later on below.
let current_filter = vars.local_values[BYTE_INDICES_COLS]
.iter()
.copied()
.sum::<P>();
let current_filter = local_values[BYTE_INDICES_COLS].iter().copied().sum::<P>();
yield_constr.constraint(current_filter * (current_filter - one));

// The filter column must start by one.
yield_constr.constraint_first_row(current_filter - one);

// The is_read flag must be boolean.
let current_is_read = vars.local_values[IS_READ];
let current_is_read = local_values[IS_READ];
yield_constr.constraint(current_is_read * (current_is_read - one));

// Each byte index must be boolean.
for i in 0..NUM_BYTES {
let idx_i = vars.local_values[index_bytes(i)];
let idx_i = local_values[index_bytes(i)];
yield_constr.constraint(idx_i * (idx_i - one));
}

// The sequence start flag column must start by one.
let current_sequence_start = vars.local_values[index_bytes(0)];
let current_sequence_start = local_values[index_bytes(0)];
yield_constr.constraint_first_row(current_sequence_start - one);

// The sequence end flag must be boolean
let current_sequence_end = vars.local_values[SEQUENCE_END];
let current_sequence_end = local_values[SEQUENCE_END];
yield_constr.constraint(current_sequence_end * (current_sequence_end - one));

// If filter is off, all flags and byte indices must be off.
let byte_indices = vars.local_values[BYTE_INDICES_COLS]
.iter()
.copied()
.sum::<P>();
let byte_indices = local_values[BYTE_INDICES_COLS].iter().copied().sum::<P>();
yield_constr.constraint(
(current_filter - one) * (current_is_read + current_sequence_end + byte_indices),
);

// Only padding rows have their filter turned off.
let next_filter = vars.next_values[BYTE_INDICES_COLS]
.iter()
.copied()
.sum::<P>();
let next_filter = next_values[BYTE_INDICES_COLS].iter().copied().sum::<P>();
yield_constr.constraint_transition(next_filter * (next_filter - current_filter));

// Unless the current sequence end flag is activated, the is_read filter must remain unchanged.
let next_is_read = vars.next_values[IS_READ];
let next_is_read = next_values[IS_READ];
yield_constr
.constraint_transition((current_sequence_end - one) * (next_is_read - current_is_read));

// If the sequence end flag is activated, the next row must be a new sequence or filter must be off.
let next_sequence_start = vars.next_values[index_bytes(0)];
let next_sequence_start = next_values[index_bytes(0)];
yield_constr.constraint_transition(
current_sequence_end * next_filter * (next_sequence_start - one),
);

// The active position in a byte sequence must increase by one on every row
// or be one on the next row (i.e. at the start of a new sequence).
let current_position = self.get_active_position(vars.local_values);
let next_position = self.get_active_position(vars.next_values);
let current_position = self.get_active_position(local_values);
let next_position = self.get_active_position(next_values);
yield_constr.constraint_transition(
next_filter * (next_position - one) * (next_position - current_position - one),
);
@@ -373,14 +372,14 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt

// The context, segment and timestamp fields must remain unchanged throughout a byte sequence.
// The virtual address must decrement by one at each step of a sequence.
let current_context = vars.local_values[ADDR_CONTEXT];
let next_context = vars.next_values[ADDR_CONTEXT];
let current_segment = vars.local_values[ADDR_SEGMENT];
let next_segment = vars.next_values[ADDR_SEGMENT];
let current_virtual = vars.local_values[ADDR_VIRTUAL];
let next_virtual = vars.next_values[ADDR_VIRTUAL];
let current_timestamp = vars.local_values[TIMESTAMP];
let next_timestamp = vars.next_values[TIMESTAMP];
let current_context = local_values[ADDR_CONTEXT];
let next_context = next_values[ADDR_CONTEXT];
let current_segment = local_values[ADDR_SEGMENT];
let next_segment = next_values[ADDR_SEGMENT];
let current_virtual = local_values[ADDR_VIRTUAL];
let next_virtual = next_values[ADDR_VIRTUAL];
let current_timestamp = local_values[TIMESTAMP];
let next_timestamp = next_values[TIMESTAMP];
yield_constr.constraint_transition(
next_filter * (next_sequence_start - one) * (next_context - current_context),
);
@@ -397,9 +396,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
// If not at the end of a sequence, each next byte must equal the current one
// when reading through the sequence, or the next byte index must be one.
for i in 0..NUM_BYTES {
let current_byte = vars.local_values[value_bytes(i)];
let next_byte = vars.next_values[value_bytes(i)];
let next_byte_index = vars.next_values[index_bytes(i)];
let current_byte = local_values[value_bytes(i)];
let next_byte = next_values[value_bytes(i)];
let next_byte_index = next_values[index_bytes(i)];
yield_constr.constraint_transition(
(current_sequence_end - one) * (next_byte_index - one) * (next_byte - current_byte),
);
@@ -409,12 +408,17 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
vars: &Self::EvaluationFrameTarget,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values: &[ExtensionTarget<D>; NUM_COLUMNS] =
vars.get_local_values().try_into().unwrap();
let next_values: &[ExtensionTarget<D>; NUM_COLUMNS] =
vars.get_next_values().try_into().unwrap();

// We filter active columns by summing all the byte indices.
// Constraining each of them to be boolean is done later on below.
let current_filter = builder.add_many_extension(&vars.local_values[BYTE_INDICES_COLS]);
let current_filter = builder.add_many_extension(&local_values[BYTE_INDICES_COLS]);
let constraint = builder.mul_sub_extension(current_filter, current_filter, current_filter);
yield_constr.constraint(builder, constraint);
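Note on the circuit form used here and below: as this diff uses it, builder.mul_sub_extension(a, b, c) evaluates a*b - c, so the booleanity check x * (x - 1) from the packed version becomes a single call:

    // x is boolean iff x * (x - 1) == 0; in circuit form, one mul_sub gate: x*x - x.
    let booleanity = builder.mul_sub_extension(x, x, x);
    yield_constr.constraint(builder, booleanity);

Here x stands for any of the flag targets being constrained (for example current_filter above).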

@@ -423,25 +427,25 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
yield_constr.constraint_first_row(builder, constraint);

// The is_read flag must be boolean.
let current_is_read = vars.local_values[IS_READ];
let current_is_read = local_values[IS_READ];
let constraint =
builder.mul_sub_extension(current_is_read, current_is_read, current_is_read);
yield_constr.constraint(builder, constraint);

// Each byte index must be boolean.
for i in 0..NUM_BYTES {
let idx_i = vars.local_values[index_bytes(i)];
let idx_i = local_values[index_bytes(i)];
let constraint = builder.mul_sub_extension(idx_i, idx_i, idx_i);
yield_constr.constraint(builder, constraint);
}

// The sequence start flag column must start by one.
let current_sequence_start = vars.local_values[index_bytes(0)];
let current_sequence_start = local_values[index_bytes(0)];
let constraint = builder.add_const_extension(current_sequence_start, F::NEG_ONE);
yield_constr.constraint_first_row(builder, constraint);

// The sequence end flag must be boolean
let current_sequence_end = vars.local_values[SEQUENCE_END];
let current_sequence_end = local_values[SEQUENCE_END];
let constraint = builder.mul_sub_extension(
current_sequence_end,
current_sequence_end,
@@ -450,27 +454,27 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
yield_constr.constraint(builder, constraint);

// If filter is off, all flags and byte indices must be off.
let byte_indices = builder.add_many_extension(&vars.local_values[BYTE_INDICES_COLS]);
let byte_indices = builder.add_many_extension(&local_values[BYTE_INDICES_COLS]);
let constraint = builder.add_extension(current_sequence_end, byte_indices);
let constraint = builder.add_extension(constraint, current_is_read);
let constraint = builder.mul_sub_extension(constraint, current_filter, constraint);
yield_constr.constraint(builder, constraint);

// Only padding rows have their filter turned off.
let next_filter = builder.add_many_extension(&vars.next_values[BYTE_INDICES_COLS]);
let next_filter = builder.add_many_extension(&next_values[BYTE_INDICES_COLS]);
let constraint = builder.sub_extension(next_filter, current_filter);
let constraint = builder.mul_extension(next_filter, constraint);
yield_constr.constraint_transition(builder, constraint);

// Unless the current sequence end flag is activated, the is_read filter must remain unchanged.
let next_is_read = vars.next_values[IS_READ];
let next_is_read = next_values[IS_READ];
let diff_is_read = builder.sub_extension(next_is_read, current_is_read);
let constraint =
builder.mul_sub_extension(diff_is_read, current_sequence_end, diff_is_read);
yield_constr.constraint_transition(builder, constraint);

// If the sequence end flag is activated, the next row must be a new sequence or filter must be off.
let next_sequence_start = vars.next_values[index_bytes(0)];
let next_sequence_start = next_values[index_bytes(0)];
let constraint = builder.mul_sub_extension(
current_sequence_end,
next_sequence_start,
@@ -481,8 +485,8 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt

// The active position in a byte sequence must increase by one on every row
// or be one on the next row (i.e. at the start of a new sequence).
let current_position = self.get_active_position_circuit(builder, vars.local_values);
let next_position = self.get_active_position_circuit(builder, vars.next_values);
let current_position = self.get_active_position_circuit(builder, local_values);
let next_position = self.get_active_position_circuit(builder, next_values);

let position_diff = builder.sub_extension(next_position, current_position);
let is_new_or_inactive = builder.mul_sub_extension(next_filter, next_position, next_filter);
@@ -502,14 +506,14 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt

// The context, segment and timestamp fields must remain unchanged throughout a byte sequence.
// The virtual address must decrement by one at each step of a sequence.
let current_context = vars.local_values[ADDR_CONTEXT];
let next_context = vars.next_values[ADDR_CONTEXT];
let current_segment = vars.local_values[ADDR_SEGMENT];
let next_segment = vars.next_values[ADDR_SEGMENT];
let current_virtual = vars.local_values[ADDR_VIRTUAL];
let next_virtual = vars.next_values[ADDR_VIRTUAL];
let current_timestamp = vars.local_values[TIMESTAMP];
let next_timestamp = vars.next_values[TIMESTAMP];
let current_context = local_values[ADDR_CONTEXT];
let next_context = next_values[ADDR_CONTEXT];
let current_segment = local_values[ADDR_SEGMENT];
let next_segment = next_values[ADDR_SEGMENT];
let current_virtual = local_values[ADDR_VIRTUAL];
let next_virtual = next_values[ADDR_VIRTUAL];
let current_timestamp = local_values[TIMESTAMP];
let next_timestamp = next_values[TIMESTAMP];
let addr_filter = builder.mul_sub_extension(next_filter, next_sequence_start, next_filter);
{
let constraint = builder.sub_extension(next_context, current_context);
@@ -535,9 +539,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
// If not at the end of a sequence, each next byte must equal the current one
// when reading through the sequence, or the next byte index must be one.
for i in 0..NUM_BYTES {
let current_byte = vars.local_values[value_bytes(i)];
let next_byte = vars.next_values[value_bytes(i)];
let next_byte_index = vars.next_values[index_bytes(i)];
let current_byte = local_values[value_bytes(i)];
let next_byte = next_values[value_bytes(i)];
let next_byte_index = next_values[index_bytes(i)];
let byte_diff = builder.sub_extension(next_byte, current_byte);
let constraint = builder.mul_sub_extension(byte_diff, next_byte_index, byte_diff);
let constraint =

@@ -1,22 +1,20 @@
//! The initial phase of execution, where the kernel code is hashed while being written to memory.
//! The hash is then checked against a precomputed kernel hash.

use std::borrow::Borrow;

use itertools::Itertools;
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS};
use crate::cpu::columns::CpuColumnsView;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::membus::NUM_GP_CHANNELS;
use crate::generation::state::GenerationState;
use crate::memory::segments::Segment;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
use crate::witness::memory::MemoryAddress;
use crate::witness::util::{keccak_sponge_log, mem_write_gp_log_and_fill};

@@ -58,13 +56,11 @@ pub(crate) fn generate_bootstrap_kernel<F: Field>(state: &mut GenerationState<F>
log::info!("Bootstrapping took {} cycles", state.traces.clock());
}

pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_CPU_COLUMNS>,
pub(crate) fn eval_bootstrap_kernel_packed<F: Field, P: PackedField<Scalar = F>>(
local_values: &CpuColumnsView<P>,
next_values: &CpuColumnsView<P>,
yield_constr: &mut ConstraintConsumer<P>,
) {
let local_values: &CpuColumnsView<_> = vars.local_values.borrow();
let next_values: &CpuColumnsView<_> = vars.next_values.borrow();

// IS_BOOTSTRAP_KERNEL must have an init value of 1, a final value of 0, and a delta in {0, -1}.
let local_is_bootstrap = local_values.is_bootstrap_kernel;
let next_is_bootstrap = next_values.is_bootstrap_kernel;
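Note: the constraints the comment above describes fall outside this hunk. A sketch of their shape (assumed, not copied from the file):

    // Assumed shape (not shown in this hunk): init 1, final 0, delta in {0, -1}.
    yield_constr.constraint_first_row(local_is_bootstrap - P::ONES);
    yield_constr.constraint_last_row(local_is_bootstrap);
    let delta_is_bootstrap = next_is_bootstrap - local_is_bootstrap;
    yield_constr.constraint_transition(delta_is_bootstrap * (delta_is_bootstrap + P::ONES));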
@@ -103,13 +99,12 @@ pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
}
}

pub(crate) fn eval_bootstrap_kernel_circuit<F: RichField + Extendable<D>, const D: usize>(
pub(crate) fn eval_bootstrap_kernel_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_CPU_COLUMNS>,
local_values: &CpuColumnsView<ExtensionTarget<D>>,
next_values: &CpuColumnsView<ExtensionTarget<D>>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values: &CpuColumnsView<_> = vars.local_values.borrow();
let next_values: &CpuColumnsView<_> = vars.next_values.borrow();
let one = builder.one_extension();

// IS_BOOTSTRAP_KERNEL must have an init value of 1, a final value of 0, and a delta in {0, -1}.

@@ -7,6 +7,7 @@ use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;

use super::halt;
use crate::all_stark::Table;
@@ -18,10 +19,10 @@ use crate::cpu::{
modfp254, pc, push0, shift, simple_logic, stack, stack_bounds, syscalls_exceptions,
};
use crate::cross_table_lookup::{Column, TableWithColumns};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::memory::segments::Segment;
use crate::memory::{NUM_CHANNELS, VALUE_LIMBS};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

pub fn ctl_data_keccak_sponge<F: Field>() -> Vec<Column<F>> {
// When executing KECCAK_GENERAL, the GP memory channels are used as follows:
@@ -227,19 +228,27 @@ impl<F: RichField, const D: usize> CpuStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
const COLUMNS: usize = NUM_CPU_COLUMNS;
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_CPU_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_CPU_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
let local_values = vars.local_values.borrow();
let next_values = vars.next_values.borrow();
bootstrap_kernel::eval_bootstrap_kernel(vars, yield_constr);
let local_values: &[P; NUM_CPU_COLUMNS] = vars.get_local_values().try_into().unwrap();
let local_values: &CpuColumnsView<P> = local_values.borrow();
let next_values: &[P; NUM_CPU_COLUMNS] = vars.get_next_values().try_into().unwrap();
let next_values: &CpuColumnsView<P> = next_values.borrow();

bootstrap_kernel::eval_bootstrap_kernel_packed(local_values, next_values, yield_constr);
contextops::eval_packed(local_values, next_values, yield_constr);
control_flow::eval_packed_generic(local_values, next_values, yield_constr);
decode::eval_packed_generic(local_values, yield_constr);
@@ -262,12 +271,22 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
vars: &Self::EvaluationFrameTarget,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values = vars.local_values.borrow();
let next_values = vars.next_values.borrow();
bootstrap_kernel::eval_bootstrap_kernel_circuit(builder, vars, yield_constr);
let local_values: &[ExtensionTarget<D>; NUM_CPU_COLUMNS] =
vars.get_local_values().try_into().unwrap();
let local_values: &CpuColumnsView<ExtensionTarget<D>> = local_values.borrow();
let next_values: &[ExtensionTarget<D>; NUM_CPU_COLUMNS] =
vars.get_next_values().try_into().unwrap();
let next_values: &CpuColumnsView<ExtensionTarget<D>> = next_values.borrow();

bootstrap_kernel::eval_bootstrap_kernel_ext_circuit(
builder,
local_values,
next_values,
yield_constr,
);
contextops::eval_ext_circuit(builder, local_values, next_values, yield_constr);
control_flow::eval_ext_circuit(builder, local_values, next_values, yield_constr);
decode::eval_ext_circuit(builder, local_values, yield_constr);
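Note: the two-step conversion in the hunks above (frame slice to fixed-size array via try_into, then array to CpuColumnsView via borrow) is needed because the frame trait only hands out an unsized &[T]; try_into restores the compile-time length, and the view is then obtained through a Borrow impl between the column array and CpuColumnsView, which this diff assumes already exists in cpu::columns:

    // Pattern used above; assumes [T; NUM_CPU_COLUMNS]: Borrow<CpuColumnsView<T>> (impl not shown in this diff).
    let local_values: &[P; NUM_CPU_COLUMNS] = vars.get_local_values().try_into().unwrap();
    let local_values: &CpuColumnsView<P> = local_values.borrow();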

@@ -22,9 +22,9 @@ use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
use crate::all_stark::{Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::proof::{StarkProofTarget, StarkProofWithMetadata};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// Represent a linear combination of columns.
#[derive(Clone, Debug)]
@@ -600,7 +600,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
/// Z(w) = Z(gw) * combine(w) where combine is called on the local row
/// and not the next. This enables CTLs across two rows.
pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const D2: usize>(
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
vars: &S::EvaluationFrame<FE, P, D2>,
ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
consumer: &mut ConstraintConsumer<P>,
) where
@@ -609,6 +609,9 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
P: PackedField<Scalar = FE>,
S: Stark<F, D>,
{
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

for lookup_vars in ctl_vars {
let CtlCheckVars {
local_z,
@@ -620,11 +623,11 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const

let evals = columns
.iter()
.map(|c| c.eval_with_next(vars.local_values, vars.next_values))
.map(|c| c.eval_with_next(local_values, next_values))
.collect::<Vec<_>>();
let combined = challenges.combine(evals.iter());
let local_filter = if let Some(column) = filter_column {
column.eval_with_next(vars.local_values, vars.next_values)
column.eval_with_next(local_values, next_values)
} else {
P::ONES
};
@@ -707,10 +710,13 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
vars: &S::EvaluationFrameTarget,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
consumer: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

for lookup_vars in ctl_vars {
let CtlCheckVarsTarget {
local_z,
@@ -722,7 +728,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<

let one = builder.one_extension();
let local_filter = if let Some(column) = filter_column {
column.eval_circuit(builder, vars.local_values)
column.eval_circuit(builder, local_values)
} else {
one
};
@@ -738,7 +744,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<

let evals = columns
.iter()
.map(|c| c.eval_with_next_circuit(builder, vars.local_values, vars.next_values))
.map(|c| c.eval_with_next_circuit(builder, local_values, next_values))
.collect::<Vec<_>>();

let combined = challenges.combine_circuit(builder, &evals);

evm/src/evaluation_frame.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
/// A trait for viewing an evaluation frame of a STARK table.
///
/// It allows to access the current and next rows at a given step
/// and can be used to implement constraint evaluation both natively
/// and recursively.
pub trait StarkEvaluationFrame<T: Copy + Clone + Default>: Sized {
/// The number of columns for the STARK table this evaluation frame views.
const COLUMNS: usize;

/// Returns the local values (i.e. current row) for this evaluation frame.
fn get_local_values(&self) -> &[T];
/// Returns the next values (i.e. next row) for this evaluation frame.
fn get_next_values(&self) -> &[T];

/// Outputs a new evaluation frame from the provided local and next values.
///
/// **NOTE**: Concrete implementations of this method SHOULD ensure that
/// the provided slices lengths match the `Self::COLUMNS` value.
fn from_values(lv: &[T], nv: &[T]) -> Self;
}

pub struct StarkFrame<T: Copy + Clone + Default, const N: usize> {
local_values: [T; N],
next_values: [T; N],
}

impl<T: Copy + Clone + Default, const N: usize> StarkEvaluationFrame<T> for StarkFrame<T, N> {
const COLUMNS: usize = N;

fn get_local_values(&self) -> &[T] {
&self.local_values
}

fn get_next_values(&self) -> &[T] {
&self.next_values
}

fn from_values(lv: &[T], nv: &[T]) -> Self {
assert_eq!(lv.len(), Self::COLUMNS);
assert_eq!(nv.len(), Self::COLUMNS);

Self {
local_values: lv.try_into().unwrap(),
next_values: nv.try_into().unwrap(),
}
}
}
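For reference, a minimal sketch of how a caller can build and consume a StarkFrame, assuming it runs inside this crate; the 4-column u64 table is hypothetical and not taken from any table in this diff:

    use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};

    fn demo_frame() {
        // Hypothetical 4-column table; any Copy + Default type works for T.
        let local = [1u64, 2, 3, 4];
        let next = [5u64, 6, 7, 8];
        let frame: StarkFrame<u64, 4> = StarkFrame::from_values(&local, &next);

        // Constraint evaluators read the rows back as slices...
        assert_eq!(frame.get_local_values(), &local[..]);
        assert_eq!(frame.get_next_values(), &next[..]);

        // ...and, as in the hunks above, convert them back to fixed-size arrays when needed.
        let lv: &[u64; 4] = frame.get_local_values().try_into().unwrap();
        assert_eq!(lv[0], 1);
    }

from_values panics if the slice lengths do not match COLUMNS, which is exactly what the assert_eq! calls in the implementation above enforce.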
@@ -28,20 +28,13 @@ use plonky2::util::timing::TimingTree;
use plonky2_util::log2_ceil;

use crate::all_stark::{all_cross_table_lookups, AllStark, Table, NUM_TABLES};
use crate::arithmetic::arithmetic_stark::ArithmeticStark;
use crate::byte_packing::byte_packing_stark::BytePackingStark;
use crate::config::StarkConfig;
use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{
get_grand_product_challenge_set_target, verify_cross_table_lookups_circuit, CrossTableLookup,
GrandProductChallengeSet,
};
use crate::generation::GenerationInputs;
use crate::get_challenges::observe_public_values_target;
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::proof::{
BlockHashesTarget, BlockMetadataTarget, ExtraBlockDataTarget, PublicValues, PublicValuesTarget,
StarkProofWithMetadata, TrieRootsTarget,
@@ -299,13 +292,6 @@ where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F> + 'static,
C::Hasher: AlgebraicHasher<F>,
[(); ArithmeticStark::<F, D>::COLUMNS]:,
[(); BytePackingStark::<F, D>::COLUMNS]:,
[(); CpuStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
{
pub fn to_bytes(
&self,
@@ -1085,10 +1071,7 @@ where
degree_bits_range: Range<usize>,
all_ctls: &[CrossTableLookup<F>],
stark_config: &StarkConfig,
) -> Self
where
[(); S::COLUMNS]:,
{
) -> Self {
let by_stark_size = degree_bits_range
.map(|degree_bits| {
(
@@ -1209,10 +1192,7 @@ where
degree_bits: usize,
all_ctls: &[CrossTableLookup<F>],
stark_config: &StarkConfig,
) -> Self
where
[(); S::COLUMNS]:,
{
) -> Self {
let initial_wrapper = recursive_stark_circuit(
table,
stark,
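The dropped [(); S::COLUMNS]: bounds were only needed while the evaluation types named { S::COLUMNS } as a const-generic argument, as in the removed StarkEvaluationVars<FE, P, { S::COLUMNS }> signatures; once the frame is reached through the associated types S::EvaluationFrame / S::EvaluationFrameTarget, callers no longer spell out that const expression, so the generic_const_exprs-style where-clauses can go. A minimal illustration with hypothetical names, not types from this diff:

    // Hypothetical illustration of why the bounds could be dropped.
    trait Table {
        const COLUMNS: usize;
        type Frame; // e.g. some StarkFrame<_, N> chosen by the implementor
    }

    // Old style: the caller names [(); T::COLUMNS] and must repeat the unstable bound:
    // fn check_old<T: Table>(row: [u64; T::COLUMNS]) where [(); T::COLUMNS]: { ... }

    // New style: the caller only sees the associated type, with no const expression to bound.
    fn check_new<T: Table>(frame: &T::Frame) -> &T::Frame {
        frame
    }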

@@ -6,12 +6,14 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::plonk_common::reduce_with_powers_ext_circuit;
use plonky2::timed;
use plonky2::util::timing::TimingTree;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak::columns::{
reg_a, reg_a_prime, reg_a_prime_prime, reg_a_prime_prime_0_0_bit, reg_a_prime_prime_prime,
reg_b, reg_c, reg_c_prime, reg_input_limb, reg_output_limb, reg_preimage, reg_step,
@@ -24,7 +26,6 @@ use crate::keccak::logic::{
use crate::keccak::round_flags::{eval_round_flags, eval_round_flags_recursively};
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// Number of rounds in a Keccak permutation.
pub(crate) const NUM_ROUNDS: usize = 24;
@@ -239,11 +240,16 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@@ -251,33 +257,34 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
{
eval_round_flags(vars, yield_constr);

let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

// The filter must be 0 or 1.
let filter = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let filter = local_values[reg_step(NUM_ROUNDS - 1)];
yield_constr.constraint(filter * (filter - P::ONES));

// If this is not the final step, the filter must be off.
let final_step = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let final_step = local_values[reg_step(NUM_ROUNDS - 1)];
let not_final_step = P::ONES - final_step;
yield_constr.constraint(not_final_step * filter);

// If this is not the final step, the local and next preimages must match.
// Also, if this is the first step, the preimage must match A.
let is_first_step = vars.local_values[reg_step(0)];
let is_first_step = local_values[reg_step(0)];
for x in 0..5 {
for y in 0..5 {
let reg_preimage_lo = reg_preimage(x, y);
let reg_preimage_hi = reg_preimage_lo + 1;
let diff_lo =
vars.local_values[reg_preimage_lo] - vars.next_values[reg_preimage_lo];
let diff_hi =
vars.local_values[reg_preimage_hi] - vars.next_values[reg_preimage_hi];
let diff_lo = local_values[reg_preimage_lo] - next_values[reg_preimage_lo];
let diff_hi = local_values[reg_preimage_hi] - next_values[reg_preimage_hi];
yield_constr.constraint_transition(not_final_step * diff_lo);
yield_constr.constraint_transition(not_final_step * diff_hi);

let reg_a_lo = reg_a(x, y);
let reg_a_hi = reg_a_lo + 1;
let diff_lo = vars.local_values[reg_preimage_lo] - vars.local_values[reg_a_lo];
let diff_hi = vars.local_values[reg_preimage_hi] - vars.local_values[reg_a_hi];
let diff_lo = local_values[reg_preimage_lo] - local_values[reg_a_lo];
let diff_hi = local_values[reg_preimage_hi] - local_values[reg_a_hi];
yield_constr.constraint(is_first_step * diff_lo);
yield_constr.constraint(is_first_step * diff_hi);
}
@@ -287,11 +294,11 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for x in 0..5 {
for z in 0..64 {
let xor = xor3_gen(
vars.local_values[reg_c(x, z)],
vars.local_values[reg_c((x + 4) % 5, z)],
vars.local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
local_values[reg_c(x, z)],
local_values[reg_c((x + 4) % 5, z)],
local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
);
let c_prime = vars.local_values[reg_c_prime(x, z)];
let c_prime = local_values[reg_c_prime(x, z)];
yield_constr.constraint(c_prime - xor);
}
}
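The xor3_gen / xor_gen / andn_gen helpers used in these hunks encode bitwise operations as low-degree polynomials over values already constrained to be bits: for bits x and y, x XOR y = x + y - 2xy and (NOT x) AND y = y - xy. That is what lets the same expression be evaluated on packed field values here and rebuilt gate by gate in eval_ext_circuit below. A standalone check of the identities over {0, 1} (illustration only, not code from this diff):

    // Illustration: boolean identities behind xor_gen / andn_gen, checked over {0, 1}.
    fn xor_gen(x: u64, y: u64) -> u64 { x + y - 2 * x * y }
    fn andn_gen(x: u64, y: u64) -> u64 { y - x * y }

    fn main() {
        for x in 0..2u64 {
            for y in 0..2u64 {
                assert_eq!(xor_gen(x, y), x ^ y);
                assert_eq!(andn_gen(x, y), (1 - x) & y);
            }
        }
    }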
@@ -304,12 +311,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
// It isn't required, but makes this check a bit cleaner.
for x in 0..5 {
for y in 0..5 {
let a_lo = vars.local_values[reg_a(x, y)];
let a_hi = vars.local_values[reg_a(x, y) + 1];
let a_lo = local_values[reg_a(x, y)];
let a_hi = local_values[reg_a(x, y) + 1];
let get_bit = |z| {
let a_prime = vars.local_values[reg_a_prime(x, y, z)];
let c = vars.local_values[reg_c(x, z)];
let c_prime = vars.local_values[reg_c_prime(x, z)];
let a_prime = local_values[reg_a_prime(x, y, z)];
let c = local_values[reg_c(x, z)];
let c_prime = local_values[reg_c_prime(x, z)];
xor3_gen(a_prime, c, c_prime)
};
let computed_lo = (0..32)
@@ -329,10 +336,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for x in 0..5 {
for z in 0..64 {
let sum: P = [0, 1, 2, 3, 4]
.map(|i| vars.local_values[reg_a_prime(x, i, z)])
.map(|i| local_values[reg_a_prime(x, i, z)])
.into_iter()
.sum();
let diff = sum - vars.local_values[reg_c_prime(x, z)];
let diff = sum - local_values[reg_c_prime(x, z)];
yield_constr
.constraint(diff * (diff - FE::TWO) * (diff - FE::from_canonical_u8(4)));
}
@@ -343,18 +350,18 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for y in 0..5 {
let get_bit = |z| {
xor_gen(
vars.local_values[reg_b(x, y, z)],
local_values[reg_b(x, y, z)],
andn_gen(
vars.local_values[reg_b((x + 1) % 5, y, z)],
vars.local_values[reg_b((x + 2) % 5, y, z)],
local_values[reg_b((x + 1) % 5, y, z)],
local_values[reg_b((x + 2) % 5, y, z)],
),
)
};

let reg_lo = reg_a_prime_prime(x, y);
let reg_hi = reg_lo + 1;
let lo = vars.local_values[reg_lo];
let hi = vars.local_values[reg_hi];
let lo = local_values[reg_lo];
let hi = local_values[reg_hi];
let computed_lo = (0..32)
.rev()
.fold(P::ZEROS, |acc, z| acc.doubles() + get_bit(z));
@@ -369,7 +376,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F

// A'''[0, 0] = A''[0, 0] XOR RC
let a_prime_prime_0_0_bits = (0..64)
.map(|i| vars.local_values[reg_a_prime_prime_0_0_bit(i)])
.map(|i| local_values[reg_a_prime_prime_0_0_bit(i)])
.collect_vec();
let computed_a_prime_prime_0_0_lo = (0..32)
.rev()
@@ -377,15 +384,15 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
let computed_a_prime_prime_0_0_hi = (32..64)
.rev()
.fold(P::ZEROS, |acc, z| acc.doubles() + a_prime_prime_0_0_bits[z]);
let a_prime_prime_0_0_lo = vars.local_values[reg_a_prime_prime(0, 0)];
let a_prime_prime_0_0_hi = vars.local_values[reg_a_prime_prime(0, 0) + 1];
let a_prime_prime_0_0_lo = local_values[reg_a_prime_prime(0, 0)];
let a_prime_prime_0_0_hi = local_values[reg_a_prime_prime(0, 0) + 1];
yield_constr.constraint(computed_a_prime_prime_0_0_lo - a_prime_prime_0_0_lo);
yield_constr.constraint(computed_a_prime_prime_0_0_hi - a_prime_prime_0_0_hi);

let get_xored_bit = |i| {
let mut rc_bit_i = P::ZEROS;
for r in 0..NUM_ROUNDS {
let this_round = vars.local_values[reg_step(r)];
let this_round = local_values[reg_step(r)];
let this_round_constant =
P::from(FE::from_canonical_u32(rc_value_bit(r, i) as u32));
rc_bit_i += this_round * this_round_constant;
@@ -394,8 +401,8 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
xor_gen(a_prime_prime_0_0_bits[i], rc_bit_i)
};

let a_prime_prime_prime_0_0_lo = vars.local_values[reg_a_prime_prime_prime(0, 0)];
let a_prime_prime_prime_0_0_hi = vars.local_values[reg_a_prime_prime_prime(0, 0) + 1];
let a_prime_prime_prime_0_0_lo = local_values[reg_a_prime_prime_prime(0, 0)];
let a_prime_prime_prime_0_0_hi = local_values[reg_a_prime_prime_prime(0, 0) + 1];
let computed_a_prime_prime_prime_0_0_lo = (0..32)
.rev()
.fold(P::ZEROS, |acc, z| acc.doubles() + get_xored_bit(z));
@@ -408,11 +415,11 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
// Enforce that this round's output equals the next round's input.
for x in 0..5 {
for y in 0..5 {
let output_lo = vars.local_values[reg_a_prime_prime_prime(x, y)];
let output_hi = vars.local_values[reg_a_prime_prime_prime(x, y) + 1];
let input_lo = vars.next_values[reg_a(x, y)];
let input_hi = vars.next_values[reg_a(x, y) + 1];
let is_last_round = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let output_lo = local_values[reg_a_prime_prime_prime(x, y)];
let output_hi = local_values[reg_a_prime_prime_prime(x, y) + 1];
let input_lo = next_values[reg_a(x, y)];
let input_hi = next_values[reg_a(x, y) + 1];
let is_last_round = local_values[reg_step(NUM_ROUNDS - 1)];
let not_last_round = P::ONES - is_last_round;
yield_constr.constraint_transition(not_last_round * (output_lo - input_lo));
yield_constr.constraint_transition(not_last_round * (output_hi - input_hi));
@@ -423,7 +430,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
vars: &Self::EvaluationFrameTarget,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one_ext = builder.one_extension();
@@ -433,49 +440,44 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F

eval_round_flags_recursively(builder, vars, yield_constr);

let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

// The filter must be 0 or 1.
let filter = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let filter = local_values[reg_step(NUM_ROUNDS - 1)];
let constraint = builder.mul_sub_extension(filter, filter, filter);
yield_constr.constraint(builder, constraint);

// If this is not the final step, the filter must be off.
let final_step = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let final_step = local_values[reg_step(NUM_ROUNDS - 1)];
let not_final_step = builder.sub_extension(one_ext, final_step);
let constraint = builder.mul_extension(not_final_step, filter);
yield_constr.constraint(builder, constraint);

// If this is not the final step, the local and next preimages must match.
// Also, if this is the first step, the preimage must match A.
let is_first_step = vars.local_values[reg_step(0)];
let is_first_step = local_values[reg_step(0)];
for x in 0..5 {
for y in 0..5 {
let reg_preimage_lo = reg_preimage(x, y);
let reg_preimage_hi = reg_preimage_lo + 1;
let diff = builder.sub_extension(
vars.local_values[reg_preimage_lo],
vars.next_values[reg_preimage_lo],
);
let diff = builder
.sub_extension(local_values[reg_preimage_lo], next_values[reg_preimage_lo]);
let constraint = builder.mul_extension(not_final_step, diff);
yield_constr.constraint_transition(builder, constraint);
let diff = builder.sub_extension(
vars.local_values[reg_preimage_hi],
vars.next_values[reg_preimage_hi],
);
let diff = builder
.sub_extension(local_values[reg_preimage_hi], next_values[reg_preimage_hi]);
let constraint = builder.mul_extension(not_final_step, diff);
yield_constr.constraint_transition(builder, constraint);

let reg_a_lo = reg_a(x, y);
let reg_a_hi = reg_a_lo + 1;
let diff_lo = builder.sub_extension(
vars.local_values[reg_preimage_lo],
vars.local_values[reg_a_lo],
);
let diff_lo =
builder.sub_extension(local_values[reg_preimage_lo], local_values[reg_a_lo]);
let constraint = builder.mul_extension(is_first_step, diff_lo);
yield_constr.constraint(builder, constraint);
let diff_hi = builder.sub_extension(
vars.local_values[reg_preimage_hi],
vars.local_values[reg_a_hi],
);
let diff_hi =
builder.sub_extension(local_values[reg_preimage_hi], local_values[reg_a_hi]);
let constraint = builder.mul_extension(is_first_step, diff_hi);
yield_constr.constraint(builder, constraint);
}
@@ -486,11 +488,11 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for z in 0..64 {
let xor = xor3_gen_circuit(
builder,
vars.local_values[reg_c(x, z)],
vars.local_values[reg_c((x + 4) % 5, z)],
vars.local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
local_values[reg_c(x, z)],
local_values[reg_c((x + 4) % 5, z)],
local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
);
let c_prime = vars.local_values[reg_c_prime(x, z)];
let c_prime = local_values[reg_c_prime(x, z)];
let diff = builder.sub_extension(c_prime, xor);
yield_constr.constraint(builder, diff);
}
@@ -504,12 +506,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
// It isn't required, but makes this check a bit cleaner.
for x in 0..5 {
for y in 0..5 {
let a_lo = vars.local_values[reg_a(x, y)];
let a_hi = vars.local_values[reg_a(x, y) + 1];
let a_lo = local_values[reg_a(x, y)];
let a_hi = local_values[reg_a(x, y) + 1];
let mut get_bit = |z| {
let a_prime = vars.local_values[reg_a_prime(x, y, z)];
let c = vars.local_values[reg_c(x, z)];
let c_prime = vars.local_values[reg_c_prime(x, z)];
let a_prime = local_values[reg_a_prime(x, y, z)];
let c = local_values[reg_c(x, z)];
let c_prime = local_values[reg_c_prime(x, z)];
xor3_gen_circuit(builder, a_prime, c, c_prime)
};
let bits_lo = (0..32).map(&mut get_bit).collect_vec();
@@ -529,9 +531,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for x in 0..5 {
for z in 0..64 {
let sum = builder.add_many_extension(
[0, 1, 2, 3, 4].map(|i| vars.local_values[reg_a_prime(x, i, z)]),
[0, 1, 2, 3, 4].map(|i| local_values[reg_a_prime(x, i, z)]),
);
let diff = builder.sub_extension(sum, vars.local_values[reg_c_prime(x, z)]);
let diff = builder.sub_extension(sum, local_values[reg_c_prime(x, z)]);
let diff_minus_two = builder.sub_extension(diff, two_ext);
let diff_minus_four = builder.sub_extension(diff, four_ext);
let constraint =
@@ -546,16 +548,16 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
let mut get_bit = |z| {
let andn = andn_gen_circuit(
builder,
vars.local_values[reg_b((x + 1) % 5, y, z)],
vars.local_values[reg_b((x + 2) % 5, y, z)],
local_values[reg_b((x + 1) % 5, y, z)],
local_values[reg_b((x + 2) % 5, y, z)],
);
xor_gen_circuit(builder, vars.local_values[reg_b(x, y, z)], andn)
xor_gen_circuit(builder, local_values[reg_b(x, y, z)], andn)
};

let reg_lo = reg_a_prime_prime(x, y);
let reg_hi = reg_lo + 1;
let lo = vars.local_values[reg_lo];
let hi = vars.local_values[reg_hi];
let lo = local_values[reg_lo];
let hi = local_values[reg_hi];
let bits_lo = (0..32).map(&mut get_bit).collect_vec();
let bits_hi = (32..64).map(get_bit).collect_vec();
let computed_lo = reduce_with_powers_ext_circuit(builder, &bits_lo, two);
@@ -569,14 +571,14 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F

// A'''[0, 0] = A''[0, 0] XOR RC
let a_prime_prime_0_0_bits = (0..64)
.map(|i| vars.local_values[reg_a_prime_prime_0_0_bit(i)])
.map(|i| local_values[reg_a_prime_prime_0_0_bit(i)])
.collect_vec();
let computed_a_prime_prime_0_0_lo =
reduce_with_powers_ext_circuit(builder, &a_prime_prime_0_0_bits[0..32], two);
let computed_a_prime_prime_0_0_hi =
reduce_with_powers_ext_circuit(builder, &a_prime_prime_0_0_bits[32..64], two);
let a_prime_prime_0_0_lo = vars.local_values[reg_a_prime_prime(0, 0)];
let a_prime_prime_0_0_hi = vars.local_values[reg_a_prime_prime(0, 0) + 1];
let a_prime_prime_0_0_lo = local_values[reg_a_prime_prime(0, 0)];
let a_prime_prime_0_0_hi = local_values[reg_a_prime_prime(0, 0) + 1];
let diff = builder.sub_extension(computed_a_prime_prime_0_0_lo, a_prime_prime_0_0_lo);
yield_constr.constraint(builder, diff);
let diff = builder.sub_extension(computed_a_prime_prime_0_0_hi, a_prime_prime_0_0_hi);
@@ -585,7 +587,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
let mut get_xored_bit = |i| {
let mut rc_bit_i = builder.zero_extension();
for r in 0..NUM_ROUNDS {
let this_round = vars.local_values[reg_step(r)];
let this_round = local_values[reg_step(r)];
let this_round_constant = builder
.constant_extension(F::from_canonical_u32(rc_value_bit(r, i) as u32).into());
rc_bit_i = builder.mul_add_extension(this_round, this_round_constant, rc_bit_i);
@@ -594,8 +596,8 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
xor_gen_circuit(builder, a_prime_prime_0_0_bits[i], rc_bit_i)
};

let a_prime_prime_prime_0_0_lo = vars.local_values[reg_a_prime_prime_prime(0, 0)];
let a_prime_prime_prime_0_0_hi = vars.local_values[reg_a_prime_prime_prime(0, 0) + 1];
let a_prime_prime_prime_0_0_lo = local_values[reg_a_prime_prime_prime(0, 0)];
let a_prime_prime_prime_0_0_hi = local_values[reg_a_prime_prime_prime(0, 0) + 1];
let bits_lo = (0..32).map(&mut get_xored_bit).collect_vec();
let bits_hi = (32..64).map(get_xored_bit).collect_vec();
let computed_a_prime_prime_prime_0_0_lo =
@@ -616,11 +618,11 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
// Enforce that this round's output equals the next round's input.
for x in 0..5 {
for y in 0..5 {
let output_lo = vars.local_values[reg_a_prime_prime_prime(x, y)];
let output_hi = vars.local_values[reg_a_prime_prime_prime(x, y) + 1];
let input_lo = vars.next_values[reg_a(x, y)];
let input_hi = vars.next_values[reg_a(x, y) + 1];
let is_last_round = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let output_lo = local_values[reg_a_prime_prime_prime(x, y)];
let output_hi = local_values[reg_a_prime_prime_prime(x, y) + 1];
let input_lo = next_values[reg_a(x, y)];
let input_hi = next_values[reg_a(x, y) + 1];
let is_last_round = local_values[reg_step(NUM_ROUNDS - 1)];
let diff = builder.sub_extension(input_lo, output_lo);
let filtered_diff = builder.mul_sub_extension(is_last_round, diff, diff);
yield_constr.constraint_transition(builder, filtered_diff);
@@ -697,7 +699,7 @@ mod tests {
f: Default::default(),
};

let rows = stark.generate_trace_rows(vec![input.try_into().unwrap()], 8);
let rows = stark.generate_trace_rows(vec![input], 8);
let last_row = rows[NUM_ROUNDS - 1];
let output = (0..NUM_INPUTS)
.map(|i| {
@@ -736,7 +738,7 @@ mod tests {
let trace_poly_values = timed!(
timing,
"generate trace",
stark.generate_trace(input.try_into().unwrap(), 8, &mut timing)
stark.generate_trace(input, 8, &mut timing)
);

// TODO: Cloning this isn't great; consider having `from_values` accept a reference,

@@ -2,60 +2,64 @@ use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak::columns::{reg_step, NUM_COLUMNS};
use crate::keccak::keccak_stark::NUM_ROUNDS;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_COLUMNS>,
vars: &StarkFrame<P, NUM_COLUMNS>,
yield_constr: &mut ConstraintConsumer<P>,
) {
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

// Initially, the first step flag should be 1 while the others should be 0.
yield_constr.constraint_first_row(vars.local_values[reg_step(0)] - F::ONE);
yield_constr.constraint_first_row(local_values[reg_step(0)] - F::ONE);
for i in 1..NUM_ROUNDS {
yield_constr.constraint_first_row(vars.local_values[reg_step(i)]);
yield_constr.constraint_first_row(local_values[reg_step(i)]);
}

// Flags should circularly increment, or be all zero for padding rows.
let next_any_flag = (0..NUM_ROUNDS)
.map(|i| vars.next_values[reg_step(i)])
.sum::<P>();
let next_any_flag = (0..NUM_ROUNDS).map(|i| next_values[reg_step(i)]).sum::<P>();
for i in 0..NUM_ROUNDS {
let current_round_flag = vars.local_values[reg_step(i)];
let next_round_flag = vars.next_values[reg_step((i + 1) % NUM_ROUNDS)];
let current_round_flag = local_values[reg_step(i)];
let next_round_flag = next_values[reg_step((i + 1) % NUM_ROUNDS)];
yield_constr.constraint_transition(next_any_flag * (next_round_flag - current_round_flag));
}

// Padding rows should always be followed by padding rows.
let current_any_flag = (0..NUM_ROUNDS)
.map(|i| vars.local_values[reg_step(i)])
.map(|i| local_values[reg_step(i)])
.sum::<P>();
yield_constr.constraint_transition(next_any_flag * (current_any_flag - F::ONE));
}

pub(crate) fn eval_round_flags_recursively<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS>,
vars: &StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

// Initially, the first step flag should be 1 while the others should be 0.
let step_0_minus_1 = builder.sub_extension(vars.local_values[reg_step(0)], one);
let step_0_minus_1 = builder.sub_extension(local_values[reg_step(0)], one);
yield_constr.constraint_first_row(builder, step_0_minus_1);
for i in 1..NUM_ROUNDS {
yield_constr.constraint_first_row(builder, vars.local_values[reg_step(i)]);
yield_constr.constraint_first_row(builder, local_values[reg_step(i)]);
}

// Flags should circularly increment, or be all zero for padding rows.
let next_any_flag =
builder.add_many_extension((0..NUM_ROUNDS).map(|i| vars.next_values[reg_step(i)]));
builder.add_many_extension((0..NUM_ROUNDS).map(|i| next_values[reg_step(i)]));
for i in 0..NUM_ROUNDS {
let current_round_flag = vars.local_values[reg_step(i)];
let next_round_flag = vars.next_values[reg_step((i + 1) % NUM_ROUNDS)];
let current_round_flag = local_values[reg_step(i)];
let next_round_flag = next_values[reg_step((i + 1) % NUM_ROUNDS)];
let diff = builder.sub_extension(next_round_flag, current_round_flag);
let constraint = builder.mul_extension(next_any_flag, diff);
yield_constr.constraint_transition(builder, constraint);
@@ -63,7 +67,7 @@ pub(crate) fn eval_round_flags_recursively<F: RichField + Extendable<D>, const D

// Padding rows should always be followed by padding rows.
let current_any_flag =
builder.add_many_extension((0..NUM_ROUNDS).map(|i| vars.local_values[reg_step(i)]));
builder.add_many_extension((0..NUM_ROUNDS).map(|i| local_values[reg_step(i)]));
let constraint = builder.mul_sub_extension(next_any_flag, current_any_flag, next_any_flag);
yield_constr.constraint_transition(builder, constraint);
}

@@ -17,10 +17,10 @@ use plonky2_util::ceil_div_usize;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::kernel::keccak_util::keccakf_u32s;
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak_sponge::columns::*;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
|
||||
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
||||
use crate::witness::memory::MemoryAddress;
|
||||
|
||||
pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
|
||||
@ -423,18 +423,27 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakSpongeStark<F, D> {
|
||||
}
|
||||
|
||||
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeStark<F, D> {
|
||||
const COLUMNS: usize = NUM_KECCAK_SPONGE_COLUMNS;
|
||||
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_KECCAK_SPONGE_COLUMNS>
|
||||
where
|
||||
FE: FieldExtension<D2, BaseField = F>,
|
||||
P: PackedField<Scalar = FE>;
|
||||
|
||||
type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_KECCAK_SPONGE_COLUMNS>;
|
||||
|
||||
fn eval_packed_generic<FE, P, const D2: usize>(
|
||||
&self,
|
||||
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrame<FE, P, D2>,
|
||||
yield_constr: &mut ConstraintConsumer<P>,
|
||||
) where
|
||||
FE: FieldExtension<D2, BaseField = F>,
|
||||
P: PackedField<Scalar = FE>,
|
||||
{
|
||||
let local_values: &KeccakSpongeColumnsView<P> = vars.local_values.borrow();
|
||||
let next_values: &KeccakSpongeColumnsView<P> = vars.next_values.borrow();
|
||||
let local_values: &[P; NUM_KECCAK_SPONGE_COLUMNS] =
|
||||
vars.get_local_values().try_into().unwrap();
|
||||
let local_values: &KeccakSpongeColumnsView<P> = local_values.borrow();
|
||||
let next_values: &[P; NUM_KECCAK_SPONGE_COLUMNS] =
|
||||
vars.get_next_values().try_into().unwrap();
|
||||
let next_values: &KeccakSpongeColumnsView<P> = next_values.borrow();
|
||||
|
||||
// Each flag (full-input block, final block or implied dummy flag) must be boolean.
|
||||
let is_full_input_block = local_values.is_full_input_block;
|
||||
@ -537,11 +546,15 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeS
|
||||
fn eval_ext_circuit(
|
||||
&self,
|
||||
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
|
||||
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrameTarget,
|
||||
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
|
||||
) {
|
||||
let local_values: &KeccakSpongeColumnsView<ExtensionTarget<D>> = vars.local_values.borrow();
|
||||
let next_values: &KeccakSpongeColumnsView<ExtensionTarget<D>> = vars.next_values.borrow();
|
||||
let local_values: &[ExtensionTarget<D>; NUM_KECCAK_SPONGE_COLUMNS] =
|
||||
vars.get_local_values().try_into().unwrap();
|
||||
let local_values: &KeccakSpongeColumnsView<ExtensionTarget<D>> = local_values.borrow();
|
||||
let next_values: &[ExtensionTarget<D>; NUM_KECCAK_SPONGE_COLUMNS] =
|
||||
vars.get_next_values().try_into().unwrap();
|
||||
let next_values: &KeccakSpongeColumnsView<ExtensionTarget<D>> = next_values.borrow();
|
||||
|
||||
let one = builder.one_extension();
|
||||
|
||||
|
||||
@ -4,7 +4,6 @@
|
||||
#![allow(clippy::type_complexity)]
|
||||
#![allow(clippy::field_reassign_with_default)]
|
||||
#![feature(let_chains)]
|
||||
#![feature(generic_const_exprs)]
|
||||
|
||||
pub mod all_stark;
|
||||
pub mod arithmetic;
|
||||
@ -14,6 +13,7 @@ pub mod constraint_consumer;
|
||||
pub mod cpu;
|
||||
pub mod cross_table_lookup;
|
||||
pub mod curve_pairings;
|
||||
pub mod evaluation_frame;
|
||||
pub mod extension_tower;
|
||||
pub mod fixed_recursive_verifier;
|
||||
pub mod generation;
|
||||
@ -30,7 +30,6 @@ pub mod stark;
|
||||
pub mod stark_testing;
|
||||
pub mod util;
|
||||
pub mod vanishing_poly;
|
||||
pub mod vars;
|
||||
pub mod verifier;
|
||||
pub mod witness;
|
||||
|
||||
|
||||
@ -7,16 +7,17 @@ use plonky2::field::packed::PackedField;
|
||||
use plonky2::field::polynomial::PolynomialValues;
|
||||
use plonky2::field::types::Field;
|
||||
use plonky2::hash::hash_types::RichField;
|
||||
use plonky2::iop::ext_target::ExtensionTarget;
|
||||
use plonky2::timed;
|
||||
use plonky2::util::timing::TimingTree;
|
||||
use plonky2_util::ceil_div_usize;
|
||||
|
||||
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
|
||||
use crate::cross_table_lookup::Column;
|
||||
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
|
||||
use crate::logic::columns::NUM_COLUMNS;
|
||||
use crate::stark::Stark;
|
||||
use crate::util::{limb_from_bits_le, limb_from_bits_le_recursive, trace_rows_to_poly_values};
|
||||
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
||||
|
||||
// Total number of bits per input/output.
|
||||
const VAL_BITS: usize = 256;
|
||||
@ -181,17 +182,22 @@ impl<F: RichField, const D: usize> LogicStark<F, D> {
|
||||
}
|
||||
|
||||
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F, D> {
|
||||
const COLUMNS: usize = NUM_COLUMNS;
|
||||
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
|
||||
where
|
||||
FE: FieldExtension<D2, BaseField = F>,
|
||||
P: PackedField<Scalar = FE>;
|
||||
|
||||
type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;
|
||||
|
||||
fn eval_packed_generic<FE, P, const D2: usize>(
|
||||
&self,
|
||||
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrame<FE, P, D2>,
|
||||
yield_constr: &mut ConstraintConsumer<P>,
|
||||
) where
|
||||
FE: FieldExtension<D2, BaseField = F>,
|
||||
P: PackedField<Scalar = FE>,
|
||||
{
|
||||
let lv = &vars.local_values;
|
||||
let lv = vars.get_local_values();
|
||||
|
||||
// IS_AND, IS_OR, and IS_XOR come from the CPU table, so we assume they're valid.
|
||||
let is_and = lv[columns::IS_AND];
|
||||
@ -237,10 +243,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F,
|
||||
fn eval_ext_circuit(
|
||||
&self,
|
||||
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
|
||||
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrameTarget,
|
||||
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
|
||||
) {
|
||||
let lv = &vars.local_values;
|
||||
let lv = vars.get_local_values();
|
||||
|
||||
// IS_AND, IS_OR, and IS_XOR come from the CPU table, so we assume they're valid.
|
||||
let is_and = lv[columns::IS_AND];
|
||||
|
||||
@ -12,8 +12,8 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||
use plonky2_util::ceil_div_usize;
|
||||
|
||||
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
|
||||
use crate::evaluation_frame::StarkEvaluationFrame;
|
||||
use crate::stark::Stark;
|
||||
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
||||
|
||||
pub struct Lookup {
|
||||
/// Columns whose values should be contained in the lookup table.
|
||||
@ -131,7 +131,7 @@ where
|
||||
pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2: usize>(
|
||||
stark: &S,
|
||||
lookups: &[Lookup],
|
||||
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
|
||||
vars: &S::EvaluationFrame<FE, P, D2>,
|
||||
lookup_vars: LookupCheckVars<F, FE, P, D2>,
|
||||
yield_constr: &mut ConstraintConsumer<P>,
|
||||
) where
|
||||
@ -152,7 +152,7 @@ pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2:
|
||||
for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() {
|
||||
let mut x = lookup_vars.local_values[start + j];
|
||||
let mut y = P::ZEROS;
|
||||
let fs = chunk.iter().map(|&k| vars.local_values[k]);
|
||||
let fs = chunk.iter().map(|&k| vars.get_local_values()[k]);
|
||||
for f in fs {
|
||||
x *= f + challenge;
|
||||
y += f + challenge;
|
||||
@ -167,12 +167,12 @@ pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2:
|
||||
// Check the `Z` polynomial.
|
||||
let z = lookup_vars.local_values[start + num_helper_columns - 1];
|
||||
let next_z = lookup_vars.next_values[start + num_helper_columns - 1];
|
||||
let table_with_challenge = vars.local_values[lookup.table_column] + challenge;
|
||||
let table_with_challenge = vars.get_local_values()[lookup.table_column] + challenge;
|
||||
let y = lookup_vars.local_values[start..start + num_helper_columns - 1]
|
||||
.iter()
|
||||
.fold(P::ZEROS, |acc, x| acc + *x)
|
||||
* table_with_challenge
|
||||
- vars.local_values[lookup.frequencies_column];
|
||||
- vars.get_local_values()[lookup.frequencies_column];
|
||||
yield_constr.constraint((next_z - z) * table_with_challenge - y);
|
||||
start += num_helper_columns;
|
||||
}
|
||||
@ -192,7 +192,7 @@ pub(crate) fn eval_ext_lookups_circuit<
|
||||
>(
|
||||
builder: &mut CircuitBuilder<F, D>,
|
||||
stark: &S,
|
||||
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
|
||||
vars: &S::EvaluationFrameTarget,
|
||||
lookup_vars: LookupCheckVarsTarget<D>,
|
||||
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
|
||||
) {
|
||||
@ -208,7 +208,7 @@ pub(crate) fn eval_ext_lookups_circuit<
|
||||
for (j, chunk) in lookup.columns.chunks(degree - 1).enumerate() {
|
||||
let mut x = lookup_vars.local_values[start + j];
|
||||
let mut y = builder.zero_extension();
|
||||
let fs = chunk.iter().map(|&k| vars.local_values[k]);
|
||||
let fs = chunk.iter().map(|&k| vars.get_local_values()[k]);
|
||||
for f in fs {
|
||||
let tmp = builder.add_extension(f, challenge);
|
||||
x = builder.mul_extension(x, tmp);
|
||||
@ -230,13 +230,13 @@ pub(crate) fn eval_ext_lookups_circuit<
|
||||
let z = lookup_vars.local_values[start + num_helper_columns - 1];
|
||||
let next_z = lookup_vars.next_values[start + num_helper_columns - 1];
|
||||
let table_with_challenge =
|
||||
builder.add_extension(vars.local_values[lookup.table_column], challenge);
|
||||
builder.add_extension(vars.get_local_values()[lookup.table_column], challenge);
|
||||
let mut y = builder.add_many_extension(
|
||||
&lookup_vars.local_values[start..start + num_helper_columns - 1],
|
||||
);
|
||||
|
||||
y = builder.mul_extension(y, table_with_challenge);
|
||||
y = builder.sub_extension(y, vars.local_values[lookup.frequencies_column]);
|
||||
y = builder.sub_extension(y, vars.get_local_values()[lookup.frequencies_column]);
|
||||
|
||||
let mut constraint = builder.sub_extension(next_z, z);
|
||||
constraint = builder.mul_extension(constraint, table_with_challenge);
|
||||
|
||||
@ -7,6 +7,7 @@ use plonky2::field::packed::PackedField;
|
||||
use plonky2::field::polynomial::PolynomialValues;
|
||||
use plonky2::field::types::Field;
|
||||
use plonky2::hash::hash_types::RichField;
|
||||
use plonky2::iop::ext_target::ExtensionTarget;
|
||||
use plonky2::timed;
|
||||
use plonky2::util::timing::TimingTree;
|
||||
use plonky2::util::transpose;
|
||||
@ -14,6 +15,7 @@ use plonky2_maybe_rayon::*;
|
||||
|
||||
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
|
||||
use crate::cross_table_lookup::Column;
|
||||
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
|
||||
use crate::lookup::Lookup;
|
||||
use crate::memory::columns::{
|
||||
value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER, FILTER,
|
||||
@ -22,7 +24,6 @@ use crate::memory::columns::{
|
||||
};
|
||||
use crate::memory::VALUE_LIMBS;
|
||||
use crate::stark::Stark;
|
||||
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
||||
use crate::witness::memory::MemoryOpKind::Read;
|
||||
use crate::witness::memory::{MemoryAddress, MemoryOp};
|
||||
|
||||
@ -237,48 +238,55 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
|
||||
}
|
||||
|
||||
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F, D> {
|
||||
const COLUMNS: usize = NUM_COLUMNS;
|
||||
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
|
||||
where
|
||||
FE: FieldExtension<D2, BaseField = F>,
|
||||
P: PackedField<Scalar = FE>;
|
||||
|
||||
type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;
|
||||
|
||||
fn eval_packed_generic<FE, P, const D2: usize>(
|
||||
&self,
|
||||
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrame<FE, P, D2>,
|
||||
yield_constr: &mut ConstraintConsumer<P>,
|
||||
) where
|
||||
FE: FieldExtension<D2, BaseField = F>,
|
||||
P: PackedField<Scalar = FE>,
|
||||
{
|
||||
let one = P::from(FE::ONE);
|
||||
let local_values = vars.get_local_values();
|
||||
let next_values = vars.get_next_values();
|
||||
|
||||
let timestamp = vars.local_values[TIMESTAMP];
|
||||
let addr_context = vars.local_values[ADDR_CONTEXT];
|
||||
let addr_segment = vars.local_values[ADDR_SEGMENT];
|
||||
let addr_virtual = vars.local_values[ADDR_VIRTUAL];
|
||||
let values: Vec<_> = (0..8).map(|i| vars.local_values[value_limb(i)]).collect();
|
||||
let timestamp = local_values[TIMESTAMP];
|
||||
let addr_context = local_values[ADDR_CONTEXT];
|
||||
let addr_segment = local_values[ADDR_SEGMENT];
|
||||
let addr_virtual = local_values[ADDR_VIRTUAL];
|
||||
let value_limbs: Vec<_> = (0..8).map(|i| local_values[value_limb(i)]).collect();
|
||||
|
||||
let next_timestamp = vars.next_values[TIMESTAMP];
|
||||
let next_is_read = vars.next_values[IS_READ];
|
||||
let next_addr_context = vars.next_values[ADDR_CONTEXT];
|
||||
let next_addr_segment = vars.next_values[ADDR_SEGMENT];
|
||||
let next_addr_virtual = vars.next_values[ADDR_VIRTUAL];
|
||||
let next_values: Vec<_> = (0..8).map(|i| vars.next_values[value_limb(i)]).collect();
|
||||
let next_timestamp = next_values[TIMESTAMP];
|
||||
let next_is_read = next_values[IS_READ];
|
||||
let next_addr_context = next_values[ADDR_CONTEXT];
|
||||
let next_addr_segment = next_values[ADDR_SEGMENT];
|
||||
let next_addr_virtual = next_values[ADDR_VIRTUAL];
|
||||
let next_values_limbs: Vec<_> = (0..8).map(|i| next_values[value_limb(i)]).collect();
|
||||
|
||||
// The filter must be 0 or 1.
|
||||
let filter = vars.local_values[FILTER];
|
||||
let filter = local_values[FILTER];
|
||||
yield_constr.constraint(filter * (filter - P::ONES));
|
||||
|
||||
// If this is a dummy row (filter is off), it must be a read. This means the prover can
|
||||
// insert reads which never appear in the CPU trace (which are harmless), but not writes.
|
||||
let is_dummy = P::ONES - filter;
|
||||
let is_write = P::ONES - vars.local_values[IS_READ];
|
||||
let is_write = P::ONES - local_values[IS_READ];
|
||||
yield_constr.constraint(is_dummy * is_write);
|
||||
|
||||
let context_first_change = vars.local_values[CONTEXT_FIRST_CHANGE];
|
||||
let segment_first_change = vars.local_values[SEGMENT_FIRST_CHANGE];
|
||||
let virtual_first_change = vars.local_values[VIRTUAL_FIRST_CHANGE];
|
||||
let context_first_change = local_values[CONTEXT_FIRST_CHANGE];
|
||||
let segment_first_change = local_values[SEGMENT_FIRST_CHANGE];
|
||||
let virtual_first_change = local_values[VIRTUAL_FIRST_CHANGE];
|
||||
let address_unchanged =
|
||||
one - context_first_change - segment_first_change - virtual_first_change;
|
||||
|
||||
let range_check = vars.local_values[RANGE_CHECK];
|
||||
let range_check = local_values[RANGE_CHECK];
|
||||
|
||||
let not_context_first_change = one - context_first_change;
|
||||
let not_segment_first_change = one - segment_first_change;
|
||||
@ -312,7 +320,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
|
||||
// Enumerate purportedly-ordered log.
|
||||
for i in 0..8 {
|
||||
yield_constr.constraint_transition(
|
||||
next_is_read * address_unchanged * (next_values[i] - values[i]),
|
||||
next_is_read * address_unchanged * (next_values_limbs[i] - value_limbs[i]),
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -320,46 +328,48 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
|
||||
fn eval_ext_circuit(
|
||||
&self,
|
||||
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
|
||||
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrameTarget,
|
||||
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
|
||||
) {
|
||||
let one = builder.one_extension();
|
||||
let local_values = vars.get_local_values();
|
||||
let next_values = vars.get_next_values();
|
||||
|
||||
let addr_context = vars.local_values[ADDR_CONTEXT];
|
||||
let addr_segment = vars.local_values[ADDR_SEGMENT];
|
||||
let addr_virtual = vars.local_values[ADDR_VIRTUAL];
|
||||
let values: Vec<_> = (0..8).map(|i| vars.local_values[value_limb(i)]).collect();
|
||||
let timestamp = vars.local_values[TIMESTAMP];
|
||||
let addr_context = local_values[ADDR_CONTEXT];
|
||||
let addr_segment = local_values[ADDR_SEGMENT];
|
||||
let addr_virtual = local_values[ADDR_VIRTUAL];
|
||||
let value_limbs: Vec<_> = (0..8).map(|i| local_values[value_limb(i)]).collect();
|
||||
let timestamp = local_values[TIMESTAMP];
|
||||
|
||||
let next_addr_context = vars.next_values[ADDR_CONTEXT];
|
||||
let next_addr_segment = vars.next_values[ADDR_SEGMENT];
|
||||
let next_addr_virtual = vars.next_values[ADDR_VIRTUAL];
|
||||
let next_values: Vec<_> = (0..8).map(|i| vars.next_values[value_limb(i)]).collect();
|
||||
let next_is_read = vars.next_values[IS_READ];
|
||||
let next_timestamp = vars.next_values[TIMESTAMP];
|
||||
let next_addr_context = next_values[ADDR_CONTEXT];
|
||||
let next_addr_segment = next_values[ADDR_SEGMENT];
|
||||
let next_addr_virtual = next_values[ADDR_VIRTUAL];
|
||||
let next_values_limbs: Vec<_> = (0..8).map(|i| next_values[value_limb(i)]).collect();
|
||||
let next_is_read = next_values[IS_READ];
|
||||
let next_timestamp = next_values[TIMESTAMP];
|
||||
|
||||
// The filter must be 0 or 1.
|
||||
let filter = vars.local_values[FILTER];
|
||||
let filter = local_values[FILTER];
|
||||
let constraint = builder.mul_sub_extension(filter, filter, filter);
|
||||
yield_constr.constraint(builder, constraint);
|
||||
|
||||
// If this is a dummy row (filter is off), it must be a read. This means the prover can
|
||||
// insert reads which never appear in the CPU trace (which are harmless), but not writes.
|
||||
let is_dummy = builder.sub_extension(one, filter);
|
||||
let is_write = builder.sub_extension(one, vars.local_values[IS_READ]);
|
||||
let is_write = builder.sub_extension(one, local_values[IS_READ]);
|
||||
let is_dummy_write = builder.mul_extension(is_dummy, is_write);
|
||||
yield_constr.constraint(builder, is_dummy_write);
|
||||
|
||||
let context_first_change = vars.local_values[CONTEXT_FIRST_CHANGE];
|
||||
let segment_first_change = vars.local_values[SEGMENT_FIRST_CHANGE];
|
||||
let virtual_first_change = vars.local_values[VIRTUAL_FIRST_CHANGE];
|
||||
let context_first_change = local_values[CONTEXT_FIRST_CHANGE];
|
||||
let segment_first_change = local_values[SEGMENT_FIRST_CHANGE];
|
||||
let virtual_first_change = local_values[VIRTUAL_FIRST_CHANGE];
|
||||
let address_unchanged = {
|
||||
let mut cur = builder.sub_extension(one, context_first_change);
|
||||
cur = builder.sub_extension(cur, segment_first_change);
|
||||
builder.sub_extension(cur, virtual_first_change)
|
||||
};
|
||||
|
||||
let range_check = vars.local_values[RANGE_CHECK];
|
||||
let range_check = local_values[RANGE_CHECK];
|
||||
|
||||
let not_context_first_change = builder.sub_extension(one, context_first_change);
|
||||
let not_segment_first_change = builder.sub_extension(one, segment_first_change);
|
||||
@ -430,7 +440,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
|
||||
|
||||
// Enumerate purportedly-ordered log.
|
||||
for i in 0..8 {
|
||||
let value_diff = builder.sub_extension(next_values[i], values[i]);
|
||||
let value_diff = builder.sub_extension(next_values_limbs[i], value_limbs[i]);
|
||||
let zero_if_read = builder.mul_extension(address_unchanged, value_diff);
|
||||
let read_constraint = builder.mul_extension(next_is_read, zero_if_read);
|
||||
yield_constr.constraint_transition(builder, read_constraint);
|
||||
|
||||
@ -20,28 +20,21 @@ use plonky2_maybe_rayon::*;
|
||||
use plonky2_util::{log2_ceil, log2_strict};
|
||||
|
||||
use crate::all_stark::{AllStark, Table, NUM_TABLES};
|
||||
use crate::arithmetic::arithmetic_stark::ArithmeticStark;
|
||||
use crate::byte_packing::byte_packing_stark::BytePackingStark;
|
||||
use crate::config::StarkConfig;
|
||||
use crate::constraint_consumer::ConstraintConsumer;
|
||||
use crate::cpu::cpu_stark::CpuStark;
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
use crate::cross_table_lookup::{
|
||||
cross_table_lookup_data, get_grand_product_challenge_set, CtlCheckVars, CtlData,
|
||||
GrandProductChallengeSet,
|
||||
};
|
||||
use crate::evaluation_frame::StarkEvaluationFrame;
|
||||
use crate::generation::outputs::GenerationOutputs;
|
||||
use crate::generation::{generate_traces, GenerationInputs};
|
||||
use crate::get_challenges::observe_public_values;
|
||||
use crate::keccak::keccak_stark::KeccakStark;
|
||||
use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
|
||||
use crate::logic::LogicStark;
|
||||
use crate::lookup::{lookup_helper_columns, Lookup, LookupCheckVars};
|
||||
use crate::memory::memory_stark::MemoryStark;
|
||||
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof, StarkProofWithMetadata};
|
||||
use crate::stark::Stark;
|
||||
use crate::vanishing_poly::eval_vanishing_poly;
|
||||
use crate::vars::StarkEvaluationVars;
|
||||
|
||||
/// Generate traces, then create all STARK proofs.
|
||||
pub fn prove<F, C, const D: usize>(
|
||||
@ -53,13 +46,6 @@ pub fn prove<F, C, const D: usize>(
|
||||
where
|
||||
F: RichField + Extendable<D>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
[(); ArithmeticStark::<F, D>::COLUMNS]:,
|
||||
[(); BytePackingStark::<F, D>::COLUMNS]:,
|
||||
[(); CpuStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
|
||||
[(); LogicStark::<F, D>::COLUMNS]:,
|
||||
[(); MemoryStark::<F, D>::COLUMNS]:,
|
||||
{
|
||||
let (proof, _outputs) = prove_with_outputs(all_stark, config, inputs, timing)?;
|
||||
Ok(proof)
|
||||
@ -76,13 +62,6 @@ pub fn prove_with_outputs<F, C, const D: usize>(
|
||||
where
|
||||
F: RichField + Extendable<D>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
[(); ArithmeticStark::<F, D>::COLUMNS]:,
|
||||
[(); BytePackingStark::<F, D>::COLUMNS]:,
|
||||
[(); CpuStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
|
||||
[(); LogicStark::<F, D>::COLUMNS]:,
|
||||
[(); MemoryStark::<F, D>::COLUMNS]:,
|
||||
{
|
||||
timed!(timing, "build kernel", Lazy::force(&KERNEL));
|
||||
let (traces, public_values, outputs) = timed!(
|
||||
@ -105,13 +84,6 @@ pub(crate) fn prove_with_traces<F, C, const D: usize>(
|
||||
where
|
||||
F: RichField + Extendable<D>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
[(); ArithmeticStark::<F, D>::COLUMNS]:,
|
||||
[(); BytePackingStark::<F, D>::COLUMNS]:,
|
||||
[(); CpuStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
|
||||
[(); LogicStark::<F, D>::COLUMNS]:,
|
||||
[(); MemoryStark::<F, D>::COLUMNS]:,
|
||||
{
|
||||
let rate_bits = config.fri_config.rate_bits;
|
||||
let cap_height = config.fri_config.cap_height;
|
||||
@ -199,13 +171,6 @@ fn prove_with_commitments<F, C, const D: usize>(
|
||||
where
|
||||
F: RichField + Extendable<D>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
[(); ArithmeticStark::<F, D>::COLUMNS]:,
|
||||
[(); BytePackingStark::<F, D>::COLUMNS]:,
|
||||
[(); CpuStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
|
||||
[(); LogicStark::<F, D>::COLUMNS]:,
|
||||
[(); MemoryStark::<F, D>::COLUMNS]:,
|
||||
{
|
||||
let arithmetic_proof = timed!(
|
||||
timing,
|
||||
@ -332,7 +297,6 @@ where
|
||||
F: RichField + Extendable<D>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
S: Stark<F, D>,
|
||||
[(); S::COLUMNS]:,
|
||||
{
|
||||
let degree = trace_poly_values[0].len();
|
||||
let degree_bits = log2_strict(degree);
|
||||
@ -531,7 +495,6 @@ where
|
||||
P: PackedField<Scalar = F>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
S: Stark<F, D>,
|
||||
[(); S::COLUMNS]:,
|
||||
{
|
||||
let degree = 1 << degree_bits;
|
||||
let rate_bits = config.fri_config.rate_bits;
|
||||
@ -554,12 +517,8 @@ where
|
||||
let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, quotient_degree_bits);
|
||||
|
||||
// Retrieve the LDE values at index `i`.
|
||||
let get_trace_values_packed = |i_start| -> [P; S::COLUMNS] {
|
||||
trace_commitment
|
||||
.get_lde_values_packed(i_start, step)
|
||||
.try_into()
|
||||
.unwrap()
|
||||
};
|
||||
let get_trace_values_packed =
|
||||
|i_start| -> Vec<P> { trace_commitment.get_lde_values_packed(i_start, step) };
|
||||
|
||||
// Last element of the subgroup.
|
||||
let last = F::primitive_root_of_unity(degree_bits).inverse();
|
||||
@ -590,10 +549,10 @@ where
|
||||
lagrange_basis_first,
|
||||
lagrange_basis_last,
|
||||
);
|
||||
let vars = StarkEvaluationVars {
|
||||
local_values: &get_trace_values_packed(i_start),
|
||||
next_values: &get_trace_values_packed(i_next_start),
|
||||
};
|
||||
let vars = S::EvaluationFrame::from_values(
|
||||
&get_trace_values_packed(i_start),
|
||||
&get_trace_values_packed(i_next_start),
|
||||
);
|
||||
let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
|
||||
local_values: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
|
||||
[..num_lookup_columns]
|
||||
@ -617,7 +576,7 @@ where
|
||||
.collect::<Vec<_>>();
|
||||
eval_vanishing_poly::<F, F, P, S, D, 1>(
|
||||
stark,
|
||||
vars,
|
||||
&vars,
|
||||
lookups,
|
||||
lookup_vars,
|
||||
&ctl_vars,
|
||||
@ -663,7 +622,6 @@ fn check_constraints<'a, F, C, S, const D: usize>(
|
||||
F: RichField + Extendable<D>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
S: Stark<F, D>,
|
||||
[(); S::COLUMNS]:,
|
||||
{
|
||||
let degree = 1 << degree_bits;
|
||||
let rate_bits = 0; // Set this to higher value to check constraint degree.
|
||||
@ -709,10 +667,10 @@ fn check_constraints<'a, F, C, S, const D: usize>(
|
||||
lagrange_basis_first,
|
||||
lagrange_basis_last,
|
||||
);
|
||||
let vars = StarkEvaluationVars {
|
||||
local_values: trace_subgroup_evals[i].as_slice().try_into().unwrap(),
|
||||
next_values: trace_subgroup_evals[i_next].as_slice().try_into().unwrap(),
|
||||
};
|
||||
let vars = S::EvaluationFrame::from_values(
|
||||
&trace_subgroup_evals[i],
|
||||
&trace_subgroup_evals[i_next],
|
||||
);
|
||||
let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
|
||||
local_values: auxiliary_subgroup_evals[i][..num_lookup_columns].to_vec(),
|
||||
next_values: auxiliary_subgroup_evals[i_next][..num_lookup_columns].to_vec(),
|
||||
@ -733,7 +691,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
|
||||
.collect::<Vec<_>>();
|
||||
eval_vanishing_poly::<F, F, F, S, D, 1>(
|
||||
stark,
|
||||
vars,
|
||||
&vars,
|
||||
lookups,
|
||||
lookup_vars,
|
||||
&ctl_vars,
|
||||
|
||||
@ -33,6 +33,7 @@ use crate::cross_table_lookup::{
|
||||
get_grand_product_challenge_set, verify_cross_table_lookups, CrossTableLookup,
|
||||
CtlCheckVarsTarget, GrandProductChallenge, GrandProductChallengeSet,
|
||||
};
|
||||
use crate::evaluation_frame::StarkEvaluationFrame;
|
||||
use crate::lookup::LookupCheckVarsTarget;
|
||||
use crate::memory::segments::Segment;
|
||||
use crate::memory::VALUE_LIMBS;
|
||||
@ -45,7 +46,6 @@ use crate::proof::{
|
||||
use crate::stark::Stark;
|
||||
use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64};
|
||||
use crate::vanishing_poly::eval_vanishing_poly_circuit;
|
||||
use crate::vars::StarkEvaluationTargets;
|
||||
use crate::witness::errors::ProgramError;
|
||||
|
||||
/// Table-wise recursive proofs of an `AllProof`.
|
||||
@ -297,7 +297,6 @@ pub(crate) fn recursive_stark_circuit<
|
||||
min_degree_bits: usize,
|
||||
) -> StarkWrapperCircuit<F, C, D>
|
||||
where
|
||||
[(); S::COLUMNS]:,
|
||||
C::Hasher: AlgebraicHasher<F>,
|
||||
{
|
||||
let mut builder = CircuitBuilder::<F, D>::new(circuit_config.clone());
|
||||
@ -401,7 +400,6 @@ fn verify_stark_proof_with_challenges_circuit<
|
||||
inner_config: &StarkConfig,
|
||||
) where
|
||||
C::Hasher: AlgebraicHasher<F>,
|
||||
[(); S::COLUMNS]:,
|
||||
{
|
||||
let zero = builder.zero();
|
||||
let one = builder.one_extension();
|
||||
@ -414,10 +412,7 @@ fn verify_stark_proof_with_challenges_circuit<
|
||||
ctl_zs_first,
|
||||
quotient_polys,
|
||||
} = &proof.openings;
|
||||
let vars = StarkEvaluationTargets {
|
||||
local_values: &local_values.to_vec().try_into().unwrap(),
|
||||
next_values: &next_values.to_vec().try_into().unwrap(),
|
||||
};
|
||||
let vars = S::EvaluationFrameTarget::from_values(local_values, next_values);
|
||||
|
||||
let degree_bits = proof.recover_degree_bits(inner_config);
|
||||
let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits);
|
||||
@ -457,7 +452,7 @@ fn verify_stark_proof_with_challenges_circuit<
|
||||
eval_vanishing_poly_circuit::<F, S, D>(
|
||||
builder,
|
||||
stark,
|
||||
vars,
|
||||
&vars,
|
||||
lookup_vars,
|
||||
ctl_vars,
|
||||
&mut consumer,
|
||||
|
||||
@ -11,8 +11,8 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||
|
||||
use crate::config::StarkConfig;
|
||||
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
|
||||
use crate::evaluation_frame::StarkEvaluationFrame;
|
||||
use crate::lookup::Lookup;
|
||||
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
||||
|
||||
const TRACE_ORACLE_INDEX: usize = 0;
|
||||
const AUXILIARY_ORACLE_INDEX: usize = 1;
|
||||
@ -21,7 +21,16 @@ const QUOTIENT_ORACLE_INDEX: usize = 2;
|
||||
/// Represents a STARK system.
|
||||
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
|
||||
/// The total number of columns in the trace.
|
||||
const COLUMNS: usize;
|
||||
const COLUMNS: usize = Self::EvaluationFrameTarget::COLUMNS;
|
||||
|
||||
/// This is used to evaluate constraints natively.
|
||||
type EvaluationFrame<FE, P, const D2: usize>: StarkEvaluationFrame<P>
|
||||
where
|
||||
FE: FieldExtension<D2, BaseField = F>,
|
||||
P: PackedField<Scalar = FE>;
|
||||
|
||||
/// The `Target` version of `Self::EvaluationFrame`, used to evaluate constraints recursively.
|
||||
type EvaluationFrameTarget: StarkEvaluationFrame<ExtensionTarget<D>>;
|
||||
|
||||
/// Evaluate constraints at a vector of points.
|
||||
///
|
||||
@ -31,7 +40,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
|
||||
/// constraints over `F`.
|
||||
fn eval_packed_generic<FE, P, const D2: usize>(
|
||||
&self,
|
||||
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrame<FE, P, D2>,
|
||||
yield_constr: &mut ConstraintConsumer<P>,
|
||||
) where
|
||||
FE: FieldExtension<D2, BaseField = F>,
|
||||
@ -40,7 +49,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
|
||||
/// Evaluate constraints at a vector of points from the base field `F`.
|
||||
fn eval_packed_base<P: PackedField<Scalar = F>>(
|
||||
&self,
|
||||
vars: StarkEvaluationVars<F, P, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrame<F, P, 1>,
|
||||
yield_constr: &mut ConstraintConsumer<P>,
|
||||
) {
|
||||
self.eval_packed_generic(vars, yield_constr)
|
||||
@ -49,7 +58,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
|
||||
/// Evaluate constraints at a single point from the degree `D` extension field.
|
||||
fn eval_ext(
|
||||
&self,
|
||||
vars: StarkEvaluationVars<F::Extension, F::Extension, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrame<F::Extension, F::Extension, D>,
|
||||
yield_constr: &mut ConstraintConsumer<F::Extension>,
|
||||
) {
|
||||
self.eval_packed_generic(vars, yield_constr)
|
||||
@ -62,7 +71,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
|
||||
fn eval_ext_circuit(
|
||||
&self,
|
||||
builder: &mut CircuitBuilder<F, D>,
|
||||
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
|
||||
vars: &Self::EvaluationFrameTarget,
|
||||
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
|
||||
);
|
||||
|
||||
|
||||
@ -3,17 +3,16 @@ use plonky2::field::extension::{Extendable, FieldExtension};
|
||||
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
|
||||
use plonky2::field::types::{Field, Sample};
|
||||
use plonky2::hash::hash_types::RichField;
|
||||
use plonky2::hash::hashing::PlonkyPermutation;
|
||||
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
|
||||
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||
use plonky2::plonk::circuit_data::CircuitConfig;
|
||||
use plonky2::plonk::config::{GenericConfig, Hasher};
|
||||
use plonky2::plonk::config::GenericConfig;
|
||||
use plonky2::util::transpose;
|
||||
use plonky2_util::{log2_ceil, log2_strict};
|
||||
|
||||
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
|
||||
use crate::evaluation_frame::StarkEvaluationFrame;
|
||||
use crate::stark::Stark;
|
||||
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
||||
|
||||
const WITNESS_SIZE: usize = 1 << 5;
|
||||
|
||||
@ -21,10 +20,7 @@ const WITNESS_SIZE: usize = 1 << 5;
|
||||
/// low-degree witness polynomials.
|
||||
pub fn test_stark_low_degree<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
|
||||
stark: S,
|
||||
) -> Result<()>
|
||||
where
|
||||
[(); S::COLUMNS]:,
|
||||
{
|
||||
) -> Result<()> {
|
||||
let rate_bits = log2_ceil(stark.constraint_degree() + 1);
|
||||
|
||||
let trace_ldes = random_low_degree_matrix::<F>(S::COLUMNS, rate_bits);
|
||||
@ -39,13 +35,10 @@ where
|
||||
let alpha = F::rand();
|
||||
let constraint_evals = (0..size)
|
||||
.map(|i| {
|
||||
let vars = StarkEvaluationVars {
|
||||
local_values: &trace_ldes[i].clone().try_into().unwrap(),
|
||||
next_values: &trace_ldes[(i + (1 << rate_bits)) % size]
|
||||
.clone()
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
};
|
||||
let vars = S::EvaluationFrame::from_values(
|
||||
&trace_ldes[i],
|
||||
&trace_ldes[(i + (1 << rate_bits)) % size],
|
||||
);
|
||||
|
||||
let mut consumer = ConstraintConsumer::<F>::new(
|
||||
vec![alpha],
|
||||
@ -53,7 +46,7 @@ where
|
||||
lagrange_first.values[i],
|
||||
lagrange_last.values[i],
|
||||
);
|
||||
stark.eval_packed_base(vars, &mut consumer);
|
||||
stark.eval_packed_base(&vars, &mut consumer);
|
||||
consumer.accumulators()[0]
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
@ -84,17 +77,13 @@ pub fn test_stark_circuit_constraints<
|
||||
const D: usize,
|
||||
>(
|
||||
stark: S,
|
||||
) -> Result<()>
|
||||
where
|
||||
[(); S::COLUMNS]:,
|
||||
[(); <C::Hasher as Hasher<F>>::Permutation::WIDTH]:,
|
||||
[(); <C::InnerHasher as Hasher<F>>::Permutation::WIDTH]:,
|
||||
{
|
||||
) -> Result<()> {
|
||||
// Compute native constraint evaluation on random values.
|
||||
let vars = StarkEvaluationVars {
|
||||
local_values: &F::Extension::rand_array::<{ S::COLUMNS }>(),
|
||||
next_values: &F::Extension::rand_array::<{ S::COLUMNS }>(),
|
||||
};
|
||||
let vars = S::EvaluationFrame::from_values(
|
||||
&F::Extension::rand_vec(S::COLUMNS),
|
||||
&F::Extension::rand_vec(S::COLUMNS),
|
||||
);
|
||||
|
||||
let alphas = F::rand_vec(1);
|
||||
let z_last = F::Extension::rand();
|
||||
let lagrange_first = F::Extension::rand();
|
||||
@ -109,7 +98,7 @@ where
|
||||
lagrange_first,
|
||||
lagrange_last,
|
||||
);
|
||||
stark.eval_ext(vars, &mut consumer);
|
||||
stark.eval_ext(&vars, &mut consumer);
|
||||
let native_eval = consumer.accumulators()[0];
|
||||
|
||||
// Compute circuit constraint evaluation on same random values.
|
||||
@ -118,9 +107,9 @@ where
|
||||
let mut pw = PartialWitness::<F>::new();
|
||||
|
||||
let locals_t = builder.add_virtual_extension_targets(S::COLUMNS);
|
||||
pw.set_extension_targets(&locals_t, vars.local_values);
|
||||
pw.set_extension_targets(&locals_t, vars.get_local_values());
|
||||
let nexts_t = builder.add_virtual_extension_targets(S::COLUMNS);
|
||||
pw.set_extension_targets(&nexts_t, vars.next_values);
|
||||
pw.set_extension_targets(&nexts_t, vars.get_next_values());
|
||||
let alphas_t = builder.add_virtual_targets(1);
|
||||
pw.set_target(alphas_t[0], alphas[0]);
|
||||
let z_last_t = builder.add_virtual_extension_target();
|
||||
@ -130,10 +119,7 @@ where
|
||||
let lagrange_last_t = builder.add_virtual_extension_target();
|
||||
pw.set_extension_target(lagrange_last_t, lagrange_last);
|
||||
|
||||
let vars = StarkEvaluationTargets::<D, { S::COLUMNS }> {
|
||||
local_values: &locals_t.try_into().unwrap(),
|
||||
next_values: &nexts_t.try_into().unwrap(),
|
||||
};
|
||||
let vars = S::EvaluationFrameTarget::from_values(&locals_t, &nexts_t);
|
||||
let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
|
||||
builder.zero_extension(),
|
||||
alphas_t,
|
||||
@ -141,7 +127,7 @@ where
|
||||
lagrange_first_t,
|
||||
lagrange_last_t,
|
||||
);
|
||||
stark.eval_ext_circuit(&mut builder, vars, &mut consumer);
|
||||
stark.eval_ext_circuit(&mut builder, &vars, &mut consumer);
|
||||
let circuit_eval = consumer.accumulators()[0];
|
||||
let native_eval_t = builder.constant_extension(native_eval);
|
||||
builder.connect_extension(circuit_eval, native_eval_t);
|
||||
|
||||
@ -13,11 +13,10 @@ use crate::lookup::{
|
||||
LookupCheckVarsTarget,
|
||||
};
|
||||
use crate::stark::Stark;
|
||||
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
||||
|
||||
pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
|
||||
stark: &S,
|
||||
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
|
||||
vars: &S::EvaluationFrame<FE, P, D2>,
|
||||
lookups: &[Lookup],
|
||||
lookup_vars: Option<LookupCheckVars<F, FE, P, D2>>,
|
||||
ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
|
||||
@ -44,14 +43,13 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
|
||||
pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
|
||||
builder: &mut CircuitBuilder<F, D>,
|
||||
stark: &S,
|
||||
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
|
||||
vars: &S::EvaluationFrameTarget,
|
||||
lookup_vars: Option<LookupCheckVarsTarget<D>>,
|
||||
ctl_vars: &[CtlCheckVarsTarget<F, D>],
|
||||
consumer: &mut RecursiveConstraintConsumer<F, D>,
|
||||
) where
|
||||
F: RichField + Extendable<D>,
|
||||
S: Stark<F, D>,
|
||||
[(); S::COLUMNS]:,
|
||||
{
|
||||
stark.eval_ext_circuit(builder, vars, consumer);
|
||||
if let Some(lookup_vars) = lookup_vars {
|
||||
|
||||
@ -1,19 +0,0 @@
|
||||
use plonky2::field::packed::PackedField;
|
||||
use plonky2::field::types::Field;
|
||||
use plonky2::iop::ext_target::ExtensionTarget;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize>
|
||||
where
|
||||
F: Field,
|
||||
P: PackedField<Scalar = F>,
|
||||
{
|
||||
pub local_values: &'a [P; COLUMNS],
|
||||
pub next_values: &'a [P; COLUMNS],
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct StarkEvaluationTargets<'a, const D: usize, const COLUMNS: usize> {
|
||||
pub local_values: &'a [ExtensionTarget<D>; COLUMNS],
|
||||
pub next_values: &'a [ExtensionTarget<D>; COLUMNS],
|
||||
}
|
||||
@ -11,20 +11,14 @@ use plonky2::plonk::config::GenericConfig;
|
||||
use plonky2::plonk::plonk_common::reduce_with_powers;
|
||||
|
||||
use crate::all_stark::{AllStark, Table, NUM_TABLES};
|
||||
use crate::arithmetic::arithmetic_stark::ArithmeticStark;
|
||||
use crate::byte_packing::byte_packing_stark::BytePackingStark;
|
||||
use crate::config::StarkConfig;
|
||||
use crate::constraint_consumer::ConstraintConsumer;
|
||||
use crate::cpu::cpu_stark::CpuStark;
|
||||
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
|
||||
use crate::cross_table_lookup::{
|
||||
verify_cross_table_lookups, CtlCheckVars, GrandProductChallenge, GrandProductChallengeSet,
|
||||
};
|
||||
use crate::keccak::keccak_stark::KeccakStark;
|
||||
use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
|
||||
use crate::logic::LogicStark;
|
||||
use crate::evaluation_frame::StarkEvaluationFrame;
|
||||
use crate::lookup::LookupCheckVars;
|
||||
use crate::memory::memory_stark::MemoryStark;
|
||||
use crate::memory::segments::Segment;
|
||||
use crate::memory::VALUE_LIMBS;
|
||||
use crate::proof::{
|
||||
@ -33,7 +27,6 @@ use crate::proof::{
|
||||
use crate::stark::Stark;
|
||||
use crate::util::h2u;
|
||||
use crate::vanishing_poly::eval_vanishing_poly;
|
||||
use crate::vars::StarkEvaluationVars;
|
||||
|
||||
pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
|
||||
all_stark: &AllStark<F, D>,
|
||||
@ -41,13 +34,6 @@ pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, co
|
||||
config: &StarkConfig,
|
||||
) -> Result<()>
|
||||
where
|
||||
[(); ArithmeticStark::<F, D>::COLUMNS]:,
|
||||
[(); BytePackingStark::<F, D>::COLUMNS]:,
|
||||
[(); CpuStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakStark::<F, D>::COLUMNS]:,
|
||||
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
|
||||
[(); LogicStark::<F, D>::COLUMNS]:,
|
||||
[(); MemoryStark::<F, D>::COLUMNS]:,
|
||||
{
|
||||
let AllProofChallenges {
|
||||
stark_challenges,
|
||||
@ -311,10 +297,7 @@ pub(crate) fn verify_stark_proof_with_challenges<
|
||||
ctl_vars: &[CtlCheckVars<F, F::Extension, F::Extension, D>],
|
||||
ctl_challenges: &GrandProductChallengeSet<F>,
|
||||
config: &StarkConfig,
|
||||
) -> Result<()>
|
||||
where
|
||||
[(); S::COLUMNS]:,
|
||||
{
|
||||
) -> Result<()> {
|
||||
log::debug!("Checking proof: {}", type_name::<S>());
|
||||
validate_proof_shape(stark, proof, config, ctl_vars.len())?;
|
||||
let StarkOpeningSet {
|
||||
@ -325,10 +308,7 @@ where
|
||||
ctl_zs_first,
|
||||
quotient_polys,
|
||||
} = &proof.openings;
|
||||
let vars = StarkEvaluationVars {
|
||||
local_values: &local_values.to_vec().try_into().unwrap(),
|
||||
next_values: &next_values.to_vec().try_into().unwrap(),
|
||||
};
|
||||
let vars = S::EvaluationFrame::from_values(local_values, next_values);
|
||||
|
||||
let degree_bits = proof.recover_degree_bits(config);
|
||||
let (l_0, l_last) = eval_l_0_and_l_last(degree_bits, challenges.stark_zeta);
|
||||
@ -361,7 +341,7 @@ where
|
||||
let lookups = stark.lookups();
|
||||
eval_vanishing_poly::<F, F::Extension, F::Extension, S, D, D>(
|
||||
stark,
|
||||
vars,
|
||||
&vars,
|
||||
&lookups,
|
||||
lookup_vars,
|
||||
ctl_vars,
|
||||
@ -420,7 +400,6 @@ where
|
||||
F: RichField + Extendable<D>,
|
||||
C: GenericConfig<D, F = F>,
|
||||
S: Stark<F, D>,
|
||||
[(); S::COLUMNS]:,
|
||||
{
|
||||
let StarkProof {
|
||||
trace_cap,
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user