2022-06-07 14:40:42 -07:00
|
|
|
use std::marker::PhantomData;
|
|
|
|
|
|
2022-07-11 14:47:16 -07:00
|
|
|
use itertools::Itertools;
|
2022-07-24 18:06:03 -04:00
|
|
|
use maybe_rayon::*;
|
2022-06-27 07:18:21 -07:00
|
|
|
use plonky2::field::extension::{Extendable, FieldExtension};
|
2022-06-27 15:07:52 -07:00
|
|
|
use plonky2::field::packed::PackedField;
|
2022-06-23 13:59:57 -07:00
|
|
|
use plonky2::field::polynomial::PolynomialValues;
|
2022-06-27 12:24:09 -07:00
|
|
|
use plonky2::field::types::Field;
|
2022-06-09 10:39:25 -07:00
|
|
|
use plonky2::hash::hash_types::RichField;
|
2022-06-23 13:59:57 -07:00
|
|
|
use plonky2::timed;
|
|
|
|
|
use plonky2::util::timing::TimingTree;
|
2022-07-13 12:57:27 -07:00
|
|
|
use plonky2::util::transpose;
|
2022-06-07 14:40:42 -07:00
|
|
|
|
2022-06-09 10:39:25 -07:00
|
|
|
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
|
2022-06-23 14:36:14 -07:00
|
|
|
use crate::cross_table_lookup::Column;
|
2022-06-21 14:35:19 -07:00
|
|
|
use crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols};
|
2022-06-29 10:02:03 +02:00
|
|
|
use crate::memory::columns::{
|
2022-08-23 17:24:35 -07:00
|
|
|
value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER,
|
|
|
|
|
COUNTER_PERMUTED, FILTER, IS_READ, NUM_COLUMNS, RANGE_CHECK, RANGE_CHECK_PERMUTED,
|
2022-07-12 17:25:46 -07:00
|
|
|
SEGMENT_FIRST_CHANGE, TIMESTAMP, VIRTUAL_FIRST_CHANGE,
|
2022-06-07 14:40:42 -07:00
|
|
|
};
|
2022-08-23 17:24:35 -07:00
|
|
|
use crate::memory::VALUE_LIMBS;
|
2022-06-17 11:09:01 -07:00
|
|
|
use crate::permutation::PermutationPair;
|
2022-06-07 14:40:42 -07:00
|
|
|
use crate::stark::Stark;
|
2022-06-09 10:39:25 -07:00
|
|
|
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
2022-11-30 12:55:41 -08:00
|
|
|
use crate::witness::memory::MemoryOpKind::Read;
|
|
|
|
|
use crate::witness::memory::{MemoryAddress, MemoryOp};
|
2022-06-07 14:40:42 -07:00
|
|
|
|
2022-06-23 14:36:14 -07:00
|
|
|
pub fn ctl_data<F: Field>() -> Vec<Column<F>> {
|
2022-07-07 09:29:10 -07:00
|
|
|
let mut res =
|
|
|
|
|
Column::singles([IS_READ, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL]).collect_vec();
|
2022-06-23 14:36:14 -07:00
|
|
|
res.extend(Column::singles((0..8).map(value_limb)));
|
2022-07-07 09:29:35 -07:00
|
|
|
res.push(Column::single(TIMESTAMP));
|
2022-06-23 14:36:14 -07:00
|
|
|
res
|
|
|
|
|
}
|
|
|
|
|
|
2022-08-23 17:24:35 -07:00
|
|
|
/// The cross-table-lookup filter for the memory table: the `FILTER` column is
/// 1 on real memory operations and 0 on padding (dummy) rows.
pub fn ctl_filter<F: Field>() -> Column<F> {
    Column::single(FILTER)
}
|
|
|
|
|
|
2022-07-04 18:10:03 -07:00
|
|
|
/// STARK checking the consistency of the memory log: operations are sorted by
/// address and then timestamp, and every read must return the value most
/// recently written to that address (see the `Stark` impl for the constraints).
#[derive(Copy, Clone, Default)]
pub struct MemoryStark<F, const D: usize> {
    // Zero-sized marker tying this stark to its field type `F`; carries no data.
    pub(crate) f: PhantomData<F>,
}
|
|
|
|
|
|
2022-07-16 09:14:51 -07:00
|
|
|
impl MemoryOp {
|
2022-07-13 12:57:27 -07:00
|
|
|
/// Generate a row for a given memory operation. Note that this does not generate columns which
|
|
|
|
|
/// depend on the next operation, such as `CONTEXT_FIRST_CHANGE`; those are generated later.
|
|
|
|
|
/// It also does not generate columns such as `COUNTER`, which are generated later, after the
|
|
|
|
|
/// trace has been transposed into column-major form.
|
2022-12-02 17:06:30 -08:00
|
|
|
fn into_row<F: Field>(self) -> [F; NUM_COLUMNS] {
|
2022-07-13 12:57:27 -07:00
|
|
|
let mut row = [F::ZERO; NUM_COLUMNS];
|
2022-08-23 17:24:35 -07:00
|
|
|
row[FILTER] = F::from_bool(self.filter);
|
2022-07-13 12:57:27 -07:00
|
|
|
row[TIMESTAMP] = F::from_canonical_usize(self.timestamp);
|
2022-12-02 17:06:30 -08:00
|
|
|
row[IS_READ] = F::from_bool(self.kind == Read);
|
2022-11-30 12:55:41 -08:00
|
|
|
let MemoryAddress {
|
|
|
|
|
context,
|
|
|
|
|
segment,
|
|
|
|
|
virt,
|
|
|
|
|
} = self.address;
|
|
|
|
|
row[ADDR_CONTEXT] = F::from_canonical_usize(context);
|
|
|
|
|
row[ADDR_SEGMENT] = F::from_canonical_usize(segment);
|
|
|
|
|
row[ADDR_VIRTUAL] = F::from_canonical_usize(virt);
|
2022-07-13 12:57:27 -07:00
|
|
|
for j in 0..VALUE_LIMBS {
|
2022-07-16 09:14:51 -07:00
|
|
|
row[value_limb(j)] = F::from_canonical_u32((self.value >> (j * 32)).low_u32());
|
2022-07-13 12:57:27 -07:00
|
|
|
}
|
|
|
|
|
row
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2022-07-16 09:14:51 -07:00
|
|
|
fn get_max_range_check(memory_ops: &[MemoryOp]) -> usize {
|
2022-07-12 17:25:46 -07:00
|
|
|
memory_ops
|
|
|
|
|
.iter()
|
|
|
|
|
.tuple_windows()
|
|
|
|
|
.map(|(curr, next)| {
|
2022-11-30 12:55:41 -08:00
|
|
|
if curr.address.context != next.address.context {
|
|
|
|
|
next.address.context - curr.address.context - 1
|
|
|
|
|
} else if curr.address.segment != next.address.segment {
|
2022-12-02 17:06:30 -08:00
|
|
|
next.address.segment - curr.address.segment - 1
|
2022-11-30 12:55:41 -08:00
|
|
|
} else if curr.address.virt != next.address.virt {
|
|
|
|
|
next.address.virt - curr.address.virt - 1
|
2022-07-12 17:25:46 -07:00
|
|
|
} else {
|
2022-12-03 21:09:57 -08:00
|
|
|
next.timestamp - curr.timestamp
|
2022-07-12 17:25:46 -07:00
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
.max()
|
|
|
|
|
.unwrap_or(0)
|
|
|
|
|
}
|
|
|
|
|
|
2022-07-13 12:57:27 -07:00
|
|
|
/// Generates the `_FIRST_CHANGE` columns and the `RANGE_CHECK` column in the trace.
|
|
|
|
|
pub fn generate_first_change_flags_and_rc<F: RichField>(trace_rows: &mut [[F; NUM_COLUMNS]]) {
|
|
|
|
|
let num_ops = trace_rows.len();
|
2022-06-07 14:40:42 -07:00
|
|
|
for idx in 0..num_ops - 1 {
|
2022-07-13 12:57:27 -07:00
|
|
|
let row = trace_rows[idx].as_slice();
|
|
|
|
|
let next_row = trace_rows[idx + 1].as_slice();
|
|
|
|
|
|
|
|
|
|
let context = row[ADDR_CONTEXT];
|
|
|
|
|
let segment = row[ADDR_SEGMENT];
|
|
|
|
|
let virt = row[ADDR_VIRTUAL];
|
|
|
|
|
let timestamp = row[TIMESTAMP];
|
|
|
|
|
let next_context = next_row[ADDR_CONTEXT];
|
|
|
|
|
let next_segment = next_row[ADDR_SEGMENT];
|
|
|
|
|
let next_virt = next_row[ADDR_VIRTUAL];
|
|
|
|
|
let next_timestamp = next_row[TIMESTAMP];
|
|
|
|
|
|
|
|
|
|
let context_changed = context != next_context;
|
|
|
|
|
let segment_changed = segment != next_segment;
|
|
|
|
|
let virtual_changed = virt != next_virt;
|
|
|
|
|
|
|
|
|
|
let context_first_change = context_changed;
|
|
|
|
|
let segment_first_change = segment_changed && !context_first_change;
|
|
|
|
|
let virtual_first_change =
|
|
|
|
|
virtual_changed && !segment_first_change && !context_first_change;
|
|
|
|
|
|
|
|
|
|
let row = trace_rows[idx].as_mut_slice();
|
|
|
|
|
row[CONTEXT_FIRST_CHANGE] = F::from_bool(context_first_change);
|
|
|
|
|
row[SEGMENT_FIRST_CHANGE] = F::from_bool(segment_first_change);
|
|
|
|
|
row[VIRTUAL_FIRST_CHANGE] = F::from_bool(virtual_first_change);
|
|
|
|
|
|
|
|
|
|
row[RANGE_CHECK] = if context_first_change {
|
|
|
|
|
next_context - context - F::ONE
|
|
|
|
|
} else if segment_first_change {
|
|
|
|
|
next_segment - segment - F::ONE
|
|
|
|
|
} else if virtual_first_change {
|
|
|
|
|
next_virt - virt - F::ONE
|
|
|
|
|
} else {
|
2022-12-03 21:09:57 -08:00
|
|
|
next_timestamp - timestamp
|
2022-07-13 12:57:27 -07:00
|
|
|
};
|
2022-06-07 14:40:42 -07:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
|
2022-07-13 12:57:27 -07:00
|
|
|
/// Generate most of the trace rows. Excludes a few columns like `COUNTER`, which are generated
|
|
|
|
|
/// later, after transposing to column-major form.
|
2022-07-16 09:14:51 -07:00
|
|
|
fn generate_trace_row_major(&self, mut memory_ops: Vec<MemoryOp>) -> Vec<[F; NUM_COLUMNS]> {
|
2022-11-30 12:55:41 -08:00
|
|
|
memory_ops.sort_by_key(|op| {
|
|
|
|
|
(
|
|
|
|
|
op.address.context,
|
|
|
|
|
op.address.segment,
|
|
|
|
|
op.address.virt,
|
|
|
|
|
op.timestamp,
|
|
|
|
|
)
|
|
|
|
|
});
|
2022-07-13 12:57:27 -07:00
|
|
|
|
2022-07-12 17:25:46 -07:00
|
|
|
Self::pad_memory_ops(&mut memory_ops);
|
2022-06-17 16:28:23 -07:00
|
|
|
|
2022-07-13 12:57:27 -07:00
|
|
|
let mut trace_rows = memory_ops
|
|
|
|
|
.into_par_iter()
|
2022-12-02 17:06:30 -08:00
|
|
|
.map(|op| op.into_row())
|
2022-07-13 12:57:27 -07:00
|
|
|
.collect::<Vec<_>>();
|
|
|
|
|
generate_first_change_flags_and_rc(trace_rows.as_mut_slice());
|
|
|
|
|
trace_rows
|
|
|
|
|
}
|
2022-06-17 16:28:23 -07:00
|
|
|
|
2022-07-13 12:57:27 -07:00
|
|
|
/// Generates the `COUNTER`, `RANGE_CHECK_PERMUTED` and `COUNTER_PERMUTED` columns, given a
|
|
|
|
|
/// trace in column-major form.
|
|
|
|
|
fn generate_trace_col_major(trace_col_vecs: &mut [Vec<F>]) {
|
|
|
|
|
let height = trace_col_vecs[0].len();
|
|
|
|
|
trace_col_vecs[COUNTER] = (0..height).map(|i| F::from_canonical_usize(i)).collect();
|
2022-06-17 16:28:23 -07:00
|
|
|
|
2022-07-13 12:57:27 -07:00
|
|
|
let (permuted_inputs, permuted_table) =
|
|
|
|
|
permuted_cols(&trace_col_vecs[RANGE_CHECK], &trace_col_vecs[COUNTER]);
|
|
|
|
|
trace_col_vecs[RANGE_CHECK_PERMUTED] = permuted_inputs;
|
|
|
|
|
trace_col_vecs[COUNTER_PERMUTED] = permuted_table;
|
2022-06-17 16:28:23 -07:00
|
|
|
}
|
|
|
|
|
|
2022-07-16 09:14:51 -07:00
|
|
|
fn pad_memory_ops(memory_ops: &mut Vec<MemoryOp>) {
|
2022-07-12 17:25:46 -07:00
|
|
|
let num_ops = memory_ops.len();
|
2022-07-12 17:52:49 -07:00
|
|
|
let max_range_check = get_max_range_check(memory_ops);
|
2022-07-12 17:25:46 -07:00
|
|
|
let num_ops_padded = num_ops.max(max_range_check + 1).next_power_of_two();
|
|
|
|
|
let to_pad = num_ops_padded - num_ops;
|
|
|
|
|
|
2022-12-02 17:06:30 -08:00
|
|
|
let last_op = *memory_ops.last().expect("No memory ops?");
|
2022-07-12 17:25:46 -07:00
|
|
|
|
|
|
|
|
// We essentially repeat the last operation until our operation list has the desired size,
|
|
|
|
|
// with a few changes:
|
2022-08-23 17:24:35 -07:00
|
|
|
// - We change its filter to 0 to indicate that this is a dummy operation.
|
2022-07-12 17:25:46 -07:00
|
|
|
// - We increment its timestamp in order to pass the ordering check.
|
|
|
|
|
// - We make sure it's a read, sine dummy operations must be reads.
|
|
|
|
|
for i in 0..to_pad {
|
|
|
|
|
memory_ops.push(MemoryOp {
|
2022-08-23 17:24:35 -07:00
|
|
|
filter: false,
|
2022-07-12 17:25:46 -07:00
|
|
|
timestamp: last_op.timestamp + i + 1,
|
2022-12-02 17:06:30 -08:00
|
|
|
kind: Read,
|
2022-07-12 17:25:46 -07:00
|
|
|
..last_op
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2022-09-23 10:54:17 -07:00
|
|
|
pub(crate) fn generate_trace(
|
|
|
|
|
&self,
|
|
|
|
|
memory_ops: Vec<MemoryOp>,
|
|
|
|
|
timing: &mut TimingTree,
|
|
|
|
|
) -> Vec<PolynomialValues<F>> {
|
2022-07-13 12:57:27 -07:00
|
|
|
// Generate most of the trace in row-major form.
|
2022-06-23 13:59:57 -07:00
|
|
|
let trace_rows = timed!(
|
2022-09-23 10:54:17 -07:00
|
|
|
timing,
|
2022-06-23 13:59:57 -07:00
|
|
|
"generate trace rows",
|
2022-07-13 12:57:27 -07:00
|
|
|
self.generate_trace_row_major(memory_ops)
|
2022-06-23 13:59:57 -07:00
|
|
|
);
|
2022-07-13 12:57:27 -07:00
|
|
|
let trace_row_vecs: Vec<_> = trace_rows.into_iter().map(|row| row.to_vec()).collect();
|
2022-06-23 13:59:57 -07:00
|
|
|
|
2022-07-13 12:57:27 -07:00
|
|
|
// Transpose to column-major form.
|
|
|
|
|
let mut trace_col_vecs = transpose(&trace_row_vecs);
|
|
|
|
|
|
|
|
|
|
// A few final generation steps, which work better in column-major form.
|
|
|
|
|
Self::generate_trace_col_major(&mut trace_col_vecs);
|
|
|
|
|
|
2022-09-23 10:54:17 -07:00
|
|
|
trace_col_vecs
|
2022-07-13 12:57:27 -07:00
|
|
|
.into_iter()
|
|
|
|
|
.map(|column| PolynomialValues::new(column))
|
2022-09-23 10:54:17 -07:00
|
|
|
.collect()
|
2022-06-23 13:59:57 -07:00
|
|
|
}
|
2022-06-07 14:40:42 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F, D> {
    const COLUMNS: usize = NUM_COLUMNS;

    /// Evaluates the memory-consistency constraints over packed base-field values.
    ///
    /// Constraint groups, in order:
    /// 1. `FILTER` is boolean, and dummy rows (filter = 0) must be reads.
    /// 2. The `_FIRST_CHANGE` flags (and the derived `address_unchanged`) are boolean.
    /// 3. Components *before* the flagged first-change component are unchanged.
    /// 4. `RANGE_CHECK` equals the gap in the first changing component minus one
    ///    (or the timestamp difference when the address is unchanged).
    /// 5. A read with an unchanged address must see the previous row's value.
    /// 6. The permutation-based lookup tying `RANGE_CHECK` to the `COUNTER` table.
    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        let one = P::from(FE::ONE);

        // Current-row columns.
        let timestamp = vars.local_values[TIMESTAMP];
        let addr_context = vars.local_values[ADDR_CONTEXT];
        let addr_segment = vars.local_values[ADDR_SEGMENT];
        let addr_virtual = vars.local_values[ADDR_VIRTUAL];
        let values: Vec<_> = (0..8).map(|i| vars.local_values[value_limb(i)]).collect();

        // Next-row columns (used by the transition constraints).
        let next_timestamp = vars.next_values[TIMESTAMP];
        let next_is_read = vars.next_values[IS_READ];
        let next_addr_context = vars.next_values[ADDR_CONTEXT];
        let next_addr_segment = vars.next_values[ADDR_SEGMENT];
        let next_addr_virtual = vars.next_values[ADDR_VIRTUAL];
        let next_values: Vec<_> = (0..8).map(|i| vars.next_values[value_limb(i)]).collect();

        // The filter must be 0 or 1.
        let filter = vars.local_values[FILTER];
        yield_constr.constraint(filter * (filter - P::ONES));

        // If this is a dummy row (filter is off), it must be a read. This means the prover can
        // insert reads which never appear in the CPU trace (which are harmless), but not writes.
        let is_dummy = P::ONES - filter;
        let is_write = P::ONES - vars.local_values[IS_READ];
        yield_constr.constraint(is_dummy * is_write);

        let context_first_change = vars.local_values[CONTEXT_FIRST_CHANGE];
        let segment_first_change = vars.local_values[SEGMENT_FIRST_CHANGE];
        let virtual_first_change = vars.local_values[VIRTUAL_FIRST_CHANGE];
        // Exactly one of the four indicators (three flags + unchanged) should be 1;
        // `address_unchanged` is derived rather than committed.
        let address_unchanged =
            one - context_first_change - segment_first_change - virtual_first_change;

        let range_check = vars.local_values[RANGE_CHECK];

        let not_context_first_change = one - context_first_change;
        let not_segment_first_change = one - segment_first_change;
        let not_virtual_first_change = one - virtual_first_change;
        let not_address_unchanged = one - address_unchanged;

        // First set of ordering constraint: first_change flags are boolean.
        yield_constr.constraint(context_first_change * not_context_first_change);
        yield_constr.constraint(segment_first_change * not_segment_first_change);
        yield_constr.constraint(virtual_first_change * not_virtual_first_change);
        yield_constr.constraint(address_unchanged * not_address_unchanged);

        // Second set of ordering constraints: no change before the column corresponding to the nonzero first_change flag.
        yield_constr
            .constraint_transition(segment_first_change * (next_addr_context - addr_context));
        yield_constr
            .constraint_transition(virtual_first_change * (next_addr_context - addr_context));
        yield_constr
            .constraint_transition(virtual_first_change * (next_addr_segment - addr_segment));
        yield_constr.constraint_transition(address_unchanged * (next_addr_context - addr_context));
        yield_constr.constraint_transition(address_unchanged * (next_addr_segment - addr_segment));
        yield_constr.constraint_transition(address_unchanged * (next_addr_virtual - addr_virtual));

        // Third set of ordering constraints: range-check difference in the column that should be increasing.
        // Since the flags are mutually exclusive, exactly one summand is active per row.
        let computed_range_check = context_first_change * (next_addr_context - addr_context - one)
            + segment_first_change * (next_addr_segment - addr_segment - one)
            + virtual_first_change * (next_addr_virtual - addr_virtual - one)
            + address_unchanged * (next_timestamp - timestamp);
        yield_constr.constraint_transition(range_check - computed_range_check);

        // Enumerate purportedly-ordered log: a read at an unchanged address must
        // return the previous row's value, limb by limb.
        for i in 0..8 {
            yield_constr
                .constraint(next_is_read * address_unchanged * (next_values[i] - values[i]));
        }

        // Lookup argument: each RANGE_CHECK value must appear in the COUNTER column.
        eval_lookups(vars, yield_constr, RANGE_CHECK_PERMUTED, COUNTER_PERMUTED)
    }

    /// Recursive-circuit version of `eval_packed_generic`; must mirror it constraint
    /// for constraint so that the verifier circuit checks the same relation.
    fn eval_ext_circuit(
        &self,
        builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        let one = builder.one_extension();

        // Current-row columns.
        let addr_context = vars.local_values[ADDR_CONTEXT];
        let addr_segment = vars.local_values[ADDR_SEGMENT];
        let addr_virtual = vars.local_values[ADDR_VIRTUAL];
        let values: Vec<_> = (0..8).map(|i| vars.local_values[value_limb(i)]).collect();
        let timestamp = vars.local_values[TIMESTAMP];

        // Next-row columns.
        let next_addr_context = vars.next_values[ADDR_CONTEXT];
        let next_addr_segment = vars.next_values[ADDR_SEGMENT];
        let next_addr_virtual = vars.next_values[ADDR_VIRTUAL];
        let next_values: Vec<_> = (0..8).map(|i| vars.next_values[value_limb(i)]).collect();
        let next_is_read = vars.next_values[IS_READ];
        let next_timestamp = vars.next_values[TIMESTAMP];

        // The filter must be 0 or 1.
        let filter = vars.local_values[FILTER];
        let constraint = builder.mul_sub_extension(filter, filter, filter);
        yield_constr.constraint(builder, constraint);

        // If this is a dummy row (filter is off), it must be a read. This means the prover can
        // insert reads which never appear in the CPU trace (which are harmless), but not writes.
        let is_dummy = builder.sub_extension(one, filter);
        let is_write = builder.sub_extension(one, vars.local_values[IS_READ]);
        let is_dummy_write = builder.mul_extension(is_dummy, is_write);
        yield_constr.constraint(builder, is_dummy_write);

        let context_first_change = vars.local_values[CONTEXT_FIRST_CHANGE];
        let segment_first_change = vars.local_values[SEGMENT_FIRST_CHANGE];
        let virtual_first_change = vars.local_values[VIRTUAL_FIRST_CHANGE];
        // Derived indicator: 1 - sum of the three flags.
        let address_unchanged = {
            let mut cur = builder.sub_extension(one, context_first_change);
            cur = builder.sub_extension(cur, segment_first_change);
            builder.sub_extension(cur, virtual_first_change)
        };

        let range_check = vars.local_values[RANGE_CHECK];

        let not_context_first_change = builder.sub_extension(one, context_first_change);
        let not_segment_first_change = builder.sub_extension(one, segment_first_change);
        let not_virtual_first_change = builder.sub_extension(one, virtual_first_change);
        let not_address_unchanged = builder.sub_extension(one, address_unchanged);
        let addr_context_diff = builder.sub_extension(next_addr_context, addr_context);
        let addr_segment_diff = builder.sub_extension(next_addr_segment, addr_segment);
        let addr_virtual_diff = builder.sub_extension(next_addr_virtual, addr_virtual);

        // First set of ordering constraint: first_change flags are boolean.
        let context_first_change_bool =
            builder.mul_extension(context_first_change, not_context_first_change);
        yield_constr.constraint(builder, context_first_change_bool);
        let segment_first_change_bool =
            builder.mul_extension(segment_first_change, not_segment_first_change);
        yield_constr.constraint(builder, segment_first_change_bool);
        let virtual_first_change_bool =
            builder.mul_extension(virtual_first_change, not_virtual_first_change);
        yield_constr.constraint(builder, virtual_first_change_bool);
        let address_unchanged_bool =
            builder.mul_extension(address_unchanged, not_address_unchanged);
        yield_constr.constraint(builder, address_unchanged_bool);

        // Second set of ordering constraints: no change before the column corresponding to the nonzero first_change flag.
        let segment_first_change_check =
            builder.mul_extension(segment_first_change, addr_context_diff);
        yield_constr.constraint_transition(builder, segment_first_change_check);
        let virtual_first_change_check_1 =
            builder.mul_extension(virtual_first_change, addr_context_diff);
        yield_constr.constraint_transition(builder, virtual_first_change_check_1);
        let virtual_first_change_check_2 =
            builder.mul_extension(virtual_first_change, addr_segment_diff);
        yield_constr.constraint_transition(builder, virtual_first_change_check_2);
        let address_unchanged_check_1 = builder.mul_extension(address_unchanged, addr_context_diff);
        yield_constr.constraint_transition(builder, address_unchanged_check_1);
        let address_unchanged_check_2 = builder.mul_extension(address_unchanged, addr_segment_diff);
        yield_constr.constraint_transition(builder, address_unchanged_check_2);
        let address_unchanged_check_3 = builder.mul_extension(address_unchanged, addr_virtual_diff);
        yield_constr.constraint_transition(builder, address_unchanged_check_3);

        // Third set of ordering constraints: range-check difference in the column that should be increasing.
        let context_diff = {
            let diff = builder.sub_extension(next_addr_context, addr_context);
            builder.sub_extension(diff, one)
        };
        let context_range_check = builder.mul_extension(context_first_change, context_diff);
        let segment_diff = {
            let diff = builder.sub_extension(next_addr_segment, addr_segment);
            builder.sub_extension(diff, one)
        };
        let segment_range_check = builder.mul_extension(segment_first_change, segment_diff);
        let virtual_diff = {
            let diff = builder.sub_extension(next_addr_virtual, addr_virtual);
            builder.sub_extension(diff, one)
        };
        let virtual_range_check = builder.mul_extension(virtual_first_change, virtual_diff);
        let timestamp_diff = builder.sub_extension(next_timestamp, timestamp);
        let timestamp_range_check = builder.mul_extension(address_unchanged, timestamp_diff);

        // Exactly one summand is active per row, matching the packed evaluation.
        let computed_range_check = {
            let mut sum = builder.add_extension(context_range_check, segment_range_check);
            sum = builder.add_extension(sum, virtual_range_check);
            builder.add_extension(sum, timestamp_range_check)
        };
        let range_check_diff = builder.sub_extension(range_check, computed_range_check);
        yield_constr.constraint_transition(builder, range_check_diff);

        // Enumerate purportedly-ordered log: a read at an unchanged address must
        // return the previous row's value, limb by limb.
        for i in 0..8 {
            let value_diff = builder.sub_extension(next_values[i], values[i]);
            let zero_if_read = builder.mul_extension(address_unchanged, value_diff);
            let read_constraint = builder.mul_extension(next_is_read, zero_if_read);
            yield_constr.constraint(builder, read_constraint);
        }

        // Lookup argument: each RANGE_CHECK value must appear in the COUNTER column.
        eval_lookups_circuit(
            builder,
            vars,
            yield_constr,
            RANGE_CHECK_PERMUTED,
            COUNTER_PERMUTED,
        )
    }

    // Maximum multiplicative degree of any constraint above (e.g. next_is_read *
    // address_unchanged * value_diff).
    fn constraint_degree(&self) -> usize {
        3
    }

    /// Permutation pairs backing the lookup: the permuted columns must be
    /// permutations of their originals.
    fn permutation_pairs(&self) -> Vec<PermutationPair> {
        vec![
            PermutationPair::singletons(RANGE_CHECK, RANGE_CHECK_PERMUTED),
            PermutationPair::singletons(COUNTER, COUNTER_PERMUTED),
        ]
    }
}
|
2022-06-09 12:00:00 -07:00
|
|
|
|
|
|
|
|
#[cfg(test)]
pub(crate) mod tests {
    use anyhow::Result;
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};

    use crate::memory::memory_stark::MemoryStark;
    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};

    /// Checks that the constraints stay within the declared degree bound.
    #[test]
    fn test_stark_degree() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = MemoryStark<F, D>;

        // `MemoryStark` derives `Default`, so there's no need to spell out the phantom field.
        let stark = S::default();
        test_stark_low_degree(stark)
    }

    /// Checks that the recursive (circuit) constraints agree with the native ones.
    #[test]
    fn test_stark_circuit() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = MemoryStark<F, D>;

        let stark = S::default();
        test_stark_circuit_constraints::<F, C, S, D>(stark)
    }
}
|