mirror of
https://github.com/logos-storage/plonky2.git
synced 2026-01-07 00:03:10 +00:00
Merge remote-tracking branch 'mir-plonky2/main' into constrain-genesis-state
commit 5694af79f9
@@ -7,18 +7,20 @@ use plonky2::field::packed::PackedField;
 use plonky2::field::polynomial::PolynomialValues;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
+use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
 use plonky2::util::transpose;
 use static_assertions::const_assert;

+use super::columns::NUM_ARITH_COLUMNS;
 use crate::all_stark::Table;
 use crate::arithmetic::{addcy, byte, columns, divmod, modular, mul, Operation};
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cross_table_lookup::{Column, TableWithColumns};
+use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols};
 use crate::permutation::PermutationPair;
 use crate::stark::Stark;
-use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

 /// Link the 16-bit columns of the arithmetic table, split into groups
 /// of N_LIMBS at a time in `regs`, with the corresponding 32-bit
@@ -168,11 +170,16 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
 }

 impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticStark<F, D> {
-    const COLUMNS: usize = columns::NUM_ARITH_COLUMNS;
+    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_ARITH_COLUMNS>
+    where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>;
+
+    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_ARITH_COLUMNS>;

     fn eval_packed_generic<FE, P, const D2: usize>(
         &self,
-        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
+        vars: &Self::EvaluationFrame<FE, P, D2>,
         yield_constr: &mut ConstraintConsumer<P>,
     ) where
         FE: FieldExtension<D2, BaseField = F>,

@@ -183,8 +190,8 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
             eval_lookups(vars, yield_constr, col, col + 1);
         }

-        let lv = vars.local_values;
-        let nv = vars.next_values;
+        let lv: &[P; NUM_ARITH_COLUMNS] = vars.get_local_values().try_into().unwrap();
+        let nv: &[P; NUM_ARITH_COLUMNS] = vars.get_next_values().try_into().unwrap();

         // Check the range column: First value must be 0, last row
         // must be 2^16-1, and intermediate rows must increment by 0

@@ -207,7 +214,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
     fn eval_ext_circuit(
         &self,
         builder: &mut CircuitBuilder<F, D>,
-        vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
+        vars: &Self::EvaluationFrameTarget,
         yield_constr: &mut RecursiveConstraintConsumer<F, D>,
     ) {
         // Range check all the columns

@@ -215,8 +222,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
             eval_lookups_circuit(builder, vars, yield_constr, col, col + 1);
         }

-        let lv = vars.local_values;
-        let nv = vars.next_values;
+        let lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =
+            vars.get_local_values().try_into().unwrap();
+        let nv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =
+            vars.get_next_values().try_into().unwrap();

         let rc1 = lv[columns::RANGE_COUNTER];
         let rc2 = nv[columns::RANGE_COUNTER];
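The range-check wiring in the hunks above is easier to see outside the STARK machinery: the RANGE_COUNTER column must start at 0, step by 0 or 1, and end at 2^16 - 1, which together force it to pass through every 16-bit value. A minimal sketch of that invariant, using plain integers in place of field elements (illustration only, not the crate's API):

    /// The shape enforced on a range-counter column: first value 0, each
    /// step increments by 0 or 1, last value 2^16 - 1.
    fn range_counter_is_valid(col: &[u64]) -> bool {
        if col.first() != Some(&0) || col.last() != Some(&(u16::MAX as u64)) {
            return false;
        }
        // The transition constraint (rc2 - rc1) * (rc2 - rc1 - 1) == 0 says
        // every consecutive difference is 0 or 1.
        col.windows(2).all(|w| {
            let delta = w[1].wrapping_sub(w[0]);
            delta == 0 || delta == 1
        })
    }

    fn main() {
        let col: Vec<u64> = (0..=u16::MAX as u64).collect();
        assert!(range_counter_is_valid(&col));
        assert!(!range_counter_is_valid(&[0, 2]));
    }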
@@ -51,9 +51,9 @@ use crate::byte_packing::columns::{
 };
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::cross_table_lookup::Column;
+use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols};
 use crate::stark::Stark;
-use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
 use crate::witness::memory::MemoryAddress;

 /// Strict upper bound for the individual bytes range-check.

@@ -211,7 +211,7 @@ impl<F: RichField + Extendable<D>, const D: usize> BytePackingStark<F, D> {
                 row[value_bytes(i)] = F::from_canonical_u8(byte);
                 row[index_bytes(i)] = F::ONE;

-                rows.push(row.into());
+                rows.push(row);
                 row[index_bytes(i)] = F::ZERO;
                 row[ADDR_VIRTUAL] -= F::ONE;
             }

@@ -248,7 +248,7 @@ impl<F: RichField + Extendable<D>, const D: usize> BytePackingStark<F, D> {
         }
     }

-    /// There is only one `i` for which `vars.local_values[index_bytes(i)]` is non-zero,
+    /// There is only one `i` for which `local_values[index_bytes(i)]` is non-zero,
     /// and `i+1` is the current position:
    fn get_active_position<FE, P, const D2: usize>(&self, row: &[P; NUM_COLUMNS]) -> P
    where
@@ -281,11 +281,16 @@ impl<F: RichField + Extendable<D>, const D: usize> BytePackingStark<F, D> {
 }

 impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingStark<F, D> {
-    const COLUMNS: usize = NUM_COLUMNS;
+    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
+    where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>;
+
+    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;

     fn eval_packed_generic<FE, P, const D2: usize>(
         &self,
-        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
+        vars: &Self::EvaluationFrame<FE, P, D2>,
         yield_constr: &mut ConstraintConsumer<P>,
     ) where
         FE: FieldExtension<D2, BaseField = F>,

@@ -296,68 +301,62 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
             eval_lookups(vars, yield_constr, col, col + 1);
         }

+        let local_values: &[P; NUM_COLUMNS] = vars.get_local_values().try_into().unwrap();
+        let next_values: &[P; NUM_COLUMNS] = vars.get_next_values().try_into().unwrap();
+
         let one = P::ONES;

         // We filter active columns by summing all the byte indices.
         // Constraining each of them to be boolean is done later on below.
-        let current_filter = vars.local_values[BYTE_INDICES_COLS]
-            .iter()
-            .copied()
-            .sum::<P>();
+        let current_filter = local_values[BYTE_INDICES_COLS].iter().copied().sum::<P>();
         yield_constr.constraint(current_filter * (current_filter - one));

         // The filter column must start by one.
         yield_constr.constraint_first_row(current_filter - one);

         // The is_read flag must be boolean.
-        let current_is_read = vars.local_values[IS_READ];
+        let current_is_read = local_values[IS_READ];
         yield_constr.constraint(current_is_read * (current_is_read - one));

         // Each byte index must be boolean.
         for i in 0..NUM_BYTES {
-            let idx_i = vars.local_values[index_bytes(i)];
+            let idx_i = local_values[index_bytes(i)];
             yield_constr.constraint(idx_i * (idx_i - one));
         }

         // The sequence start flag column must start by one.
-        let current_sequence_start = vars.local_values[index_bytes(0)];
+        let current_sequence_start = local_values[index_bytes(0)];
         yield_constr.constraint_first_row(current_sequence_start - one);

         // The sequence end flag must be boolean
-        let current_sequence_end = vars.local_values[SEQUENCE_END];
+        let current_sequence_end = local_values[SEQUENCE_END];
         yield_constr.constraint(current_sequence_end * (current_sequence_end - one));

         // If filter is off, all flags and byte indices must be off.
-        let byte_indices = vars.local_values[BYTE_INDICES_COLS]
-            .iter()
-            .copied()
-            .sum::<P>();
+        let byte_indices = local_values[BYTE_INDICES_COLS].iter().copied().sum::<P>();
         yield_constr.constraint(
             (current_filter - one) * (current_is_read + current_sequence_end + byte_indices),
         );

         // Only padding rows have their filter turned off.
-        let next_filter = vars.next_values[BYTE_INDICES_COLS]
-            .iter()
-            .copied()
-            .sum::<P>();
+        let next_filter = next_values[BYTE_INDICES_COLS].iter().copied().sum::<P>();
         yield_constr.constraint_transition(next_filter * (next_filter - current_filter));

         // Unless the current sequence end flag is activated, the is_read filter must remain unchanged.
-        let next_is_read = vars.next_values[IS_READ];
+        let next_is_read = next_values[IS_READ];
         yield_constr
             .constraint_transition((current_sequence_end - one) * (next_is_read - current_is_read));

         // If the sequence end flag is activated, the next row must be a new sequence or filter must be off.
-        let next_sequence_start = vars.next_values[index_bytes(0)];
+        let next_sequence_start = next_values[index_bytes(0)];
         yield_constr.constraint_transition(
             current_sequence_end * next_filter * (next_sequence_start - one),
         );

         // The active position in a byte sequence must increase by one on every row
         // or be one on the next row (i.e. at the start of a new sequence).
-        let current_position = self.get_active_position(vars.local_values);
-        let next_position = self.get_active_position(vars.next_values);
+        let current_position = self.get_active_position(local_values);
+        let next_position = self.get_active_position(next_values);
         yield_constr.constraint_transition(
             next_filter * (next_position - one) * (next_position - current_position - one),
         );
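The recurring `x * (x - one)` constraints above all encode booleanity: over a field, x(x - 1) = 0 exactly when x is 0 or 1. Because at most one byte-index column is active per row, the sum of those indicator columns doubles as the row filter, which is why `current_filter` is itself constrained to be boolean. A toy sketch of that argument (plain integers standing in for field values):

    /// x * (x - 1) == 0 exactly when x is 0 or 1.
    fn is_boolean(x: i64) -> bool {
        x * (x - 1) == 0
    }

    fn main() {
        // Mutually exclusive indicator columns: their sum acts as the
        // boolean row filter checked by `current_filter` above.
        let byte_indices = [0i64, 0, 1, 0];
        let filter: i64 = byte_indices.iter().sum();
        assert!(is_boolean(filter));
        assert!(!is_boolean(2));
    }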
@@ -371,14 +370,14 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt

         // The context, segment and timestamp fields must remain unchanged throughout a byte sequence.
         // The virtual address must decrement by one at each step of a sequence.
-        let current_context = vars.local_values[ADDR_CONTEXT];
-        let next_context = vars.next_values[ADDR_CONTEXT];
-        let current_segment = vars.local_values[ADDR_SEGMENT];
-        let next_segment = vars.next_values[ADDR_SEGMENT];
-        let current_virtual = vars.local_values[ADDR_VIRTUAL];
-        let next_virtual = vars.next_values[ADDR_VIRTUAL];
-        let current_timestamp = vars.local_values[TIMESTAMP];
-        let next_timestamp = vars.next_values[TIMESTAMP];
+        let current_context = local_values[ADDR_CONTEXT];
+        let next_context = next_values[ADDR_CONTEXT];
+        let current_segment = local_values[ADDR_SEGMENT];
+        let next_segment = next_values[ADDR_SEGMENT];
+        let current_virtual = local_values[ADDR_VIRTUAL];
+        let next_virtual = next_values[ADDR_VIRTUAL];
+        let current_timestamp = local_values[TIMESTAMP];
+        let next_timestamp = next_values[TIMESTAMP];
         yield_constr.constraint_transition(
             next_filter * (next_sequence_start - one) * (next_context - current_context),
         );

@@ -395,9 +394,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
         // If not at the end of a sequence, each next byte must equal the current one
         // when reading through the sequence, or the next byte index must be one.
         for i in 0..NUM_BYTES {
-            let current_byte = vars.local_values[value_bytes(i)];
-            let next_byte = vars.next_values[value_bytes(i)];
-            let next_byte_index = vars.next_values[index_bytes(i)];
+            let current_byte = local_values[value_bytes(i)];
+            let next_byte = next_values[value_bytes(i)];
+            let next_byte_index = next_values[index_bytes(i)];
             yield_constr.constraint_transition(
                 (current_sequence_end - one) * (next_byte_index - one) * (next_byte - current_byte),
             );

@@ -407,7 +406,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
     fn eval_ext_circuit(
         &self,
         builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
-        vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
+        vars: &Self::EvaluationFrameTarget,
         yield_constr: &mut RecursiveConstraintConsumer<F, D>,
     ) {
         // Range check all the columns

@@ -415,9 +414,14 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
             eval_lookups_circuit(builder, vars, yield_constr, col, col + 1);
         }

+        let local_values: &[ExtensionTarget<D>; NUM_COLUMNS] =
+            vars.get_local_values().try_into().unwrap();
+        let next_values: &[ExtensionTarget<D>; NUM_COLUMNS] =
+            vars.get_next_values().try_into().unwrap();
+
         // We filter active columns by summing all the byte indices.
         // Constraining each of them to be boolean is done later on below.
-        let current_filter = builder.add_many_extension(&vars.local_values[BYTE_INDICES_COLS]);
+        let current_filter = builder.add_many_extension(&local_values[BYTE_INDICES_COLS]);
         let constraint = builder.mul_sub_extension(current_filter, current_filter, current_filter);
         yield_constr.constraint(builder, constraint);

@@ -426,25 +430,25 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
         yield_constr.constraint_first_row(builder, constraint);

         // The is_read flag must be boolean.
-        let current_is_read = vars.local_values[IS_READ];
+        let current_is_read = local_values[IS_READ];
         let constraint =
             builder.mul_sub_extension(current_is_read, current_is_read, current_is_read);
         yield_constr.constraint(builder, constraint);

         // Each byte index must be boolean.
         for i in 0..NUM_BYTES {
-            let idx_i = vars.local_values[index_bytes(i)];
+            let idx_i = local_values[index_bytes(i)];
             let constraint = builder.mul_sub_extension(idx_i, idx_i, idx_i);
             yield_constr.constraint(builder, constraint);
         }

         // The sequence start flag column must start by one.
-        let current_sequence_start = vars.local_values[index_bytes(0)];
+        let current_sequence_start = local_values[index_bytes(0)];
         let constraint = builder.add_const_extension(current_sequence_start, F::NEG_ONE);
         yield_constr.constraint_first_row(builder, constraint);

         // The sequence end flag must be boolean
-        let current_sequence_end = vars.local_values[SEQUENCE_END];
+        let current_sequence_end = local_values[SEQUENCE_END];
         let constraint = builder.mul_sub_extension(
             current_sequence_end,
             current_sequence_end,

@@ -453,27 +457,27 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
         yield_constr.constraint(builder, constraint);

         // If filter is off, all flags and byte indices must be off.
-        let byte_indices = builder.add_many_extension(&vars.local_values[BYTE_INDICES_COLS]);
+        let byte_indices = builder.add_many_extension(&local_values[BYTE_INDICES_COLS]);
         let constraint = builder.add_extension(current_sequence_end, byte_indices);
         let constraint = builder.add_extension(constraint, current_is_read);
         let constraint = builder.mul_sub_extension(constraint, current_filter, constraint);
         yield_constr.constraint(builder, constraint);

         // Only padding rows have their filter turned off.
-        let next_filter = builder.add_many_extension(&vars.next_values[BYTE_INDICES_COLS]);
+        let next_filter = builder.add_many_extension(&next_values[BYTE_INDICES_COLS]);
         let constraint = builder.sub_extension(next_filter, current_filter);
         let constraint = builder.mul_extension(next_filter, constraint);
         yield_constr.constraint_transition(builder, constraint);

         // Unless the current sequence end flag is activated, the is_read filter must remain unchanged.
-        let next_is_read = vars.next_values[IS_READ];
+        let next_is_read = next_values[IS_READ];
         let diff_is_read = builder.sub_extension(next_is_read, current_is_read);
         let constraint =
             builder.mul_sub_extension(diff_is_read, current_sequence_end, diff_is_read);
         yield_constr.constraint_transition(builder, constraint);

         // If the sequence end flag is activated, the next row must be a new sequence or filter must be off.
-        let next_sequence_start = vars.next_values[index_bytes(0)];
+        let next_sequence_start = next_values[index_bytes(0)];
         let constraint = builder.mul_sub_extension(
             current_sequence_end,
             next_sequence_start,

@@ -484,8 +488,8 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt

         // The active position in a byte sequence must increase by one on every row
         // or be one on the next row (i.e. at the start of a new sequence).
-        let current_position = self.get_active_position_circuit(builder, vars.local_values);
-        let next_position = self.get_active_position_circuit(builder, vars.next_values);
+        let current_position = self.get_active_position_circuit(builder, local_values);
+        let next_position = self.get_active_position_circuit(builder, next_values);

         let position_diff = builder.sub_extension(next_position, current_position);
         let is_new_or_inactive = builder.mul_sub_extension(next_filter, next_position, next_filter);

@@ -505,14 +509,14 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt

         // The context, segment and timestamp fields must remain unchanged throughout a byte sequence.
         // The virtual address must decrement by one at each step of a sequence.
-        let current_context = vars.local_values[ADDR_CONTEXT];
-        let next_context = vars.next_values[ADDR_CONTEXT];
-        let current_segment = vars.local_values[ADDR_SEGMENT];
-        let next_segment = vars.next_values[ADDR_SEGMENT];
-        let current_virtual = vars.local_values[ADDR_VIRTUAL];
-        let next_virtual = vars.next_values[ADDR_VIRTUAL];
-        let current_timestamp = vars.local_values[TIMESTAMP];
-        let next_timestamp = vars.next_values[TIMESTAMP];
+        let current_context = local_values[ADDR_CONTEXT];
+        let next_context = next_values[ADDR_CONTEXT];
+        let current_segment = local_values[ADDR_SEGMENT];
+        let next_segment = next_values[ADDR_SEGMENT];
+        let current_virtual = local_values[ADDR_VIRTUAL];
+        let next_virtual = next_values[ADDR_VIRTUAL];
+        let current_timestamp = local_values[TIMESTAMP];
+        let next_timestamp = next_values[TIMESTAMP];
         let addr_filter = builder.mul_sub_extension(next_filter, next_sequence_start, next_filter);
         {
             let constraint = builder.sub_extension(next_context, current_context);

@@ -538,9 +542,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for BytePackingSt
         // If not at the end of a sequence, each next byte must equal the current one
         // when reading through the sequence, or the next byte index must be one.
         for i in 0..NUM_BYTES {
-            let current_byte = vars.local_values[value_bytes(i)];
-            let next_byte = vars.next_values[value_bytes(i)];
-            let next_byte_index = vars.next_values[index_bytes(i)];
+            let current_byte = local_values[value_bytes(i)];
+            let next_byte = next_values[value_bytes(i)];
+            let next_byte_index = next_values[index_bytes(i)];
             let byte_diff = builder.sub_extension(next_byte, current_byte);
             let constraint = builder.mul_sub_extension(byte_diff, next_byte_index, byte_diff);
             let constraint =
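The recursive variant builds the same polynomials gate by gate; `mul_sub_extension(a, b, c)` evaluates a·b - c, so `mul_sub_extension(x, x, x)` is x² - x = x(x - 1), the booleanity polynomial from the packed version. A hedged arithmetic sketch of that identity (assuming the a·b - c semantics for the builder call):

    /// Models mul_sub(a, b, c) = a * b - c, the gate shape used by the
    /// circuit constraints above.
    fn mul_sub(a: i64, b: i64, c: i64) -> i64 {
        a * b - c
    }

    fn main() {
        for x in [0i64, 1, 2, 5] {
            // mul_sub(x, x, x) vanishes exactly on boolean x.
            assert_eq!(mul_sub(x, x, x) == 0, x == 0 || x == 1);
        }
    }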
@@ -1,22 +1,20 @@
 //! The initial phase of execution, where the kernel code is hashed while being written to memory.
 //! The hash is then checked against a precomputed kernel hash.

-use std::borrow::Borrow;
-
 use itertools::Itertools;
 use plonky2::field::extension::Extendable;
 use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
+use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;

 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
-use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS};
+use crate::cpu::columns::CpuColumnsView;
 use crate::cpu::kernel::aggregator::KERNEL;
 use crate::cpu::membus::NUM_GP_CHANNELS;
 use crate::generation::state::GenerationState;
 use crate::memory::segments::Segment;
-use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
 use crate::witness::memory::MemoryAddress;
 use crate::witness::util::{keccak_sponge_log, mem_write_gp_log_and_fill};

@@ -58,13 +56,11 @@ pub(crate) fn generate_bootstrap_kernel<F: Field>(state: &mut GenerationState<F>
     log::info!("Bootstrapping took {} cycles", state.traces.clock());
 }

-pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
-    vars: StarkEvaluationVars<F, P, NUM_CPU_COLUMNS>,
+pub(crate) fn eval_bootstrap_kernel_packed<F: Field, P: PackedField<Scalar = F>>(
+    local_values: &CpuColumnsView<P>,
+    next_values: &CpuColumnsView<P>,
     yield_constr: &mut ConstraintConsumer<P>,
 ) {
-    let local_values: &CpuColumnsView<_> = vars.local_values.borrow();
-    let next_values: &CpuColumnsView<_> = vars.next_values.borrow();
-
     // IS_BOOTSTRAP_KERNEL must have an init value of 1, a final value of 0, and a delta in {0, -1}.
     let local_is_bootstrap = local_values.is_bootstrap_kernel;
     let next_is_bootstrap = next_values.is_bootstrap_kernel;

@@ -103,13 +99,12 @@ pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
     }
 }

-pub(crate) fn eval_bootstrap_kernel_circuit<F: RichField + Extendable<D>, const D: usize>(
+pub(crate) fn eval_bootstrap_kernel_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
     builder: &mut CircuitBuilder<F, D>,
-    vars: StarkEvaluationTargets<D, NUM_CPU_COLUMNS>,
+    local_values: &CpuColumnsView<ExtensionTarget<D>>,
+    next_values: &CpuColumnsView<ExtensionTarget<D>>,
     yield_constr: &mut RecursiveConstraintConsumer<F, D>,
 ) {
-    let local_values: &CpuColumnsView<_> = vars.local_values.borrow();
-    let next_values: &CpuColumnsView<_> = vars.next_values.borrow();
     let one = builder.one_extension();

     // IS_BOOTSTRAP_KERNEL must have an init value of 1, a final value of 0, and a delta in {0, -1}.
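The IS_BOOTSTRAP_KERNEL comment above compresses three constraints: the flag starts at 1, ends at 0, and each transition delta is 0 or -1, so it can only switch off, and only once. A small sketch of the property, with plain integers as stand-ins for field elements:

    fn bootstrap_flag_is_valid(col: &[i64]) -> bool {
        col.first() == Some(&1)
            && col.last() == Some(&0)
            // delta * (delta + 1) == 0 forces each step to be 0 or -1.
            && col.windows(2).all(|w| {
                let delta = w[1] - w[0];
                delta * (delta + 1) == 0
            })
    }

    fn main() {
        assert!(bootstrap_flag_is_valid(&[1, 1, 1, 0, 0]));
        assert!(!bootstrap_flag_is_valid(&[1, 0, 1, 0])); // may not turn back on
    }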
@@ -7,6 +7,7 @@ use plonky2::field::extension::{Extendable, FieldExtension};
 use plonky2::field::packed::PackedField;
 use plonky2::field::types::Field;
 use plonky2::hash::hash_types::RichField;
+use plonky2::iop::ext_target::ExtensionTarget;

 use super::halt;
 use crate::all_stark::Table;

@@ -18,10 +19,10 @@ use crate::cpu::{
     modfp254, pc, push0, shift, simple_logic, stack, stack_bounds, syscalls_exceptions,
 };
 use crate::cross_table_lookup::{Column, TableWithColumns};
+use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
 use crate::memory::segments::Segment;
 use crate::memory::{NUM_CHANNELS, VALUE_LIMBS};
 use crate::stark::Stark;
-use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

 pub fn ctl_data_keccak_sponge<F: Field>() -> Vec<Column<F>> {
     // When executing KECCAK_GENERAL, the GP memory channels are used as follows:

@@ -227,19 +228,27 @@ impl<F: RichField, const D: usize> CpuStark<F, D> {
 }

 impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
-    const COLUMNS: usize = NUM_CPU_COLUMNS;
+    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_CPU_COLUMNS>
+    where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>;
+
+    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_CPU_COLUMNS>;

     fn eval_packed_generic<FE, P, const D2: usize>(
         &self,
-        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
+        vars: &Self::EvaluationFrame<FE, P, D2>,
         yield_constr: &mut ConstraintConsumer<P>,
     ) where
         FE: FieldExtension<D2, BaseField = F>,
         P: PackedField<Scalar = FE>,
     {
-        let local_values = vars.local_values.borrow();
-        let next_values = vars.next_values.borrow();
-        bootstrap_kernel::eval_bootstrap_kernel(vars, yield_constr);
+        let local_values: &[P; NUM_CPU_COLUMNS] = vars.get_local_values().try_into().unwrap();
+        let local_values: &CpuColumnsView<P> = local_values.borrow();
+        let next_values: &[P; NUM_CPU_COLUMNS] = vars.get_next_values().try_into().unwrap();
+        let next_values: &CpuColumnsView<P> = next_values.borrow();
+
+        bootstrap_kernel::eval_bootstrap_kernel_packed(local_values, next_values, yield_constr);
         contextops::eval_packed(local_values, next_values, yield_constr);
         control_flow::eval_packed_generic(local_values, next_values, yield_constr);
         decode::eval_packed_generic(local_values, yield_constr);

@@ -262,12 +271,22 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
     fn eval_ext_circuit(
         &self,
         builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
-        vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
+        vars: &Self::EvaluationFrameTarget,
         yield_constr: &mut RecursiveConstraintConsumer<F, D>,
     ) {
-        let local_values = vars.local_values.borrow();
-        let next_values = vars.next_values.borrow();
-        bootstrap_kernel::eval_bootstrap_kernel_circuit(builder, vars, yield_constr);
+        let local_values: &[ExtensionTarget<D>; NUM_CPU_COLUMNS] =
+            vars.get_local_values().try_into().unwrap();
+        let local_values: &CpuColumnsView<ExtensionTarget<D>> = local_values.borrow();
+        let next_values: &[ExtensionTarget<D>; NUM_CPU_COLUMNS] =
+            vars.get_next_values().try_into().unwrap();
+        let next_values: &CpuColumnsView<ExtensionTarget<D>> = next_values.borrow();
+
+        bootstrap_kernel::eval_bootstrap_kernel_ext_circuit(
+            builder,
+            local_values,
+            next_values,
+            yield_constr,
+        );
         contextops::eval_ext_circuit(builder, local_values, next_values, yield_constr);
         control_flow::eval_ext_circuit(builder, local_values, next_values, yield_constr);
         decode::eval_ext_circuit(builder, local_values, yield_constr);
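The `borrow()` calls above reinterpret a flat `[T; NUM_CPU_COLUMNS]` row as a struct with one named field per column; in the crate this works because `CpuColumnsView` is a `#[repr(C)]` struct with a matching `Borrow` impl. A minimal sketch of the same trick with a hypothetical two-column view (sound only because the layouts match):

    use std::borrow::Borrow;
    use std::mem::transmute;

    #[repr(C)]
    struct TinyColumnsView<T> {
        op: T,
        is_kernel: T,
    }

    impl<T> Borrow<TinyColumnsView<T>> for [T; 2] {
        fn borrow(&self) -> &TinyColumnsView<T> {
            // Sound because TinyColumnsView<T> is #[repr(C)] with exactly two
            // T fields, i.e. the same layout as [T; 2].
            unsafe { transmute(self) }
        }
    }

    fn main() {
        let row = [7u64, 1u64];
        let view: &TinyColumnsView<u64> = row.borrow();
        assert_eq!(view.op, 7);
        assert_eq!(view.is_kernel, 1);
    }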
@@ -36,7 +36,6 @@ pub(crate) fn combined_kernel() -> Kernel {
         include_str!("asm/core/nonce.asm"),
         include_str!("asm/core/process_txn.asm"),
         include_str!("asm/core/syscall.asm"),
-        include_str!("asm/core/syscall_stubs.asm"),
         include_str!("asm/core/terminate.asm"),
         include_str!("asm/core/transfer.asm"),
         include_str!("asm/core/util.asm"),
@@ -1,12 +0,0 @@
-// Labels for unimplemented syscalls to make the kernel assemble.
-// Each label should be removed from this file once it is implemented.
-
-// This is a temporary version that returns the block difficulty (i.e. the old version of this opcode).
-// TODO: Fix this.
-// TODO: What semantics will this have for Edge?
-global sys_prevrandao:
-    // stack: kexit_info
-    %charge_gas_const(@GAS_BASE)
-    %mload_global_metadata(@GLOBAL_METADATA_BLOCK_DIFFICULTY)
-    %stack (difficulty, kexit_info) -> (kexit_info, difficulty)
-    EXIT_KERNEL
@@ -383,3 +383,10 @@ zero_hash:
     %decrement
     %mstore_global_metadata(@GLOBAL_METADATA_CALL_STACK_DEPTH)
 %endmacro
+
+global sys_prevrandao:
+    // stack: kexit_info
+    %charge_gas_const(@GAS_BASE)
+    %mload_global_metadata(@GLOBAL_METADATA_BLOCK_RANDOM)
+    %stack (random, kexit_info) -> (kexit_info, random)
+    EXIT_KERNEL
@@ -39,55 +39,56 @@ pub(crate) enum GlobalMetadata {
     BlockTimestamp = 15,
     BlockNumber = 16,
     BlockDifficulty = 17,
-    BlockGasLimit = 18,
-    BlockChainId = 19,
-    BlockBaseFee = 20,
-    BlockGasUsed = 21,
+    BlockRandom = 18,
+    BlockGasLimit = 19,
+    BlockChainId = 20,
+    BlockBaseFee = 21,
+    BlockGasUsed = 22,
     /// Before current transactions block values.
-    BlockGasUsedBefore = 22,
+    BlockGasUsedBefore = 23,
     /// After current transactions block values.
-    BlockGasUsedAfter = 23,
+    BlockGasUsedAfter = 24,
     /// Current block header hash
-    BlockCurrentHash = 24,
+    BlockCurrentHash = 25,

     /// Gas to refund at the end of the transaction.
-    RefundCounter = 25,
+    RefundCounter = 26,
     /// Length of the addresses access list.
-    AccessedAddressesLen = 26,
+    AccessedAddressesLen = 27,
     /// Length of the storage keys access list.
-    AccessedStorageKeysLen = 27,
+    AccessedStorageKeysLen = 28,
     /// Length of the self-destruct list.
-    SelfDestructListLen = 28,
+    SelfDestructListLen = 29,
     /// Length of the bloom entry buffer.
-    BloomEntryLen = 29,
+    BloomEntryLen = 30,

     /// Length of the journal.
-    JournalLen = 30,
+    JournalLen = 31,
     /// Length of the `JournalData` segment.
-    JournalDataLen = 31,
+    JournalDataLen = 32,
     /// Current checkpoint.
-    CurrentCheckpoint = 32,
-    TouchedAddressesLen = 33,
+    CurrentCheckpoint = 33,
+    TouchedAddressesLen = 34,
     // Gas cost for the access list in type-1 txns. See EIP-2930.
-    AccessListDataCost = 34,
+    AccessListDataCost = 35,
     // Start of the access list in the RLP for type-1 txns.
-    AccessListRlpStart = 35,
+    AccessListRlpStart = 36,
     // Length of the access list in the RLP for type-1 txns.
-    AccessListRlpLen = 36,
+    AccessListRlpLen = 37,
     // Boolean flag indicating if the txn is a contract creation txn.
-    ContractCreation = 37,
-    IsPrecompileFromEoa = 38,
-    CallStackDepth = 39,
+    ContractCreation = 38,
+    IsPrecompileFromEoa = 39,
+    CallStackDepth = 40,
     /// Transaction logs list length
-    LogsLen = 40,
-    LogsDataLen = 41,
-    LogsPayloadLen = 42,
-    TxnNumberBefore = 43,
-    TxnNumberAfter = 44,
+    LogsLen = 41,
+    LogsDataLen = 42,
+    LogsPayloadLen = 43,
+    TxnNumberBefore = 44,
+    TxnNumberAfter = 45,
 }

 impl GlobalMetadata {
-    pub(crate) const COUNT: usize = 45;
+    pub(crate) const COUNT: usize = 46;

     pub(crate) fn all() -> [Self; Self::COUNT] {
         [

@@ -109,6 +110,7 @@ impl GlobalMetadata {
             Self::BlockTimestamp,
             Self::BlockNumber,
             Self::BlockDifficulty,
+            Self::BlockRandom,
             Self::BlockGasLimit,
             Self::BlockChainId,
             Self::BlockBaseFee,

@@ -160,6 +162,7 @@ impl GlobalMetadata {
             Self::BlockTimestamp => "GLOBAL_METADATA_BLOCK_TIMESTAMP",
             Self::BlockNumber => "GLOBAL_METADATA_BLOCK_NUMBER",
             Self::BlockDifficulty => "GLOBAL_METADATA_BLOCK_DIFFICULTY",
+            Self::BlockRandom => "GLOBAL_METADATA_BLOCK_RANDOM",
             Self::BlockGasLimit => "GLOBAL_METADATA_BLOCK_GAS_LIMIT",
             Self::BlockChainId => "GLOBAL_METADATA_BLOCK_CHAIN_ID",
             Self::BlockBaseFee => "GLOBAL_METADATA_BLOCK_BASE_FEE",
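Inserting `BlockRandom = 18` mid-enum is why this hunk renumbers every later discriminant and bumps `COUNT` from 45 to 46: the kernel addresses these fields by their numeric value, so the whole tail shifts by one. Because `all()` returns `[Self; Self::COUNT]`, an out-of-date COUNT fails to compile once the new variant is listed. A reduced sketch of the pattern (hypothetical variants):

    enum Meta {
        Difficulty = 0,
        Random = 1, // newly inserted: everything below shifts by one
        GasLimit = 2,
        ChainId = 3,
    }

    impl Meta {
        const COUNT: usize = 4;

        // The fixed-size return type makes the compiler check that COUNT
        // matches the number of listed elements.
        fn all() -> [Self; Self::COUNT] {
            [Self::Difficulty, Self::Random, Self::GasLimit, Self::ChainId]
        }
    }

    fn main() {
        assert_eq!(Meta::all().len(), Meta::COUNT);
        assert_eq!(Meta::Random as usize, 1);
    }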
@@ -117,7 +117,7 @@ impl<'a> Interpreter<'a> {
         let mut result = Self {
             kernel_mode: true,
             jumpdests: find_jumpdests(code),
-            generation_state: GenerationState::new(GenerationInputs::default(), code),
+            generation_state: GenerationState::new(GenerationInputs::default(), code).unwrap(),
             prover_inputs_map: prover_inputs,
             context: 0,
             halt_offsets: vec![DEFAULT_HALT_OFFSET],

@@ -905,7 +905,10 @@ impl<'a> Interpreter<'a> {
             .prover_inputs_map
             .get(&(self.generation_state.registers.program_counter - 1))
             .ok_or_else(|| anyhow!("Offset not in prover inputs."))?;
-        let output = self.generation_state.prover_input(prover_input_fn);
+        let output = self
+            .generation_state
+            .prover_input(prover_input_fn)
+            .map_err(|_| anyhow!("Invalid prover inputs."))?;
         self.push(output);
         Ok(())
     }
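`GenerationState::new` is now fallible, so this constructor site unwraps it (acceptable in the interpreter's test harness), while `prover_input` propagates its error into the surrounding `anyhow::Result`. A compact sketch of that split, with hypothetical names:

    use anyhow::{anyhow, Result};

    #[derive(Debug)]
    struct ProgramError;

    struct State;

    impl State {
        // Construction can now fail, e.g. on malformed prover inputs.
        fn new(ok: bool) -> std::result::Result<Self, ProgramError> {
            if ok { Ok(State) } else { Err(ProgramError) }
        }
    }

    fn run() -> Result<()> {
        // Fallible call sites convert the typed error into anyhow...
        let _state = State::new(true).map_err(|e| anyhow!("Invalid prover inputs: {:?}", e))?;
        Ok(())
    }

    fn main() {
        // ...while the constructor above simply unwraps.
        let _state = State::new(true).unwrap();
        run().unwrap();
    }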
@@ -1,6 +1,6 @@
 use std::collections::HashMap;

-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
 use ethereum_types::{Address, BigEndianHash, H256, U256};
 use keccak_hash::keccak;

@@ -46,7 +46,9 @@ fn prepare_interpreter(
     interpreter.generation_state.registers.program_counter = load_all_mpts;
     interpreter.push(0xDEADBEEFu32.into());

-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
 use ethereum_types::{Address, BigEndianHash, H256, U256};
 use keccak_hash::keccak;

@@ -37,7 +37,9 @@ fn prepare_interpreter(
     interpreter.generation_state.registers.program_counter = load_all_mpts;
     interpreter.push(0xDEADBEEFu32.into());

-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use eth_trie_utils::nibbles::Nibbles;
 use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
 use ethereum_types::{BigEndianHash, H256};

@@ -61,7 +61,8 @@ fn test_state_trie(

     let initial_stack = vec![0xDEADBEEFu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs).map_err(|_| anyhow!("Invalid MPT data"))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use eth_trie_utils::partial_trie::PartialTrie;
 use ethereum_types::{BigEndianHash, H256};

@@ -113,7 +113,8 @@ fn test_state_trie(trie_inputs: TrieInputs) -> Result<()> {

     let initial_stack = vec![0xDEADBEEFu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs).map_err(|_| anyhow!("Invalid MPT data"))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use eth_trie_utils::nibbles::Nibbles;
 use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
 use ethereum_types::{BigEndianHash, H256};

@@ -174,7 +174,8 @@ fn test_state_trie(

     let initial_stack = vec![0xDEADBEEFu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs).map_err(|_| anyhow!("Invalid MPT data"))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use ethereum_types::{BigEndianHash, H256, U256};

 use crate::cpu::kernel::aggregator::KERNEL;

@@ -23,7 +23,9 @@ fn load_all_mpts_empty() -> Result<()> {

     let initial_stack = vec![0xDEADBEEFu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -62,7 +64,9 @@ fn load_all_mpts_leaf() -> Result<()> {

     let initial_stack = vec![0xDEADBEEFu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -111,7 +115,9 @@ fn load_all_mpts_hash() -> Result<()> {

     let initial_stack = vec![0xDEADBEEFu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -152,7 +158,9 @@ fn load_all_mpts_empty_branch() -> Result<()> {

     let initial_stack = vec![0xDEADBEEFu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -207,7 +215,9 @@ fn load_all_mpts_ext_to_leaf() -> Result<()> {

     let initial_stack = vec![0xDEADBEEFu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use ethereum_types::BigEndianHash;

 use crate::cpu::kernel::aggregator::KERNEL;

@@ -22,7 +22,9 @@ fn mpt_read() -> Result<()> {

     let initial_stack = vec![0xdeadbeefu32.into()];
     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;
     assert_eq!(interpreter.stack(), vec![]);

@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use ethereum_types::{Address, U256};
 use hex_literal::hex;
 use keccak_hash::keccak;

@@ -413,7 +413,9 @@ fn test_mpt_insert_receipt() -> Result<()> {
     let initial_stack = vec![retdest];

     let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
-    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
+    interpreter.generation_state.mpt_prover_inputs =
+        all_mpt_prover_inputs_reversed(&trie_inputs)
+            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
     interpreter.run()?;

     // If TrieData is empty, we need to push 0 because the first value is always 0.
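Every test hunk above applies the same mechanical change: `all_mpt_prover_inputs_reversed` now returns `Result<Vec<U256>, ProgramError>`, and each call site maps the error into `anyhow` before `?`, which suggests `ProgramError` implements neither `std::error::Error` nor `Into<anyhow::Error>`. A reduced sketch of the pattern (hypothetical types):

    use anyhow::{anyhow, Result};

    #[derive(Debug)]
    enum ProgramError {
        InvalidRlp,
    }

    fn all_inputs_reversed(ok: bool) -> std::result::Result<Vec<u64>, ProgramError> {
        if ok { Ok(vec![3, 2, 1]) } else { Err(ProgramError::InvalidRlp) }
    }

    fn test_site() -> Result<()> {
        // A bare `?` would not compile without a From/Error impl, hence map_err.
        let inputs = all_inputs_reversed(true)
            .map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
        assert_eq!(inputs, vec![3, 2, 1]);
        Ok(())
    }

    fn main() {
        test_site().unwrap();
    }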
@@ -16,10 +16,10 @@ use plonky2::plonk::config::GenericConfig;
 use crate::all_stark::{Table, NUM_TABLES};
 use crate::config::StarkConfig;
 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use crate::evaluation_frame::StarkEvaluationFrame;
 use crate::permutation::{GrandProductChallenge, GrandProductChallengeSet};
 use crate::proof::{StarkProofTarget, StarkProofWithMetadata};
 use crate::stark::Stark;
-use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

 /// Represent a linear combination of columns.
 #[derive(Clone, Debug)]

@@ -473,7 +473,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
 /// Z(w) = Z(gw) * combine(w) where combine is called on the local row
 /// and not the next. This enables CTLs across two rows.
 pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const D2: usize>(
-    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
+    vars: &S::EvaluationFrame<FE, P, D2>,
     ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
     consumer: &mut ConstraintConsumer<P>,
 ) where

@@ -482,6 +482,9 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
     P: PackedField<Scalar = FE>,
     S: Stark<F, D>,
 {
+    let local_values = vars.get_local_values();
+    let next_values = vars.get_next_values();
+
     for lookup_vars in ctl_vars {
         let CtlCheckVars {
             local_z,

@@ -493,11 +496,11 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const

         let evals = columns
             .iter()
-            .map(|c| c.eval_with_next(vars.local_values, vars.next_values))
+            .map(|c| c.eval_with_next(local_values, next_values))
             .collect::<Vec<_>>();
         let combined = challenges.combine(evals.iter());
         let local_filter = if let Some(column) = filter_column {
-            column.eval_with_next(vars.local_values, vars.next_values)
+            column.eval_with_next(local_values, next_values)
         } else {
             P::ONES
         };

@@ -580,10 +583,13 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
     const D: usize,
 >(
     builder: &mut CircuitBuilder<F, D>,
-    vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
+    vars: &S::EvaluationFrameTarget,
     ctl_vars: &[CtlCheckVarsTarget<F, D>],
     consumer: &mut RecursiveConstraintConsumer<F, D>,
 ) {
+    let local_values = vars.get_local_values();
+    let next_values = vars.get_next_values();
+
     for lookup_vars in ctl_vars {
         let CtlCheckVarsTarget {
             local_z,

@@ -595,7 +601,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<

         let one = builder.one_extension();
         let local_filter = if let Some(column) = filter_column {
-            column.eval_circuit(builder, vars.local_values)
+            column.eval_circuit(builder, local_values)
         } else {
             one
         };

@@ -611,7 +617,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<

         let evals = columns
             .iter()
-            .map(|c| c.eval_with_next_circuit(builder, vars.local_values, vars.next_values))
+            .map(|c| c.eval_with_next_circuit(builder, local_values, next_values))
             .collect::<Vec<_>>();

         let combined = challenges.combine_circuit(builder, &evals);
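The recurrence cited above, Z(w) = Z(gw) · combine(w), is a filtered grand product walked across the trace: rows where the filter is off contribute a factor of 1, and two tables pass the CTL when their final products agree. A toy model of that check, with plain integers for field elements and a trivial combine (illustration only):

    /// z[i+1] = z[i] * combined[i] wherever the filter is on.
    fn running_product(combined: &[i64], filter: &[i64]) -> Vec<i64> {
        let mut z = vec![1i64];
        for (c, f) in combined.iter().zip(filter) {
            let last = *z.last().unwrap();
            // filter off => the row contributes a factor of 1
            z.push(if *f == 1 { last * c } else { last });
        }
        z
    }

    fn main() {
        let looking = running_product(&[2, 3, 5], &[1, 0, 1]);
        let looked = running_product(&[5, 2], &[1, 1]);
        // The lookup argument compares the two final products.
        assert_eq!(looking.last(), looked.last());
    }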
evm/src/evaluation_frame.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
+/// A trait for viewing an evaluation frame of a STARK table.
+///
+/// It allows to access the current and next rows at a given step
+/// and can be used to implement constraint evaluation both natively
+/// and recursively.
+pub trait StarkEvaluationFrame<T: Copy + Clone + Default>: Sized {
+    /// The number of columns for the STARK table this evaluation frame views.
+    const COLUMNS: usize;
+
+    /// Returns the local values (i.e. current row) for this evaluation frame.
+    fn get_local_values(&self) -> &[T];
+    /// Returns the next values (i.e. next row) for this evaluation frame.
+    fn get_next_values(&self) -> &[T];
+
+    /// Outputs a new evaluation frame from the provided local and next values.
+    ///
+    /// **NOTE**: Concrete implementations of this method SHOULD ensure that
+    /// the provided slices lengths match the `Self::COLUMNS` value.
+    fn from_values(lv: &[T], nv: &[T]) -> Self;
+}
+
+pub struct StarkFrame<T: Copy + Clone + Default, const N: usize> {
+    local_values: [T; N],
+    next_values: [T; N],
+}
+
+impl<T: Copy + Clone + Default, const N: usize> StarkEvaluationFrame<T> for StarkFrame<T, N> {
+    const COLUMNS: usize = N;
+
+    fn get_local_values(&self) -> &[T] {
+        &self.local_values
+    }
+
+    fn get_next_values(&self) -> &[T] {
+        &self.next_values
+    }
+
+    fn from_values(lv: &[T], nv: &[T]) -> Self {
+        assert_eq!(lv.len(), Self::COLUMNS);
+        assert_eq!(nv.len(), Self::COLUMNS);
+
+        Self {
+            local_values: lv.try_into().unwrap(),
+            next_values: nv.try_into().unwrap(),
+        }
+    }
+}
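Given the trait and struct above, constructing and consuming a frame looks like this (a usage sketch assuming the definitions from the new file are in scope, with `u64` values and three columns):

    fn main() {
        // from_values copies the slices into fixed-size arrays, asserting
        // that both lengths equal COLUMNS (3 here).
        let frame: StarkFrame<u64, 3> = StarkFrame::from_values(&[1, 2, 3], &[4, 5, 6]);

        // Constraint evaluators read the rows back as slices and convert to
        // arrays, e.g. vars.get_local_values().try_into().unwrap().
        assert_eq!(frame.get_local_values(), &[1, 2, 3]);
        assert_eq!(frame.get_next_values(), &[4, 5, 6]);
    }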
@@ -28,17 +28,10 @@ use plonky2::util::timing::TimingTree;
 use plonky2_util::log2_ceil;

 use crate::all_stark::{all_cross_table_lookups, AllStark, Table, NUM_TABLES};
-use crate::arithmetic::arithmetic_stark::ArithmeticStark;
-use crate::byte_packing::byte_packing_stark::BytePackingStark;
 use crate::config::StarkConfig;
-use crate::cpu::cpu_stark::CpuStark;
 use crate::cross_table_lookup::{verify_cross_table_lookups_circuit, CrossTableLookup};
 use crate::generation::GenerationInputs;
 use crate::get_challenges::observe_public_values_target;
-use crate::keccak::keccak_stark::KeccakStark;
-use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
-use crate::logic::LogicStark;
-use crate::memory::memory_stark::MemoryStark;
 use crate::permutation::{get_grand_product_challenge_set_target, GrandProductChallengeSet};
 use crate::proof::{
     BlockHashesTarget, BlockMetadataTarget, ExtraBlockDataTarget, PublicValues, PublicValuesTarget,

@@ -297,13 +290,6 @@ where
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F> + 'static,
     C::Hasher: AlgebraicHasher<F>,
-    [(); ArithmeticStark::<F, D>::COLUMNS]:,
-    [(); BytePackingStark::<F, D>::COLUMNS]:,
-    [(); CpuStark::<F, D>::COLUMNS]:,
-    [(); KeccakStark::<F, D>::COLUMNS]:,
-    [(); KeccakSpongeStark::<F, D>::COLUMNS]:,
-    [(); LogicStark::<F, D>::COLUMNS]:,
-    [(); MemoryStark::<F, D>::COLUMNS]:,
 {
     pub fn to_bytes(
         &self,

@@ -1141,10 +1127,7 @@ where
         degree_bits_range: Range<usize>,
         all_ctls: &[CrossTableLookup<F>],
         stark_config: &StarkConfig,
-    ) -> Self
-    where
-        [(); S::COLUMNS]:,
-    {
+    ) -> Self {
         let by_stark_size = degree_bits_range
             .map(|degree_bits| {
                 (

@@ -1265,10 +1248,7 @@ where
         degree_bits: usize,
         all_ctls: &[CrossTableLookup<F>],
         stark_config: &StarkConfig,
-    ) -> Self
-    where
-        [(); S::COLUMNS]:,
-    {
+    ) -> Self {
         let initial_wrapper = recursive_stark_circuit(
             table,
             stark,
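The deleted `[(); S::COLUMNS]:` bounds existed only to satisfy `generic_const_exprs`: the old `StarkEvaluationVars<FE, P, { S::COLUMNS }>` parameter forced every generic caller to restate that the const expression is well-formed. With the frame moved into an associated type, the array length is private to each implementor and the bounds disappear. A reduced sketch of the resulting shape (hypothetical traits, stable Rust):

    trait Frame {
        const COLUMNS: usize;
        fn local(&self) -> &[u64];
    }

    trait Stark {
        // The column count now travels inside an associated type, so callers
        // name S::EvaluationFrame with no const-generic bounds.
        type EvaluationFrame: Frame;
        fn eval(&self, frame: &Self::EvaluationFrame);
    }

    struct MyFrame([u64; 4]);

    impl Frame for MyFrame {
        const COLUMNS: usize = 4;
        fn local(&self) -> &[u64] {
            &self.0
        }
    }

    struct MyStark;

    impl Stark for MyStark {
        type EvaluationFrame = MyFrame;
        fn eval(&self, frame: &Self::EvaluationFrame) {
            assert_eq!(frame.local().len(), MyFrame::COLUMNS);
        }
    }

    fn main() {
        MyStark.eval(&MyFrame([0; 4]));
    }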
@@ -1,5 +1,6 @@
 use std::collections::HashMap;

+use anyhow::anyhow;
 use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
 use ethereum_types::{Address, BigEndianHash, H256, U256};
 use plonky2::field::extension::Extendable;

@@ -102,6 +103,10 @@ fn apply_metadata_and_tries_memops<F: RichField + Extendable<D>, const D: usize>
         (GlobalMetadata::BlockTimestamp, metadata.block_timestamp),
         (GlobalMetadata::BlockNumber, metadata.block_number),
         (GlobalMetadata::BlockDifficulty, metadata.block_difficulty),
+        (
+            GlobalMetadata::BlockRandom,
+            metadata.block_random.into_uint(),
+        ),
         (GlobalMetadata::BlockGasLimit, metadata.block_gaslimit),
         (GlobalMetadata::BlockChainId, metadata.block_chain_id),
         (GlobalMetadata::BlockBaseFee, metadata.block_base_fee),

@@ -218,7 +223,8 @@ pub fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
     PublicValues,
     GenerationOutputs,
 )> {
-    let mut state = GenerationState::<F>::new(inputs.clone(), &KERNEL.code);
+    let mut state = GenerationState::<F>::new(inputs.clone(), &KERNEL.code)
+        .map_err(|err| anyhow!("Failed to parse all the initial prover inputs: {:?}", err))?;

     apply_metadata_and_tries_memops(&mut state, &inputs);

@@ -236,7 +242,8 @@ pub fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
         state.traces.get_lengths()
     );

-    let outputs = get_outputs(&mut state);
+    let outputs = get_outputs(&mut state)
+        .map_err(|err| anyhow!("Failed to generate post-state info: {:?}", err))?;

     let read_metadata = |field| state.memory.read_global_metadata(field);
     let trie_roots_before = TrieRoots {
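`block_random` is a 32-byte `H256` (the raw PREVRANDAO value), while the kernel's metadata table stores `U256` words, hence the `into_uint()` above. A small sketch of the same conversion with the `ethereum_types` helpers used in this file:

    use ethereum_types::{BigEndianHash, H256, U256};

    fn main() {
        // Interpret the 32-byte hash as a big-endian 256-bit integer, the
        // conversion applied to metadata.block_random above.
        let random = H256::from_low_u64_be(0x2a);
        let as_word: U256 = random.into_uint();
        assert_eq!(as_word, U256::from(0x2a));
    }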
@ -11,6 +11,7 @@ use rlp_derive::{RlpDecodable, RlpEncodable};
|
||||
|
||||
use crate::cpu::kernel::constants::trie_type::PartialTrieType;
|
||||
use crate::generation::TrieInputs;
|
||||
use crate::witness::errors::{ProgramError, ProverInputError};
|
||||
use crate::Node;
|
||||
|
||||
#[derive(RlpEncodable, RlpDecodable, Debug)]
|
||||
@ -60,15 +61,18 @@ pub struct LegacyReceiptRlp {
|
||||
pub logs: Vec<LogRlp>,
|
||||
}
|
||||
|
||||
pub(crate) fn all_mpt_prover_inputs_reversed(trie_inputs: &TrieInputs) -> Vec<U256> {
|
||||
let mut inputs = all_mpt_prover_inputs(trie_inputs);
|
||||
pub(crate) fn all_mpt_prover_inputs_reversed(
|
||||
trie_inputs: &TrieInputs,
|
||||
) -> Result<Vec<U256>, ProgramError> {
|
||||
let mut inputs = all_mpt_prover_inputs(trie_inputs)?;
|
||||
inputs.reverse();
|
||||
inputs
|
||||
Ok(inputs)
|
||||
}
|
||||
|
||||
pub(crate) fn parse_receipts(rlp: &[u8]) -> Vec<U256> {
|
||||
let payload_info = PayloadInfo::from(rlp).unwrap();
|
||||
let decoded_receipt: LegacyReceiptRlp = rlp::decode(rlp).unwrap();
|
||||
pub(crate) fn parse_receipts(rlp: &[u8]) -> Result<Vec<U256>, ProgramError> {
|
||||
let payload_info = PayloadInfo::from(rlp).map_err(|_| ProgramError::InvalidRlp)?;
|
||||
let decoded_receipt: LegacyReceiptRlp =
|
||||
rlp::decode(rlp).map_err(|_| ProgramError::InvalidRlp)?;
|
||||
let mut parsed_receipt = Vec::new();
|
||||
|
||||
parsed_receipt.push(payload_info.value_len.into()); // payload_len of the entire receipt
|
||||
@ -76,13 +80,15 @@ pub(crate) fn parse_receipts(rlp: &[u8]) -> Vec<U256> {
|
||||
parsed_receipt.push(decoded_receipt.cum_gas_used);
|
||||
parsed_receipt.extend(decoded_receipt.bloom.iter().map(|byte| U256::from(*byte)));
|
||||
let encoded_logs = rlp::encode_list(&decoded_receipt.logs);
|
||||
let logs_payload_info = PayloadInfo::from(&encoded_logs).unwrap();
|
||||
let logs_payload_info =
|
||||
PayloadInfo::from(&encoded_logs).map_err(|_| ProgramError::InvalidRlp)?;
|
||||
parsed_receipt.push(logs_payload_info.value_len.into()); // payload_len of all the logs
|
||||
parsed_receipt.push(decoded_receipt.logs.len().into());
|
||||
|
||||
for log in decoded_receipt.logs {
|
||||
let encoded_log = rlp::encode(&log);
|
||||
let log_payload_info = PayloadInfo::from(&encoded_log).unwrap();
|
||||
let log_payload_info =
|
||||
PayloadInfo::from(&encoded_log).map_err(|_| ProgramError::InvalidRlp)?;
|
||||
parsed_receipt.push(log_payload_info.value_len.into()); // payload of one log
|
||||
parsed_receipt.push(U256::from_big_endian(&log.address.to_fixed_bytes()));
|
||||
parsed_receipt.push(log.topics.len().into());
|
||||
@ -91,10 +97,10 @@ pub(crate) fn parse_receipts(rlp: &[u8]) -> Vec<U256> {
|
||||
parsed_receipt.extend(log.data.iter().map(|byte| U256::from(*byte)));
|
||||
}
|
||||
|
||||
parsed_receipt
|
||||
Ok(parsed_receipt)
|
||||
}
|
||||
/// Generate prover inputs for the initial MPT data, in the format expected by `mpt/load.asm`.
|
||||
pub(crate) fn all_mpt_prover_inputs(trie_inputs: &TrieInputs) -> Vec<U256> {
|
||||
pub(crate) fn all_mpt_prover_inputs(trie_inputs: &TrieInputs) -> Result<Vec<U256>, ProgramError> {
|
||||
let mut prover_inputs = vec![];
|
||||
|
||||
let storage_tries_by_state_key = trie_inputs
|
||||
@ -111,19 +117,19 @@ pub(crate) fn all_mpt_prover_inputs(trie_inputs: &TrieInputs) -> Vec<U256> {
|
||||
empty_nibbles(),
|
||||
&mut prover_inputs,
|
||||
&storage_tries_by_state_key,
|
||||
);
|
||||
)?;
|
||||
|
||||
mpt_prover_inputs(&trie_inputs.transactions_trie, &mut prover_inputs, &|rlp| {
|
||||
rlp::decode_list(rlp)
|
||||
});
|
||||
Ok(rlp::decode_list(rlp))
|
||||
})?;
|
||||
|
||||
mpt_prover_inputs(
|
||||
&trie_inputs.receipts_trie,
|
||||
&mut prover_inputs,
|
||||
&parse_receipts,
|
||||
);
|
||||
)?;
|
||||
|
||||
prover_inputs
|
||||
Ok(prover_inputs)
|
||||
}
|
||||
|
/// Given a trie, generate the prover input data for that trie. In essence, this serializes a trie
@ -134,36 +140,52 @@ pub(crate) fn mpt_prover_inputs<F>(
trie: &HashedPartialTrie,
prover_inputs: &mut Vec<U256>,
parse_value: &F,
) where
F: Fn(&[u8]) -> Vec<U256>,
) -> Result<(), ProgramError>
where
F: Fn(&[u8]) -> Result<Vec<U256>, ProgramError>,
{
prover_inputs.push((PartialTrieType::of(trie) as u32).into());

match trie.deref() {
Node::Empty => {}
Node::Hash(h) => prover_inputs.push(U256::from_big_endian(h.as_bytes())),
Node::Empty => Ok(()),
Node::Hash(h) => {
prover_inputs.push(U256::from_big_endian(h.as_bytes()));
Ok(())
}
Node::Branch { children, value } => {
if value.is_empty() {
prover_inputs.push(U256::zero()); // value_present = 0
} else {
let parsed_value = parse_value(value);
let parsed_value = parse_value(value)?;
prover_inputs.push(U256::one()); // value_present = 1
prover_inputs.extend(parsed_value);
}
for child in children {
mpt_prover_inputs(child, prover_inputs, parse_value);
mpt_prover_inputs(child, prover_inputs, parse_value)?;
}

Ok(())
}
Node::Extension { nibbles, child } => {
prover_inputs.push(nibbles.count.into());
prover_inputs.push(nibbles.try_into_u256().unwrap());
mpt_prover_inputs(child, prover_inputs, parse_value);
prover_inputs.push(
nibbles
.try_into_u256()
.map_err(|_| ProgramError::IntegerTooLarge)?,
);
mpt_prover_inputs(child, prover_inputs, parse_value)
}
Node::Leaf { nibbles, value } => {
prover_inputs.push(nibbles.count.into());
prover_inputs.push(nibbles.try_into_u256().unwrap());
let leaf = parse_value(value);
prover_inputs.push(
nibbles
.try_into_u256()
.map_err(|_| ProgramError::IntegerTooLarge)?,
);
let leaf = parse_value(value)?;
prover_inputs.extend(leaf);

Ok(())
}
}
}
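mpt_prover_inputs serializes a trie depth-first: push a type tag, then the node payload, recursing into children. Once it returns Result, every recursive call needs `?` so an error deep in the trie unwinds cleanly. A toy sketch of the same shape over a simplified node type (Node, serialize, and the tag values are all illustrative):

#[derive(Debug)]
enum Error {
    BadValue,
}

enum Node {
    Empty,
    Leaf(Vec<u8>),
    Branch(Vec<Node>),
}

/// Depth-first serialization: push a type tag, then the payload, recursing
/// into children. Any failure unwinds through the whole recursion.
fn serialize(
    node: &Node,
    out: &mut Vec<u64>,
    parse: &impl Fn(&[u8]) -> Result<u64, Error>,
) -> Result<(), Error> {
    match node {
        Node::Empty => {
            out.push(0); // tag for Empty
            Ok(())
        }
        Node::Leaf(value) => {
            out.push(1); // tag for Leaf
            out.push(parse(value)?);
            Ok(())
        }
        Node::Branch(children) => {
            out.push(2); // tag for Branch
            for child in children {
                serialize(child, out, parse)?;
            }
            Ok(())
        }
    }
}

fn main() {
    let trie = Node::Branch(vec![Node::Leaf(vec![7]), Node::Empty]);
    let mut out = Vec::new();
    serialize(&trie, &mut out, &|v: &[u8]| {
        v.first().copied().map(u64::from).ok_or(Error::BadValue)
    })
    .unwrap();
    assert_eq!(out, vec![2, 1, 7, 0]);
}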
@ -175,13 +197,20 @@ pub(crate) fn mpt_prover_inputs_state_trie(
key: Nibbles,
prover_inputs: &mut Vec<U256>,
storage_tries_by_state_key: &HashMap<Nibbles, &HashedPartialTrie>,
) {
) -> Result<(), ProgramError> {
prover_inputs.push((PartialTrieType::of(trie) as u32).into());
match trie.deref() {
Node::Empty => {}
Node::Hash(h) => prover_inputs.push(U256::from_big_endian(h.as_bytes())),
Node::Empty => Ok(()),
Node::Hash(h) => {
prover_inputs.push(U256::from_big_endian(h.as_bytes()));
Ok(())
}
Node::Branch { children, value } => {
assert!(value.is_empty(), "State trie should not have branch values");
if !value.is_empty() {
return Err(ProgramError::ProverInputError(
ProverInputError::InvalidMptInput,
));
}
prover_inputs.push(U256::zero()); // value_present = 0

for (i, child) in children.iter().enumerate() {
@ -194,22 +223,28 @@ pub(crate) fn mpt_prover_inputs_state_trie(
extended_key,
prover_inputs,
storage_tries_by_state_key,
);
)?;
}

Ok(())
}
Node::Extension { nibbles, child } => {
prover_inputs.push(nibbles.count.into());
prover_inputs.push(nibbles.try_into_u256().unwrap());
prover_inputs.push(
nibbles
.try_into_u256()
.map_err(|_| ProgramError::IntegerTooLarge)?,
);
let extended_key = key.merge_nibbles(nibbles);
mpt_prover_inputs_state_trie(
child,
extended_key,
prover_inputs,
storage_tries_by_state_key,
);
)
}
Node::Leaf { nibbles, value } => {
let account: AccountRlp = rlp::decode(value).expect("Decoding failed");
let account: AccountRlp = rlp::decode(value).map_err(|_| ProgramError::InvalidRlp)?;
let AccountRlp {
nonce,
balance,
@ -228,18 +263,24 @@ pub(crate) fn mpt_prover_inputs_state_trie(
"In TrieInputs, an account's storage_root didn't match the associated storage trie hash");

prover_inputs.push(nibbles.count.into());
prover_inputs.push(nibbles.try_into_u256().unwrap());
prover_inputs.push(
nibbles
.try_into_u256()
.map_err(|_| ProgramError::IntegerTooLarge)?,
);
prover_inputs.push(nonce);
prover_inputs.push(balance);
mpt_prover_inputs(storage_trie, prover_inputs, &parse_storage_value);
mpt_prover_inputs(storage_trie, prover_inputs, &parse_storage_value)?;
prover_inputs.push(code_hash.into_uint());

Ok(())
}
}
}

fn parse_storage_value(value_rlp: &[u8]) -> Vec<U256> {
let value: U256 = rlp::decode(value_rlp).expect("Decoding failed");
vec![value]
fn parse_storage_value(value_rlp: &[u8]) -> Result<Vec<U256>, ProgramError> {
let value: U256 = rlp::decode(value_rlp).map_err(|_| ProgramError::InvalidRlp)?;
Ok(vec![value])
}

fn empty_nibbles() -> Nibbles {

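In parse_storage_value above, rlp::decode returns its own DecoderError; map_err folds that foreign error into ProgramError rather than letting .expect abort. The same conversion, sketched using only the standard library (str::parse stands in for rlp::decode, and the error enum is illustrative):

#[derive(Debug)]
enum ProgramError {
    InvalidRlp,
}

// Stand-in for `rlp::decode`: any fallible parse with a foreign error type.
fn parse_storage_value(text: &str) -> Result<Vec<u64>, ProgramError> {
    // map_err converts std::num::ParseIntError into our own error type,
    // so the function exposes a single error enum to its callers.
    let value: u64 = text.parse().map_err(|_| ProgramError::InvalidRlp)?;
    Ok(vec![value])
}

fn main() {
    assert_eq!(parse_storage_value("42").unwrap(), vec![42]);
    assert!(parse_storage_value("not a number").is_err());
}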
@ -8,6 +8,8 @@ use crate::generation::state::GenerationState;
use crate::generation::trie_extractor::{
read_state_trie_value, read_storage_trie_value, read_trie, AccountTrieRecord,
};
use crate::util::u256_to_usize;
use crate::witness::errors::ProgramError;

/// The post-state after trace generation; intended for debugging.
#[derive(Clone, Debug)]
@ -29,47 +31,44 @@ pub struct AccountOutput {
pub storage: HashMap<U256, U256>,
}

pub(crate) fn get_outputs<F: Field>(state: &mut GenerationState<F>) -> GenerationOutputs {
// First observe all addresses passed in the by caller.
pub(crate) fn get_outputs<F: Field>(
state: &mut GenerationState<F>,
) -> Result<GenerationOutputs, ProgramError> {
// First observe all addresses passed in by caller.
for address in state.inputs.addresses.clone() {
state.observe_address(address);
}

let account_map = read_trie::<AccountTrieRecord>(
&state.memory,
state.memory.read_global_metadata(StateTrieRoot).as_usize(),
read_state_trie_value,
);
let ptr = u256_to_usize(state.memory.read_global_metadata(StateTrieRoot))?;
let account_map = read_trie::<AccountTrieRecord>(&state.memory, ptr, read_state_trie_value)?;

let accounts = account_map
.into_iter()
.map(|(state_key_nibbles, account)| {
assert_eq!(
state_key_nibbles.count, 64,
"Each state key should have 64 nibbles = 256 bits"
);
let state_key_h256 = H256::from_uint(&state_key_nibbles.try_into_u256().unwrap());
let mut accounts = HashMap::with_capacity(account_map.len());

let addr_or_state_key =
if let Some(address) = state.state_key_to_address.get(&state_key_h256) {
AddressOrStateKey::Address(*address)
} else {
AddressOrStateKey::StateKey(state_key_h256)
};
for (state_key_nibbles, account) in account_map.into_iter() {
if state_key_nibbles.count != 64 {
return Err(ProgramError::IntegerTooLarge);
}
let state_key_h256 = H256::from_uint(&state_key_nibbles.try_into_u256().unwrap());

let account_output = account_trie_record_to_output(state, account);
(addr_or_state_key, account_output)
})
.collect();
let addr_or_state_key =
if let Some(address) = state.state_key_to_address.get(&state_key_h256) {
AddressOrStateKey::Address(*address)
} else {
AddressOrStateKey::StateKey(state_key_h256)
};

GenerationOutputs { accounts }
let account_output = account_trie_record_to_output(state, account)?;
accounts.insert(addr_or_state_key, account_output);
}

Ok(GenerationOutputs { accounts })
}

fn account_trie_record_to_output<F: Field>(
state: &GenerationState<F>,
account: AccountTrieRecord,
) -> AccountOutput {
let storage = get_storage(state, account.storage_ptr);
) -> Result<AccountOutput, ProgramError> {
let storage = get_storage(state, account.storage_ptr)?;

// TODO: This won't work if the account was created during the txn.
// Need to track changes to code, similar to how we track addresses
@ -78,27 +77,33 @@ fn account_trie_record_to_output<F: Field>(
.inputs
.contract_code
.get(&account.code_hash)
.unwrap_or_else(|| panic!("Code not found: {:?}", account.code_hash))
.ok_or(ProgramError::UnknownContractCode)?
.clone();

AccountOutput {
Ok(AccountOutput {
balance: account.balance,
nonce: account.nonce,
storage,
code,
}
})
}

/// Get an account's storage trie, given a pointer to its root.
fn get_storage<F: Field>(state: &GenerationState<F>, storage_ptr: usize) -> HashMap<U256, U256> {
read_trie::<U256>(&state.memory, storage_ptr, read_storage_trie_value)
.into_iter()
.map(|(storage_key_nibbles, value)| {
assert_eq!(
storage_key_nibbles.count, 64,
"Each storage key should have 64 nibbles = 256 bits"
);
(storage_key_nibbles.try_into_u256().unwrap(), value)
})
.collect()
fn get_storage<F: Field>(
state: &GenerationState<F>,
storage_ptr: usize,
) -> Result<HashMap<U256, U256>, ProgramError> {
let storage_trie = read_trie::<U256>(&state.memory, storage_ptr, |x| {
Ok(read_storage_trie_value(x))
})?;

let mut map = HashMap::with_capacity(storage_trie.len());
for (storage_key_nibbles, value) in storage_trie.into_iter() {
if storage_key_nibbles.count != 64 {
return Err(ProgramError::IntegerTooLarge);
};
map.insert(storage_key_nibbles.try_into_u256().unwrap(), value);
}

Ok(map)
}

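get_outputs and get_storage above trade an iterator chain for an explicit loop because `?` cannot be used inside a plain .map() closure. Collecting into Result is the idiomatic alternative when no early side effects are needed; both shapes are sketched below (check_key and the error enum are illustrative):

#[derive(Debug)]
enum Error {
    BadKey,
}

fn check_key(k: u32) -> Result<u32, Error> {
    if k < 64 { Ok(k) } else { Err(Error::BadKey) }
}

// Shape 1: explicit loop, as in the commit; allows `?` and interleaved
// side effects such as inserting into a map as you go.
fn with_loop(keys: &[u32]) -> Result<Vec<u32>, Error> {
    let mut out = Vec::with_capacity(keys.len());
    for &k in keys {
        out.push(check_key(k)?);
    }
    Ok(out)
}

// Shape 2: collect an iterator of Results into a Result of a collection;
// stops at the first Err.
fn with_collect(keys: &[u32]) -> Result<Vec<u32>, Error> {
    keys.iter().map(|&k| check_key(k)).collect()
}

fn main() {
    assert_eq!(with_loop(&[1, 2]).unwrap(), vec![1, 2]);
    assert!(with_collect(&[1, 99]).is_err());
}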
@ -16,7 +16,9 @@ use crate::generation::prover_input::FieldOp::{Inverse, Sqrt};
use crate::generation::state::GenerationState;
use crate::memory::segments::Segment;
use crate::memory::segments::Segment::BnPairing;
use crate::util::{biguint_to_mem_vec, mem_vec_to_biguint};
use crate::util::{biguint_to_mem_vec, mem_vec_to_biguint, u256_to_usize};
use crate::witness::errors::ProgramError;
use crate::witness::errors::ProverInputError::*;
use crate::witness::util::{current_context_peek, stack_peek};

/// Prover input function represented as a scoped function name.
@ -31,7 +33,7 @@ impl From<Vec<String>> for ProverInputFn {
}

impl<F: Field> GenerationState<F> {
pub(crate) fn prover_input(&mut self, input_fn: &ProverInputFn) -> U256 {
pub(crate) fn prover_input(&mut self, input_fn: &ProverInputFn) -> Result<U256, ProgramError> {
match input_fn.0[0].as_str() {
"end_of_txns" => self.run_end_of_txns(),
"ff" => self.run_ff(input_fn),
@ -42,51 +44,59 @@ impl<F: Field> GenerationState<F> {
"current_hash" => self.run_current_hash(),
"account_code" => self.run_account_code(input_fn),
"bignum_modmul" => self.run_bignum_modmul(),
_ => panic!("Unrecognized prover input function."),
_ => Err(ProgramError::ProverInputError(InvalidFunction)),
}
}

fn run_end_of_txns(&mut self) -> U256 {
fn run_end_of_txns(&mut self) -> Result<U256, ProgramError> {
let end = self.next_txn_index == self.inputs.signed_txns.len();
if end {
U256::one()
Ok(U256::one())
} else {
self.next_txn_index += 1;
U256::zero()
Ok(U256::zero())
}
}

/// Finite field operations.
fn run_ff(&self, input_fn: &ProverInputFn) -> U256 {
let field = EvmField::from_str(input_fn.0[1].as_str()).unwrap();
let op = FieldOp::from_str(input_fn.0[2].as_str()).unwrap();
let x = stack_peek(self, 0).expect("Empty stack");
fn run_ff(&self, input_fn: &ProverInputFn) -> Result<U256, ProgramError> {
let field = EvmField::from_str(input_fn.0[1].as_str())
.map_err(|_| ProgramError::ProverInputError(InvalidFunction))?;
let op = FieldOp::from_str(input_fn.0[2].as_str())
.map_err(|_| ProgramError::ProverInputError(InvalidFunction))?;
let x = stack_peek(self, 0)?;
field.op(op, x)
}

/// Special finite field operations.
fn run_sf(&self, input_fn: &ProverInputFn) -> U256 {
let field = EvmField::from_str(input_fn.0[1].as_str()).unwrap();
fn run_sf(&self, input_fn: &ProverInputFn) -> Result<U256, ProgramError> {
let field = EvmField::from_str(input_fn.0[1].as_str())
.map_err(|_| ProgramError::ProverInputError(InvalidFunction))?;
let inputs: [U256; 4] = match field {
Bls381Base => std::array::from_fn(|i| {
stack_peek(self, i).expect("Insufficient number of items on stack")
}),
Bls381Base => (0..4)
.map(|i| stack_peek(self, i))
.collect::<Result<Vec<U256>, _>>()?
.try_into()
.unwrap(),
_ => todo!(),
};
match input_fn.0[2].as_str() {
let res = match input_fn.0[2].as_str() {
"add_lo" => field.add_lo(inputs),
"add_hi" => field.add_hi(inputs),
"mul_lo" => field.mul_lo(inputs),
"mul_hi" => field.mul_hi(inputs),
"sub_lo" => field.sub_lo(inputs),
"sub_hi" => field.sub_hi(inputs),
_ => todo!(),
}
_ => return Err(ProgramError::ProverInputError(InvalidFunction)),
};

Ok(res)
}

/// Finite field extension operations.
fn run_ffe(&self, input_fn: &ProverInputFn) -> U256 {
let field = EvmField::from_str(input_fn.0[1].as_str()).unwrap();
fn run_ffe(&self, input_fn: &ProverInputFn) -> Result<U256, ProgramError> {
let field = EvmField::from_str(input_fn.0[1].as_str())
.map_err(|_| ProgramError::ProverInputError(InvalidFunction))?;
let n = input_fn.0[2]
.as_str()
.split('_')
@ -94,61 +104,61 @@ impl<F: Field> GenerationState<F> {
.unwrap()
.parse::<usize>()
.unwrap();
let ptr = stack_peek(self, 11 - n)
.expect("Insufficient number of items on stack")
.as_usize();
let ptr = stack_peek(self, 11 - n).map(u256_to_usize)??;

let f: [U256; 12] = match field {
Bn254Base => std::array::from_fn(|i| current_context_peek(self, BnPairing, ptr + i)),
_ => todo!(),
};
field.field_extension_inverse(n, f)
Ok(field.field_extension_inverse(n, f))
}

/// MPT data.
fn run_mpt(&mut self) -> U256 {
fn run_mpt(&mut self) -> Result<U256, ProgramError> {
self.mpt_prover_inputs
.pop()
.unwrap_or_else(|| panic!("Out of MPT data"))
.ok_or(ProgramError::ProverInputError(OutOfMptData))
}

/// RLP data.
fn run_rlp(&mut self) -> U256 {
fn run_rlp(&mut self) -> Result<U256, ProgramError> {
self.rlp_prover_inputs
.pop()
.unwrap_or_else(|| panic!("Out of RLP data"))
.ok_or(ProgramError::ProverInputError(OutOfRlpData))
}

fn run_current_hash(&mut self) -> U256 {
U256::from_big_endian(&self.inputs.block_hashes.cur_hash.0)
fn run_current_hash(&mut self) -> Result<U256, ProgramError> {
Ok(U256::from_big_endian(&self.inputs.block_hashes.cur_hash.0))
}

/// Account code.
fn run_account_code(&mut self, input_fn: &ProverInputFn) -> U256 {
fn run_account_code(&mut self, input_fn: &ProverInputFn) -> Result<U256, ProgramError> {
match input_fn.0[1].as_str() {
"length" => {
// Return length of code.
// stack: codehash, ...
let codehash = stack_peek(self, 0).expect("Empty stack");
self.inputs
let codehash = stack_peek(self, 0)?;
Ok(self
.inputs
.contract_code
.get(&H256::from_uint(&codehash))
.unwrap_or_else(|| panic!("No code found with hash {codehash}"))
.ok_or(ProgramError::ProverInputError(CodeHashNotFound))?
.len()
.into()
.into())
}
"get" => {
// Return `code[i]`.
// stack: i, code_length, codehash, ...
let i = stack_peek(self, 0).expect("Unexpected stack").as_usize();
let codehash = stack_peek(self, 2).expect("Unexpected stack");
self.inputs
let i = stack_peek(self, 0).map(u256_to_usize)??;
let codehash = stack_peek(self, 2)?;
Ok(self
.inputs
.contract_code
.get(&H256::from_uint(&codehash))
.unwrap_or_else(|| panic!("No code found with hash {codehash}"))[i]
.into()
.ok_or(ProgramError::ProverInputError(CodeHashNotFound))?[i]
.into())
}
_ => panic!("Invalid prover input function."),
_ => Err(ProgramError::ProverInputError(InvalidInput)),
}
}

@ -156,24 +166,12 @@ impl<F: Field> GenerationState<F> {
// On the first call, calculates the remainder and quotient of the given inputs.
// These are stored, as limbs, in self.bignum_modmul_result_limbs.
// Subsequent calls return one limb at a time, in order (first remainder and then quotient).
fn run_bignum_modmul(&mut self) -> U256 {
fn run_bignum_modmul(&mut self) -> Result<U256, ProgramError> {
if self.bignum_modmul_result_limbs.is_empty() {
let len = stack_peek(self, 1)
.expect("Stack does not have enough items")
.try_into()
.unwrap();
let a_start_loc = stack_peek(self, 2)
.expect("Stack does not have enough items")
.try_into()
.unwrap();
let b_start_loc = stack_peek(self, 3)
.expect("Stack does not have enough items")
.try_into()
.unwrap();
let m_start_loc = stack_peek(self, 4)
.expect("Stack does not have enough items")
.try_into()
.unwrap();
let len = stack_peek(self, 1).map(u256_to_usize)??;
let a_start_loc = stack_peek(self, 2).map(u256_to_usize)??;
let b_start_loc = stack_peek(self, 3).map(u256_to_usize)??;
let m_start_loc = stack_peek(self, 4).map(u256_to_usize)??;

let (remainder, quotient) =
self.bignum_modmul(len, a_start_loc, b_start_loc, m_start_loc);
@ -187,7 +185,9 @@ impl<F: Field> GenerationState<F> {
self.bignum_modmul_result_limbs.reverse();
}

self.bignum_modmul_result_limbs.pop().unwrap()
self.bignum_modmul_result_limbs
.pop()
.ok_or(ProgramError::ProverInputError(InvalidInput))
}

fn bignum_modmul(
@ -284,27 +284,33 @@ impl EvmField {
}
}

fn op(&self, op: FieldOp, x: U256) -> U256 {
fn op(&self, op: FieldOp, x: U256) -> Result<U256, ProgramError> {
match op {
FieldOp::Inverse => self.inverse(x),
FieldOp::Sqrt => self.sqrt(x),
}
}

fn inverse(&self, x: U256) -> U256 {
fn inverse(&self, x: U256) -> Result<U256, ProgramError> {
let n = self.order();
assert!(x < n);
if x >= n {
return Err(ProgramError::ProverInputError(InvalidInput));
};
modexp(x, n - 2, n)
}

fn sqrt(&self, x: U256) -> U256 {
fn sqrt(&self, x: U256) -> Result<U256, ProgramError> {
let n = self.order();
assert!(x < n);
if x >= n {
return Err(ProgramError::ProverInputError(InvalidInput));
};
let (q, r) = (n + 1).div_mod(4.into());
assert!(
r.is_zero(),
"Only naive sqrt implementation for now. If needed implement Tonelli-Shanks."
);

if !r.is_zero() {
return Err(ProgramError::ProverInputError(InvalidInput));
};

// Only naive sqrt implementation for now. If needed implement Tonelli-Shanks
modexp(x, q, n)
}

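The `sqrt` above relies on a number-theoretic shortcut: for a prime n with n ≡ 3 (mod 4), a square root of a quadratic residue x is x^((n+1)/4) mod n. That is why the code insists that n + 1 be divisible by 4 and otherwise reports an error instead of falling back to Tonelli-Shanks. A small numeric check of the identity, using u64 arithmetic and a small prime purely for illustration:

/// Square-and-multiply modular exponentiation over u64 (inputs kept small
/// enough in this demo that u64 multiplication cannot overflow).
fn modexp(mut x: u64, mut e: u64, n: u64) -> u64 {
    let mut acc = 1;
    while e > 0 {
        if e & 1 == 1 {
            acc = acc * x % n;
        }
        x = x * x % n;
        e >>= 1;
    }
    acc
}

fn main() {
    let n = 23; // prime, and 23 % 4 == 3
    let x = 13; // 13 = 6^2 mod 23, so it is a quadratic residue
    let r = modexp(x, (n + 1) / 4, n);
    assert_eq!(r * r % n, x); // r is indeed a square root of x mod n
}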
@ -363,15 +369,18 @@ impl EvmField {
}
}

fn modexp(x: U256, e: U256, n: U256) -> U256 {
fn modexp(x: U256, e: U256, n: U256) -> Result<U256, ProgramError> {
let mut current = x;
let mut product = U256::one();

for j in 0..256 {
if e.bit(j) {
product = U256::try_from(product.full_mul(current) % n).unwrap();
product = U256::try_from(product.full_mul(current) % n)
.map_err(|_| ProgramError::ProverInputError(InvalidInput))?;
}
current = U256::try_from(current.full_mul(current) % n).unwrap();
current = U256::try_from(current.full_mul(current) % n)
.map_err(|_| ProgramError::ProverInputError(InvalidInput))?;
}
product

Ok(product)
}

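modexp above scans all 256 exponent bits and widens each product to 512 bits via full_mul before reducing mod n; the try_from back to U256 only fails if the reduced value somehow does not fit, hence the error mapping. The u64/u128 sketch below mirrors that widen-multiply-reduce-narrow step with primitive types:

/// Multiply two residues without overflow by widening to u128 first,
/// reducing mod n, then narrowing back; the same shape as
/// `U256::try_from(product.full_mul(current) % n)` in the diff above.
fn mul_mod(a: u64, b: u64, n: u64) -> u64 {
    let wide = (a as u128) * (b as u128) % (n as u128);
    // The remainder is < n <= u64::MAX, so this conversion cannot fail here;
    // with U256/U512 the analogous try_from is where a failure would surface.
    u64::try_from(wide).unwrap()
}

fn main() {
    let n = u64::MAX - 58; // large modulus; products overflow u64 but not u128
    assert_eq!(mul_mod(n - 1, n - 1, n), 1); // (-1) * (-1) == 1 (mod n)
}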
@ -10,6 +10,8 @@ use crate::generation::mpt::all_mpt_prover_inputs_reversed;
use crate::generation::rlp::all_rlp_prover_inputs_reversed;
use crate::generation::GenerationInputs;
use crate::memory::segments::Segment;
use crate::util::u256_to_usize;
use crate::witness::errors::ProgramError;
use crate::witness::memory::{MemoryAddress, MemoryState};
use crate::witness::state::RegistersState;
use crate::witness::traces::{TraceCheckpoint, Traces};
@ -49,7 +51,7 @@ pub(crate) struct GenerationState<F: Field> {
}

impl<F: Field> GenerationState<F> {
pub(crate) fn new(inputs: GenerationInputs, kernel_code: &[u8]) -> Self {
pub(crate) fn new(inputs: GenerationInputs, kernel_code: &[u8]) -> Result<Self, ProgramError> {
log::debug!("Input signed_txns: {:?}", &inputs.signed_txns);
log::debug!("Input state_trie: {:?}", &inputs.tries.state_trie);
log::debug!(
@ -59,11 +61,11 @@ impl<F: Field> GenerationState<F> {
log::debug!("Input receipts_trie: {:?}", &inputs.tries.receipts_trie);
log::debug!("Input storage_tries: {:?}", &inputs.tries.storage_tries);
log::debug!("Input contract_code: {:?}", &inputs.contract_code);
let mpt_prover_inputs = all_mpt_prover_inputs_reversed(&inputs.tries);
let mpt_prover_inputs = all_mpt_prover_inputs_reversed(&inputs.tries)?;
let rlp_prover_inputs = all_rlp_prover_inputs_reversed(&inputs.signed_txns);
let bignum_modmul_result_limbs = Vec::new();

Self {
Ok(Self {
inputs,
registers: Default::default(),
memory: MemoryState::new(kernel_code),
@ -73,23 +75,25 @@ impl<F: Field> GenerationState<F> {
rlp_prover_inputs,
state_key_to_address: HashMap::new(),
bignum_modmul_result_limbs,
}
})
}

/// Updates `program_counter`, and potentially adds some extra handling if we're jumping to a
/// special location.
pub fn jump_to(&mut self, dst: usize) {
pub fn jump_to(&mut self, dst: usize) -> Result<(), ProgramError> {
self.registers.program_counter = dst;
if dst == KERNEL.global_labels["observe_new_address"] {
let tip_u256 = stack_peek(self, 0).expect("Empty stack");
let tip_u256 = stack_peek(self, 0)?;
let tip_h256 = H256::from_uint(&tip_u256);
let tip_h160 = H160::from(tip_h256);
self.observe_address(tip_h160);
} else if dst == KERNEL.global_labels["observe_new_contract"] {
let tip_u256 = stack_peek(self, 0).expect("Empty stack");
let tip_u256 = stack_peek(self, 0)?;
let tip_h256 = H256::from_uint(&tip_u256);
self.observe_contract(tip_h256);
self.observe_contract(tip_h256)?;
}

Ok(())
}

/// Observe the given address, so that we will be able to recognize the associated state key.
@ -101,9 +105,9 @@ impl<F: Field> GenerationState<F> {

/// Observe the given code hash and store the associated code.
/// When called, the code corresponding to `codehash` should be stored in the return data.
pub fn observe_contract(&mut self, codehash: H256) {
pub fn observe_contract(&mut self, codehash: H256) -> Result<(), ProgramError> {
if self.inputs.contract_code.contains_key(&codehash) {
return; // Return early if the code hash has already been observed.
return Ok(()); // Return early if the code hash has already been observed.
}

let ctx = self.registers.context;
@ -112,7 +116,7 @@ impl<F: Field> GenerationState<F> {
Segment::ContextMetadata,
ContextMetadata::ReturndataSize as usize,
);
let returndata_size = self.memory.get(returndata_size_addr).as_usize();
let returndata_size = u256_to_usize(self.memory.get(returndata_size_addr))?;
let code = self.memory.contexts[ctx].segments[Segment::Returndata as usize].content
[..returndata_size]
.iter()
@ -121,6 +125,8 @@ impl<F: Field> GenerationState<F> {
debug_assert_eq!(keccak(&code), codehash);

self.inputs.contract_code.insert(codehash, code);

Ok(())
}

pub fn checkpoint(&self) -> GenerationStateCheckpoint {

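Several call sites above replace `.as_usize()` (which panics on overflow) with the fallible u256_to_usize helper from crate::util. A plausible shape for such a helper, sketched here against ethereum_types::U256; the real helper may be written differently, and the error variant name is borrowed from the diff for illustration:

use ethereum_types::U256; // requires the `ethereum-types` crate

#[derive(Debug)]
enum ProgramError {
    IntegerTooLarge,
}

/// Fallible narrowing from U256 to usize: the panicking `as_usize()` is
/// only reached once we know the value fits.
fn u256_to_usize(v: U256) -> Result<usize, ProgramError> {
    if v > U256::from(usize::MAX as u64) {
        return Err(ProgramError::IntegerTooLarge);
    }
    Ok(v.as_usize())
}

fn main() {
    assert_eq!(u256_to_usize(U256::from(7u64)).unwrap(), 7);
    assert!(u256_to_usize(U256::MAX).is_err());
}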
@ -7,6 +7,8 @@ use ethereum_types::{BigEndianHash, H256, U256, U512};

use crate::cpu::kernel::constants::trie_type::PartialTrieType;
use crate::memory::segments::Segment;
use crate::util::u256_to_usize;
use crate::witness::errors::ProgramError;
use crate::witness::memory::{MemoryAddress, MemoryState};

/// Account data as it's stored in the state trie, with a pointer to the storage trie.
@ -18,13 +20,13 @@ pub(crate) struct AccountTrieRecord {
pub(crate) code_hash: H256,
}

pub(crate) fn read_state_trie_value(slice: &[U256]) -> AccountTrieRecord {
AccountTrieRecord {
pub(crate) fn read_state_trie_value(slice: &[U256]) -> Result<AccountTrieRecord, ProgramError> {
Ok(AccountTrieRecord {
nonce: slice[0].low_u64(),
balance: slice[1],
storage_ptr: slice[2].as_usize(),
storage_ptr: u256_to_usize(slice[2])?,
code_hash: H256::from_uint(&slice[3]),
}
})
}

pub(crate) fn read_storage_trie_value(slice: &[U256]) -> U256 {
@ -34,72 +36,76 @@ pub(crate) fn read_storage_trie_value(slice: &[U256]) -> U256 {
pub(crate) fn read_trie<V>(
memory: &MemoryState,
ptr: usize,
read_value: fn(&[U256]) -> V,
) -> HashMap<Nibbles, V> {
read_value: fn(&[U256]) -> Result<V, ProgramError>,
) -> Result<HashMap<Nibbles, V>, ProgramError> {
let mut res = HashMap::new();
let empty_nibbles = Nibbles {
count: 0,
packed: U512::zero(),
};
read_trie_helper::<V>(memory, ptr, read_value, empty_nibbles, &mut res);
res
read_trie_helper::<V>(memory, ptr, read_value, empty_nibbles, &mut res)?;
Ok(res)
}

pub(crate) fn read_trie_helper<V>(
memory: &MemoryState,
ptr: usize,
read_value: fn(&[U256]) -> V,
read_value: fn(&[U256]) -> Result<V, ProgramError>,
prefix: Nibbles,
res: &mut HashMap<Nibbles, V>,
) {
) -> Result<(), ProgramError> {
let load = |offset| memory.get(MemoryAddress::new(0, Segment::TrieData, offset));
let load_slice_from = |init_offset| {
&memory.contexts[0].segments[Segment::TrieData as usize].content[init_offset..]
};

let trie_type = PartialTrieType::all()[load(ptr).as_usize()];
let trie_type = PartialTrieType::all()[u256_to_usize(load(ptr))?];
match trie_type {
PartialTrieType::Empty => {}
PartialTrieType::Hash => {}
PartialTrieType::Empty => Ok(()),
PartialTrieType::Hash => Ok(()),
PartialTrieType::Branch => {
let ptr_payload = ptr + 1;
for i in 0u8..16 {
let child_ptr = load(ptr_payload + i as usize).as_usize();
read_trie_helper::<V>(memory, child_ptr, read_value, prefix.merge_nibble(i), res);
let child_ptr = u256_to_usize(load(ptr_payload + i as usize))?;
read_trie_helper::<V>(memory, child_ptr, read_value, prefix.merge_nibble(i), res)?;
}
let value_ptr = load(ptr_payload + 16).as_usize();
let value_ptr = u256_to_usize(load(ptr_payload + 16))?;
if value_ptr != 0 {
res.insert(prefix, read_value(load_slice_from(value_ptr)));
res.insert(prefix, read_value(load_slice_from(value_ptr))?);
};

Ok(())
}
PartialTrieType::Extension => {
let count = load(ptr + 1).as_usize();
let count = u256_to_usize(load(ptr + 1))?;
let packed = load(ptr + 2);
let nibbles = Nibbles {
count,
packed: packed.into(),
};
let child_ptr = load(ptr + 3).as_usize();
let child_ptr = u256_to_usize(load(ptr + 3))?;
read_trie_helper::<V>(
memory,
child_ptr,
read_value,
prefix.merge_nibbles(&nibbles),
res,
);
)
}
PartialTrieType::Leaf => {
let count = load(ptr + 1).as_usize();
let count = u256_to_usize(load(ptr + 1))?;
let packed = load(ptr + 2);
let nibbles = Nibbles {
count,
packed: packed.into(),
};
let value_ptr = load(ptr + 3).as_usize();
let value_ptr = u256_to_usize(load(ptr + 3))?;
res.insert(
prefix.merge_nibbles(&nibbles),
read_value(load_slice_from(value_ptr)),
read_value(load_slice_from(value_ptr))?,
);

Ok(())
}
}
}

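read_state_trie_value above decodes an account from four consecutive words of trie data, so the only failure mode is a narrowing conversion such as storage_ptr. A toy version of that fixed-layout decode, with the field order from the diff and types simplified to u64 (the struct and error names are illustrative):

#[derive(Debug)]
enum ProgramError {
    IntegerTooLarge,
}

#[derive(Debug, PartialEq)]
struct AccountRecord {
    nonce: u64,
    balance: u64,
    storage_ptr: usize,
    code_hash: u64,
}

/// Decode a fixed layout: slot 0 = nonce, 1 = balance, 2 = storage pointer,
/// 3 = code hash, mirroring the slice indexing in read_state_trie_value.
fn read_record(slice: &[u64]) -> Result<AccountRecord, ProgramError> {
    Ok(AccountRecord {
        nonce: slice[0],
        balance: slice[1],
        storage_ptr: usize::try_from(slice[2]).map_err(|_| ProgramError::IntegerTooLarge)?,
        code_hash: slice[3],
    })
}

fn main() {
    let rec = read_record(&[1, 100, 8, 0xabcd]).unwrap();
    assert_eq!(rec.storage_ptr, 8);
}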
@ -64,6 +64,7 @@ fn observe_block_metadata<
challenger.observe_element(u256_to_u32(block_metadata.block_timestamp)?);
challenger.observe_element(u256_to_u32(block_metadata.block_number)?);
challenger.observe_element(u256_to_u32(block_metadata.block_difficulty)?);
challenger.observe_elements(&h256_limbs::<F>(block_metadata.block_random));
challenger.observe_element(u256_to_u32(block_metadata.block_gaslimit)?);
challenger.observe_element(u256_to_u32(block_metadata.block_chain_id)?);
let basefee = u256_to_u64(block_metadata.block_base_fee)?;
@ -91,6 +92,7 @@ fn observe_block_metadata_target<
challenger.observe_element(block_metadata.block_timestamp);
challenger.observe_element(block_metadata.block_number);
challenger.observe_element(block_metadata.block_difficulty);
challenger.observe_elements(&block_metadata.block_random);
challenger.observe_element(block_metadata.block_gaslimit);
challenger.observe_element(block_metadata.block_chain_id);
challenger.observe_elements(&block_metadata.block_base_fee);

@ -6,12 +6,14 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::plonk_common::reduce_with_powers_ext_circuit;
use plonky2::timed;
use plonky2::util::timing::TimingTree;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak::columns::{
reg_a, reg_a_prime, reg_a_prime_prime, reg_a_prime_prime_0_0_bit, reg_a_prime_prime_prime,
reg_b, reg_c, reg_c_prime, reg_input_limb, reg_output_limb, reg_preimage, reg_step,
@ -24,7 +26,6 @@ use crate::keccak::logic::{
use crate::keccak::round_flags::{eval_round_flags, eval_round_flags_recursively};
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// Number of rounds in a Keccak permutation.
pub(crate) const NUM_ROUNDS: usize = 24;
@ -239,11 +240,16 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;

type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;

fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -251,33 +257,34 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
{
eval_round_flags(vars, yield_constr);

let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

// The filter must be 0 or 1.
let filter = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let filter = local_values[reg_step(NUM_ROUNDS - 1)];
yield_constr.constraint(filter * (filter - P::ONES));

// If this is not the final step, the filter must be off.
let final_step = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let final_step = local_values[reg_step(NUM_ROUNDS - 1)];
let not_final_step = P::ONES - final_step;
yield_constr.constraint(not_final_step * filter);

// If this is not the final step, the local and next preimages must match.
// Also, if this is the first step, the preimage must match A.
let is_first_step = vars.local_values[reg_step(0)];
let is_first_step = local_values[reg_step(0)];
for x in 0..5 {
for y in 0..5 {
let reg_preimage_lo = reg_preimage(x, y);
let reg_preimage_hi = reg_preimage_lo + 1;
let diff_lo =
vars.local_values[reg_preimage_lo] - vars.next_values[reg_preimage_lo];
let diff_hi =
vars.local_values[reg_preimage_hi] - vars.next_values[reg_preimage_hi];
let diff_lo = local_values[reg_preimage_lo] - next_values[reg_preimage_lo];
let diff_hi = local_values[reg_preimage_hi] - next_values[reg_preimage_hi];
yield_constr.constraint_transition(not_final_step * diff_lo);
yield_constr.constraint_transition(not_final_step * diff_hi);

let reg_a_lo = reg_a(x, y);
let reg_a_hi = reg_a_lo + 1;
let diff_lo = vars.local_values[reg_preimage_lo] - vars.local_values[reg_a_lo];
let diff_hi = vars.local_values[reg_preimage_hi] - vars.local_values[reg_a_hi];
let diff_lo = local_values[reg_preimage_lo] - local_values[reg_a_lo];
let diff_hi = local_values[reg_preimage_hi] - local_values[reg_a_hi];
yield_constr.constraint(is_first_step * diff_lo);
yield_constr.constraint(is_first_step * diff_hi);
}
@ -287,11 +294,11 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for x in 0..5 {
for z in 0..64 {
let xor = xor3_gen(
vars.local_values[reg_c(x, z)],
vars.local_values[reg_c((x + 4) % 5, z)],
vars.local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
local_values[reg_c(x, z)],
local_values[reg_c((x + 4) % 5, z)],
local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
);
let c_prime = vars.local_values[reg_c_prime(x, z)];
let c_prime = local_values[reg_c_prime(x, z)];
yield_constr.constraint(c_prime - xor);
}
}
@ -304,12 +311,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
// It isn't required, but makes this check a bit cleaner.
for x in 0..5 {
for y in 0..5 {
let a_lo = vars.local_values[reg_a(x, y)];
let a_hi = vars.local_values[reg_a(x, y) + 1];
let a_lo = local_values[reg_a(x, y)];
let a_hi = local_values[reg_a(x, y) + 1];
let get_bit = |z| {
let a_prime = vars.local_values[reg_a_prime(x, y, z)];
let c = vars.local_values[reg_c(x, z)];
let c_prime = vars.local_values[reg_c_prime(x, z)];
let a_prime = local_values[reg_a_prime(x, y, z)];
let c = local_values[reg_c(x, z)];
let c_prime = local_values[reg_c_prime(x, z)];
xor3_gen(a_prime, c, c_prime)
};
let computed_lo = (0..32)
@ -329,10 +336,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for x in 0..5 {
for z in 0..64 {
let sum: P = [0, 1, 2, 3, 4]
.map(|i| vars.local_values[reg_a_prime(x, i, z)])
.map(|i| local_values[reg_a_prime(x, i, z)])
.into_iter()
.sum();
let diff = sum - vars.local_values[reg_c_prime(x, z)];
let diff = sum - local_values[reg_c_prime(x, z)];
yield_constr
.constraint(diff * (diff - FE::TWO) * (diff - FE::from_canonical_u8(4)));
}
@ -343,18 +350,18 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for y in 0..5 {
let get_bit = |z| {
xor_gen(
vars.local_values[reg_b(x, y, z)],
local_values[reg_b(x, y, z)],
andn_gen(
vars.local_values[reg_b((x + 1) % 5, y, z)],
vars.local_values[reg_b((x + 2) % 5, y, z)],
local_values[reg_b((x + 1) % 5, y, z)],
local_values[reg_b((x + 2) % 5, y, z)],
),
)
};

let reg_lo = reg_a_prime_prime(x, y);
let reg_hi = reg_lo + 1;
let lo = vars.local_values[reg_lo];
let hi = vars.local_values[reg_hi];
let lo = local_values[reg_lo];
let hi = local_values[reg_hi];
let computed_lo = (0..32)
.rev()
.fold(P::ZEROS, |acc, z| acc.doubles() + get_bit(z));
@ -369,7 +376,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F

// A'''[0, 0] = A''[0, 0] XOR RC
let a_prime_prime_0_0_bits = (0..64)
.map(|i| vars.local_values[reg_a_prime_prime_0_0_bit(i)])
.map(|i| local_values[reg_a_prime_prime_0_0_bit(i)])
.collect_vec();
let computed_a_prime_prime_0_0_lo = (0..32)
.rev()
@ -377,15 +384,15 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
let computed_a_prime_prime_0_0_hi = (32..64)
.rev()
.fold(P::ZEROS, |acc, z| acc.doubles() + a_prime_prime_0_0_bits[z]);
let a_prime_prime_0_0_lo = vars.local_values[reg_a_prime_prime(0, 0)];
let a_prime_prime_0_0_hi = vars.local_values[reg_a_prime_prime(0, 0) + 1];
let a_prime_prime_0_0_lo = local_values[reg_a_prime_prime(0, 0)];
let a_prime_prime_0_0_hi = local_values[reg_a_prime_prime(0, 0) + 1];
yield_constr.constraint(computed_a_prime_prime_0_0_lo - a_prime_prime_0_0_lo);
yield_constr.constraint(computed_a_prime_prime_0_0_hi - a_prime_prime_0_0_hi);

let get_xored_bit = |i| {
let mut rc_bit_i = P::ZEROS;
for r in 0..NUM_ROUNDS {
let this_round = vars.local_values[reg_step(r)];
let this_round = local_values[reg_step(r)];
let this_round_constant =
P::from(FE::from_canonical_u32(rc_value_bit(r, i) as u32));
rc_bit_i += this_round * this_round_constant;
@ -394,8 +401,8 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
xor_gen(a_prime_prime_0_0_bits[i], rc_bit_i)
};

let a_prime_prime_prime_0_0_lo = vars.local_values[reg_a_prime_prime_prime(0, 0)];
let a_prime_prime_prime_0_0_hi = vars.local_values[reg_a_prime_prime_prime(0, 0) + 1];
let a_prime_prime_prime_0_0_lo = local_values[reg_a_prime_prime_prime(0, 0)];
let a_prime_prime_prime_0_0_hi = local_values[reg_a_prime_prime_prime(0, 0) + 1];
let computed_a_prime_prime_prime_0_0_lo = (0..32)
.rev()
.fold(P::ZEROS, |acc, z| acc.doubles() + get_xored_bit(z));
@ -408,11 +415,11 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
// Enforce that this round's output equals the next round's input.
for x in 0..5 {
for y in 0..5 {
let output_lo = vars.local_values[reg_a_prime_prime_prime(x, y)];
let output_hi = vars.local_values[reg_a_prime_prime_prime(x, y) + 1];
let input_lo = vars.next_values[reg_a(x, y)];
let input_hi = vars.next_values[reg_a(x, y) + 1];
let is_last_round = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let output_lo = local_values[reg_a_prime_prime_prime(x, y)];
let output_hi = local_values[reg_a_prime_prime_prime(x, y) + 1];
let input_lo = next_values[reg_a(x, y)];
let input_hi = next_values[reg_a(x, y) + 1];
let is_last_round = local_values[reg_step(NUM_ROUNDS - 1)];
let not_last_round = P::ONES - is_last_round;
yield_constr.constraint_transition(not_last_round * (output_lo - input_lo));
yield_constr.constraint_transition(not_last_round * (output_hi - input_hi));
@ -423,7 +430,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
vars: &Self::EvaluationFrameTarget,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one_ext = builder.one_extension();
@ -433,49 +440,44 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F

eval_round_flags_recursively(builder, vars, yield_constr);

let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

// The filter must be 0 or 1.
let filter = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let filter = local_values[reg_step(NUM_ROUNDS - 1)];
let constraint = builder.mul_sub_extension(filter, filter, filter);
yield_constr.constraint(builder, constraint);

// If this is not the final step, the filter must be off.
let final_step = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let final_step = local_values[reg_step(NUM_ROUNDS - 1)];
let not_final_step = builder.sub_extension(one_ext, final_step);
let constraint = builder.mul_extension(not_final_step, filter);
yield_constr.constraint(builder, constraint);

// If this is not the final step, the local and next preimages must match.
// Also, if this is the first step, the preimage must match A.
let is_first_step = vars.local_values[reg_step(0)];
let is_first_step = local_values[reg_step(0)];
for x in 0..5 {
for y in 0..5 {
let reg_preimage_lo = reg_preimage(x, y);
let reg_preimage_hi = reg_preimage_lo + 1;
let diff = builder.sub_extension(
vars.local_values[reg_preimage_lo],
vars.next_values[reg_preimage_lo],
);
let diff = builder
.sub_extension(local_values[reg_preimage_lo], next_values[reg_preimage_lo]);
let constraint = builder.mul_extension(not_final_step, diff);
yield_constr.constraint_transition(builder, constraint);
let diff = builder.sub_extension(
vars.local_values[reg_preimage_hi],
vars.next_values[reg_preimage_hi],
);
let diff = builder
.sub_extension(local_values[reg_preimage_hi], next_values[reg_preimage_hi]);
let constraint = builder.mul_extension(not_final_step, diff);
yield_constr.constraint_transition(builder, constraint);

let reg_a_lo = reg_a(x, y);
let reg_a_hi = reg_a_lo + 1;
let diff_lo = builder.sub_extension(
vars.local_values[reg_preimage_lo],
vars.local_values[reg_a_lo],
);
let diff_lo =
builder.sub_extension(local_values[reg_preimage_lo], local_values[reg_a_lo]);
let constraint = builder.mul_extension(is_first_step, diff_lo);
yield_constr.constraint(builder, constraint);
let diff_hi = builder.sub_extension(
vars.local_values[reg_preimage_hi],
vars.local_values[reg_a_hi],
);
let diff_hi =
builder.sub_extension(local_values[reg_preimage_hi], local_values[reg_a_hi]);
let constraint = builder.mul_extension(is_first_step, diff_hi);
yield_constr.constraint(builder, constraint);
}
@ -486,11 +488,11 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for z in 0..64 {
let xor = xor3_gen_circuit(
builder,
vars.local_values[reg_c(x, z)],
vars.local_values[reg_c((x + 4) % 5, z)],
vars.local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
local_values[reg_c(x, z)],
local_values[reg_c((x + 4) % 5, z)],
local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
);
let c_prime = vars.local_values[reg_c_prime(x, z)];
let c_prime = local_values[reg_c_prime(x, z)];
let diff = builder.sub_extension(c_prime, xor);
yield_constr.constraint(builder, diff);
}
@ -504,12 +506,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
// It isn't required, but makes this check a bit cleaner.
for x in 0..5 {
for y in 0..5 {
let a_lo = vars.local_values[reg_a(x, y)];
let a_hi = vars.local_values[reg_a(x, y) + 1];
let a_lo = local_values[reg_a(x, y)];
let a_hi = local_values[reg_a(x, y) + 1];
let mut get_bit = |z| {
let a_prime = vars.local_values[reg_a_prime(x, y, z)];
let c = vars.local_values[reg_c(x, z)];
let c_prime = vars.local_values[reg_c_prime(x, z)];
let a_prime = local_values[reg_a_prime(x, y, z)];
let c = local_values[reg_c(x, z)];
let c_prime = local_values[reg_c_prime(x, z)];
xor3_gen_circuit(builder, a_prime, c, c_prime)
};
let bits_lo = (0..32).map(&mut get_bit).collect_vec();
@ -529,9 +531,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
for x in 0..5 {
for z in 0..64 {
let sum = builder.add_many_extension(
[0, 1, 2, 3, 4].map(|i| vars.local_values[reg_a_prime(x, i, z)]),
[0, 1, 2, 3, 4].map(|i| local_values[reg_a_prime(x, i, z)]),
);
let diff = builder.sub_extension(sum, vars.local_values[reg_c_prime(x, z)]);
let diff = builder.sub_extension(sum, local_values[reg_c_prime(x, z)]);
let diff_minus_two = builder.sub_extension(diff, two_ext);
let diff_minus_four = builder.sub_extension(diff, four_ext);
let constraint =
@ -546,16 +548,16 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
let mut get_bit = |z| {
let andn = andn_gen_circuit(
builder,
vars.local_values[reg_b((x + 1) % 5, y, z)],
vars.local_values[reg_b((x + 2) % 5, y, z)],
local_values[reg_b((x + 1) % 5, y, z)],
local_values[reg_b((x + 2) % 5, y, z)],
);
xor_gen_circuit(builder, vars.local_values[reg_b(x, y, z)], andn)
xor_gen_circuit(builder, local_values[reg_b(x, y, z)], andn)
};

let reg_lo = reg_a_prime_prime(x, y);
let reg_hi = reg_lo + 1;
let lo = vars.local_values[reg_lo];
let hi = vars.local_values[reg_hi];
let lo = local_values[reg_lo];
let hi = local_values[reg_hi];
let bits_lo = (0..32).map(&mut get_bit).collect_vec();
let bits_hi = (32..64).map(get_bit).collect_vec();
let computed_lo = reduce_with_powers_ext_circuit(builder, &bits_lo, two);
@ -569,14 +571,14 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F

// A'''[0, 0] = A''[0, 0] XOR RC
let a_prime_prime_0_0_bits = (0..64)
.map(|i| vars.local_values[reg_a_prime_prime_0_0_bit(i)])
.map(|i| local_values[reg_a_prime_prime_0_0_bit(i)])
.collect_vec();
let computed_a_prime_prime_0_0_lo =
reduce_with_powers_ext_circuit(builder, &a_prime_prime_0_0_bits[0..32], two);
let computed_a_prime_prime_0_0_hi =
reduce_with_powers_ext_circuit(builder, &a_prime_prime_0_0_bits[32..64], two);
let a_prime_prime_0_0_lo = vars.local_values[reg_a_prime_prime(0, 0)];
let a_prime_prime_0_0_hi = vars.local_values[reg_a_prime_prime(0, 0) + 1];
let a_prime_prime_0_0_lo = local_values[reg_a_prime_prime(0, 0)];
let a_prime_prime_0_0_hi = local_values[reg_a_prime_prime(0, 0) + 1];
let diff = builder.sub_extension(computed_a_prime_prime_0_0_lo, a_prime_prime_0_0_lo);
yield_constr.constraint(builder, diff);
let diff = builder.sub_extension(computed_a_prime_prime_0_0_hi, a_prime_prime_0_0_hi);
@ -585,7 +587,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
let mut get_xored_bit = |i| {
let mut rc_bit_i = builder.zero_extension();
for r in 0..NUM_ROUNDS {
let this_round = vars.local_values[reg_step(r)];
let this_round = local_values[reg_step(r)];
let this_round_constant = builder
.constant_extension(F::from_canonical_u32(rc_value_bit(r, i) as u32).into());
rc_bit_i = builder.mul_add_extension(this_round, this_round_constant, rc_bit_i);
@ -594,8 +596,8 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
xor_gen_circuit(builder, a_prime_prime_0_0_bits[i], rc_bit_i)
};

let a_prime_prime_prime_0_0_lo = vars.local_values[reg_a_prime_prime_prime(0, 0)];
let a_prime_prime_prime_0_0_hi = vars.local_values[reg_a_prime_prime_prime(0, 0) + 1];
let a_prime_prime_prime_0_0_lo = local_values[reg_a_prime_prime_prime(0, 0)];
let a_prime_prime_prime_0_0_hi = local_values[reg_a_prime_prime_prime(0, 0) + 1];
let bits_lo = (0..32).map(&mut get_xored_bit).collect_vec();
let bits_hi = (32..64).map(get_xored_bit).collect_vec();
let computed_a_prime_prime_prime_0_0_lo =
@ -616,11 +618,11 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
// Enforce that this round's output equals the next round's input.
for x in 0..5 {
for y in 0..5 {
let output_lo = vars.local_values[reg_a_prime_prime_prime(x, y)];
let output_hi = vars.local_values[reg_a_prime_prime_prime(x, y) + 1];
let input_lo = vars.next_values[reg_a(x, y)];
let input_hi = vars.next_values[reg_a(x, y) + 1];
let is_last_round = vars.local_values[reg_step(NUM_ROUNDS - 1)];
let output_lo = local_values[reg_a_prime_prime_prime(x, y)];
let output_hi = local_values[reg_a_prime_prime_prime(x, y) + 1];
let input_lo = next_values[reg_a(x, y)];
let input_hi = next_values[reg_a(x, y) + 1];
let is_last_round = local_values[reg_step(NUM_ROUNDS - 1)];
let diff = builder.sub_extension(input_lo, output_lo);
let filtered_diff = builder.mul_sub_extension(is_last_round, diff, diff);
yield_constr.constraint_transition(builder, filtered_diff);
@ -696,7 +698,7 @@ mod tests {
f: Default::default(),
};

let rows = stark.generate_trace_rows(vec![input.try_into().unwrap()], 8);
let rows = stark.generate_trace_rows(vec![input], 8);
let last_row = rows[NUM_ROUNDS - 1];
let output = (0..NUM_INPUTS)
.map(|i| {
@ -735,7 +737,7 @@ mod tests {
let trace_poly_values = timed!(
timing,
"generate trace",
stark.generate_trace(input.try_into().unwrap(), 8, &mut timing)
stark.generate_trace(input, 8, &mut timing)
);

// TODO: Cloning this isn't great; consider having `from_values` accept a reference,

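The mechanical change running through eval_packed_generic and eval_ext_circuit above is swapping the old vars.local_values / vars.next_values field accesses for StarkFrame's accessor methods, binding the two row slices once at the top of each function. A minimal sketch of what such an evaluation frame might look like; the names match the diff, but the real StarkFrame in crate::evaluation_frame is richer than this:

/// A two-row window over a trace: the constrained row and its successor.
struct StarkFrame<T, const N: usize> {
    local_values: [T; N],
    next_values: [T; N],
}

impl<T, const N: usize> StarkFrame<T, N> {
    fn get_local_values(&self) -> &[T] {
        &self.local_values
    }
    fn get_next_values(&self) -> &[T] {
        &self.next_values
    }
}

fn main() {
    let frame = StarkFrame {
        local_values: [1u64, 2, 3],
        next_values: [4, 5, 6],
    };
    // Constraint code binds the slices once and then indexes them, instead
    // of reaching through `vars.local_values[..]` at every use site.
    let local_values = frame.get_local_values();
    let next_values = frame.get_next_values();
    assert_eq!(local_values[0] + next_values[0], 5);
}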
@ -2,60 +2,64 @@ use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak::columns::{reg_step, NUM_COLUMNS};
use crate::keccak::keccak_stark::NUM_ROUNDS;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_COLUMNS>,
vars: &StarkFrame<P, NUM_COLUMNS>,
yield_constr: &mut ConstraintConsumer<P>,
) {
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

// Initially, the first step flag should be 1 while the others should be 0.
yield_constr.constraint_first_row(vars.local_values[reg_step(0)] - F::ONE);
yield_constr.constraint_first_row(local_values[reg_step(0)] - F::ONE);
for i in 1..NUM_ROUNDS {
yield_constr.constraint_first_row(vars.local_values[reg_step(i)]);
yield_constr.constraint_first_row(local_values[reg_step(i)]);
}

// Flags should circularly increment, or be all zero for padding rows.
let next_any_flag = (0..NUM_ROUNDS)
.map(|i| vars.next_values[reg_step(i)])
.sum::<P>();
let next_any_flag = (0..NUM_ROUNDS).map(|i| next_values[reg_step(i)]).sum::<P>();
for i in 0..NUM_ROUNDS {
let current_round_flag = vars.local_values[reg_step(i)];
let next_round_flag = vars.next_values[reg_step((i + 1) % NUM_ROUNDS)];
let current_round_flag = local_values[reg_step(i)];
let next_round_flag = next_values[reg_step((i + 1) % NUM_ROUNDS)];
yield_constr.constraint_transition(next_any_flag * (next_round_flag - current_round_flag));
}

// Padding rows should always be followed by padding rows.
let current_any_flag = (0..NUM_ROUNDS)
.map(|i| vars.local_values[reg_step(i)])
.map(|i| local_values[reg_step(i)])
.sum::<P>();
yield_constr.constraint_transition(next_any_flag * (current_any_flag - F::ONE));
}

pub(crate) fn eval_round_flags_recursively<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS>,
vars: &StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();
let local_values = vars.get_local_values();
let next_values = vars.get_next_values();

// Initially, the first step flag should be 1 while the others should be 0.
let step_0_minus_1 = builder.sub_extension(vars.local_values[reg_step(0)], one);
let step_0_minus_1 = builder.sub_extension(local_values[reg_step(0)], one);
yield_constr.constraint_first_row(builder, step_0_minus_1);
for i in 1..NUM_ROUNDS {
yield_constr.constraint_first_row(builder, vars.local_values[reg_step(i)]);
yield_constr.constraint_first_row(builder, local_values[reg_step(i)]);
}

// Flags should circularly increment, or be all zero for padding rows.
let next_any_flag =
builder.add_many_extension((0..NUM_ROUNDS).map(|i| vars.next_values[reg_step(i)]));
builder.add_many_extension((0..NUM_ROUNDS).map(|i| next_values[reg_step(i)]));
for i in 0..NUM_ROUNDS {
let current_round_flag = vars.local_values[reg_step(i)];
let next_round_flag = vars.next_values[reg_step((i + 1) % NUM_ROUNDS)];
let current_round_flag = local_values[reg_step(i)];
let next_round_flag = next_values[reg_step((i + 1) % NUM_ROUNDS)];
let diff = builder.sub_extension(next_round_flag, current_round_flag);
let constraint = builder.mul_extension(next_any_flag, diff);
yield_constr.constraint_transition(builder, constraint);
@ -63,7 +67,7 @@ pub(crate) fn eval_round_flags_recursively<F: RichField + Extendable<D>, const D

// Padding rows should always be followed by padding rows.
let current_any_flag =
builder.add_many_extension((0..NUM_ROUNDS).map(|i| vars.local_values[reg_step(i)]));
builder.add_many_extension((0..NUM_ROUNDS).map(|i| local_values[reg_step(i)]));
let constraint = builder.mul_sub_extension(next_any_flag, current_any_flag, next_any_flag);
yield_constr.constraint_transition(builder, constraint);
}

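The flag constraints above encode a one-hot round counter in arithmetic: a first-row condition pins flag 0 to 1, a transition condition rotates the hot position, and next_any_flag switches everything off for padding rows. The sketch below replays those polynomial conditions over plain integers for adjacent row pairs; NUM_ROUNDS is shrunk from Keccak's 24 purely for illustration:

const NUM_ROUNDS: usize = 4; // small stand-in for Keccak's 24

/// Evaluate the transition constraints between one row and the next;
/// every returned value must be zero for a valid trace.
fn transition_constraints(local: &[i64], next: &[i64]) -> Vec<i64> {
    let next_any_flag: i64 = next.iter().sum();
    let current_any_flag: i64 = local.iter().sum();
    let mut cs = Vec::new();
    // Flags circularly increment, unless the next row is all-zero padding.
    for i in 0..NUM_ROUNDS {
        cs.push(next_any_flag * (next[(i + 1) % NUM_ROUNDS] - local[i]));
    }
    // Padding rows are only ever followed by padding rows.
    cs.push(next_any_flag * (current_any_flag - 1));
    cs
}

fn main() {
    let row0 = [1, 0, 0, 0];
    let row1 = [0, 1, 0, 0];
    let padding = [0, 0, 0, 0];
    // A proper rotation satisfies every constraint...
    assert!(transition_constraints(&row0, &row1).iter().all(|&c| c == 0));
    // ...and so does entering padding, since next_any_flag vanishes.
    assert!(transition_constraints(&row1, &padding).iter().all(|&c| c == 0));
    // A skipped round does not.
    assert!(!transition_constraints(&row0, &[0, 0, 1, 0]).iter().all(|&c| c == 0));
}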
@ -17,10 +17,10 @@ use plonky2_util::ceil_div_usize;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::kernel::keccak_util::keccakf_u32s;
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak_sponge::columns::*;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
use crate::witness::memory::MemoryAddress;

pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
@ -423,18 +423,27 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakSpongeStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeStark<F, D> {
    const COLUMNS: usize = NUM_KECCAK_SPONGE_COLUMNS;
    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_KECCAK_SPONGE_COLUMNS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_KECCAK_SPONGE_COLUMNS>;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrame<FE, P, D2>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        let local_values: &KeccakSpongeColumnsView<P> = vars.local_values.borrow();
        let next_values: &KeccakSpongeColumnsView<P> = vars.next_values.borrow();
        let local_values: &[P; NUM_KECCAK_SPONGE_COLUMNS] =
            vars.get_local_values().try_into().unwrap();
        let local_values: &KeccakSpongeColumnsView<P> = local_values.borrow();
        let next_values: &[P; NUM_KECCAK_SPONGE_COLUMNS] =
            vars.get_next_values().try_into().unwrap();
        let next_values: &KeccakSpongeColumnsView<P> = next_values.borrow();

        // Each flag (full-input block, final block or implied dummy flag) must be boolean.
        let is_full_input_block = local_values.is_full_input_block;
@ -537,11 +546,15 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeS
    fn eval_ext_circuit(
        &self,
        builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        let local_values: &KeccakSpongeColumnsView<ExtensionTarget<D>> = vars.local_values.borrow();
        let next_values: &KeccakSpongeColumnsView<ExtensionTarget<D>> = vars.next_values.borrow();
        let local_values: &[ExtensionTarget<D>; NUM_KECCAK_SPONGE_COLUMNS] =
            vars.get_local_values().try_into().unwrap();
        let local_values: &KeccakSpongeColumnsView<ExtensionTarget<D>> = local_values.borrow();
        let next_values: &[ExtensionTarget<D>; NUM_KECCAK_SPONGE_COLUMNS] =
            vars.get_next_values().try_into().unwrap();
        let next_values: &KeccakSpongeColumnsView<ExtensionTarget<D>> = next_values.borrow();

        let one = builder.one_extension();

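The slice-to-array-to-view dance above is the pattern this commit repeats wherever a typed columns view is needed: `get_local_values()` now hands back a plain slice, `try_into()` recovers the fixed width, and `Borrow` reinterprets the array as the generated view struct. A self-contained sketch of the idea with a hypothetical two-column view (the real `KeccakSpongeColumnsView` and its `Borrow` impl are generated in `keccak_sponge::columns`):

    use core::borrow::Borrow;

    // Hypothetical two-column view; the real views are generated the same
    // way, with #[repr(C)] so that field order matches column order.
    #[repr(C)]
    struct DemoColumnsView {
        is_full_input_block: u64,
        len: u64,
    }

    const NUM_DEMO_COLUMNS: usize = 2;

    impl Borrow<DemoColumnsView> for [u64; NUM_DEMO_COLUMNS] {
        fn borrow(&self) -> &DemoColumnsView {
            // Sound only because the layouts coincide: #[repr(C)] with
            // NUM_DEMO_COLUMNS fields of the element type.
            unsafe { core::mem::transmute(self) }
        }
    }

    fn demo(frame_values: &[u64]) -> u64 {
        // Step 1: slice -> fixed-size array; fails loudly on a width mismatch.
        let arr: &[u64; NUM_DEMO_COLUMNS] = frame_values.try_into().unwrap();
        // Step 2: array -> named view, as in eval_packed_generic above.
        let view: &DemoColumnsView = arr.borrow();
        view.is_full_input_block
    }
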
@ -4,7 +4,6 @@
#![allow(clippy::type_complexity)]
#![allow(clippy::field_reassign_with_default)]
#![feature(let_chains)]
#![feature(generic_const_exprs)]

pub mod all_stark;
pub mod arithmetic;
@ -14,6 +13,7 @@ pub mod constraint_consumer;
pub mod cpu;
pub mod cross_table_lookup;
pub mod curve_pairings;
pub mod evaluation_frame;
pub mod extension_tower;
pub mod fixed_recursive_verifier;
pub mod generation;
@ -31,7 +31,6 @@ pub mod stark;
pub mod stark_testing;
pub mod util;
pub mod vanishing_poly;
pub mod vars;
pub mod verifier;
pub mod witness;

@ -7,16 +7,17 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2_util::ceil_div_usize;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::logic::columns::NUM_COLUMNS;
use crate::stark::Stark;
use crate::util::{limb_from_bits_le, limb_from_bits_le_recursive, trace_rows_to_poly_values};
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

// Total number of bits per input/output.
const VAL_BITS: usize = 256;
@ -181,17 +182,22 @@ impl<F: RichField, const D: usize> LogicStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F, D> {
    const COLUMNS: usize = NUM_COLUMNS;
    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrame<FE, P, D2>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        let lv = &vars.local_values;
        let lv = vars.get_local_values();

        // IS_AND, IS_OR, and IS_XOR come from the CPU table, so we assume they're valid.
        let is_and = lv[columns::IS_AND];
@ -237,10 +243,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F,
    fn eval_ext_circuit(
        &self,
        builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        let lv = &vars.local_values;
        let lv = vars.get_local_values();

        // IS_AND, IS_OR, and IS_XOR come from the CPU table, so we assume they're valid.
        let is_and = lv[columns::IS_AND];

@ -5,20 +5,24 @@ use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
use crate::evaluation_frame::StarkEvaluationFrame;

pub(crate) fn eval_lookups<F: Field, P: PackedField<Scalar = F>, const COLS: usize>(
    vars: StarkEvaluationVars<F, P, COLS>,
pub(crate) fn eval_lookups<F: Field, P: PackedField<Scalar = F>, E: StarkEvaluationFrame<P>>(
    vars: &E,
    yield_constr: &mut ConstraintConsumer<P>,
    col_permuted_input: usize,
    col_permuted_table: usize,
) {
    let local_perm_input = vars.local_values[col_permuted_input];
    let next_perm_table = vars.next_values[col_permuted_table];
    let next_perm_input = vars.next_values[col_permuted_input];
    let local_values = vars.get_local_values();
    let next_values = vars.get_next_values();

    let local_perm_input = local_values[col_permuted_input];
    let next_perm_table = next_values[col_permuted_table];
    let next_perm_input = next_values[col_permuted_input];

    // A "vertical" diff between the local and next permuted inputs.
    let diff_input_prev = next_perm_input - local_perm_input;
@ -35,18 +39,21 @@ pub(crate) fn eval_lookups<F: Field, P: PackedField<Scalar = F>, const COLS: usi

pub(crate) fn eval_lookups_circuit<
    F: RichField + Extendable<D>,
    E: StarkEvaluationFrame<ExtensionTarget<D>>,
    const D: usize,
    const COLS: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    vars: StarkEvaluationTargets<D, COLS>,
    vars: &E,
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    col_permuted_input: usize,
    col_permuted_table: usize,
) {
    let local_perm_input = vars.local_values[col_permuted_input];
    let next_perm_table = vars.next_values[col_permuted_table];
    let next_perm_input = vars.next_values[col_permuted_input];
    let local_values = vars.get_local_values();
    let next_values = vars.get_next_values();

    let local_perm_input = local_values[col_permuted_input];
    let next_perm_table = next_values[col_permuted_table];
    let next_perm_input = next_values[col_permuted_input];

    // A "vertical" diff between the local and next permuted inputs.
    let diff_input_prev = builder.sub_extension(next_perm_input, local_perm_input);

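Just past this hunk boundary, both functions finish with the usual permuted-lookup check: each permuted input must equal either the previous row's permuted input or the table value on its own row, so the product of the two diffs vanishes. Roughly, in the packed version:

    // A "horizontal" diff between the next permuted input and the table value.
    let diff_input_table = next_perm_input - next_perm_table;
    // At least one of the two diffs must be zero on every transition row.
    yield_constr.constraint(diff_input_prev * diff_input_table);
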
@ -7,6 +7,7 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
@ -14,6 +15,7 @@ use plonky2_maybe_rayon::*;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::{eval_lookups, eval_lookups_circuit, permuted_cols};
use crate::memory::columns::{
    value_limb, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, CONTEXT_FIRST_CHANGE, COUNTER,
@ -23,7 +25,6 @@ use crate::memory::columns::{
use crate::memory::VALUE_LIMBS;
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
use crate::witness::memory::MemoryOpKind::Read;
use crate::witness::memory::{MemoryAddress, MemoryOp};

@ -238,48 +239,55 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F, D> {
    const COLUMNS: usize = NUM_COLUMNS;
    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_COLUMNS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrame<FE, P, D2>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        let one = P::from(FE::ONE);
        let local_values = vars.get_local_values();
        let next_values = vars.get_next_values();

        let timestamp = vars.local_values[TIMESTAMP];
        let addr_context = vars.local_values[ADDR_CONTEXT];
        let addr_segment = vars.local_values[ADDR_SEGMENT];
        let addr_virtual = vars.local_values[ADDR_VIRTUAL];
        let values: Vec<_> = (0..8).map(|i| vars.local_values[value_limb(i)]).collect();
        let timestamp = local_values[TIMESTAMP];
        let addr_context = local_values[ADDR_CONTEXT];
        let addr_segment = local_values[ADDR_SEGMENT];
        let addr_virtual = local_values[ADDR_VIRTUAL];
        let value_limbs: Vec<_> = (0..8).map(|i| local_values[value_limb(i)]).collect();

        let next_timestamp = vars.next_values[TIMESTAMP];
        let next_is_read = vars.next_values[IS_READ];
        let next_addr_context = vars.next_values[ADDR_CONTEXT];
        let next_addr_segment = vars.next_values[ADDR_SEGMENT];
        let next_addr_virtual = vars.next_values[ADDR_VIRTUAL];
        let next_values: Vec<_> = (0..8).map(|i| vars.next_values[value_limb(i)]).collect();
        let next_timestamp = next_values[TIMESTAMP];
        let next_is_read = next_values[IS_READ];
        let next_addr_context = next_values[ADDR_CONTEXT];
        let next_addr_segment = next_values[ADDR_SEGMENT];
        let next_addr_virtual = next_values[ADDR_VIRTUAL];
        let next_values_limbs: Vec<_> = (0..8).map(|i| next_values[value_limb(i)]).collect();

        // The filter must be 0 or 1.
        let filter = vars.local_values[FILTER];
        let filter = local_values[FILTER];
        yield_constr.constraint(filter * (filter - P::ONES));

        // If this is a dummy row (filter is off), it must be a read. This means the prover can
        // insert reads which never appear in the CPU trace (which are harmless), but not writes.
        let is_dummy = P::ONES - filter;
        let is_write = P::ONES - vars.local_values[IS_READ];
        let is_write = P::ONES - local_values[IS_READ];
        yield_constr.constraint(is_dummy * is_write);

        let context_first_change = vars.local_values[CONTEXT_FIRST_CHANGE];
        let segment_first_change = vars.local_values[SEGMENT_FIRST_CHANGE];
        let virtual_first_change = vars.local_values[VIRTUAL_FIRST_CHANGE];
        let context_first_change = local_values[CONTEXT_FIRST_CHANGE];
        let segment_first_change = local_values[SEGMENT_FIRST_CHANGE];
        let virtual_first_change = local_values[VIRTUAL_FIRST_CHANGE];
        let address_unchanged =
            one - context_first_change - segment_first_change - virtual_first_change;

        let range_check = vars.local_values[RANGE_CHECK];
        let range_check = local_values[RANGE_CHECK];

        let not_context_first_change = one - context_first_change;
        let not_segment_first_change = one - segment_first_change;
@ -313,7 +321,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
        // Enumerate purportedly-ordered log.
        for i in 0..8 {
            yield_constr.constraint_transition(
                next_is_read * address_unchanged * (next_values[i] - values[i]),
                next_is_read * address_unchanged * (next_values_limbs[i] - value_limbs[i]),
            );
        }

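Both degree-2 checks above are built from the same two primitives; isolated for clarity (a minimal sketch, not part of the diff):

    use plonky2::field::packed::PackedField;

    // Booleanity: x * (x - 1) vanishes exactly when x is 0 or 1.
    fn booleanity<P: PackedField>(x: P) -> P {
        x * (x - P::ONES)
    }

    // Dummy rows must be reads: (1 - filter) * (1 - is_read) is nonzero only
    // on a row that is both dummy (filter = 0) and a write (is_read = 0).
    fn dummy_write<P: PackedField>(filter: P, is_read: P) -> P {
        (P::ONES - filter) * (P::ONES - is_read)
    }
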
@ -323,46 +331,48 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
    fn eval_ext_circuit(
        &self,
        builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        let one = builder.one_extension();
        let local_values = vars.get_local_values();
        let next_values = vars.get_next_values();

        let addr_context = vars.local_values[ADDR_CONTEXT];
        let addr_segment = vars.local_values[ADDR_SEGMENT];
        let addr_virtual = vars.local_values[ADDR_VIRTUAL];
        let values: Vec<_> = (0..8).map(|i| vars.local_values[value_limb(i)]).collect();
        let timestamp = vars.local_values[TIMESTAMP];
        let addr_context = local_values[ADDR_CONTEXT];
        let addr_segment = local_values[ADDR_SEGMENT];
        let addr_virtual = local_values[ADDR_VIRTUAL];
        let value_limbs: Vec<_> = (0..8).map(|i| local_values[value_limb(i)]).collect();
        let timestamp = local_values[TIMESTAMP];

        let next_addr_context = vars.next_values[ADDR_CONTEXT];
        let next_addr_segment = vars.next_values[ADDR_SEGMENT];
        let next_addr_virtual = vars.next_values[ADDR_VIRTUAL];
        let next_values: Vec<_> = (0..8).map(|i| vars.next_values[value_limb(i)]).collect();
        let next_is_read = vars.next_values[IS_READ];
        let next_timestamp = vars.next_values[TIMESTAMP];
        let next_addr_context = next_values[ADDR_CONTEXT];
        let next_addr_segment = next_values[ADDR_SEGMENT];
        let next_addr_virtual = next_values[ADDR_VIRTUAL];
        let next_values_limbs: Vec<_> = (0..8).map(|i| next_values[value_limb(i)]).collect();
        let next_is_read = next_values[IS_READ];
        let next_timestamp = next_values[TIMESTAMP];

        // The filter must be 0 or 1.
        let filter = vars.local_values[FILTER];
        let filter = local_values[FILTER];
        let constraint = builder.mul_sub_extension(filter, filter, filter);
        yield_constr.constraint(builder, constraint);

        // If this is a dummy row (filter is off), it must be a read. This means the prover can
        // insert reads which never appear in the CPU trace (which are harmless), but not writes.
        let is_dummy = builder.sub_extension(one, filter);
        let is_write = builder.sub_extension(one, vars.local_values[IS_READ]);
        let is_write = builder.sub_extension(one, local_values[IS_READ]);
        let is_dummy_write = builder.mul_extension(is_dummy, is_write);
        yield_constr.constraint(builder, is_dummy_write);

        let context_first_change = vars.local_values[CONTEXT_FIRST_CHANGE];
        let segment_first_change = vars.local_values[SEGMENT_FIRST_CHANGE];
        let virtual_first_change = vars.local_values[VIRTUAL_FIRST_CHANGE];
        let context_first_change = local_values[CONTEXT_FIRST_CHANGE];
        let segment_first_change = local_values[SEGMENT_FIRST_CHANGE];
        let virtual_first_change = local_values[VIRTUAL_FIRST_CHANGE];
        let address_unchanged = {
            let mut cur = builder.sub_extension(one, context_first_change);
            cur = builder.sub_extension(cur, segment_first_change);
            builder.sub_extension(cur, virtual_first_change)
        };

        let range_check = vars.local_values[RANGE_CHECK];
        let range_check = local_values[RANGE_CHECK];

        let not_context_first_change = builder.sub_extension(one, context_first_change);
        let not_segment_first_change = builder.sub_extension(one, segment_first_change);
@ -433,7 +443,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F

        // Enumerate purportedly-ordered log.
        for i in 0..8 {
            let value_diff = builder.sub_extension(next_values[i], values[i]);
            let value_diff = builder.sub_extension(next_values_limbs[i], value_limbs[i]);
            let zero_if_read = builder.mul_extension(address_unchanged, value_diff);
            let read_constraint = builder.mul_extension(next_is_read, zero_if_read);
            yield_constr.constraint_transition(builder, read_constraint);

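Note how the circuit side compresses the packed booleanity check into one fused gate: `mul_sub_extension(a, b, c)` evaluates a*b - c, and since f*f - f = f*(f - 1), `mul_sub_extension(filter, filter, filter)` is exactly `filter * (filter - P::ONES)` from the packed version. A one-line sanity check of the identity:

    // Over any ring: x*x - x == x*(x - 1); both vanish only at x = 0 and x = 1.
    fn booleanity_identity(x: i64) -> bool {
        x * x - x == x * (x - 1)
    }
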
@ -23,8 +23,8 @@ use plonky2_maybe_rayon::*;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another.
/// In particular, there should exist some permutation `pi` such that for any `i`,
@ -326,7 +326,7 @@ where
pub(crate) fn eval_permutation_checks<F, FE, P, S, const D: usize, const D2: usize>(
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
    vars: &S::EvaluationFrame<FE, P, D2>,
    permutation_vars: PermutationCheckVars<F, FE, P, D2>,
    consumer: &mut ConstraintConsumer<P>,
) where
@ -335,6 +335,8 @@ pub(crate) fn eval_permutation_checks<F, FE, P, S, const D: usize, const D2: usi
    P: PackedField<Scalar = FE>,
    S: Stark<F, D>,
{
    let local_values = vars.get_local_values();

    let PermutationCheckVars {
        local_zs,
        next_zs,
@ -368,7 +370,7 @@ pub(crate) fn eval_permutation_checks<F, FE, P, S, const D: usize, const D2: usi
            let mut factor = ReducingFactor::new(*beta);
            let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
                .iter()
                .map(|&(i, j)| (vars.local_values[i], vars.local_values[j]))
                .map(|&(i, j)| (local_values[i], local_values[j]))
                .unzip();
            (
                factor.reduce_ext(lhs.into_iter()) + FE::from_basefield(*gamma),
@ -392,14 +394,15 @@ pub(crate) fn eval_permutation_checks_circuit<F, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
    vars: &S::EvaluationFrameTarget,
    permutation_data: PermutationCheckDataTarget<D>,
    consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
{
    let local_values = vars.get_local_values();

    let PermutationCheckDataTarget {
        local_zs,
        next_zs,
@ -437,7 +440,7 @@ pub(crate) fn eval_permutation_checks_circuit<F, S, const D: usize>(
            let mut factor = ReducingFactorTarget::new(beta_ext);
            let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
                .iter()
                .map(|&(i, j)| (vars.local_values[i], vars.local_values[j]))
                .map(|&(i, j)| (local_values[i], local_values[j]))
                .unzip();
            let reduced_lhs = factor.reduce(&lhs, builder);
            let reduced_rhs = factor.reduce(&rhs, builder);

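The `ReducingFactor` calls above implement the usual randomized column combination: each side of the claimed permutation is collapsed to sum_i beta^i * v_i + gamma before the grand-product argument compares the two sides. A minimal sketch of that reduction, with u64 wrapping arithmetic standing in for field operations (names are illustrative, not the crate's API):

    fn reduce_with_challenges(values: &[u64], beta: u64, gamma: u64) -> u64 {
        // Horner-style: v_0 + beta * (v_1 + beta * (v_2 + ...)) = sum_i beta^i * v_i.
        let combined = values
            .iter()
            .rev()
            .fold(0u64, |acc, &v| acc.wrapping_mul(beta).wrapping_add(v));
        combined.wrapping_add(gamma)
    }
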
@ -101,6 +101,7 @@ pub struct BlockMetadata {
    pub block_number: U256,
    /// The difficulty (before PoS transition) of this block.
    pub block_difficulty: U256,
    pub block_random: H256,
    /// The gas limit of this block. It must fit in a `u32`.
    pub block_gaslimit: U256,
    /// The chain id of this block.
@ -177,6 +178,7 @@ impl PublicValuesTarget {
            block_timestamp,
            block_number,
            block_difficulty,
            block_random,
            block_gaslimit,
            block_chain_id,
            block_base_fee,
@ -188,6 +190,7 @@ impl PublicValuesTarget {
        buffer.write_target(block_timestamp)?;
        buffer.write_target(block_number)?;
        buffer.write_target(block_difficulty)?;
        buffer.write_target_array(&block_random)?;
        buffer.write_target(block_gaslimit)?;
        buffer.write_target(block_chain_id)?;
        buffer.write_target_array(&block_base_fee)?;
@ -239,6 +242,7 @@ impl PublicValuesTarget {
            block_timestamp: buffer.read_target()?,
            block_number: buffer.read_target()?,
            block_difficulty: buffer.read_target()?,
            block_random: buffer.read_target_array()?,
            block_gaslimit: buffer.read_target()?,
            block_chain_id: buffer.read_target()?,
            block_base_fee: buffer.read_target_array()?,
@ -412,6 +416,7 @@ pub struct BlockMetadataTarget {
    pub block_timestamp: Target,
    pub block_number: Target,
    pub block_difficulty: Target,
    pub block_random: [Target; 8],
    pub block_gaslimit: Target,
    pub block_chain_id: Target,
    pub block_base_fee: [Target; 2],
@ -420,24 +425,26 @@ pub struct BlockMetadataTarget {
}

impl BlockMetadataTarget {
    pub const SIZE: usize = 77;
    pub const SIZE: usize = 85;

    pub fn from_public_inputs(pis: &[Target]) -> Self {
        let block_beneficiary = pis[0..5].try_into().unwrap();
        let block_timestamp = pis[5];
        let block_number = pis[6];
        let block_difficulty = pis[7];
        let block_gaslimit = pis[8];
        let block_chain_id = pis[9];
        let block_base_fee = pis[10..12].try_into().unwrap();
        let block_gas_used = pis[12];
        let block_bloom = pis[13..77].try_into().unwrap();
        let block_random = pis[8..16].try_into().unwrap();
        let block_gaslimit = pis[16];
        let block_chain_id = pis[17];
        let block_base_fee = pis[18..20].try_into().unwrap();
        let block_gas_used = pis[20];
        let block_bloom = pis[21..85].try_into().unwrap();

        Self {
            block_beneficiary,
            block_timestamp,
            block_number,
            block_difficulty,
            block_random,
            block_gaslimit,
            block_chain_id,
            block_base_fee,

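The new `SIZE` follows directly from the widened layout read above: 5 (beneficiary) + 1 (timestamp) + 1 (number) + 1 (difficulty) + 8 (random) + 1 (gaslimit) + 1 (chain_id) + 2 (base_fee) + 1 (gas_used) + 64 (bloom) = 85 targets, which is why `block_random` lands at `pis[8..16]` and every later field shifts up by 8. A compile-time sanity check of the arithmetic:

    // Offsets as decoded in from_public_inputs above.
    const SIZE: usize = 5 + 3 + 8 + 1 + 1 + 2 + 1 + 64;
    const _: () = assert!(SIZE == 85);
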
@ -463,6 +470,9 @@ impl BlockMetadataTarget {
            block_timestamp: builder.select(condition, bm0.block_timestamp, bm1.block_timestamp),
            block_number: builder.select(condition, bm0.block_number, bm1.block_number),
            block_difficulty: builder.select(condition, bm0.block_difficulty, bm1.block_difficulty),
            block_random: core::array::from_fn(|i| {
                builder.select(condition, bm0.block_random[i], bm1.block_random[i])
            }),
            block_gaslimit: builder.select(condition, bm0.block_gaslimit, bm1.block_gaslimit),
            block_chain_id: builder.select(condition, bm0.block_chain_id, bm1.block_chain_id),
            block_base_fee: core::array::from_fn(|i| {
@ -486,11 +496,15 @@ impl BlockMetadataTarget {
        builder.connect(bm0.block_timestamp, bm1.block_timestamp);
        builder.connect(bm0.block_number, bm1.block_number);
        builder.connect(bm0.block_difficulty, bm1.block_difficulty);
        for i in 0..8 {
            builder.connect(bm0.block_random[i], bm1.block_random[i]);
        }
        builder.connect(bm0.block_gaslimit, bm1.block_gaslimit);
        builder.connect(bm0.block_chain_id, bm1.block_chain_id);
        for i in 0..2 {
            builder.connect(bm0.block_base_fee[i], bm1.block_base_fee[i])
        }
        builder.connect(bm0.block_gas_used, bm1.block_gas_used);
        for i in 0..64 {
            builder.connect(bm0.block_bloom[i], bm1.block_bloom[i])
        }

@ -20,20 +20,14 @@ use plonky2_maybe_rayon::*;
use plonky2_util::{log2_ceil, log2_strict};

use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::arithmetic::arithmetic_stark::ArithmeticStark;
use crate::byte_packing::byte_packing_stark::BytePackingStark;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cross_table_lookup::{cross_table_lookup_data, CtlCheckVars, CtlData};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::generation::outputs::GenerationOutputs;
use crate::generation::{generate_traces, GenerationInputs};
use crate::get_challenges::observe_public_values;
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::{
    compute_permutation_z_polys, get_grand_product_challenge_set,
    get_n_grand_product_challenge_sets, GrandProductChallengeSet, PermutationCheckVars,
@ -41,7 +35,6 @@ use crate::permutation::{
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof, StarkProofWithMetadata};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;

/// Generate traces, then create all STARK proofs.
pub fn prove<F, C, const D: usize>(
@ -53,13 +46,6 @@ pub fn prove<F, C, const D: usize>(
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    [(); ArithmeticStark::<F, D>::COLUMNS]:,
    [(); BytePackingStark::<F, D>::COLUMNS]:,
    [(); CpuStark::<F, D>::COLUMNS]:,
    [(); KeccakStark::<F, D>::COLUMNS]:,
    [(); KeccakSpongeStark::<F, D>::COLUMNS]:,
    [(); LogicStark::<F, D>::COLUMNS]:,
    [(); MemoryStark::<F, D>::COLUMNS]:,
{
    let (proof, _outputs) = prove_with_outputs(all_stark, config, inputs, timing)?;
    Ok(proof)
@ -76,13 +62,6 @@ pub fn prove_with_outputs<F, C, const D: usize>(
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    [(); ArithmeticStark::<F, D>::COLUMNS]:,
    [(); BytePackingStark::<F, D>::COLUMNS]:,
    [(); CpuStark::<F, D>::COLUMNS]:,
    [(); KeccakStark::<F, D>::COLUMNS]:,
    [(); KeccakSpongeStark::<F, D>::COLUMNS]:,
    [(); LogicStark::<F, D>::COLUMNS]:,
    [(); MemoryStark::<F, D>::COLUMNS]:,
{
    timed!(timing, "build kernel", Lazy::force(&KERNEL));
    let (traces, public_values, outputs) = timed!(
@ -105,13 +84,6 @@ pub(crate) fn prove_with_traces<F, C, const D: usize>(
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    [(); ArithmeticStark::<F, D>::COLUMNS]:,
    [(); BytePackingStark::<F, D>::COLUMNS]:,
    [(); CpuStark::<F, D>::COLUMNS]:,
    [(); KeccakStark::<F, D>::COLUMNS]:,
    [(); KeccakSpongeStark::<F, D>::COLUMNS]:,
    [(); LogicStark::<F, D>::COLUMNS]:,
    [(); MemoryStark::<F, D>::COLUMNS]:,
{
    let rate_bits = config.fri_config.rate_bits;
    let cap_height = config.fri_config.cap_height;
@ -197,13 +169,6 @@ fn prove_with_commitments<F, C, const D: usize>(
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    [(); ArithmeticStark::<F, D>::COLUMNS]:,
    [(); BytePackingStark::<F, D>::COLUMNS]:,
    [(); CpuStark::<F, D>::COLUMNS]:,
    [(); KeccakStark::<F, D>::COLUMNS]:,
    [(); KeccakSpongeStark::<F, D>::COLUMNS]:,
    [(); LogicStark::<F, D>::COLUMNS]:,
    [(); MemoryStark::<F, D>::COLUMNS]:,
{
    let arithmetic_proof = timed!(
        timing,
@ -322,7 +287,6 @@ where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
{
    let degree = trace_poly_values[0].len();
    let degree_bits = log2_strict(degree);
@ -507,7 +471,6 @@ where
    P: PackedField<Scalar = F>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
{
    let degree = 1 << degree_bits;
    let rate_bits = config.fri_config.rate_bits;
@ -530,12 +493,8 @@ where
    let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, quotient_degree_bits);

    // Retrieve the LDE values at index `i`.
    let get_trace_values_packed = |i_start| -> [P; S::COLUMNS] {
        trace_commitment
            .get_lde_values_packed(i_start, step)
            .try_into()
            .unwrap()
    };
    let get_trace_values_packed =
        |i_start| -> Vec<P> { trace_commitment.get_lde_values_packed(i_start, step) };

    // Last element of the subgroup.
    let last = F::primitive_root_of_unity(degree_bits).inverse();
@ -566,10 +525,10 @@ where
                lagrange_basis_first,
                lagrange_basis_last,
            );
            let vars = StarkEvaluationVars {
                local_values: &get_trace_values_packed(i_start),
                next_values: &get_trace_values_packed(i_next_start),
            };
            let vars = S::EvaluationFrame::from_values(
                &get_trace_values_packed(i_start),
                &get_trace_values_packed(i_next_start),
            );
            let permutation_check_vars =
                permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
                    local_zs: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
@ -597,7 +556,7 @@ where
            eval_vanishing_poly::<F, F, P, S, D, 1>(
                stark,
                config,
                vars,
                &vars,
                permutation_check_vars,
                &ctl_vars,
                &mut consumer,
@ -642,7 +601,6 @@ fn check_constraints<'a, F, C, S, const D: usize>(
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
{
    let degree = 1 << degree_bits;
    let rate_bits = 0; // Set this to higher value to check constraint degree.
@ -688,10 +646,10 @@ fn check_constraints<'a, F, C, S, const D: usize>(
                lagrange_basis_first,
                lagrange_basis_last,
            );
            let vars = StarkEvaluationVars {
                local_values: trace_subgroup_evals[i].as_slice().try_into().unwrap(),
                next_values: trace_subgroup_evals[i_next].as_slice().try_into().unwrap(),
            };
            let vars = S::EvaluationFrame::from_values(
                &trace_subgroup_evals[i],
                &trace_subgroup_evals[i_next],
            );
            let permutation_check_vars =
                permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
                    local_zs: permutation_ctl_zs_subgroup_evals[i][..num_permutation_zs].to_vec(),
@ -715,7 +673,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
            eval_vanishing_poly::<F, F, F, S, D, 1>(
                stark,
                config,
                vars,
                &vars,
                permutation_check_vars,
                &ctl_vars,
                &mut consumer,

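Both `from_values` call sites above rely on the constructor copying two equal-length slices into the frame's fixed-size arrays, which is what lets the prover pass borrowed `Vec`s where `[P; S::COLUMNS]` references used to be required. A stand-in sketch of that behavior (hypothetical `MiniFrame`, not the crate's type):

    // Stand-in for the real frame: from_values copies two equal-length slices
    // into owned arrays and panics on a width mismatch, mirroring the
    // try_into().unwrap() calls it replaces.
    struct MiniFrame<const N: usize> {
        local_values: [u64; N],
        next_values: [u64; N],
    }

    impl<const N: usize> MiniFrame<N> {
        fn from_values(lv: &[u64], nv: &[u64]) -> Self {
            Self {
                local_values: lv.try_into().unwrap(),
                next_values: nv.try_into().unwrap(),
            }
        }
    }

    fn main() {
        // The prover can now hand over borrowed Vecs of the right length...
        let frame = MiniFrame::<3>::from_values(&vec![1u64, 2, 3], &vec![4u64, 5, 6]);
        assert_eq!(frame.local_values, [1, 2, 3]);
        // ...and a wrong width fails loudly at runtime instead of at the type level.
        // MiniFrame::<3>::from_values(&vec![1u64], &vec![2u64]); // would panic
    }
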
@ -30,6 +30,7 @@ use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{verify_cross_table_lookups, CrossTableLookup, CtlCheckVarsTarget};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::memory::segments::Segment;
use crate::memory::VALUE_LIMBS;
use crate::permutation::{
@ -45,7 +46,6 @@ use crate::proof::{
use crate::stark::Stark;
use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64};
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::vars::StarkEvaluationTargets;
use crate::witness::errors::ProgramError;

/// Table-wise recursive proofs of an `AllProof`.
@ -297,7 +297,6 @@ pub(crate) fn recursive_stark_circuit<
    min_degree_bits: usize,
) -> StarkWrapperCircuit<F, C, D>
where
    [(); S::COLUMNS]:,
    C::Hasher: AlgebraicHasher<F>,
{
    let mut builder = CircuitBuilder::<F, D>::new(circuit_config.clone());
@ -405,7 +404,6 @@ fn verify_stark_proof_with_challenges_circuit<
    inner_config: &StarkConfig,
) where
    C::Hasher: AlgebraicHasher<F>,
    [(); S::COLUMNS]:,
{
    let zero = builder.zero();
    let one = builder.one_extension();
@ -418,10 +416,7 @@ fn verify_stark_proof_with_challenges_circuit<
        ctl_zs_first,
        quotient_polys,
    } = &proof.openings;
    let vars = StarkEvaluationTargets {
        local_values: &local_values.to_vec().try_into().unwrap(),
        next_values: &next_values.to_vec().try_into().unwrap(),
    };
    let vars = S::EvaluationFrameTarget::from_values(local_values, next_values);

    let degree_bits = proof.recover_degree_bits(inner_config);
    let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits);
@ -456,7 +451,7 @@ fn verify_stark_proof_with_challenges_circuit<
        builder,
        stark,
        inner_config,
        vars,
        &vars,
        permutation_data,
        ctl_vars,
        &mut consumer,
@ -553,11 +548,15 @@ pub(crate) fn get_memory_extra_looking_products_circuit<
        ),
    ];

    let beneficiary_base_fee_cur_hash_fields: [(usize, &[Target]); 3] = [
    let beneficiary_random_base_fee_cur_hash_fields: [(usize, &[Target]); 4] = [
        (
            GlobalMetadata::BlockBeneficiary as usize,
            &public_values.block_metadata.block_beneficiary,
        ),
        (
            GlobalMetadata::BlockRandom as usize,
            &public_values.block_metadata.block_random,
        ),
        (
            GlobalMetadata::BlockBaseFee as usize,
            &public_values.block_metadata.block_base_fee,
@ -581,7 +580,7 @@ pub(crate) fn get_memory_extra_looking_products_circuit<
        );
    });

    beneficiary_base_fee_cur_hash_fields.map(|(field, targets)| {
    beneficiary_random_base_fee_cur_hash_fields.map(|(field, targets)| {
        product = add_data_write(
            builder,
            challenge,
@ -777,6 +776,7 @@ pub(crate) fn add_virtual_block_metadata<F: RichField + Extendable<D>, const D:
    let block_timestamp = builder.add_virtual_public_input();
    let block_number = builder.add_virtual_public_input();
    let block_difficulty = builder.add_virtual_public_input();
    let block_random = builder.add_virtual_public_input_arr();
    let block_gaslimit = builder.add_virtual_public_input();
    let block_chain_id = builder.add_virtual_public_input();
    let block_base_fee = builder.add_virtual_public_input_arr();
@ -787,6 +787,7 @@ pub(crate) fn add_virtual_block_metadata<F: RichField + Extendable<D>, const D:
        block_timestamp,
        block_number,
        block_difficulty,
        block_random,
        block_gaslimit,
        block_chain_id,
        block_base_fee,
@ -936,7 +937,7 @@ where
        witness,
        &public_values_target.extra_block_data,
        &public_values.extra_block_data,
    );
    )?;

    Ok(())
}
@ -1021,6 +1022,10 @@ where
        block_metadata_target.block_difficulty,
        u256_to_u32(block_metadata.block_difficulty)?,
    );
    witness.set_target_arr(
        &block_metadata_target.block_random,
        &h256_limbs(block_metadata.block_random),
    );
    witness.set_target(
        block_metadata_target.block_gaslimit,
        u256_to_u32(block_metadata.block_gaslimit)?,
@ -1069,7 +1074,8 @@ pub(crate) fn set_extra_public_values_target<F, W, const D: usize>(
    witness: &mut W,
    ed_target: &ExtraBlockDataTarget,
    ed: &ExtraBlockData,
) where
) -> Result<(), ProgramError>
where
    F: RichField + Extendable<D>,
    W: Witness<F>,
{
@ -1079,20 +1085,14 @@ pub(crate) fn set_extra_public_values_target<F, W, const D: usize>(
    );
    witness.set_target(
        ed_target.txn_number_before,
        F::from_canonical_usize(ed.txn_number_before.as_usize()),
        u256_to_u32(ed.txn_number_before)?,
    );
    witness.set_target(
        ed_target.txn_number_after,
        F::from_canonical_usize(ed.txn_number_after.as_usize()),
    );
    witness.set_target(
        ed_target.gas_used_before,
        F::from_canonical_usize(ed.gas_used_before.as_usize()),
    );
    witness.set_target(
        ed_target.gas_used_after,
        F::from_canonical_usize(ed.gas_used_after.as_usize()),
        u256_to_u32(ed.txn_number_after)?,
    );
    witness.set_target(ed_target.gas_used_before, u256_to_u32(ed.gas_used_before)?);
    witness.set_target(ed_target.gas_used_after, u256_to_u32(ed.gas_used_after)?);

    let block_bloom_before = ed.block_bloom_before;
    let mut block_bloom_limbs = [F::ZERO; 64];
@ -1109,4 +1109,6 @@ pub(crate) fn set_extra_public_values_target<F, W, const D: usize>(
    }

    witness.set_target_arr(&ed_target.block_bloom_after, &block_bloom_limbs);

    Ok(())
}

@ -12,8 +12,8 @@ use plonky2_util::ceil_div_usize;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::permutation::PermutationPair;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

const TRACE_ORACLE_INDEX: usize = 0;
const PERMUTATION_CTL_ORACLE_INDEX: usize = 1;
@ -22,7 +22,16 @@ const QUOTIENT_ORACLE_INDEX: usize = 2;
/// Represents a STARK system.
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
    /// The total number of columns in the trace.
    const COLUMNS: usize;
    const COLUMNS: usize = Self::EvaluationFrameTarget::COLUMNS;

    /// This is used to evaluate constraints natively.
    type EvaluationFrame<FE, P, const D2: usize>: StarkEvaluationFrame<P>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    /// The `Target` version of `Self::EvaluationFrame`, used to evaluate constraints recursively.
    type EvaluationFrameTarget: StarkEvaluationFrame<ExtensionTarget<D>>;

    /// Evaluate constraints at a vector of points.
    ///
@ -32,7 +41,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
    /// constraints over `F`.
    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrame<FE, P, D2>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
@ -41,7 +50,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
    /// Evaluate constraints at a vector of points from the base field `F`.
    fn eval_packed_base<P: PackedField<Scalar = F>>(
        &self,
        vars: StarkEvaluationVars<F, P, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrame<F, P, 1>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) {
        self.eval_packed_generic(vars, yield_constr)
@ -50,7 +59,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
    /// Evaluate constraints at a single point from the degree `D` extension field.
    fn eval_ext(
        &self,
        vars: StarkEvaluationVars<F::Extension, F::Extension, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrame<F::Extension, F::Extension, D>,
        yield_constr: &mut ConstraintConsumer<F::Extension>,
    ) {
        self.eval_packed_generic(vars, yield_constr)
@ -63,7 +72,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
    fn eval_ext_circuit(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    );

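Putting the new trait surface together, a downstream table now wires itself up as below; an abridged sketch with a hypothetical two-column STARK (imports as at the tops of the files in this diff; `COLUMNS` now comes from the defaulted associated const, and `Stark` items untouched by this commit are elided):

    // Hypothetical two-column STARK illustrating the new associated types.
    const NUM_DEMO_COLUMNS: usize = 2;

    struct DemoStark<F, const D: usize> {
        _marker: core::marker::PhantomData<F>,
    }

    impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for DemoStark<F, D> {
        type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_DEMO_COLUMNS>
        where
            FE: FieldExtension<D2, BaseField = F>,
            P: PackedField<Scalar = FE>;

        type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_DEMO_COLUMNS>;

        fn eval_packed_generic<FE, P, const D2: usize>(
            &self,
            vars: &Self::EvaluationFrame<FE, P, D2>,
            yield_constr: &mut ConstraintConsumer<P>,
        ) where
            FE: FieldExtension<D2, BaseField = F>,
            P: PackedField<Scalar = FE>,
        {
            let lv = vars.get_local_values();
            // Toy constraint: column 0 is boolean.
            yield_constr.constraint(lv[0] * (lv[0] - P::ONES));
        }

        fn eval_ext_circuit(
            &self,
            builder: &mut CircuitBuilder<F, D>,
            vars: &Self::EvaluationFrameTarget,
            yield_constr: &mut RecursiveConstraintConsumer<F, D>,
        ) {
            let lv = vars.get_local_values();
            // Same booleanity check as one fused a*b - c gate.
            let c = builder.mul_sub_extension(lv[0], lv[0], lv[0]);
            yield_constr.constraint(builder, c);
        }

        fn constraint_degree(&self) -> usize {
            2
        }
    }
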
@ -3,17 +3,16 @@ use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::field::types::{Field, Sample};
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::plonk::config::GenericConfig;
use plonky2::util::transpose;
use plonky2_util::{log2_ceil, log2_strict};

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

const WITNESS_SIZE: usize = 1 << 5;

@ -21,10 +20,7 @@ const WITNESS_SIZE: usize = 1 << 5;
/// low-degree witness polynomials.
pub fn test_stark_low_degree<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
    stark: S,
) -> Result<()>
where
    [(); S::COLUMNS]:,
{
) -> Result<()> {
    let rate_bits = log2_ceil(stark.constraint_degree() + 1);

    let trace_ldes = random_low_degree_matrix::<F>(S::COLUMNS, rate_bits);
@ -39,13 +35,10 @@ where
    let alpha = F::rand();
    let constraint_evals = (0..size)
        .map(|i| {
            let vars = StarkEvaluationVars {
                local_values: &trace_ldes[i].clone().try_into().unwrap(),
                next_values: &trace_ldes[(i + (1 << rate_bits)) % size]
                    .clone()
                    .try_into()
                    .unwrap(),
            };
            let vars = S::EvaluationFrame::from_values(
                &trace_ldes[i],
                &trace_ldes[(i + (1 << rate_bits)) % size],
            );

            let mut consumer = ConstraintConsumer::<F>::new(
                vec![alpha],
@ -53,7 +46,7 @@ where
                lagrange_first.values[i],
                lagrange_last.values[i],
            );
            stark.eval_packed_base(vars, &mut consumer);
            stark.eval_packed_base(&vars, &mut consumer);
            consumer.accumulators()[0]
        })
        .collect::<Vec<_>>();
@ -84,17 +77,13 @@ pub fn test_stark_circuit_constraints<
    const D: usize,
>(
    stark: S,
) -> Result<()>
where
    [(); S::COLUMNS]:,
    [(); <C::Hasher as Hasher<F>>::Permutation::WIDTH]:,
    [(); <C::InnerHasher as Hasher<F>>::Permutation::WIDTH]:,
{
) -> Result<()> {
    // Compute native constraint evaluation on random values.
    let vars = StarkEvaluationVars {
        local_values: &F::Extension::rand_array::<{ S::COLUMNS }>(),
        next_values: &F::Extension::rand_array::<{ S::COLUMNS }>(),
    };
    let vars = S::EvaluationFrame::from_values(
        &F::Extension::rand_vec(S::COLUMNS),
        &F::Extension::rand_vec(S::COLUMNS),
    );

    let alphas = F::rand_vec(1);
    let z_last = F::Extension::rand();
    let lagrange_first = F::Extension::rand();
@ -109,7 +98,7 @@ where
        lagrange_first,
        lagrange_last,
    );
    stark.eval_ext(vars, &mut consumer);
    stark.eval_ext(&vars, &mut consumer);
    let native_eval = consumer.accumulators()[0];

    // Compute circuit constraint evaluation on same random values.
@ -118,9 +107,9 @@ where
    let mut pw = PartialWitness::<F>::new();

    let locals_t = builder.add_virtual_extension_targets(S::COLUMNS);
    pw.set_extension_targets(&locals_t, vars.local_values);
    pw.set_extension_targets(&locals_t, vars.get_local_values());
    let nexts_t = builder.add_virtual_extension_targets(S::COLUMNS);
    pw.set_extension_targets(&nexts_t, vars.next_values);
    pw.set_extension_targets(&nexts_t, vars.get_next_values());
    let alphas_t = builder.add_virtual_targets(1);
    pw.set_target(alphas_t[0], alphas[0]);
    let z_last_t = builder.add_virtual_extension_target();
@ -130,10 +119,7 @@ where
    let lagrange_last_t = builder.add_virtual_extension_target();
    pw.set_extension_target(lagrange_last_t, lagrange_last);

    let vars = StarkEvaluationTargets::<D, { S::COLUMNS }> {
        local_values: &locals_t.try_into().unwrap(),
        next_values: &nexts_t.try_into().unwrap(),
    };
    let vars = S::EvaluationFrameTarget::from_values(&locals_t, &nexts_t);
    let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
        builder.zero_extension(),
        alphas_t,
@ -141,7 +127,7 @@ where
        lagrange_first_t,
        lagrange_last_t,
    );
    stark.eval_ext_circuit(&mut builder, vars, &mut consumer);
    stark.eval_ext_circuit(&mut builder, &vars, &mut consumer);
    let circuit_eval = consumer.accumulators()[0];
    let native_eval_t = builder.constant_extension(native_eval);
    builder.connect_extension(circuit_eval, native_eval_t);

@ -70,6 +70,11 @@ pub(crate) fn u256_to_u64<F: Field>(u256: U256) -> Result<(F, F), ProgramError>
    ))
}

/// Safe alternative to `U256::as_usize()`, which errors in case of overflow instead of panicking.
pub(crate) fn u256_to_usize(u256: U256) -> Result<usize, ProgramError> {
    u256.try_into().map_err(|_| ProgramError::IntegerTooLarge)
}

#[allow(unused)] // TODO: Remove?
/// Returns the 32-bit little-endian limbs of a `U256`.
pub(crate) fn u256_limbs<F: Field>(u256: U256) -> [F; 8] {
@ -171,6 +176,8 @@ pub(crate) fn u256_to_biguint(x: U256) -> BigUint {

pub(crate) fn biguint_to_u256(x: BigUint) -> U256 {
    let bytes = x.to_bytes_le();
    // This could panic if `bytes.len() > 32` but this is only
    // used here with `BigUint` constructed from `U256`.
    U256::from_little_endian(&bytes)
}

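Call sites throughout the witness generation migrate from the panicking `U256::as_usize()` to this helper and propagate with `?`; a hypothetical caller showing the intended shape:

    fn pop_len(len: U256) -> Result<usize, ProgramError> {
        // Before: let len = len.as_usize(); // panics when len > usize::MAX
        // After: overflow surfaces as ProgramError::IntegerTooLarge instead.
        let len = u256_to_usize(len)?;
        Ok(len)
    }
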
@ -14,12 +14,11 @@ use crate::permutation::{
    PermutationCheckVars,
};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

pub(crate) fn eval_vanishing_poly<F, FE, P, S, const D: usize, const D2: usize>(
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
    vars: &S::EvaluationFrame<FE, P, D2>,
    permutation_vars: Option<PermutationCheckVars<F, FE, P, D2>>,
    ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
    consumer: &mut ConstraintConsumer<P>,
@ -46,14 +45,13 @@ pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
    vars: &S::EvaluationFrameTarget,
    permutation_data: Option<PermutationCheckDataTarget<D>>,
    ctl_vars: &[CtlCheckVarsTarget<F, D>],
    consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
{
    stark.eval_ext_circuit(builder, vars, consumer);
    if let Some(permutation_data) = permutation_data {

@ -1,19 +0,0 @@
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::iop::ext_target::ExtensionTarget;

#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize>
where
    F: Field,
    P: PackedField<Scalar = F>,
{
    pub local_values: &'a [P; COLUMNS],
    pub next_values: &'a [P; COLUMNS],
}

#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationTargets<'a, const D: usize, const COLUMNS: usize> {
    pub local_values: &'a [ExtensionTarget<D>; COLUMNS],
    pub next_values: &'a [ExtensionTarget<D>; COLUMNS],
}
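These two deleted structs are superseded by the new `evaluation_frame` module used throughout this commit. A sketch of the replacement's shape, reconstructed from the call sites in this diff (`get_local_values`, `get_next_values`, `from_values`, `COLUMNS`); the exact bounds in the real module may differ:

    pub trait StarkEvaluationFrame<T: Copy + Default>: Sized {
        const COLUMNS: usize;

        fn get_local_values(&self) -> &[T];
        fn get_next_values(&self) -> &[T];

        /// Panics if either slice is not COLUMNS long.
        fn from_values(lv: &[T], nv: &[T]) -> Self;
    }

    pub struct StarkFrame<T: Copy + Default, const N: usize> {
        local_values: [T; N],
        next_values: [T; N],
    }

    impl<T: Copy + Default, const N: usize> StarkEvaluationFrame<T> for StarkFrame<T, N> {
        const COLUMNS: usize = N;

        fn get_local_values(&self) -> &[T] {
            &self.local_values
        }

        fn get_next_values(&self) -> &[T] {
            &self.next_values
        }

        fn from_values(lv: &[T], nv: &[T]) -> Self {
            Self {
                local_values: lv.try_into().unwrap(),
                next_values: nv.try_into().unwrap(),
            }
        }
    }
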
@ -1,7 +1,7 @@
use std::any::type_name;

use anyhow::{ensure, Result};
use ethereum_types::U256;
use ethereum_types::{BigEndianHash, U256};
use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::types::Field;
@ -11,17 +11,11 @@ use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::plonk_common::reduce_with_powers;

use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::arithmetic::arithmetic_stark::ArithmeticStark;
use crate::byte_packing::byte_packing_stark::BytePackingStark;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars};
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::memory::segments::Segment;
use crate::memory::VALUE_LIMBS;
use crate::permutation::{GrandProductChallenge, PermutationCheckVars};
@ -31,7 +25,6 @@ use crate::proof::{
use crate::stark::Stark;
use crate::util::h2u;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;

pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
    all_stark: &AllStark<F, D>,
@ -39,13 +32,6 @@ pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, co
    config: &StarkConfig,
) -> Result<()>
where
    [(); ArithmeticStark::<F, D>::COLUMNS]:,
    [(); BytePackingStark::<F, D>::COLUMNS]:,
    [(); CpuStark::<F, D>::COLUMNS]:,
    [(); KeccakStark::<F, D>::COLUMNS]:,
    [(); KeccakSpongeStark::<F, D>::COLUMNS]:,
    [(); LogicStark::<F, D>::COLUMNS]:,
    [(); MemoryStark::<F, D>::COLUMNS]:,
{
    let AllProofChallenges {
        stark_challenges,
@ -171,6 +157,10 @@ where
            GlobalMetadata::BlockNumber,
            public_values.block_metadata.block_number,
        ),
        (
            GlobalMetadata::BlockRandom,
            public_values.block_metadata.block_random.into_uint(),
        ),
        (
            GlobalMetadata::BlockDifficulty,
            public_values.block_metadata.block_difficulty,
@ -301,10 +291,7 @@ pub(crate) fn verify_stark_proof_with_challenges<
    challenges: &StarkProofChallenges<F, D>,
    ctl_vars: &[CtlCheckVars<F, F::Extension, F::Extension, D>],
    config: &StarkConfig,
) -> Result<()>
where
    [(); S::COLUMNS]:,
{
) -> Result<()> {
    log::debug!("Checking proof: {}", type_name::<S>());
    validate_proof_shape(stark, proof, config, ctl_vars.len())?;
    let StarkOpeningSet {
@ -315,10 +302,7 @@ where
        ctl_zs_first,
        quotient_polys,
    } = &proof.openings;
    let vars = StarkEvaluationVars {
        local_values: &local_values.to_vec().try_into().unwrap(),
        next_values: &next_values.to_vec().try_into().unwrap(),
    };
    let vars = S::EvaluationFrame::from_values(local_values, next_values);

    let degree_bits = proof.recover_degree_bits(config);
    let (l_0, l_last) = eval_l_0_and_l_last(degree_bits, challenges.stark_zeta);
@ -343,7 +327,7 @@ where
    eval_vanishing_poly::<F, F::Extension, F::Extension, S, D, D>(
        stark,
        config,
        vars,
        &vars,
        permutation_data,
        ctl_vars,
        &mut consumer,
@ -401,7 +385,6 @@ where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
{
    let StarkProof {
        trace_cap,

@ -6,6 +6,7 @@ pub enum ProgramError {
    OutOfGas,
    InvalidOpcode,
    StackUnderflow,
    InvalidRlp,
    InvalidJumpDestination,
    InvalidJumpiDestination,
    StackOverflow,
@ -14,6 +15,8 @@ pub enum ProgramError {
    GasLimitError,
    InterpreterError,
    IntegerTooLarge,
    ProverInputError(ProverInputError),
    UnknownContractCode,
}

#[allow(clippy::enum_variant_names)]
@ -23,3 +26,13 @@ pub enum MemoryError {
    SegmentTooLarge { segment: U256 },
    VirtTooLarge { virt: U256 },
}

#[derive(Debug)]
pub enum ProverInputError {
    OutOfMptData,
    OutOfRlpData,
    CodeHashNotFound,
    InvalidMptInput,
    InvalidInput,
    InvalidFunction,
}

@ -58,6 +58,8 @@ impl MemoryAddress {
|
||||
if virt.bits() > 32 {
|
||||
return Err(MemoryError(VirtTooLarge { virt }));
|
||||
}
|
||||
|
||||
// Calling `as_usize` here is safe as those have been checked above.
|
||||
Ok(Self {
|
||||
context: context.as_usize(),
|
||||
segment: segment.as_usize(),
|
||||
|
||||
@ -15,6 +15,7 @@ use crate::cpu::stack_bounds::MAX_USER_STACK_SIZE;
|
||||
use crate::extension_tower::BN_BASE;
|
||||
use crate::generation::state::GenerationState;
|
||||
use crate::memory::segments::Segment;
|
||||
use crate::util::u256_to_usize;
|
||||
use crate::witness::errors::MemoryError::{ContextTooLarge, SegmentTooLarge, VirtTooLarge};
|
||||
use crate::witness::errors::ProgramError;
|
||||
use crate::witness::errors::ProgramError::MemoryError;
|
||||
@ -127,7 +128,7 @@ pub(crate) fn generate_keccak_general<F: Field>(
|
||||
row.is_keccak_sponge = F::ONE;
|
||||
let [(context, log_in0), (segment, log_in1), (base_virt, log_in2), (len, log_in3)] =
|
||||
stack_pop_with_log_and_fill::<4, _>(state, &mut row)?;
|
||||
let len = len.as_usize();
|
||||
let len = u256_to_usize(len)?;
|
||||
|
||||
let base_address = MemoryAddress::new_u256s(context, segment, base_virt)?;
|
||||
let input = (0..len)
|
||||
@ -162,7 +163,7 @@ pub(crate) fn generate_prover_input<F: Field>(
|
||||
) -> Result<(), ProgramError> {
|
||||
let pc = state.registers.program_counter;
|
||||
let input_fn = &KERNEL.prover_inputs[&pc];
|
||||
let input = state.prover_input(input_fn);
|
||||
let input = state.prover_input(input_fn)?;
|
||||
let write = stack_push_log_and_fill(state, &mut row, input)?;
|
||||
|
||||
state.traces.push_memory(write);
|
||||
@ -217,7 +218,7 @@ pub(crate) fn generate_jump<F: Field>(
|
||||
|
||||
state.traces.push_memory(log_in0);
|
||||
state.traces.push_cpu(row);
|
||||
state.jump_to(dst as usize);
|
||||
state.jump_to(dst as usize)?;
|
||||
Ok(())
|
||||
}

@ -241,7 +242,7 @@ pub(crate) fn generate_jumpi<F: Field>(
let dst: u32 = dst
.try_into()
.map_err(|_| ProgramError::InvalidJumpiDestination)?;
state.jump_to(dst as usize);
state.jump_to(dst as usize)?;
} else {
row.general.jumps_mut().should_jump = F::ZERO;
row.general.jumps_mut().cond_sum_pinv = F::ZERO;

@ -312,7 +313,7 @@ pub(crate) fn generate_set_context<F: Field>(
let [(ctx, log_in)] = stack_pop_with_log_and_fill::<1, _>(state, &mut row)?;
let sp_to_save = state.registers.stack_len.into();
let old_ctx = state.registers.context;
let new_ctx = ctx.as_usize();
let new_ctx = u256_to_usize(ctx)?;

let sp_field = ContextMetadata::StackSize as usize;
let old_sp_addr = MemoryAddress::new(old_ctx, Segment::ContextMetadata, sp_field);

@ -347,7 +348,8 @@ pub(crate) fn generate_set_context<F: Field>(
};

state.registers.context = new_ctx;
state.registers.stack_len = new_sp.as_usize();
let new_sp = u256_to_usize(new_sp)?;
state.registers.stack_len = new_sp;
state.traces.push_memory(log_in);
state.traces.push_memory(log_write_old_sp);
state.traces.push_memory(log_read_new_sp);

@ -362,6 +364,10 @@ pub(crate) fn generate_push<F: Field>(
) -> Result<(), ProgramError> {
let code_context = state.registers.code_context();
let num_bytes = n as usize;
if num_bytes > 32 {
// The call to `U256::from_big_endian()` would panic.
return Err(ProgramError::IntegerTooLarge);
}
let initial_offset = state.registers.program_counter + 1;

// First read val without going through `mem_read_with_log` type methods, so we can pass it
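
The new guard in `generate_push` exists because `U256::from_big_endian` panics when given a slice longer than 32 bytes, and PUSH reads `n` code bytes before converting them. A standalone illustration of the failure mode being ruled out:

use ethereum_types::U256;

fn main() {
    // 32 bytes is the largest input `from_big_endian` accepts.
    let max = U256::from_big_endian(&[0xffu8; 32]);
    assert_eq!(max, U256::MAX);

    // A 33-byte slice panics inside `from_big_endian`; the
    // `num_bytes > 32` guard above returns IntegerTooLarge instead.
    // let _boom = U256::from_big_endian(&[0xffu8; 33]); // would panic
}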

@ -589,7 +595,7 @@ pub(crate) fn generate_syscall<F: Field>(
);

let handler_addr = (handler_addr0 << 16) + (handler_addr1 << 8) + handler_addr2;
let new_program_counter = handler_addr.as_usize();
let new_program_counter = u256_to_usize(handler_addr)?;

let syscall_info = U256::from(state.registers.program_counter + 1)
+ (U256::from(u64::from(state.registers.is_kernel)) << 32)
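
The three values read from the syscall handler table appear to hold one byte each of a 24-bit handler address, which the shifts by 16 and 8 reassemble before the fallible conversion. A worked example of the packing (byte values chosen arbitrarily):

fn main() {
    // Assuming handler_addr0..2 each hold one byte of the address.
    let (a0, a1, a2) = (0x01u64, 0x23u64, 0x45u64);
    assert_eq!((a0 << 16) + (a1 << 8) + a2, 0x012345);
}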

@ -694,7 +700,11 @@ pub(crate) fn generate_mload_32bytes<F: Field>(
) -> Result<(), ProgramError> {
let [(context, log_in0), (segment, log_in1), (base_virt, log_in2), (len, log_in3)] =
stack_pop_with_log_and_fill::<4, _>(state, &mut row)?;
let len = len.as_usize();
let len = u256_to_usize(len)?;
if len > 32 {
// The call to `U256::from_big_endian()` would panic.
return Err(ProgramError::IntegerTooLarge);
}

let base_address = MemoryAddress::new_u256s(context, segment, base_virt)?;
if usize::MAX - base_address.virt < len {
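
The `usize::MAX - base_address.virt < len` comparison is an overflow-proof way of asking whether `base_address.virt + len` would wrap around; it is equivalent to `checked_add` returning `None`, as this small check illustrates:

fn would_overflow(virt: usize, len: usize) -> bool {
    usize::MAX - virt < len
}

fn main() {
    let cases = [(0usize, 32usize), (usize::MAX - 16, 16), (usize::MAX - 15, 16)];
    for (virt, len) in cases {
        assert_eq!(would_overflow(virt, len), virt.checked_add(len).is_none());
    }
}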

@ -762,7 +772,7 @@ pub(crate) fn generate_mstore_32bytes<F: Field>(
) -> Result<(), ProgramError> {
let [(context, log_in0), (segment, log_in1), (base_virt, log_in2), (val, log_in3), (len, log_in4)] =
stack_pop_with_log_and_fill::<5, _>(state, &mut row)?;
let len = len.as_usize();
let len = u256_to_usize(len)?;

let base_address = MemoryAddress::new_u256s(context, segment, base_virt)?;

@ -827,7 +837,7 @@ pub(crate) fn generate_exception<F: Field>(
);

let handler_addr = (handler_addr0 << 16) + (handler_addr1 << 8) + handler_addr2;
let new_program_counter = handler_addr.as_usize();
let new_program_counter = u256_to_usize(handler_addr)?;

let exc_info =
U256::from(state.registers.program_counter) + (U256::from(state.registers.gas_used) << 192);

@ -29,11 +29,14 @@ fn to_bits_le<F: Field>(n: u8) -> [F; 8] {
}

/// Peek at the stack item `i`th from the top. If `i=0` this gives the tip.
pub(crate) fn stack_peek<F: Field>(state: &GenerationState<F>, i: usize) -> Option<U256> {
pub(crate) fn stack_peek<F: Field>(
state: &GenerationState<F>,
i: usize,
) -> Result<U256, ProgramError> {
if i >= state.registers.stack_len {
return None;
return Err(ProgramError::StackUnderflow);
}
Some(state.memory.get(MemoryAddress::new(
Ok(state.memory.get(MemoryAddress::new(
state.registers.context,
Segment::Stack,
state.registers.stack_len - 1 - i,
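
With `stack_peek` returning `Result<U256, ProgramError>` instead of `Option<U256>`, callers can propagate an underflow with `?` rather than matching on `None`. A hypothetical call site, for illustration only (it assumes the crate's `Field`, `GenerationState`, and `U256` types):

// Hypothetical caller showing the before/after ergonomics.
fn top_two<F: Field>(state: &GenerationState<F>) -> Result<(U256, U256), ProgramError> {
    // Before: stack_peek(state, 0).ok_or(ProgramError::StackUnderflow)?
    // After: the underflow case is already encoded in the Result.
    let a = stack_peek(state, 0)?;
    let b = stack_peek(state, 1)?;
    Ok((a, b))
}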

@ -5,7 +5,7 @@ use std::time::Duration;
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{Address, H256};
use ethereum_types::{Address, BigEndianHash, H256};
use hex_literal::hex;
use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;

@ -83,6 +83,7 @@ fn add11_yml() -> anyhow::Result<()> {
block_timestamp: 0x03e8.into(),
block_number: 1.into(),
block_difficulty: 0x020000.into(),
block_random: H256::from_uint(&0x020000.into()),
block_gaslimit: 0xff112233u32.into(),
block_chain_id: 1.into(),
block_base_fee: 0xa.into(),
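
The widened `ethereum_types` import above exists for this one field: `H256::from_uint` comes from the `BigEndianHash` trait and interprets a `U256` as the big-endian bytes of a hash. A small standalone check:

use ethereum_types::{BigEndianHash, H256, U256};

fn main() {
    let h = H256::from_uint(&U256::from(0x020000));
    assert_eq!(h.into_uint(), U256::from(0x020000));
}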

@ -115,6 +115,7 @@ fn test_basic_smart_contract() -> anyhow::Result<()> {
block_gas_used: gas_used.into(),
block_bloom: [0.into(); 8],
block_base_fee: 0xa.into(),
block_random: Default::default(),
};

let mut contract_code = HashMap::new();

@ -8,7 +8,7 @@ use bytes::Bytes;
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{Address, H256, U256};
use ethereum_types::{Address, BigEndianHash, H256, U256};
use hex_literal::hex;
use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;

@ -135,6 +135,7 @@ fn test_log_opcodes() -> anyhow::Result<()> {
block_timestamp: 0x03e8.into(),
block_number: 1.into(),
block_difficulty: 0x020000.into(),
block_random: H256::from_uint(&0x020000.into()),
block_gaslimit: 0xffffffffu32.into(),
block_chain_id: 1.into(),
block_base_fee: 0xa.into(),

@ -366,6 +367,7 @@ fn test_log_with_aggreg() -> anyhow::Result<()> {
.unwrap(),
U256::from_dec_str("2722259584404615024560450425766186844160").unwrap(),
],
block_random: Default::default(),
};

let beneficiary_account_after = AccountRlp {

@ -795,6 +797,7 @@ fn test_two_txn() -> anyhow::Result<()> {
block_timestamp: 0x03e8.into(),
block_number: 1.into(),
block_difficulty: 0x020000.into(),
block_random: H256::from_uint(&0x020000.into()),
block_gaslimit: 0xffffffffu32.into(),
block_chain_id: 1.into(),
block_base_fee: 0xa.into(),

@ -104,6 +104,7 @@ fn self_balance_gas_cost() -> anyhow::Result<()> {
block_gas_used: gas_used.into(),
block_bloom: [0.into(); 8],
block_base_fee: 0xa.into(),
block_random: Default::default(),
};

let mut contract_code = HashMap::new();

@ -5,7 +5,7 @@ use std::time::Duration;
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{Address, H256, U256};
use ethereum_types::{Address, BigEndianHash, H256, U256};
use hex_literal::hex;
use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;

@ -71,6 +71,7 @@ fn test_simple_transfer() -> anyhow::Result<()> {
block_timestamp: 0x03e8.into(),
block_number: 1.into(),
block_difficulty: 0x020000.into(),
block_random: H256::from_uint(&0x020000.into()),
block_gaslimit: 0xff112233u32.into(),
block_chain_id: 1.into(),
block_base_fee: 0xa.into(),