Begin work on witness generation and kernel bootstrapping

Daniel Lubarov 2022-06-15 09:33:52 -07:00
parent f6d48f1328
commit e7b480deaf
16 changed files with 425 additions and 107 deletions


@@ -117,6 +117,7 @@ mod tests {
use std::borrow::BorrowMut;
use anyhow::Result;
use ethereum_types::U256;
use itertools::{izip, Itertools};
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
@@ -131,7 +132,7 @@ mod tests {
use crate::config::StarkConfig;
use crate::cpu::cpu_stark::CpuStark;
use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS};
use crate::logic::{self, LogicStark};
use crate::logic::{self, LogicStark, Operation};
use crate::memory::memory_stark::{generate_random_memory_ops, MemoryStark};
use crate::memory::NUM_CHANNELS;
use crate::proof::AllProof;
@@ -164,32 +165,20 @@ mod tests {
logic_stark: &LogicStark<F, D>,
rng: &mut R,
) -> Vec<PolynomialValues<F>> {
let mut trace_rows = vec![];
for _ in 0..num_rows {
let mut row = [F::ZERO; logic::columns::NUM_COLUMNS];
assert_eq!(logic::PACKED_LIMB_BITS, 16);
for col in logic::columns::INPUT0 {
row[col] = F::from_bool(rng.gen());
}
for col in logic::columns::INPUT1 {
row[col] = F::from_bool(rng.gen());
}
let op: usize = rng.gen_range(0..3);
let op_col = [
logic::columns::IS_AND,
logic::columns::IS_OR,
logic::columns::IS_XOR,
][op];
row[op_col] = F::ONE;
logic_stark.generate(&mut row);
trace_rows.push(row);
}
for _ in num_rows..num_rows.next_power_of_two() {
trace_rows.push([F::ZERO; logic::columns::NUM_COLUMNS])
}
trace_rows_to_poly_values(trace_rows)
let ops = (0..num_rows)
.map(|_| {
let input0 = U256(rng.gen());
let input1 = U256(rng.gen());
let result = input0 ^ input1;
Operation {
operator: logic::Op::Xor,
input0,
input1,
result,
}
})
.collect();
logic_stark.generate_trace(ops)
}
fn make_memory_trace<R: Rng>(


@@ -0,0 +1,77 @@
//! The initial phase of execution, where the kernel code is hashed while being written to memory.
//! The hash is then checked against a precomputed kernel hash.
use itertools::Itertools;
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::{COL_MAP, NUM_CPU_COLUMNS};
use crate::cpu::public_inputs::NUM_PUBLIC_INPUTS;
use crate::generation::state::GenerationState;
use crate::memory;
use crate::memory::segments;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
pub(crate) fn generate_bootstrap_kernel<F: Field>(state: &mut GenerationState<F>) {
for chunk in &state.kernel.code.clone().into_iter().enumerate().chunks(4) {
for (addr, byte) in chunk {
let mut value = [F::ZERO; memory::VALUE_LIMBS];
value[0] = F::from_canonical_u8(byte);
let channel = addr % memory::NUM_CHANNELS;
state.set_mem_current(channel, segments::CODE, addr, value);
// TODO: Set other registers.
state.commit_cpu_row();
}
}
todo!()
}
pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_CPU_COLUMNS, NUM_PUBLIC_INPUTS>,
yield_constr: &mut ConstraintConsumer<P>,
) {
// IS_BOOTSTRAP_KERNEL must have an init value of 1, a final value of 0, and a delta in {0, -1}.
let local_is_bootstrap = vars.local_values[COL_MAP.is_bootstrap_kernel];
let next_is_bootstrap = vars.next_values[COL_MAP.is_bootstrap_kernel];
yield_constr.constraint_first_row(local_is_bootstrap - P::ONES);
yield_constr.constraint_last_row(local_is_bootstrap);
let delta_is_bootstrap = next_is_bootstrap - local_is_bootstrap;
yield_constr.constraint_transition(delta_is_bootstrap * (delta_is_bootstrap + P::ONES));
// If IS_BOOTSTRAP_KERNEL changed (from 1 to 0), check that the current kernel hash matches a
// precomputed one.
let hash_diff = F::ZERO; // TODO
yield_constr.constraint_transition(delta_is_bootstrap * hash_diff)
}
pub(crate) fn eval_bootstrap_kernel_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_CPU_COLUMNS, NUM_PUBLIC_INPUTS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();
// IS_BOOTSTRAP_KERNEL must have an init value of 1, a final value of 0, and a delta in {0, -1}.
let local_is_bootstrap = vars.local_values[COL_MAP.is_bootstrap_kernel];
let next_is_bootstrap = vars.next_values[COL_MAP.is_bootstrap_kernel];
let constraint = builder.sub_extension(local_is_bootstrap, one);
yield_constr.constraint_first_row(builder, constraint);
yield_constr.constraint_last_row(builder, local_is_bootstrap);
let delta_is_bootstrap = builder.sub_extension(next_is_bootstrap, local_is_bootstrap);
let constraint =
builder.mul_add_extension(delta_is_bootstrap, delta_is_bootstrap, delta_is_bootstrap);
yield_constr.constraint_transition(builder, constraint);
// If IS_BOOTSTRAP_KERNEL changed (from 1 to 0), check that the current kernel hash matches a
// precomputed one.
let hash_diff = builder.zero_extension(); // TODO
let constraint = builder.mul_extension(delta_is_bootstrap, hash_diff);
yield_constr.constraint_transition(builder, constraint)
}
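For intuition (a standalone sketch, not part of this commit): the constraints above force the IS_BOOTSTRAP_KERNEL column to start at 1, end at 0, and change only by 0 or -1 per row, so the column looks like 1, 1, ..., 1, 0, ..., 0 with a single falling edge marking the end of kernel bootstrapping. The same shape check written over plain integers, with no plonky2 types:

// Illustrative only: the flag column must look like [1, ..., 1, 0, ..., 0].
fn bootstrap_flag_shape_ok(col: &[i64]) -> bool {
    let starts_at_one = col.first() == Some(&1);
    let ends_at_zero = col.last() == Some(&0);
    // Each transition delta must satisfy delta * (delta + 1) == 0, i.e. delta is 0 or -1.
    let deltas_ok = col
        .windows(2)
        .all(|w| (w[1] - w[0]) * (w[1] - w[0] + 1) == 0);
    starts_at_one && ends_at_zero && deltas_ok
}

fn main() {
    assert!(bootstrap_flag_shape_ok(&[1, 1, 1, 0, 0]));
    assert!(!bootstrap_flag_shape_ok(&[1, 0, 1, 0])); // the flag may never turn back on
}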


@@ -9,7 +9,7 @@ use plonky2::hash::hash_types::RichField;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::{CpuColumnsView, COL_MAP, NUM_CPU_COLUMNS};
use crate::cpu::{decode, simple_logic};
use crate::cpu::{bootstrap_kernel, decode, simple_logic};
use crate::cross_table_lookup::Column;
use crate::memory::NUM_CHANNELS;
use crate::stark::Stark;
@@ -81,6 +81,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
P: PackedField<Scalar = FE>,
{
let local_values = vars.local_values.borrow();
bootstrap_kernel::eval_bootstrap_kernel(vars, yield_constr);
decode::eval_packed_generic(local_values, yield_constr);
simple_logic::eval_packed(local_values, yield_constr);
}
@@ -92,6 +93,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values = vars.local_values.borrow();
bootstrap_kernel::eval_bootstrap_kernel_circuit(builder, vars, yield_constr);
decode::eval_ext_circuit(builder, local_values, yield_constr);
simple_logic::eval_ext_circuit(builder, local_values, yield_constr);
}


@@ -13,8 +13,8 @@ const BYTES_PER_OFFSET: u8 = 3;
#[derive(PartialEq, Eq, Debug)]
pub struct Kernel {
pub code: Vec<u8>,
global_labels: HashMap<String, usize>,
pub(crate) code: Vec<u8>,
pub(crate) global_labels: HashMap<String, usize>,
}
pub(crate) fn assemble(files: Vec<File>) -> Kernel {


@@ -1,5 +1,5 @@
pub mod aggregator;
mod assembler;
pub mod assembler;
mod ast;
mod opcodes;
mod parser;


@@ -1,5 +1,7 @@
pub(crate) mod bootstrap_kernel;
pub(crate) mod columns;
pub mod cpu_stark;
pub(crate) mod decode;
pub mod kernel;
pub mod public_inputs;
mod simple_logic;


@@ -0,0 +1 @@
pub const NUM_PUBLIC_INPUTS: usize = 0; // PIs will be added later.


@@ -0,0 +1,52 @@
use plonky2::field::types::Field;
use crate::memory::memory_stark::MemoryOp;
use crate::memory::segments::NUM_SEGMENTS;
use crate::memory::VALUE_LIMBS;
#[allow(unused)] // TODO: Should be used soon.
#[derive(Debug)]
pub(crate) struct MemoryState<F: Field> {
/// A log of each memory operation, in the order that it occurred.
pub log: Vec<MemoryOp<F>>,
pub contexts: Vec<MemoryContextState<F>>,
}
impl<F: Field> Default for MemoryState<F> {
fn default() -> Self {
Self {
log: vec![],
// We start with an initial context for the kernel.
contexts: vec![MemoryContextState::default()],
}
}
}
#[derive(Default, Debug)]
pub(crate) struct MemoryContextState<F: Field> {
/// The content of each memory segment.
pub segments: [MemorySegmentState<F>; NUM_SEGMENTS],
}
#[derive(Default, Debug)]
pub(crate) struct MemorySegmentState<F: Field> {
pub content: Vec<[F; VALUE_LIMBS]>,
}
impl<F: Field> MemorySegmentState<F> {
pub(super) fn get(&self, virtual_addr: usize) -> [F; VALUE_LIMBS] {
self.content
.get(virtual_addr)
.copied()
.unwrap_or([F::ZERO; VALUE_LIMBS])
}
pub(super) fn set(&mut self, virtual_addr: usize, value: [F; VALUE_LIMBS]) {
if virtual_addr + 1 > self.content.len() {
self.content
.resize(virtual_addr + 1, [F::ZERO; VALUE_LIMBS]);
}
self.content[virtual_addr] = value;
}
}
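As an aside (not from the diff), the get/set pair above gives each segment sparse, zero-default semantics: reads past the current length return all-zero limbs, and writes grow the backing vector just enough to hold the address. The same behavior modeled with plain u8 values instead of [F; VALUE_LIMBS]:

// Illustrative only: zero-default reads and grow-on-write, mirroring MemorySegmentState.
#[derive(Default)]
struct Segment {
    content: Vec<u8>,
}

impl Segment {
    fn get(&self, virtual_addr: usize) -> u8 {
        self.content.get(virtual_addr).copied().unwrap_or(0)
    }

    fn set(&mut self, virtual_addr: usize, value: u8) {
        if virtual_addr >= self.content.len() {
            self.content.resize(virtual_addr + 1, 0);
        }
        self.content[virtual_addr] = value;
    }
}

fn main() {
    let mut seg = Segment::default();
    assert_eq!(seg.get(10), 0); // untouched addresses read as zero
    seg.set(10, 0xAB);
    assert_eq!(seg.get(10), 0xAB);
    assert_eq!(seg.content.len(), 11); // grown just enough to hold address 10
}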

evm/src/generation/mod.rs (new file, +57 lines)

@@ -0,0 +1,57 @@
use plonky2::field::extension::Extendable;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use crate::all_stark::AllStark;
use crate::cpu::bootstrap_kernel::generate_bootstrap_kernel;
use crate::cpu::columns::NUM_CPU_COLUMNS;
use crate::generation::state::GenerationState;
use crate::util::trace_rows_to_poly_values;
mod memory;
pub(crate) mod state;
pub type RlpMerkleProof = Vec<Vec<u8>>;
#[allow(unused)] // TODO: Should be used soon.
pub struct TransactionData {
pub(crate) payload: Vec<u8>,
pub(crate) signature: Vec<u8>,
/// A Merkle proof for each interaction with the state trie, ordered chronologically.
pub(crate) trie_proofs: Vec<RlpMerkleProof>,
}
#[allow(unused)] // TODO: Should be used soon.
fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
all_stark: &AllStark<F, D>,
txns: &[TransactionData],
) -> Vec<Vec<PolynomialValues<F>>> {
let mut state = GenerationState::<F>::default();
generate_bootstrap_kernel::<F>(&mut state);
for txn in txns {
generate_txn(&mut state, txn);
}
let GenerationState {
cpu_rows,
current_cpu_row,
memory,
keccak_inputs,
logic_ops: logic_inputs,
..
} = state;
assert_eq!(current_cpu_row, [F::ZERO; NUM_CPU_COLUMNS]);
let cpu_trace = trace_rows_to_poly_values(cpu_rows);
let keccak_trace = all_stark.keccak_stark.generate_trace(keccak_inputs);
let logic_trace = all_stark.logic_stark.generate_trace(logic_inputs);
let memory_trace = all_stark.memory_stark.generate_trace(memory.log);
vec![cpu_trace, keccak_trace, logic_trace, memory_trace]
}
fn generate_txn<F: Field>(_state: &mut GenerationState<F>, _txn: &TransactionData) {
todo!()
}

evm/src/generation/state.rs (new file, +131 lines)

@@ -0,0 +1,131 @@
use ethereum_types::U256;
use plonky2::field::types::Field;
use crate::cpu::columns::NUM_CPU_COLUMNS;
use crate::cpu::kernel::aggregator::combined_kernel;
use crate::cpu::kernel::assembler::Kernel;
use crate::generation::memory::MemoryState;
use crate::logic::Operation;
use crate::memory::memory_stark::MemoryOp;
use crate::{keccak, logic};
#[derive(Debug)]
pub(crate) struct GenerationState<F: Field> {
pub(crate) kernel: Kernel,
pub(crate) cpu_rows: Vec<[F; NUM_CPU_COLUMNS]>,
pub(crate) current_cpu_row: [F; NUM_CPU_COLUMNS],
pub(crate) current_context: usize,
pub(crate) memory: MemoryState<F>,
pub(crate) keccak_inputs: Vec<[u64; keccak::keccak_stark::NUM_INPUTS]>,
pub(crate) logic_ops: Vec<logic::Operation>,
}
impl<F: Field> GenerationState<F> {
/// Compute logical AND, and record the operation to be added in the logic table later.
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn and(&mut self, input0: U256, input1: U256) -> U256 {
let result = input0 & input1;
self.logic_ops.push(Operation {
operator: logic::Op::And,
input0,
input1,
result,
});
result
}
/// Compute logical OR, and record the operation to be added in the logic table later.
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn or(&mut self, input0: U256, input1: U256) -> U256 {
let result = input0 | input1;
self.logic_ops.push(Operation {
operator: logic::Op::Or,
input0,
input1,
result,
});
result
}
/// Compute logical XOR, and record the operation to be added in the logic table later.
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn xor(&mut self, input0: U256, input1: U256) -> U256 {
let result = input0 ^ input1;
self.logic_ops.push(Operation {
operator: logic::Op::Xor,
input0,
input1,
result,
});
result
}
/// Read some memory within the current execution context, and log the operation.
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn get_mem_current(
&mut self,
channel_index: usize,
segment: usize,
virt: usize,
) -> [F; crate::memory::VALUE_LIMBS] {
let timestamp = self.cpu_rows.len();
let context = self.current_context;
let value = self.memory.contexts[context].segments[segment].get(virt);
self.memory.log.push(MemoryOp {
channel_index,
timestamp,
is_read: true,
context,
segment,
virt,
value,
});
value
}
/// Write some memory within the current execution context, and log the operation.
pub(crate) fn set_mem_current(
&mut self,
channel_index: usize,
segment: usize,
virt: usize,
value: [F; crate::memory::VALUE_LIMBS],
) {
let timestamp = self.cpu_rows.len();
let context = self.current_context;
self.memory.log.push(MemoryOp {
channel_index,
timestamp,
is_read: false,
context,
segment,
virt,
value,
});
self.memory.contexts[context].segments[segment].set(virt, value)
}
pub(crate) fn commit_cpu_row(&mut self) {
self.cpu_rows.push(self.current_cpu_row);
self.current_cpu_row = [F::ZERO; NUM_CPU_COLUMNS];
}
}
// `GenerationState` can't `derive(Default)` because `Default` is only implemented for arrays up to
// length 32 :-\.
impl<F: Field> Default for GenerationState<F> {
fn default() -> Self {
Self {
kernel: combined_kernel(),
cpu_rows: vec![],
current_cpu_row: [F::ZERO; NUM_CPU_COLUMNS],
current_context: 0,
memory: MemoryState::default(),
keccak_inputs: vec![],
logic_ops: vec![],
}
}
}
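A quick illustration of the limitation mentioned in the comment above (standalone, names hypothetical): the standard library implements Default for [T; N] only for N <= 32, so a struct holding a longer fixed-size array such as current_cpu_row needs a manual impl, even though array repeat expressions make that impl trivial.

// Illustrative only: #[derive(Default)] on `Wide` would not compile, because
// `[u64; 100]` has no `Default` impl (std stops at length 32).
struct Wide {
    row: [u64; 100],
}

impl Default for Wide {
    fn default() -> Self {
        Self { row: [0; 100] } // repeat expressions work for any length
    }
}

fn main() {
    assert_eq!(Wide::default().row.len(), 100);
}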


@@ -10,6 +10,7 @@ pub mod config;
pub mod constraint_consumer;
pub mod cpu;
pub mod cross_table_lookup;
pub mod generation;
mod get_challenges;
pub mod keccak;
pub mod logic;


@@ -1,15 +1,17 @@
use std::marker::PhantomData;
use ethereum_types::U256;
use itertools::izip;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::stark::Stark;
use crate::util::{limb_from_bits_le, limb_from_bits_le_recursive};
use crate::util::{limb_from_bits_le, limb_from_bits_le_recursive, trace_rows_to_poly_values};
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
// Total number of bits per input/output.
@@ -66,66 +68,57 @@ pub struct LogicStark<F, const D: usize> {
pub f: PhantomData<F>,
}
enum Op {
// The `Zero` op is just for convenience. The all-zero row already satisfies the constraints;
// `Zero` lets us call `generate` on it without crashing.
Zero,
#[derive(Debug)]
pub(crate) enum Op {
And,
Or,
Xor,
}
fn check_op_flags<F: RichField>(lv: &[F; columns::NUM_COLUMNS]) -> Op {
let is_and = lv[columns::IS_AND].to_canonical_u64();
assert!(is_and <= 1);
let is_or = lv[columns::IS_OR].to_canonical_u64();
assert!(is_or <= 1);
let is_xor = lv[columns::IS_XOR].to_canonical_u64();
assert!(is_xor <= 1);
assert!(is_and + is_or + is_xor <= 1);
if is_and == 1 {
Op::And
} else if is_or == 1 {
Op::Or
} else if is_xor == 1 {
Op::Xor
} else {
Op::Zero
}
}
fn check_bits<F: RichField>(lv: &[F; columns::NUM_COLUMNS]) {
for bit_cols in [columns::INPUT0, columns::INPUT1] {
for bit_col in bit_cols {
let bit = lv[bit_col].to_canonical_u64();
assert!(bit <= 1);
}
}
}
fn make_result<F: RichField>(lv: &mut [F; columns::NUM_COLUMNS], op: Op) {
for (res_col, limb_in0_cols, limb_in1_cols) in izip!(
columns::RESULT,
columns::limb_bit_cols_for_input(columns::INPUT0),
columns::limb_bit_cols_for_input(columns::INPUT1),
) {
let limb_in0: u64 = limb_from_bits_le(limb_in0_cols.map(|col| lv[col])).to_canonical_u64();
let limb_in1: u64 = limb_from_bits_le(limb_in1_cols.map(|col| lv[col])).to_canonical_u64();
let res = match op {
Op::Zero => 0,
Op::And => limb_in0 & limb_in1,
Op::Or => limb_in0 | limb_in1,
Op::Xor => limb_in0 ^ limb_in1,
};
lv[res_col] = F::from_canonical_u64(res);
}
#[derive(Debug)]
pub(crate) struct Operation {
pub(crate) operator: Op,
pub(crate) input0: U256,
pub(crate) input1: U256,
pub(crate) result: U256,
}
impl<F: RichField, const D: usize> LogicStark<F, D> {
pub fn generate(&self, lv: &mut [F; columns::NUM_COLUMNS]) {
let op = check_op_flags(lv);
check_bits(lv);
make_result(lv, op);
pub(crate) fn generate_trace(&self, operations: Vec<Operation>) -> Vec<PolynomialValues<F>> {
let len = operations.len();
let padded_len = len.next_power_of_two();
let mut rows = Vec::with_capacity(padded_len);
for op in operations {
rows.push(Self::generate_row(op));
}
// Pad to a power of two.
for _ in len..padded_len {
rows.push([F::ZERO; columns::NUM_COLUMNS]);
}
trace_rows_to_poly_values(rows)
}
fn generate_row(operation: Operation) -> [F; columns::NUM_COLUMNS] {
let mut row = [F::ZERO; columns::NUM_COLUMNS];
match operation.operator {
Op::And => row[columns::IS_AND] = F::ONE,
Op::Or => row[columns::IS_OR] = F::ONE,
Op::Xor => row[columns::IS_XOR] = F::ONE,
}
for (i, col) in columns::INPUT0.enumerate() {
row[col] = F::from_bool(operation.input0.bit(i));
}
for (i, col) in columns::INPUT1.enumerate() {
row[col] = F::from_bool(operation.input1.bit(i));
}
for (i, col) in columns::RESULT.enumerate() {
let bit_range = i * PACKED_LIMB_BITS..(i + 1) * PACKED_LIMB_BITS;
row[col] = limb_from_bits_le(bit_range.map(|j| F::from_bool(operation.result.bit(j))));
}
row
}
}
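One detail of generate_row worth spelling out (as a standalone sketch, not part of the commit): the inputs are stored bit by bit, but RESULT is packed into PACKED_LIMB_BITS-wide limbs, little-endian within each limb, so limb i holds bits i*16 .. (i+1)*16 of the result. The same packing checked with u64 in place of U256:

// Illustrative only: little-endian bit packing into 16-bit limbs, as in generate_row.
const PACKED_LIMB_BITS: usize = 16;

fn limb_from_bits_le(bits: impl Iterator<Item = bool>) -> u64 {
    bits.enumerate().map(|(i, b)| (b as u64) << i).sum()
}

fn main() {
    let value: u64 = 0xDEAD_BEEF_1234_5678;
    for i in 0..4 {
        let bit_range = i * PACKED_LIMB_BITS..(i + 1) * PACKED_LIMB_BITS;
        let limb = limb_from_bits_le(bit_range.map(|j| (value >> j) & 1 == 1));
        // Each packed limb equals the corresponding 16 bits of the original value.
        assert_eq!(limb, (value >> (i * PACKED_LIMB_BITS)) & 0xFFFF);
    }
}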


@@ -45,14 +45,15 @@ pub struct MemoryStark<F, const D: usize> {
pub(crate) f: PhantomData<F>,
}
#[derive(Debug)]
pub struct MemoryOp<F> {
channel_index: usize,
timestamp: F,
is_read: F,
context: F,
segment: F,
virt: F,
value: [F; 8],
pub channel_index: usize,
pub timestamp: usize,
pub is_read: bool,
pub context: usize,
pub segment: usize,
pub virt: usize,
pub value: [F; 8],
}
pub fn generate_random_memory_ops<F: RichField, R: Rng>(
@@ -61,10 +62,9 @@ pub fn generate_random_memory_ops<F: RichField, R: Rng>(
) -> Vec<MemoryOp<F>> {
let mut memory_ops = Vec::new();
let mut current_memory_values: HashMap<(F, F, F), [F; 8]> = HashMap::new();
let mut current_memory_values: HashMap<(usize, usize, usize), [F; 8]> = HashMap::new();
let num_cycles = num_ops / 2;
for i in 0..num_cycles {
let timestamp = F::from_canonical_usize(i);
let mut used_indices = HashSet::new();
let mut new_writes_this_cycle = HashMap::new();
let mut has_read = false;
@@ -81,7 +81,6 @@ pub fn generate_random_memory_ops<F: RichField, R: Rng>(
!has_read && rng.gen()
};
has_read = has_read || is_read;
let is_read_field = F::from_bool(is_read);
let (context, segment, virt, vals) = if is_read {
let written: Vec<_> = current_memory_values.keys().collect();
@@ -94,13 +93,13 @@ pub fn generate_random_memory_ops<F: RichField, R: Rng>(
} else {
// TODO: with taller memory table or more padding (to enable range-checking bigger diffs),
// test larger address values.
let mut context = F::from_canonical_usize(rng.gen_range(0..40));
let mut segment = F::from_canonical_usize(rng.gen_range(0..8));
let mut virt = F::from_canonical_usize(rng.gen_range(0..20));
let mut context = rng.gen_range(0..40);
let mut segment = rng.gen_range(0..8);
let mut virt = rng.gen_range(0..20);
while new_writes_this_cycle.contains_key(&(context, segment, virt)) {
context = F::from_canonical_usize(rng.gen_range(0..40));
segment = F::from_canonical_usize(rng.gen_range(0..8));
virt = F::from_canonical_usize(rng.gen_range(0..20));
context = rng.gen_range(0..40);
segment = rng.gen_range(0..8);
virt = rng.gen_range(0..20);
}
let val: [u32; 8] = rng.gen();
@@ -113,8 +112,8 @@ pub fn generate_random_memory_ops<F: RichField, R: Rng>(
memory_ops.push(MemoryOp {
channel_index,
timestamp,
is_read: is_read_field,
timestamp: i,
is_read,
context,
segment,
virt,
@@ -242,11 +241,11 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
value,
} = memory_ops[i];
trace_cols[is_channel(channel_index)][i] = F::ONE;
trace_cols[TIMESTAMP][i] = timestamp;
trace_cols[IS_READ][i] = is_read;
trace_cols[ADDR_CONTEXT][i] = context;
trace_cols[ADDR_SEGMENT][i] = segment;
trace_cols[ADDR_VIRTUAL][i] = virt;
trace_cols[TIMESTAMP][i] = F::from_canonical_usize(timestamp);
trace_cols[IS_READ][i] = F::from_bool(is_read);
trace_cols[ADDR_CONTEXT][i] = F::from_canonical_usize(context);
trace_cols[ADDR_SEGMENT][i] = F::from_canonical_usize(segment);
trace_cols[ADDR_VIRTUAL][i] = F::from_canonical_usize(virt);
for j in 0..8 {
trace_cols[value_limb(j)][i] = value[j];
}


@@ -1,5 +1,6 @@
pub mod columns;
pub mod memory_stark;
pub mod segments;
pub(crate) const NUM_CHANNELS: usize = 4;
pub(crate) const VALUE_LIMBS: usize = 8;


@@ -0,0 +1,7 @@
pub const CODE: usize = 0;
pub const STACK: usize = 1;
pub const MAIN_MEM: usize = 2;
pub const CALLDATA: usize = 3;
pub const RETURNDATA: usize = 4;
pub const NUM_SEGMENTS: usize = 5;


@@ -284,6 +284,12 @@ pub trait Field:
Self::from_canonical_u64(n as u64)
}
/// Returns `n`. Assumes that `n` is already in canonical form, i.e. `n < Self::order()`.
// TODO: Should probably be unsafe.
fn from_canonical_u8(n: u8) -> Self {
Self::from_canonical_u64(n as u64)
}
/// Returns `n`. Assumes that `n` is already in canonical form, i.e. `n < Self::order()`.
// TODO: Should probably be unsafe.
fn from_canonical_usize(n: usize) -> Self {