Merge branch 'main' into per_table_recursion

# Conflicts:
#	evm/src/proof.rs
wborgeaud 2022-08-26 09:41:00 +02:00
commit 8600a5a46f
47 changed files with 1411 additions and 532 deletions

View File

@ -8,6 +8,9 @@ use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{CrossTableLookup, TableWithColumns};
use crate::keccak::keccak_stark;
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_memory::columns::KECCAK_WIDTH_BYTES;
use crate::keccak_memory::keccak_memory_stark;
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::logic;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
@ -18,6 +21,7 @@ use crate::stark::Stark;
pub struct AllStark<F: RichField + Extendable<D>, const D: usize> {
pub cpu_stark: CpuStark<F, D>,
pub keccak_stark: KeccakStark<F, D>,
pub keccak_memory_stark: KeccakMemoryStark<F, D>,
pub logic_stark: LogicStark<F, D>,
pub memory_stark: MemoryStark<F, D>,
pub cross_table_lookups: Vec<CrossTableLookup<F>>,
@ -28,6 +32,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Default for AllStark<F, D> {
Self {
cpu_stark: CpuStark::default(),
keccak_stark: KeccakStark::default(),
keccak_memory_stark: KeccakMemoryStark::default(),
logic_stark: LogicStark::default(),
memory_stark: MemoryStark::default(),
cross_table_lookups: all_cross_table_lookups(),
@ -40,6 +45,7 @@ impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
let ans = vec![
self.cpu_stark.num_permutation_batches(config),
self.keccak_stark.num_permutation_batches(config),
self.keccak_memory_stark.num_permutation_batches(config),
self.logic_stark.num_permutation_batches(config),
self.memory_stark.num_permutation_batches(config),
];
@ -51,6 +57,7 @@ impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
let ans = vec![
self.cpu_stark.permutation_batch_size(),
self.keccak_stark.permutation_batch_size(),
self.keccak_memory_stark.permutation_batch_size(),
self.logic_stark.permutation_batch_size(),
self.memory_stark.permutation_batch_size(),
];
@ -63,8 +70,9 @@ impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
pub enum Table {
Cpu = 0,
Keccak = 1,
Logic = 2,
Memory = 3,
KeccakMemory = 2,
Logic = 3,
Memory = 4,
}
impl Table {
@ -75,16 +83,22 @@ impl Table {
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn all_cross_table_lookups<F: Field>() -> Vec<CrossTableLookup<F>> {
vec![ctl_keccak(), ctl_logic(), ctl_memory()]
vec![ctl_keccak(), ctl_logic(), ctl_memory(), ctl_keccak_memory()]
}
fn ctl_keccak<F: Field>() -> CrossTableLookup<F> {
let cpu_looking = TableWithColumns::new(
Table::Cpu,
cpu_stark::ctl_data_keccak(),
Some(cpu_stark::ctl_filter_keccak()),
);
let keccak_memory_looking = TableWithColumns::new(
Table::KeccakMemory,
keccak_memory_stark::ctl_looking_keccak(),
Some(keccak_memory_stark::ctl_filter()),
);
CrossTableLookup::new(
vec![TableWithColumns::new(
Table::Cpu,
cpu_stark::ctl_data_keccak(),
Some(cpu_stark::ctl_filter_keccak()),
)],
vec![cpu_looking, keccak_memory_looking],
TableWithColumns::new(
Table::Keccak,
keccak_stark::ctl_data(),
@ -94,6 +108,22 @@ fn ctl_keccak<F: Field>() -> CrossTableLookup<F> {
)
}
fn ctl_keccak_memory<F: Field>() -> CrossTableLookup<F> {
CrossTableLookup::new(
vec![TableWithColumns::new(
Table::Cpu,
cpu_stark::ctl_data_keccak_memory(),
Some(cpu_stark::ctl_filter_keccak_memory()),
)],
TableWithColumns::new(
Table::KeccakMemory,
keccak_memory_stark::ctl_looked_data(),
Some(keccak_memory_stark::ctl_filter()),
),
None,
)
}
fn ctl_logic<F: Field>() -> CrossTableLookup<F> {
CrossTableLookup::new(
vec![TableWithColumns::new(
@ -107,16 +137,33 @@ fn ctl_logic<F: Field>() -> CrossTableLookup<F> {
}
fn ctl_memory<F: Field>() -> CrossTableLookup<F> {
let cpu_memory_ops = (0..NUM_CHANNELS).map(|channel| {
TableWithColumns::new(
Table::Cpu,
cpu_stark::ctl_data_memory(channel),
Some(cpu_stark::ctl_filter_memory(channel)),
)
});
let keccak_memory_reads = (0..KECCAK_WIDTH_BYTES).map(|i| {
TableWithColumns::new(
Table::KeccakMemory,
keccak_memory_stark::ctl_looking_memory(i, true),
Some(keccak_memory_stark::ctl_filter()),
)
});
let keccak_memory_writes = (0..KECCAK_WIDTH_BYTES).map(|i| {
TableWithColumns::new(
Table::KeccakMemory,
keccak_memory_stark::ctl_looking_memory(i, false),
Some(keccak_memory_stark::ctl_filter()),
)
});
let all_lookers = cpu_memory_ops
.chain(keccak_memory_reads)
.chain(keccak_memory_writes)
.collect();
CrossTableLookup::new(
(0..NUM_CHANNELS)
.map(|channel| {
TableWithColumns::new(
Table::Cpu,
cpu_stark::ctl_data_memory(channel),
Some(cpu_stark::ctl_filter_memory(channel)),
)
})
.collect(),
all_lookers,
TableWithColumns::new(
Table::Memory,
memory_stark::ctl_data(),
@ -148,12 +195,13 @@ mod tests {
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cross_table_lookup::testutils::check_ctls;
use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS};
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::logic::{self, LogicStark, Operation};
use crate::memory::memory_stark::tests::generate_random_memory_ops;
use crate::memory::memory_stark::MemoryStark;
use crate::memory::NUM_CHANNELS;
use crate::proof::AllProof;
use crate::prover::prove;
use crate::proof::{AllProof, PublicValues};
use crate::prover::prove_with_traces;
use crate::recursive_verifier::{
add_virtual_all_proof, set_all_proof_target, verify_proof_circuit,
};
@ -177,6 +225,13 @@ mod tests {
keccak_stark.generate_trace(keccak_inputs)
}
fn make_keccak_memory_trace(
keccak_memory_stark: &KeccakMemoryStark<F, D>,
config: &StarkConfig,
) -> Vec<PolynomialValues<F>> {
keccak_memory_stark.generate_trace(vec![], 1 << config.fri_config.cap_height)
}
fn make_logic_trace<R: Rng>(
num_rows: usize,
logic_stark: &LogicStark<F, D>,
@ -205,6 +260,19 @@ mod tests {
(trace, num_ops)
}
fn bits_from_opcode(opcode: u8) -> [F; 8] {
[
F::from_bool(opcode & (1 << 0) != 0),
F::from_bool(opcode & (1 << 1) != 0),
F::from_bool(opcode & (1 << 2) != 0),
F::from_bool(opcode & (1 << 3) != 0),
F::from_bool(opcode & (1 << 4) != 0),
F::from_bool(opcode & (1 << 5) != 0),
F::from_bool(opcode & (1 << 6) != 0),
F::from_bool(opcode & (1 << 7) != 0),
]
}
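// Note: index i of the returned array holds bit i of the opcode, matching the
// little-endian order of `CpuColumnsView::opcode_bits`.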
fn make_cpu_trace(
num_keccak_perms: usize,
num_logic_rows: usize,
@ -263,16 +331,21 @@ mod tests {
[F::ZERO; CpuStark::<F, D>::COLUMNS].into();
row.is_cpu_cycle = F::ONE;
row.is_kernel_mode = F::ONE;
// Since these are the first cycle rows, we must start with PC=route_txn then increment.
row.program_counter = F::from_canonical_usize(KERNEL.global_labels["route_txn"] + i);
row.opcode = [
(logic::columns::IS_AND, 0x16),
(logic::columns::IS_OR, 0x17),
(logic::columns::IS_XOR, 0x18),
]
.into_iter()
.map(|(col, opcode)| logic_trace[col].values[i] * F::from_canonical_u64(opcode))
.sum();
row.opcode_bits = bits_from_opcode(
if logic_trace[logic::columns::IS_AND].values[i] != F::ZERO {
0x16
} else if logic_trace[logic::columns::IS_OR].values[i] != F::ZERO {
0x17
} else if logic_trace[logic::columns::IS_XOR].values[i] != F::ZERO {
0x18
} else {
panic!()
},
);
let logic = row.general.logic_mut();
let input0_bit_cols = logic::columns::limb_bit_cols_for_input(logic::columns::INPUT0);
@ -330,7 +403,7 @@ mod tests {
let last_row: cpu::columns::CpuColumnsView<F> =
cpu_trace_rows[cpu_trace_rows.len() - 1].into();
row.is_cpu_cycle = F::ONE;
row.opcode = F::from_canonical_u8(0x0a); // `EXP` is implemented in software
row.opcode_bits = bits_from_opcode(0x0a); // `EXP` is implemented in software
row.is_kernel_mode = F::ONE;
row.program_counter = last_row.program_counter + F::ONE;
row.general.syscalls_mut().output = [
@ -352,7 +425,7 @@ mod tests {
let mut row: cpu::columns::CpuColumnsView<F> =
[F::ZERO; CpuStark::<F, D>::COLUMNS].into();
row.is_cpu_cycle = F::ONE;
row.opcode = F::from_canonical_u8(0xf9);
row.opcode_bits = bits_from_opcode(0xf9);
row.is_kernel_mode = F::ONE;
row.program_counter = F::from_canonical_usize(KERNEL.global_labels["sys_exp"]);
row.general.jumps_mut().input0 = [
@ -374,7 +447,7 @@ mod tests {
let mut row: cpu::columns::CpuColumnsView<F> =
[F::ZERO; CpuStark::<F, D>::COLUMNS].into();
row.is_cpu_cycle = F::ONE;
row.opcode = F::from_canonical_u8(0x56);
row.opcode_bits = bits_from_opcode(0x56);
row.is_kernel_mode = F::ONE;
row.program_counter = F::from_canonical_u16(15682);
row.general.jumps_mut().input0 = [
@ -411,7 +484,7 @@ mod tests {
let mut row: cpu::columns::CpuColumnsView<F> =
[F::ZERO; CpuStark::<F, D>::COLUMNS].into();
row.is_cpu_cycle = F::ONE;
row.opcode = F::from_canonical_u8(0xf9);
row.opcode_bits = bits_from_opcode(0xf9);
row.is_kernel_mode = F::ONE;
row.program_counter = F::from_canonical_u16(15106);
row.general.jumps_mut().input0 = [
@ -433,7 +506,7 @@ mod tests {
let mut row: cpu::columns::CpuColumnsView<F> =
[F::ZERO; CpuStark::<F, D>::COLUMNS].into();
row.is_cpu_cycle = F::ONE;
row.opcode = F::from_canonical_u8(0x56);
row.opcode_bits = bits_from_opcode(0x56);
row.is_kernel_mode = F::ZERO;
row.program_counter = F::from_canonical_u16(63064);
row.general.jumps_mut().input0 = [
@ -471,7 +544,7 @@ mod tests {
let mut row: cpu::columns::CpuColumnsView<F> =
[F::ZERO; CpuStark::<F, D>::COLUMNS].into();
row.is_cpu_cycle = F::ONE;
row.opcode = F::from_canonical_u8(0x57);
row.opcode_bits = bits_from_opcode(0x57);
row.is_kernel_mode = F::ZERO;
row.program_counter = F::from_canonical_u16(3754);
row.general.jumps_mut().input0 = [
@ -509,7 +582,7 @@ mod tests {
let mut row: cpu::columns::CpuColumnsView<F> =
[F::ZERO; CpuStark::<F, D>::COLUMNS].into();
row.is_cpu_cycle = F::ONE;
row.opcode = F::from_canonical_u8(0x57);
row.opcode_bits = bits_from_opcode(0x57);
row.is_kernel_mode = F::ZERO;
row.program_counter = F::from_canonical_u16(37543);
row.general.jumps_mut().input0 = [
@ -538,7 +611,7 @@ mod tests {
let last_row: cpu::columns::CpuColumnsView<F> =
cpu_trace_rows[cpu_trace_rows.len() - 1].into();
row.is_cpu_cycle = F::ONE;
row.opcode = F::from_canonical_u8(0x56);
row.opcode_bits = bits_from_opcode(0x56);
row.is_kernel_mode = F::ZERO;
row.program_counter = last_row.program_counter + F::ONE;
row.general.jumps_mut().input0 = [
@ -575,7 +648,7 @@ mod tests {
for i in 0..cpu_trace_rows.len().next_power_of_two() - cpu_trace_rows.len() {
let mut row: cpu::columns::CpuColumnsView<F> =
[F::ZERO; CpuStark::<F, D>::COLUMNS].into();
row.opcode = F::from_canonical_u8(0xff);
row.opcode_bits = bits_from_opcode(0xff);
row.is_cpu_cycle = F::ONE;
row.is_kernel_mode = F::ONE;
row.program_counter =
@ -607,6 +680,7 @@ mod tests {
let num_keccak_perms = 2;
let keccak_trace = make_keccak_trace(num_keccak_perms, &all_stark.keccak_stark, &mut rng);
let keccak_memory_trace = make_keccak_memory_trace(&all_stark.keccak_memory_stark, config);
let logic_trace = make_logic_trace(num_logic_rows, &all_stark.logic_stark, &mut rng);
let mem_trace = make_memory_trace(num_memory_ops, &all_stark.memory_stark, &mut rng);
let mut memory_trace = mem_trace.0;
@ -621,14 +695,21 @@ mod tests {
&mut memory_trace,
);
let traces = vec![cpu_trace, keccak_trace, logic_trace, memory_trace];
let traces = vec![
cpu_trace,
keccak_trace,
keccak_memory_trace,
logic_trace,
memory_trace,
];
check_ctls(&traces, &all_stark.cross_table_lookups);
let proof = prove::<F, C, D>(
let public_values = PublicValues::default();
let proof = prove_with_traces::<F, C, D>(
&all_stark,
config,
traces,
vec![vec![]; 4],
public_values,
&mut TimingTree::default(),
)?;

View File

@ -9,6 +9,21 @@ use crate::arithmetic::columns::*;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::range_check_error;
pub(crate) fn u256_add_cc(input0: [u64; N_LIMBS], input1: [u64; N_LIMBS]) -> ([u64; N_LIMBS], u64) {
// Input and output have 16-bit limbs
let mut output = [0u64; N_LIMBS];
const MASK: u64 = (1u64 << LIMB_BITS) - 1u64;
let mut cy = 0u64;
for (i, a, b) in izip!(0.., input0, input1) {
let s = a + b + cy;
cy = s >> LIMB_BITS;
assert!(cy <= 1u64, "input limbs were larger than 16 bits");
output[i] = s & MASK;
}
(output, cy)
}
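// A minimal sanity sketch of `u256_add_cc` (hypothetical `add_cc_sketch` test
// module): all-ones plus one wraps to zero, with a carry-out of 1.
#[cfg(test)]
mod add_cc_sketch {
    use super::*;
    use crate::arithmetic::columns::N_LIMBS;

    #[test]
    fn wraps_with_carry() {
        // 2^256 - 1, as N_LIMBS = 16 limbs of 16 bits each.
        let ones = [0xFFFFu64; N_LIMBS];
        let mut one = [0u64; N_LIMBS];
        one[0] = 1;
        // (2^256 - 1) + 1 wraps to 0 mod 2^256 and carries out of the top limb.
        let (sum, cy) = u256_add_cc(ones, one);
        assert_eq!(sum, [0u64; N_LIMBS]);
        assert_eq!(cy, 1);
    }
}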
/// Given two sequences `larger` and `smaller` of equal length (not
/// checked), verifies that \sum_i larger[i] 2^(LIMB_BITS * i) ==
/// \sum_i smaller[i] 2^(LIMB_BITS * i), taking care of carry propagation.
/// Returns the final carry.
@ -19,7 +34,8 @@ pub(crate) fn eval_packed_generic_are_equal<P, I, J>(
is_op: P,
larger: I,
smaller: J,
) where
) -> P
where
P: PackedField,
I: Iterator<Item = P>,
J: Iterator<Item = P>,
@ -36,6 +52,7 @@ pub(crate) fn eval_packed_generic_are_equal<P, I, J>(
// increase the degree of the constraint.
cy = t * overflow_inv;
}
cy
}
pub(crate) fn eval_ext_circuit_are_equal<F, const D: usize, I, J>(
@ -44,7 +61,8 @@ pub(crate) fn eval_ext_circuit_are_equal<F, const D: usize, I, J>(
is_op: ExtensionTarget<D>,
larger: I,
smaller: J,
) where
) -> ExtensionTarget<D>
where
F: RichField + Extendable<D>,
I: Iterator<Item = ExtensionTarget<D>>,
J: Iterator<Item = ExtensionTarget<D>>,
@ -72,6 +90,7 @@ pub(crate) fn eval_ext_circuit_are_equal<F, const D: usize, I, J>(
cy = builder.mul_const_extension(overflow_inv, t);
}
cy
}
pub fn generate<F: RichField>(lv: &mut [F; NUM_ARITH_COLUMNS]) {
@ -79,17 +98,7 @@ pub fn generate<F: RichField>(lv: &mut [F; NUM_ARITH_COLUMNS]) {
let input1_limbs = ADD_INPUT_1.map(|c| lv[c].to_canonical_u64());
// Input and output have 16-bit limbs
let mut output_limbs = [0u64; N_LIMBS];
const MASK: u64 = (1u64 << LIMB_BITS) - 1u64;
let mut cy = 0u64;
for (i, a, b) in izip!(0.., input0_limbs, input1_limbs) {
let s = a + b + cy;
cy = s >> LIMB_BITS;
assert!(cy <= 1u64, "input limbs were larger than 16 bits");
output_limbs[i] = s & MASK;
}
// last carry is dropped because this is addition modulo 2^256.
let (output_limbs, _) = u256_add_cc(input0_limbs, input1_limbs);
for (&c, output_limb) in ADD_OUTPUT.iter().zip(output_limbs) {
lv[c] = F::from_canonical_u64(output_limb);

View File

@ -8,6 +8,7 @@ use plonky2::hash::hash_types::RichField;
use crate::arithmetic::add;
use crate::arithmetic::columns;
use crate::arithmetic::compare;
use crate::arithmetic::mul;
use crate::arithmetic::sub;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
@ -45,6 +46,10 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
sub::generate(local_values);
} else if local_values[columns::IS_MUL].is_one() {
mul::generate(local_values);
} else if local_values[columns::IS_LT].is_one() {
compare::generate(local_values, columns::IS_LT);
} else if local_values[columns::IS_GT].is_one() {
compare::generate(local_values, columns::IS_GT);
} else {
todo!("the requested operation has not yet been implemented");
}
@ -53,11 +58,10 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticStark<F, D> {
const COLUMNS: usize = columns::NUM_ARITH_COLUMNS;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -67,18 +71,20 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
add::eval_packed_generic(lv, yield_constr);
sub::eval_packed_generic(lv, yield_constr);
mul::eval_packed_generic(lv, yield_constr);
compare::eval_packed_generic(lv, yield_constr);
}
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let lv = vars.local_values;
add::eval_ext_circuit(builder, lv, yield_constr);
sub::eval_ext_circuit(builder, lv, yield_constr);
mul::eval_ext_circuit(builder, lv, yield_constr);
compare::eval_ext_circuit(builder, lv, yield_constr);
}
fn constraint_degree(&self) -> usize {

View File

@ -79,4 +79,9 @@ pub(crate) const MUL_INPUT_1: [usize; N_LIMBS] = GENERAL_INPUT_1;
pub(crate) const MUL_OUTPUT: [usize; N_LIMBS] = GENERAL_INPUT_2;
pub(crate) const MUL_AUX_INPUT: [usize; N_LIMBS] = AUX_INPUT_0;
pub(crate) const CMP_INPUT_0: [usize; N_LIMBS] = GENERAL_INPUT_0;
pub(crate) const CMP_INPUT_1: [usize; N_LIMBS] = GENERAL_INPUT_1;
pub(crate) const CMP_OUTPUT: usize = GENERAL_INPUT_2[0];
pub(crate) const CMP_AUX_INPUT: [usize; N_LIMBS] = AUX_INPUT_0;
pub const NUM_ARITH_COLUMNS: usize = START_SHARED_COLS + NUM_SHARED_COLS;

View File

@ -0,0 +1,219 @@
//! Support for EVM LT and GT instructions
//!
//! This module verifies EVM LT and GT instructions (i.e. for unsigned
//! inputs). The difference between LT and GT is of course just a
//! matter of the order of the inputs. The verification is essentially
//! identical to the SUB instruction: For both SUB and LT we have values
//!
//! - `input0`
//! - `input1`
//! - `difference` (mod 2^256)
//! - `borrow` (= 0 or 1)
//!
//! satisfying `input0 - input1 = difference - borrow * 2^256`. Where
//! SUB verifies `difference` and ignores `borrow`, LT verifies
//! `borrow` (and uses `difference` as an auxiliary input).
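//!
//! For example, if `input0 = 3` and `input1 = 5`, then `difference = 2^256 - 2`
//! and `borrow = 1`: indeed `3 - 5 = (2^256 - 2) - 1 * 2^256 = -2`, so LT
//! reports `borrow = 1`, i.e. `input0 < input1`.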
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use crate::arithmetic::add::{eval_ext_circuit_are_equal, eval_packed_generic_are_equal};
use crate::arithmetic::columns::*;
use crate::arithmetic::sub::u256_sub_br;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::range_check_error;
pub(crate) fn generate<F: RichField>(lv: &mut [F; NUM_ARITH_COLUMNS], op: usize) {
let input0 = CMP_INPUT_0.map(|c| lv[c].to_canonical_u64());
let input1 = CMP_INPUT_1.map(|c| lv[c].to_canonical_u64());
let (diff, br) = match op {
// input0 - input1 == diff - br*2^256
IS_LT => u256_sub_br(input0, input1),
// input1 - input0 == diff - br*2^256
IS_GT => u256_sub_br(input1, input0),
IS_SLT => todo!(),
IS_SGT => todo!(),
_ => panic!("op code not a comparison"),
};
for (&c, diff_limb) in CMP_AUX_INPUT.iter().zip(diff) {
lv[c] = F::from_canonical_u64(diff_limb);
}
lv[CMP_OUTPUT] = F::from_canonical_u64(br);
}
pub(crate) fn eval_packed_generic_lt<P: PackedField>(
yield_constr: &mut ConstraintConsumer<P>,
is_op: P,
input0: [P; N_LIMBS],
input1: [P; N_LIMBS],
aux: [P; N_LIMBS],
output: P,
) {
// Verify (input0 < input1) == output by providing aux such that
// input0 - input1 == aux - output*2^256.
let lhs_limbs = input0.iter().zip(input1).map(|(&a, b)| a - b);
let cy = eval_packed_generic_are_equal(yield_constr, is_op, aux.into_iter(), lhs_limbs);
// We don't need to check that cy is 0 or 1, since output has
// already been checked to be 0 or 1.
yield_constr.constraint(is_op * (cy - output));
}
pub fn eval_packed_generic<P: PackedField>(
lv: &[P; NUM_ARITH_COLUMNS],
yield_constr: &mut ConstraintConsumer<P>,
) {
range_check_error!(CMP_INPUT_0, 16);
range_check_error!(CMP_INPUT_1, 16);
range_check_error!(CMP_AUX_INPUT, 16);
range_check_error!([CMP_OUTPUT], 1);
let input0 = CMP_INPUT_0.map(|c| lv[c]);
let input1 = CMP_INPUT_1.map(|c| lv[c]);
let aux = CMP_AUX_INPUT.map(|c| lv[c]);
let output = lv[CMP_OUTPUT];
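// Both comparisons share the same input, aux and output columns; this relies
// on at most one of IS_LT and IS_GT being set on any given row, which the
// trace generator guarantees.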
eval_packed_generic_lt(yield_constr, lv[IS_LT], input0, input1, aux, output);
eval_packed_generic_lt(yield_constr, lv[IS_GT], input1, input0, aux, output);
}
#[allow(clippy::needless_collect)]
pub(crate) fn eval_ext_circuit_lt<F: RichField + Extendable<D>, const D: usize>(
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
is_op: ExtensionTarget<D>,
input0: [ExtensionTarget<D>; N_LIMBS],
input1: [ExtensionTarget<D>; N_LIMBS],
aux: [ExtensionTarget<D>; N_LIMBS],
output: ExtensionTarget<D>,
) {
// Since `map` is lazy and the closure passed to it borrows
// `builder`, we can't then borrow builder again below in the call
// to `eval_ext_circuit_are_equal`. The solution is to force
// evaluation with `collect`.
let lhs_limbs = input0
.iter()
.zip(input1)
.map(|(&a, b)| builder.sub_extension(a, b))
.collect::<Vec<ExtensionTarget<D>>>();
let cy = eval_ext_circuit_are_equal(
builder,
yield_constr,
is_op,
aux.into_iter(),
lhs_limbs.into_iter(),
);
let good_output = builder.sub_extension(cy, output);
let filter = builder.mul_extension(is_op, good_output);
yield_constr.constraint(builder, filter);
}
pub fn eval_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS],
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let input0 = CMP_INPUT_0.map(|c| lv[c]);
let input1 = CMP_INPUT_1.map(|c| lv[c]);
let aux = CMP_AUX_INPUT.map(|c| lv[c]);
let output = lv[CMP_OUTPUT];
eval_ext_circuit_lt(
builder,
yield_constr,
lv[IS_LT],
input0,
input1,
aux,
output,
);
eval_ext_circuit_lt(
builder,
yield_constr,
lv[IS_GT],
input1,
input0,
aux,
output,
);
}
#[cfg(test)]
mod tests {
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use super::*;
use crate::arithmetic::columns::NUM_ARITH_COLUMNS;
use crate::constraint_consumer::ConstraintConsumer;
// TODO: Should be able to refactor this test to apply to all operations.
#[test]
fn generate_eval_consistency_not_compare() {
type F = GoldilocksField;
let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25);
let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng));
// if `IS_LT == 0`, then the constraints should be met even if
// all values are garbage. `eval_packed_generic` handles IS_GT
// at the same time, so we check both at once.
lv[IS_LT] = F::ZERO;
lv[IS_GT] = F::ZERO;
let mut constraint_consumer = ConstraintConsumer::new(
vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
F::ONE,
F::ONE,
F::ONE,
);
eval_packed_generic(&lv, &mut constraint_consumer);
for &acc in &constraint_consumer.constraint_accs {
assert_eq!(acc, F::ZERO);
}
}
#[test]
fn generate_eval_consistency_compare() {
type F = GoldilocksField;
let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25);
let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng));
const N_ITERS: usize = 1000;
for _ in 0..N_ITERS {
for (op, other_op) in [(IS_LT, IS_GT), (IS_GT, IS_LT)] {
// Set op == 1 and ensure all constraints are satisfied.
// We have to explicitly set the other op to zero, since
// `eval_packed_generic` checks both ops at once.
lv[op] = F::ONE;
lv[other_op] = F::ZERO;
// set inputs to random values
for (&ai, bi) in CMP_INPUT_0.iter().zip(CMP_INPUT_1) {
lv[ai] = F::from_canonical_u16(rng.gen());
lv[bi] = F::from_canonical_u16(rng.gen());
}
generate(&mut lv, op);
let mut constraint_consumer = ConstraintConsumer::new(
vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
F::ONE,
F::ONE,
F::ONE,
);
eval_packed_generic(&lv, &mut constraint_consumer);
for &acc in &constraint_consumer.constraint_accs {
assert_eq!(acc, F::ZERO);
}
}
}
}
}

View File

@ -1,4 +1,5 @@
mod add;
mod compare;
mod mul;
mod sub;
mod utils;

View File

@ -9,26 +9,29 @@ use crate::arithmetic::columns::*;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::range_check_error;
pub fn generate<F: RichField>(lv: &mut [F; NUM_ARITH_COLUMNS]) {
let input0_limbs = SUB_INPUT_0.map(|c| lv[c].to_canonical_u64());
let input1_limbs = SUB_INPUT_1.map(|c| lv[c].to_canonical_u64());
// Input and output have 16-bit limbs
let mut output_limbs = [0u64; N_LIMBS];
pub(crate) fn u256_sub_br(input0: [u64; N_LIMBS], input1: [u64; N_LIMBS]) -> ([u64; N_LIMBS], u64) {
const LIMB_BOUNDARY: u64 = 1 << LIMB_BITS;
const MASK: u64 = LIMB_BOUNDARY - 1u64;
let mut output = [0u64; N_LIMBS];
let mut br = 0u64;
for (i, a, b) in izip!(0.., input0_limbs, input1_limbs) {
for (i, a, b) in izip!(0.., input0, input1) {
let d = LIMB_BOUNDARY + a - b - br;
// if a < b, then d < 2^16 so br = 1
// if a >= b, then d >= 2^16 so br = 0
br = 1u64 - (d >> LIMB_BITS);
assert!(br <= 1u64, "input limbs were larger than 16 bits");
output_limbs[i] = d & MASK;
output[i] = d & MASK;
}
(output, br)
}
pub fn generate<F: RichField>(lv: &mut [F; NUM_ARITH_COLUMNS]) {
let input0_limbs = SUB_INPUT_0.map(|c| lv[c].to_canonical_u64());
let input1_limbs = SUB_INPUT_1.map(|c| lv[c].to_canonical_u64());
// last borrow is dropped because this is subtraction modulo 2^256.
let (output_limbs, _) = u256_sub_br(input0_limbs, input1_limbs);
for (&c, output_limb) in SUB_OUTPUT.iter().zip(output_limbs) {
lv[c] = F::from_canonical_u64(output_limb);

View File

@ -19,4 +19,7 @@ macro_rules! range_check_error {
($cols:ident, $rc_bits:expr) => {
$crate::arithmetic::utils::_range_check_error::<$rc_bits>(file!(), line!(), &$cols);
};
([$cols:ident], $rc_bits:expr) => {
$crate::arithmetic::utils::_range_check_error::<$rc_bits>(file!(), line!(), &[$cols]);
};
}

View File

@ -15,7 +15,6 @@ use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer
use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS};
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::keccak_util::keccakf_u32s;
use crate::cpu::public_inputs::NUM_PUBLIC_INPUTS;
use crate::generation::state::GenerationState;
use crate::memory::segments::Segment;
use crate::memory::NUM_CHANNELS;
@ -50,7 +49,7 @@ pub(crate) fn generate_bootstrap_kernel<F: Field>(state: &mut GenerationState<F>
let mut packed_bytes: u32 = 0;
for (addr, byte) in chunk {
let channel = addr % NUM_CHANNELS;
state.set_mem_current(channel, Segment::Code, addr, byte.into());
state.set_mem_cpu_current(channel, Segment::Code, addr, byte.into());
packed_bytes = (packed_bytes << 8) | byte as u32;
}
@ -73,7 +72,7 @@ pub(crate) fn generate_bootstrap_kernel<F: Field>(state: &mut GenerationState<F>
}
pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_CPU_COLUMNS, NUM_PUBLIC_INPUTS>,
vars: StarkEvaluationVars<F, P, NUM_CPU_COLUMNS>,
yield_constr: &mut ConstraintConsumer<P>,
) {
let local_values: &CpuColumnsView<_> = vars.local_values.borrow();
@ -109,7 +108,7 @@ pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
pub(crate) fn eval_bootstrap_kernel_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_CPU_COLUMNS, NUM_PUBLIC_INPUTS>,
vars: StarkEvaluationTargets<D, NUM_CPU_COLUMNS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values: &CpuColumnsView<_> = vars.local_values.borrow();

View File

@ -27,9 +27,6 @@ pub struct CpuColumnsView<T: Copy> {
/// If CPU cycle: We're in kernel (privileged) mode.
pub is_kernel_mode: T,
/// If CPU cycle: The opcode being decoded, in {0, ..., 255}.
pub opcode: T,
// If CPU cycle: flags for EVM instructions. PUSHn, DUPn, and SWAPn only get one flag each.
// Invalid opcodes are split between a number of flags for practical reasons. Exactly one of
// these flags must be 1.
@ -153,9 +150,12 @@ pub struct CpuColumnsView<T: Copy> {
/// If CPU cycle: the opcode, broken up into bits in little-endian order.
pub opcode_bits: [T; 8],
/// Filter. 1 iff a Keccak permutation is computed on this row.
/// Filter. 1 iff a Keccak lookup is performed on this row.
pub is_keccak: T,
/// Filter. 1 iff a Keccak memory lookup is performed on this row.
pub is_keccak_memory: T,
pub(crate) general: CpuGeneralColumnsView<T>,
pub(crate) clock: T,

View File

@ -22,10 +22,30 @@ pub fn ctl_data_keccak<F: Field>() -> Vec<Column<F>> {
res
}
pub fn ctl_data_keccak_memory<F: Field>() -> Vec<Column<F>> {
// When executing KECCAK_GENERAL, the memory channels are used as follows:
// channel 0: instruction
// channel 1: stack[-1] = context
// channel 2: stack[-2] = segment
// channel 3: stack[-3] = virtual
let context = Column::single(COL_MAP.mem_value[1][0]);
let segment = Column::single(COL_MAP.mem_value[2][0]);
let virt = Column::single(COL_MAP.mem_value[3][0]);
let num_channels = F::from_canonical_usize(NUM_CHANNELS);
let clock = Column::linear_combination([(COL_MAP.clock, num_channels)]);
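// Scaling the clock by NUM_CHANNELS matches the memory timestamp convention
// used in `ctl_data_memory` (timestamp = clock * NUM_CHANNELS + channel); the
// Keccak-memory reads use channel offset 0 within the cycle.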
vec![context, segment, virt, clock]
}
pub fn ctl_filter_keccak<F: Field>() -> Column<F> {
Column::single(COL_MAP.is_keccak)
}
pub fn ctl_filter_keccak_memory<F: Field>() -> Column<F> {
Column::single(COL_MAP.is_keccak_memory)
}
pub fn ctl_data_logic<F: Field>() -> Vec<Column<F>> {
let mut res = Column::singles([COL_MAP.is_and, COL_MAP.is_or, COL_MAP.is_xor]).collect_vec();
let logic = COL_MAP.general.logic();
@ -53,7 +73,7 @@ pub fn ctl_data_memory<F: Field>(channel: usize) -> Vec<Column<F>> {
let scalar = F::from_canonical_usize(NUM_CHANNELS);
let addend = F::from_canonical_usize(channel);
cols.push(Column::linear_combination_with_constant(
vec![(COL_MAP.clock, scalar)],
[(COL_MAP.clock, scalar)],
addend,
));
@ -79,11 +99,10 @@ impl<F: RichField, const D: usize> CpuStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
const COLUMNS: usize = NUM_CPU_COLUMNS;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -102,7 +121,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values = vars.local_values.borrow();

View File

@ -1,6 +1,5 @@
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
@ -158,13 +157,16 @@ pub fn generate<F: RichField>(lv: &mut CpuColumnsView<F>) {
// This assert is not _strictly_ necessary, but I include it as a sanity check.
assert_eq!(cycle_filter, F::ONE, "cycle_filter should be 0 or 1");
let opcode = lv.opcode.to_canonical_u64();
assert!(opcode < 256, "opcode should be in {{0, ..., 255}}");
let opcode = opcode as u8;
for (i, bit) in lv.opcode_bits.iter_mut().enumerate() {
*bit = F::from_bool(opcode & (1 << i) != 0);
// Validate all opcode bits.
for bit in lv.opcode_bits.into_iter() {
assert!(bit.to_canonical_u64() <= 1);
}
let opcode = lv
.opcode_bits
.into_iter()
.enumerate()
.map(|(i, bit)| bit.to_canonical_u64() << i)
.sum::<u64>() as u8;
let top_bits: [u8; 9] = [
0,
@ -217,23 +219,10 @@ pub fn eval_packed_generic<P: PackedField>(
let kernel_mode = lv.is_kernel_mode;
yield_constr.constraint(cycle_filter * kernel_mode * (kernel_mode - P::ONES));
// Ensure that the opcode bits are valid: each has to be either 0 or 1, and they must match
// the opcode. Note that this also implicitly range-checks the opcode.
let bits = lv.opcode_bits;
// First check that the bits are either 0 or 1.
for bit in bits {
// Ensure that the opcode bits are valid: each has to be either 0 or 1.
for bit in lv.opcode_bits {
yield_constr.constraint(cycle_filter * bit * (bit - P::ONES));
}
// Now check that they match the opcode.
{
let opcode = lv.opcode;
let reconstructed_opcode: P = bits
.into_iter()
.enumerate()
.map(|(i, bit)| bit * P::Scalar::from_canonical_u64(1 << i))
.sum();
yield_constr.constraint(cycle_filter * (opcode - reconstructed_opcode));
}
// Check that the instruction flags are valid.
// First, check that they are all either 0 or 1.
@ -258,7 +247,8 @@ pub fn eval_packed_generic<P: PackedField>(
Kernel => P::ONES - kernel_mode,
};
// 0 if all the opcode bits match, and something in {1, ..., 8}, otherwise.
let opcode_mismatch: P = bits
let opcode_mismatch: P = lv
.opcode_bits
.into_iter()
.zip(bits_from_opcode(oc))
.rev()
@ -294,28 +284,12 @@ pub fn eval_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
yield_constr.constraint(builder, constr);
}
// Ensure that the opcode bits are valid: each has to be either 0 or 1, and they must match
// the opcode. Note that this also implicitly range-checks the opcode.
let bits = lv.opcode_bits;
// First check that the bits are either 0 or 1.
for bit in bits {
// Ensure that the opcode bits are valid: each has to be either 0 or 1.
for bit in lv.opcode_bits {
let constr = builder.mul_sub_extension(bit, bit, bit);
let constr = builder.mul_extension(cycle_filter, constr);
yield_constr.constraint(builder, constr);
}
// Now check that they match the opcode.
{
let opcode = lv.opcode;
let reconstructed_opcode =
bits.into_iter()
.enumerate()
.fold(builder.zero_extension(), |cumul, (i, bit)| {
builder.mul_const_add_extension(F::from_canonical_u64(1 << i), bit, cumul)
});
let diff = builder.sub_extension(opcode, reconstructed_opcode);
let constr = builder.mul_extension(cycle_filter, diff);
yield_constr.constraint(builder, constr);
}
// Check that the instruction flags are valid.
// First, check that they are all either 0 or 1.
@ -346,7 +320,8 @@ pub fn eval_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
Kernel => builder.sub_extension(one, kernel_mode),
};
// 0 if all the opcode bits match, and something in {1, ..., 8}, otherwise.
let opcode_mismatch = bits
let opcode_mismatch = lv
.opcode_bits
.into_iter()
.zip(bits_from_opcode(oc))
.rev()

View File

@ -18,13 +18,23 @@ pub(crate) enum GlobalMetadata {
TransactionTrieRoot = 5,
/// A pointer to the root of the receipt trie within the `TrieData` buffer.
ReceiptTrieRoot = 6,
/// The number of storage tries involved in this transaction. I.e. the number of values in
/// The number of storage tries involved in these transactions. I.e. the number of values in
/// `StorageTrieAddresses`, `StorageTriePointers` and `StorageTrieCheckpointPointers`.
NumStorageTries = 7,
// The root digests of each Merkle trie before these transactions.
StateTrieRootDigestBefore = 8,
TransactionsTrieRootDigestBefore = 9,
ReceiptsTrieRootDigestBefore = 10,
// The root digests of each Merkle trie after these transactions.
StateTrieRootDigestAfter = 11,
TransactionsTrieRootDigestAfter = 12,
ReceiptsTrieRootDigestAfter = 13,
}
impl GlobalMetadata {
pub(crate) const COUNT: usize = 8;
pub(crate) const COUNT: usize = 14;
pub(crate) fn all() -> [Self; Self::COUNT] {
[
@ -36,6 +46,12 @@ impl GlobalMetadata {
Self::TransactionTrieRoot,
Self::ReceiptTrieRoot,
Self::NumStorageTries,
Self::StateTrieRootDigestBefore,
Self::TransactionsTrieRootDigestBefore,
Self::ReceiptsTrieRootDigestBefore,
Self::StateTrieRootDigestAfter,
Self::TransactionsTrieRootDigestAfter,
Self::ReceiptsTrieRootDigestAfter,
]
}
@ -50,6 +66,20 @@ impl GlobalMetadata {
GlobalMetadata::TransactionTrieRoot => "GLOBAL_METADATA_TXN_TRIE_ROOT",
GlobalMetadata::ReceiptTrieRoot => "GLOBAL_METADATA_RECEIPT_TRIE_ROOT",
GlobalMetadata::NumStorageTries => "GLOBAL_METADATA_NUM_STORAGE_TRIES",
GlobalMetadata::StateTrieRootDigestBefore => "GLOBAL_METADATA_STATE_TRIE_DIGEST_BEFORE",
GlobalMetadata::TransactionsTrieRootDigestBefore => {
"GLOBAL_METADATA_TXNS_TRIE_DIGEST_BEFORE"
}
GlobalMetadata::ReceiptsTrieRootDigestBefore => {
"GLOBAL_METADATA_RECEIPTS_TRIE_DIGEST_BEFORE"
}
GlobalMetadata::StateTrieRootDigestAfter => "GLOBAL_METADATA_STATE_TRIE_DIGEST_AFTER",
GlobalMetadata::TransactionsTrieRootDigestAfter => {
"GLOBAL_METADATA_TXNS_TRIE_DIGEST_AFTER"
}
GlobalMetadata::ReceiptsTrieRootDigestAfter => {
"GLOBAL_METADATA_RECEIPTS_TRIE_DIGEST_AFTER"
}
}
}
}

View File

@ -2,9 +2,9 @@ pub mod aggregator;
pub mod assembler;
mod ast;
mod constants;
mod context_metadata;
pub(crate) mod context_metadata;
mod cost_estimator;
mod global_metadata;
pub(crate) mod global_metadata;
pub(crate) mod keccak_util;
mod opcodes;
mod optimizer;

View File

@ -5,6 +5,5 @@ pub mod cpu_stark;
pub(crate) mod decode;
mod jumps;
pub mod kernel;
pub mod public_inputs;
mod simple_logic;
mod syscalls;

View File

@ -1 +0,0 @@
pub const NUM_PUBLIC_INPUTS: usize = 0; // PIs will be added later.

View File

@ -19,12 +19,12 @@ use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer
use crate::permutation::{
get_grand_product_challenge_set, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::proof::{StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget};
use crate::proof::{StarkProof, StarkProofTarget};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
/// Represents a linear combination of columns.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct Column<F: Field> {
linear_combination: Vec<(usize, F)>,
constant: F,
@ -42,6 +42,17 @@ impl<F: Field> Column<F> {
cs.into_iter().map(Self::single)
}
pub fn constant(constant: F) -> Self {
Self {
linear_combination: vec![],
constant,
}
}
pub fn zero() -> Self {
Self::constant(F::ZERO)
}
pub fn linear_combination_with_constant<I: IntoIterator<Item = (usize, F)>>(
iter: I,
constant: F,
@ -67,6 +78,10 @@ impl<F: Field> Column<F> {
Self::linear_combination(cs.into_iter().zip(F::TWO.powers()))
}
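/// Packs a sequence of columns interpreted as little-endian bytes:
/// `le_bytes([c0, c1, c2, c3])` represents `c0 + 2^8 c1 + 2^16 c2 + 2^24 c3`.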
pub fn le_bytes<I: IntoIterator<Item = usize>>(cs: I) -> Self {
Self::linear_combination(cs.into_iter().zip(F::from_canonical_u16(256).powers()))
}
pub fn sum<I: IntoIterator<Item = usize>>(cs: I) -> Self {
Self::linear_combination(cs.into_iter().zip(repeat(F::ONE)))
}
@ -115,7 +130,7 @@ impl<F: Field> Column<F> {
}
}
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct TableWithColumns<F: Field> {
table: Table,
columns: Vec<Column<F>>,
@ -322,7 +337,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
CtlCheckVars<'a, F, F::Extension, F::Extension, D>
{
pub(crate) fn from_proofs<C: GenericConfig<D, F = F>>(
proofs: &[StarkProofWithPublicInputs<F, C, D>],
proofs: &[StarkProof<F, C, D>],
cross_table_lookups: &'a [CrossTableLookup<F>],
ctl_challenges: &'a GrandProductChallengeSet<F>,
num_permutation_zs: &[usize],
@ -332,7 +347,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
.iter()
.zip(num_permutation_zs)
.map(|(p, &num_perms)| {
let openings = &p.proof.openings;
let openings = &p.openings;
let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms);
let ctl_zs_next = openings.permutation_ctl_zs_next.iter().skip(num_perms);
ctl_zs.zip(ctl_zs_next)
@ -373,7 +388,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
}
pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, C, S, const D: usize, const D2: usize>(
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
consumer: &mut ConstraintConsumer<P>,
) where
@ -426,7 +441,7 @@ pub struct CtlCheckVarsTarget<'a, F: Field, const D: usize> {
impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> {
pub(crate) fn from_proofs(
proofs: &[StarkProofWithPublicInputsTarget<D>],
proofs: &[StarkProofTarget<D>],
cross_table_lookups: &'a [CrossTableLookup<F>],
ctl_challenges: &'a GrandProductChallengeSet<Target>,
num_permutation_zs: &[usize],
@ -436,7 +451,7 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> {
.iter()
.zip(num_permutation_zs)
.map(|(p, &num_perms)| {
let openings = &p.proof.openings;
let openings = &p.openings;
let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms);
let ctl_zs_next = openings.permutation_ctl_zs_next.iter().skip(num_perms);
ctl_zs.zip(ctl_zs_next)
@ -535,7 +550,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
consumer: &mut RecursiveConstraintConsumer<F, D>,
) {
@ -597,17 +612,17 @@ pub(crate) fn verify_cross_table_lookups<
const D: usize,
>(
cross_table_lookups: Vec<CrossTableLookup<F>>,
proofs: &[StarkProofWithPublicInputs<F, C, D>],
proofs: &[StarkProof<F, C, D>],
challenges: GrandProductChallengeSet<F>,
config: &StarkConfig,
) -> Result<()> {
let degrees_bits = proofs
.iter()
.map(|p| p.proof.recover_degree_bits(config))
.map(|p| p.recover_degree_bits(config))
.collect::<Vec<_>>();
let mut ctl_zs_openings = proofs
.iter()
.map(|p| p.proof.openings.ctl_zs_last.iter())
.map(|p| p.openings.ctl_zs_last.iter())
.collect::<Vec<_>>();
for (
i,
@ -655,17 +670,17 @@ pub(crate) fn verify_cross_table_lookups_circuit<
>(
builder: &mut CircuitBuilder<F, D>,
cross_table_lookups: Vec<CrossTableLookup<F>>,
proofs: &[StarkProofWithPublicInputsTarget<D>],
proofs: &[StarkProofTarget<D>],
challenges: GrandProductChallengeSet<Target>,
inner_config: &StarkConfig,
) {
let degrees_bits = proofs
.iter()
.map(|p| p.proof.recover_degree_bits(inner_config))
.map(|p| p.recover_degree_bits(inner_config))
.collect::<Vec<_>>();
let mut ctl_zs_openings = proofs
.iter()
.map(|p| p.proof.openings.ctl_zs_last.iter())
.map(|p| p.openings.ctl_zs_last.iter())
.collect::<Vec<_>>();
for (
i,

View File

@ -1,4 +1,4 @@
use ethereum_types::U256;
use ethereum_types::Address;
use plonky2::field::extension::Extendable;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
@ -7,67 +7,102 @@ use plonky2::hash::hash_types::RichField;
use crate::all_stark::AllStark;
use crate::cpu::bootstrap_kernel::generate_bootstrap_kernel;
use crate::cpu::columns::NUM_CPU_COLUMNS;
use crate::cpu::kernel::global_metadata::GlobalMetadata;
use crate::generation::partial_trie::PartialTrie;
use crate::generation::state::GenerationState;
use crate::memory::segments::Segment;
use crate::memory::NUM_CHANNELS;
use crate::proof::{BlockMetadata, PublicValues, TrieRoots};
use crate::util::trace_rows_to_poly_values;
pub(crate) mod memory;
pub mod partial_trie;
pub(crate) mod state;
/// A piece of data which has been encoded using Recursive Length Prefix (RLP) serialization.
/// See https://ethereum.org/en/developers/docs/data-structures-and-encoding/rlp/
pub type RlpBlob = Vec<u8>;
/// Inputs needed for trace generation.
pub struct GenerationInputs {
pub signed_txns: Vec<Vec<u8>>,
/// Merkle proofs are encoded using an RLP blob for each node in the path.
pub type RlpMerkleProof = Vec<RlpBlob>;
/// A partial version of the state trie prior to these transactions. It should include all nodes
/// that will be accessed by these transactions.
pub state_trie: PartialTrie,
#[allow(unused)] // TODO: Should be used soon.
pub struct TransactionData {
pub signed_txn: Vec<u8>,
/// A partial version of the transaction trie prior to these transactions. It should include all
/// nodes that will be accessed by these transactions.
pub transactions_trie: PartialTrie,
/// A Merkle proof for each interaction with the state trie, ordered chronologically.
pub trie_proofs: Vec<RlpMerkleProof>,
/// A partial version of the receipt trie prior to these transactions. It should include all nodes
/// that will be accessed by these transactions.
pub receipts_trie: PartialTrie,
/// A partial version of each storage trie prior to these transactions. It should include all
/// storage tries, and nodes therein, that will be accessed by these transactions.
pub storage_tries: Vec<(Address, PartialTrie)>,
pub block_metadata: BlockMetadata,
}
#[allow(unused)] // TODO: Should be used soon.
pub fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
pub(crate) fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
all_stark: &AllStark<F, D>,
txns: &[TransactionData],
) -> Vec<Vec<PolynomialValues<F>>> {
inputs: GenerationInputs,
) -> (Vec<Vec<PolynomialValues<F>>>, PublicValues) {
let mut state = GenerationState::<F>::default();
generate_bootstrap_kernel::<F>(&mut state);
for txn in txns {
for txn in &inputs.signed_txns {
generate_txn(&mut state, txn);
}
// TODO: Pad to a power of two, ending in the `halt` kernel function.
let cpu_rows = state.cpu_rows.len();
let mem_end_timestamp = cpu_rows * NUM_CHANNELS;
let mut read_metadata = |field| {
state.get_mem(
0,
Segment::GlobalMetadata,
field as usize,
mem_end_timestamp,
)
};
let trie_roots_before = TrieRoots {
state_root: read_metadata(GlobalMetadata::StateTrieRootDigestBefore),
transactions_root: read_metadata(GlobalMetadata::TransactionsTrieRootDigestBefore),
receipts_root: read_metadata(GlobalMetadata::ReceiptsTrieRootDigestBefore),
};
let trie_roots_after = TrieRoots {
state_root: read_metadata(GlobalMetadata::StateTrieRootDigestAfter),
transactions_root: read_metadata(GlobalMetadata::TransactionsTrieRootDigestAfter),
receipts_root: read_metadata(GlobalMetadata::ReceiptsTrieRootDigestAfter),
};
let GenerationState {
cpu_rows,
current_cpu_row,
memory,
keccak_inputs,
logic_ops,
prover_inputs,
..
} = state;
assert_eq!(current_cpu_row, [F::ZERO; NUM_CPU_COLUMNS].into());
assert_eq!(prover_inputs, vec![], "Not all prover inputs were consumed");
let cpu_trace = trace_rows_to_poly_values(cpu_rows);
let keccak_trace = all_stark.keccak_stark.generate_trace(keccak_inputs);
let logic_trace = all_stark.logic_stark.generate_trace(logic_ops);
let memory_trace = all_stark.memory_stark.generate_trace(memory.log);
vec![cpu_trace, keccak_trace, logic_trace, memory_trace]
let traces = vec![cpu_trace, keccak_trace, logic_trace, memory_trace];
let public_values = PublicValues {
trie_roots_before,
trie_roots_after,
block_metadata: inputs.block_metadata,
};
(traces, public_values)
}
fn generate_txn<F: Field>(state: &mut GenerationState<F>, txn: &TransactionData) {
// TODO: Add transaction RLP to prover_input.
// Supply Merkle trie proofs as prover inputs.
for proof in &txn.trie_proofs {
let proof = proof
.iter()
.flat_map(|node_rlp| node_rlp.iter().map(|byte| U256::from(*byte)));
state.prover_inputs.extend(proof);
}
fn generate_txn<F: Field>(_state: &mut GenerationState<F>, _signed_txn: &[u8]) {
// TODO
}

View File

@ -0,0 +1,32 @@
use ethereum_types::U256;
/// A partial trie, or a sub-trie thereof. This mimics the structure of an Ethereum trie, except
/// with an additional `Hash` node type, representing a node whose data is not needed to process
/// our transaction.
pub enum PartialTrie {
/// An empty trie.
Empty,
/// The digest of a trie whose data does not need to be stored.
Hash(U256),
/// A branch node, which consists of 16 children and an optional value.
Branch {
children: [Box<PartialTrie>; 16],
value: Option<U256>,
},
/// An extension node, which consists of a list of nibbles and a single child.
Extension {
nibbles: Nibbles,
child: Box<PartialTrie>,
},
/// A leaf node, which consists of a list of nibbles and a value.
Leaf { nibbles: Nibbles, value: Vec<u8> },
}
/// A sequence of nibbles.
pub struct Nibbles {
/// The number of nibbles in this sequence.
pub count: usize,
/// A packed encoding of these nibbles. Only the first (least significant) `4 * count` bits are
/// used. The rest are unused and should be zero.
pub packed: U256,
}
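// For example (hypothetical value): a sequence of two nibbles stored in the
// low 8 bits could be `Nibbles { count: 2, packed: 0x21.into() }` under a
// little-endian packing; everything above bit `4 * count - 1` must stay zero.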

View File

@ -2,11 +2,15 @@ use std::mem;
use ethereum_types::U256;
use plonky2::field::types::Field;
use tiny_keccak::keccakf;
use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS};
use crate::generation::memory::MemoryState;
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryOp;
use crate::memory::memory_stark::MemoryOp;
use crate::memory::segments::Segment;
use crate::memory::NUM_CHANNELS;
use crate::util::u256_limbs;
use crate::{keccak, logic};
#[derive(Debug)]
@ -18,10 +22,8 @@ pub(crate) struct GenerationState<F: Field> {
pub(crate) memory: MemoryState,
pub(crate) keccak_inputs: Vec<[u64; keccak::keccak_stark::NUM_INPUTS]>,
pub(crate) keccak_memory_inputs: Vec<KeccakMemoryOp>,
pub(crate) logic_ops: Vec<logic::Operation>,
/// Non-deterministic inputs provided by the prover.
pub(crate) prover_inputs: Vec<U256>,
}
impl<F: Field> GenerationState<F> {
@ -51,17 +53,49 @@ impl<F: Field> GenerationState<F> {
result
}
/// Read some memory within the current execution context, and log the operation.
/// Like `get_mem_cpu`, but reads from the current context specifically.
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn get_mem_current(
pub(crate) fn get_mem_cpu_current(
&mut self,
channel_index: usize,
segment: Segment,
virt: usize,
) -> U256 {
self.current_cpu_row.mem_channel_used[channel_index] = F::ONE;
let timestamp = self.cpu_rows.len();
let context = self.current_context;
self.get_mem_cpu(channel_index, context, segment, virt)
}
/// Simulates the CPU reading some memory through the given channel. Besides logging the memory
/// operation, this also generates the associated registers in the current CPU row.
pub(crate) fn get_mem_cpu(
&mut self,
channel_index: usize,
context: usize,
segment: Segment,
virt: usize,
) -> U256 {
let timestamp = self.cpu_rows.len() * NUM_CHANNELS + channel_index;
let value = self.get_mem(context, segment, virt, timestamp);
self.current_cpu_row.mem_channel_used[channel_index] = F::ONE;
self.current_cpu_row.mem_is_read[channel_index] = F::ONE;
self.current_cpu_row.mem_addr_context[channel_index] = F::from_canonical_usize(context);
self.current_cpu_row.mem_addr_segment[channel_index] =
F::from_canonical_usize(segment as usize);
self.current_cpu_row.mem_addr_virtual[channel_index] = F::from_canonical_usize(virt);
self.current_cpu_row.mem_value[channel_index] = u256_limbs(value);
value
}
/// Read some memory, and log the operation.
pub(crate) fn get_mem(
&mut self,
context: usize,
segment: Segment,
virt: usize,
timestamp: usize,
) -> U256 {
let value = self.memory.contexts[context].segments[segment as usize].get(virt);
self.memory.log.push(MemoryOp {
filter: true,
@ -76,16 +110,47 @@ impl<F: Field> GenerationState<F> {
}
/// Write some memory within the current execution context, and log the operation.
pub(crate) fn set_mem_current(
pub(crate) fn set_mem_cpu_current(
&mut self,
channel_index: usize,
segment: Segment,
virt: usize,
value: U256,
) {
self.current_cpu_row.mem_channel_used[channel_index] = F::ONE;
let timestamp = self.cpu_rows.len();
let context = self.current_context;
self.set_mem_cpu(channel_index, context, segment, virt, value);
}
/// Write some memory, and log the operation.
pub(crate) fn set_mem_cpu(
&mut self,
channel_index: usize,
context: usize,
segment: Segment,
virt: usize,
value: U256,
) {
let timestamp = self.cpu_rows.len() * NUM_CHANNELS + channel_index;
self.set_mem(context, segment, virt, value, timestamp);
self.current_cpu_row.mem_channel_used[channel_index] = F::ONE;
self.current_cpu_row.mem_is_read[channel_index] = F::ZERO; // For clarity; should already be 0.
self.current_cpu_row.mem_addr_context[channel_index] = F::from_canonical_usize(context);
self.current_cpu_row.mem_addr_segment[channel_index] =
F::from_canonical_usize(segment as usize);
self.current_cpu_row.mem_addr_virtual[channel_index] = F::from_canonical_usize(virt);
self.current_cpu_row.mem_value[channel_index] = u256_limbs(value);
}
/// Write some memory, and log the operation.
pub(crate) fn set_mem(
&mut self,
context: usize,
segment: Segment,
virt: usize,
value: U256,
timestamp: usize,
) {
self.memory.log.push(MemoryOp {
filter: true,
timestamp,
@ -98,6 +163,54 @@ impl<F: Field> GenerationState<F> {
self.memory.contexts[context].segments[segment as usize].set(virt, value)
}
/// Evaluate the Keccak-f permutation in-place on some data in memory, and record the operations
/// for the purpose of witness generation.
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn keccak_memory(
&mut self,
context: usize,
segment: Segment,
virt: usize,
) -> [u64; keccak::keccak_stark::NUM_INPUTS] {
let read_timestamp = self.cpu_rows.len() * NUM_CHANNELS;
let _write_timestamp = read_timestamp + 1;
let input = (0..25)
.map(|i| {
let bytes = [0, 1, 2, 3, 4, 5, 6, 7].map(|j| {
let virt = virt + i * 8 + j;
let byte = self.get_mem(context, segment, virt, read_timestamp);
debug_assert!(byte.bits() <= 8);
byte.as_u32() as u8
});
u64::from_le_bytes(bytes)
})
.collect::<Vec<_>>()
.try_into()
.unwrap();
let output = self.keccak(input);
self.keccak_memory_inputs.push(KeccakMemoryOp {
context,
segment,
virt,
read_timestamp,
input,
output,
});
// TODO: Write output to memory.
output
}
/// Evaluate the Keccak-f permutation, and record the operation for the purpose of witness
/// generation.
pub(crate) fn keccak(
&mut self,
mut input: [u64; keccak::keccak_stark::NUM_INPUTS],
) -> [u64; keccak::keccak_stark::NUM_INPUTS] {
self.keccak_inputs.push(input);
keccakf(&mut input);
input
}
pub(crate) fn commit_cpu_row(&mut self) {
let mut swapped_row = [F::ZERO; NUM_CPU_COLUMNS].into();
mem::swap(&mut self.current_cpu_row, &mut swapped_row);
@ -115,8 +228,8 @@ impl<F: Field> Default for GenerationState<F> {
current_context: 0,
memory: MemoryState::default(),
keccak_inputs: vec![],
keccak_memory_inputs: vec![],
logic_ops: vec![],
prover_inputs: vec![],
}
}
}

View File

@ -24,9 +24,11 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
let mut challenger = Challenger::<F, C::Hasher>::new();
for proof in &self.stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
challenger.observe_cap(&proof.trace_cap);
}
// TODO: Observe public values.
let ctl_challenges =
get_grand_product_challenge_set(&mut challenger, config.num_challenges);
@ -58,7 +60,7 @@ impl<const D: usize> AllProofTarget<D> {
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
for proof in &self.stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
challenger.observe_cap(&proof.trace_cap);
}
let ctl_challenges =
@ -85,7 +87,7 @@ impl<const D: usize> AllProofTarget<D> {
}
}
impl<F, C, const D: usize> StarkProofWithPublicInputs<F, C, D>
impl<F, C, const D: usize> StarkProof<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -98,7 +100,7 @@ where
stark_permutation_batch_size: usize,
config: &StarkConfig,
) -> StarkProofChallenges<F, D> {
let degree_bits = self.proof.recover_degree_bits(config);
let degree_bits = self.recover_degree_bits(config);
let StarkProof {
permutation_ctl_zs_cap,
@ -112,7 +114,7 @@ where
..
},
..
} = &self.proof;
} = &self;
let num_challenges = config.num_challenges;
@ -148,7 +150,7 @@ where
}
}
impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
impl<const D: usize> StarkProofTarget<D> {
pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
&self,
builder: &mut CircuitBuilder<F, D>,
@ -172,7 +174,7 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
..
},
..
} = &self.proof;
} = &self;
let num_challenges = config.num_challenges;

View File

@ -32,8 +32,6 @@ pub(crate) const NUM_ROUNDS: usize = 24;
/// Number of 64-bit elements in the Keccak permutation input.
pub(crate) const NUM_INPUTS: usize = 25;
pub(crate) const NUM_PUBLIC_INPUTS: usize = 0;
pub fn ctl_data<F: Field>() -> Vec<Column<F>> {
let mut res: Vec<_> = (0..2 * NUM_INPUTS).map(reg_input_limb).collect();
res.extend(Column::singles((0..2 * NUM_INPUTS).map(reg_output_limb)));
@ -134,9 +132,10 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakStark<F, D> {
}
}
// Populate A'.
// A'[x, y] = xor(A[x, y], D[x])
// = xor(A[x, y], C[x - 1], ROT(C[x + 1], 1))
// Populate A'. To avoid shifting indices, we rewrite
// A'[x, y, z] = xor(A[x, y, z], C[x - 1, z], C[x + 1, z - 1])
// as
// A'[x, y, z] = xor(A[x, y, z], C[x, z], C'[x, z]).
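// (The rewrite is valid because C'[x, z] = xor(C[x, z], C[x - 1, z], C[x + 1, z - 1]);
// xoring with C[x, z] twice cancels, leaving the original theta expression.)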
for x in 0..5 {
for y in 0..5 {
for z in 0..64 {
@ -145,11 +144,8 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakStark<F, D> {
let reg_a_limb = reg_a(x, y) + is_high_limb;
let a_limb = row[reg_a_limb].to_canonical_u64() as u32;
let a_bit = F::from_bool(((a_limb >> bit_in_limb) & 1) != 0);
row[reg_a_prime(x, y, z)] = xor([
a_bit,
row[reg_c((x + 4) % 5, z)],
row[reg_c((x + 1) % 5, (z + 64 - 1) % 64)],
]);
row[reg_a_prime(x, y, z)] =
xor([a_bit, row[reg_c(x, z)], row[reg_c_prime(x, z)]]);
}
}
}
@ -228,11 +224,10 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -380,7 +375,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let two = builder.two();

View File

@ -7,12 +7,12 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::keccak::columns::reg_step;
use crate::keccak::columns::NUM_COLUMNS;
use crate::keccak::keccak_stark::{NUM_PUBLIC_INPUTS, NUM_ROUNDS};
use crate::keccak::keccak_stark::NUM_ROUNDS;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
vars: StarkEvaluationVars<F, P, NUM_COLUMNS>,
yield_constr: &mut ConstraintConsumer<P>,
) {
// Initially, the first step flag should be 1 while the others should be 0.
@ -30,7 +30,7 @@ pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
pub(crate) fn eval_round_flags_recursively<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();

View File

@ -0,0 +1,29 @@
pub(crate) const KECCAK_WIDTH_BYTES: usize = 200;
/// 1 if this row represents a real operation; 0 if it's a padding row.
pub(crate) const COL_IS_REAL: usize = 0;
// The address at which we will read inputs and write outputs.
pub(crate) const COL_CONTEXT: usize = 1;
pub(crate) const COL_SEGMENT: usize = 2;
pub(crate) const COL_VIRTUAL: usize = 3;
/// The timestamp at which inputs should be read from memory.
/// Outputs will be written at the following timestamp.
pub(crate) const COL_READ_TIMESTAMP: usize = 4;
const START_INPUT_LIMBS: usize = 5;
/// A byte of the input.
pub(crate) fn col_input_byte(i: usize) -> usize {
debug_assert!(i < KECCAK_WIDTH_BYTES);
START_INPUT_LIMBS + i
}
const START_OUTPUT_LIMBS: usize = START_INPUT_LIMBS + KECCAK_WIDTH_BYTES;
/// A byte of the output.
pub(crate) fn col_output_byte(i: usize) -> usize {
debug_assert!(i < KECCAK_WIDTH_BYTES);
START_OUTPUT_LIMBS + i
}
pub const NUM_COLUMNS: usize = START_OUTPUT_LIMBS + KECCAK_WIDTH_BYTES;
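The layout this file implies: one filter column, four address/timestamp columns, then the 200 input bytes and 200 output bytes, for NUM_COLUMNS = 5 + 2 * 200 = 405. A compile-time sketch of the same arithmetic (constants renamed for brevity):

// Column layout implied by the constants above:
//   [0]          COL_IS_REAL
//   [1..=4]      context, segment, virtual, read_timestamp
//   [5..205)     input bytes  (KECCAK_WIDTH_BYTES = 200)
//   [205..405)   output bytes
const HEADER_COLS: usize = 5;
const _: () = assert!(HEADER_COLS + 2 * 200 == 405);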

View File

@ -0,0 +1,227 @@
use std::marker::PhantomData;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::Column;
use crate::keccak::keccak_stark::NUM_INPUTS;
use crate::keccak_memory::columns::*;
use crate::memory::segments::Segment;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
Column::singles([COL_CONTEXT, COL_SEGMENT, COL_VIRTUAL, COL_READ_TIMESTAMP]).collect()
}
pub(crate) fn ctl_looking_keccak<F: Field>() -> Vec<Column<F>> {
let input_cols = (0..50).map(|i| {
Column::le_bytes((0..4).map(|j| {
let byte_index = i * 4 + j;
col_input_byte(byte_index)
}))
});
let output_cols = (0..50).map(|i| {
Column::le_bytes((0..4).map(|j| {
let byte_index = i * 4 + j;
col_output_byte(byte_index)
}))
});
input_cols.chain(output_cols).collect()
}
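Column::le_bytes forms each 32-bit limb as a little-endian base-256 combination of four byte columns, so the 200 input bytes become the 50 input limbs that KeccakStark's ctl_data exposes (and likewise for outputs). A sketch of the packing on concrete bytes rather than columns:

// limb = b0 + 2^8 * b1 + 2^16 * b2 + 2^24 * b3 (little-endian base 256).
fn pack_le(bytes: [u8; 4]) -> u32 {
    bytes
        .iter()
        .enumerate()
        .map(|(i, &b)| (b as u32) << (8 * i))
        .sum()
}
// 200 bytes / 4 per limb = 50 limbs, matching 2 * NUM_INPUTS.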
pub(crate) fn ctl_looking_memory<F: Field>(i: usize, is_read: bool) -> Vec<Column<F>> {
let mut res = vec![Column::constant(F::from_bool(is_read))];
res.extend(Column::singles([COL_CONTEXT, COL_SEGMENT, COL_VIRTUAL]));
res.push(Column::single(col_input_byte(i)));
// Since we're reading or writing a single byte, the higher limbs must be zero.
res.extend((1..8).map(|_| Column::zero()));
// Since COL_READ_TIMESTAMP is the read time, we add 1 if this is a write.
let is_write_f = F::from_bool(!is_read);
res.push(Column::linear_combination_with_constant(
[(COL_READ_TIMESTAMP, F::ONE)],
is_write_f,
));
assert_eq!(
res.len(),
crate::memory::memory_stark::ctl_data::<F>().len()
);
res
}
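A looking row thus has 1 + 3 + 1 + 7 + 1 = 13 entries, matching memory_stark::ctl_data: is_read, three address columns, one byte limb, seven zero limbs, and the timestamp. The only non-trivial entry is the last one; a sketch of its semantics on plain integers:

// Reads happen at the stored read_timestamp; writes one tick later.
fn memory_timestamp(read_timestamp: u64, is_read: bool) -> u64 {
    read_timestamp + if is_read { 0 } else { 1 }
}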
/// CTL filter used for both directions (looked and looking).
pub(crate) fn ctl_filter<F: Field>() -> Column<F> {
Column::single(COL_IS_REAL)
}
/// Information about a Keccak memory operation needed for witness generation.
#[derive(Debug)]
pub(crate) struct KeccakMemoryOp {
// The address at which we will read inputs and write outputs.
pub(crate) context: usize,
pub(crate) segment: Segment,
pub(crate) virt: usize,
/// The timestamp at which inputs should be read from memory.
/// Outputs will be written at the following timestamp.
pub(crate) read_timestamp: usize,
/// The input that was read at that address.
pub(crate) input: [u64; NUM_INPUTS],
pub(crate) output: [u64; NUM_INPUTS],
}
#[derive(Copy, Clone, Default)]
pub struct KeccakMemoryStark<F, const D: usize> {
pub(crate) f: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> KeccakMemoryStark<F, D> {
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn generate_trace(
&self,
operations: Vec<KeccakMemoryOp>,
min_rows: usize,
) -> Vec<PolynomialValues<F>> {
let mut timing = TimingTree::new("generate trace", log::Level::Debug);
// Generate the witness row-wise.
let trace_rows = timed!(
&mut timing,
"generate trace rows",
self.generate_trace_rows(operations, min_rows)
);
let trace_polys = timed!(
&mut timing,
"convert to PolynomialValues",
trace_rows_to_poly_values(trace_rows)
);
timing.print();
trace_polys
}
fn generate_trace_rows(
&self,
operations: Vec<KeccakMemoryOp>,
min_rows: usize,
) -> Vec<[F; NUM_COLUMNS]> {
let num_rows = operations.len().max(min_rows).next_power_of_two();
let mut rows = Vec::with_capacity(num_rows);
for op in operations {
rows.push(self.generate_row_for_op(op));
}
let padding_row = self.generate_padding_row();
for _ in rows.len()..num_rows {
rows.push(padding_row);
}
rows
}
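The row count is the next power of two at or above both the operation count and min_rows; e.g. 100 operations with min_rows = 128 pad to 128 rows, while 200 operations pad to 256. As a one-line sketch:

fn padded_rows(num_ops: usize, min_rows: usize) -> usize {
    num_ops.max(min_rows).next_power_of_two()
}
// padded_rows(100, 128) == 128; padded_rows(200, 128) == 256.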
fn generate_row_for_op(&self, op: KeccakMemoryOp) -> [F; NUM_COLUMNS] {
let mut row = [F::ZERO; NUM_COLUMNS];
row[COL_IS_REAL] = F::ONE;
row[COL_CONTEXT] = F::from_canonical_usize(op.context);
row[COL_SEGMENT] = F::from_canonical_usize(op.segment as usize);
row[COL_VIRTUAL] = F::from_canonical_usize(op.virt);
row[COL_READ_TIMESTAMP] = F::from_canonical_usize(op.read_timestamp);
for i in 0..25 {
let input_u64 = op.input[i];
let output_u64 = op.output[i];
for j in 0..8 {
let byte_index = i * 8 + j;
row[col_input_byte(byte_index)] = F::from_canonical_u8(input_u64.to_le_bytes()[j]);
row[col_output_byte(byte_index)] =
F::from_canonical_u8(output_u64.to_le_bytes()[j]);
}
}
row
}
fn generate_padding_row(&self) -> [F; NUM_COLUMNS] {
// We just need COL_IS_REAL to be zero, which it is by default.
// The other fields will have no effect.
[F::ZERO; NUM_COLUMNS]
}
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakMemoryStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
// is_real must be 0 or 1.
let is_real = vars.local_values[COL_IS_REAL];
yield_constr.constraint(is_real * (is_real - P::ONES));
}
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
// is_real must be 0 or 1.
let is_real = vars.local_values[COL_IS_REAL];
let constraint = builder.mul_sub_extension(is_real, is_real, is_real);
yield_constr.constraint(builder, constraint);
}
fn constraint_degree(&self) -> usize {
2
}
}
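Both evaluation paths encode the same degree-2 booleanity constraint x^2 - x = 0, whose only roots are 0 and 1; mul_sub_extension(a, b, c) computes a * b - c, so passing is_real three times yields exactly is_real^2 - is_real. A quick check with i64 standing in for the field (a sketch, not repo code):

// x * x - x vanishes iff x is in {0, 1} (over a field; i64 for illustration).
fn booleanity(x: i64) -> i64 {
    x * x - x
}
// booleanity(0) == 0, booleanity(1) == 0, booleanity(2) == 2 != 0.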
#[cfg(test)]
mod tests {
use anyhow::Result;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
#[test]
fn test_stark_degree() -> Result<()> {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = KeccakMemoryStark<F, D>;
let stark = S {
f: Default::default(),
};
test_stark_low_degree(stark)
}
#[test]
fn test_stark_circuit() -> Result<()> {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = KeccakMemoryStark<F, D>;
let stark = S {
f: Default::default(),
};
test_stark_circuit_constraints::<F, C, S, D>(stark)
}
}

View File

@ -0,0 +1,2 @@
pub mod columns;
pub mod keccak_memory_stark;

View File

@ -13,6 +13,7 @@ pub mod cross_table_lookup;
pub mod generation;
mod get_challenges;
pub mod keccak;
pub mod keccak_memory;
pub mod logic;
pub mod lookup;
pub mod memory;

View File

@ -140,11 +140,10 @@ impl<F: RichField, const D: usize> LogicStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F, D> {
const COLUMNS: usize = columns::NUM_COLUMNS;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -196,7 +195,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F,
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let lv = &vars.local_values;

View File

@ -10,13 +10,8 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
pub(crate) fn eval_lookups<
F: Field,
P: PackedField<Scalar = F>,
const COLS: usize,
const PUB_INPUTS: usize,
>(
vars: StarkEvaluationVars<F, P, COLS, PUB_INPUTS>,
pub(crate) fn eval_lookups<F: Field, P: PackedField<Scalar = F>, const COLS: usize>(
vars: StarkEvaluationVars<F, P, COLS>,
yield_constr: &mut ConstraintConsumer<P>,
col_permuted_input: usize,
col_permuted_table: usize,
@ -42,10 +37,9 @@ pub(crate) fn eval_lookups_circuit<
F: RichField + Extendable<D>,
const D: usize,
const COLS: usize,
const PUB_INPUTS: usize,
>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, COLS, PUB_INPUTS>,
vars: StarkEvaluationTargets<D, COLS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
col_permuted_input: usize,
col_permuted_table: usize,

View File

@ -26,8 +26,6 @@ use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
pub(crate) const NUM_PUBLIC_INPUTS: usize = 0;
pub fn ctl_data<F: Field>() -> Vec<Column<F>> {
let mut res =
Column::singles([IS_READ, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL]).collect_vec();
@ -218,11 +216,10 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -302,7 +299,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();

View File

@ -298,7 +298,7 @@ where
pub(crate) fn eval_permutation_checks<F, FE, P, C, S, const D: usize, const D2: usize>(
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
permutation_vars: PermutationCheckVars<F, FE, P, D2>,
consumer: &mut ConstraintConsumer<P>,
) where
@ -365,14 +365,13 @@ pub(crate) fn eval_permutation_checks_circuit<F, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
permutation_data: PermutationCheckDataTarget<D>,
consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
F: RichField + Extendable<D>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let PermutationCheckDataTarget {
local_zs,

View File

@ -1,10 +1,9 @@
use ethereum_types::{Address, U256};
use itertools::Itertools;
use maybe_rayon::*;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::fri::proof::{
CompressedFriProof, FriChallenges, FriChallengesTarget, FriProof, FriProofTarget,
};
use plonky2::fri::proof::{FriChallenges, FriChallengesTarget, FriProof, FriProofTarget};
use plonky2::fri::structure::{
FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget,
};
@ -19,21 +18,22 @@ use crate::permutation::GrandProductChallengeSet;
#[derive(Debug, Clone)]
pub struct AllProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
pub stark_proofs: Vec<StarkProofWithPublicInputs<F, C, D>>,
pub stark_proofs: Vec<StarkProof<F, C, D>>,
pub public_values: PublicValues,
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
pub fn degree_bits(&self, config: &StarkConfig) -> Vec<usize> {
self.stark_proofs
.iter()
.map(|proof| proof.proof.recover_degree_bits(config))
.map(|proof| proof.recover_degree_bits(config))
.collect()
}
pub fn nums_ctl_zs(&self) -> Vec<usize> {
self.stark_proofs
.iter()
.map(|proof| proof.proof.num_ctl_zs())
.map(|proof| proof.num_ctl_zs())
.collect()
}
}
@ -44,7 +44,58 @@ pub(crate) struct AllProofChallenges<F: RichField + Extendable<D>, const D: usiz
}
pub struct AllProofTarget<const D: usize> {
pub stark_proofs: Vec<StarkProofWithPublicInputsTarget<D>>,
pub stark_proofs: Vec<StarkProofTarget<D>>,
pub public_values: PublicValuesTarget,
}
/// Memory values which are public.
#[derive(Debug, Clone, Default)]
pub struct PublicValues {
pub trie_roots_before: TrieRoots,
pub trie_roots_after: TrieRoots,
pub block_metadata: BlockMetadata,
}
#[derive(Debug, Clone, Default)]
pub struct TrieRoots {
pub state_root: U256,
pub transactions_root: U256,
pub receipts_root: U256,
}
#[derive(Debug, Clone, Default)]
pub struct BlockMetadata {
pub block_beneficiary: Address,
pub block_timestamp: U256,
pub block_number: U256,
pub block_difficulty: U256,
pub block_gaslimit: U256,
pub block_chain_id: U256,
pub block_base_fee: U256,
}
/// Memory values which are public.
/// Note: All the larger integers are encoded with 32-bit limbs in little-endian order.
pub struct PublicValuesTarget {
pub trie_roots_before: TrieRootsTarget,
pub trie_roots_after: TrieRootsTarget,
pub block_metadata: BlockMetadataTarget,
}
pub struct TrieRootsTarget {
pub state_root: [Target; 8],
pub transactions_root: [Target; 8],
pub receipts_root: [Target; 8],
}
pub struct BlockMetadataTarget {
pub block_beneficiary: [Target; 5],
pub block_timestamp: Target,
pub block_number: Target,
pub block_difficulty: Target,
pub block_gaslimit: Target,
pub block_chain_id: Target,
pub block_base_fee: Target,
}
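Given the 32-bit little-endian limb convention noted above, a 256-bit root needs 256 / 32 = 8 targets and a 160-bit Address needs 160 / 32 = 5, which is where the [Target; 8] and [Target; 5] arities come from. A compile-time sketch:

const LIMB_BITS: usize = 32;
const U256_LIMBS: usize = 256 / LIMB_BITS; // 8 targets per trie root
const ADDRESS_LIMBS: usize = 160 / LIMB_BITS; // 5 targets per beneficiary
const _: () = assert!(U256_LIMBS == 8 && ADDRESS_LIMBS == 5);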
pub(crate) struct AllProofChallengesTarget<const D: usize> {
@ -102,44 +153,6 @@ impl<const D: usize> StarkProofTarget<D> {
}
}
#[derive(Debug, Clone)]
pub struct StarkProofWithPublicInputs<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
pub proof: StarkProof<F, C, D>,
// TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`.
pub public_inputs: Vec<F>,
}
pub struct StarkProofWithPublicInputsTarget<const D: usize> {
pub proof: StarkProofTarget<D>,
pub public_inputs: Vec<Target>,
}
pub struct CompressedStarkProof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}
pub struct CompressedStarkProofWithPublicInputs<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
pub proof: CompressedStarkProof<F, C, D>,
pub public_inputs: Vec<F>,
}
pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
/// Randomness used in any permutation arguments.
pub permutation_challenge_sets: Option<Vec<GrandProductChallengeSet<F>>>,

View File

@ -22,24 +22,25 @@ use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{cross_table_lookup_data, CtlCheckVars, CtlData};
use crate::generation::{generate_traces, GenerationInputs};
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::PermutationCheckVars;
use crate::permutation::{
compute_permutation_z_polys, get_n_grand_product_challenge_sets, GrandProductChallengeSet,
};
use crate::proof::{AllProof, StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;
/// Compute all STARK proofs.
/// Generate traces, then create all STARK proofs.
pub fn prove<F, C, const D: usize>(
all_stark: &AllStark<F, D>,
config: &StarkConfig,
trace_poly_values: Vec<Vec<PolynomialValues<F>>>,
public_inputs: Vec<Vec<F>>,
inputs: GenerationInputs,
timing: &mut TimingTree,
) -> Result<AllProof<F, C, D>>
where
@ -47,17 +48,35 @@ where
C: GenericConfig<D, F = F>,
[(); C::Hasher::HASH_SIZE]:,
[(); CpuStark::<F, D>::COLUMNS]:,
[(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakMemoryStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
{
let (traces, public_values) = generate_traces(all_stark, inputs);
prove_with_traces(all_stark, config, traces, public_values, timing)
}
/// Compute all STARK proofs.
pub(crate) fn prove_with_traces<F, C, const D: usize>(
all_stark: &AllStark<F, D>,
config: &StarkConfig,
trace_poly_values: Vec<Vec<PolynomialValues<F>>>,
public_values: PublicValues,
timing: &mut TimingTree,
) -> Result<AllProof<F, C, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::Hasher::HASH_SIZE]:,
[(); CpuStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakMemoryStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::PUBLIC_INPUTS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::PUBLIC_INPUTS]:,
{
let num_starks = Table::num_tables();
debug_assert_eq!(num_starks, trace_poly_values.len());
debug_assert_eq!(num_starks, public_inputs.len());
let rate_bits = config.fri_config.rate_bits;
let cap_height = config.fri_config.cap_height;
@ -104,10 +123,6 @@ where
&trace_poly_values[Table::Cpu as usize],
&trace_commitments[Table::Cpu as usize],
&ctl_data_per_table[Table::Cpu as usize],
public_inputs[Table::Cpu as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
@ -117,10 +132,15 @@ where
&trace_poly_values[Table::Keccak as usize],
&trace_commitments[Table::Keccak as usize],
&ctl_data_per_table[Table::Keccak as usize],
public_inputs[Table::Keccak as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
let keccak_memory_proof = prove_single_table(
&all_stark.keccak_memory_stark,
config,
&trace_poly_values[Table::KeccakMemory as usize],
&trace_commitments[Table::KeccakMemory as usize],
&ctl_data_per_table[Table::KeccakMemory as usize],
&mut challenger,
timing,
)?;
@ -130,10 +150,6 @@ where
&trace_poly_values[Table::Logic as usize],
&trace_commitments[Table::Logic as usize],
&ctl_data_per_table[Table::Logic as usize],
public_inputs[Table::Logic as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
@ -143,18 +159,23 @@ where
&trace_poly_values[Table::Memory as usize],
&trace_commitments[Table::Memory as usize],
&ctl_data_per_table[Table::Memory as usize],
public_inputs[Table::Memory as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
let stark_proofs = vec![cpu_proof, keccak_proof, logic_proof, memory_proof];
let stark_proofs = vec![
cpu_proof,
keccak_proof,
keccak_memory_proof,
logic_proof,
memory_proof,
];
debug_assert_eq!(stark_proofs.len(), num_starks);
Ok(AllProof { stark_proofs })
Ok(AllProof {
stark_proofs,
public_values,
})
}
/// Compute proof for a single STARK table.
@ -164,17 +185,15 @@ fn prove_single_table<F, C, S, const D: usize>(
trace_poly_values: &[PolynomialValues<F>],
trace_commitment: &PolynomialBatch<F, C, D>,
ctl_data: &CtlData<F>,
public_inputs: [F; S::PUBLIC_INPUTS],
challenger: &mut Challenger<F, C::Hasher>,
timing: &mut TimingTree,
) -> Result<StarkProofWithPublicInputs<F, C, D>>
) -> Result<StarkProof<F, C, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); C::Hasher::HASH_SIZE]:,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let degree = trace_poly_values[0].len();
let degree_bits = log2_strict(degree);
@ -228,7 +247,6 @@ where
&permutation_ctl_zs_commitment,
permutation_challenges.as_ref(),
ctl_data,
public_inputs,
alphas.clone(),
degree_bits,
num_permutation_zs,
@ -241,7 +259,6 @@ where
&permutation_ctl_zs_commitment,
permutation_challenges.as_ref(),
ctl_data,
public_inputs,
alphas,
degree_bits,
num_permutation_zs,
@ -310,17 +327,13 @@ where
timing,
)
);
let proof = StarkProof {
Ok(StarkProof {
trace_cap: trace_commitment.merkle_tree.cap.clone(),
permutation_ctl_zs_cap,
quotient_polys_cap,
openings,
opening_proof,
};
Ok(StarkProofWithPublicInputs {
proof,
public_inputs: public_inputs.to_vec(),
})
}
@ -332,7 +345,6 @@ fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
ctl_data: &CtlData<F>,
public_inputs: [F; S::PUBLIC_INPUTS],
alphas: Vec<F>,
degree_bits: usize,
num_permutation_zs: usize,
@ -344,7 +356,6 @@ where
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let degree = 1 << degree_bits;
let rate_bits = config.fri_config.rate_bits;
@ -406,7 +417,6 @@ where
let vars = StarkEvaluationVars {
local_values: &get_trace_values_packed(i_start),
next_values: &get_trace_values_packed(i_next_start),
public_inputs: &public_inputs,
};
let permutation_check_vars =
permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
@ -472,7 +482,6 @@ fn check_constraints<'a, F, C, S, const D: usize>(
permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
ctl_data: &CtlData<F>,
public_inputs: [F; S::PUBLIC_INPUTS],
alphas: Vec<F>,
degree_bits: usize,
num_permutation_zs: usize,
@ -482,7 +491,6 @@ fn check_constraints<'a, F, C, S, const D: usize>(
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let degree = 1 << degree_bits;
let rate_bits = 0; // Set this to higher value to check constraint degree.
@ -531,7 +539,6 @@ fn check_constraints<'a, F, C, S, const D: usize>(
let vars = StarkEvaluationVars {
local_values: trace_subgroup_evals[i].as_slice().try_into().unwrap(),
next_values: trace_subgroup_evals[i_next].as_slice().try_into().unwrap(),
public_inputs: &public_inputs,
};
let permutation_check_vars =
permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {

View File

@ -23,15 +23,17 @@ use crate::cross_table_lookup::{
verify_cross_table_lookups_circuit, CrossTableLookup, CtlCheckVarsTarget,
};
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::PermutationCheckDataTarget;
use crate::proof::{
AllProof, AllProofChallengesTarget, AllProofTarget, StarkOpeningSetTarget, StarkProof,
StarkProofChallengesTarget, StarkProofTarget, StarkProofWithPublicInputs,
StarkProofWithPublicInputsTarget,
AllProof, AllProofChallengesTarget, AllProofTarget, BlockMetadata, BlockMetadataTarget,
PublicValues, PublicValuesTarget, StarkOpeningSetTarget, StarkProof,
StarkProofChallengesTarget, StarkProofTarget, TrieRoots, TrieRootsTarget,
};
use crate::stark::Stark;
use crate::util::{h160_limbs, u256_limbs};
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::vars::StarkEvaluationTargets;
@ -115,13 +117,10 @@ pub fn verify_proof_circuit<
inner_config: &StarkConfig,
) where
[(); CpuStark::<F, D>::COLUMNS]:,
[(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakMemoryStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::PUBLIC_INPUTS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::PUBLIC_INPUTS]:,
C::Hasher: AlgebraicHasher<F>,
{
let AllProofChallengesTarget {
@ -134,6 +133,7 @@ pub fn verify_proof_circuit<
let AllStark {
cpu_stark,
keccak_stark,
keccak_memory_stark,
logic_stark,
memory_stark,
cross_table_lookups,
@ -170,6 +170,18 @@ pub fn verify_proof_circuit<
inner_config,
)
);
with_context!(
builder,
"verify Keccak memory proof",
verify_stark_proof_with_challenges_circuit::<F, C, _, D>(
builder,
keccak_memory_stark,
&all_proof.stark_proofs[Table::KeccakMemory as usize],
&stark_challenges[Table::KeccakMemory as usize],
&ctl_vars_per_table[Table::KeccakMemory as usize],
inner_config,
)
);
with_context!(
builder,
"verify logic proof",
@ -217,23 +229,17 @@ fn verify_stark_proof_with_challenges_circuit<
>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
proof_with_pis: &StarkProofWithPublicInputsTarget<D>,
proof: &StarkProofTarget<D>,
challenges: &StarkProofChallengesTarget<D>,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
inner_config: &StarkConfig,
) where
C::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let zero = builder.zero();
let one = builder.one_extension();
let StarkProofWithPublicInputsTarget {
proof,
public_inputs,
} = proof_with_pis;
assert_eq!(public_inputs.len(), S::PUBLIC_INPUTS);
let StarkOpeningSetTarget {
local_values,
next_values,
@ -245,12 +251,6 @@ fn verify_stark_proof_with_challenges_circuit<
let vars = StarkEvaluationTargets {
local_values: &local_values.to_vec().try_into().unwrap(),
next_values: &next_values.to_vec().try_into().unwrap(),
public_inputs: &public_inputs
.iter()
.map(|&t| builder.convert_to_ext(t))
.collect::<Vec<_>>()
.try_into()
.unwrap(),
};
let degree_bits = proof.recover_degree_bits(inner_config);
@ -356,84 +356,95 @@ pub fn add_virtual_all_proof<F: RichField + Extendable<D>, const D: usize>(
nums_ctl_zs: &[usize],
) -> AllProofTarget<D> {
let stark_proofs = vec![
{
let proof = add_virtual_stark_proof(
builder,
all_stark.cpu_stark,
config,
degree_bits[Table::Cpu as usize],
nums_ctl_zs[Table::Cpu as usize],
);
let public_inputs = builder.add_virtual_targets(CpuStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
{
let proof = add_virtual_stark_proof(
builder,
all_stark.keccak_stark,
config,
degree_bits[Table::Keccak as usize],
nums_ctl_zs[Table::Keccak as usize],
);
let public_inputs = builder.add_virtual_targets(KeccakStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
{
let proof = add_virtual_stark_proof(
builder,
all_stark.logic_stark,
config,
degree_bits[Table::Logic as usize],
nums_ctl_zs[Table::Logic as usize],
);
let public_inputs = builder.add_virtual_targets(LogicStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
{
let proof = add_virtual_stark_proof(
builder,
all_stark.memory_stark,
config,
degree_bits[Table::Memory as usize],
nums_ctl_zs[Table::Memory as usize],
);
let public_inputs = builder.add_virtual_targets(KeccakStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
add_virtual_stark_proof(
builder,
all_stark.cpu_stark,
config,
degree_bits[Table::Cpu as usize],
nums_ctl_zs[Table::Cpu as usize],
),
add_virtual_stark_proof(
builder,
all_stark.keccak_stark,
config,
degree_bits[Table::Keccak as usize],
nums_ctl_zs[Table::Keccak as usize],
),
add_virtual_stark_proof(
builder,
all_stark.keccak_memory_stark,
config,
degree_bits[Table::KeccakMemory as usize],
nums_ctl_zs[Table::KeccakMemory as usize],
),
add_virtual_stark_proof(
builder,
all_stark.logic_stark,
config,
degree_bits[Table::Logic as usize],
nums_ctl_zs[Table::Logic as usize],
),
add_virtual_stark_proof(
builder,
all_stark.memory_stark,
config,
degree_bits[Table::Memory as usize],
nums_ctl_zs[Table::Memory as usize],
),
];
assert_eq!(stark_proofs.len(), Table::num_tables());
AllProofTarget { stark_proofs }
let public_values = add_virtual_public_values(builder);
AllProofTarget {
stark_proofs,
public_values,
}
}
pub fn add_virtual_stark_proof_with_pis<
F: RichField + Extendable<D>,
S: Stark<F, D>,
const D: usize,
>(
pub fn add_virtual_public_values<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
config: &StarkConfig,
degree_bits: usize,
num_ctl_zs: usize,
) -> StarkProofWithPublicInputsTarget<D> {
let proof = add_virtual_stark_proof::<F, S, D>(builder, stark, config, degree_bits, num_ctl_zs);
let public_inputs = builder.add_virtual_targets(S::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
) -> PublicValuesTarget {
let trie_roots_before = add_virtual_trie_roots(builder);
let trie_roots_after = add_virtual_trie_roots(builder);
let block_metadata = add_virtual_block_metadata(builder);
PublicValuesTarget {
trie_roots_before,
trie_roots_after,
block_metadata,
}
}
pub fn add_virtual_trie_roots<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
) -> TrieRootsTarget {
let state_root = builder.add_virtual_target_arr();
let transactions_root = builder.add_virtual_target_arr();
let receipts_root = builder.add_virtual_target_arr();
TrieRootsTarget {
state_root,
transactions_root,
receipts_root,
}
}
pub fn add_virtual_block_metadata<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
) -> BlockMetadataTarget {
let block_beneficiary = builder.add_virtual_target_arr();
let block_timestamp = builder.add_virtual_target();
let block_number = builder.add_virtual_target();
let block_difficulty = builder.add_virtual_target();
let block_gaslimit = builder.add_virtual_target();
let block_chain_id = builder.add_virtual_target();
let block_base_fee = builder.add_virtual_target();
BlockMetadataTarget {
block_beneficiary,
block_timestamp,
block_number,
block_difficulty,
block_gaslimit,
block_chain_id,
block_base_fee,
}
}
@ -499,35 +510,13 @@ pub fn set_all_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
.iter()
.zip_eq(&all_proof.stark_proofs)
{
set_stark_proof_with_pis_target(witness, pt, p, zero);
set_stark_proof_target(witness, pt, p, zero);
}
}
pub fn set_stark_proof_with_pis_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
witness: &mut W,
stark_proof_with_pis_target: &StarkProofWithPublicInputsTarget<D>,
stark_proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
zero: Target,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F>,
W: Witness<F>,
{
let StarkProofWithPublicInputs {
proof,
public_inputs,
} = stark_proof_with_pis;
let StarkProofWithPublicInputsTarget {
proof: pt,
public_inputs: pi_targets,
} = stark_proof_with_pis_target;
// Set public inputs.
for (&pi_t, &pi) in pi_targets.iter().zip_eq(public_inputs) {
witness.set_target(pi_t, pi);
}
set_stark_proof_target(witness, pt, proof, zero);
set_public_value_targets(
witness,
&all_proof_target.public_values,
&all_proof.public_values,
)
}
pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
@ -555,3 +544,88 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);
}
pub fn set_public_value_targets<F, W, const D: usize>(
witness: &mut W,
public_values_target: &PublicValuesTarget,
public_values: &PublicValues,
) where
F: RichField + Extendable<D>,
W: Witness<F>,
{
set_trie_roots_target(
witness,
&public_values_target.trie_roots_before,
&public_values.trie_roots_before,
);
set_trie_roots_target(
witness,
&public_values_target.trie_roots_after,
&public_values.trie_roots_after,
);
set_block_metadata_target(
witness,
&public_values_target.block_metadata,
&public_values.block_metadata,
);
}
pub fn set_trie_roots_target<F, W, const D: usize>(
witness: &mut W,
trie_roots_target: &TrieRootsTarget,
trie_roots: &TrieRoots,
) where
F: RichField + Extendable<D>,
W: Witness<F>,
{
witness.set_target_arr(
trie_roots_target.state_root,
u256_limbs(trie_roots.state_root),
);
witness.set_target_arr(
trie_roots_target.transactions_root,
u256_limbs(trie_roots.transactions_root),
);
witness.set_target_arr(
trie_roots_target.receipts_root,
u256_limbs(trie_roots.receipts_root),
);
}
pub fn set_block_metadata_target<F, W, const D: usize>(
witness: &mut W,
block_metadata_target: &BlockMetadataTarget,
block_metadata: &BlockMetadata,
) where
F: RichField + Extendable<D>,
W: Witness<F>,
{
witness.set_target_arr(
block_metadata_target.block_beneficiary,
h160_limbs(block_metadata.block_beneficiary),
);
witness.set_target(
block_metadata_target.block_timestamp,
F::from_canonical_u64(block_metadata.block_timestamp.as_u64()),
);
witness.set_target(
block_metadata_target.block_number,
F::from_canonical_u64(block_metadata.block_number.as_u64()),
);
witness.set_target(
block_metadata_target.block_difficulty,
F::from_canonical_u64(block_metadata.block_difficulty.as_u64()),
);
witness.set_target(
block_metadata_target.block_gaslimit,
F::from_canonical_u64(block_metadata.block_gaslimit.as_u64()),
);
witness.set_target(
block_metadata_target.block_chain_id,
F::from_canonical_u64(block_metadata.block_chain_id.as_u64()),
);
witness.set_target(
block_metadata_target.block_base_fee,
F::from_canonical_u64(block_metadata.block_base_fee.as_u64()),
);
}
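Note that every metadata word is narrowed with as_u64 and stored in a single target, unlike the trie roots, which get eight limbs each; U256::as_u64 panics on values above u64::MAX, so this encodes the assumption that these fields fit in 64 bits. A sketch (helper name hypothetical):

use ethereum_types::U256;
use plonky2::hash::hash_types::RichField;

// Hypothetical helper making the width assumption explicit.
fn metadata_to_field<F: RichField>(v: U256) -> F {
    // U256::as_u64 panics if v does not fit in 64 bits.
    F::from_canonical_u64(v.as_u64())
}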

View File

@ -20,8 +20,6 @@ use crate::vars::StarkEvaluationVars;
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// The total number of columns in the trace.
const COLUMNS: usize;
/// The number of public inputs.
const PUBLIC_INPUTS: usize;
/// Evaluate constraints at a vector of points.
///
@ -31,7 +29,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// constraints over `F`.
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -40,7 +38,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// Evaluate constraints at a vector of points from the base field `F`.
fn eval_packed_base<P: PackedField<Scalar = F>>(
&self,
vars: StarkEvaluationVars<F, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<F, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) {
self.eval_packed_generic(vars, yield_constr)
@ -49,12 +47,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// Evaluate constraints at a single point from the degree `D` extension field.
fn eval_ext(
&self,
vars: StarkEvaluationVars<
F::Extension,
F::Extension,
{ Self::COLUMNS },
{ Self::PUBLIC_INPUTS },
>,
vars: StarkEvaluationVars<F::Extension, F::Extension, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<F::Extension>,
) {
self.eval_packed_generic(vars, yield_constr)
@ -67,7 +60,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
fn eval_ext_circuit(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
);

View File

@ -26,13 +26,11 @@ pub fn test_stark_low_degree<F: RichField + Extendable<D>, S: Stark<F, D>, const
) -> Result<()>
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let rate_bits = log2_ceil(stark.constraint_degree() + 1);
let trace_ldes = random_low_degree_matrix::<F>(S::COLUMNS, rate_bits);
let size = trace_ldes.len();
let public_inputs = F::rand_arr::<{ S::PUBLIC_INPUTS }>();
let lagrange_first = PolynomialValues::selector(WITNESS_SIZE, 0).lde(rate_bits);
let lagrange_last = PolynomialValues::selector(WITNESS_SIZE, WITNESS_SIZE - 1).lde(rate_bits);
@ -49,7 +47,6 @@ where
.clone()
.try_into()
.unwrap(),
public_inputs: &public_inputs,
};
let mut consumer = ConstraintConsumer::<F>::new(
@ -89,14 +86,12 @@ pub fn test_stark_circuit_constraints<
) -> Result<()>
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
// Compute native constraint evaluation on random values.
let vars = StarkEvaluationVars {
local_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(),
next_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(),
public_inputs: &F::Extension::rand_arr::<{ S::PUBLIC_INPUTS }>(),
};
let alphas = F::rand_vec(1);
let z_last = F::Extension::rand();
@ -124,8 +119,6 @@ where
pw.set_extension_targets(&locals_t, vars.local_values);
let nexts_t = builder.add_virtual_extension_targets(S::COLUMNS);
pw.set_extension_targets(&nexts_t, vars.next_values);
let pis_t = builder.add_virtual_extension_targets(S::PUBLIC_INPUTS);
pw.set_extension_targets(&pis_t, vars.public_inputs);
let alphas_t = builder.add_virtual_targets(1);
pw.set_target(alphas_t[0], alphas[0]);
let z_last_t = builder.add_virtual_extension_target();
@ -135,10 +128,9 @@ where
let lagrange_last_t = builder.add_virtual_extension_target();
pw.set_extension_target(lagrange_last_t, lagrange_last);
let vars = StarkEvaluationTargets::<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }> {
let vars = StarkEvaluationTargets::<D, { S::COLUMNS }> {
local_values: &locals_t.try_into().unwrap(),
next_values: &nexts_t.try_into().unwrap(),
public_inputs: &pis_t.try_into().unwrap(),
};
let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
builder.zero_extension(),

View File

@ -1,3 +1,4 @@
use ethereum_types::{H160, U256};
use itertools::Itertools;
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
@ -40,3 +41,29 @@ pub fn trace_rows_to_poly_values<F: Field, const COLUMNS: usize>(
.map(|column| PolynomialValues::new(column))
.collect()
}
/// Returns the 32-bit little-endian limbs of a `U256`.
pub(crate) fn u256_limbs<F: Field>(u256: U256) -> [F; 8] {
u256.0
.into_iter()
.flat_map(|limb_64| {
let lo = limb_64 as u32;
let hi = (limb_64 >> 32) as u32;
[lo, hi]
})
.map(F::from_canonical_u32)
.collect_vec()
.try_into()
.unwrap()
}
/// Returns the 32-bit limbs of an `H160`.
pub(crate) fn h160_limbs<F: Field>(h160: H160) -> [F; 5] {
h160.0
.chunks(4)
.map(|chunk| u32::from_le_bytes(chunk.try_into().unwrap()))
.map(F::from_canonical_u32)
.collect_vec()
.try_into()
.unwrap()
}
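Each 64-bit word of the U256 contributes a low and then a high 32-bit half, in little-endian word order, so e.g. U256::from(1) becomes [1, 0, 0, 0, 0, 0, 0, 0]. The same split on plain integers (field conversion elided; a sketch):

// Split four little-endian u64 words into eight little-endian u32 limbs.
fn u256_words_to_limbs(words: [u64; 4]) -> [u32; 8] {
    let mut limbs = [0u32; 8];
    for (i, w) in words.into_iter().enumerate() {
        limbs[2 * i] = w as u32; // low half
        limbs[2 * i + 1] = (w >> 32) as u32; // high half
    }
    limbs
}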

View File

@ -20,7 +20,7 @@ use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
pub(crate) fn eval_vanishing_poly<F, FE, P, C, S, const D: usize, const D2: usize>(
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
permutation_vars: Option<PermutationCheckVars<F, FE, P, D2>>,
ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
consumer: &mut ConstraintConsumer<P>,
@ -48,7 +48,7 @@ pub(crate) fn eval_vanishing_poly_circuit<F, C, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
permutation_data: Option<PermutationCheckDataTarget<D>>,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
consumer: &mut RecursiveConstraintConsumer<F, D>,
@ -57,7 +57,6 @@ pub(crate) fn eval_vanishing_poly_circuit<F, C, S, const D: usize>(
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
stark.eval_ext_circuit(builder, vars, consumer);
if let Some(permutation_data) = permutation_data {

View File

@ -3,24 +3,17 @@ use plonky2::field::types::Field;
use plonky2::iop::ext_target::ExtensionTarget;
#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize, const PUBLIC_INPUTS: usize>
pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize>
where
F: Field,
P: PackedField<Scalar = F>,
{
pub local_values: &'a [P; COLUMNS],
pub next_values: &'a [P; COLUMNS],
pub public_inputs: &'a [P::Scalar; PUBLIC_INPUTS],
}
#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationTargets<
'a,
const D: usize,
const COLUMNS: usize,
const PUBLIC_INPUTS: usize,
> {
pub struct StarkEvaluationTargets<'a, const D: usize, const COLUMNS: usize> {
pub local_values: &'a [ExtensionTarget<D>; COLUMNS],
pub next_values: &'a [ExtensionTarget<D>; COLUMNS],
pub public_inputs: &'a [ExtensionTarget<D>; PUBLIC_INPUTS],
}

View File

@ -12,11 +12,12 @@ use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars};
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::PermutationCheckVars;
use crate::proof::{
AllProof, AllProofChallenges, StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs,
AllProof, AllProofChallenges, StarkOpeningSet, StarkProof, StarkProofChallenges,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
@ -29,13 +30,10 @@ pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, co
) -> Result<()>
where
[(); CpuStark::<F, D>::COLUMNS]:,
[(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakMemoryStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::PUBLIC_INPUTS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
let AllProofChallenges {
@ -48,6 +46,7 @@ where
let AllStark {
cpu_stark,
keccak_stark,
keccak_memory_stark,
logic_stark,
memory_stark,
cross_table_lookups,
@ -74,6 +73,13 @@ where
&ctl_vars_per_table[Table::Keccak as usize],
config,
)?;
verify_stark_proof_with_challenges(
keccak_memory_stark,
&all_proof.stark_proofs[Table::KeccakMemory as usize],
&stark_challenges[Table::KeccakMemory as usize],
&ctl_vars_per_table[Table::KeccakMemory as usize],
config,
)?;
verify_stark_proof_with_challenges(
memory_stark,
&all_proof.stark_proofs[Table::Memory as usize],
@ -104,21 +110,15 @@ pub(crate) fn verify_stark_proof_with_challenges<
const D: usize,
>(
stark: S,
proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
proof: &StarkProof<F, C, D>,
challenges: &StarkProofChallenges<F, D>,
ctl_vars: &[CtlCheckVars<F, F::Extension, F::Extension, D>],
config: &StarkConfig,
) -> Result<()>
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
let StarkProofWithPublicInputs {
proof,
public_inputs,
} = proof_with_pis;
ensure!(public_inputs.len() == S::PUBLIC_INPUTS);
let StarkOpeningSet {
local_values,
next_values,
@ -130,13 +130,6 @@ where
let vars = StarkEvaluationVars {
local_values: &local_values.to_vec().try_into().unwrap(),
next_values: &next_values.to_vec().try_into().unwrap(),
public_inputs: &public_inputs
.iter()
.copied()
.map(F::Extension::from_basefield)
.collect::<Vec<_>>()
.try_into()
.unwrap(),
};
let degree_bits = proof.recover_degree_bits(config);

View File

@ -4,7 +4,9 @@ use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::{generate_traces, TransactionData};
use plonky2_evm::generation::partial_trie::PartialTrie;
use plonky2_evm::generation::GenerationInputs;
use plonky2_evm::proof::BlockMetadata;
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
@ -17,30 +19,22 @@ type C = PoseidonGoldilocksConfig;
#[ignore] // TODO: Won't work until txn parsing, storage, etc. are implemented.
fn test_simple_transfer() -> anyhow::Result<()> {
let all_stark = AllStark::<F, D>::default();
let config = StarkConfig::standard_fast_config();
let txn = TransactionData {
signed_txn: hex!("f85f050a82520894000000000000000000000000000000000000000064801ca0fa56df5d988638fad8798e5ef75a1e1125dc7fb55d2ac4bce25776a63f0c2967a02cb47a5579eb5f83a1cabe4662501c0059f1b58e60ef839a1b0da67af6b9fb38").to_vec(),
trie_proofs: vec![
vec![
hex!("f874a1202f93d0dfb1562c03c825a33eec4438e468c17fff649ae844c004065985ae2945b850f84e058a152d02c7e14af6800000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").to_vec(),
],
vec![
hex!("f8518080a0d36b8b6b60021940d5553689fb33e5d45e649dd8f4f211d26566238a83169da58080a0c62aa627943b70321f89a8b2fea274ecd47116e62042077dcdc0bdca7c1f66738080808080808080808080").to_vec(),
hex!("f873a03f93d0dfb1562c03c825a33eec4438e468c17fff649ae844c004065985ae2945b850f84e068a152d02c7e14af67ccb4ca056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").to_vec(),
],
]
let block_metadata = BlockMetadata::default();
let txn = hex!("f85f050a82520894000000000000000000000000000000000000000064801ca0fa56df5d988638fad8798e5ef75a1e1125dc7fb55d2ac4bce25776a63f0c2967a02cb47a5579eb5f83a1cabe4662501c0059f1b58e60ef839a1b0da67af6b9fb38");
let inputs = GenerationInputs {
signed_txns: vec![txn.to_vec()],
state_trie: PartialTrie::Empty,
transactions_trie: PartialTrie::Empty,
receipts_trie: PartialTrie::Empty,
storage_tries: vec![],
block_metadata,
};
let traces = generate_traces(&all_stark, &[txn]);
let config = StarkConfig::standard_fast_config();
let proof = prove::<F, C, D>(
&all_stark,
&config,
traces,
vec![vec![]; 4],
&mut TimingTree::default(),
)?;
let proof = prove::<F, C, D>(&all_stark, &config, inputs, &mut TimingTree::default())?;
verify_proof(all_stark, proof, &config)
}

View File

@ -3,7 +3,6 @@
pub mod arithmetic_base;
pub mod arithmetic_extension;
pub mod assert_le;
pub mod base_sum;
pub mod constant;
pub mod exponentiation;

View File

@ -104,9 +104,12 @@ pub trait Witness<F: Field> {
where
F: RichField + Extendable<D>,
{
let limbs = value.to_basefield_array();
(0..D).for_each(|i| {
self.set_target(et.0[i], limbs[i]);
self.set_target_arr(et.0, value.to_basefield_array());
}
fn set_target_arr<const N: usize>(&mut self, targets: [Target; N], values: [F; N]) {
(0..N).for_each(|i| {
self.set_target(targets[i], values[i]);
});
}

View File

@ -157,6 +157,10 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
(0..n).map(|_i| self.add_virtual_target()).collect()
}
pub fn add_virtual_target_arr<const N: usize>(&mut self) -> [Target; N] {
[0; N].map(|_| self.add_virtual_target())
}
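These two helpers are what the public-value plumbing above leans on: the builder allocates a fixed-size array of virtual targets and the witness fills it in one call. A sketch of the pairing under the trait additions in this diff (function name hypothetical):

use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::target::Target;
use plonky2::iop::witness::{PartialWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;

// Hypothetical helper showing the allocate-then-fill pattern.
fn alloc_and_set<F: RichField + Extendable<D>, const D: usize, const N: usize>(
    builder: &mut CircuitBuilder<F, D>,
    pw: &mut PartialWitness<F>,
    values: [F; N],
) -> [Target; N] {
    let targets: [Target; N] = builder.add_virtual_target_arr();
    pw.set_target_arr(targets, values);
    targets
}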
pub fn add_virtual_hash(&mut self) -> HashOutTarget {
HashOutTarget::from_vec(self.add_virtual_targets(4))
}

View File

@ -1,26 +1,25 @@
use std::marker::PhantomData;
use plonky2::gates::gate::Gate;
use plonky2::gates::packed_util::PackedEvaluableBase;
use plonky2::gates::util::StridedConstraintConsumer;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGenerator};
use plonky2::iop::target::Target;
use plonky2::iop::wire::Wire;
use plonky2::iop::witness::{PartitionWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit};
use plonky2::plonk::vars::{
EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch,
EvaluationVarsBasePacked,
};
use plonky2_field::extension::Extendable;
use plonky2_field::packed::PackedField;
use plonky2_field::types::{Field, Field64};
use plonky2_util::{bits_u64, ceil_div_usize};
use crate::gates::gate::Gate;
use crate::gates::packed_util::PackedEvaluableBase;
use crate::gates::util::StridedConstraintConsumer;
use crate::hash::hash_types::RichField;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGenerator};
use crate::iop::target::Target;
use crate::iop::wire::Wire;
use crate::iop::witness::{PartitionWitness, Witness};
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit};
use crate::plonk::vars::{
EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch,
EvaluationVarsBasePacked,
};
// TODO: replace/merge this gate with `ComparisonGate`.
/// A gate for checking that one value is less than or equal to another.
@ -450,6 +449,11 @@ mod tests {
use std::marker::PhantomData;
use anyhow::Result;
use plonky2::gates::gate::Gate;
use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree};
use plonky2::hash::hash_types::HashOut;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::plonk::vars::EvaluationVars;
use plonky2_field::extension::quartic::QuarticExtension;
use plonky2_field::goldilocks_field::GoldilocksField;
use plonky2_field::types::Field;
@ -457,11 +461,6 @@ mod tests {
use rand::Rng;
use crate::gates::assert_le::AssertLessThanGate;
use crate::gates::gate::Gate;
use crate::gates::gate_testing::{test_eval_fns, test_low_degree};
use crate::hash::hash_types::HashOut;
use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use crate::plonk::vars::EvaluationVars;
#[test]
fn wire_indices() {

View File

@ -1 +1,2 @@
pub mod assert_le;
pub mod switch;

View File

@ -3,7 +3,6 @@ use std::marker::PhantomData;
use itertools::izip;
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::gates::assert_le::AssertLessThanGate;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::generator::{GeneratedValues, SimpleGenerator};
use plonky2::iop::target::{BoolTarget, Target};
@ -11,6 +10,7 @@ use plonky2::iop::witness::{PartitionWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2_util::ceil_div_usize;
use crate::gates::assert_le::AssertLessThanGate;
use crate::permutation::assert_permutation_circuit;
pub struct MemoryOp<F: Field> {