diff --git a/evm/src/all_stark.rs b/evm/src/all_stark.rs index 77af0119..a47b5b7c 100644 --- a/evm/src/all_stark.rs +++ b/evm/src/all_stark.rs @@ -8,6 +8,9 @@ use crate::cpu::cpu_stark::CpuStark; use crate::cross_table_lookup::{CrossTableLookup, TableWithColumns}; use crate::keccak::keccak_stark; use crate::keccak::keccak_stark::KeccakStark; +use crate::keccak_memory::columns::KECCAK_WIDTH_BYTES; +use crate::keccak_memory::keccak_memory_stark; +use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark; use crate::logic; use crate::logic::LogicStark; use crate::memory::memory_stark::MemoryStark; @@ -18,6 +21,7 @@ use crate::stark::Stark; pub struct AllStark, const D: usize> { pub cpu_stark: CpuStark, pub keccak_stark: KeccakStark, + pub keccak_memory_stark: KeccakMemoryStark, pub logic_stark: LogicStark, pub memory_stark: MemoryStark, pub cross_table_lookups: Vec>, @@ -28,6 +32,7 @@ impl, const D: usize> Default for AllStark { Self { cpu_stark: CpuStark::default(), keccak_stark: KeccakStark::default(), + keccak_memory_stark: KeccakMemoryStark::default(), logic_stark: LogicStark::default(), memory_stark: MemoryStark::default(), cross_table_lookups: all_cross_table_lookups(), @@ -40,6 +45,7 @@ impl, const D: usize> AllStark { let ans = vec![ self.cpu_stark.num_permutation_batches(config), self.keccak_stark.num_permutation_batches(config), + self.keccak_memory_stark.num_permutation_batches(config), self.logic_stark.num_permutation_batches(config), self.memory_stark.num_permutation_batches(config), ]; @@ -51,6 +57,7 @@ impl, const D: usize> AllStark { let ans = vec![ self.cpu_stark.permutation_batch_size(), self.keccak_stark.permutation_batch_size(), + self.keccak_memory_stark.permutation_batch_size(), self.logic_stark.permutation_batch_size(), self.memory_stark.permutation_batch_size(), ]; @@ -63,8 +70,9 @@ impl, const D: usize> AllStark { pub enum Table { Cpu = 0, Keccak = 1, - Logic = 2, - Memory = 3, + KeccakMemory = 2, + Logic = 3, + Memory = 4, } impl Table { @@ -75,16 +83,22 @@ impl Table { #[allow(unused)] // TODO: Should be used soon. 
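// Editorial note on the cross-table lookup wiring after this change (summarizing the code below):
// - the Keccak table is looked up by CPU rows with `is_keccak` set and by the new
//   Keccak-memory table (`ctl_keccak`);
// - the Keccak-memory table is looked up by CPU rows with `is_keccak_memory` set
//   (`ctl_keccak_memory`);
// - the memory table is looked up by the CPU's NUM_CHANNELS channels plus one read and
//   one write lookup per byte of the KECCAK_WIDTH_BYTES-byte Keccak-memory state
//   (`ctl_memory`).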
pub(crate) fn all_cross_table_lookups() -> Vec> { - vec![ctl_keccak(), ctl_logic(), ctl_memory()] + vec![ctl_keccak(), ctl_logic(), ctl_memory(), ctl_keccak_memory()] } fn ctl_keccak() -> CrossTableLookup { + let cpu_looking = TableWithColumns::new( + Table::Cpu, + cpu_stark::ctl_data_keccak(), + Some(cpu_stark::ctl_filter_keccak()), + ); + let keccak_memory_looking = TableWithColumns::new( + Table::KeccakMemory, + keccak_memory_stark::ctl_looking_keccak(), + Some(keccak_memory_stark::ctl_filter()), + ); CrossTableLookup::new( - vec![TableWithColumns::new( - Table::Cpu, - cpu_stark::ctl_data_keccak(), - Some(cpu_stark::ctl_filter_keccak()), - )], + vec![cpu_looking, keccak_memory_looking], TableWithColumns::new( Table::Keccak, keccak_stark::ctl_data(), @@ -94,6 +108,22 @@ fn ctl_keccak() -> CrossTableLookup { ) } +fn ctl_keccak_memory() -> CrossTableLookup { + CrossTableLookup::new( + vec![TableWithColumns::new( + Table::Cpu, + cpu_stark::ctl_data_keccak_memory(), + Some(cpu_stark::ctl_filter_keccak_memory()), + )], + TableWithColumns::new( + Table::KeccakMemory, + keccak_memory_stark::ctl_looked_data(), + Some(keccak_memory_stark::ctl_filter()), + ), + None, + ) +} + fn ctl_logic() -> CrossTableLookup { CrossTableLookup::new( vec![TableWithColumns::new( @@ -107,16 +137,33 @@ fn ctl_logic() -> CrossTableLookup { } fn ctl_memory() -> CrossTableLookup { + let cpu_memory_ops = (0..NUM_CHANNELS).map(|channel| { + TableWithColumns::new( + Table::Cpu, + cpu_stark::ctl_data_memory(channel), + Some(cpu_stark::ctl_filter_memory(channel)), + ) + }); + let keccak_memory_reads = (0..KECCAK_WIDTH_BYTES).map(|i| { + TableWithColumns::new( + Table::KeccakMemory, + keccak_memory_stark::ctl_looking_memory(i, true), + Some(keccak_memory_stark::ctl_filter()), + ) + }); + let keccak_memory_writes = (0..KECCAK_WIDTH_BYTES).map(|i| { + TableWithColumns::new( + Table::KeccakMemory, + keccak_memory_stark::ctl_looking_memory(i, false), + Some(keccak_memory_stark::ctl_filter()), + ) + }); + let all_lookers = cpu_memory_ops + .chain(keccak_memory_reads) + .chain(keccak_memory_writes) + .collect(); CrossTableLookup::new( - (0..NUM_CHANNELS) - .map(|channel| { - TableWithColumns::new( - Table::Cpu, - cpu_stark::ctl_data_memory(channel), - Some(cpu_stark::ctl_filter_memory(channel)), - ) - }) - .collect(), + all_lookers, TableWithColumns::new( Table::Memory, memory_stark::ctl_data(), @@ -148,12 +195,13 @@ mod tests { use crate::cpu::kernel::aggregator::KERNEL; use crate::cross_table_lookup::testutils::check_ctls; use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS}; + use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark; use crate::logic::{self, LogicStark, Operation}; use crate::memory::memory_stark::tests::generate_random_memory_ops; use crate::memory::memory_stark::MemoryStark; use crate::memory::NUM_CHANNELS; - use crate::proof::AllProof; - use crate::prover::prove; + use crate::proof::{AllProof, PublicValues}; + use crate::prover::prove_with_traces; use crate::recursive_verifier::{ add_virtual_all_proof, set_all_proof_target, verify_proof_circuit, }; @@ -177,6 +225,13 @@ mod tests { keccak_stark.generate_trace(keccak_inputs) } + fn make_keccak_memory_trace( + keccak_memory_stark: &KeccakMemoryStark, + config: &StarkConfig, + ) -> Vec> { + keccak_memory_stark.generate_trace(vec![], 1 << config.fri_config.cap_height) + } + fn make_logic_trace( num_rows: usize, logic_stark: &LogicStark, @@ -205,6 +260,19 @@ mod tests { (trace, num_ops) } + fn bits_from_opcode(opcode: u8) -> [F; 8] { + [ 
+ F::from_bool(opcode & (1 << 0) != 0), + F::from_bool(opcode & (1 << 1) != 0), + F::from_bool(opcode & (1 << 2) != 0), + F::from_bool(opcode & (1 << 3) != 0), + F::from_bool(opcode & (1 << 4) != 0), + F::from_bool(opcode & (1 << 5) != 0), + F::from_bool(opcode & (1 << 6) != 0), + F::from_bool(opcode & (1 << 7) != 0), + ] + } + fn make_cpu_trace( num_keccak_perms: usize, num_logic_rows: usize, @@ -263,16 +331,21 @@ mod tests { [F::ZERO; CpuStark::::COLUMNS].into(); row.is_cpu_cycle = F::ONE; row.is_kernel_mode = F::ONE; + // Since these are the first cycle rows, we must start with PC=route_txn then increment. row.program_counter = F::from_canonical_usize(KERNEL.global_labels["route_txn"] + i); - row.opcode = [ - (logic::columns::IS_AND, 0x16), - (logic::columns::IS_OR, 0x17), - (logic::columns::IS_XOR, 0x18), - ] - .into_iter() - .map(|(col, opcode)| logic_trace[col].values[i] * F::from_canonical_u64(opcode)) - .sum(); + row.opcode_bits = bits_from_opcode( + if logic_trace[logic::columns::IS_AND].values[i] != F::ZERO { + 0x16 + } else if logic_trace[logic::columns::IS_OR].values[i] != F::ZERO { + 0x17 + } else if logic_trace[logic::columns::IS_XOR].values[i] != F::ZERO { + 0x18 + } else { + panic!() + }, + ); + let logic = row.general.logic_mut(); let input0_bit_cols = logic::columns::limb_bit_cols_for_input(logic::columns::INPUT0); @@ -330,7 +403,7 @@ mod tests { let last_row: cpu::columns::CpuColumnsView = cpu_trace_rows[cpu_trace_rows.len() - 1].into(); row.is_cpu_cycle = F::ONE; - row.opcode = F::from_canonical_u8(0x0a); // `EXP` is implemented in software + row.opcode_bits = bits_from_opcode(0x0a); // `EXP` is implemented in software row.is_kernel_mode = F::ONE; row.program_counter = last_row.program_counter + F::ONE; row.general.syscalls_mut().output = [ @@ -352,7 +425,7 @@ mod tests { let mut row: cpu::columns::CpuColumnsView = [F::ZERO; CpuStark::::COLUMNS].into(); row.is_cpu_cycle = F::ONE; - row.opcode = F::from_canonical_u8(0xf9); + row.opcode_bits = bits_from_opcode(0xf9); row.is_kernel_mode = F::ONE; row.program_counter = F::from_canonical_usize(KERNEL.global_labels["sys_exp"]); row.general.jumps_mut().input0 = [ @@ -374,7 +447,7 @@ mod tests { let mut row: cpu::columns::CpuColumnsView = [F::ZERO; CpuStark::::COLUMNS].into(); row.is_cpu_cycle = F::ONE; - row.opcode = F::from_canonical_u8(0x56); + row.opcode_bits = bits_from_opcode(0x56); row.is_kernel_mode = F::ONE; row.program_counter = F::from_canonical_u16(15682); row.general.jumps_mut().input0 = [ @@ -411,7 +484,7 @@ mod tests { let mut row: cpu::columns::CpuColumnsView = [F::ZERO; CpuStark::::COLUMNS].into(); row.is_cpu_cycle = F::ONE; - row.opcode = F::from_canonical_u8(0xf9); + row.opcode_bits = bits_from_opcode(0xf9); row.is_kernel_mode = F::ONE; row.program_counter = F::from_canonical_u16(15106); row.general.jumps_mut().input0 = [ @@ -433,7 +506,7 @@ mod tests { let mut row: cpu::columns::CpuColumnsView = [F::ZERO; CpuStark::::COLUMNS].into(); row.is_cpu_cycle = F::ONE; - row.opcode = F::from_canonical_u8(0x56); + row.opcode_bits = bits_from_opcode(0x56); row.is_kernel_mode = F::ZERO; row.program_counter = F::from_canonical_u16(63064); row.general.jumps_mut().input0 = [ @@ -471,7 +544,7 @@ mod tests { let mut row: cpu::columns::CpuColumnsView = [F::ZERO; CpuStark::::COLUMNS].into(); row.is_cpu_cycle = F::ONE; - row.opcode = F::from_canonical_u8(0x57); + row.opcode_bits = bits_from_opcode(0x57); row.is_kernel_mode = F::ZERO; row.program_counter = F::from_canonical_u16(3754); row.general.jumps_mut().input0 = [ @@ -509,7 
+582,7 @@ mod tests { let mut row: cpu::columns::CpuColumnsView = [F::ZERO; CpuStark::::COLUMNS].into(); row.is_cpu_cycle = F::ONE; - row.opcode = F::from_canonical_u8(0x57); + row.opcode_bits = bits_from_opcode(0x57); row.is_kernel_mode = F::ZERO; row.program_counter = F::from_canonical_u16(37543); row.general.jumps_mut().input0 = [ @@ -538,7 +611,7 @@ mod tests { let last_row: cpu::columns::CpuColumnsView = cpu_trace_rows[cpu_trace_rows.len() - 1].into(); row.is_cpu_cycle = F::ONE; - row.opcode = F::from_canonical_u8(0x56); + row.opcode_bits = bits_from_opcode(0x56); row.is_kernel_mode = F::ZERO; row.program_counter = last_row.program_counter + F::ONE; row.general.jumps_mut().input0 = [ @@ -575,7 +648,7 @@ mod tests { for i in 0..cpu_trace_rows.len().next_power_of_two() - cpu_trace_rows.len() { let mut row: cpu::columns::CpuColumnsView = [F::ZERO; CpuStark::::COLUMNS].into(); - row.opcode = F::from_canonical_u8(0xff); + row.opcode_bits = bits_from_opcode(0xff); row.is_cpu_cycle = F::ONE; row.is_kernel_mode = F::ONE; row.program_counter = @@ -607,6 +680,7 @@ mod tests { let num_keccak_perms = 2; let keccak_trace = make_keccak_trace(num_keccak_perms, &all_stark.keccak_stark, &mut rng); + let keccak_memory_trace = make_keccak_memory_trace(&all_stark.keccak_memory_stark, config); let logic_trace = make_logic_trace(num_logic_rows, &all_stark.logic_stark, &mut rng); let mem_trace = make_memory_trace(num_memory_ops, &all_stark.memory_stark, &mut rng); let mut memory_trace = mem_trace.0; @@ -621,14 +695,21 @@ mod tests { &mut memory_trace, ); - let traces = vec![cpu_trace, keccak_trace, logic_trace, memory_trace]; + let traces = vec![ + cpu_trace, + keccak_trace, + keccak_memory_trace, + logic_trace, + memory_trace, + ]; check_ctls(&traces, &all_stark.cross_table_lookups); - let proof = prove::( + let public_values = PublicValues::default(); + let proof = prove_with_traces::( &all_stark, config, traces, - vec![vec![]; 4], + public_values, &mut TimingTree::default(), )?; diff --git a/evm/src/arithmetic/add.rs b/evm/src/arithmetic/add.rs index 80f03d63..e87566b6 100644 --- a/evm/src/arithmetic/add.rs +++ b/evm/src/arithmetic/add.rs @@ -9,6 +9,21 @@ use crate::arithmetic::columns::*; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::range_check_error; +pub(crate) fn u256_add_cc(input0: [u64; N_LIMBS], input1: [u64; N_LIMBS]) -> ([u64; N_LIMBS], u64) { + // Input and output have 16-bit limbs + let mut output = [0u64; N_LIMBS]; + + const MASK: u64 = (1u64 << LIMB_BITS) - 1u64; + let mut cy = 0u64; + for (i, a, b) in izip!(0.., input0, input1) { + let s = a + b + cy; + cy = s >> LIMB_BITS; + assert!(cy <= 1u64, "input limbs were larger than 16 bits"); + output[i] = s & MASK; + } + (output, cy) +} + /// Given two sequences `larger` and `smaller` of equal length (not /// checked), verifies that \sum_i larger[i] 2^(LIMB_BITS * i) == /// \sum_i smaller[i] 2^(LIMB_BITS * i), taking care of carry propagation. @@ -19,7 +34,8 @@ pub(crate) fn eval_packed_generic_are_equal( is_op: P, larger: I, smaller: J, -) where +) -> P +where P: PackedField, I: Iterator, J: Iterator, @@ -36,6 +52,7 @@ pub(crate) fn eval_packed_generic_are_equal( // increase the degree of the constraint. 
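// The final value of `cy` is now returned to the caller: callers that only care about
// equality modulo 2^256 can ignore it, while the new compare module constrains it to
// equal the LT/GT output bit (see `compare::eval_packed_generic_lt`).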
cy = t * overflow_inv; } + cy } pub(crate) fn eval_ext_circuit_are_equal( @@ -44,7 +61,8 @@ pub(crate) fn eval_ext_circuit_are_equal( is_op: ExtensionTarget, larger: I, smaller: J, -) where +) -> ExtensionTarget +where F: RichField + Extendable, I: Iterator>, J: Iterator>, @@ -72,6 +90,7 @@ pub(crate) fn eval_ext_circuit_are_equal( cy = builder.mul_const_extension(overflow_inv, t); } + cy } pub fn generate(lv: &mut [F; NUM_ARITH_COLUMNS]) { @@ -79,17 +98,7 @@ pub fn generate(lv: &mut [F; NUM_ARITH_COLUMNS]) { let input1_limbs = ADD_INPUT_1.map(|c| lv[c].to_canonical_u64()); // Input and output have 16-bit limbs - let mut output_limbs = [0u64; N_LIMBS]; - - const MASK: u64 = (1u64 << LIMB_BITS) - 1u64; - let mut cy = 0u64; - for (i, a, b) in izip!(0.., input0_limbs, input1_limbs) { - let s = a + b + cy; - cy = s >> LIMB_BITS; - assert!(cy <= 1u64, "input limbs were larger than 16 bits"); - output_limbs[i] = s & MASK; - } - // last carry is dropped because this is addition modulo 2^256. + let (output_limbs, _) = u256_add_cc(input0_limbs, input1_limbs); for (&c, output_limb) in ADD_OUTPUT.iter().zip(output_limbs) { lv[c] = F::from_canonical_u64(output_limb); diff --git a/evm/src/arithmetic/arithmetic_stark.rs b/evm/src/arithmetic/arithmetic_stark.rs index ce8c7528..58b8afff 100644 --- a/evm/src/arithmetic/arithmetic_stark.rs +++ b/evm/src/arithmetic/arithmetic_stark.rs @@ -8,6 +8,7 @@ use plonky2::hash::hash_types::RichField; use crate::arithmetic::add; use crate::arithmetic::columns; +use crate::arithmetic::compare; use crate::arithmetic::mul; use crate::arithmetic::sub; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; @@ -45,6 +46,10 @@ impl ArithmeticStark { sub::generate(local_values); } else if local_values[columns::IS_MUL].is_one() { mul::generate(local_values); + } else if local_values[columns::IS_LT].is_one() { + compare::generate(local_values, columns::IS_LT); + } else if local_values[columns::IS_GT].is_one() { + compare::generate(local_values, columns::IS_GT); } else { todo!("the requested operation has not yet been implemented"); } @@ -53,11 +58,10 @@ impl ArithmeticStark { impl, const D: usize> Stark for ArithmeticStark { const COLUMNS: usize = columns::NUM_ARITH_COLUMNS; - const PUBLIC_INPUTS: usize = 0; fn eval_packed_generic( &self, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
<P>
, ) where FE: FieldExtension, @@ -67,18 +71,20 @@ impl, const D: usize> Stark for ArithmeticSta add::eval_packed_generic(lv, yield_constr); sub::eval_packed_generic(lv, yield_constr); mul::eval_packed_generic(lv, yield_constr); + compare::eval_packed_generic(lv, yield_constr); } fn eval_ext_circuit( &self, builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ) { let lv = vars.local_values; add::eval_ext_circuit(builder, lv, yield_constr); sub::eval_ext_circuit(builder, lv, yield_constr); mul::eval_ext_circuit(builder, lv, yield_constr); + compare::eval_ext_circuit(builder, lv, yield_constr); } fn constraint_degree(&self) -> usize { diff --git a/evm/src/arithmetic/columns.rs b/evm/src/arithmetic/columns.rs index e51419a8..7b44adc1 100644 --- a/evm/src/arithmetic/columns.rs +++ b/evm/src/arithmetic/columns.rs @@ -79,4 +79,9 @@ pub(crate) const MUL_INPUT_1: [usize; N_LIMBS] = GENERAL_INPUT_1; pub(crate) const MUL_OUTPUT: [usize; N_LIMBS] = GENERAL_INPUT_2; pub(crate) const MUL_AUX_INPUT: [usize; N_LIMBS] = AUX_INPUT_0; +pub(crate) const CMP_INPUT_0: [usize; N_LIMBS] = GENERAL_INPUT_0; +pub(crate) const CMP_INPUT_1: [usize; N_LIMBS] = GENERAL_INPUT_1; +pub(crate) const CMP_OUTPUT: usize = GENERAL_INPUT_2[0]; +pub(crate) const CMP_AUX_INPUT: [usize; N_LIMBS] = AUX_INPUT_0; + pub const NUM_ARITH_COLUMNS: usize = START_SHARED_COLS + NUM_SHARED_COLS; diff --git a/evm/src/arithmetic/compare.rs b/evm/src/arithmetic/compare.rs new file mode 100644 index 00000000..8410cade --- /dev/null +++ b/evm/src/arithmetic/compare.rs @@ -0,0 +1,219 @@ +//! Support for EVM LT and GT instructions +//! +//! This crate verifies EVM LT and GT instructions (i.e. for unsigned +//! inputs). The difference between LT and GT is of course just a +//! matter of the order of the inputs. The verification is essentially +//! identical to the SUB instruction: For both SUB and LT we have values +//! +//! - `input0` +//! - `input1` +//! - `difference` (mod 2^256) +//! - `borrow` (= 0 or 1) +//! +//! satisfying `input0 - input1 = difference + borrow * 2^256`. Where +//! SUB verifies `difference` and ignores `borrow`, LT verifies +//! `borrow` (and uses `difference` as an auxiliary input). + +use plonky2::field::extension::Extendable; +use plonky2::field::packed::PackedField; +use plonky2::hash::hash_types::RichField; +use plonky2::iop::ext_target::ExtensionTarget; + +use crate::arithmetic::add::{eval_ext_circuit_are_equal, eval_packed_generic_are_equal}; +use crate::arithmetic::columns::*; +use crate::arithmetic::sub::u256_sub_br; +use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::range_check_error; + +pub(crate) fn generate(lv: &mut [F; NUM_ARITH_COLUMNS], op: usize) { + let input0 = CMP_INPUT_0.map(|c| lv[c].to_canonical_u64()); + let input1 = CMP_INPUT_1.map(|c| lv[c].to_canonical_u64()); + + let (diff, br) = match op { + // input0 - input1 == diff + br*2^256 + IS_LT => u256_sub_br(input0, input1), + // input1 - input0 == diff + br*2^256 + IS_GT => u256_sub_br(input1, input0), + IS_SLT => todo!(), + IS_SGT => todo!(), + _ => panic!("op code not a comparison"), + }; + + for (&c, diff_limb) in CMP_AUX_INPUT.iter().zip(diff) { + lv[c] = F::from_canonical_u64(diff_limb); + } + lv[CMP_OUTPUT] = F::from_canonical_u64(br); +} + +pub(crate) fn eval_packed_generic_lt( + yield_constr: &mut ConstraintConsumer
<P>
, + is_op: P, + input0: [P; N_LIMBS], + input1: [P; N_LIMBS], + aux: [P; N_LIMBS], + output: P, +) { + // Verify (input0 < input1) == output by providing aux such that + // input0 - input1 == aux + output*2^256. + let lhs_limbs = input0.iter().zip(input1).map(|(&a, b)| a - b); + let cy = eval_packed_generic_are_equal(yield_constr, is_op, aux.into_iter(), lhs_limbs); + // We don't need to check that cy is 0 or 1, since output has + // already been checked to be 0 or 1. + yield_constr.constraint(is_op * (cy - output)); +} + +pub fn eval_packed_generic( + lv: &[P; NUM_ARITH_COLUMNS], + yield_constr: &mut ConstraintConsumer
<P>
, +) { + range_check_error!(CMP_INPUT_0, 16); + range_check_error!(CMP_INPUT_1, 16); + range_check_error!(CMP_AUX_INPUT, 16); + range_check_error!([CMP_OUTPUT], 1); + + let input0 = CMP_INPUT_0.map(|c| lv[c]); + let input1 = CMP_INPUT_1.map(|c| lv[c]); + let aux = CMP_AUX_INPUT.map(|c| lv[c]); + let output = lv[CMP_OUTPUT]; + + eval_packed_generic_lt(yield_constr, lv[IS_LT], input0, input1, aux, output); + eval_packed_generic_lt(yield_constr, lv[IS_GT], input1, input0, aux, output); +} + +#[allow(clippy::needless_collect)] +pub(crate) fn eval_ext_circuit_lt, const D: usize>( + builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder, + yield_constr: &mut RecursiveConstraintConsumer, + is_op: ExtensionTarget, + input0: [ExtensionTarget; N_LIMBS], + input1: [ExtensionTarget; N_LIMBS], + aux: [ExtensionTarget; N_LIMBS], + output: ExtensionTarget, +) { + // Since `map` is lazy and the closure passed to it borrows + // `builder`, we can't then borrow builder again below in the call + // to `eval_ext_circuit_are_equal`. The solution is to force + // evaluation with `collect`. + let lhs_limbs = input0 + .iter() + .zip(input1) + .map(|(&a, b)| builder.sub_extension(a, b)) + .collect::>>(); + + let cy = eval_ext_circuit_are_equal( + builder, + yield_constr, + is_op, + aux.into_iter(), + lhs_limbs.into_iter(), + ); + let good_output = builder.sub_extension(cy, output); + let filter = builder.mul_extension(is_op, good_output); + yield_constr.constraint(builder, filter); +} + +pub fn eval_ext_circuit, const D: usize>( + builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder, + lv: &[ExtensionTarget; NUM_ARITH_COLUMNS], + yield_constr: &mut RecursiveConstraintConsumer, +) { + let input0 = CMP_INPUT_0.map(|c| lv[c]); + let input1 = CMP_INPUT_1.map(|c| lv[c]); + let aux = CMP_AUX_INPUT.map(|c| lv[c]); + let output = lv[CMP_OUTPUT]; + + eval_ext_circuit_lt( + builder, + yield_constr, + lv[IS_LT], + input0, + input1, + aux, + output, + ); + eval_ext_circuit_lt( + builder, + yield_constr, + lv[IS_GT], + input1, + input0, + aux, + output, + ); +} + +#[cfg(test)] +mod tests { + use plonky2::field::goldilocks_field::GoldilocksField; + use plonky2::field::types::Field; + use rand::{Rng, SeedableRng}; + use rand_chacha::ChaCha8Rng; + + use super::*; + use crate::arithmetic::columns::NUM_ARITH_COLUMNS; + use crate::constraint_consumer::ConstraintConsumer; + + // TODO: Should be able to refactor this test to apply to all operations. + #[test] + fn generate_eval_consistency_not_compare() { + type F = GoldilocksField; + + let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + + // if `IS_LT == 0`, then the constraints should be met even if + // all values are garbage. `eval_packed_generic` handles IS_GT + // at the same time, so we check both at once. 
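+        // The constants 2, 3, 5 passed to `ConstraintConsumer` below stand in for the
+        // verifier's random challenges; every constraint is folded into `constraint_accs`,
+        // so the accumulators all remain zero when the constraints are satisfied.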
+ lv[IS_LT] = F::ZERO; + lv[IS_GT] = F::ZERO; + + let mut constrant_consumer = ConstraintConsumer::new( + vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], + F::ONE, + F::ONE, + F::ONE, + ); + eval_packed_generic(&lv, &mut constrant_consumer); + for &acc in &constrant_consumer.constraint_accs { + assert_eq!(acc, F::ZERO); + } + } + + #[test] + fn generate_eval_consistency_compare() { + type F = GoldilocksField; + + let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + const N_ITERS: usize = 1000; + + for _ in 0..N_ITERS { + for (op, other_op) in [(IS_LT, IS_GT), (IS_GT, IS_LT)] { + // set op == 1 and ensure all constraints are satisfied. + // we have to explicitly set the other op to zero since both + // are treated by the call. + lv[op] = F::ONE; + lv[other_op] = F::ZERO; + + // set inputs to random values + for (&ai, bi) in CMP_INPUT_0.iter().zip(CMP_INPUT_1) { + lv[ai] = F::from_canonical_u16(rng.gen()); + lv[bi] = F::from_canonical_u16(rng.gen()); + } + + generate(&mut lv, op); + + let mut constrant_consumer = ConstraintConsumer::new( + vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)], + F::ONE, + F::ONE, + F::ONE, + ); + eval_packed_generic(&lv, &mut constrant_consumer); + for &acc in &constrant_consumer.constraint_accs { + assert_eq!(acc, F::ZERO); + } + } + } + } +} diff --git a/evm/src/arithmetic/mod.rs b/evm/src/arithmetic/mod.rs index 07c4c5a9..69fbda09 100644 --- a/evm/src/arithmetic/mod.rs +++ b/evm/src/arithmetic/mod.rs @@ -1,4 +1,5 @@ mod add; +mod compare; mod mul; mod sub; mod utils; diff --git a/evm/src/arithmetic/sub.rs b/evm/src/arithmetic/sub.rs index ce7932e2..c632eb94 100644 --- a/evm/src/arithmetic/sub.rs +++ b/evm/src/arithmetic/sub.rs @@ -9,26 +9,29 @@ use crate::arithmetic::columns::*; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::range_check_error; -pub fn generate(lv: &mut [F; NUM_ARITH_COLUMNS]) { - let input0_limbs = SUB_INPUT_0.map(|c| lv[c].to_canonical_u64()); - let input1_limbs = SUB_INPUT_1.map(|c| lv[c].to_canonical_u64()); - - // Input and output have 16-bit limbs - let mut output_limbs = [0u64; N_LIMBS]; - +pub(crate) fn u256_sub_br(input0: [u64; N_LIMBS], input1: [u64; N_LIMBS]) -> ([u64; N_LIMBS], u64) { const LIMB_BOUNDARY: u64 = 1 << LIMB_BITS; const MASK: u64 = LIMB_BOUNDARY - 1u64; + let mut output = [0u64; N_LIMBS]; let mut br = 0u64; - for (i, a, b) in izip!(0.., input0_limbs, input1_limbs) { + for (i, a, b) in izip!(0.., input0, input1) { let d = LIMB_BOUNDARY + a - b - br; // if a < b, then d < 2^16 so br = 1 // if a >= b, then d >= 2^16 so br = 0 br = 1u64 - (d >> LIMB_BITS); assert!(br <= 1u64, "input limbs were larger than 16 bits"); - output_limbs[i] = d & MASK; + output[i] = d & MASK; } - // last borrow is dropped because this is subtraction modulo 2^256. + + (output, br) +} + +pub fn generate(lv: &mut [F; NUM_ARITH_COLUMNS]) { + let input0_limbs = SUB_INPUT_0.map(|c| lv[c].to_canonical_u64()); + let input1_limbs = SUB_INPUT_1.map(|c| lv[c].to_canonical_u64()); + + let (output_limbs, _) = u256_sub_br(input0_limbs, input1_limbs); for (&c, output_limb) in SUB_OUTPUT.iter().zip(output_limbs) { lv[c] = F::from_canonical_u64(output_limb); diff --git a/evm/src/arithmetic/utils.rs b/evm/src/arithmetic/utils.rs index dc9a0a2f..c50481f3 100644 --- a/evm/src/arithmetic/utils.rs +++ b/evm/src/arithmetic/utils.rs @@ -19,4 +19,7 @@ macro_rules! 
range_check_error { ($cols:ident, $rc_bits:expr) => { $crate::arithmetic::utils::_range_check_error::<$rc_bits>(file!(), line!(), &$cols); }; + ([$cols:ident], $rc_bits:expr) => { + $crate::arithmetic::utils::_range_check_error::<$rc_bits>(file!(), line!(), &[$cols]); + }; } diff --git a/evm/src/cpu/bootstrap_kernel.rs b/evm/src/cpu/bootstrap_kernel.rs index 2c6afb51..533589af 100644 --- a/evm/src/cpu/bootstrap_kernel.rs +++ b/evm/src/cpu/bootstrap_kernel.rs @@ -15,7 +15,6 @@ use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS}; use crate::cpu::kernel::aggregator::KERNEL; use crate::cpu::kernel::keccak_util::keccakf_u32s; -use crate::cpu::public_inputs::NUM_PUBLIC_INPUTS; use crate::generation::state::GenerationState; use crate::memory::segments::Segment; use crate::memory::NUM_CHANNELS; @@ -50,7 +49,7 @@ pub(crate) fn generate_bootstrap_kernel(state: &mut GenerationState let mut packed_bytes: u32 = 0; for (addr, byte) in chunk { let channel = addr % NUM_CHANNELS; - state.set_mem_current(channel, Segment::Code, addr, byte.into()); + state.set_mem_cpu_current(channel, Segment::Code, addr, byte.into()); packed_bytes = (packed_bytes << 8) | byte as u32; } @@ -73,7 +72,7 @@ pub(crate) fn generate_bootstrap_kernel(state: &mut GenerationState } pub(crate) fn eval_bootstrap_kernel>( - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
<P>
, ) { let local_values: &CpuColumnsView<_> = vars.local_values.borrow(); @@ -109,7 +108,7 @@ pub(crate) fn eval_bootstrap_kernel>( pub(crate) fn eval_bootstrap_kernel_circuit, const D: usize>( builder: &mut CircuitBuilder, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ) { let local_values: &CpuColumnsView<_> = vars.local_values.borrow(); diff --git a/evm/src/cpu/columns/mod.rs b/evm/src/cpu/columns/mod.rs index 3016b2fd..564ea246 100644 --- a/evm/src/cpu/columns/mod.rs +++ b/evm/src/cpu/columns/mod.rs @@ -27,9 +27,6 @@ pub struct CpuColumnsView { /// If CPU cycle: We're in kernel (privileged) mode. pub is_kernel_mode: T, - /// If CPU cycle: The opcode being decoded, in {0, ..., 255}. - pub opcode: T, - // If CPU cycle: flags for EVM instructions. PUSHn, DUPn, and SWAPn only get one flag each. // Invalid opcodes are split between a number of flags for practical reasons. Exactly one of // these flags must be 1. @@ -153,9 +150,12 @@ pub struct CpuColumnsView { /// If CPU cycle: the opcode, broken up into bits in little-endian order. pub opcode_bits: [T; 8], - /// Filter. 1 iff a Keccak permutation is computed on this row. + /// Filter. 1 iff a Keccak lookup is performed on this row. pub is_keccak: T, + /// Filter. 1 iff a Keccak memory lookup is performed on this row. + pub is_keccak_memory: T, + pub(crate) general: CpuGeneralColumnsView, pub(crate) clock: T, diff --git a/evm/src/cpu/cpu_stark.rs b/evm/src/cpu/cpu_stark.rs index 918f7d9b..852b7b54 100644 --- a/evm/src/cpu/cpu_stark.rs +++ b/evm/src/cpu/cpu_stark.rs @@ -22,10 +22,30 @@ pub fn ctl_data_keccak() -> Vec> { res } +pub fn ctl_data_keccak_memory() -> Vec> { + // When executing KECCAK_GENERAL, the memory channels are used as follows: + // channel 0: instruction + // channel 1: stack[-1] = context + // channel 2: stack[-2] = segment + // channel 3: stack[-3] = virtual + let context = Column::single(COL_MAP.mem_value[1][0]); + let segment = Column::single(COL_MAP.mem_value[2][0]); + let virt = Column::single(COL_MAP.mem_value[3][0]); + + let num_channels = F::from_canonical_usize(NUM_CHANNELS); + let clock = Column::linear_combination([(COL_MAP.clock, num_channels)]); + + vec![context, segment, virt, clock] +} + pub fn ctl_filter_keccak() -> Column { Column::single(COL_MAP.is_keccak) } +pub fn ctl_filter_keccak_memory() -> Column { + Column::single(COL_MAP.is_keccak_memory) +} + pub fn ctl_data_logic() -> Vec> { let mut res = Column::singles([COL_MAP.is_and, COL_MAP.is_or, COL_MAP.is_xor]).collect_vec(); let logic = COL_MAP.general.logic(); @@ -53,7 +73,7 @@ pub fn ctl_data_memory(channel: usize) -> Vec> { let scalar = F::from_canonical_usize(NUM_CHANNELS); let addend = F::from_canonical_usize(channel); cols.push(Column::linear_combination_with_constant( - vec![(COL_MAP.clock, scalar)], + [(COL_MAP.clock, scalar)], addend, )); @@ -79,11 +99,10 @@ impl CpuStark { impl, const D: usize> Stark for CpuStark { const COLUMNS: usize = NUM_CPU_COLUMNS; - const PUBLIC_INPUTS: usize = 0; fn eval_packed_generic( &self, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
<P>
, ) where FE: FieldExtension, @@ -102,7 +121,7 @@ impl, const D: usize> Stark for CpuStark, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ) { let local_values = vars.local_values.borrow(); diff --git a/evm/src/cpu/decode.rs b/evm/src/cpu/decode.rs index 4faf7925..e58b474d 100644 --- a/evm/src/cpu/decode.rs +++ b/evm/src/cpu/decode.rs @@ -1,6 +1,5 @@ use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; -use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; @@ -158,13 +157,16 @@ pub fn generate(lv: &mut CpuColumnsView) { // This assert is not _strictly_ necessary, but I include it as a sanity check. assert_eq!(cycle_filter, F::ONE, "cycle_filter should be 0 or 1"); - let opcode = lv.opcode.to_canonical_u64(); - assert!(opcode < 256, "opcode should be in {{0, ..., 255}}"); - let opcode = opcode as u8; - - for (i, bit) in lv.opcode_bits.iter_mut().enumerate() { - *bit = F::from_bool(opcode & (1 << i) != 0); + // Validate all opcode bits. + for bit in lv.opcode_bits.into_iter() { + assert!(bit.to_canonical_u64() <= 1); } + let opcode = lv + .opcode_bits + .into_iter() + .enumerate() + .map(|(i, bit)| bit.to_canonical_u64() << i) + .sum::() as u8; let top_bits: [u8; 9] = [ 0, @@ -217,23 +219,10 @@ pub fn eval_packed_generic( let kernel_mode = lv.is_kernel_mode; yield_constr.constraint(cycle_filter * kernel_mode * (kernel_mode - P::ONES)); - // Ensure that the opcode bits are valid: each has to be either 0 or 1, and they must match - // the opcode. Note that this also implicitly range-checks the opcode. - let bits = lv.opcode_bits; - // First check that the bits are either 0 or 1. - for bit in bits { + // Ensure that the opcode bits are valid: each has to be either 0 or 1. + for bit in lv.opcode_bits { yield_constr.constraint(cycle_filter * bit * (bit - P::ONES)); } - // Now check that they match the opcode. - { - let opcode = lv.opcode; - let reconstructed_opcode: P = bits - .into_iter() - .enumerate() - .map(|(i, bit)| bit * P::Scalar::from_canonical_u64(1 << i)) - .sum(); - yield_constr.constraint(cycle_filter * (opcode - reconstructed_opcode)); - } // Check that the instruction flags are valid. // First, check that they are all either 0 or 1. @@ -258,7 +247,8 @@ pub fn eval_packed_generic( Kernel => P::ONES - kernel_mode, }; // 0 if all the opcode bits match, and something in {1, ..., 8}, otherwise. - let opcode_mismatch: P = bits + let opcode_mismatch: P = lv + .opcode_bits .into_iter() .zip(bits_from_opcode(oc)) .rev() @@ -294,28 +284,12 @@ pub fn eval_ext_circuit, const D: usize>( yield_constr.constraint(builder, constr); } - // Ensure that the opcode bits are valid: each has to be either 0 or 1, and they must match - // the opcode. Note that this also implicitly range-checks the opcode. - let bits = lv.opcode_bits; - // First check that the bits are either 0 or 1. - for bit in bits { + // Ensure that the opcode bits are valid: each has to be either 0 or 1. + for bit in lv.opcode_bits { let constr = builder.mul_sub_extension(bit, bit, bit); let constr = builder.mul_extension(cycle_filter, constr); yield_constr.constraint(builder, constr); } - // Now check that they match the opcode. 
- { - let opcode = lv.opcode; - let reconstructed_opcode = - bits.into_iter() - .enumerate() - .fold(builder.zero_extension(), |cumul, (i, bit)| { - builder.mul_const_add_extension(F::from_canonical_u64(1 << i), bit, cumul) - }); - let diff = builder.sub_extension(opcode, reconstructed_opcode); - let constr = builder.mul_extension(cycle_filter, diff); - yield_constr.constraint(builder, constr); - } // Check that the instruction flags are valid. // First, check that they are all either 0 or 1. @@ -346,7 +320,8 @@ pub fn eval_ext_circuit, const D: usize>( Kernel => builder.sub_extension(one, kernel_mode), }; // 0 if all the opcode bits match, and something in {1, ..., 8}, otherwise. - let opcode_mismatch = bits + let opcode_mismatch = lv + .opcode_bits .into_iter() .zip(bits_from_opcode(oc)) .rev() diff --git a/evm/src/cpu/kernel/global_metadata.rs b/evm/src/cpu/kernel/global_metadata.rs index 6378cd74..ddc3c839 100644 --- a/evm/src/cpu/kernel/global_metadata.rs +++ b/evm/src/cpu/kernel/global_metadata.rs @@ -18,13 +18,23 @@ pub(crate) enum GlobalMetadata { TransactionTrieRoot = 5, /// A pointer to the root of the receipt trie within the `TrieData` buffer. ReceiptTrieRoot = 6, - /// The number of storage tries involved in this transaction. I.e. the number of values in + /// The number of storage tries involved in these transactions. I.e. the number of values in /// `StorageTrieAddresses`, `StorageTriePointers` and `StorageTrieCheckpointPointers`. NumStorageTries = 7, + + // The root digests of each Merkle trie before these transactions. + StateTrieRootDigestBefore = 8, + TransactionsTrieRootDigestBefore = 9, + ReceiptsTrieRootDigestBefore = 10, + + // The root digests of each Merkle trie after these transactions. + StateTrieRootDigestAfter = 11, + TransactionsTrieRootDigestAfter = 12, + ReceiptsTrieRootDigestAfter = 13, } impl GlobalMetadata { - pub(crate) const COUNT: usize = 8; + pub(crate) const COUNT: usize = 14; pub(crate) fn all() -> [Self; Self::COUNT] { [ @@ -36,6 +46,12 @@ impl GlobalMetadata { Self::TransactionTrieRoot, Self::ReceiptTrieRoot, Self::NumStorageTries, + Self::StateTrieRootDigestBefore, + Self::TransactionsTrieRootDigestBefore, + Self::ReceiptsTrieRootDigestBefore, + Self::StateTrieRootDigestAfter, + Self::TransactionsTrieRootDigestAfter, + Self::ReceiptsTrieRootDigestAfter, ] } @@ -50,6 +66,20 @@ impl GlobalMetadata { GlobalMetadata::TransactionTrieRoot => "GLOBAL_METADATA_TXN_TRIE_ROOT", GlobalMetadata::ReceiptTrieRoot => "GLOBAL_METADATA_RECEIPT_TRIE_ROOT", GlobalMetadata::NumStorageTries => "GLOBAL_METADATA_NUM_STORAGE_TRIES", + GlobalMetadata::StateTrieRootDigestBefore => "GLOBAL_METADATA_STATE_TRIE_DIGEST_BEFORE", + GlobalMetadata::TransactionsTrieRootDigestBefore => { + "GLOBAL_METADATA_TXNS_TRIE_DIGEST_BEFORE" + } + GlobalMetadata::ReceiptsTrieRootDigestBefore => { + "GLOBAL_METADATA_RECEIPTS_TRIE_DIGEST_BEFORE" + } + GlobalMetadata::StateTrieRootDigestAfter => "GLOBAL_METADATA_STATE_TRIE_DIGEST_AFTER", + GlobalMetadata::TransactionsTrieRootDigestAfter => { + "GLOBAL_METADATA_TXNS_TRIE_DIGEST_AFTER" + } + GlobalMetadata::ReceiptsTrieRootDigestAfter => { + "GLOBAL_METADATA_RECEIPTS_TRIE_DIGEST_AFTER" + } } } } diff --git a/evm/src/cpu/kernel/mod.rs b/evm/src/cpu/kernel/mod.rs index eceba813..ef5a9ba0 100644 --- a/evm/src/cpu/kernel/mod.rs +++ b/evm/src/cpu/kernel/mod.rs @@ -2,9 +2,9 @@ pub mod aggregator; pub mod assembler; mod ast; mod constants; -mod context_metadata; +pub(crate) mod context_metadata; mod cost_estimator; -mod global_metadata; +pub(crate) mod 
global_metadata; pub(crate) mod keccak_util; mod opcodes; mod optimizer; diff --git a/evm/src/cpu/mod.rs b/evm/src/cpu/mod.rs index 5950c837..92e3e6ec 100644 --- a/evm/src/cpu/mod.rs +++ b/evm/src/cpu/mod.rs @@ -5,6 +5,5 @@ pub mod cpu_stark; pub(crate) mod decode; mod jumps; pub mod kernel; -pub mod public_inputs; mod simple_logic; mod syscalls; diff --git a/evm/src/cpu/public_inputs.rs b/evm/src/cpu/public_inputs.rs deleted file mode 100644 index 0a02e406..00000000 --- a/evm/src/cpu/public_inputs.rs +++ /dev/null @@ -1 +0,0 @@ -pub const NUM_PUBLIC_INPUTS: usize = 0; // PIs will be added later. diff --git a/evm/src/cross_table_lookup.rs b/evm/src/cross_table_lookup.rs index 899b46d7..40089ecd 100644 --- a/evm/src/cross_table_lookup.rs +++ b/evm/src/cross_table_lookup.rs @@ -19,12 +19,12 @@ use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer use crate::permutation::{ get_grand_product_challenge_set, GrandProductChallenge, GrandProductChallengeSet, }; -use crate::proof::{StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget}; +use crate::proof::{StarkProof, StarkProofTarget}; use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; /// Represent a linear combination of columns. -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct Column { linear_combination: Vec<(usize, F)>, constant: F, @@ -42,6 +42,17 @@ impl Column { cs.into_iter().map(Self::single) } + pub fn constant(constant: F) -> Self { + Self { + linear_combination: vec![], + constant, + } + } + + pub fn zero() -> Self { + Self::constant(F::ZERO) + } + pub fn linear_combination_with_constant>( iter: I, constant: F, @@ -67,6 +78,10 @@ impl Column { Self::linear_combination(cs.into_iter().zip(F::TWO.powers())) } + pub fn le_bytes>(cs: I) -> Self { + Self::linear_combination(cs.into_iter().zip(F::from_canonical_u16(256).powers())) + } + pub fn sum>(cs: I) -> Self { Self::linear_combination(cs.into_iter().zip(repeat(F::ONE))) } @@ -115,7 +130,7 @@ impl Column { } } -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct TableWithColumns { table: Table, columns: Vec>, @@ -322,7 +337,7 @@ impl<'a, F: RichField + Extendable, const D: usize> CtlCheckVars<'a, F, F::Extension, F::Extension, D> { pub(crate) fn from_proofs>( - proofs: &[StarkProofWithPublicInputs], + proofs: &[StarkProof], cross_table_lookups: &'a [CrossTableLookup], ctl_challenges: &'a GrandProductChallengeSet, num_permutation_zs: &[usize], @@ -332,7 +347,7 @@ impl<'a, F: RichField + Extendable, const D: usize> .iter() .zip(num_permutation_zs) .map(|(p, &num_perms)| { - let openings = &p.proof.openings; + let openings = &p.openings; let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms); let ctl_zs_next = openings.permutation_ctl_zs_next.iter().skip(num_perms); ctl_zs.zip(ctl_zs_next) @@ -373,7 +388,7 @@ impl<'a, F: RichField + Extendable, const D: usize> } pub(crate) fn eval_cross_table_lookup_checks( - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, ctl_vars: &[CtlCheckVars], consumer: &mut ConstraintConsumer
<P>
, ) where @@ -426,7 +441,7 @@ pub struct CtlCheckVarsTarget<'a, F: Field, const D: usize> { impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { pub(crate) fn from_proofs( - proofs: &[StarkProofWithPublicInputsTarget], + proofs: &[StarkProofTarget], cross_table_lookups: &'a [CrossTableLookup], ctl_challenges: &'a GrandProductChallengeSet, num_permutation_zs: &[usize], @@ -436,7 +451,7 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> { .iter() .zip(num_permutation_zs) .map(|(p, &num_perms)| { - let openings = &p.proof.openings; + let openings = &p.openings; let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms); let ctl_zs_next = openings.permutation_ctl_zs_next.iter().skip(num_perms); ctl_zs.zip(ctl_zs_next) @@ -535,7 +550,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit< const D: usize, >( builder: &mut CircuitBuilder, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, ctl_vars: &[CtlCheckVarsTarget], consumer: &mut RecursiveConstraintConsumer, ) { @@ -597,17 +612,17 @@ pub(crate) fn verify_cross_table_lookups< const D: usize, >( cross_table_lookups: Vec>, - proofs: &[StarkProofWithPublicInputs], + proofs: &[StarkProof], challenges: GrandProductChallengeSet, config: &StarkConfig, ) -> Result<()> { let degrees_bits = proofs .iter() - .map(|p| p.proof.recover_degree_bits(config)) + .map(|p| p.recover_degree_bits(config)) .collect::>(); let mut ctl_zs_openings = proofs .iter() - .map(|p| p.proof.openings.ctl_zs_last.iter()) + .map(|p| p.openings.ctl_zs_last.iter()) .collect::>(); for ( i, @@ -655,17 +670,17 @@ pub(crate) fn verify_cross_table_lookups_circuit< >( builder: &mut CircuitBuilder, cross_table_lookups: Vec>, - proofs: &[StarkProofWithPublicInputsTarget], + proofs: &[StarkProofTarget], challenges: GrandProductChallengeSet, inner_config: &StarkConfig, ) { let degrees_bits = proofs .iter() - .map(|p| p.proof.recover_degree_bits(inner_config)) + .map(|p| p.recover_degree_bits(inner_config)) .collect::>(); let mut ctl_zs_openings = proofs .iter() - .map(|p| p.proof.openings.ctl_zs_last.iter()) + .map(|p| p.openings.ctl_zs_last.iter()) .collect::>(); for ( i, diff --git a/evm/src/generation/mod.rs b/evm/src/generation/mod.rs index 02c91d16..67b65c31 100644 --- a/evm/src/generation/mod.rs +++ b/evm/src/generation/mod.rs @@ -1,4 +1,4 @@ -use ethereum_types::U256; +use ethereum_types::Address; use plonky2::field::extension::Extendable; use plonky2::field::polynomial::PolynomialValues; use plonky2::field::types::Field; @@ -7,67 +7,102 @@ use plonky2::hash::hash_types::RichField; use crate::all_stark::AllStark; use crate::cpu::bootstrap_kernel::generate_bootstrap_kernel; use crate::cpu::columns::NUM_CPU_COLUMNS; +use crate::cpu::kernel::global_metadata::GlobalMetadata; +use crate::generation::partial_trie::PartialTrie; use crate::generation::state::GenerationState; +use crate::memory::segments::Segment; +use crate::memory::NUM_CHANNELS; +use crate::proof::{BlockMetadata, PublicValues, TrieRoots}; use crate::util::trace_rows_to_poly_values; pub(crate) mod memory; +pub mod partial_trie; pub(crate) mod state; -/// A piece of data which has been encoded using Recursive Length Prefix (RLP) serialization. -/// See https://ethereum.org/en/developers/docs/data-structures-and-encoding/rlp/ -pub type RlpBlob = Vec; +/// Inputs needed for trace generation. +pub struct GenerationInputs { + pub signed_txns: Vec>, -/// Merkle proofs are encoded using an RLP blob for each node in the path. 
-pub type RlpMerkleProof = Vec; + /// A partial version of the state trie prior to these transactions. It should include all nodes + /// that will be accessed by these transactions. + pub state_trie: PartialTrie, -#[allow(unused)] // TODO: Should be used soon. -pub struct TransactionData { - pub signed_txn: Vec, + /// A partial version of the transaction trie prior to these transactions. It should include all + /// nodes that will be accessed by these transactions. + pub transactions_trie: PartialTrie, - /// A Merkle proof for each interaction with the state trie, ordered chronologically. - pub trie_proofs: Vec, + /// A partial version of the receipt trie prior to these transactions. It should include all nodes + /// that will be accessed by these transactions. + pub receipts_trie: PartialTrie, + + /// A partial version of each storage trie prior to these transactions. It should include all + /// storage tries, and nodes therein, that will be accessed by these transactions. + pub storage_tries: Vec<(Address, PartialTrie)>, + + pub block_metadata: BlockMetadata, } -#[allow(unused)] // TODO: Should be used soon. -pub fn generate_traces, const D: usize>( +pub(crate) fn generate_traces, const D: usize>( all_stark: &AllStark, - txns: &[TransactionData], -) -> Vec>> { + inputs: GenerationInputs, +) -> (Vec>>, PublicValues) { let mut state = GenerationState::::default(); generate_bootstrap_kernel::(&mut state); - for txn in txns { + for txn in &inputs.signed_txns { generate_txn(&mut state, txn); } + // TODO: Pad to a power of two, ending in the `halt` kernel function. + + let cpu_rows = state.cpu_rows.len(); + let mem_end_timestamp = cpu_rows * NUM_CHANNELS; + let mut read_metadata = |field| { + state.get_mem( + 0, + Segment::GlobalMetadata, + field as usize, + mem_end_timestamp, + ) + }; + + let trie_roots_before = TrieRoots { + state_root: read_metadata(GlobalMetadata::StateTrieRootDigestBefore), + transactions_root: read_metadata(GlobalMetadata::TransactionsTrieRootDigestBefore), + receipts_root: read_metadata(GlobalMetadata::ReceiptsTrieRootDigestBefore), + }; + let trie_roots_after = TrieRoots { + state_root: read_metadata(GlobalMetadata::StateTrieRootDigestAfter), + transactions_root: read_metadata(GlobalMetadata::TransactionsTrieRootDigestAfter), + receipts_root: read_metadata(GlobalMetadata::ReceiptsTrieRootDigestAfter), + }; + let GenerationState { cpu_rows, current_cpu_row, memory, keccak_inputs, logic_ops, - prover_inputs, .. } = state; assert_eq!(current_cpu_row, [F::ZERO; NUM_CPU_COLUMNS].into()); - assert_eq!(prover_inputs, vec![], "Not all prover inputs were consumed"); let cpu_trace = trace_rows_to_poly_values(cpu_rows); let keccak_trace = all_stark.keccak_stark.generate_trace(keccak_inputs); let logic_trace = all_stark.logic_stark.generate_trace(logic_ops); let memory_trace = all_stark.memory_stark.generate_trace(memory.log); - vec![cpu_trace, keccak_trace, logic_trace, memory_trace] + let traces = vec![cpu_trace, keccak_trace, logic_trace, memory_trace]; + + let public_values = PublicValues { + trie_roots_before, + trie_roots_after, + block_metadata: inputs.block_metadata, + }; + + (traces, public_values) } -fn generate_txn(state: &mut GenerationState, txn: &TransactionData) { - // TODO: Add transaction RLP to prover_input. - - // Supply Merkle trie proofs as prover inputs. 
- for proof in &txn.trie_proofs { - let proof = proof - .iter() - .flat_map(|node_rlp| node_rlp.iter().map(|byte| U256::from(*byte))); - state.prover_inputs.extend(proof); - } +fn generate_txn(_state: &mut GenerationState, _signed_txn: &[u8]) { + // TODO } diff --git a/evm/src/generation/partial_trie.rs b/evm/src/generation/partial_trie.rs new file mode 100644 index 00000000..96751310 --- /dev/null +++ b/evm/src/generation/partial_trie.rs @@ -0,0 +1,32 @@ +use ethereum_types::U256; + +/// A partial trie, or a sub-trie thereof. This mimics the structure of an Ethereum trie, except +/// with an additional `Hash` node type, representing a node whose data is not needed to process +/// our transaction. +pub enum PartialTrie { + /// An empty trie. + Empty, + /// The digest of trie whose data does not need to be stored. + Hash(U256), + /// A branch node, which consists of 16 children and an optional value. + Branch { + children: [Box; 16], + value: Option, + }, + /// An extension node, which consists of a list of nibbles and a single child. + Extension { + nibbles: Nibbles, + child: Box, + }, + /// A leaf node, which consists of a list of nibbles and a value. + Leaf { nibbles: Nibbles, value: Vec }, +} + +/// A sequence of nibbles. +pub struct Nibbles { + /// The number of nibbles in this sequence. + pub count: usize, + /// A packed encoding of these nibbles. Only the first (least significant) `4 * count` bits are + /// used. The rest are unused and should be zero. + pub packed: U256, +} diff --git a/evm/src/generation/state.rs b/evm/src/generation/state.rs index 04ab4016..866f9fd7 100644 --- a/evm/src/generation/state.rs +++ b/evm/src/generation/state.rs @@ -2,11 +2,15 @@ use std::mem; use ethereum_types::U256; use plonky2::field::types::Field; +use tiny_keccak::keccakf; use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS}; use crate::generation::memory::MemoryState; +use crate::keccak_memory::keccak_memory_stark::KeccakMemoryOp; use crate::memory::memory_stark::MemoryOp; use crate::memory::segments::Segment; +use crate::memory::NUM_CHANNELS; +use crate::util::u256_limbs; use crate::{keccak, logic}; #[derive(Debug)] @@ -18,10 +22,8 @@ pub(crate) struct GenerationState { pub(crate) memory: MemoryState, pub(crate) keccak_inputs: Vec<[u64; keccak::keccak_stark::NUM_INPUTS]>, + pub(crate) keccak_memory_inputs: Vec, pub(crate) logic_ops: Vec, - - /// Non-deterministic inputs provided by the prover. - pub(crate) prover_inputs: Vec, } impl GenerationState { @@ -51,17 +53,49 @@ impl GenerationState { result } - /// Read some memory within the current execution context, and log the operation. + /// Like `get_mem_cpu`, but reads from the current context specifically. #[allow(unused)] // TODO: Should be used soon. - pub(crate) fn get_mem_current( + pub(crate) fn get_mem_cpu_current( &mut self, channel_index: usize, segment: Segment, virt: usize, ) -> U256 { - self.current_cpu_row.mem_channel_used[channel_index] = F::ONE; - let timestamp = self.cpu_rows.len(); let context = self.current_context; + self.get_mem_cpu(channel_index, context, segment, virt) + } + + /// Simulates the CPU reading some memory through the given channel. Besides logging the memory + /// operation, this also generates the associated registers in the current CPU row. 
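+    /// The memory timestamp for channel `i` of the current CPU row is
+    /// `clock * NUM_CHANNELS + i`, matching the linear combination of the clock used by
+    /// the CPU-to-memory cross-table lookup in `cpu_stark::ctl_data_memory`.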
+ pub(crate) fn get_mem_cpu( + &mut self, + channel_index: usize, + context: usize, + segment: Segment, + virt: usize, + ) -> U256 { + let timestamp = self.cpu_rows.len() * NUM_CHANNELS + channel_index; + let value = self.get_mem(context, segment, virt, timestamp); + + self.current_cpu_row.mem_channel_used[channel_index] = F::ONE; + self.current_cpu_row.mem_is_read[channel_index] = F::ONE; + self.current_cpu_row.mem_addr_context[channel_index] = F::from_canonical_usize(context); + self.current_cpu_row.mem_addr_segment[channel_index] = + F::from_canonical_usize(segment as usize); + self.current_cpu_row.mem_addr_virtual[channel_index] = F::from_canonical_usize(virt); + self.current_cpu_row.mem_value[channel_index] = u256_limbs(value); + + value + } + + /// Read some memory, and log the operation. + pub(crate) fn get_mem( + &mut self, + context: usize, + segment: Segment, + virt: usize, + timestamp: usize, + ) -> U256 { let value = self.memory.contexts[context].segments[segment as usize].get(virt); self.memory.log.push(MemoryOp { filter: true, @@ -76,16 +110,47 @@ impl GenerationState { } /// Write some memory within the current execution context, and log the operation. - pub(crate) fn set_mem_current( + pub(crate) fn set_mem_cpu_current( &mut self, channel_index: usize, segment: Segment, virt: usize, value: U256, ) { - self.current_cpu_row.mem_channel_used[channel_index] = F::ONE; - let timestamp = self.cpu_rows.len(); let context = self.current_context; + self.set_mem_cpu(channel_index, context, segment, virt, value); + } + + /// Write some memory, and log the operation. + pub(crate) fn set_mem_cpu( + &mut self, + channel_index: usize, + context: usize, + segment: Segment, + virt: usize, + value: U256, + ) { + let timestamp = self.cpu_rows.len() * NUM_CHANNELS + channel_index; + self.set_mem(context, segment, virt, value, timestamp); + + self.current_cpu_row.mem_channel_used[channel_index] = F::ONE; + self.current_cpu_row.mem_is_read[channel_index] = F::ZERO; // For clarity; should already be 0. + self.current_cpu_row.mem_addr_context[channel_index] = F::from_canonical_usize(context); + self.current_cpu_row.mem_addr_segment[channel_index] = + F::from_canonical_usize(segment as usize); + self.current_cpu_row.mem_addr_virtual[channel_index] = F::from_canonical_usize(virt); + self.current_cpu_row.mem_value[channel_index] = u256_limbs(value); + } + + /// Write some memory, and log the operation. + pub(crate) fn set_mem( + &mut self, + context: usize, + segment: Segment, + virt: usize, + value: U256, + timestamp: usize, + ) { self.memory.log.push(MemoryOp { filter: true, timestamp, @@ -98,6 +163,54 @@ impl GenerationState { self.memory.contexts[context].segments[segment as usize].set(virt, value) } + /// Evaluate the Keccak-f permutation in-place on some data in memory, and record the operations + /// for the purpose of witness generation. + #[allow(unused)] // TODO: Should be used soon. 
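+    // Each of the KECCAK_WIDTH_BYTES (200) input bytes is read with its own memory
+    // operation at `read_timestamp`, then packed little-endian into the 25 u64 lanes fed
+    // to the Keccak permutation; the recorded `KeccakMemoryOp` drives witness generation
+    // for the Keccak-memory table. Writing the output back to memory is still a TODO.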
+ pub(crate) fn keccak_memory( + &mut self, + context: usize, + segment: Segment, + virt: usize, + ) -> [u64; keccak::keccak_stark::NUM_INPUTS] { + let read_timestamp = self.cpu_rows.len() * NUM_CHANNELS; + let _write_timestamp = read_timestamp + 1; + let input = (0..25) + .map(|i| { + let bytes = [0, 1, 2, 3, 4, 5, 6, 7].map(|j| { + let virt = virt + i * 8 + j; + let byte = self.get_mem(context, segment, virt, read_timestamp); + debug_assert!(byte.bits() <= 8); + byte.as_u32() as u8 + }); + u64::from_le_bytes(bytes) + }) + .collect::>() + .try_into() + .unwrap(); + let output = self.keccak(input); + self.keccak_memory_inputs.push(KeccakMemoryOp { + context, + segment, + virt, + read_timestamp, + input, + output, + }); + // TODO: Write output to memory. + output + } + + /// Evaluate the Keccak-f permutation, and record the operation for the purpose of witness + /// generation. + pub(crate) fn keccak( + &mut self, + mut input: [u64; keccak::keccak_stark::NUM_INPUTS], + ) -> [u64; keccak::keccak_stark::NUM_INPUTS] { + self.keccak_inputs.push(input); + keccakf(&mut input); + input + } + pub(crate) fn commit_cpu_row(&mut self) { let mut swapped_row = [F::ZERO; NUM_CPU_COLUMNS].into(); mem::swap(&mut self.current_cpu_row, &mut swapped_row); @@ -115,8 +228,8 @@ impl Default for GenerationState { current_context: 0, memory: MemoryState::default(), keccak_inputs: vec![], + keccak_memory_inputs: vec![], logic_ops: vec![], - prover_inputs: vec![], } } } diff --git a/evm/src/get_challenges.rs b/evm/src/get_challenges.rs index 88727ad3..52c2b796 100644 --- a/evm/src/get_challenges.rs +++ b/evm/src/get_challenges.rs @@ -24,9 +24,11 @@ impl, C: GenericConfig, const D: usize> A let mut challenger = Challenger::::new(); for proof in &self.stark_proofs { - challenger.observe_cap(&proof.proof.trace_cap); + challenger.observe_cap(&proof.trace_cap); } + // TODO: Observe public values. + let ctl_challenges = get_grand_product_challenge_set(&mut challenger, config.num_challenges); @@ -58,7 +60,7 @@ impl AllProofTarget { let mut challenger = RecursiveChallenger::::new(builder); for proof in &self.stark_proofs { - challenger.observe_cap(&proof.proof.trace_cap); + challenger.observe_cap(&proof.trace_cap); } let ctl_challenges = @@ -85,7 +87,7 @@ impl AllProofTarget { } } -impl StarkProofWithPublicInputs +impl StarkProof where F: RichField + Extendable, C: GenericConfig, @@ -98,7 +100,7 @@ where stark_permutation_batch_size: usize, config: &StarkConfig, ) -> StarkProofChallenges { - let degree_bits = self.proof.recover_degree_bits(config); + let degree_bits = self.recover_degree_bits(config); let StarkProof { permutation_ctl_zs_cap, @@ -112,7 +114,7 @@ where .. }, .. - } = &self.proof; + } = &self; let num_challenges = config.num_challenges; @@ -148,7 +150,7 @@ where } } -impl StarkProofWithPublicInputsTarget { +impl StarkProofTarget { pub(crate) fn get_challenges, C: GenericConfig>( &self, builder: &mut CircuitBuilder, @@ -172,7 +174,7 @@ impl StarkProofWithPublicInputsTarget { .. }, .. - } = &self.proof; + } = &self; let num_challenges = config.num_challenges; diff --git a/evm/src/keccak/keccak_stark.rs b/evm/src/keccak/keccak_stark.rs index d405c0e8..23ffe0e9 100644 --- a/evm/src/keccak/keccak_stark.rs +++ b/evm/src/keccak/keccak_stark.rs @@ -32,8 +32,6 @@ pub(crate) const NUM_ROUNDS: usize = 24; /// Number of 64-bit elements in the Keccak permutation input. 
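/// (A Keccak-f[1600] state is 25 lanes of 64 bits, i.e. 200 bytes, the same width as
/// `KECCAK_WIDTH_BYTES` in the new Keccak-memory table.)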
pub(crate) const NUM_INPUTS: usize = 25; -pub(crate) const NUM_PUBLIC_INPUTS: usize = 0; - pub fn ctl_data() -> Vec> { let mut res: Vec<_> = (0..2 * NUM_INPUTS).map(reg_input_limb).collect(); res.extend(Column::singles((0..2 * NUM_INPUTS).map(reg_output_limb))); @@ -134,9 +132,10 @@ impl, const D: usize> KeccakStark { } } - // Populate A'. - // A'[x, y] = xor(A[x, y], D[x]) - // = xor(A[x, y], C[x - 1], ROT(C[x + 1], 1)) + // Populate A'. To avoid shifting indices, we rewrite + // A'[x, y, z] = xor(A[x, y, z], C[x - 1, z], C[x + 1, z - 1]) + // as + // A'[x, y, z] = xor(A[x, y, z], C[x, z], C'[x, z]). for x in 0..5 { for y in 0..5 { for z in 0..64 { @@ -145,11 +144,8 @@ impl, const D: usize> KeccakStark { let reg_a_limb = reg_a(x, y) + is_high_limb; let a_limb = row[reg_a_limb].to_canonical_u64() as u32; let a_bit = F::from_bool(((a_limb >> bit_in_limb) & 1) != 0); - row[reg_a_prime(x, y, z)] = xor([ - a_bit, - row[reg_c((x + 4) % 5, z)], - row[reg_c((x + 1) % 5, (z + 64 - 1) % 64)], - ]); + row[reg_a_prime(x, y, z)] = + xor([a_bit, row[reg_c(x, z)], row[reg_c_prime(x, z)]]); } } } @@ -228,11 +224,10 @@ impl, const D: usize> KeccakStark { impl, const D: usize> Stark for KeccakStark { const COLUMNS: usize = NUM_COLUMNS; - const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS; fn eval_packed_generic( &self, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
, ) where FE: FieldExtension, @@ -380,7 +375,7 @@ impl, const D: usize> Stark for KeccakStark, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ) { let two = builder.two(); diff --git a/evm/src/keccak/round_flags.rs b/evm/src/keccak/round_flags.rs index 6a4d03b6..920ca4c8 100644 --- a/evm/src/keccak/round_flags.rs +++ b/evm/src/keccak/round_flags.rs @@ -7,12 +7,12 @@ use plonky2::plonk::circuit_builder::CircuitBuilder; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::keccak::columns::reg_step; use crate::keccak::columns::NUM_COLUMNS; -use crate::keccak::keccak_stark::{NUM_PUBLIC_INPUTS, NUM_ROUNDS}; +use crate::keccak::keccak_stark::NUM_ROUNDS; use crate::vars::StarkEvaluationTargets; use crate::vars::StarkEvaluationVars; pub(crate) fn eval_round_flags>( - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
, ) { // Initially, the first step flag should be 1 while the others should be 0. @@ -30,7 +30,7 @@ pub(crate) fn eval_round_flags>( pub(crate) fn eval_round_flags_recursively, const D: usize>( builder: &mut CircuitBuilder, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ) { let one = builder.one_extension(); diff --git a/evm/src/keccak_memory/columns.rs b/evm/src/keccak_memory/columns.rs new file mode 100644 index 00000000..92bdbf2b --- /dev/null +++ b/evm/src/keccak_memory/columns.rs @@ -0,0 +1,29 @@ +pub(crate) const KECCAK_WIDTH_BYTES: usize = 200; + +/// 1 if this row represents a real operation; 0 if it's a padding row. +pub(crate) const COL_IS_REAL: usize = 0; + +// The address at which we will read inputs and write outputs. +pub(crate) const COL_CONTEXT: usize = 1; +pub(crate) const COL_SEGMENT: usize = 2; +pub(crate) const COL_VIRTUAL: usize = 3; + +/// The timestamp at which inputs should be read from memory. +/// Outputs will be written at the following timestamp. +pub(crate) const COL_READ_TIMESTAMP: usize = 4; + +const START_INPUT_LIMBS: usize = 5; +/// A byte of the input. +pub(crate) fn col_input_byte(i: usize) -> usize { + debug_assert!(i < KECCAK_WIDTH_BYTES); + START_INPUT_LIMBS + i +} + +const START_OUTPUT_LIMBS: usize = START_INPUT_LIMBS + KECCAK_WIDTH_BYTES; +/// A byte of the output. +pub(crate) fn col_output_byte(i: usize) -> usize { + debug_assert!(i < KECCAK_WIDTH_BYTES); + START_OUTPUT_LIMBS + i +} + +pub const NUM_COLUMNS: usize = START_OUTPUT_LIMBS + KECCAK_WIDTH_BYTES; diff --git a/evm/src/keccak_memory/keccak_memory_stark.rs b/evm/src/keccak_memory/keccak_memory_stark.rs new file mode 100644 index 00000000..cf8955b3 --- /dev/null +++ b/evm/src/keccak_memory/keccak_memory_stark.rs @@ -0,0 +1,227 @@ +use std::marker::PhantomData; + +use plonky2::field::extension::{Extendable, FieldExtension}; +use plonky2::field::packed::PackedField; +use plonky2::field::polynomial::PolynomialValues; +use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; +use plonky2::timed; +use plonky2::util::timing::TimingTree; + +use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::cross_table_lookup::Column; +use crate::keccak::keccak_stark::NUM_INPUTS; +use crate::keccak_memory::columns::*; +use crate::memory::segments::Segment; +use crate::stark::Stark; +use crate::util::trace_rows_to_poly_values; +use crate::vars::StarkEvaluationTargets; +use crate::vars::StarkEvaluationVars; + +pub(crate) fn ctl_looked_data() -> Vec> { + Column::singles([COL_CONTEXT, COL_SEGMENT, COL_VIRTUAL, COL_READ_TIMESTAMP]).collect() +} + +pub(crate) fn ctl_looking_keccak() -> Vec> { + let input_cols = (0..50).map(|i| { + Column::le_bytes((0..4).map(|j| { + let byte_index = i * 4 + j; + col_input_byte(byte_index) + })) + }); + let output_cols = (0..50).map(|i| { + Column::le_bytes((0..4).map(|j| { + let byte_index = i * 4 + j; + col_output_byte(byte_index) + })) + }); + input_cols.chain(output_cols).collect() +} + +pub(crate) fn ctl_looking_memory(i: usize, is_read: bool) -> Vec> { + let mut res = vec![Column::constant(F::from_bool(is_read))]; + res.extend(Column::singles([COL_CONTEXT, COL_SEGMENT, COL_VIRTUAL])); + + res.push(Column::single(col_input_byte(i))); + // Since we're reading or writing a single byte, the higher limbs must be zero. + res.extend((1..8).map(|_| Column::zero())); + + // Since COL_READ_TIMESTAMP is the read time, we add 1 if this is a write. 
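The columns assembled by `ctl_looking_memory`, whose remaining lines follow below, have to line up with `memory_stark::ctl_data`: an is_read flag, the (context, segment, virtual) address, a single byte in the lowest value limb with the seven higher limbs forced to zero, and a timestamp that is `read_timestamp` for reads and `read_timestamp + 1` for writes. Here is a minimal standalone sketch of that row shape using plain integers; the function and its flat `Vec<u64>` layout are hypothetical stand-ins for the crate's `Column` combinators.

fn memory_ctl_row(
    is_read: bool,
    context: u64,
    segment: u64,
    virt: u64,
    byte: u8,
    read_timestamp: u64,
) -> Vec<u64> {
    let mut row = vec![is_read as u64, context, segment, virt];
    row.push(byte as u64); // the byte lives in the lowest value limb
    row.extend(std::iter::repeat(0u64).take(7)); // the seven higher limbs are zero
    row.push(read_timestamp + (!is_read) as u64); // writes land one timestamp later
    row
}

fn main() {
    let read = memory_ctl_row(true, 0, 1, 42, 0xab, 100);
    let write = memory_ctl_row(false, 0, 1, 42, 0xcd, 100);
    assert_eq!(read.last(), Some(&100));
    assert_eq!(write.last(), Some(&101));
    assert_eq!(read.len(), write.len());
}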
+ let is_write_f = F::from_bool(!is_read); + res.push(Column::linear_combination_with_constant( + [(COL_READ_TIMESTAMP, F::ONE)], + is_write_f, + )); + + assert_eq!( + res.len(), + crate::memory::memory_stark::ctl_data::().len() + ); + res +} + +/// CTL filter used for both directions (looked and looking). +pub(crate) fn ctl_filter() -> Column { + Column::single(COL_IS_REAL) +} + +/// Information about a Keccak memory operation needed for witness generation. +#[derive(Debug)] +pub(crate) struct KeccakMemoryOp { + // The address at which we will read inputs and write outputs. + pub(crate) context: usize, + pub(crate) segment: Segment, + pub(crate) virt: usize, + + /// The timestamp at which inputs should be read from memory. + /// Outputs will be written at the following timestamp. + pub(crate) read_timestamp: usize, + + /// The input that was read at that address. + pub(crate) input: [u64; NUM_INPUTS], + pub(crate) output: [u64; NUM_INPUTS], +} + +#[derive(Copy, Clone, Default)] +pub struct KeccakMemoryStark { + pub(crate) f: PhantomData, +} + +impl, const D: usize> KeccakMemoryStark { + #[allow(unused)] // TODO: Should be used soon. + pub(crate) fn generate_trace( + &self, + operations: Vec, + min_rows: usize, + ) -> Vec> { + let mut timing = TimingTree::new("generate trace", log::Level::Debug); + + // Generate the witness row-wise. + let trace_rows = timed!( + &mut timing, + "generate trace rows", + self.generate_trace_rows(operations, min_rows) + ); + + let trace_polys = timed!( + &mut timing, + "convert to PolynomialValues", + trace_rows_to_poly_values(trace_rows) + ); + + timing.print(); + trace_polys + } + + fn generate_trace_rows( + &self, + operations: Vec, + min_rows: usize, + ) -> Vec<[F; NUM_COLUMNS]> { + let num_rows = operations.len().max(min_rows).next_power_of_two(); + let mut rows = Vec::with_capacity(num_rows); + for op in operations { + rows.push(self.generate_row_for_op(op)); + } + + let padding_row = self.generate_padding_row(); + for _ in rows.len()..num_rows { + rows.push(padding_row); + } + rows + } + + fn generate_row_for_op(&self, op: KeccakMemoryOp) -> [F; NUM_COLUMNS] { + let mut row = [F::ZERO; NUM_COLUMNS]; + row[COL_IS_REAL] = F::ONE; + row[COL_CONTEXT] = F::from_canonical_usize(op.context); + row[COL_SEGMENT] = F::from_canonical_usize(op.segment as usize); + row[COL_VIRTUAL] = F::from_canonical_usize(op.virt); + row[COL_READ_TIMESTAMP] = F::from_canonical_usize(op.read_timestamp); + for i in 0..25 { + let input_u64 = op.input[i]; + let output_u64 = op.output[i]; + for j in 0..8 { + let byte_index = i * 8 + j; + row[col_input_byte(byte_index)] = F::from_canonical_u8(input_u64.to_le_bytes()[j]); + row[col_output_byte(byte_index)] = + F::from_canonical_u8(output_u64.to_le_bytes()[j]); + } + } + row + } + + fn generate_padding_row(&self) -> [F; NUM_COLUMNS] { + // We just need COL_IS_REAL to be zero, which it is by default. + // The other fields will have no effect. + [F::ZERO; NUM_COLUMNS] + } +} + +impl, const D: usize> Stark for KeccakMemoryStark { + const COLUMNS: usize = NUM_COLUMNS; + + fn eval_packed_generic( + &self, + vars: StarkEvaluationVars, + yield_constr: &mut ConstraintConsumer
, + ) where + FE: FieldExtension, + P: PackedField, + { + // is_real must be 0 or 1. + let is_real = vars.local_values[COL_IS_REAL]; + yield_constr.constraint(is_real * (is_real - P::ONES)); + } + + fn eval_ext_circuit( + &self, + builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder, + vars: StarkEvaluationTargets, + yield_constr: &mut RecursiveConstraintConsumer, + ) { + // is_real must be 0 or 1. + let is_real = vars.local_values[COL_IS_REAL]; + let constraint = builder.mul_sub_extension(is_real, is_real, is_real); + yield_constr.constraint(builder, constraint); + } + + fn constraint_degree(&self) -> usize { + 2 + } +} + +#[cfg(test)] +mod tests { + use anyhow::Result; + use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + + use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark; + use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; + + #[test] + fn test_stark_degree() -> Result<()> { + const D: usize = 2; + type C = PoseidonGoldilocksConfig; + type F = >::F; + type S = KeccakMemoryStark; + + let stark = S { + f: Default::default(), + }; + test_stark_low_degree(stark) + } + + #[test] + fn test_stark_circuit() -> Result<()> { + const D: usize = 2; + type C = PoseidonGoldilocksConfig; + type F = >::F; + type S = KeccakMemoryStark; + + let stark = S { + f: Default::default(), + }; + test_stark_circuit_constraints::(stark) + } +} diff --git a/evm/src/keccak_memory/mod.rs b/evm/src/keccak_memory/mod.rs new file mode 100644 index 00000000..7b5e3d01 --- /dev/null +++ b/evm/src/keccak_memory/mod.rs @@ -0,0 +1,2 @@ +pub mod columns; +pub mod keccak_memory_stark; diff --git a/evm/src/lib.rs b/evm/src/lib.rs index 47335db2..0a31a7ba 100644 --- a/evm/src/lib.rs +++ b/evm/src/lib.rs @@ -13,6 +13,7 @@ pub mod cross_table_lookup; pub mod generation; mod get_challenges; pub mod keccak; +pub mod keccak_memory; pub mod logic; pub mod lookup; pub mod memory; diff --git a/evm/src/logic.rs b/evm/src/logic.rs index 119c3d32..2499101b 100644 --- a/evm/src/logic.rs +++ b/evm/src/logic.rs @@ -140,11 +140,10 @@ impl LogicStark { impl, const D: usize> Stark for LogicStark { const COLUMNS: usize = columns::NUM_COLUMNS; - const PUBLIC_INPUTS: usize = 0; fn eval_packed_generic( &self, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
, ) where FE: FieldExtension, @@ -196,7 +195,7 @@ impl, const D: usize> Stark for LogicStark, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ) { let lv = &vars.local_values; diff --git a/evm/src/lookup.rs b/evm/src/lookup.rs index 2c93143f..ae92e864 100644 --- a/evm/src/lookup.rs +++ b/evm/src/lookup.rs @@ -10,13 +10,8 @@ use plonky2::plonk::circuit_builder::CircuitBuilder; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; -pub(crate) fn eval_lookups< - F: Field, - P: PackedField, - const COLS: usize, - const PUB_INPUTS: usize, ->( - vars: StarkEvaluationVars, +pub(crate) fn eval_lookups, const COLS: usize>( + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
, col_permuted_input: usize, col_permuted_table: usize, @@ -42,10 +37,9 @@ pub(crate) fn eval_lookups_circuit< F: RichField + Extendable, const D: usize, const COLS: usize, - const PUB_INPUTS: usize, >( builder: &mut CircuitBuilder, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, col_permuted_input: usize, col_permuted_table: usize, diff --git a/evm/src/memory/memory_stark.rs b/evm/src/memory/memory_stark.rs index 8ed52ebb..1ec0c11c 100644 --- a/evm/src/memory/memory_stark.rs +++ b/evm/src/memory/memory_stark.rs @@ -26,8 +26,6 @@ use crate::permutation::PermutationPair; use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; -pub(crate) const NUM_PUBLIC_INPUTS: usize = 0; - pub fn ctl_data() -> Vec> { let mut res = Column::singles([IS_READ, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL]).collect_vec(); @@ -218,11 +216,10 @@ impl, const D: usize> MemoryStark { impl, const D: usize> Stark for MemoryStark { const COLUMNS: usize = NUM_COLUMNS; - const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS; fn eval_packed_generic( &self, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
, ) where FE: FieldExtension, @@ -302,7 +299,7 @@ impl, const D: usize> Stark for MemoryStark, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ) { let one = builder.one_extension(); diff --git a/evm/src/permutation.rs b/evm/src/permutation.rs index c21a06de..0bb8ab1d 100644 --- a/evm/src/permutation.rs +++ b/evm/src/permutation.rs @@ -298,7 +298,7 @@ where pub(crate) fn eval_permutation_checks( stark: &S, config: &StarkConfig, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, permutation_vars: PermutationCheckVars, consumer: &mut ConstraintConsumer
, ) where @@ -365,14 +365,13 @@ pub(crate) fn eval_permutation_checks_circuit( builder: &mut CircuitBuilder, stark: &S, config: &StarkConfig, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, permutation_data: PermutationCheckDataTarget, consumer: &mut RecursiveConstraintConsumer, ) where F: RichField + Extendable, S: Stark, [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, { let PermutationCheckDataTarget { local_zs, diff --git a/evm/src/proof.rs b/evm/src/proof.rs index e81514eb..54f3618b 100644 --- a/evm/src/proof.rs +++ b/evm/src/proof.rs @@ -1,10 +1,9 @@ +use ethereum_types::{Address, U256}; use itertools::Itertools; use maybe_rayon::*; use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::fri::oracle::PolynomialBatch; -use plonky2::fri::proof::{ - CompressedFriProof, FriChallenges, FriChallengesTarget, FriProof, FriProofTarget, -}; +use plonky2::fri::proof::{FriChallenges, FriChallengesTarget, FriProof, FriProofTarget}; use plonky2::fri::structure::{ FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget, }; @@ -19,21 +18,22 @@ use crate::permutation::GrandProductChallengeSet; #[derive(Debug, Clone)] pub struct AllProof, C: GenericConfig, const D: usize> { - pub stark_proofs: Vec>, + pub stark_proofs: Vec>, + pub public_values: PublicValues, } impl, C: GenericConfig, const D: usize> AllProof { pub fn degree_bits(&self, config: &StarkConfig) -> Vec { self.stark_proofs .iter() - .map(|proof| proof.proof.recover_degree_bits(config)) + .map(|proof| proof.recover_degree_bits(config)) .collect() } pub fn nums_ctl_zs(&self) -> Vec { self.stark_proofs .iter() - .map(|proof| proof.proof.num_ctl_zs()) + .map(|proof| proof.num_ctl_zs()) .collect() } } @@ -44,7 +44,58 @@ pub(crate) struct AllProofChallenges, const D: usiz } pub struct AllProofTarget { - pub stark_proofs: Vec>, + pub stark_proofs: Vec>, + pub public_values: PublicValuesTarget, +} + +/// Memory values which are public. +#[derive(Debug, Clone, Default)] +pub struct PublicValues { + pub trie_roots_before: TrieRoots, + pub trie_roots_after: TrieRoots, + pub block_metadata: BlockMetadata, +} + +#[derive(Debug, Clone, Default)] +pub struct TrieRoots { + pub state_root: U256, + pub transactions_root: U256, + pub receipts_root: U256, +} + +#[derive(Debug, Clone, Default)] +pub struct BlockMetadata { + pub block_beneficiary: Address, + pub block_timestamp: U256, + pub block_number: U256, + pub block_difficulty: U256, + pub block_gaslimit: U256, + pub block_chain_id: U256, + pub block_base_fee: U256, +} + +/// Memory values which are public. +/// Note: All the larger integers are encoded with 32-bit limbs in little-endian order. 
+pub struct PublicValuesTarget { + pub trie_roots_before: TrieRootsTarget, + pub trie_roots_after: TrieRootsTarget, + pub block_metadata: BlockMetadataTarget, +} + +pub struct TrieRootsTarget { + pub state_root: [Target; 8], + pub transactions_root: [Target; 8], + pub receipts_root: [Target; 8], +} + +pub struct BlockMetadataTarget { + pub block_beneficiary: [Target; 5], + pub block_timestamp: Target, + pub block_number: Target, + pub block_difficulty: Target, + pub block_gaslimit: Target, + pub block_chain_id: Target, + pub block_base_fee: Target, } pub(crate) struct AllProofChallengesTarget { @@ -102,44 +153,6 @@ impl StarkProofTarget { } } -#[derive(Debug, Clone)] -pub struct StarkProofWithPublicInputs< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, -> { - pub proof: StarkProof, - // TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`. - pub public_inputs: Vec, -} - -pub struct StarkProofWithPublicInputsTarget { - pub proof: StarkProofTarget, - pub public_inputs: Vec, -} - -pub struct CompressedStarkProof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, -> { - /// Merkle cap of LDEs of trace values. - pub trace_cap: MerkleCap, - /// Purported values of each polynomial at the challenge point. - pub openings: StarkOpeningSet, - /// A batch FRI argument for all openings. - pub opening_proof: CompressedFriProof, -} - -pub struct CompressedStarkProofWithPublicInputs< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, -> { - pub proof: CompressedStarkProof, - pub public_inputs: Vec, -} - pub(crate) struct StarkProofChallenges, const D: usize> { /// Randomness used in any permutation arguments. pub permutation_challenge_sets: Option>>, diff --git a/evm/src/prover.rs b/evm/src/prover.rs index e93ad754..75152d61 100644 --- a/evm/src/prover.rs +++ b/evm/src/prover.rs @@ -22,24 +22,25 @@ use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; use crate::cpu::cpu_stark::CpuStark; use crate::cross_table_lookup::{cross_table_lookup_data, CtlCheckVars, CtlData}; +use crate::generation::{generate_traces, GenerationInputs}; use crate::keccak::keccak_stark::KeccakStark; +use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark; use crate::logic::LogicStark; use crate::memory::memory_stark::MemoryStark; use crate::permutation::PermutationCheckVars; use crate::permutation::{ compute_permutation_z_polys, get_n_grand_product_challenge_sets, GrandProductChallengeSet, }; -use crate::proof::{AllProof, StarkOpeningSet, StarkProof, StarkProofWithPublicInputs}; +use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof}; use crate::stark::Stark; use crate::vanishing_poly::eval_vanishing_poly; use crate::vars::StarkEvaluationVars; -/// Compute all STARK proofs. +/// Generate traces, then create all STARK proofs. pub fn prove( all_stark: &AllStark, config: &StarkConfig, - trace_poly_values: Vec>>, - public_inputs: Vec>, + inputs: GenerationInputs, timing: &mut TimingTree, ) -> Result> where @@ -47,17 +48,35 @@ where C: GenericConfig, [(); C::Hasher::HASH_SIZE]:, [(); CpuStark::::COLUMNS]:, - [(); CpuStark::::PUBLIC_INPUTS]:, [(); KeccakStark::::COLUMNS]:, - [(); KeccakStark::::PUBLIC_INPUTS]:, + [(); KeccakMemoryStark::::COLUMNS]:, + [(); LogicStark::::COLUMNS]:, + [(); MemoryStark::::COLUMNS]:, +{ + let (traces, public_values) = generate_traces(all_stark, inputs); + prove_with_traces(all_stark, config, traces, public_values, timing) +} + +/// Compute all STARK proofs. 
+pub(crate) fn prove_with_traces( + all_stark: &AllStark, + config: &StarkConfig, + trace_poly_values: Vec>>, + public_values: PublicValues, + timing: &mut TimingTree, +) -> Result> +where + F: RichField + Extendable, + C: GenericConfig, + [(); C::Hasher::HASH_SIZE]:, + [(); CpuStark::::COLUMNS]:, + [(); KeccakStark::::COLUMNS]:, + [(); KeccakMemoryStark::::COLUMNS]:, [(); LogicStark::::COLUMNS]:, - [(); LogicStark::::PUBLIC_INPUTS]:, [(); MemoryStark::::COLUMNS]:, - [(); MemoryStark::::PUBLIC_INPUTS]:, { let num_starks = Table::num_tables(); debug_assert_eq!(num_starks, trace_poly_values.len()); - debug_assert_eq!(num_starks, public_inputs.len()); let rate_bits = config.fri_config.rate_bits; let cap_height = config.fri_config.cap_height; @@ -104,10 +123,6 @@ where &trace_poly_values[Table::Cpu as usize], &trace_commitments[Table::Cpu as usize], &ctl_data_per_table[Table::Cpu as usize], - public_inputs[Table::Cpu as usize] - .clone() - .try_into() - .unwrap(), &mut challenger, timing, )?; @@ -117,10 +132,15 @@ where &trace_poly_values[Table::Keccak as usize], &trace_commitments[Table::Keccak as usize], &ctl_data_per_table[Table::Keccak as usize], - public_inputs[Table::Keccak as usize] - .clone() - .try_into() - .unwrap(), + &mut challenger, + timing, + )?; + let keccak_memory_proof = prove_single_table( + &all_stark.keccak_memory_stark, + config, + &trace_poly_values[Table::KeccakMemory as usize], + &trace_commitments[Table::KeccakMemory as usize], + &ctl_data_per_table[Table::KeccakMemory as usize], &mut challenger, timing, )?; @@ -130,10 +150,6 @@ where &trace_poly_values[Table::Logic as usize], &trace_commitments[Table::Logic as usize], &ctl_data_per_table[Table::Logic as usize], - public_inputs[Table::Logic as usize] - .clone() - .try_into() - .unwrap(), &mut challenger, timing, )?; @@ -143,18 +159,23 @@ where &trace_poly_values[Table::Memory as usize], &trace_commitments[Table::Memory as usize], &ctl_data_per_table[Table::Memory as usize], - public_inputs[Table::Memory as usize] - .clone() - .try_into() - .unwrap(), &mut challenger, timing, )?; - let stark_proofs = vec![cpu_proof, keccak_proof, logic_proof, memory_proof]; + let stark_proofs = vec![ + cpu_proof, + keccak_proof, + keccak_memory_proof, + logic_proof, + memory_proof, + ]; debug_assert_eq!(stark_proofs.len(), num_starks); - Ok(AllProof { stark_proofs }) + Ok(AllProof { + stark_proofs, + public_values, + }) } /// Compute proof for a single STARK table. 
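`prove_with_traces` above indexes every per-table vector (`trace_poly_values`, `trace_commitments`, `ctl_data_per_table`) by `Table::X as usize` and then collects the individual proofs in that same order, so the `Table` discriminants and the vector layout must stay in sync. The standalone sketch below checks that invariant against a hypothetical mirror of the enum; it is an illustration, not the crate's type.

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Table {
    Cpu = 0,
    Keccak = 1,
    KeccakMemory = 2,
    Logic = 3,
    Memory = 4,
}

// The order in which prove_with_traces pushes proofs into `stark_proofs`.
const ALL_TABLES: [Table; 5] = [
    Table::Cpu,
    Table::Keccak,
    Table::KeccakMemory,
    Table::Logic,
    Table::Memory,
];

fn main() {
    // Every discriminant must equal its position in the per-table vectors.
    for (i, table) in ALL_TABLES.iter().enumerate() {
        assert_eq!(*table as usize, i);
    }
}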
@@ -164,17 +185,15 @@ fn prove_single_table( trace_poly_values: &[PolynomialValues], trace_commitment: &PolynomialBatch, ctl_data: &CtlData, - public_inputs: [F; S::PUBLIC_INPUTS], challenger: &mut Challenger, timing: &mut TimingTree, -) -> Result> +) -> Result> where F: RichField + Extendable, C: GenericConfig, S: Stark, [(); C::Hasher::HASH_SIZE]:, [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, { let degree = trace_poly_values[0].len(); let degree_bits = log2_strict(degree); @@ -228,7 +247,6 @@ where &permutation_ctl_zs_commitment, permutation_challenges.as_ref(), ctl_data, - public_inputs, alphas.clone(), degree_bits, num_permutation_zs, @@ -241,7 +259,6 @@ where &permutation_ctl_zs_commitment, permutation_challenges.as_ref(), ctl_data, - public_inputs, alphas, degree_bits, num_permutation_zs, @@ -310,17 +327,13 @@ where timing, ) ); - let proof = StarkProof { + + Ok(StarkProof { trace_cap: trace_commitment.merkle_tree.cap.clone(), permutation_ctl_zs_cap, quotient_polys_cap, openings, opening_proof, - }; - - Ok(StarkProofWithPublicInputs { - proof, - public_inputs: public_inputs.to_vec(), }) } @@ -332,7 +345,6 @@ fn compute_quotient_polys<'a, F, P, C, S, const D: usize>( permutation_ctl_zs_commitment: &'a PolynomialBatch, permutation_challenges: Option<&'a Vec>>, ctl_data: &CtlData, - public_inputs: [F; S::PUBLIC_INPUTS], alphas: Vec, degree_bits: usize, num_permutation_zs: usize, @@ -344,7 +356,6 @@ where C: GenericConfig, S: Stark, [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, { let degree = 1 << degree_bits; let rate_bits = config.fri_config.rate_bits; @@ -406,7 +417,6 @@ where let vars = StarkEvaluationVars { local_values: &get_trace_values_packed(i_start), next_values: &get_trace_values_packed(i_next_start), - public_inputs: &public_inputs, }; let permutation_check_vars = permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars { @@ -472,7 +482,6 @@ fn check_constraints<'a, F, C, S, const D: usize>( permutation_ctl_zs_commitment: &'a PolynomialBatch, permutation_challenges: Option<&'a Vec>>, ctl_data: &CtlData, - public_inputs: [F; S::PUBLIC_INPUTS], alphas: Vec, degree_bits: usize, num_permutation_zs: usize, @@ -482,7 +491,6 @@ fn check_constraints<'a, F, C, S, const D: usize>( C: GenericConfig, S: Stark, [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, { let degree = 1 << degree_bits; let rate_bits = 0; // Set this to higher value to check constraint degree. 
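With `PUBLIC_INPUTS` removed, the evaluation frame passed to the constraint code carries only the current and next trace row, as in the `StarkEvaluationVars` construction in `compute_quotient_polys` above. A standalone sketch of that two-row frame follows; the struct reuses the field names for readability but is an illustration, not the crate's type.

#[derive(Debug, Copy, Clone)]
struct EvalVars<'a, T, const COLUMNS: usize> {
    local_values: &'a [T; COLUMNS],
    next_values: &'a [T; COLUMNS],
}

fn main() {
    let local = [1u64, 2, 3, 4];
    let next = [5u64, 6, 7, 8];
    let vars = EvalVars {
        local_values: &local,
        next_values: &next,
    };
    // A toy transition constraint: every column increases by 4 between rows.
    for c in 0..local.len() {
        assert_eq!(vars.next_values[c], vars.local_values[c] + 4);
    }
}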
@@ -531,7 +539,6 @@ fn check_constraints<'a, F, C, S, const D: usize>( let vars = StarkEvaluationVars { local_values: trace_subgroup_evals[i].as_slice().try_into().unwrap(), next_values: trace_subgroup_evals[i_next].as_slice().try_into().unwrap(), - public_inputs: &public_inputs, }; let permutation_check_vars = permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars { diff --git a/evm/src/recursive_verifier.rs b/evm/src/recursive_verifier.rs index cfd1b36a..f845c364 100644 --- a/evm/src/recursive_verifier.rs +++ b/evm/src/recursive_verifier.rs @@ -23,15 +23,17 @@ use crate::cross_table_lookup::{ verify_cross_table_lookups_circuit, CrossTableLookup, CtlCheckVarsTarget, }; use crate::keccak::keccak_stark::KeccakStark; +use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark; use crate::logic::LogicStark; use crate::memory::memory_stark::MemoryStark; use crate::permutation::PermutationCheckDataTarget; use crate::proof::{ - AllProof, AllProofChallengesTarget, AllProofTarget, StarkOpeningSetTarget, StarkProof, - StarkProofChallengesTarget, StarkProofTarget, StarkProofWithPublicInputs, - StarkProofWithPublicInputsTarget, + AllProof, AllProofChallengesTarget, AllProofTarget, BlockMetadata, BlockMetadataTarget, + PublicValues, PublicValuesTarget, StarkOpeningSetTarget, StarkProof, + StarkProofChallengesTarget, StarkProofTarget, TrieRoots, TrieRootsTarget, }; use crate::stark::Stark; +use crate::util::{h160_limbs, u256_limbs}; use crate::vanishing_poly::eval_vanishing_poly_circuit; use crate::vars::StarkEvaluationTargets; @@ -115,13 +117,10 @@ pub fn verify_proof_circuit< inner_config: &StarkConfig, ) where [(); CpuStark::::COLUMNS]:, - [(); CpuStark::::PUBLIC_INPUTS]:, [(); KeccakStark::::COLUMNS]:, - [(); KeccakStark::::PUBLIC_INPUTS]:, + [(); KeccakMemoryStark::::COLUMNS]:, [(); LogicStark::::COLUMNS]:, - [(); LogicStark::::PUBLIC_INPUTS]:, [(); MemoryStark::::COLUMNS]:, - [(); MemoryStark::::PUBLIC_INPUTS]:, C::Hasher: AlgebraicHasher, { let AllProofChallengesTarget { @@ -134,6 +133,7 @@ pub fn verify_proof_circuit< let AllStark { cpu_stark, keccak_stark, + keccak_memory_stark, logic_stark, memory_stark, cross_table_lookups, @@ -170,6 +170,18 @@ pub fn verify_proof_circuit< inner_config, ) ); + with_context!( + builder, + "verify Keccak memory proof", + verify_stark_proof_with_challenges_circuit::( + builder, + keccak_memory_stark, + &all_proof.stark_proofs[Table::KeccakMemory as usize], + &stark_challenges[Table::KeccakMemory as usize], + &ctl_vars_per_table[Table::KeccakMemory as usize], + inner_config, + ) + ); with_context!( builder, "verify logic proof", @@ -217,23 +229,17 @@ fn verify_stark_proof_with_challenges_circuit< >( builder: &mut CircuitBuilder, stark: S, - proof_with_pis: &StarkProofWithPublicInputsTarget, + proof: &StarkProofTarget, challenges: &StarkProofChallengesTarget, ctl_vars: &[CtlCheckVarsTarget], inner_config: &StarkConfig, ) where C::Hasher: AlgebraicHasher, [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, { let zero = builder.zero(); let one = builder.one_extension(); - let StarkProofWithPublicInputsTarget { - proof, - public_inputs, - } = proof_with_pis; - assert_eq!(public_inputs.len(), S::PUBLIC_INPUTS); let StarkOpeningSetTarget { local_values, next_values, @@ -245,12 +251,6 @@ fn verify_stark_proof_with_challenges_circuit< let vars = StarkEvaluationTargets { local_values: &local_values.to_vec().try_into().unwrap(), next_values: &next_values.to_vec().try_into().unwrap(), - public_inputs: &public_inputs - .iter() - .map(|&t| 
builder.convert_to_ext(t)) - .collect::>() - .try_into() - .unwrap(), }; let degree_bits = proof.recover_degree_bits(inner_config); @@ -356,84 +356,95 @@ pub fn add_virtual_all_proof, const D: usize>( nums_ctl_zs: &[usize], ) -> AllProofTarget { let stark_proofs = vec![ - { - let proof = add_virtual_stark_proof( - builder, - all_stark.cpu_stark, - config, - degree_bits[Table::Cpu as usize], - nums_ctl_zs[Table::Cpu as usize], - ); - let public_inputs = builder.add_virtual_targets(CpuStark::::PUBLIC_INPUTS); - StarkProofWithPublicInputsTarget { - proof, - public_inputs, - } - }, - { - let proof = add_virtual_stark_proof( - builder, - all_stark.keccak_stark, - config, - degree_bits[Table::Keccak as usize], - nums_ctl_zs[Table::Keccak as usize], - ); - let public_inputs = builder.add_virtual_targets(KeccakStark::::PUBLIC_INPUTS); - StarkProofWithPublicInputsTarget { - proof, - public_inputs, - } - }, - { - let proof = add_virtual_stark_proof( - builder, - all_stark.logic_stark, - config, - degree_bits[Table::Logic as usize], - nums_ctl_zs[Table::Logic as usize], - ); - let public_inputs = builder.add_virtual_targets(LogicStark::::PUBLIC_INPUTS); - StarkProofWithPublicInputsTarget { - proof, - public_inputs, - } - }, - { - let proof = add_virtual_stark_proof( - builder, - all_stark.memory_stark, - config, - degree_bits[Table::Memory as usize], - nums_ctl_zs[Table::Memory as usize], - ); - let public_inputs = builder.add_virtual_targets(KeccakStark::::PUBLIC_INPUTS); - StarkProofWithPublicInputsTarget { - proof, - public_inputs, - } - }, + add_virtual_stark_proof( + builder, + all_stark.cpu_stark, + config, + degree_bits[Table::Cpu as usize], + nums_ctl_zs[Table::Cpu as usize], + ), + add_virtual_stark_proof( + builder, + all_stark.keccak_stark, + config, + degree_bits[Table::Keccak as usize], + nums_ctl_zs[Table::Keccak as usize], + ), + add_virtual_stark_proof( + builder, + all_stark.keccak_memory_stark, + config, + degree_bits[Table::KeccakMemory as usize], + nums_ctl_zs[Table::KeccakMemory as usize], + ), + add_virtual_stark_proof( + builder, + all_stark.logic_stark, + config, + degree_bits[Table::Logic as usize], + nums_ctl_zs[Table::Logic as usize], + ), + add_virtual_stark_proof( + builder, + all_stark.memory_stark, + config, + degree_bits[Table::Memory as usize], + nums_ctl_zs[Table::Memory as usize], + ), ]; - assert_eq!(stark_proofs.len(), Table::num_tables()); - AllProofTarget { stark_proofs } + + let public_values = add_virtual_public_values(builder); + AllProofTarget { + stark_proofs, + public_values, + } } -pub fn add_virtual_stark_proof_with_pis< - F: RichField + Extendable, - S: Stark, - const D: usize, ->( +pub fn add_virtual_public_values, const D: usize>( builder: &mut CircuitBuilder, - stark: S, - config: &StarkConfig, - degree_bits: usize, - num_ctl_zs: usize, -) -> StarkProofWithPublicInputsTarget { - let proof = add_virtual_stark_proof::(builder, stark, config, degree_bits, num_ctl_zs); - let public_inputs = builder.add_virtual_targets(S::PUBLIC_INPUTS); - StarkProofWithPublicInputsTarget { - proof, - public_inputs, +) -> PublicValuesTarget { + let trie_roots_before = add_virtual_trie_roots(builder); + let trie_roots_after = add_virtual_trie_roots(builder); + let block_metadata = add_virtual_block_metadata(builder); + PublicValuesTarget { + trie_roots_before, + trie_roots_after, + block_metadata, + } +} + +pub fn add_virtual_trie_roots, const D: usize>( + builder: &mut CircuitBuilder, +) -> TrieRootsTarget { + let state_root = builder.add_virtual_target_arr(); + let 
transactions_root = builder.add_virtual_target_arr(); + let receipts_root = builder.add_virtual_target_arr(); + TrieRootsTarget { + state_root, + transactions_root, + receipts_root, + } +} + +pub fn add_virtual_block_metadata, const D: usize>( + builder: &mut CircuitBuilder, +) -> BlockMetadataTarget { + let block_beneficiary = builder.add_virtual_target_arr(); + let block_timestamp = builder.add_virtual_target(); + let block_number = builder.add_virtual_target(); + let block_difficulty = builder.add_virtual_target(); + let block_gaslimit = builder.add_virtual_target(); + let block_chain_id = builder.add_virtual_target(); + let block_base_fee = builder.add_virtual_target(); + BlockMetadataTarget { + block_beneficiary, + block_timestamp, + block_number, + block_difficulty, + block_gaslimit, + block_chain_id, + block_base_fee, } } @@ -499,35 +510,13 @@ pub fn set_all_proof_target, W, const D: usize>( .iter() .zip_eq(&all_proof.stark_proofs) { - set_stark_proof_with_pis_target(witness, pt, p, zero); + set_stark_proof_target(witness, pt, p, zero); } -} - -pub fn set_stark_proof_with_pis_target, W, const D: usize>( - witness: &mut W, - stark_proof_with_pis_target: &StarkProofWithPublicInputsTarget, - stark_proof_with_pis: &StarkProofWithPublicInputs, - zero: Target, -) where - F: RichField + Extendable, - C::Hasher: AlgebraicHasher, - W: Witness, -{ - let StarkProofWithPublicInputs { - proof, - public_inputs, - } = stark_proof_with_pis; - let StarkProofWithPublicInputsTarget { - proof: pt, - public_inputs: pi_targets, - } = stark_proof_with_pis_target; - - // Set public inputs. - for (&pi_t, &pi) in pi_targets.iter().zip_eq(public_inputs) { - witness.set_target(pi_t, pi); - } - - set_stark_proof_target(witness, pt, proof, zero); + set_public_value_targets( + witness, + &all_proof_target.public_values, + &all_proof.public_values, + ) } pub fn set_stark_proof_target, W, const D: usize>( @@ -555,3 +544,88 @@ pub fn set_stark_proof_target, W, const D: usize>( set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof); } + +pub fn set_public_value_targets( + witness: &mut W, + public_values_target: &PublicValuesTarget, + public_values: &PublicValues, +) where + F: RichField + Extendable, + W: Witness, +{ + set_trie_roots_target( + witness, + &public_values_target.trie_roots_before, + &public_values.trie_roots_before, + ); + set_trie_roots_target( + witness, + &public_values_target.trie_roots_after, + &public_values.trie_roots_after, + ); + set_block_metadata_target( + witness, + &public_values_target.block_metadata, + &public_values.block_metadata, + ); +} + +pub fn set_trie_roots_target( + witness: &mut W, + trie_roots_target: &TrieRootsTarget, + trie_roots: &TrieRoots, +) where + F: RichField + Extendable, + W: Witness, +{ + witness.set_target_arr( + trie_roots_target.state_root, + u256_limbs(trie_roots.state_root), + ); + witness.set_target_arr( + trie_roots_target.transactions_root, + u256_limbs(trie_roots.transactions_root), + ); + witness.set_target_arr( + trie_roots_target.receipts_root, + u256_limbs(trie_roots.receipts_root), + ); +} + +pub fn set_block_metadata_target( + witness: &mut W, + block_metadata_target: &BlockMetadataTarget, + block_metadata: &BlockMetadata, +) where + F: RichField + Extendable, + W: Witness, +{ + witness.set_target_arr( + block_metadata_target.block_beneficiary, + h160_limbs(block_metadata.block_beneficiary), + ); + witness.set_target( + block_metadata_target.block_timestamp, + F::from_canonical_u64(block_metadata.block_timestamp.as_u64()), + 
); + witness.set_target( + block_metadata_target.block_number, + F::from_canonical_u64(block_metadata.block_number.as_u64()), + ); + witness.set_target( + block_metadata_target.block_difficulty, + F::from_canonical_u64(block_metadata.block_difficulty.as_u64()), + ); + witness.set_target( + block_metadata_target.block_gaslimit, + F::from_canonical_u64(block_metadata.block_gaslimit.as_u64()), + ); + witness.set_target( + block_metadata_target.block_chain_id, + F::from_canonical_u64(block_metadata.block_chain_id.as_u64()), + ); + witness.set_target( + block_metadata_target.block_base_fee, + F::from_canonical_u64(block_metadata.block_base_fee.as_u64()), + ); +} diff --git a/evm/src/stark.rs b/evm/src/stark.rs index 8935655b..a205547a 100644 --- a/evm/src/stark.rs +++ b/evm/src/stark.rs @@ -20,8 +20,6 @@ use crate::vars::StarkEvaluationVars; pub trait Stark, const D: usize>: Sync { /// The total number of columns in the trace. const COLUMNS: usize; - /// The number of public inputs. - const PUBLIC_INPUTS: usize; /// Evaluate constraints at a vector of points. /// @@ -31,7 +29,7 @@ pub trait Stark, const D: usize>: Sync { /// constraints over `F`. fn eval_packed_generic( &self, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
, ) where FE: FieldExtension, @@ -40,7 +38,7 @@ pub trait Stark, const D: usize>: Sync { /// Evaluate constraints at a vector of points from the base field `F`. fn eval_packed_base>( &self, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer
, ) { self.eval_packed_generic(vars, yield_constr) @@ -49,12 +47,7 @@ pub trait Stark, const D: usize>: Sync { /// Evaluate constraints at a single point from the degree `D` extension field. fn eval_ext( &self, - vars: StarkEvaluationVars< - F::Extension, - F::Extension, - { Self::COLUMNS }, - { Self::PUBLIC_INPUTS }, - >, + vars: StarkEvaluationVars, yield_constr: &mut ConstraintConsumer, ) { self.eval_packed_generic(vars, yield_constr) @@ -67,7 +60,7 @@ pub trait Stark, const D: usize>: Sync { fn eval_ext_circuit( &self, builder: &mut CircuitBuilder, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, yield_constr: &mut RecursiveConstraintConsumer, ); diff --git a/evm/src/stark_testing.rs b/evm/src/stark_testing.rs index 809423d4..5cd83e41 100644 --- a/evm/src/stark_testing.rs +++ b/evm/src/stark_testing.rs @@ -26,13 +26,11 @@ pub fn test_stark_low_degree, S: Stark, const ) -> Result<()> where [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, { let rate_bits = log2_ceil(stark.constraint_degree() + 1); let trace_ldes = random_low_degree_matrix::(S::COLUMNS, rate_bits); let size = trace_ldes.len(); - let public_inputs = F::rand_arr::<{ S::PUBLIC_INPUTS }>(); let lagrange_first = PolynomialValues::selector(WITNESS_SIZE, 0).lde(rate_bits); let lagrange_last = PolynomialValues::selector(WITNESS_SIZE, WITNESS_SIZE - 1).lde(rate_bits); @@ -49,7 +47,6 @@ where .clone() .try_into() .unwrap(), - public_inputs: &public_inputs, }; let mut consumer = ConstraintConsumer::::new( @@ -89,14 +86,12 @@ pub fn test_stark_circuit_constraints< ) -> Result<()> where [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, [(); C::Hasher::HASH_SIZE]:, { // Compute native constraint evaluation on random values. let vars = StarkEvaluationVars { local_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(), next_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(), - public_inputs: &F::Extension::rand_arr::<{ S::PUBLIC_INPUTS }>(), }; let alphas = F::rand_vec(1); let z_last = F::Extension::rand(); @@ -124,8 +119,6 @@ where pw.set_extension_targets(&locals_t, vars.local_values); let nexts_t = builder.add_virtual_extension_targets(S::COLUMNS); pw.set_extension_targets(&nexts_t, vars.next_values); - let pis_t = builder.add_virtual_extension_targets(S::PUBLIC_INPUTS); - pw.set_extension_targets(&pis_t, vars.public_inputs); let alphas_t = builder.add_virtual_targets(1); pw.set_target(alphas_t[0], alphas[0]); let z_last_t = builder.add_virtual_extension_target(); @@ -135,10 +128,9 @@ where let lagrange_last_t = builder.add_virtual_extension_target(); pw.set_extension_target(lagrange_last_t, lagrange_last); - let vars = StarkEvaluationTargets:: { + let vars = StarkEvaluationTargets:: { local_values: &locals_t.try_into().unwrap(), next_values: &nexts_t.try_into().unwrap(), - public_inputs: &pis_t.try_into().unwrap(), }; let mut consumer = RecursiveConstraintConsumer::::new( builder.zero_extension(), diff --git a/evm/src/util.rs b/evm/src/util.rs index 5bc85f99..ae5281db 100644 --- a/evm/src/util.rs +++ b/evm/src/util.rs @@ -1,3 +1,4 @@ +use ethereum_types::{H160, U256}; use itertools::Itertools; use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; @@ -40,3 +41,29 @@ pub fn trace_rows_to_poly_values( .map(|column| PolynomialValues::new(column)) .collect() } + +/// Returns the 32-bit little-endian limbs of a `U256`. 
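The `u256_limbs` helper documented above and defined just below splits each of the four little-endian 64-bit words of a `U256` into a low and a high 32-bit limb, and `h160_limbs` does the analogous 4-byte chunking for addresses. A standalone sketch of the same decomposition, using a plain `[u64; 4]` as a stand-in for `ethereum_types::U256`'s internal word array:

fn u256_words_to_u32_limbs(words: [u64; 4]) -> [u32; 8] {
    let mut limbs = [0u32; 8];
    for (i, w) in words.iter().enumerate() {
        limbs[2 * i] = *w as u32;             // low 32 bits of the word
        limbs[2 * i + 1] = (*w >> 32) as u32; // high 32 bits of the word
    }
    limbs
}

fn main() {
    let limbs = u256_words_to_u32_limbs([0x1111_2222_3333_4444, 0, 0, 0]);
    assert_eq!(limbs[0], 0x3333_4444);
    assert_eq!(limbs[1], 0x1111_2222);
}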
+pub(crate) fn u256_limbs(u256: U256) -> [F; 8] { + u256.0 + .into_iter() + .flat_map(|limb_64| { + let lo = limb_64 as u32; + let hi = (limb_64 >> 32) as u32; + [lo, hi] + }) + .map(F::from_canonical_u32) + .collect_vec() + .try_into() + .unwrap() +} + +/// Returns the 32-bit limbs of a `U160`. +pub(crate) fn h160_limbs(h160: H160) -> [F; 5] { + h160.0 + .chunks(4) + .map(|chunk| u32::from_le_bytes(chunk.try_into().unwrap())) + .map(F::from_canonical_u32) + .collect_vec() + .try_into() + .unwrap() +} diff --git a/evm/src/vanishing_poly.rs b/evm/src/vanishing_poly.rs index c0a6534b..e776fa5c 100644 --- a/evm/src/vanishing_poly.rs +++ b/evm/src/vanishing_poly.rs @@ -20,7 +20,7 @@ use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; pub(crate) fn eval_vanishing_poly( stark: &S, config: &StarkConfig, - vars: StarkEvaluationVars, + vars: StarkEvaluationVars, permutation_vars: Option>, ctl_vars: &[CtlCheckVars], consumer: &mut ConstraintConsumer
, @@ -48,7 +48,7 @@ pub(crate) fn eval_vanishing_poly_circuit( builder: &mut CircuitBuilder, stark: &S, config: &StarkConfig, - vars: StarkEvaluationTargets, + vars: StarkEvaluationTargets, permutation_data: Option>, ctl_vars: &[CtlCheckVarsTarget], consumer: &mut RecursiveConstraintConsumer, @@ -57,7 +57,6 @@ pub(crate) fn eval_vanishing_poly_circuit( C: GenericConfig, S: Stark, [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, { stark.eval_ext_circuit(builder, vars, consumer); if let Some(permutation_data) = permutation_data { diff --git a/evm/src/vars.rs b/evm/src/vars.rs index 682ac837..6c82675c 100644 --- a/evm/src/vars.rs +++ b/evm/src/vars.rs @@ -3,24 +3,17 @@ use plonky2::field::types::Field; use plonky2::iop::ext_target::ExtensionTarget; #[derive(Debug, Copy, Clone)] -pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize, const PUBLIC_INPUTS: usize> +pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize> where F: Field, P: PackedField, { pub local_values: &'a [P; COLUMNS], pub next_values: &'a [P; COLUMNS], - pub public_inputs: &'a [P::Scalar; PUBLIC_INPUTS], } #[derive(Debug, Copy, Clone)] -pub struct StarkEvaluationTargets< - 'a, - const D: usize, - const COLUMNS: usize, - const PUBLIC_INPUTS: usize, -> { +pub struct StarkEvaluationTargets<'a, const D: usize, const COLUMNS: usize> { pub local_values: &'a [ExtensionTarget; COLUMNS], pub next_values: &'a [ExtensionTarget; COLUMNS], - pub public_inputs: &'a [ExtensionTarget; PUBLIC_INPUTS], } diff --git a/evm/src/verifier.rs b/evm/src/verifier.rs index 1b46dc90..9b56422d 100644 --- a/evm/src/verifier.rs +++ b/evm/src/verifier.rs @@ -12,11 +12,12 @@ use crate::constraint_consumer::ConstraintConsumer; use crate::cpu::cpu_stark::CpuStark; use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars}; use crate::keccak::keccak_stark::KeccakStark; +use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark; use crate::logic::LogicStark; use crate::memory::memory_stark::MemoryStark; use crate::permutation::PermutationCheckVars; use crate::proof::{ - AllProof, AllProofChallenges, StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs, + AllProof, AllProofChallenges, StarkOpeningSet, StarkProof, StarkProofChallenges, }; use crate::stark::Stark; use crate::vanishing_poly::eval_vanishing_poly; @@ -29,13 +30,10 @@ pub fn verify_proof, C: GenericConfig, co ) -> Result<()> where [(); CpuStark::::COLUMNS]:, - [(); CpuStark::::PUBLIC_INPUTS]:, [(); KeccakStark::::COLUMNS]:, - [(); KeccakStark::::PUBLIC_INPUTS]:, + [(); KeccakMemoryStark::::COLUMNS]:, [(); LogicStark::::COLUMNS]:, - [(); LogicStark::::PUBLIC_INPUTS]:, [(); MemoryStark::::COLUMNS]:, - [(); MemoryStark::::PUBLIC_INPUTS]:, [(); C::Hasher::HASH_SIZE]:, { let AllProofChallenges { @@ -48,6 +46,7 @@ where let AllStark { cpu_stark, keccak_stark, + keccak_memory_stark, logic_stark, memory_stark, cross_table_lookups, @@ -74,6 +73,13 @@ where &ctl_vars_per_table[Table::Keccak as usize], config, )?; + verify_stark_proof_with_challenges( + keccak_memory_stark, + &all_proof.stark_proofs[Table::KeccakMemory as usize], + &stark_challenges[Table::KeccakMemory as usize], + &ctl_vars_per_table[Table::KeccakMemory as usize], + config, + )?; verify_stark_proof_with_challenges( memory_stark, &all_proof.stark_proofs[Table::Memory as usize], @@ -104,21 +110,15 @@ pub(crate) fn verify_stark_proof_with_challenges< const D: usize, >( stark: S, - proof_with_pis: &StarkProofWithPublicInputs, + proof: &StarkProof, challenges: &StarkProofChallenges, ctl_vars: 
&[CtlCheckVars], config: &StarkConfig, ) -> Result<()> where [(); S::COLUMNS]:, - [(); S::PUBLIC_INPUTS]:, [(); C::Hasher::HASH_SIZE]:, { - let StarkProofWithPublicInputs { - proof, - public_inputs, - } = proof_with_pis; - ensure!(public_inputs.len() == S::PUBLIC_INPUTS); let StarkOpeningSet { local_values, next_values, @@ -130,13 +130,6 @@ where let vars = StarkEvaluationVars { local_values: &local_values.to_vec().try_into().unwrap(), next_values: &next_values.to_vec().try_into().unwrap(), - public_inputs: &public_inputs - .iter() - .copied() - .map(F::Extension::from_basefield) - .collect::>() - .try_into() - .unwrap(), }; let degree_bits = proof.recover_degree_bits(config); diff --git a/evm/tests/transfer_to_new_addr.rs b/evm/tests/transfer_to_new_addr.rs index c30e7b7b..ecb71076 100644 --- a/evm/tests/transfer_to_new_addr.rs +++ b/evm/tests/transfer_to_new_addr.rs @@ -4,7 +4,9 @@ use plonky2::plonk::config::PoseidonGoldilocksConfig; use plonky2::util::timing::TimingTree; use plonky2_evm::all_stark::AllStark; use plonky2_evm::config::StarkConfig; -use plonky2_evm::generation::{generate_traces, TransactionData}; +use plonky2_evm::generation::partial_trie::PartialTrie; +use plonky2_evm::generation::GenerationInputs; +use plonky2_evm::proof::BlockMetadata; use plonky2_evm::prover::prove; use plonky2_evm::verifier::verify_proof; @@ -17,30 +19,22 @@ type C = PoseidonGoldilocksConfig; #[ignore] // TODO: Won't work until txn parsing, storage, etc. are implemented. fn test_simple_transfer() -> anyhow::Result<()> { let all_stark = AllStark::::default(); + let config = StarkConfig::standard_fast_config(); - let txn = TransactionData { - signed_txn: hex!("f85f050a82520894000000000000000000000000000000000000000064801ca0fa56df5d988638fad8798e5ef75a1e1125dc7fb55d2ac4bce25776a63f0c2967a02cb47a5579eb5f83a1cabe4662501c0059f1b58e60ef839a1b0da67af6b9fb38").to_vec(), - trie_proofs: vec![ - vec![ - hex!("f874a1202f93d0dfb1562c03c825a33eec4438e468c17fff649ae844c004065985ae2945b850f84e058a152d02c7e14af6800000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").to_vec(), - ], - vec![ - hex!("f8518080a0d36b8b6b60021940d5553689fb33e5d45e649dd8f4f211d26566238a83169da58080a0c62aa627943b70321f89a8b2fea274ecd47116e62042077dcdc0bdca7c1f66738080808080808080808080").to_vec(), - hex!("f873a03f93d0dfb1562c03c825a33eec4438e468c17fff649ae844c004065985ae2945b850f84e068a152d02c7e14af67ccb4ca056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").to_vec(), - ], - ] + let block_metadata = BlockMetadata::default(); + + let txn = hex!("f85f050a82520894000000000000000000000000000000000000000064801ca0fa56df5d988638fad8798e5ef75a1e1125dc7fb55d2ac4bce25776a63f0c2967a02cb47a5579eb5f83a1cabe4662501c0059f1b58e60ef839a1b0da67af6b9fb38"); + + let inputs = GenerationInputs { + signed_txns: vec![txn.to_vec()], + state_trie: PartialTrie::Empty, + transactions_trie: PartialTrie::Empty, + receipts_trie: PartialTrie::Empty, + storage_tries: vec![], + block_metadata, }; - let traces = generate_traces(&all_stark, &[txn]); - - let config = StarkConfig::standard_fast_config(); - let proof = prove::( - &all_stark, - &config, - traces, - vec![vec![]; 4], - &mut TimingTree::default(), - )?; + let proof = prove::(&all_stark, &config, inputs, &mut TimingTree::default())?; verify_proof(all_stark, proof, &config) } diff --git a/plonky2/src/gates/mod.rs b/plonky2/src/gates/mod.rs index 
df65b44c..48e319ef 100644 --- a/plonky2/src/gates/mod.rs +++ b/plonky2/src/gates/mod.rs @@ -3,7 +3,6 @@ pub mod arithmetic_base; pub mod arithmetic_extension; -pub mod assert_le; pub mod base_sum; pub mod constant; pub mod exponentiation; diff --git a/plonky2/src/iop/witness.rs b/plonky2/src/iop/witness.rs index 871f303f..caa22c33 100644 --- a/plonky2/src/iop/witness.rs +++ b/plonky2/src/iop/witness.rs @@ -104,9 +104,12 @@ pub trait Witness { where F: RichField + Extendable, { - let limbs = value.to_basefield_array(); - (0..D).for_each(|i| { - self.set_target(et.0[i], limbs[i]); + self.set_target_arr(et.0, value.to_basefield_array()); + } + + fn set_target_arr(&mut self, targets: [Target; N], values: [F; N]) { + (0..N).for_each(|i| { + self.set_target(targets[i], values[i]); }); } diff --git a/plonky2/src/plonk/circuit_builder.rs b/plonky2/src/plonk/circuit_builder.rs index 6728551c..ca68af9c 100644 --- a/plonky2/src/plonk/circuit_builder.rs +++ b/plonky2/src/plonk/circuit_builder.rs @@ -157,6 +157,10 @@ impl, const D: usize> CircuitBuilder { (0..n).map(|_i| self.add_virtual_target()).collect() } + pub fn add_virtual_target_arr(&mut self) -> [Target; N] { + [0; N].map(|_| self.add_virtual_target()) + } + pub fn add_virtual_hash(&mut self) -> HashOutTarget { HashOutTarget::from_vec(self.add_virtual_targets(4)) } diff --git a/plonky2/src/gates/assert_le.rs b/waksman/src/gates/assert_le.rs similarity index 96% rename from plonky2/src/gates/assert_le.rs rename to waksman/src/gates/assert_le.rs index 19bff044..c67a7125 100644 --- a/plonky2/src/gates/assert_le.rs +++ b/waksman/src/gates/assert_le.rs @@ -1,26 +1,25 @@ use std::marker::PhantomData; +use plonky2::gates::gate::Gate; +use plonky2::gates::packed_util::PackedEvaluableBase; +use plonky2::gates::util::StridedConstraintConsumer; +use plonky2::hash::hash_types::RichField; +use plonky2::iop::ext_target::ExtensionTarget; +use plonky2::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGenerator}; +use plonky2::iop::target::Target; +use plonky2::iop::wire::Wire; +use plonky2::iop::witness::{PartitionWitness, Witness}; +use plonky2::plonk::circuit_builder::CircuitBuilder; +use plonky2::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit}; +use plonky2::plonk::vars::{ + EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, + EvaluationVarsBasePacked, +}; use plonky2_field::extension::Extendable; use plonky2_field::packed::PackedField; use plonky2_field::types::{Field, Field64}; use plonky2_util::{bits_u64, ceil_div_usize}; -use crate::gates::gate::Gate; -use crate::gates::packed_util::PackedEvaluableBase; -use crate::gates::util::StridedConstraintConsumer; -use crate::hash::hash_types::RichField; -use crate::iop::ext_target::ExtensionTarget; -use crate::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGenerator}; -use crate::iop::target::Target; -use crate::iop::wire::Wire; -use crate::iop::witness::{PartitionWitness, Witness}; -use crate::plonk::circuit_builder::CircuitBuilder; -use crate::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit}; -use crate::plonk::vars::{ - EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, - EvaluationVarsBasePacked, -}; - // TODO: replace/merge this gate with `ComparisonGate`. /// A gate for checking that one value is less than or equal to another. 
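Among the plonky2 changes above, `CircuitBuilder::add_virtual_target_arr` builds a fixed-size `[Target; N]` with the `[0; N].map(..)` idiom and the new `Witness::set_target_arr` assigns values pairwise. The standalone sketch below shows the same pattern with toy stand-in types; the `Builder` and `Target` here are assumptions for illustration, not plonky2's.

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct Target(usize);

#[derive(Default)]
struct Builder {
    next: usize,
    values: Vec<u64>,
}

impl Builder {
    // Allocate one fresh virtual target.
    fn add_virtual_target(&mut self) -> Target {
        let t = Target(self.next);
        self.next += 1;
        self.values.push(0);
        t
    }

    // Fixed-size array of fresh targets, mirroring add_virtual_target_arr.
    fn add_virtual_target_arr<const N: usize>(&mut self) -> [Target; N] {
        [0; N].map(|_| self.add_virtual_target())
    }

    // Pairwise assignment, mirroring set_target_arr.
    fn set_target_arr<const N: usize>(&mut self, targets: [Target; N], values: [u64; N]) {
        for i in 0..N {
            self.values[targets[i].0] = values[i];
        }
    }
}

fn main() {
    let mut builder = Builder::default();
    let targets: [Target; 8] = builder.add_virtual_target_arr();
    builder.set_target_arr(targets, [1, 2, 3, 4, 5, 6, 7, 8]);
    assert_eq!(builder.values, vec![1, 2, 3, 4, 5, 6, 7, 8]);
}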
@@ -450,6 +449,11 @@ mod tests { use std::marker::PhantomData; use anyhow::Result; + use plonky2::gates::gate::Gate; + use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; + use plonky2::hash::hash_types::HashOut; + use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use plonky2::plonk::vars::EvaluationVars; use plonky2_field::extension::quartic::QuarticExtension; use plonky2_field::goldilocks_field::GoldilocksField; use plonky2_field::types::Field; @@ -457,11 +461,6 @@ mod tests { use rand::Rng; use crate::gates::assert_le::AssertLessThanGate; - use crate::gates::gate::Gate; - use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; - use crate::hash::hash_types::HashOut; - use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use crate::plonk::vars::EvaluationVars; #[test] fn wire_indices() { diff --git a/waksman/src/gates/mod.rs b/waksman/src/gates/mod.rs index 5a2a8f48..c73890b1 100644 --- a/waksman/src/gates/mod.rs +++ b/waksman/src/gates/mod.rs @@ -1 +1,2 @@ +pub mod assert_le; pub mod switch; diff --git a/waksman/src/sorting.rs b/waksman/src/sorting.rs index ac598dc8..010bc8b9 100644 --- a/waksman/src/sorting.rs +++ b/waksman/src/sorting.rs @@ -3,7 +3,6 @@ use std::marker::PhantomData; use itertools::izip; use plonky2::field::extension::Extendable; use plonky2::field::types::Field; -use plonky2::gates::assert_le::AssertLessThanGate; use plonky2::hash::hash_types::RichField; use plonky2::iop::generator::{GeneratedValues, SimpleGenerator}; use plonky2::iop::target::{BoolTarget, Target}; @@ -11,6 +10,7 @@ use plonky2::iop::witness::{PartitionWitness, Witness}; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2_util::ceil_div_usize; +use crate::gates::assert_le::AssertLessThanGate; use crate::permutation::assert_permutation_circuit; pub struct MemoryOp {