Mirror of https://github.com/logos-storage/plonky2.git, synced 2026-01-03 14:23:07 +00:00
Simulate jumpdest data with the interpreter (#1489)
* Simulate jumpdest data with the interpreter
* Fix missing type parameter on some tests
* Refactor simulation and fix some interpreter bugs
* Fix bug in interpreter
* Apply suggestions from code review (Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com>)
* Address remaining reviews
* [WIP] Fixing memory issue
* [WIP] Fixed memory issue but erc20 failing
* Fix interpreter halting issue
* Restore transition.rs
* Minor
* Address reviews
* Address reviews
* Missing fix

---------

Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com>
parent 3ec1bfddb3
commit 710225c9e0
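For orientation before the diff: this commit makes the interpreter compute jumpdest data itself instead of executing the kernel's non-deterministic jumpdest-analysis assembly, and it records, per context, the jumpdest offsets reached while simulating user code (see `simulate_cpu_and_get_user_jumps`, `set_jumpdest_bits`, `add_jumpdest_offset`, and `new_with_state_and_halt_condition` below). What follows is only a minimal, self-contained sketch of the kind of scan involved: a single pass that marks `JUMPDEST` bytes while skipping over PUSH immediates so data bytes are never mistaken for code. The constant and helper names are invented for the example and are not the crate's API.

use std::collections::{BTreeSet, HashMap};

// Illustrative only: 0x5b is the EVM JUMPDEST opcode; 0x60..=0x7f are PUSH1..PUSH32.
const JUMPDEST: u8 = 0x5b;

/// Record every JUMPDEST offset of `code` for `context`, skipping PUSH immediates
/// so that data bytes are not treated as valid jump destinations.
fn collect_jumpdests(context: usize, code: &[u8], table: &mut HashMap<usize, BTreeSet<usize>>) {
    let mut pc = 0;
    while pc < code.len() {
        let op = code[pc];
        if op == JUMPDEST {
            table.entry(context).or_default().insert(pc);
        }
        if (0x60..=0x7f).contains(&op) {
            // PUSH1..PUSH32 carry 1..32 immediate bytes; jump over them.
            pc += (op - 0x5f) as usize;
        }
        pc += 1;
    }
}

fn main() {
    let mut table = HashMap::new();
    // PUSH1 0x5b (the 0x5b here is data), then a real JUMPDEST at offset 2.
    collect_jumpdests(3, &[0x60, 0x5b, 0x5b], &mut table);
    assert_eq!(table[&3], BTreeSet::from([2]));
}

The real change goes further than this sketch: in the diff below, `run_opcode` short-circuits the kernel's `jumpdest_analysis` label and calls `set_jumpdest_bits` on the current code, the jump handling records user-mode jump targets via `add_jumpdest_offset`, and `simulate_cpu_and_get_user_jumps` runs a soft clone of the generation state up to a halt label and feeds the collected per-context table back in through `set_jumpdest_analysis_inputs`.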
@@ -9,8 +9,11 @@ use eth_trie_utils::partial_trie::PartialTrie;
use ethereum_types::{BigEndianHash, H160, H256, U256, U512};
use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;

use super::assembler::BYTES_PER_OFFSET;
use super::utils::u256_from_bool;
use crate::cpu::halt;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
@@ -23,7 +26,7 @@ use crate::generation::rlp::all_rlp_prover_inputs_reversed;
use crate::generation::state::{all_withdrawals_prover_inputs_reversed, GenerationState};
use crate::generation::GenerationInputs;
use crate::memory::segments::{Segment, SEGMENT_SCALING_FACTOR};
use crate::util::{h2u, u256_to_usize};
use crate::util::{h2u, u256_to_u8, u256_to_usize};
use crate::witness::errors::{ProgramError, ProverInputError};
use crate::witness::gas::gas_to_charge;
use crate::witness::memory::{MemoryAddress, MemoryContextState, MemorySegmentState, MemoryState};
@@ -32,8 +35,6 @@ use crate::witness::state::RegistersState;
use crate::witness::transition::decode;
use crate::witness::util::stack_peek;

type F = GoldilocksField;

/// Halt interpreter execution whenever a jump to this offset is done.
const DEFAULT_HALT_OFFSET: usize = 0xdeadbeef;

@@ -55,14 +56,17 @@ impl MemoryState {
}
}

pub(crate) struct Interpreter<'a> {
pub(crate) struct Interpreter<'a, F: Field> {
pub(crate) generation_state: GenerationState<F>,
prover_inputs_map: &'a HashMap<usize, ProverInputFn>,
pub(crate) halt_offsets: Vec<usize>,
// The interpreter will halt only if the current context matches halt_context
halt_context: Option<usize>,
pub(crate) debug_offsets: Vec<usize>,
running: bool,
opcode_count: [usize; 0x100],
memops: Vec<InterpreterMemOpKind>,
jumpdest_table: HashMap<usize, BTreeSet<usize>>,
}

/// Structure storing the state of the interpreter's registers.
@@ -80,10 +84,10 @@ struct InterpreterCheckpoint {
mem_len: usize,
}

pub(crate) fn run_interpreter(
pub(crate) fn run_interpreter<F: Field>(
initial_offset: usize,
initial_stack: Vec<U256>,
) -> anyhow::Result<Interpreter<'static>> {
) -> anyhow::Result<Interpreter<'static, F>> {
run(
&KERNEL.code,
initial_offset,
@@ -100,9 +104,9 @@ pub(crate) struct InterpreterMemoryInitialization {
pub memory: Vec<(usize, Vec<U256>)>,
}

pub(crate) fn run_interpreter_with_memory(
pub(crate) fn run_interpreter_with_memory<F: Field>(
memory_init: InterpreterMemoryInitialization,
) -> anyhow::Result<Interpreter<'static>> {
) -> anyhow::Result<Interpreter<'static, F>> {
let label = KERNEL.global_labels[&memory_init.label];
let mut stack = memory_init.stack;
stack.reverse();
@@ -119,17 +123,47 @@ pub(crate) fn run_interpreter_with_memory(
Ok(interpreter)
}

pub(crate) fn run<'a>(
pub(crate) fn run<'a, F: Field>(
code: &'a [u8],
initial_offset: usize,
initial_stack: Vec<U256>,
prover_inputs: &'a HashMap<usize, ProverInputFn>,
) -> anyhow::Result<Interpreter<'a>> {
) -> anyhow::Result<Interpreter<'a, F>> {
let mut interpreter = Interpreter::new(code, initial_offset, initial_stack, prover_inputs);
interpreter.run()?;
Ok(interpreter)
}

/// Simulates the CPU execution from `state` until the program counter reaches `final_label`
/// in the current context.
pub(crate) fn simulate_cpu_and_get_user_jumps<F: Field>(
final_label: &str,
state: &GenerationState<F>,
) -> Option<HashMap<usize, Vec<usize>>> {
match state.jumpdest_table {
Some(_) => None,
None => {
let halt_pc = KERNEL.global_labels[final_label];
let initial_context = state.registers.context;
let mut interpreter =
Interpreter::new_with_state_and_halt_condition(state, halt_pc, initial_context);

log::debug!("Simulating CPU for jumpdest analysis.");

interpreter.run();

log::debug!("jdt = {:?}", interpreter.jumpdest_table);

interpreter
.generation_state
.set_jumpdest_analysis_inputs(interpreter.jumpdest_table);

log::debug!("Simulated CPU for jumpdest analysis halted.");
interpreter.generation_state.jumpdest_table
}
}
}

/// Different types of Memory operations in the interpreter, and the data required to revert them.
enum InterpreterMemOpKind {
/// We need to provide the context.
@@ -140,7 +174,7 @@ enum InterpreterMemOpKind {
Write(U256, usize, usize, usize),
}

impl<'a> Interpreter<'a> {
impl<'a, F: Field> Interpreter<'a, F> {
pub(crate) fn new_with_kernel(initial_offset: usize, initial_stack: Vec<U256>) -> Self {
let mut result = Self::new(
&KERNEL.code,
@@ -177,10 +211,12 @@ impl<'a> Interpreter<'a> {
// `DEFAULT_HALT_OFFSET` is used as a halting point for the interpreter,
// while the label `halt` is the halting label in the kernel.
halt_offsets: vec![DEFAULT_HALT_OFFSET, KERNEL.global_labels["halt"]],
halt_context: None,
debug_offsets: vec![],
running: false,
opcode_count: [0; 256],
memops: vec![],
jumpdest_table: HashMap::new(),
};
result.generation_state.registers.program_counter = initial_offset;
let initial_stack_len = initial_stack.len();
@@ -194,6 +230,24 @@ impl<'a> Interpreter<'a> {
result
}

pub(crate) fn new_with_state_and_halt_condition(
state: &GenerationState<F>,
halt_offset: usize,
halt_context: usize,
) -> Self {
Self {
generation_state: state.soft_clone(),
prover_inputs_map: &KERNEL.prover_inputs,
halt_offsets: vec![halt_offset],
halt_context: Some(halt_context),
debug_offsets: vec![],
running: false,
opcode_count: [0; 256],
memops: vec![],
jumpdest_table: HashMap::new(),
}
}

/// Initializes the interpreter state given `GenerationInputs`, using the KERNEL code.
pub(crate) fn initialize_interpreter_state_with_kernel(&mut self, inputs: GenerationInputs) {
self.initialize_interpreter_state(inputs, KERNEL.code_hash, KERNEL.code.len());
@@ -399,9 +453,18 @@ impl<'a> Interpreter<'a> {
self.running = true;
while self.running {
let pc = self.generation_state.registers.program_counter;
if self.is_kernel() && self.halt_offsets.contains(&pc) {

if let Some(halt_context) = self.halt_context {
if self.is_kernel()
&& self.halt_offsets.contains(&pc)
&& halt_context == self.generation_state.registers.context
{
self.running = false;
return Ok(());
}
} else if self.halt_offsets.contains(&pc) {
return Ok(());
};
}

let checkpoint = self.checkpoint();
let result = self.run_opcode();
@@ -426,13 +489,16 @@ impl<'a> Interpreter<'a> {
}
}?;
}
println!("Opcode count:");
for i in 0..0x100 {
if self.opcode_count[i] > 0 {
println!("{}: {}", get_mnemonic(i as u8), self.opcode_count[i])
#[cfg(debug_assertions)]
{
println!("Opcode count:");
for i in 0..0x100 {
if self.opcode_count[i] > 0 {
println!("{}: {}", get_mnemonic(i as u8), self.opcode_count[i])
}
}
println!("Total: {}", self.opcode_count.into_iter().sum::<usize>());
}
println!("Total: {}", self.opcode_count.into_iter().sum::<usize>());
Ok(())
}

@@ -587,14 +653,6 @@ impl<'a> Interpreter<'a> {
}
}

pub(crate) fn get_jumpdest_bits(&self, context: usize) -> Vec<bool> {
self.generation_state.memory.contexts[context].segments[Segment::JumpdestBits.unscale()]
.content
.iter()
.map(|x| x.bit(0))
.collect()
}

pub(crate) fn set_jumpdest_analysis_inputs(&mut self, jumps: HashMap<usize, BTreeSet<usize>>) {
self.generation_state.set_jumpdest_analysis_inputs(jumps);
}
@@ -685,12 +743,42 @@ impl<'a> Interpreter<'a> {
}

fn run_opcode(&mut self) -> Result<(), ProgramError> {
// Jumpdest analysis is performed natively by the interpreter and not
// using the non-deterministic Kernel assembly code.
if self.is_kernel()
&& self.generation_state.registers.program_counter
== KERNEL.global_labels["jumpdest_analysis"]
{
self.generation_state.registers.program_counter =
KERNEL.global_labels["jumpdest_analysis_end"];
self.generation_state
.set_jumpdest_bits(&self.generation_state.get_current_code()?);
}

let opcode = self
.code()
.get(self.generation_state.registers.program_counter)
.byte(0);
self.opcode_count[opcode as usize] += 1;
self.incr(1);

let op = decode(self.generation_state.registers, opcode)?;
self.generation_state.registers.gas_used += gas_to_charge(op);

#[cfg(debug_assertions)]
if !self.is_kernel() {
println!(
"User instruction {:?}, stack = {:?}, ctx = {}",
op,
{
let mut stack = self.stack();
stack.reverse();
stack
},
self.generation_state.registers.context
);
}

match opcode {
0x00 => self.run_syscall(opcode, 0, false), // "STOP",
0x01 => self.run_add(), // "ADD",
@@ -811,20 +899,16 @@ impl<'a> Interpreter<'a> {
}
}?;

#[cfg(debug_assertions)]
if self
.debug_offsets
.contains(&self.generation_state.registers.program_counter)
{
println!("At {}, stack={:?}", self.offset_name(), self.stack());
println!("At {},", self.offset_name());
} else if let Some(label) = self.offset_label() {
println!("At {label}");
}

let op = decode(self.generation_state.registers, opcode)
// We default to prover inputs, as those are kernel-only instructions that charge nothing.
.unwrap_or(Operation::ProverInput);
self.generation_state.registers.gas_used += gas_to_charge(op);

if !self.is_kernel() {
let gas_limit_address = MemoryAddress {
context: self.context(),
@@ -1027,6 +1111,7 @@ impl<'a> Interpreter<'a> {
.byte(0)
})
.collect::<Vec<_>>();
#[cfg(debug_assertions)]
println!("Hashing {:?}", &bytes);
let hash = keccak(bytes);
self.push(U256::from_big_endian(hash.as_bytes()))
@@ -1087,51 +1172,75 @@ impl<'a> Interpreter<'a> {
self.push(syscall_info)
}

fn set_jumpdest_bit(&mut self, x: U256) -> U256 {
fn get_jumpdest_bit(&self, offset: usize) -> U256 {
if self.generation_state.memory.contexts[self.context()].segments
[Segment::JumpdestBits.unscale()]
.content
.len()
> x.low_u32() as usize
> offset
{
self.generation_state.memory.get(MemoryAddress {
context: self.context(),
segment: Segment::JumpdestBits.unscale(),
virt: x.low_u32() as usize,
virt: offset,
})
} else {
0.into()
}
}
fn run_jump(&mut self) -> anyhow::Result<(), ProgramError> {
let x = self.pop()?;

let jumpdest_bit = self.set_jumpdest_bit(x);
pub(crate) fn get_jumpdest_bits(&self, context: usize) -> Vec<bool> {
self.generation_state.memory.contexts[context].segments[Segment::JumpdestBits.unscale()]
.content
.iter()
.map(|x| x.bit(0))
.collect()
}

fn add_jumpdest_offset(&mut self, offset: usize) {
if let Some(jumpdest_table) = self
.jumpdest_table
.get_mut(&self.generation_state.registers.context)
{
jumpdest_table.insert(offset);
} else {
self.jumpdest_table.insert(
self.generation_state.registers.context,
BTreeSet::from([offset]),
);
}
}

fn run_jump(&mut self) -> anyhow::Result<(), ProgramError> {
let offset = self.pop()?;

// Check that the destination is valid.
let x: u32 = x
.try_into()
.map_err(|_| ProgramError::InvalidJumpDestination)?;
let offset: usize = u256_to_usize(offset)?;

let jumpdest_bit = self.get_jumpdest_bit(offset);

if !self.is_kernel() && jumpdest_bit != U256::one() {
return Err(ProgramError::InvalidJumpDestination);
}

self.jump_to(x as usize, false)
self.jump_to(offset, false)
}

fn run_jumpi(&mut self) -> anyhow::Result<(), ProgramError> {
let x = self.pop()?;
let b = self.pop()?;
if !b.is_zero() {
let x: u32 = x
.try_into()
.map_err(|_| ProgramError::InvalidJumpiDestination)?;
self.jump_to(x as usize, true)?;
}
let jumpdest_bit = self.set_jumpdest_bit(x);
let offset = self.pop()?;
let cond = self.pop()?;

if !b.is_zero() && !self.is_kernel() && jumpdest_bit != U256::one() {
let offset: usize = offset
.try_into()
.map_err(|_| ProgramError::InvalidJumpiDestination)?;

let jumpdest_bit = self.get_jumpdest_bit(offset);

if !cond.is_zero() && (self.is_kernel() || jumpdest_bit == U256::one()) {
self.jump_to(offset, true)?;
}

if !cond.is_zero() && !self.is_kernel() && jumpdest_bit != U256::one() {
return Err(ProgramError::InvalidJumpiDestination);
}
Ok(())
@@ -1167,9 +1276,10 @@ impl<'a> Interpreter<'a> {
self.generation_state.observe_contract(tip_h256)?;
}

if self.halt_offsets.contains(&offset) {
self.running = false;
if !self.is_kernel() {
self.add_jumpdest_offset(offset);
}

Ok(())
}

@@ -1237,6 +1347,7 @@ impl<'a> Interpreter<'a> {
}
self.set_context(new_ctx);
self.generation_state.registers.stack_len = new_sp;

Ok(())
}

@@ -1603,8 +1714,16 @@ pub(crate) use unpack_address;

#[cfg(test)]
mod tests {
use super::*;
use std::collections::HashMap;

use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
use crate::cpu::kernel::interpreter::{run, Interpreter};
use crate::memory::segments::Segment;
use crate::witness::memory::MemoryAddress;
use crate::witness::operation::CONTEXT_SCALING_FACTOR;

#[test]
fn test_run() -> anyhow::Result<()> {
@@ -1612,7 +1731,7 @@ mod tests {
0x60, 0x1, 0x60, 0x2, 0x1, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56,
]; // PUSH1, 1, PUSH1, 2, ADD, PUSH4 deadbeef, JUMP
assert_eq!(
run(&code, 0, vec![], &HashMap::new())?.stack(),
run::<F>(&code, 0, vec![], &HashMap::new())?.stack(),
&[0x3.into()],
);
Ok(())
@@ -1637,7 +1756,7 @@ mod tests {
0x60, 0xff, 0x60, 0x0, 0x52, 0x60, 0, 0x51, 0x60, 0x1, 0x51, 0x60, 0x42, 0x60, 0x27,
0x53,
];
let mut interpreter = Interpreter::new_with_kernel(0, vec![]);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, vec![]);

interpreter.set_code(1, code.to_vec());

@@ -10,8 +10,7 @@ mod parser;
pub mod stack;
mod utils;

#[cfg(test)]
mod interpreter;
pub(crate) mod interpreter;
#[cfg(test)]
mod tests;

@ -6,6 +6,8 @@ use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
|
||||
use ethereum_types::{Address, BigEndianHash, H256, U256};
|
||||
use hex_literal::hex;
|
||||
use keccak_hash::keccak;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
use plonky2::field::types::Field;
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
@ -20,7 +22,10 @@ use crate::witness::memory::MemoryAddress;
|
||||
use crate::witness::operation::CONTEXT_SCALING_FACTOR;
|
||||
use crate::Node;
|
||||
|
||||
pub(crate) fn initialize_mpts(interpreter: &mut Interpreter, trie_inputs: &TrieInputs) {
|
||||
pub(crate) fn initialize_mpts<F: Field>(
|
||||
interpreter: &mut Interpreter<F>,
|
||||
trie_inputs: &TrieInputs,
|
||||
) {
|
||||
// Load all MPTs.
|
||||
let (trie_root_ptrs, trie_data) =
|
||||
load_all_mpts(trie_inputs).expect("Invalid MPT data for preinitialization");
|
||||
@ -70,8 +75,8 @@ fn random_code() -> Vec<u8> {
|
||||
|
||||
// Stolen from `tests/mpt/insert.rs`
|
||||
// Prepare the interpreter by inserting the account in the state trie.
|
||||
fn prepare_interpreter(
|
||||
interpreter: &mut Interpreter,
|
||||
fn prepare_interpreter<F: Field>(
|
||||
interpreter: &mut Interpreter<F>,
|
||||
address: Address,
|
||||
account: &AccountRlp,
|
||||
) -> Result<()> {
|
||||
@ -151,7 +156,7 @@ fn test_extcodesize() -> Result<()> {
|
||||
let code = random_code();
|
||||
let account = test_account(&code);
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(0, vec![]);
|
||||
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, vec![]);
|
||||
let address: Address = thread_rng().gen();
|
||||
// Prepare the interpreter by inserting the account in the state trie.
|
||||
prepare_interpreter(&mut interpreter, address, &account)?;
|
||||
@ -183,7 +188,7 @@ fn test_extcodecopy() -> Result<()> {
|
||||
let code = random_code();
|
||||
let account = test_account(&code);
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(0, vec![]);
|
||||
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, vec![]);
|
||||
let address: Address = thread_rng().gen();
|
||||
// Prepare the interpreter by inserting the account in the state trie.
|
||||
prepare_interpreter(&mut interpreter, address, &account)?;
|
||||
@ -252,8 +257,8 @@ fn test_extcodecopy() -> Result<()> {
|
||||
|
||||
/// Prepare the interpreter for storage tests by inserting all necessary accounts
|
||||
/// in the state trie, adding the code we want to context 1 and switching the context.
|
||||
fn prepare_interpreter_all_accounts(
|
||||
interpreter: &mut Interpreter,
|
||||
fn prepare_interpreter_all_accounts<F: Field>(
|
||||
interpreter: &mut Interpreter<F>,
|
||||
trie_inputs: TrieInputs,
|
||||
addr: [u8; 20],
|
||||
code: &[u8],
|
||||
@ -318,7 +323,7 @@ fn sstore() -> Result<()> {
|
||||
};
|
||||
|
||||
let initial_stack = vec![];
|
||||
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
|
||||
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
|
||||
|
||||
// Prepare the interpreter by inserting the account in the state trie.
|
||||
prepare_interpreter_all_accounts(&mut interpreter, trie_inputs, addr, &code)?;
|
||||
@ -407,7 +412,7 @@ fn sload() -> Result<()> {
|
||||
};
|
||||
|
||||
let initial_stack = vec![];
|
||||
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
|
||||
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
|
||||
|
||||
// Prepare the interpreter by inserting the account in the state trie.
|
||||
prepare_interpreter_all_accounts(&mut interpreter, trie_inputs, addr, &code)?;
|
||||
|
||||
@ -6,6 +6,7 @@ use eth_trie_utils::partial_trie::{HashedPartialTrie, Node, PartialTrie};
|
||||
use ethereum_types::{Address, BigEndianHash, H256};
|
||||
use hex_literal::hex;
|
||||
use keccak_hash::keccak;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
|
||||
@ -155,7 +156,7 @@ fn test_add11_yml() {
|
||||
};
|
||||
|
||||
let initial_stack = vec![];
|
||||
let mut interpreter =
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_generation_inputs_and_kernel(0, initial_stack, tries_inputs);
|
||||
|
||||
let route_txn_label = KERNEL.global_labels["main"];
|
||||
@ -297,7 +298,7 @@ fn test_add11_yml_with_exception() {
|
||||
};
|
||||
|
||||
let initial_stack = vec![];
|
||||
let mut interpreter =
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_generation_inputs_and_kernel(0, initial_stack, tries_inputs);
|
||||
|
||||
let route_txn_label = KERNEL.global_labels["main"];
|
||||
|
||||
@ -2,6 +2,8 @@ use anyhow::Result;
|
||||
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
|
||||
use ethereum_types::{Address, BigEndianHash, H256, U256};
|
||||
use keccak_hash::keccak;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
use plonky2::field::types::Field;
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
@ -24,8 +26,8 @@ fn test_account(balance: U256) -> AccountRlp {
|
||||
|
||||
// Stolen from `tests/mpt/insert.rs`
|
||||
// Prepare the interpreter by inserting the account in the state trie.
|
||||
fn prepare_interpreter(
|
||||
interpreter: &mut Interpreter,
|
||||
fn prepare_interpreter<F: Field>(
|
||||
interpreter: &mut Interpreter<F>,
|
||||
address: Address,
|
||||
account: &AccountRlp,
|
||||
) -> Result<()> {
|
||||
@ -107,7 +109,7 @@ fn test_balance() -> Result<()> {
|
||||
let balance = U256(rng.gen());
|
||||
let account = test_account(balance);
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(0, vec![]);
|
||||
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, vec![]);
|
||||
let address: Address = rng.gen();
|
||||
// Prepare the interpreter by inserting the account in the state trie.
|
||||
prepare_interpreter(&mut interpreter, address, &account)?;
|
||||
|
||||
@ -8,6 +8,7 @@ use ethereum_types::U256;
|
||||
use itertools::Itertools;
|
||||
use num::{BigUint, One, Zero};
|
||||
use num_bigint::RandBigInt;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
use plonky2_util::ceil_div_usize;
|
||||
use rand::Rng;
|
||||
|
||||
@ -99,7 +100,7 @@ fn run_test(fn_label: &str, memory: Vec<U256>, stack: Vec<U256>) -> Result<(Vec<
|
||||
initial_stack.push(retdest);
|
||||
initial_stack.reverse();
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(fn_label, initial_stack);
|
||||
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(fn_label, initial_stack);
|
||||
interpreter.set_current_general_memory(memory);
|
||||
interpreter.run()?;
|
||||
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
use anyhow::Result;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
|
||||
use crate::cpu::kernel::interpreter::{
|
||||
run_interpreter_with_memory, InterpreterMemoryInitialization,
|
||||
@ -71,7 +72,7 @@ fn run_blake2_f(
|
||||
memory: vec![],
|
||||
};
|
||||
|
||||
let result = run_interpreter_with_memory(interpreter_setup).unwrap();
|
||||
let result = run_interpreter_with_memory::<F>(interpreter_setup).unwrap();
|
||||
let mut hash = result.stack().to_vec();
|
||||
hash.reverse();
|
||||
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use ethereum_types::{H256, U256};
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
@ -19,7 +20,8 @@ fn test_correct_block_hash() -> Result<()> {
|
||||
|
||||
let hashes: Vec<U256> = vec![U256::from_big_endian(&thread_rng().gen::<H256>().0); 257];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(blockhash_label, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(blockhash_label, initial_stack);
|
||||
interpreter.set_memory_segment(Segment::BlockHashes, hashes[0..256].to_vec());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::BlockCurrentHash, hashes[256]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::BlockNumber, 256.into());
|
||||
@ -48,7 +50,8 @@ fn test_big_index_block_hash() -> Result<()> {
|
||||
|
||||
let hashes: Vec<U256> = vec![U256::from_big_endian(&thread_rng().gen::<H256>().0); 257];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(blockhash_label, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(blockhash_label, initial_stack);
|
||||
interpreter.set_memory_segment(Segment::BlockHashes, hashes[0..256].to_vec());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::BlockCurrentHash, hashes[256]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::BlockNumber, cur_block_number.into());
|
||||
@ -78,7 +81,8 @@ fn test_small_index_block_hash() -> Result<()> {
|
||||
|
||||
let hashes: Vec<U256> = vec![U256::from_big_endian(&thread_rng().gen::<H256>().0); 257];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(blockhash_label, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(blockhash_label, initial_stack);
|
||||
interpreter.set_memory_segment(Segment::BlockHashes, hashes[0..256].to_vec());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::BlockCurrentHash, hashes[256]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::BlockNumber, cur_block_number.into());
|
||||
@ -106,7 +110,8 @@ fn test_block_hash_with_overflow() -> Result<()> {
|
||||
|
||||
let hashes: Vec<U256> = vec![U256::from_big_endian(&thread_rng().gen::<H256>().0); 257];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(blockhash_label, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(blockhash_label, initial_stack);
|
||||
interpreter.set_memory_segment(Segment::BlockHashes, hashes[0..256].to_vec());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::BlockCurrentHash, hashes[256]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::BlockNumber, cur_block_number.into());
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use ethereum_types::U256;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
use rand::Rng;
|
||||
|
||||
use crate::cpu::kernel::interpreter::{
|
||||
@ -23,7 +24,7 @@ fn test_bls_fp2_mul() -> Result<()> {
|
||||
segment: KernelGeneral,
|
||||
memory: vec![],
|
||||
};
|
||||
let interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter = run_interpreter_with_memory::<F>(setup).unwrap();
|
||||
let stack: Vec<U256> = interpreter.stack().iter().rev().cloned().collect();
|
||||
let output = Fp2::<BLS381>::from_stack(&stack);
|
||||
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use ethereum_types::U256;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
use rand::Rng;
|
||||
|
||||
use crate::cpu::kernel::interpreter::{
|
||||
@ -23,7 +24,7 @@ fn run_bn_mul_fp6(f: Fp6<BN254>, g: Fp6<BN254>, label: &str) -> Fp6<BN254> {
|
||||
segment: BnPairing,
|
||||
memory: vec![],
|
||||
};
|
||||
let interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter = run_interpreter_with_memory::<F>(setup).unwrap();
|
||||
let output: Vec<U256> = interpreter.stack().iter().rev().cloned().collect();
|
||||
Fp6::<BN254>::from_stack(&output)
|
||||
}
|
||||
@ -63,7 +64,7 @@ fn run_bn_mul_fp12(f: Fp12<BN254>, g: Fp12<BN254>, label: &str) -> Fp12<BN254> {
|
||||
segment: BnPairing,
|
||||
memory: vec![(in0, f.to_stack().to_vec()), (in1, g.to_stack().to_vec())],
|
||||
};
|
||||
let interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter = run_interpreter_with_memory::<F>(setup).unwrap();
|
||||
let output = interpreter.extract_kernel_memory(BnPairing, out..out + 12);
|
||||
Fp12::<BN254>::from_stack(&output)
|
||||
}
|
||||
@ -93,7 +94,7 @@ fn run_bn_frob_fp6(n: usize, f: Fp6<BN254>) -> Fp6<BN254> {
|
||||
segment: BnPairing,
|
||||
memory: vec![],
|
||||
};
|
||||
let interpreter: Interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter: Interpreter<F> = run_interpreter_with_memory(setup).unwrap();
|
||||
let output: Vec<U256> = interpreter.stack().iter().rev().cloned().collect();
|
||||
Fp6::<BN254>::from_stack(&output)
|
||||
}
|
||||
@ -117,7 +118,7 @@ fn run_bn_frob_fp12(f: Fp12<BN254>, n: usize) -> Fp12<BN254> {
|
||||
segment: BnPairing,
|
||||
memory: vec![(ptr, f.to_stack().to_vec())],
|
||||
};
|
||||
let interpreter: Interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter: Interpreter<F> = run_interpreter_with_memory(setup).unwrap();
|
||||
let output: Vec<U256> = interpreter.extract_kernel_memory(BnPairing, ptr..ptr + 12);
|
||||
Fp12::<BN254>::from_stack(&output)
|
||||
}
|
||||
@ -147,7 +148,7 @@ fn test_bn_inv_fp12() -> Result<()> {
|
||||
segment: BnPairing,
|
||||
memory: vec![(ptr, f.to_stack().to_vec())],
|
||||
};
|
||||
let interpreter: Interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter: Interpreter<F> = run_interpreter_with_memory(setup).unwrap();
|
||||
let output: Vec<U256> = interpreter.extract_kernel_memory(BnPairing, inv..inv + 12);
|
||||
let output = Fp12::<BN254>::from_stack(&output);
|
||||
|
||||
@ -175,7 +176,7 @@ fn test_bn_final_exponent() -> Result<()> {
|
||||
memory: vec![(ptr, f.to_stack().to_vec())],
|
||||
};
|
||||
|
||||
let interpreter: Interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter: Interpreter<F> = run_interpreter_with_memory(setup).unwrap();
|
||||
let output: Vec<U256> = interpreter.extract_kernel_memory(BnPairing, ptr..ptr + 12);
|
||||
let expected: Vec<U256> = bn_final_exponent(f).to_stack();
|
||||
|
||||
@ -202,7 +203,7 @@ fn test_bn_miller() -> Result<()> {
|
||||
segment: BnPairing,
|
||||
memory: vec![(ptr, input)],
|
||||
};
|
||||
let interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter = run_interpreter_with_memory::<F>(setup).unwrap();
|
||||
let output: Vec<U256> = interpreter.extract_kernel_memory(BnPairing, out..out + 12);
|
||||
let expected = bn_miller_loop(p, q).to_stack();
|
||||
|
||||
@ -246,7 +247,7 @@ fn test_bn_pairing() -> Result<()> {
|
||||
segment: BnPairing,
|
||||
memory: vec![(ptr, input)],
|
||||
};
|
||||
let interpreter = run_interpreter_with_memory(setup).unwrap();
|
||||
let interpreter = run_interpreter_with_memory::<F>(setup).unwrap();
|
||||
assert_eq!(interpreter.stack()[0], U256::one());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -2,6 +2,7 @@ use std::collections::HashSet;
|
||||
|
||||
use anyhow::Result;
|
||||
use ethereum_types::{Address, U256};
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
@ -33,7 +34,8 @@ fn test_insert_accessed_addresses() -> Result<()> {
|
||||
|
||||
// Test for address already in list.
|
||||
let initial_stack = vec![retaddr, U256::from(addr_in_list.0.as_slice())];
|
||||
let mut interpreter = Interpreter::new_with_kernel(insert_accessed_addresses, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(insert_accessed_addresses, initial_stack);
|
||||
for i in 0..n {
|
||||
let addr = U256::from(addresses[i].0.as_slice());
|
||||
interpreter
|
||||
@ -57,7 +59,8 @@ fn test_insert_accessed_addresses() -> Result<()> {
|
||||
|
||||
// Test for address not in list.
|
||||
let initial_stack = vec![retaddr, U256::from(addr_not_in_list.0.as_slice())];
|
||||
let mut interpreter = Interpreter::new_with_kernel(insert_accessed_addresses, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(insert_accessed_addresses, initial_stack);
|
||||
for i in 0..n {
|
||||
let addr = U256::from(addresses[i].0.as_slice());
|
||||
interpreter
|
||||
@ -115,7 +118,8 @@ fn test_insert_accessed_storage_keys() -> Result<()> {
|
||||
storage_key_in_list.1,
|
||||
U256::from(storage_key_in_list.0 .0.as_slice()),
|
||||
];
|
||||
let mut interpreter = Interpreter::new_with_kernel(insert_accessed_storage_keys, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(insert_accessed_storage_keys, initial_stack);
|
||||
for i in 0..n {
|
||||
let addr = U256::from(storage_keys[i].0 .0.as_slice());
|
||||
interpreter
|
||||
@ -152,7 +156,8 @@ fn test_insert_accessed_storage_keys() -> Result<()> {
|
||||
storage_key_not_in_list.1,
|
||||
U256::from(storage_key_not_in_list.0 .0.as_slice()),
|
||||
];
|
||||
let mut interpreter = Interpreter::new_with_kernel(insert_accessed_storage_keys, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(insert_accessed_storage_keys, initial_stack);
|
||||
for i in 0..n {
|
||||
let addr = U256::from(storage_keys[i].0 .0.as_slice());
|
||||
interpreter
|
||||
|
||||
@ -4,6 +4,7 @@ use anyhow::Result;
|
||||
use ethereum_types::{H256, U256};
|
||||
use hex_literal::hex;
|
||||
use keccak_hash::keccak;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
use crate::cpu::kernel::interpreter::Interpreter;
|
||||
@ -19,7 +20,8 @@ fn test_get_create_address() -> Result<()> {
|
||||
let expected_addr = U256::from_big_endian(&hex!("3f09c73a5ed19289fb9bdc72f1742566df146f56"));
|
||||
|
||||
let initial_stack = vec![retaddr, nonce, sender];
|
||||
let mut interpreter = Interpreter::new_with_kernel(get_create_address, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(get_create_address, initial_stack);
|
||||
interpreter.run()?;
|
||||
|
||||
assert_eq!(interpreter.stack(), &[expected_addr]);
|
||||
@ -105,7 +107,8 @@ fn test_get_create2_address() -> Result<()> {
|
||||
} in create2_test_cases()
|
||||
{
|
||||
let initial_stack = vec![retaddr, salt, U256::from(code_hash.0), sender];
|
||||
let mut interpreter = Interpreter::new_with_kernel(get_create2_address, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(get_create2_address, initial_stack);
|
||||
interpreter.run()?;
|
||||
|
||||
assert_eq!(interpreter.stack(), &[expected_addr]);
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use ethereum_types::U256;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
|
||||
@ -15,13 +16,15 @@ fn test_intrinsic_gas() -> Result<()> {
|
||||
|
||||
// Contract creation transaction.
|
||||
let initial_stack = vec![0xdeadbeefu32.into()];
|
||||
let mut interpreter = Interpreter::new_with_kernel(intrinsic_gas, initial_stack.clone());
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(intrinsic_gas, initial_stack.clone());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::ContractCreation, U256::one());
|
||||
interpreter.run()?;
|
||||
assert_eq!(interpreter.stack(), vec![(GAS_TX + GAS_TXCREATE).into()]);
|
||||
|
||||
// Message transaction.
|
||||
let mut interpreter = Interpreter::new_with_kernel(intrinsic_gas, initial_stack);
|
||||
let mut interpreter: Interpreter<F> =
|
||||
Interpreter::new_with_kernel(intrinsic_gas, initial_stack);
|
||||
interpreter.set_txn_field(NormalizedTxnField::To, 123.into());
|
||||
interpreter.run()?;
|
||||
assert_eq!(interpreter.stack(), vec![GAS_TX.into()]);
|
||||
|
||||
@@ -2,6 +2,8 @@ use std::collections::{BTreeSet, HashMap};

use anyhow::Result;
use ethereum_types::U256;
use itertools::Itertools;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::Interpreter;
@@ -10,7 +12,10 @@ use crate::witness::operation::CONTEXT_SCALING_FACTOR;

#[test]
fn test_jumpdest_analysis() -> Result<()> {
let jumpdest_analysis = KERNEL.global_labels["jumpdest_analysis"];
// By default the interpreter will skip jumpdest analysis asm and compute
// the jumpdest table bits natively. We avoid that starting 1 line after
// performing the missing first PROVER_INPUT "by hand"
let jumpdest_analysis = KERNEL.global_labels["jumpdest_analysis"] + 1;
const CONTEXT: usize = 3; // arbitrary

let add = get_opcode("ADD");
@@ -18,7 +23,7 @@ fn test_jumpdest_analysis() -> Result<()> {
let jumpdest = get_opcode("JUMPDEST");

#[rustfmt::skip]
let code: Vec<u8> = vec![
let mut code: Vec<u8> = vec![
add,
jumpdest,
push2,
@@ -28,16 +33,24 @@ fn test_jumpdest_analysis() -> Result<()> {
add,
jumpdest,
];
code.extend(
(0..32)
.rev()
.map(get_push_opcode)
.chain(std::iter::once(jumpdest)),
);

let jumpdest_bits = vec![false, true, false, false, false, true, false, true];
let mut jumpdest_bits = vec![false, true, false, false, false, true, false, true];
// Add 32 falses and 1 true
jumpdest_bits.extend(
std::iter::repeat(false)
.take(32)
.chain(std::iter::once(true)),
);

let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(jumpdest_analysis, vec![]);
let code_len = code.len();

// Contract creation transaction.
let initial_stack = vec![
0xDEADBEEFu32.into(),
code.len().into(),
U256::from(CONTEXT) << CONTEXT_SCALING_FACTOR,
];
let mut interpreter = Interpreter::new_with_kernel(jumpdest_analysis, initial_stack);
interpreter.set_code(CONTEXT, code);
interpreter.set_jumpdest_analysis_inputs(HashMap::from([(
3,
@@ -50,31 +63,47 @@ fn test_jumpdest_analysis() -> Result<()> {
),
)]));

// The `set_jumpdest_analysis_inputs` method is never used.
assert_eq!(
interpreter.generation_state.jumpdest_table,
// Context 3 has jumpdest 1, 5, 7. All have proof 0 and hence
// the list [proof_0, jumpdest_0, ... ] is [0, 1, 0, 5, 0, 7]
Some(HashMap::from([(3, vec![0, 1, 0, 5, 0, 7])]))
// the list [proof_0, jumpdest_0, ... ] is [0, 1, 0, 5, 0, 7, 8, 40]
Some(HashMap::from([(3, vec![0, 1, 0, 5, 0, 7, 8, 40])]))
);

// Run jumpdest analysis with context = 3
interpreter.generation_state.registers.context = CONTEXT;
interpreter.push(0xDEADBEEFu32.into());
interpreter.push(code_len.into());
interpreter.push(U256::from(CONTEXT) << CONTEXT_SCALING_FACTOR);

// We need to manually pop the jumpdest_table and push its value on the top of the stack
interpreter
.generation_state
.jumpdest_table
.as_mut()
.unwrap()
.get_mut(&CONTEXT)
.unwrap()
.pop();
interpreter.push(U256::one());

interpreter.run()?;
assert_eq!(interpreter.stack(), vec![]);

assert_eq!(jumpdest_bits, interpreter.get_jumpdest_bits(3));
assert_eq!(jumpdest_bits, interpreter.get_jumpdest_bits(CONTEXT));

Ok(())
}

#[test]
fn test_packed_verification() -> Result<()> {
let jumpdest_analysis = KERNEL.global_labels["jumpdest_analysis"];
let write_table_if_jumpdest = KERNEL.global_labels["write_table_if_jumpdest"];
const CONTEXT: usize = 3; // arbitrary

let add = get_opcode("ADD");
let jumpdest = get_opcode("JUMPDEST");

// The last push(i=0) is 0x5f which is not a valid opcode. However, this
// is still meaningful for the test and makes things easier
let mut code: Vec<u8> = std::iter::once(add)
.chain(
(0..=31)
@@ -92,10 +121,12 @@ fn test_packed_verification() -> Result<()> {
// Contract creation transaction.
let initial_stack = vec![
0xDEADBEEFu32.into(),
code.len().into(),
U256::from(CONTEXT) << CONTEXT_SCALING_FACTOR,
33.into(),
U256::one(),
];
let mut interpreter = Interpreter::new_with_kernel(jumpdest_analysis, initial_stack.clone());
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(write_table_if_jumpdest, initial_stack.clone());
interpreter.set_code(CONTEXT, code.clone());
interpreter.generation_state.jumpdest_table = Some(HashMap::from([(3, vec![1, 33])]));

@@ -106,8 +137,8 @@ fn test_packed_verification() -> Result<()> {
// If we add 1 to each opcode the jumpdest at position 32 is never a valid jumpdest
for i in 1..=32 {
code[i] += 1;
let mut interpreter =
Interpreter::new_with_kernel(jumpdest_analysis, initial_stack.clone());
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(write_table_if_jumpdest, initial_stack.clone());
interpreter.set_code(CONTEXT, code.clone());
interpreter.generation_state.jumpdest_table = Some(HashMap::from([(3, vec![1, 33])]));

@ -2,6 +2,7 @@
|
||||
mod bn {
|
||||
use anyhow::Result;
|
||||
use ethereum_types::U256;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
use crate::cpu::kernel::interpreter::{run_interpreter, Interpreter};
|
||||
@ -43,76 +44,110 @@ mod bn {
|
||||
|
||||
// Standard addition #1
|
||||
let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point1.1, point1.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point2.1, point2.0])?);
|
||||
// Standard addition #2
|
||||
let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point2.1, point2.0])?);
|
||||
|
||||
// Standard doubling #1
|
||||
let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point3.1, point3.0])?);
|
||||
// Standard doubling #2
|
||||
let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_double, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_double, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point3.1, point3.0])?);
|
||||
// Standard doubling #3
|
||||
let initial_stack = u256ify(["0xdeadbeef", "0x2", point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_mul, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_mul, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point3.1, point3.0])?);
|
||||
|
||||
// Addition with identity #1
|
||||
let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, point1.1, point1.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point1.1, point1.0])?);
|
||||
// Addition with identity #2
|
||||
let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, identity.1, identity.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point1.1, point1.0])?);
|
||||
// Addition with identity #3
|
||||
let initial_stack =
|
||||
u256ify(["0xdeadbeef", identity.1, identity.0, identity.1, identity.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([identity.1, identity.0])?);
|
||||
|
||||
// Addition with invalid point(s) #1
|
||||
let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, invalid.1, invalid.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, vec![U256::MAX, U256::MAX]);
|
||||
// Addition with invalid point(s) #2
|
||||
let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, vec![U256::MAX, U256::MAX]);
|
||||
// Addition with invalid point(s) #3
|
||||
let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, identity.1, identity.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, vec![U256::MAX, U256::MAX]);
|
||||
// Addition with invalid point(s) #4
|
||||
let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, invalid.1, invalid.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, vec![U256::MAX, U256::MAX]);
|
||||
|
||||
// Scalar multiplication #1
|
||||
let initial_stack = u256ify(["0xdeadbeef", s, point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_mul, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_mul, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point4.1, point4.0])?);
|
||||
// Scalar multiplication #2
|
||||
let initial_stack = u256ify(["0xdeadbeef", "0x0", point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_mul, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_mul, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([identity.1, identity.0])?);
|
||||
// Scalar multiplication #3
|
||||
let initial_stack = u256ify(["0xdeadbeef", "0x1", point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_mul, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_mul, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point0.1, point0.0])?);
|
||||
// Scalar multiplication #4
|
||||
let initial_stack = u256ify(["0xdeadbeef", s, identity.1, identity.0])?;
|
||||
let stack = run_interpreter(ec_mul, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_mul, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([identity.1, identity.0])?);
|
||||
// Scalar multiplication #5
|
||||
let initial_stack = u256ify(["0xdeadbeef", s, invalid.1, invalid.0])?;
|
||||
let stack = run_interpreter(ec_mul, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_mul, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, vec![U256::MAX, U256::MAX]);
|
||||
|
||||
// Multiple calls
|
||||
@ -126,7 +161,9 @@ mod bn {
|
||||
point0.1,
|
||||
point0.0,
|
||||
])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point4.1, point4.0])?);
|
||||
|
||||
Ok(())
|
||||
@ -147,7 +184,8 @@ mod bn {
|
||||
|
||||
let mut initial_stack = u256ify(["0xdeadbeef"])?;
|
||||
initial_stack.push(k);
|
||||
let mut int = Interpreter::new(&KERNEL.code, glv, initial_stack, &KERNEL.prover_inputs);
|
||||
let mut int: Interpreter<F> =
|
||||
Interpreter::new(&KERNEL.code, glv, initial_stack, &KERNEL.prover_inputs);
|
||||
int.run()?;
|
||||
|
||||
assert_eq!(line, int.stack());
|
||||
@ -165,7 +203,7 @@ mod bn {
|
||||
"0x10d7cf0621b6e42c1dbb421f5ef5e1936ca6a87b38198d1935be31e28821d171",
|
||||
"0x11b7d55f16aaac07de9a0ed8ac2e8023570dbaa78571fc95e553c4b3ba627689",
|
||||
])?;
|
||||
let mut int = Interpreter::new(
|
||||
let mut int: Interpreter<F> = Interpreter::new(
|
||||
&KERNEL.code,
|
||||
precompute,
|
||||
initial_stack,
|
||||
@ -227,6 +265,7 @@ mod bn {
|
||||
mod secp {
|
||||
use anyhow::Result;
|
||||
use ethereum_types::U256;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField as F;
|
||||
|
||||
use crate::cpu::kernel::aggregator::{combined_kernel, KERNEL};
|
||||
use crate::cpu::kernel::interpreter::{run, run_interpreter, Interpreter};
|
||||
@ -260,36 +299,48 @@ mod secp {
|
||||
|
||||
// Standard addition #1
|
||||
let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point1.1, point1.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point2.1, point2.0])?);
|
||||
// Standard addition #2
|
||||
let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, point0.1, point0.0])?;
|
||||
let stack = run(&kernel.code, ec_add, initial_stack, &kernel.prover_inputs)?
|
||||
let stack = run::<F>(&kernel.code, ec_add, initial_stack, &kernel.prover_inputs)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point2.1, point2.0])?);
|
||||
|
||||
// Standard doubling #1
|
||||
let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point3.1, point3.0])?);
|
||||
// Standard doubling #2
|
||||
let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0])?;
|
||||
let stack = run_interpreter(ec_double, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_double, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point3.1, point3.0])?);
|
||||
|
||||
// Addition with identity #1
|
||||
let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, point1.1, point1.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point1.1, point1.0])?);
|
||||
// Addition with identity #2
|
||||
let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, identity.1, identity.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
||||
.stack()
|
||||
.to_vec();
|
||||
assert_eq!(stack, u256ify([point1.1, point1.0])?);
|
||||
// Addition with identity #3
|
||||
let initial_stack =
|
||||
u256ify(["0xdeadbeef", identity.1, identity.0, identity.1, identity.0])?;
|
||||
let stack = run_interpreter(ec_add, initial_stack)?.stack().to_vec();
|
||||
let stack = run_interpreter::<F>(ec_add, initial_stack)?
|
.stack()
.to_vec();
assert_eq!(stack, u256ify([identity.1, identity.0])?);

Ok(())

@ -310,7 +361,8 @@ mod secp {

let mut initial_stack = u256ify(["0xdeadbeef"])?;
initial_stack.push(k);
let mut int = Interpreter::new(&KERNEL.code, glv, initial_stack, &KERNEL.prover_inputs);
let mut int: Interpreter<F> =
Interpreter::new(&KERNEL.code, glv, initial_stack, &KERNEL.prover_inputs);
int.run()?;

assert_eq!(line, int.stack());

@ -2,6 +2,7 @@ use std::str::FromStr;

use anyhow::Result;
use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::run_interpreter;
@ -10,7 +11,9 @@ use crate::cpu::kernel::tests::u256ify;
fn test_valid_ecrecover(hash: &str, v: &str, r: &str, s: &str, expected: &str) -> Result<()> {
let ecrecover = KERNEL.global_labels["ecrecover"];
let initial_stack = u256ify(["0xdeadbeef", s, r, v, hash])?;
let stack = run_interpreter(ecrecover, initial_stack)?.stack().to_vec();
let stack = run_interpreter::<F>(ecrecover, initial_stack)?
.stack()
.to_vec();
assert_eq!(stack[0], U256::from_str(expected).unwrap());

Ok(())
@ -19,7 +22,9 @@ fn test_valid_ecrecover(hash: &str, v: &str, r: &str, s: &str, expected: &str) -
fn test_invalid_ecrecover(hash: &str, v: &str, r: &str, s: &str) -> Result<()> {
let ecrecover = KERNEL.global_labels["ecrecover"];
let initial_stack = u256ify(["0xdeadbeef", s, r, v, hash])?;
let stack = run_interpreter(ecrecover, initial_stack)?.stack().to_vec();
let stack = run_interpreter::<F>(ecrecover, initial_stack)?
.stack()
.to_vec();
assert_eq!(stack, vec![U256::MAX]);

Ok(())

@ -1,5 +1,6 @@
use anyhow::Result;
use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;
use rand::{thread_rng, Rng};

use crate::cpu::kernel::aggregator::KERNEL;
@ -15,16 +16,16 @@ fn test_exp() -> Result<()> {

// Random input
let initial_stack = vec![0xDEADBEEFu32.into(), b, a];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack.clone());
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack.clone());

let stack_with_kernel = run_interpreter(exp, initial_stack)?.stack();
let stack_with_kernel = run_interpreter::<F>(exp, initial_stack)?.stack();

let expected_exp = a.overflowing_pow(b).0;
assert_eq!(stack_with_kernel, vec![expected_exp]);

// 0 base
let initial_stack = vec![0xDEADBEEFu32.into(), b, U256::zero()];
let stack_with_kernel = run_interpreter(exp, initial_stack)?.stack();
let stack_with_kernel = run_interpreter::<F>(exp, initial_stack)?.stack();

let expected_exp = U256::zero().overflowing_pow(b).0;
assert_eq!(stack_with_kernel, vec![expected_exp]);
@ -33,7 +34,7 @@ fn test_exp() -> Result<()> {
let initial_stack = vec![0xDEADBEEFu32.into(), U256::zero(), a];
interpreter.set_is_kernel(true);
interpreter.set_context(0);
let stack_with_kernel = run_interpreter(exp, initial_stack)?.stack();
let stack_with_kernel = run_interpreter::<F>(exp, initial_stack)?.stack();

let expected_exp = 1.into();
assert_eq!(stack_with_kernel, vec![expected_exp]);

@ -1,12 +1,13 @@
use anyhow::Result;
// use blake2::Blake2b512;
use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;
use rand::{thread_rng, Rng};
use ripemd::{Digest, Ripemd160};
use sha2::Sha256;

use crate::cpu::kernel::interpreter::{
run_interpreter_with_memory, InterpreterMemoryInitialization,
run_interpreter_with_memory, Interpreter, InterpreterMemoryInitialization,
};
use crate::memory::segments::Segment::KernelGeneral;

@ -66,7 +67,7 @@ fn prepare_test<T>(
let interpreter_setup = make_interpreter_setup(message, hash_fn_label, hash_input_virt);

// Run the interpreter
let result = run_interpreter_with_memory(interpreter_setup).unwrap();
let result: Interpreter<F> = run_interpreter_with_memory(interpreter_setup).unwrap();

Ok((expected, result.stack().to_vec()))
}

@ -1,5 +1,6 @@
use anyhow::Result;
use ethereum_types::{Address, U256};
use plonky2::field::goldilocks_field::GoldilocksField as F;
use rand::{thread_rng, Rng};

use crate::cpu::kernel::aggregator::KERNEL;
@ -25,7 +26,7 @@ fn test_log_0() -> Result<()> {
U256::from_big_endian(&address.to_fixed_bytes()),
];

let mut interpreter = Interpreter::new_with_kernel(logs_entry, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(logs_entry, initial_stack);
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 0.into());
interpreter.set_global_metadata_field(GlobalMetadata::LogsDataLen, 0.into());

@ -68,7 +69,7 @@ fn test_log_2() -> Result<()> {
U256::from_big_endian(&address.to_fixed_bytes()),
];

let mut interpreter = Interpreter::new_with_kernel(logs_entry, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(logs_entry, initial_stack);
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 2.into());
interpreter.set_global_metadata_field(GlobalMetadata::LogsDataLen, 5.into());

@ -129,7 +130,7 @@ fn test_log_4() -> Result<()> {
U256::from_big_endian(&address.to_fixed_bytes()),
];

let mut interpreter = Interpreter::new_with_kernel(logs_entry, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(logs_entry, initial_stack);
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 2.into());
interpreter.set_global_metadata_field(GlobalMetadata::LogsDataLen, 5.into());

@ -189,7 +190,7 @@ fn test_log_5() -> Result<()> {
U256::from_big_endian(&address.to_fixed_bytes()),
];

let mut interpreter = Interpreter::new_with_kernel(logs_entry, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(logs_entry, initial_stack);
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 0.into());
interpreter.set_global_metadata_field(GlobalMetadata::LogsDataLen, 0.into());


@ -2,6 +2,7 @@ use anyhow::Result;
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{BigEndianHash, H256, U512};
use plonky2::field::goldilocks_field::GoldilocksField as F;
use rand::random;

use crate::cpu::kernel::aggregator::KERNEL;
@ -98,7 +99,7 @@ fn test_state_trie(
let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"];

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);

initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);

@ -1,6 +1,7 @@
use anyhow::Result;
use eth_trie_utils::partial_trie::PartialTrie;
use ethereum_types::{BigEndianHash, H256};
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::Interpreter;
@ -111,7 +112,7 @@ fn test_state_trie(trie_inputs: TrieInputs) -> Result<()> {
let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"];

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);

initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);

@ -1,5 +1,6 @@
use anyhow::Result;
use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::Interpreter;
@ -15,7 +16,7 @@ fn hex_prefix_even_nonterminated() -> Result<()> {
let num_nibbles = 6.into();
let rlp_pos = U256::from(Segment::RlpRaw as usize);
let initial_stack = vec![retdest, terminated, packed_nibbles, num_nibbles, rlp_pos];
let mut interpreter = Interpreter::new_with_kernel(hex_prefix, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(hex_prefix, initial_stack);
interpreter.run()?;
assert_eq!(interpreter.stack(), vec![rlp_pos + U256::from(5)]);

@ -43,7 +44,7 @@ fn hex_prefix_odd_terminated() -> Result<()> {
let num_nibbles = 5.into();
let rlp_pos = U256::from(Segment::RlpRaw as usize);
let initial_stack = vec![retdest, terminated, packed_nibbles, num_nibbles, rlp_pos];
let mut interpreter = Interpreter::new_with_kernel(hex_prefix, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(hex_prefix, initial_stack);
interpreter.run()?;
assert_eq!(interpreter.stack(), vec![rlp_pos + U256::from(4)]);

@ -70,7 +71,7 @@ fn hex_prefix_odd_terminated_tiny() -> Result<()> {
let num_nibbles = 1.into();
let rlp_pos = U256::from(Segment::RlpRaw as usize + 2);
let initial_stack = vec![retdest, terminated, packed_nibbles, num_nibbles, rlp_pos];
let mut interpreter = Interpreter::new_with_kernel(hex_prefix, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(hex_prefix, initial_stack);
interpreter.run()?;
assert_eq!(
interpreter.stack(),

@ -2,6 +2,7 @@ use anyhow::Result;
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{BigEndianHash, H256};
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
@ -173,7 +174,7 @@ fn test_state_trie(
let mpt_hash_state_trie = KERNEL.global_labels["mpt_hash_state_trie"];

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);

initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);

@ -5,6 +5,7 @@ use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::HashedPartialTrie;
use ethereum_types::{BigEndianHash, H256, U256};
use hex_literal::hex;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cpu::kernel::constants::trie_type::PartialTrieType;
@ -24,7 +25,7 @@ fn load_all_mpts_empty() -> Result<()> {
};

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);

@ -61,7 +62,7 @@ fn load_all_mpts_leaf() -> Result<()> {
};

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);

@ -107,7 +108,7 @@ fn load_all_mpts_hash() -> Result<()> {
};

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);

@ -145,7 +146,7 @@ fn load_all_mpts_empty_branch() -> Result<()> {
};

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);

@ -197,7 +198,7 @@ fn load_all_mpts_ext_to_leaf() -> Result<()> {
};

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);

@ -243,7 +244,7 @@ fn load_mpt_txn_trie() -> Result<()> {
};

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);


@ -1,5 +1,6 @@
use anyhow::Result;
use ethereum_types::BigEndianHash;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
@ -20,7 +21,7 @@ fn mpt_read() -> Result<()> {
let mpt_read = KERNEL.global_labels["mpt_read"];

let initial_stack = vec![];
let mut interpreter = Interpreter::new_with_kernel(0, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, initial_stack);
initialize_mpts(&mut interpreter, &trie_inputs);
assert_eq!(interpreter.stack(), vec![]);


@ -1,5 +1,6 @@
use anyhow::Result;
use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::Interpreter;
@ -15,7 +16,8 @@ fn test_mstore_unpacking() -> Result<()> {
let addr = (Segment::TxnData as u64).into();
let initial_stack = vec![retdest, len, value, addr];

let mut interpreter = Interpreter::new_with_kernel(mstore_unpacking, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(mstore_unpacking, initial_stack);

interpreter.run()?;
assert_eq!(interpreter.stack(), vec![addr + U256::from(4)]);

@ -2,6 +2,7 @@ use anyhow::Result;
use ethereum_types::{Address, U256};
use hex_literal::hex;
use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField as F;
use rand::{thread_rng, Rng};

use crate::cpu::kernel::aggregator::KERNEL;
@ -47,7 +48,8 @@ fn test_process_receipt() -> Result<()> {
leftover_gas,
success,
];
let mut interpreter = Interpreter::new_with_kernel(process_receipt, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(process_receipt, initial_stack);
interpreter.set_memory_segment(
Segment::LogsData,
vec![
@ -128,7 +130,8 @@ fn test_receipt_encoding() -> Result<()> {
let expected_rlp = rlp::encode(&rlp::encode(&receipt_1));

let initial_stack: Vec<U256> = vec![retdest, 0.into(), 0.into(), 0.into()];
let mut interpreter = Interpreter::new_with_kernel(encode_receipt, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(encode_receipt, initial_stack);

// Write data to memory.
let expected_bloom_bytes = vec![
@ -248,7 +251,7 @@ fn test_receipt_bloom_filter() -> Result<()> {
// Set logs memory and initialize TxnBloom and BlockBloom segments.
let initial_stack: Vec<U256> = vec![retdest];

let mut interpreter = Interpreter::new_with_kernel(logs_bloom, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(logs_bloom, initial_stack);
let mut logs = vec![
0.into(), // unused
addr,
@ -408,7 +411,7 @@ fn test_mpt_insert_receipt() -> Result<()> {
receipt.push(num_logs.into()); // num_logs
receipt.extend(logs_0.clone());

let mut interpreter = Interpreter::new_with_kernel(0, vec![]);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(0, vec![]);
initialize_mpts(&mut interpreter, &trie_inputs);

// If TrieData is empty, we need to push 0 because the first value is always 0.
@ -562,7 +565,7 @@ fn test_bloom_two_logs() -> Result<()> {
]
.into(),
];
let mut interpreter = Interpreter::new_with_kernel(logs_bloom, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(logs_bloom, initial_stack);
interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter.
interpreter.set_memory_segment(Segment::LogsData, logs);
interpreter.set_memory_segment(Segment::Logs, vec![0.into(), 4.into()]);

@ -1,5 +1,6 @@
use anyhow::Result;
use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::Interpreter;
@ -13,7 +14,8 @@ fn test_decode_rlp_string_len_short() -> Result<()> {
0xDEADBEEFu32.into(),
U256::from(Segment::RlpRaw as usize + 2),
];
let mut interpreter = Interpreter::new_with_kernel(decode_rlp_string_len, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(decode_rlp_string_len, initial_stack);

// A couple dummy bytes, followed by "0x70" which is its own encoding.
interpreter.set_rlp_memory(vec![123, 234, 0x70]);
@ -33,7 +35,8 @@ fn test_decode_rlp_string_len_medium() -> Result<()> {
0xDEADBEEFu32.into(),
U256::from(Segment::RlpRaw as usize + 2),
];
let mut interpreter = Interpreter::new_with_kernel(decode_rlp_string_len, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(decode_rlp_string_len, initial_stack);

// A couple dummy bytes, followed by the RLP encoding of "1 2 3 4 5".
interpreter.set_rlp_memory(vec![123, 234, 0x85, 1, 2, 3, 4, 5]);
@ -53,7 +56,8 @@ fn test_decode_rlp_string_len_long() -> Result<()> {
0xDEADBEEFu32.into(),
U256::from(Segment::RlpRaw as usize + 2),
];
let mut interpreter = Interpreter::new_with_kernel(decode_rlp_string_len, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(decode_rlp_string_len, initial_stack);

// The RLP encoding of the string "1 2 3 ... 56".
interpreter.set_rlp_memory(vec![
@ -74,7 +78,8 @@ fn test_decode_rlp_list_len_short() -> Result<()> {
let decode_rlp_list_len = KERNEL.global_labels["decode_rlp_list_len"];

let initial_stack = vec![0xDEADBEEFu32.into(), U256::from(Segment::RlpRaw as usize)];
let mut interpreter = Interpreter::new_with_kernel(decode_rlp_list_len, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(decode_rlp_list_len, initial_stack);

// The RLP encoding of [1, 2, [3, 4]].
interpreter.set_rlp_memory(vec![0xc5, 1, 2, 0xc2, 3, 4]);
@ -91,7 +96,8 @@ fn test_decode_rlp_list_len_long() -> Result<()> {
let decode_rlp_list_len = KERNEL.global_labels["decode_rlp_list_len"];

let initial_stack = vec![0xDEADBEEFu32.into(), U256::from(Segment::RlpRaw as usize)];
let mut interpreter = Interpreter::new_with_kernel(decode_rlp_list_len, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(decode_rlp_list_len, initial_stack);

// The RLP encoding of [1, ..., 56].
interpreter.set_rlp_memory(vec![
@ -112,7 +118,8 @@ fn test_decode_rlp_scalar() -> Result<()> {
let decode_rlp_scalar = KERNEL.global_labels["decode_rlp_scalar"];

let initial_stack = vec![0xDEADBEEFu32.into(), U256::from(Segment::RlpRaw as usize)];
let mut interpreter = Interpreter::new_with_kernel(decode_rlp_scalar, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(decode_rlp_scalar, initial_stack);

// The RLP encoding of "12 34 56".
interpreter.set_rlp_memory(vec![0x83, 0x12, 0x34, 0x56]);

@ -1,5 +1,6 @@
use anyhow::Result;
use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::Interpreter;
@ -13,7 +14,8 @@ fn test_encode_rlp_scalar_small() -> Result<()> {
let scalar = 42.into();
let pos = U256::from(Segment::RlpRaw as usize + 2);
let initial_stack = vec![retdest, scalar, pos];
let mut interpreter = Interpreter::new_with_kernel(encode_rlp_scalar, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(encode_rlp_scalar, initial_stack);

interpreter.run()?;
let expected_stack = vec![pos + U256::from(1)]; // pos' = pos + rlp_len = 2 + 1
@ -32,7 +34,8 @@ fn test_encode_rlp_scalar_medium() -> Result<()> {
let scalar = 0x12345.into();
let pos = U256::from(Segment::RlpRaw as usize + 2);
let initial_stack = vec![retdest, scalar, pos];
let mut interpreter = Interpreter::new_with_kernel(encode_rlp_scalar, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(encode_rlp_scalar, initial_stack);

interpreter.run()?;
let expected_stack = vec![pos + U256::from(4)]; // pos' = pos + rlp_len = 2 + 4
@ -51,7 +54,8 @@ fn test_encode_rlp_160() -> Result<()> {
let string = 0x12345.into();
let pos = U256::from(Segment::RlpRaw as usize);
let initial_stack = vec![retdest, string, pos, U256::from(20)];
let mut interpreter = Interpreter::new_with_kernel(encode_rlp_fixed, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(encode_rlp_fixed, initial_stack);

interpreter.run()?;
let expected_stack = vec![pos + U256::from(1 + 20)]; // pos'
@ -71,7 +75,8 @@ fn test_encode_rlp_256() -> Result<()> {
let string = 0x12345.into();
let pos = U256::from(Segment::RlpRaw as usize);
let initial_stack = vec![retdest, string, pos, U256::from(32)];
let mut interpreter = Interpreter::new_with_kernel(encode_rlp_fixed, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(encode_rlp_fixed, initial_stack);

interpreter.run()?;
let expected_stack = vec![pos + U256::from(1 + 32)]; // pos'
@ -91,7 +96,8 @@ fn test_prepend_rlp_list_prefix_small() -> Result<()> {
let start_pos = U256::from(Segment::RlpRaw as usize + 9);
let end_pos = U256::from(Segment::RlpRaw as usize + 9 + 5);
let initial_stack = vec![retdest, start_pos, end_pos];
let mut interpreter = Interpreter::new_with_kernel(prepend_rlp_list_prefix, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(prepend_rlp_list_prefix, initial_stack);
interpreter.set_rlp_memory(vec![
// Nine 0s to leave room for the longest possible RLP list prefix.
0, 0, 0, 0, 0, 0, 0, 0, 0,
@ -120,7 +126,8 @@ fn test_prepend_rlp_list_prefix_large() -> Result<()> {
let start_pos = U256::from(Segment::RlpRaw as usize + 9);
let end_pos = U256::from(Segment::RlpRaw as usize + 9 + 60);
let initial_stack = vec![retdest, start_pos, end_pos];
let mut interpreter = Interpreter::new_with_kernel(prepend_rlp_list_prefix, initial_stack);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(prepend_rlp_list_prefix, initial_stack);

#[rustfmt::skip]
interpreter.set_rlp_memory(vec![

@ -1,4 +1,5 @@
use anyhow::Result;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::Interpreter;
@ -10,7 +11,7 @@ fn test_num_bytes_0() -> Result<()> {
let retdest = 0xDEADBEEFu32.into();
let x = 0.into();
let initial_stack = vec![retdest, x];
let mut interpreter = Interpreter::new_with_kernel(num_bytes, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(num_bytes, initial_stack);

interpreter.run()?;
assert_eq!(interpreter.stack(), vec![1.into()]);
@ -24,7 +25,7 @@ fn test_num_bytes_small() -> Result<()> {
let retdest = 0xDEADBEEFu32.into();
let x = 42.into();
let initial_stack = vec![retdest, x];
let mut interpreter = Interpreter::new_with_kernel(num_bytes, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(num_bytes, initial_stack);

interpreter.run()?;
assert_eq!(interpreter.stack(), vec![1.into()]);
@ -38,7 +39,7 @@ fn test_num_bytes_medium() -> Result<()> {
let retdest = 0xDEADBEEFu32.into();
let x = 0xAABBCCDDu32.into();
let initial_stack = vec![retdest, x];
let mut interpreter = Interpreter::new_with_kernel(num_bytes, initial_stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(num_bytes, initial_stack);

interpreter.run()?;
assert_eq!(interpreter.stack(), vec![4.into()]);

@ -1,4 +1,5 @@
use ethereum_types::U256;
use plonky2::field::goldilocks_field::GoldilocksField as F;

use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::Interpreter;
@ -117,7 +118,7 @@ fn run_test(fn_label: &str, expected_fn: fn(U256, U256) -> U256, opname: &str) {
for &x in &inputs {
for &y in &inputs {
let stack = vec![retdest, y, x];
let mut interpreter = Interpreter::new_with_kernel(fn_label, stack);
let mut interpreter: Interpreter<F> = Interpreter::new_with_kernel(fn_label, stack);
interpreter.run().unwrap();
assert_eq!(interpreter.stack_len(), 1usize, "unexpected stack size");
let output = interpreter

@ -1,6 +1,7 @@
use anyhow::Result;
use ethereum_types::U256;
use hex_literal::hex;
use plonky2::field::goldilocks_field::GoldilocksField as F;
use NormalizedTxnField::*;

use crate::cpu::kernel::aggregator::KERNEL;
@ -13,7 +14,8 @@ fn process_type_0_txn() -> Result<()> {
let process_normalized_txn = KERNEL.global_labels["process_normalized_txn"];

let retaddr = 0xDEADBEEFu32.into();
let mut interpreter = Interpreter::new_with_kernel(process_type_0_txn, vec![retaddr]);
let mut interpreter: Interpreter<F> =
Interpreter::new_with_kernel(process_type_0_txn, vec![retaddr]);

// When we reach process_normalized_txn, we're done with parsing and normalizing.
// Processing normalized transactions is outside the scope of this test.

@ -333,87 +333,3 @@ fn simulate_cpu<F: Field>(state: &mut GenerationState<F>) -> anyhow::Result<()>
transition(state)?;
}
}

fn simulate_cpu_between_labels_and_get_user_jumps<F: Field>(
initial_label: &str,
final_label: &str,
state: &mut GenerationState<F>,
) -> Option<HashMap<usize, BTreeSet<usize>>> {
if state.jumpdest_table.is_some() {
None
} else {
const JUMP_OPCODE: u8 = 0x56;
const JUMPI_OPCODE: u8 = 0x57;

let halt_pc = KERNEL.global_labels[final_label];
let mut jumpdest_addresses: HashMap<_, BTreeSet<usize>> = HashMap::new();

state.registers.program_counter = KERNEL.global_labels[initial_label];
let initial_clock = state.traces.clock();
let initial_context = state.registers.context;

log::debug!("Simulating CPU for jumpdest analysis.");

loop {
// skip jumpdest table validations in simulations
if state.registers.is_kernel
&& state.registers.program_counter == KERNEL.global_labels["jumpdest_analysis"]
{
state.registers.program_counter = KERNEL.global_labels["jumpdest_analysis_end"]
}
let pc = state.registers.program_counter;
let context = state.registers.context;
let halt = state.registers.is_kernel
&& pc == halt_pc
&& state.registers.context == initial_context;
let Ok(opcode) = u256_to_u8(state.memory.get(MemoryAddress::new(
context,
Segment::Code,
state.registers.program_counter,
))) else {
log::debug!(
"Simulated CPU for jumpdest analysis halted after {} cycles",
state.traces.clock() - initial_clock
);
return Some(jumpdest_addresses);
};
let cond = if let Ok(cond) = stack_peek(state, 1) {
cond != U256::zero()
} else {
false
};
if !state.registers.is_kernel
&& (opcode == JUMP_OPCODE || (opcode == JUMPI_OPCODE && cond))
{
// Avoid deeper calls to abort
let Ok(jumpdest) = u256_to_usize(state.registers.stack_top) else {
log::debug!(
"Simulated CPU for jumpdest analysis halted after {} cycles",
state.traces.clock() - initial_clock
);
return Some(jumpdest_addresses);
};
state.memory.set(
MemoryAddress::new(context, Segment::JumpdestBits, jumpdest),
U256::one(),
);
let jumpdest_opcode =
state
.memory
.get(MemoryAddress::new(context, Segment::Code, jumpdest));
if let Some(ctx_addresses) = jumpdest_addresses.get_mut(&context) {
ctx_addresses.insert(jumpdest);
} else {
jumpdest_addresses.insert(context, BTreeSet::from([jumpdest]));
}
}
if halt || transition(state).is_err() {
log::debug!(
"Simulated CPU for jumpdest analysis halted after {} cycles",
state.traces.clock() - initial_clock
);
return Some(jumpdest_addresses);
}
}
}
}

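Editor's note, not part of the patch: the `get_mut`/`insert` branch above keeps one `BTreeSet` of jump destinations per call context. A minimal standalone sketch of the same bookkeeping, written with the equivalent `entry` API; the helper name is hypothetical:

    use std::collections::{BTreeSet, HashMap};

    // Hypothetical helper mirroring the per-context bookkeeping above:
    // record one user-code jump destination under its context id.
    fn record_jumpdest(table: &mut HashMap<usize, BTreeSet<usize>>, context: usize, jumpdest: usize) {
        // entry().or_default() folds the get_mut / insert branches into one call.
        table.entry(context).or_default().insert(jumpdest);
    }

    fn main() {
        let mut table: HashMap<usize, BTreeSet<usize>> = HashMap::new();
        record_jumpdest(&mut table, 1, 0x10);
        record_jumpdest(&mut table, 1, 0x42);
        record_jumpdest(&mut table, 2, 0x08);
        assert_eq!(table[&1], BTreeSet::from([0x10, 0x42]));
    }

Using a `BTreeSet` keeps each context's recorded destinations deduplicated and ordered.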
@ -10,12 +10,14 @@ use plonky2::field::types::Field;
use serde::{Deserialize, Serialize};

use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cpu::kernel::interpreter::simulate_cpu_and_get_user_jumps;
use crate::cpu::kernel::opcodes::get_push_opcode;
use crate::extension_tower::{FieldExt, Fp12, BLS381, BN254};
use crate::generation::prover_input::EvmField::{
Bls381Base, Bls381Scalar, Bn254Base, Bn254Scalar, Secp256k1Base, Secp256k1Scalar,
};
use crate::generation::prover_input::FieldOp::{Inverse, Sqrt};
use crate::generation::simulate_cpu_between_labels_and_get_user_jumps;
use crate::generation::state::GenerationState;
use crate::memory::segments::Segment;
use crate::memory::segments::Segment::BnPairing;
@ -250,6 +252,7 @@ impl<F: Field> GenerationState<F> {

if self.jumpdest_table.is_none() {
self.generate_jumpdest_table()?;
log::debug!("jdt = {:?}", self.jumpdest_table);
}

let Some(jumpdest_table) = &mut self.jumpdest_table else {
@ -258,12 +261,19 @@ impl<F: Field> GenerationState<F> {
));
};

let jd_len = jumpdest_table.len();

if let Some(ctx_jumpdest_table) = jumpdest_table.get_mut(&context)
&& let Some(next_jumpdest_address) = ctx_jumpdest_table.pop()
{
log::debug!(
"jumpdest_table_len = {:?}, ctx_jumpdest_table.len = {:?}",
jd_len,
ctx_jumpdest_table.len()
);
Ok((next_jumpdest_address + 1).into())
} else {
self.jumpdest_table = None;
jumpdest_table.remove(&context);
Ok(U256::zero())
}
}
@ -276,9 +286,17 @@ impl<F: Field> GenerationState<F> {
ProverInputError::InvalidJumpdestSimulation,
));
};

let jd_len = jumpdest_table.len();

if let Some(ctx_jumpdest_table) = jumpdest_table.get_mut(&context)
&& let Some(next_jumpdest_proof) = ctx_jumpdest_table.pop()
{
log::debug!(
"jumpdest_table_len = {:?}, ctx_jumpdest_table.len = {:?}",
jd_len,
ctx_jumpdest_table.len()
);
Ok(next_jumpdest_proof.into())
} else {
Err(ProgramError::ProverInputError(
@ -292,24 +310,9 @@ impl<F: Field> GenerationState<F> {
/// Simulate the user's code and store all the jump addresses with their respective contexts.
fn generate_jumpdest_table(&mut self) -> Result<(), ProgramError> {
let checkpoint = self.checkpoint();
let memory = self.memory.clone();

// Simulate the user's code and (unnecessarily) part of the kernel code, skipping the validate table call
let Some(jumpdest_table) = simulate_cpu_between_labels_and_get_user_jumps(
"jumpdest_analysis_end",
"terminate_common",
self,
) else {
self.jumpdest_table = Some(HashMap::new());
return Ok(());
};

// Return to the state before starting the simulation
self.rollback(checkpoint);
self.memory = memory;

// Find proofs for all contexts
self.set_jumpdest_analysis_inputs(jumpdest_table);
self.jumpdest_table = simulate_cpu_and_get_user_jumps("terminate_common", self);

Ok(())
}
@ -333,6 +336,10 @@ impl<F: Field> GenerationState<F> {
)));
}

pub(crate) fn get_current_code(&self) -> Result<Vec<u8>, ProgramError> {
self.get_code(self.registers.context)
}

fn get_code(&self, context: usize) -> Result<Vec<u8>, ProgramError> {
let code_len = self.get_code_len(context)?;
let code = (0..code_len)
@ -354,12 +361,28 @@ impl<F: Field> GenerationState<F> {
)))?;
Ok(code_len)
}

fn get_current_code_len(&self) -> Result<usize, ProgramError> {
self.get_code_len(self.registers.context)
}

pub(crate) fn set_jumpdest_bits(&mut self, code: &[u8]) {
const JUMPDEST_OPCODE: u8 = 0x5b;
for (pos, opcode) in CodeIterator::new(code) {
if opcode == JUMPDEST_OPCODE {
self.memory.set(
MemoryAddress::new(self.registers.context, Segment::JumpdestBits, pos),
U256::one(),
);
}
}
}
}

/// For all address in `jumpdest_table`, each bounded by `largest_address`,
/// For all address in `jumpdest_table` smaller than `largest_address`,
/// this function searches for a proof. A proof is the closest address
/// for which none of the previous 32 bytes in the code (including opcodes
/// and pushed bytes) are PUSHXX and the address is in its range. It returns
/// and pushed bytes) is a PUSHXX and the address is in its range. It returns
/// a vector of even size containing proofs followed by their addresses.
fn get_proofs_and_jumpdests(
code: &[u8],
@ -370,30 +393,24 @@ fn get_proofs_and_jumpdests(
const PUSH32_OPCODE: u8 = 0x7f;
let (proofs, _) = CodeIterator::until(code, largest_address + 1).fold(
(vec![], 0),
|(mut proofs, acc), (pos, _opcode)| {
let has_prefix = if let Some(prefix_start) = pos.checked_sub(32) {
code[prefix_start..pos]
|(mut proofs, last_proof), (addr, opcode)| {
let has_prefix = if let Some(prefix_start) = addr.checked_sub(32) {
code[prefix_start..addr]
.iter()
.enumerate()
.fold(true, |acc, (prefix_pos, &byte)| {
let cond1 = byte > PUSH32_OPCODE;
let cond2 = (prefix_start + prefix_pos) as i32
+ (byte as i32 - PUSH1_OPCODE as i32)
+ 1
< pos as i32;
acc && (cond1 || cond2)
})
.rev()
.zip(0..32)
.all(|(&byte, i)| byte > PUSH32_OPCODE || byte < PUSH1_OPCODE + i)
} else {
false
};
let acc = if has_prefix { pos - 32 } else { acc };
if jumpdest_table.contains(&pos) {
let last_proof = if has_prefix { addr - 32 } else { last_proof };
if jumpdest_table.contains(&addr) {
// Push the proof
proofs.push(acc);
proofs.push(last_proof);
// Push the address
proofs.push(pos);
proofs.push(addr);
}
(proofs, acc)
(proofs, last_proof)
},
);
proofs

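Editor's note, not part of the patch: the doc comment and the `.rev().zip(0..32).all(...)` closure above carry the actual proof condition, and `CodeIterator::until` (defined elsewhere in the crate) appears, as used here, to yield `(address, opcode)` pairs while stepping over PUSH immediates. A standalone restatement of just the 32-byte prefix predicate, as a hypothetical helper that mirrors the condition in the diff and assumes `addr <= code.len()`:

    const PUSH1_OPCODE: u8 = 0x60;
    const PUSH32_OPCODE: u8 = 0x7f;

    // For the byte at distance i + 1 before `addr` (i = 0 is the byte just before
    // `addr`), a PUSHk opcode (byte value 0x5f + k) covers `addr` with its
    // immediate data exactly when k > i, i.e. when byte >= PUSH1_OPCODE + i.
    // The 32-byte window therefore proves `addr` is a real instruction boundary
    // iff every byte is either not a PUSH at all (> 0x7f) or too short to reach
    // `addr` (< 0x60 + i).
    fn prefix_proves(code: &[u8], addr: usize) -> bool {
        match addr.checked_sub(32) {
            Some(start) => code[start..addr]
                .iter()
                .rev()
                .zip(0..32)
                .all(|(&byte, i)| byte > PUSH32_OPCODE || byte < PUSH1_OPCODE + i),
            None => false,
        }
    }

When the predicate holds for `addr`, `addr - 32` can be recorded as the proof paired with `addr`, which is what `last_proof` tracks across the fold above.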