More MPT logic

This commit is contained in:
Daniel Lubarov 2022-09-22 20:09:48 -07:00
parent 125ad565db
commit c7b03cfe9a
20 changed files with 475 additions and 184 deletions

View File

@ -6,8 +6,6 @@
Our zkEVM is designed for efficient verification by STARKs \cite{stark}, particularly by an AIR with degree 3 constraints. In this model, the prover bottleneck is typically constructing Merkle trees, particularly constructing the tree containing low-degree extensions of witness polynomials.
More specifically, we target a constraint system of degree 3.
\subsection{Field selection}
\label{field}

View File

@ -1,4 +1,16 @@
\section{Merkle Patricia tries}
\label{tries}
TODO
\subsection{Internal memory format}
Within our zkEVM's kernel memory, trie nodes are encoded as follows:
\begin{enumerate}
\item An empty node is encoded as $(\texttt{MPT\_NODE\_EMPTY})$.
\item A branch node is encoded as $(\texttt{MPT\_NODE\_BRANCH}, c_1, \dots, c_{16}, v)$, where each $c_i$ is a pointer to a child node, and $v$ is a leaf payload.
\item An extension node is encoded as $(\texttt{MPT\_NODE\_EXTENSION}, k, c)$, where $k$ is a 2-tuple $(\texttt{num\_nibbles}, \texttt{packed\_nibbles})$, and $c$ is a pointer to a child node.
\item A leaf node is encoded as $(\texttt{MPT\_NODE\_LEAF}, k, v)$, where $k$ is a 2-tuple as above, and $v$ is a leaf payload.
\item A digest node is encoded as $(\texttt{MPT\_NODE\_DIGEST}, d)$, where $d$ is a Keccak256 digest.
\end{enumerate}
\subsection{Prover input format}

Binary file not shown.

View File

@ -42,6 +42,7 @@ pub(crate) fn combined_kernel() -> Kernel {
include_str!("asm/rlp/decode.asm"),
include_str!("asm/rlp/read_to_memory.asm"),
include_str!("asm/mpt/hash.asm"),
include_str!("asm/mpt/load.asm"),
include_str!("asm/mpt/read.asm"),
include_str!("asm/mpt/storage_read.asm"),
include_str!("asm/mpt/storage_write.asm"),

View File

@ -1,6 +1,3 @@
// After the transaction data has been parsed into a normalized set of fields
// (see NormalizedTxnField), this routine processes the transaction.
global intrinsic_gas:
// stack: retdest
// Calculate the number of zero and nonzero bytes in the txn data.

View File

@ -1,6 +1,8 @@
// After the transaction data has been parsed into a normalized set of fields
// (see NormalizedTxnField), this routine processes the transaction.
// TODO: Save checkpoints in @CTX_METADATA_STATE_TRIE_CHECKPOINT_PTR and @SEGMENT_STORAGE_TRIE_CHECKPOINT_PTRS.
global process_normalized_txn:
// stack: (empty)
PUSH validate

View File

@ -12,7 +12,7 @@
// stack: value
PUSH $field
// stack: offset, value
%mload_kernel(@SEGMENT_GLOBAL_METADATA)
%mstore_kernel(@SEGMENT_GLOBAL_METADATA)
// stack: (empty)
%endmacro
@ -30,18 +30,18 @@
// stack: value
PUSH $field
// stack: offset, value
%mload_current(@SEGMENT_CONTEXT_METADATA)
%mstore_current(@SEGMENT_CONTEXT_METADATA)
// stack: (empty)
%endmacro
%macro address
%mload_context_metadata(0) // TODO: Read proper field.
%mload_context_metadata(@CTX_METADATA_ADDRESS)
%endmacro
%macro sender
%mload_context_metadata(0) // TODO: Read proper field.
%mload_context_metadata(@CTX_METADATA_CALLER)
%endmacro
%macro callvalue
%mload_context_metadata(0) // TODO: Read proper field.
%mload_context_metadata(@CTX_METADATA_CALL_VALUE)
%endmacro

View File

@ -1,12 +1,176 @@
// TODO: Receipt trie leaves are variable-length, so we need to be careful not
// to permit buffer over-reads.
// Load all partial trie data from prover inputs.
global mpt_load_all:
// First set GLOBAL_METADATA_TRIE_DATA_SIZE = 1.
global load_all_mpts:
// stack: retdest
// First set @GLOBAL_METADATA_TRIE_DATA_SIZE = 1.
// We don't want it to start at 0, as we use 0 as a null pointer.
PUSH 1
%mstore(@GLOBAL_METADATA_TRIE_DATA_SIZE)
%set_trie_data_size
TODO
%load_mpt_and_return_root_ptr
%mstore_global_metadata(@GLOBAL_METADATA_STATE_TRIE_ROOT)
%load_mpt_and_return_root_ptr %mstore_global_metadata(@GLOBAL_METADATA_TXN_TRIE_ROOT)
%load_mpt_and_return_root_ptr %mstore_global_metadata(@GLOBAL_METADATA_RECEIPT_TRIE_ROOT)
mpt_load_state:
PROVER_INPUT(mpt::state)
TODO
PROVER_INPUT(mpt)
// stack: num_storage_tries, retdest
DUP1 %mstore_global_metadata(@GLOBAL_METADATA_NUM_STORAGE_TRIES)
// stack: num_storage_tries, retdest
PUSH 0 // i = 0
// stack: i, num_storage_tries, retdest
storage_trie_loop:
DUP2 DUP2 EQ
// stack: i == num_storage_tries, i, num_storage_tries, retdest
%jumpi(storage_trie_loop_end)
// stack: i, num_storage_tries, retdest
PROVER_INPUT(mpt)
// stack: storage_trie_addr, i, num_storage_tries, retdest
DUP2
// stack: i, storage_trie_addr, i, num_storage_tries, retdest
%mstore_kernel(@SEGMENT_STORAGE_TRIE_ADDRS)
// stack: i, num_storage_tries, retdest
%load_mpt_and_return_root_ptr
// stack: root_ptr, i, num_storage_tries, retdest
DUP2
// stack: i, root_ptr, i, num_storage_tries, retdest
%mstore_kernel(@SEGMENT_STORAGE_TRIE_PTRS)
// stack: i, num_storage_tries, retdest
%jump(storage_trie_loop)
storage_trie_loop_end:
// TODO: Hash tries and set @GLOBAL_METADATA_STATE_TRIE_DIGEST_BEFORE, etc.
// stack: i, num_storage_tries, retdest
%pop2
// stack: retdest
JUMP
// Load an MPT from prover inputs.
// Pre stack: retdest
// Post stack: (empty)
load_mpt:
// stack: retdest
PROVER_INPUT(mpt)
// stack: node_type, retdest
DUP1 %append_to_trie_data
// stack: node_type, retdest
DUP1 %eq_const(@MPT_NODE_EMPTY) %jumpi(load_mpt_empty)
DUP1 %eq_const(@MPT_NODE_BRANCH) %jumpi(load_mpt_branch)
DUP1 %eq_const(@MPT_NODE_EXTENSION) %jumpi(load_mpt_extension)
DUP1 %eq_const(@MPT_NODE_LEAF) %jumpi(load_mpt_leaf)
DUP1 %eq_const(@MPT_NODE_HASH) %jumpi(load_mpt_digest)
PANIC // Invalid node type
load_mpt_empty:
// stack: node_type, retdest
POP
// stack: retdest
JUMP
load_mpt_branch:
// stack: node_type, retdest
POP
// stack: retdest
%get_trie_data_size
// stack: ptr_children, retdest
DUP1 %add_const(16)
// stack: ptr_leaf, ptr_children, retdest
%set_trie_data_size
// stack: ptr_children, retdest
%load_leaf_value
// Save the current trie_data_size (which now points to the end of the leaf)
// for later, then have it point to the start of our 16 child pointers.
%get_trie_data_size
// stack: ptr_end_of_leaf, ptr_children, retdest
SWAP1
%set_trie_data_size
// stack: ptr_end_of_leaf, retdest
// Load the 16 children.
%rep 16
%load_mpt_and_return_root_ptr
// stack: child_ptr, ptr_end_of_leaf, retdest
%append_to_trie_data
// stack: ptr_end_of_leaf, retdest
%endrep
%set_trie_data_size
// stack: retdest
JUMP
load_mpt_extension:
// stack: node_type, retdest
POP
// stack: retdest
PROVER_INPUT(mpt) // read num_nibbles
%append_to_trie_data
PROVER_INPUT(mpt) // read packed_nibbles
%append_to_trie_data
// stack: retdest
%load_mpt_and_return_root_ptr
// stack: child_ptr, retdest
%append_to_trie_data
// stack: retdest
JUMP
load_mpt_leaf:
// stack: node_type, retdest
POP
// stack: retdest
PROVER_INPUT(mpt) // read num_nibbles
%append_to_trie_data
PROVER_INPUT(mpt) // read packed_nibbles
%append_to_trie_data
// stack: retdest
%load_leaf_value
// stack: retdest
JUMP
load_mpt_digest:
// stack: node_type, retdest
POP
// stack: retdest
PROVER_INPUT(mpt) // read digest
%append_to_trie_data
// stack: retdest
JUMP
// Convenience macro to call load_mpt and return where we left off.
%macro load_mpt
PUSH %%after
%jump(load_mpt)
%%after:
%endmacro
%macro load_mpt_and_return_root_ptr
// stack: (empty)
%get_trie_data_size
// stack: ptr
%load_mpt
// stack: ptr
%endmacro
// Load a leaf from prover input, and append it to trie data.
%macro load_leaf_value
// stack: (empty)
PROVER_INPUT(mpt)
// stack: leaf_len
%%loop:
DUP1 ISZERO
// stack: leaf_len == 0, leaf_len
%jumpi(%%finish)
// stack: leaf_len
PROVER_INPUT(mpt)
// stack: leaf_part, leaf_len
%append_to_trie_data
// stack: leaf_len
%sub_const(1)
// stack: leaf_len'
%jump(%%loop)
%%finish:
POP
// stack: (empty)
%endmacro

View File

@ -16,12 +16,12 @@ global mpt_read:
SWAP1 %add_const(1) SWAP1
// stack: node_type, node_payload_ptr, key, nibbles, retdest
DUP1 %eq_const(@MPT_NODE_EMPTY) %jumpi(mpt_read_empty)
DUP1 %eq_const(@MPT_NODE_BRANCH) %jumpi(mpt_read_branch)
DUP1 %eq_const(@MPT_NODE_EMPTY) %jumpi(mpt_read_empty)
DUP1 %eq_const(@MPT_NODE_BRANCH) %jumpi(mpt_read_branch)
DUP1 %eq_const(@MPT_NODE_EXTENSION) %jumpi(mpt_read_extension)
DUP1 %eq_const(@MPT_NODE_LEAF) %jumpi(mpt_read_leaf)
DUP1 %eq_const(@MPT_NODE_LEAF) %jumpi(mpt_read_leaf)
// There's still the MPT_NODE_DIGEST case, but if we hit a digest node,
// There's still the MPT_NODE_HASH case, but if we hit a digest node,
// it means the prover failed to provide necessary Merkle data, so panic.
PANIC

View File

@ -9,3 +9,29 @@
%mstore_kernel(@SEGMENT_TRIE_DATA)
// stack: (empty)
%endmacro
// Push the current size of the trie data segment, i.e. the offset at which
// the next trie node would be appended.
%macro get_trie_data_size
// stack: (empty)
%mload_global_metadata(@GLOBAL_METADATA_TRIE_DATA_SIZE)
// stack: trie_data_size
%endmacro
// Overwrite the trie data size, e.g. to reserve cells before writing them.
%macro set_trie_data_size
// stack: trie_data_size
%mstore_global_metadata(@GLOBAL_METADATA_TRIE_DATA_SIZE)
// stack: (empty)
%endmacro
// Equivalent to: trie_data[trie_data_size++] = value
%macro append_to_trie_data
// stack: value
%get_trie_data_size
// stack: trie_data_size, value
DUP1
// Increment the stored size first; the pre-increment value is kept as the
// store offset.
%add_const(1)
// stack: trie_data_size', trie_data_size, value
%set_trie_data_size
// stack: trie_data_size, value
%mstore_trie_data
// stack: (empty)
%endmacro

View File

@ -85,20 +85,20 @@ pub(crate) fn assemble(
let mut local_labels = Vec::with_capacity(files.len());
let mut macro_counter = 0;
for file in files {
let expanded_file = expand_macros(file.body, &macros, &mut macro_counter);
let expanded_file = expand_repeats(expanded_file);
let expanded_file = inline_constants(expanded_file, &constants);
let mut expanded_file = expand_stack_manipulation(expanded_file);
let mut file = file.body;
file = expand_macros(file, &macros, &mut macro_counter);
file = inline_constants(file, &constants);
file = expand_stack_manipulation(file);
if optimize {
optimize_asm(&mut expanded_file);
optimize_asm(&mut file);
}
local_labels.push(find_labels(
&expanded_file,
&file,
&mut offset,
&mut global_labels,
&mut prover_inputs,
));
expanded_files.push(expanded_file);
expanded_files.push(file);
}
let mut code = vec![];
for (file, locals) in izip!(expanded_files, local_labels) {
@ -146,6 +146,11 @@ fn expand_macros(
Item::MacroCall(m, args) => {
expanded.extend(expand_macro_call(m, args, macros, macro_counter));
}
Item::Repeat(count, body) => {
for _ in 0..count.as_usize() {
expanded.extend(expand_macros(body.clone(), macros, macro_counter));
}
}
item => {
expanded.push(item);
}
@ -187,12 +192,10 @@ fn expand_macro_call(
Item::MacroCall(name, args) => {
let expanded_args = args
.iter()
.map(|arg| {
if let PushTarget::MacroVar(var) = arg {
get_arg(var)
} else {
arg.clone()
}
.map(|arg| match arg {
PushTarget::MacroVar(var) => get_arg(var),
PushTarget::MacroLabel(l) => PushTarget::Label(get_actual_label(l)),
_ => arg.clone(),
})
.collect();
Item::MacroCall(name.clone(), expanded_args)
@ -220,21 +223,6 @@ fn expand_macro_call(
expand_macros(expanded_item, macros, macro_counter)
}
fn expand_repeats(body: Vec<Item>) -> Vec<Item> {
let mut expanded = vec![];
for item in body {
if let Item::Repeat(count, block) = item {
let reps = count.as_usize();
for _ in 0..reps {
expanded.extend(block.clone());
}
} else {
expanded.push(item);
}
}
expanded
}
fn inline_constants(body: Vec<Item>, constants: &HashMap<String, U256>) -> Vec<Item> {
let resolve_const = |c| {
*constants
@ -494,7 +482,8 @@ mod tests {
#[test]
fn macro_with_label() {
let files = &[
"%macro spin %%start: PUSH %%start JUMP %endmacro",
"%macro jump(x) PUSH $x JUMP %endmacro",
"%macro spin %%start: %jump(%%start) %endmacro",
"%spin %spin",
];
let kernel = parse_and_assemble_ext(files, HashMap::new(), false);
@ -535,6 +524,11 @@ mod tests {
assert_eq!(kernel.code, vec![push1, 5, push1, 6, push1, 7]);
}
#[test]
fn pop2_macro() {
parse_and_assemble(&["%macro pop2 %rep 2 pop %endrep %endmacro", "%pop2"]);
}
#[test]
#[should_panic]
fn macro_with_wrong_vars() {

View File

@ -1,12 +1,13 @@
use std::collections::HashMap;
use anyhow::{anyhow, bail};
use anyhow::{anyhow, bail, ensure};
use ethereum_types::{BigEndianHash, U256, U512};
use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::assembler::Kernel;
use crate::cpu::kernel::global_metadata::GlobalMetadata;
use crate::cpu::kernel::txn_fields::NormalizedTxnField;
use crate::generation::memory::{MemoryContextState, MemorySegmentState};
use crate::generation::prover_input::ProverInputFn;
@ -60,7 +61,7 @@ pub struct Interpreter<'a> {
offset: usize,
context: usize,
pub(crate) memory: InterpreterMemory,
generation_state: GenerationState<F>,
pub(crate) generation_state: GenerationState<F>,
prover_inputs_map: &'a HashMap<usize, ProverInputFn>,
pub(crate) halt_offsets: Vec<usize>,
running: bool,
@ -151,6 +152,14 @@ impl<'a> Interpreter<'a> {
&self.memory.context_memory[0].segments[Segment::TxnData as usize].content
}
pub(crate) fn get_global_metadata_field(&self, field: GlobalMetadata) -> U256 {
self.memory.context_memory[0].segments[Segment::GlobalMetadata as usize].get(field as usize)
}
pub(crate) fn get_trie_data(&self) -> &[U256] {
&self.memory.context_memory[0].segments[Segment::TrieData as usize].content
}
pub(crate) fn get_rlp_memory(&self) -> Vec<u8> {
self.memory.context_memory[self.context].segments[Segment::RlpRaw as usize]
.content
@ -192,99 +201,99 @@ impl<'a> Interpreter<'a> {
let opcode = self.code().get(self.offset).byte(0);
self.incr(1);
match opcode {
0x00 => self.run_stop(), // "STOP",
0x01 => self.run_add(), // "ADD",
0x02 => self.run_mul(), // "MUL",
0x03 => self.run_sub(), // "SUB",
0x04 => self.run_div(), // "DIV",
0x05 => todo!(), // "SDIV",
0x06 => self.run_mod(), // "MOD",
0x07 => todo!(), // "SMOD",
0x08 => self.run_addmod(), // "ADDMOD",
0x09 => self.run_mulmod(), // "MULMOD",
0x0a => self.run_exp(), // "EXP",
0x0b => todo!(), // "SIGNEXTEND",
0x10 => self.run_lt(), // "LT",
0x11 => self.run_gt(), // "GT",
0x12 => todo!(), // "SLT",
0x13 => todo!(), // "SGT",
0x14 => self.run_eq(), // "EQ",
0x15 => self.run_iszero(), // "ISZERO",
0x16 => self.run_and(), // "AND",
0x17 => self.run_or(), // "OR",
0x18 => self.run_xor(), // "XOR",
0x19 => self.run_not(), // "NOT",
0x1a => self.run_byte(), // "BYTE",
0x1b => self.run_shl(), // "SHL",
0x1c => todo!(), // "SHR",
0x1d => todo!(), // "SAR",
0x20 => self.run_keccak256(), // "KECCAK256",
0x30 => todo!(), // "ADDRESS",
0x31 => todo!(), // "BALANCE",
0x32 => todo!(), // "ORIGIN",
0x33 => todo!(), // "CALLER",
0x34 => todo!(), // "CALLVALUE",
0x35 => todo!(), // "CALLDATALOAD",
0x36 => todo!(), // "CALLDATASIZE",
0x37 => todo!(), // "CALLDATACOPY",
0x38 => todo!(), // "CODESIZE",
0x39 => todo!(), // "CODECOPY",
0x3a => todo!(), // "GASPRICE",
0x3b => todo!(), // "EXTCODESIZE",
0x3c => todo!(), // "EXTCODECOPY",
0x3d => todo!(), // "RETURNDATASIZE",
0x3e => todo!(), // "RETURNDATACOPY",
0x3f => todo!(), // "EXTCODEHASH",
0x40 => todo!(), // "BLOCKHASH",
0x41 => todo!(), // "COINBASE",
0x42 => todo!(), // "TIMESTAMP",
0x43 => todo!(), // "NUMBER",
0x44 => todo!(), // "DIFFICULTY",
0x45 => todo!(), // "GASLIMIT",
0x46 => todo!(), // "CHAINID",
0x48 => todo!(), // "BASEFEE",
0x49 => self.run_prover_input()?, // "PROVER_INPUT",
0x50 => self.run_pop(), // "POP",
0x51 => self.run_mload(), // "MLOAD",
0x52 => self.run_mstore(), // "MSTORE",
0x53 => self.run_mstore8(), // "MSTORE8",
0x54 => todo!(), // "SLOAD",
0x55 => todo!(), // "SSTORE",
0x56 => self.run_jump(), // "JUMP",
0x57 => self.run_jumpi(), // "JUMPI",
0x58 => todo!(), // "GETPC",
0x59 => todo!(), // "MSIZE",
0x5a => todo!(), // "GAS",
0x5b => self.run_jumpdest(), // "JUMPDEST",
0x5c => todo!(), // "GET_STATE_ROOT",
0x5d => todo!(), // "SET_STATE_ROOT",
0x5e => todo!(), // "GET_RECEIPT_ROOT",
0x5f => todo!(), // "SET_RECEIPT_ROOT",
x if (0x60..0x80).contains(&x) => self.run_push(x - 0x5f), // "PUSH"
x if (0x80..0x90).contains(&x) => self.run_dup(x - 0x7f), // "DUP"
x if (0x90..0xa0).contains(&x) => self.run_swap(x - 0x8f), // "SWAP"
0xa0 => todo!(), // "LOG0",
0xa1 => todo!(), // "LOG1",
0xa2 => todo!(), // "LOG2",
0xa3 => todo!(), // "LOG3",
0xa4 => todo!(), // "LOG4",
0xa5 => bail!("Executed PANIC"), // "PANIC",
0xf0 => todo!(), // "CREATE",
0xf1 => todo!(), // "CALL",
0xf2 => todo!(), // "CALLCODE",
0xf3 => todo!(), // "RETURN",
0xf4 => todo!(), // "DELEGATECALL",
0xf5 => todo!(), // "CREATE2",
0xf6 => self.run_get_context(), // "GET_CONTEXT",
0xf7 => self.run_set_context(), // "SET_CONTEXT",
0xf8 => todo!(), // "CONSUME_GAS",
0xf9 => todo!(), // "EXIT_KERNEL",
0xfa => todo!(), // "STATICCALL",
0xfb => self.run_mload_general(), // "MLOAD_GENERAL",
0xfc => self.run_mstore_general(), // "MSTORE_GENERAL",
0xfd => todo!(), // "REVERT",
0xfe => bail!("Executed INVALID"), // "INVALID",
0xff => todo!(), // "SELFDESTRUCT",
0x00 => self.run_stop(), // "STOP",
0x01 => self.run_add(), // "ADD",
0x02 => self.run_mul(), // "MUL",
0x03 => self.run_sub(), // "SUB",
0x04 => self.run_div(), // "DIV",
0x05 => todo!(), // "SDIV",
0x06 => self.run_mod(), // "MOD",
0x07 => todo!(), // "SMOD",
0x08 => self.run_addmod(), // "ADDMOD",
0x09 => self.run_mulmod(), // "MULMOD",
0x0a => self.run_exp(), // "EXP",
0x0b => todo!(), // "SIGNEXTEND",
0x10 => self.run_lt(), // "LT",
0x11 => self.run_gt(), // "GT",
0x12 => todo!(), // "SLT",
0x13 => todo!(), // "SGT",
0x14 => self.run_eq(), // "EQ",
0x15 => self.run_iszero(), // "ISZERO",
0x16 => self.run_and(), // "AND",
0x17 => self.run_or(), // "OR",
0x18 => self.run_xor(), // "XOR",
0x19 => self.run_not(), // "NOT",
0x1a => self.run_byte(), // "BYTE",
0x1b => self.run_shl(), // "SHL",
0x1c => todo!(), // "SHR",
0x1d => todo!(), // "SAR",
0x20 => self.run_keccak256(), // "KECCAK256",
0x30 => todo!(), // "ADDRESS",
0x31 => todo!(), // "BALANCE",
0x32 => todo!(), // "ORIGIN",
0x33 => todo!(), // "CALLER",
0x34 => todo!(), // "CALLVALUE",
0x35 => todo!(), // "CALLDATALOAD",
0x36 => todo!(), // "CALLDATASIZE",
0x37 => todo!(), // "CALLDATACOPY",
0x38 => todo!(), // "CODESIZE",
0x39 => todo!(), // "CODECOPY",
0x3a => todo!(), // "GASPRICE",
0x3b => todo!(), // "EXTCODESIZE",
0x3c => todo!(), // "EXTCODECOPY",
0x3d => todo!(), // "RETURNDATASIZE",
0x3e => todo!(), // "RETURNDATACOPY",
0x3f => todo!(), // "EXTCODEHASH",
0x40 => todo!(), // "BLOCKHASH",
0x41 => todo!(), // "COINBASE",
0x42 => todo!(), // "TIMESTAMP",
0x43 => todo!(), // "NUMBER",
0x44 => todo!(), // "DIFFICULTY",
0x45 => todo!(), // "GASLIMIT",
0x46 => todo!(), // "CHAINID",
0x48 => todo!(), // "BASEFEE",
0x49 => self.run_prover_input()?, // "PROVER_INPUT",
0x50 => self.run_pop(), // "POP",
0x51 => self.run_mload(), // "MLOAD",
0x52 => self.run_mstore(), // "MSTORE",
0x53 => self.run_mstore8(), // "MSTORE8",
0x54 => todo!(), // "SLOAD",
0x55 => todo!(), // "SSTORE",
0x56 => self.run_jump(), // "JUMP",
0x57 => self.run_jumpi(), // "JUMPI",
0x58 => todo!(), // "GETPC",
0x59 => todo!(), // "MSIZE",
0x5a => todo!(), // "GAS",
0x5b => self.run_jumpdest(), // "JUMPDEST",
0x5c => todo!(), // "GET_STATE_ROOT",
0x5d => todo!(), // "SET_STATE_ROOT",
0x5e => todo!(), // "GET_RECEIPT_ROOT",
0x5f => todo!(), // "SET_RECEIPT_ROOT",
x if (0x60..0x80).contains(&x) => self.run_push(x - 0x5f), // "PUSH"
x if (0x80..0x90).contains(&x) => self.run_dup(x - 0x7f), // "DUP"
x if (0x90..0xa0).contains(&x) => self.run_swap(x - 0x8f)?, // "SWAP"
0xa0 => todo!(), // "LOG0",
0xa1 => todo!(), // "LOG1",
0xa2 => todo!(), // "LOG2",
0xa3 => todo!(), // "LOG3",
0xa4 => todo!(), // "LOG4",
0xa5 => bail!("Executed PANIC"), // "PANIC",
0xf0 => todo!(), // "CREATE",
0xf1 => todo!(), // "CALL",
0xf2 => todo!(), // "CALLCODE",
0xf3 => todo!(), // "RETURN",
0xf4 => todo!(), // "DELEGATECALL",
0xf5 => todo!(), // "CREATE2",
0xf6 => self.run_get_context(), // "GET_CONTEXT",
0xf7 => self.run_set_context(), // "SET_CONTEXT",
0xf8 => todo!(), // "CONSUME_GAS",
0xf9 => todo!(), // "EXIT_KERNEL",
0xfa => todo!(), // "STATICCALL",
0xfb => self.run_mload_general(), // "MLOAD_GENERAL",
0xfc => self.run_mstore_general(), // "MSTORE_GENERAL",
0xfd => todo!(), // "REVERT",
0xfe => bail!("Executed INVALID"), // "INVALID",
0xff => todo!(), // "SELFDESTRUCT",
_ => bail!("Unrecognized opcode {}.", opcode),
};
Ok(())
@ -522,9 +531,11 @@ impl<'a> Interpreter<'a> {
self.push(self.stack()[self.stack().len() - n as usize]);
}
fn run_swap(&mut self, n: u8) {
fn run_swap(&mut self, n: u8) -> anyhow::Result<()> {
let len = self.stack().len();
ensure!(len > n as usize);
self.stack_mut().swap(len - 1, len - n as usize - 1);
Ok(())
}
fn run_get_context(&mut self) {

View File

@ -2,6 +2,7 @@ mod core;
mod curve_ops;
mod ecrecover;
mod exp;
mod mpt;
mod packing;
mod rlp;
mod transaction_parsing;

View File

@ -0,0 +1,67 @@
use anyhow::Result;
use eth_trie_utils::partial_trie::{Nibbles, PartialTrie};
use ethereum_types::U256;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::trie_type::PartialTrieType;
use crate::cpu::kernel::global_metadata::GlobalMetadata;
use crate::cpu::kernel::interpreter::Interpreter;
use crate::generation::mpt::all_mpt_prover_inputs_reversed;
use crate::generation::TrieInputs;
#[test]
fn load_all_mpts() -> Result<()> {
    // Build a minimal state trie: a single leaf whose payload is an
    // RLP-encoded account (nonce, balance, storage_root, code_hash).
    let nonce = U256::from(1111);
    let balance = U256::from(2222);
    let storage_root = U256::from(3333);
    let code_hash = U256::from(4444);
    let value_rlp = rlp::encode_list(&[nonce, balance, storage_root, code_hash]);
    let trie_inputs = TrieInputs {
        state_trie: PartialTrie::Leaf {
            nibbles: Nibbles {
                count: 2,
                packed: 123.into(),
            },
            value: value_rlp.to_vec(),
        },
        transactions_trie: Default::default(),
        receipts_trie: Default::default(),
        storage_tries: vec![],
    };
    let load_all_mpts = KERNEL.global_labels["load_all_mpts"];
    // The kernel routine expects only a return destination on the stack;
    // 0xdeadbeef is a sentinel retdest.
    let initial_stack = vec![0xdeadbeefu32.into()];
    let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
    // Feed the trie data to the kernel via PROVER_INPUT(mpt); inputs are
    // reversed because they are consumed by popping from the end.
    interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
    interpreter.run()?;
    // The routine should consume its entire stack before jumping to retdest.
    assert_eq!(interpreter.stack(), vec![]);
    let type_empty = U256::from(PartialTrieType::Empty as u32);
    let type_leaf = U256::from(PartialTrieType::Leaf as u32);
    // Expected trie-data layout: a null cell, then the state trie leaf
    // (type, num_nibbles, packed_nibbles, payload fields), then the empty
    // transaction and receipt tries.
    assert_eq!(
        interpreter.get_trie_data(),
        vec![
            0.into(), // First address is unused, so 0 can be treated as a null pointer.
            type_leaf,
            2.into(),
            123.into(),
            nonce,
            balance,
            storage_root,
            code_hash,
            type_empty,
            type_empty,
        ]
    );
    // No storage tries were supplied, and the kernel should record that count.
    assert_eq!(
        interpreter.get_global_metadata_field(GlobalMetadata::NumStorageTries),
        trie_inputs.storage_tries.len().into()
    );
    Ok(())
}

View File

@ -0,0 +1 @@
mod load;

View File

@ -30,6 +30,17 @@ pub(crate) mod state;
pub struct GenerationInputs {
pub signed_txns: Vec<Vec<u8>>,
pub tries: TrieInputs,
/// Mapping between smart contract code hashes and the contract byte code.
/// All account smart contracts that are invoked will have an entry present.
pub contract_code: HashMap<H256, Vec<u8>>,
pub block_metadata: BlockMetadata,
}
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct TrieInputs {
/// A partial version of the state trie prior to these transactions. It should include all nodes
/// that will be accessed by these transactions.
pub state_trie: PartialTrie,
@ -45,12 +56,6 @@ pub struct GenerationInputs {
/// A partial version of each storage trie prior to these transactions. It should include all
/// storage tries, and nodes therein, that will be accessed by these transactions.
pub storage_tries: Vec<(Address, PartialTrie)>,
/// Mapping between smart contract code hashes and the contract byte code.
/// All account smart contracts that are invoked will have an entry present.
pub contract_code: HashMap<H256, Vec<u8>>,
pub block_metadata: BlockMetadata,
}
pub(crate) fn generate_traces<F: RichField + Extendable<D>, const D: usize>(

View File

@ -2,22 +2,34 @@ use eth_trie_utils::partial_trie::PartialTrie;
use ethereum_types::U256;
use crate::cpu::kernel::constants::trie_type::PartialTrieType;
use crate::generation::GenerationInputs;
use crate::generation::TrieInputs;
pub(crate) fn all_mpt_prover_inputs(inputs: &GenerationInputs) -> Vec<U256> {
pub(crate) fn all_mpt_prover_inputs_reversed(trie_inputs: &TrieInputs) -> Vec<U256> {
let mut inputs = all_mpt_prover_inputs(trie_inputs);
inputs.reverse();
inputs
}
/// Generate prover inputs for the initial MPT data, in the format expected by `mpt/load.asm`.
pub(crate) fn all_mpt_prover_inputs(trie_inputs: &TrieInputs) -> Vec<U256> {
let mut prover_inputs = vec![];
mpt_prover_inputs(&inputs.state_trie, &mut prover_inputs, &|_rlp| vec![]); // TODO
mpt_prover_inputs(&trie_inputs.state_trie, &mut prover_inputs, &|rlp| {
rlp::decode_list(rlp)
});
mpt_prover_inputs(
&inputs.transactions_trie,
&mut prover_inputs,
&|_rlp| vec![],
); // TODO
mpt_prover_inputs(&trie_inputs.transactions_trie, &mut prover_inputs, &|rlp| {
rlp::decode_list(rlp)
});
mpt_prover_inputs(&inputs.receipts_trie, &mut prover_inputs, &|_rlp| vec![]); // TODO
mpt_prover_inputs(&trie_inputs.receipts_trie, &mut prover_inputs, &|_rlp| {
// TODO: Decode receipt RLP.
vec![]
});
for (_addr, storage_trie) in &inputs.storage_tries {
prover_inputs.push(trie_inputs.storage_tries.len().into());
for (addr, storage_trie) in &trie_inputs.storage_tries {
prover_inputs.push(addr.0.as_ref().into());
mpt_prover_inputs(storage_trie, &mut prover_inputs, &|leaf_be| {
vec![U256::from_big_endian(leaf_be)]
});
@ -45,7 +57,9 @@ pub(crate) fn mpt_prover_inputs<F>(
for child in children {
mpt_prover_inputs(child, prover_inputs, parse_leaf);
}
prover_inputs.extend(parse_leaf(value));
let leaf = parse_leaf(value);
prover_inputs.push(leaf.len().into());
prover_inputs.extend(leaf);
}
PartialTrie::Extension { nibbles, child } => {
prover_inputs.push(nibbles.count.into());
@ -55,7 +69,9 @@ pub(crate) fn mpt_prover_inputs<F>(
PartialTrie::Leaf { nibbles, value } => {
prover_inputs.push(nibbles.count.into());
prover_inputs.push(nibbles.packed);
prover_inputs.extend(parse_leaf(value));
let leaf = parse_leaf(value);
prover_inputs.push(leaf.len().into());
prover_inputs.extend(leaf);
}
}
}

View File

@ -25,7 +25,7 @@ impl<F: Field> GenerationState<F> {
pub(crate) fn prover_input(&mut self, stack: &[U256], input_fn: &ProverInputFn) -> U256 {
match input_fn.0[0].as_str() {
"ff" => self.run_ff(stack, input_fn),
"mpt" => self.run_mpt(input_fn),
"mpt" => self.run_mpt(),
_ => panic!("Unrecognized prover input function."),
}
}
@ -39,16 +39,10 @@ impl<F: Field> GenerationState<F> {
}
/// MPT data.
fn run_mpt(&mut self, input_fn: &ProverInputFn) -> U256 {
let operation = input_fn.0[1].as_str();
match operation {
"trie_data" => self
.mpt_prover_inputs
.pop()
.unwrap_or_else(|| panic!("Out of MPT data")),
"num_storage_tries" => self.inputs.storage_tries.len().into(),
_ => panic!("Unrecognized MPT operation."),
}
fn run_mpt(&mut self) -> U256 {
self.mpt_prover_inputs
.pop()
.unwrap_or_else(|| panic!("Out of MPT data"))
}
}

View File

@ -6,7 +6,7 @@ use tiny_keccak::keccakf;
use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS};
use crate::generation::memory::MemoryState;
use crate::generation::mpt::all_mpt_prover_inputs;
use crate::generation::mpt::all_mpt_prover_inputs_reversed;
use crate::generation::GenerationInputs;
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryOp;
use crate::memory::memory_stark::MemoryOp;
@ -17,6 +17,7 @@ use crate::{keccak, logic};
#[derive(Debug)]
pub(crate) struct GenerationState<F: Field> {
#[allow(unused)] // TODO: Should be used soon.
pub(crate) inputs: GenerationInputs,
pub(crate) cpu_rows: Vec<[F; NUM_CPU_COLUMNS]>,
pub(crate) current_cpu_row: CpuColumnsView<F>,
@ -35,8 +36,7 @@ pub(crate) struct GenerationState<F: Field> {
impl<F: Field> GenerationState<F> {
pub(crate) fn new(inputs: GenerationInputs) -> Self {
let mut mpt_prover_inputs = all_mpt_prover_inputs(&inputs);
mpt_prover_inputs.reverse();
let mpt_prover_inputs = all_mpt_prover_inputs_reversed(&inputs.tries);
Self {
inputs,

View File

@ -7,7 +7,7 @@ use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::GenerationInputs;
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::BlockMetadata;
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
@ -29,10 +29,12 @@ fn test_simple_transfer() -> anyhow::Result<()> {
let inputs = GenerationInputs {
signed_txns: vec![txn.to_vec()],
state_trie: PartialTrie::Empty,
transactions_trie: PartialTrie::Empty,
receipts_trie: PartialTrie::Empty,
storage_tries: vec![],
tries: TrieInputs {
state_trie: PartialTrie::Empty,
transactions_trie: PartialTrie::Empty,
receipts_trie: PartialTrie::Empty,
storage_tries: vec![],
},
contract_code: HashMap::new(),
block_metadata,
};