mirror of https://github.com/logos-storage/plonky2.git
synced 2026-01-03 06:13:07 +00:00
commit c27e40e7bb
@@ -7,7 +7,7 @@ edition = "2021"
 [dependencies]
 plonky2 = { path = "../plonky2", default-features = false, features = ["rand", "timing"] }
 plonky2_util = { path = "../util" }
-eth-trie-utils = { git = "https://github.com/mir-protocol/eth-trie-utils.git", rev = "3ca443fd18e3f6d209dd96cbad851e05ae058b34" }
+eth-trie-utils = { git = "https://github.com/mir-protocol/eth-trie-utils.git", rev = "c52a04c9f349ac812b886f383a7306b27c8b96dc" }
 maybe_rayon = { path = "../maybe_rayon" }
 anyhow = "1.0.40"
 env_logger = "0.9.0"
@@ -41,8 +41,12 @@ pub(crate) fn combined_kernel() -> Kernel {
         include_str!("asm/rlp/encode.asm"),
         include_str!("asm/rlp/decode.asm"),
         include_str!("asm/rlp/read_to_memory.asm"),
-        include_str!("asm/storage/read.asm"),
-        include_str!("asm/storage/write.asm"),
+        include_str!("asm/mpt/hash.asm"),
+        include_str!("asm/mpt/read.asm"),
+        include_str!("asm/mpt/storage_read.asm"),
+        include_str!("asm/mpt/storage_write.asm"),
+        include_str!("asm/mpt/util.asm"),
+        include_str!("asm/mpt/write.asm"),
         include_str!("asm/transactions/router.asm"),
         include_str!("asm/transactions/type_0.asm"),
         include_str!("asm/transactions/type_1.asm"),
evm/src/cpu/kernel/asm/mpt/hash.asm (new file, 2 lines)
@@ -0,0 +1,2 @@
+global mpt_hash:
+    // TODO
evm/src/cpu/kernel/asm/mpt/load.asm (new file, 12 lines)
@@ -0,0 +1,12 @@
+// Load all partial trie data from prover inputs.
+global mpt_load_all:
+    // First set GLOBAL_METADATA_TRIE_DATA_SIZE = 1.
+    // We don't want it to start at 0, as we use 0 as a null pointer.
+    PUSH 1
+    %mstore(@GLOBAL_METADATA_TRIE_DATA_SIZE)
+
+    TODO
+
+mpt_load_state:
+    PROVER_INPUT(mpt::state)
+    TODO
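The null-pointer convention above is worth spelling out. Below is a minimal Rust sketch (illustrative only, not part of the diff; the TrieData type is hypothetical) of why the size counter starts at 1: slot 0 of the trie-data segment is never allocated, so a node pointer of 0 can unambiguously mean "absent", which is exactly what mpt_read returns when it hits an empty node.

// Hypothetical model of the SEGMENT_TRIE_DATA allocator implied by the comment above.
struct TrieData(Vec<u64>);

impl TrieData {
    fn new() -> Self {
        // Slot 0 is reserved so that a pointer of 0 can serve as "null" / "not found".
        TrieData(vec![0])
    }

    fn alloc(&mut self, word: u64) -> usize {
        self.0.push(word);
        self.0.len() - 1 // the first real allocation gets pointer 1, never 0
    }
}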
evm/src/cpu/kernel/asm/mpt/read.asm (new file, 81 lines)
@@ -0,0 +1,81 @@
+// Read a value from a MPT.
+//
+// Arguments:
+// - the virtual address of the trie to search in
+// - the key, as a U256
+// - the number of nibbles in the key
+//
+// This function returns a pointer to the leaf, or 0 if the key is not found.
+
+global mpt_read:
+    // stack: node_ptr, key, nibbles, retdest
+    DUP1
+    %mload_trie_data
+    // stack: node_type, node_ptr, key, nibbles, retdest
+    // Increment node_ptr, so it points to the node payload instead of its type.
+    SWAP1 %add_const(1) SWAP1
+    // stack: node_type, node_payload_ptr, key, nibbles, retdest
+
+    DUP1 %eq_const(@MPT_NODE_EMPTY) %jumpi(mpt_read_empty)
+    DUP1 %eq_const(@MPT_NODE_BRANCH) %jumpi(mpt_read_branch)
+    DUP1 %eq_const(@MPT_NODE_EXTENSION) %jumpi(mpt_read_extension)
+    DUP1 %eq_const(@MPT_NODE_LEAF) %jumpi(mpt_read_leaf)
+
+    // There's still the MPT_NODE_DIGEST case, but if we hit a digest node,
+    // it means the prover failed to provide necessary Merkle data, so panic.
+    PANIC
+
+mpt_read_empty:
+    // Return 0 to indicate that the value was not found.
+    %stack (node_type, node_payload_ptr, key, nibbles, retdest)
+        -> (retdest, 0)
+    JUMP
+
+mpt_read_branch:
+    // stack: node_type, node_payload_ptr, key, nibbles, retdest
+    POP
+    // stack: node_payload_ptr, key, nibbles, retdest
+    DUP3 // nibbles
+    ISZERO
+    // stack: nibbles == 0, node_payload_ptr, key, nibbles, retdest
+    %jumpi(mpt_read_branch_end_of_key)
+
+    // stack: node_payload_ptr, key, nibbles, retdest
+    // We have not reached the end of the key, so we descend to one of our children.
+    // Decrement nibbles, then compute current_nibble = (key >> (nibbles * 4)) & 0xF.
+    SWAP2
+    %sub_const(1)
+    // stack: nibbles, key, node_payload_ptr, retdest
+    DUP2 DUP2
+    // stack: nibbles, key, nibbles, key, node_payload_ptr, retdest
+    %mul_const(4)
+    // stack: nibbles * 4, key, nibbles, key, node_payload_ptr, retdest
+    SHR
+    // stack: key >> (nibbles * 4), nibbles, key, node_payload_ptr, retdest
+    %and_const(0xF)
+    // stack: current_nibble, nibbles, key, node_payload_ptr, retdest
+    %stack (current_nibble, nibbles, key, node_payload_ptr, retdest)
+        -> (current_nibble, node_payload_ptr, key, nibbles, retdest)
+    // child_ptr = load(node_payload_ptr + current_nibble)
+    ADD
+    %mload_trie_data
+    // stack: child_ptr, key, nibbles, retdest
+    %jump(mpt_read) // recurse
+
+mpt_read_branch_end_of_key:
+    %stack (node_payload_ptr, key, nibbles, retdest) -> (node_payload_ptr, retdest)
+    // stack: node_payload_ptr, retdest
+    %add_const(16) // skip over the 16 child nodes
+    // stack: leaf_ptr, retdest
+    SWAP1
+    JUMP
+
+mpt_read_extension:
+    // stack: node_type, node_payload_ptr, key, nibbles, retdest
+    POP
+    // stack: node_payload_ptr, key, nibbles, retdest
+
+mpt_read_leaf:
+    // stack: node_type, node_payload_ptr, key, nibbles, retdest
+    POP
+    // stack: node_payload_ptr, key, nibbles, retdest
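For readers who don't think in stack-machine terms, here is a rough Rust mirror of the traversal mpt_read performs (illustrative only, not part of the diff). It assumes the flat node layout the assembly reads: trie_data[node_ptr] holds the type tag, the payload starts at node_ptr + 1, and a branch payload is 16 child pointers followed by its value; U256 keys are narrowed to u64 for brevity.

fn read_mpt(trie_data: &[u64], mut node_ptr: usize, key: u64, mut nibbles: u32) -> usize {
    loop {
        let node_type = trie_data[node_ptr];
        let payload_ptr = node_ptr + 1;
        match node_type {
            // MPT_NODE_EMPTY: the key is absent; 0 doubles as the null pointer.
            0 => return 0,
            // MPT_NODE_BRANCH
            2 => {
                if nibbles == 0 {
                    // End of key: skip the 16 child pointers and return a pointer to the value.
                    return payload_ptr + 16;
                }
                // Consume the most significant remaining nibble and descend into that child.
                nibbles -= 1;
                let current_nibble = ((key >> (nibbles * 4)) & 0xF) as usize;
                node_ptr = trie_data[payload_ptr + current_nibble] as usize;
            }
            // Extension and leaf handling are still TODO in the assembly above; a digest
            // node means the prover omitted needed Merkle data, which is a panic there.
            _ => unimplemented!(),
        }
    }
}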
evm/src/cpu/kernel/asm/mpt/util.asm (new file, 11 lines)
@@ -0,0 +1,11 @@
+%macro mload_trie_data
+    // stack: virtual
+    %mload_kernel(@SEGMENT_TRIE_DATA)
+    // stack: value
+%endmacro
+
+%macro mstore_trie_data
+    // stack: virtual, value
+    %mstore_kernel(@SEGMENT_TRIE_DATA)
+    // stack: (empty)
+%endmacro
evm/src/cpu/kernel/asm/mpt/write.asm (new file, 2 lines)
@@ -0,0 +1,2 @@
+global mpt_write:
+    // TODO
@@ -9,13 +9,13 @@ use crate::cpu::kernel::ast::Item::LocalLabelDeclaration;
 use crate::cpu::kernel::ast::StackReplacement;
 use crate::cpu::kernel::keccak_util::hash_kernel;
 use crate::cpu::kernel::optimizer::optimize_asm;
-use crate::cpu::kernel::prover_input::ProverInputFn;
 use crate::cpu::kernel::stack::stack_manipulation::expand_stack_manipulation;
 use crate::cpu::kernel::utils::u256_to_trimmed_be_bytes;
 use crate::cpu::kernel::{
     ast::{File, Item},
     opcodes::{get_opcode, get_push_opcode},
 };
+use crate::generation::prover_input::ProverInputFn;

 /// The number of bytes to push when pushing an offset within the code (i.e. when assembling jumps).
 /// Ideally we would automatically use the minimal number of bytes required, but that would be
@@ -1,6 +1,6 @@
 use ethereum_types::U256;

-use crate::cpu::kernel::prover_input::ProverInputFn;
+use crate::generation::prover_input::ProverInputFn;

 #[derive(Debug)]
 pub(crate) struct File {
@@ -4,11 +4,14 @@ use ethereum_types::U256;
 use hex_literal::hex;

 use crate::cpu::decode::invalid_opcodes_user;
+use crate::cpu::kernel::constants::trie_type::PartialTrieType;
 use crate::cpu::kernel::context_metadata::ContextMetadata;
 use crate::cpu::kernel::global_metadata::GlobalMetadata;
 use crate::cpu::kernel::txn_fields::NormalizedTxnField;
 use crate::memory::segments::Segment;

+pub(crate) mod trie_type;
+
 /// Constants that are accessible to our kernel assembly code.
 pub fn evm_constants() -> HashMap<String, U256> {
     let mut c = HashMap::new();
@@ -30,6 +33,9 @@ pub fn evm_constants() -> HashMap<String, U256> {
     for txn_field in ContextMetadata::all() {
         c.insert(txn_field.var_name().into(), (txn_field as u32).into());
     }
+    for trie_type in PartialTrieType::all() {
+        c.insert(trie_type.var_name().into(), (trie_type as u32).into());
+    }
     c.insert(
         "INVALID_OPCODES_USER".into(),
         U256::from_little_endian(&invalid_opcodes_user()),
evm/src/cpu/kernel/constants/trie_type.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
+use eth_trie_utils::partial_trie::PartialTrie;
+
+pub(crate) enum PartialTrieType {
+    Empty = 0,
+    Hash = 1,
+    Branch = 2,
+    Extension = 3,
+    Leaf = 4,
+}
+
+impl PartialTrieType {
+    pub(crate) const COUNT: usize = 5;
+
+    pub(crate) fn of(trie: &PartialTrie) -> Self {
+        match trie {
+            PartialTrie::Empty => Self::Empty,
+            PartialTrie::Hash(_) => Self::Hash,
+            PartialTrie::Branch { .. } => Self::Branch,
+            PartialTrie::Extension { .. } => Self::Extension,
+            PartialTrie::Leaf { .. } => Self::Leaf,
+        }
+    }
+
+    pub(crate) fn all() -> [Self; Self::COUNT] {
+        [
+            Self::Empty,
+            Self::Hash,
+            Self::Branch,
+            Self::Extension,
+            Self::Leaf,
+        ]
+    }
+
+    /// The variable name that gets passed into kernel assembly code.
+    pub(crate) fn var_name(&self) -> &'static str {
+        match self {
+            Self::Empty => "MPT_NODE_EMPTY",
+            Self::Hash => "MPT_NODE_HASH",
+            Self::Branch => "MPT_NODE_BRANCH",
+            Self::Extension => "MPT_NODE_EXTENSION",
+            Self::Leaf => "MPT_NODE_LEAF",
+        }
+    }
+}
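A quick illustration (not part of the diff) of how the pieces line up: the discriminants above are the tags mpt_prover_inputs writes into the trie data, and evm_constants() exposes the same values to the assembly under the @MPT_NODE_* names that mpt_read branches on. A hypothetical test placed alongside PartialTrieType could assert the correspondence directly:

#[test]
fn trie_type_tags_match_kernel_constants() {
    // The value compared via `%eq_const(@MPT_NODE_BRANCH)` in read.asm is the same
    // discriminant serialized by mpt_prover_inputs for a branch node, and so on.
    assert_eq!(PartialTrieType::Branch as u32, 2);
    assert_eq!(PartialTrieType::Leaf as u32, 4);
}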
@@ -3,14 +3,19 @@ use std::collections::HashMap;
 use anyhow::{anyhow, bail};
 use ethereum_types::{BigEndianHash, U256, U512};
 use keccak_hash::keccak;
+use plonky2::field::goldilocks_field::GoldilocksField;

 use crate::cpu::kernel::aggregator::KERNEL;
 use crate::cpu::kernel::assembler::Kernel;
-use crate::cpu::kernel::prover_input::ProverInputFn;
 use crate::cpu::kernel::txn_fields::NormalizedTxnField;
 use crate::generation::memory::{MemoryContextState, MemorySegmentState};
+use crate::generation::prover_input::ProverInputFn;
+use crate::generation::state::GenerationState;
+use crate::generation::GenerationInputs;
 use crate::memory::segments::Segment;

+type F = GoldilocksField;
+
 /// Halt interpreter execution whenever a jump to this offset is done.
 const DEFAULT_HALT_OFFSET: usize = 0xdeadbeef;
@@ -55,8 +60,8 @@ pub struct Interpreter<'a> {
     offset: usize,
     context: usize,
     pub(crate) memory: InterpreterMemory,
+    generation_state: GenerationState<F>,
     prover_inputs_map: &'a HashMap<usize, ProverInputFn>,
-    prover_inputs: Vec<U256>,
     pub(crate) halt_offsets: Vec<usize>,
     running: bool,
 }
@@ -107,8 +112,8 @@ impl<'a> Interpreter<'a> {
             jumpdests: find_jumpdests(code),
             offset: initial_offset,
             memory: InterpreterMemory::with_code_and_stack(code, initial_stack),
+            generation_state: GenerationState::new(GenerationInputs::default()),
             prover_inputs_map: prover_inputs,
-            prover_inputs: Vec::new(),
             context: 0,
             halt_offsets: vec![DEFAULT_HALT_OFFSET],
             running: true,
@@ -431,9 +436,9 @@ impl<'a> Interpreter<'a> {
             .prover_inputs_map
             .get(&(self.offset - 1))
             .ok_or_else(|| anyhow!("Offset not in prover inputs."))?;
-        let output = prover_input_fn.run(self.stack());
+        let stack = self.stack().to_vec();
+        let output = self.generation_state.prover_input(&stack, prover_input_fn);
         self.push(output);
-        self.prover_inputs.push(output);
         Ok(())
     }
@@ -1,7 +1,7 @@
 pub mod aggregator;
 pub mod assembler;
 mod ast;
-mod constants;
+pub(crate) mod constants;
 pub(crate) mod context_metadata;
 mod cost_estimator;
 pub(crate) mod global_metadata;
@@ -9,7 +9,6 @@ pub(crate) mod keccak_util;
 mod opcodes;
 mod optimizer;
 mod parser;
-pub mod prover_input;
 pub mod stack;
 mod txn_fields;
 mod utils;
@@ -20,9 +20,11 @@ use crate::proof::{BlockMetadata, PublicValues, TrieRoots};
 use crate::util::trace_rows_to_poly_values;

 pub(crate) mod memory;
+pub(crate) mod mpt;
+pub(crate) mod prover_input;
 pub(crate) mod state;

-#[derive(Clone, Debug, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize, Default)]
 /// Inputs needed for trace generation.
 pub struct GenerationInputs {
     pub signed_txns: Vec<Vec<u8>>,
@@ -55,7 +57,7 @@ pub(crate) fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
     inputs: GenerationInputs,
     config: &StarkConfig,
 ) -> ([Vec<PolynomialValues<F>>; NUM_TABLES], PublicValues) {
-    let mut state = GenerationState::<F>::default();
+    let mut state = GenerationState::<F>::new(inputs.clone());

     generate_bootstrap_kernel::<F>(&mut state);
evm/src/generation/mpt.rs (new file, 61 lines)
@@ -0,0 +1,61 @@
+use eth_trie_utils::partial_trie::PartialTrie;
+use ethereum_types::U256;
+
+use crate::cpu::kernel::constants::trie_type::PartialTrieType;
+use crate::generation::GenerationInputs;
+
+pub(crate) fn all_mpt_prover_inputs(inputs: &GenerationInputs) -> Vec<U256> {
+    let mut prover_inputs = vec![];
+
+    mpt_prover_inputs(&inputs.state_trie, &mut prover_inputs, &|_rlp| vec![]); // TODO
+
+    mpt_prover_inputs(
+        &inputs.transactions_trie,
+        &mut prover_inputs,
+        &|_rlp| vec![],
+    ); // TODO
+
+    mpt_prover_inputs(&inputs.receipts_trie, &mut prover_inputs, &|_rlp| vec![]); // TODO
+
+    for (_addr, storage_trie) in &inputs.storage_tries {
+        mpt_prover_inputs(storage_trie, &mut prover_inputs, &|leaf_be| {
+            vec![U256::from_big_endian(leaf_be)]
+        });
+    }
+
+    prover_inputs
+}
+
+/// Given a trie, generate the prover input data for that trie. In essence, this serializes a trie
+/// into a `U256` array, in a simple format which the kernel understands. For example, a leaf node
+/// is serialized as `(TYPE_LEAF, key, value)`, where key is a `(nibbles, depth)` pair and `value`
+/// is a variable-length structure which depends on which trie we're dealing with.
+pub(crate) fn mpt_prover_inputs<F>(
+    trie: &PartialTrie,
+    prover_inputs: &mut Vec<U256>,
+    parse_leaf: &F,
+) where
+    F: Fn(&[u8]) -> Vec<U256>,
+{
+    prover_inputs.push((PartialTrieType::of(trie) as u32).into());
+    match trie {
+        PartialTrie::Empty => {}
+        PartialTrie::Hash(h) => prover_inputs.push(*h),
+        PartialTrie::Branch { children, value } => {
+            for child in children {
+                mpt_prover_inputs(child, prover_inputs, parse_leaf);
+            }
+            prover_inputs.extend(parse_leaf(value));
+        }
+        PartialTrie::Extension { nibbles, child } => {
+            prover_inputs.push(nibbles.count.into());
+            prover_inputs.push(nibbles.packed);
+            mpt_prover_inputs(child, prover_inputs, parse_leaf);
+        }
+        PartialTrie::Leaf { nibbles, value } => {
+            prover_inputs.push(nibbles.count.into());
+            prover_inputs.push(nibbles.packed);
+            prover_inputs.extend(parse_leaf(value));
+        }
+    }
+}
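To make the flat encoding concrete, here is a worked example (illustrative only, not from the diff): for a storage trie, parse_leaf maps the leaf bytes to a single big-endian U256, so a trie consisting of one leaf with a 3-nibble key serializes to four words, while an empty trie serializes to the single word 0.

use ethereum_types::U256;

// What mpt_prover_inputs would emit for a storage trie that is a single leaf with
// key nibbles 0xabc (count = 3) and value bytes [0x05].
fn expected_leaf_encoding() -> Vec<U256> {
    vec![
        U256::from(4u32),     // PartialTrieType::Leaf as u32
        U256::from(3u32),     // nibbles.count
        U256::from(0xabcu32), // nibbles.packed
        U256::from(5u32),     // parse_leaf(value) = U256::from_big_endian(&[0x05])
    ]
}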
@@ -1,11 +1,13 @@
 use std::str::FromStr;

 use ethereum_types::U256;
+use plonky2::field::types::Field;

-use crate::cpu::kernel::prover_input::Field::{
+use crate::generation::prover_input::EvmField::{
     Bn254Base, Bn254Scalar, Secp256k1Base, Secp256k1Scalar,
 };
-use crate::cpu::kernel::prover_input::FieldOp::{Inverse, Sqrt};
+use crate::generation::prover_input::FieldOp::{Inverse, Sqrt};
+use crate::generation::state::GenerationState;

 /// Prover input function represented as a scoped function name.
 /// Example: `PROVER_INPUT(ff::bn254_base::inverse)` is represented as `ProverInputFn([ff, bn254_base, inverse])`.
@@ -18,32 +20,39 @@ impl From<Vec<String>> for ProverInputFn {
     }
 }

-impl ProverInputFn {
-    /// Run the function on the stack.
-    pub fn run(&self, stack: &[U256]) -> U256 {
-        match self.0[0].as_str() {
-            "ff" => self.run_ff(stack),
-            "mpt" => todo!(),
+impl<F: Field> GenerationState<F> {
+    #[allow(unused)] // TODO: Should be used soon.
+    pub(crate) fn prover_input(&mut self, stack: &[U256], input_fn: &ProverInputFn) -> U256 {
+        match input_fn.0[0].as_str() {
+            "ff" => self.run_ff(stack, input_fn),
+            "mpt" => self.run_mpt(input_fn),
             _ => panic!("Unrecognized prover input function."),
         }
     }

-    // Finite field operations.
-    fn run_ff(&self, stack: &[U256]) -> U256 {
-        let field = Field::from_str(self.0[1].as_str()).unwrap();
-        let op = FieldOp::from_str(self.0[2].as_str()).unwrap();
+    /// Finite field operations.
+    fn run_ff(&self, stack: &[U256], input_fn: &ProverInputFn) -> U256 {
+        let field = EvmField::from_str(input_fn.0[1].as_str()).unwrap();
+        let op = FieldOp::from_str(input_fn.0[2].as_str()).unwrap();
         let x = *stack.last().expect("Empty stack");
         field.op(op, x)
     }

-    // MPT operations.
-    #[allow(dead_code)]
-    fn run_mpt(&self, _stack: Vec<U256>) -> U256 {
-        todo!()
+    /// MPT data.
+    fn run_mpt(&mut self, input_fn: &ProverInputFn) -> U256 {
+        let operation = input_fn.0[1].as_str();
+        match operation {
+            "trie_data" => self
+                .mpt_prover_inputs
+                .pop()
+                .unwrap_or_else(|| panic!("Out of MPT data")),
+            "num_storage_tries" => self.inputs.storage_tries.len().into(),
+            _ => panic!("Unrecognized MPT operation."),
+        }
     }
 }

-enum Field {
+enum EvmField {
     Bn254Base,
     Bn254Scalar,
     Secp256k1Base,
@@ -55,7 +64,7 @@ enum FieldOp {
     Sqrt,
 }

-impl FromStr for Field {
+impl FromStr for EvmField {
     type Err = ();

     fn from_str(s: &str) -> Result<Self, Self::Err> {
@@ -81,19 +90,19 @@ impl FromStr for FieldOp {
     }
 }

-impl Field {
+impl EvmField {
     fn order(&self) -> U256 {
         match self {
-            Field::Bn254Base => {
+            EvmField::Bn254Base => {
                 U256::from_str("0x30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47")
                     .unwrap()
             }
-            Field::Bn254Scalar => todo!(),
-            Field::Secp256k1Base => {
+            EvmField::Bn254Scalar => todo!(),
+            EvmField::Secp256k1Base => {
                 U256::from_str("0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f")
                     .unwrap()
             }
-            Field::Secp256k1Scalar => {
+            EvmField::Secp256k1Scalar => {
                 U256::from_str("0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141")
                     .unwrap()
             }
@@ -6,6 +6,8 @@ use tiny_keccak::keccakf;

 use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS};
 use crate::generation::memory::MemoryState;
+use crate::generation::mpt::all_mpt_prover_inputs;
+use crate::generation::GenerationInputs;
 use crate::keccak_memory::keccak_memory_stark::KeccakMemoryOp;
 use crate::memory::memory_stark::MemoryOp;
 use crate::memory::segments::Segment;
@@ -15,6 +17,7 @@ use crate::{keccak, logic};

 #[derive(Debug)]
 pub(crate) struct GenerationState<F: Field> {
+    pub(crate) inputs: GenerationInputs,
     pub(crate) cpu_rows: Vec<[F; NUM_CPU_COLUMNS]>,
     pub(crate) current_cpu_row: CpuColumnsView<F>,
@@ -24,9 +27,30 @@ pub(crate) struct GenerationState<F: Field> {
     pub(crate) keccak_inputs: Vec<[u64; keccak::keccak_stark::NUM_INPUTS]>,
     pub(crate) keccak_memory_inputs: Vec<KeccakMemoryOp>,
     pub(crate) logic_ops: Vec<logic::Operation>,
+
+    /// Prover inputs containing MPT data, in reverse order so that the next input can be obtained
+    /// via `pop()`.
+    pub(crate) mpt_prover_inputs: Vec<U256>,
 }

 impl<F: Field> GenerationState<F> {
+    pub(crate) fn new(inputs: GenerationInputs) -> Self {
+        let mut mpt_prover_inputs = all_mpt_prover_inputs(&inputs);
+        mpt_prover_inputs.reverse();
+
+        Self {
+            inputs,
+            cpu_rows: vec![],
+            current_cpu_row: [F::ZERO; NUM_CPU_COLUMNS].into(),
+            current_context: 0,
+            memory: MemoryState::default(),
+            keccak_inputs: vec![],
+            keccak_memory_inputs: vec![],
+            logic_ops: vec![],
+            mpt_prover_inputs,
+        }
+    }
+
     /// Compute logical AND, and record the operation to be added in the logic table later.
     #[allow(unused)] // TODO: Should be used soon.
     pub(crate) fn and(&mut self, input0: U256, input1: U256) -> U256 {
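A small illustration (not from the diff) of the reverse-then-pop pattern used for mpt_prover_inputs: reversing the serialized data once in new() lets every PROVER_INPUT(mpt::trie_data) call handled by run_mpt take the next word with a cheap pop() from the back, while still consuming the data in its original emission order.

fn main() {
    let mut mpt_prover_inputs = vec![10u32, 20, 30]; // emission order from all_mpt_prover_inputs
    mpt_prover_inputs.reverse();                     // stored as [30, 20, 10]
    assert_eq!(mpt_prover_inputs.pop(), Some(10));   // each pop() yields the next word
    assert_eq!(mpt_prover_inputs.pop(), Some(20));   // ...in the original order
}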
@@ -217,19 +241,3 @@ impl<F: Field> GenerationState<F> {
         self.cpu_rows.push(swapped_row.into());
     }
 }
-
-// `GenerationState` can't `derive(Default)` because `Default` is only implemented for arrays up to
-// length 32 :-\.
-impl<F: Field> Default for GenerationState<F> {
-    fn default() -> Self {
-        Self {
-            cpu_rows: vec![],
-            current_cpu_row: [F::ZERO; NUM_CPU_COLUMNS].into(),
-            current_context: 0,
-            memory: MemoryState::default(),
-            keccak_inputs: vec![],
-            keccak_memory_inputs: vec![],
-            logic_ops: vec![],
-        }
-    }
-}