Some cleanup (#1498)

Robin Salen 2024-02-03 12:21:38 -05:00 committed by GitHub
parent 6357963654
commit f3f7433c29
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
38 changed files with 38 additions and 219 deletions

@ -1,7 +1,5 @@
use core::iter;
use core::ops::Deref;
use itertools::Itertools;
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;

@ -38,7 +38,6 @@ use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
use super::columns::BYTE_VALUES_RANGE;
use super::NUM_BYTES;
use crate::byte_packing::columns::{
index_len, value_bytes, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL, IS_READ, LEN_INDICES_COLS,
@ -62,7 +61,7 @@ pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
// obtain the corresponding limb.
let outputs: Vec<Column<F>> = (0..8)
.map(|i| {
let range = (value_bytes(i * 4)..value_bytes(i * 4) + 4);
let range = value_bytes(i * 4)..value_bytes(i * 4) + 4;
Column::linear_combination(
range
.enumerate()
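
The hunk above only drops redundant parentheses around the range; the linear combination itself still packs four consecutive byte columns into each 32-bit limb. A minimal sketch of that packing, assuming little-endian byte order and using plain `u64`s in place of field elements:

```rust
// Pack consecutive byte values into one 32-bit limb: byte j gets the
// coefficient 2^(8j), mirroring the linear combination built above.
fn pack_limb(bytes: &[u64]) -> u64 {
    bytes
        .iter()
        .enumerate()
        .map(|(j, &byte)| byte << (8 * j))
        .sum()
}

fn main() {
    // Bytes [0x78, 0x56, 0x34, 0x12] pack into the limb 0x12345678.
    assert_eq!(pack_limb(&[0x78, 0x56, 0x34, 0x12]), 0x1234_5678);
}
```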

@ -33,10 +33,6 @@ pub(crate) const fn value_bytes(i: usize) -> usize {
BYTES_VALUES_START + i
}
/// Range of columns containing the byte values.
pub(crate) const BYTE_VALUES_RANGE: Range<usize> =
BYTES_VALUES_START..BYTES_VALUES_START + NUM_BYTES;
/// The counter column (used for the range check) starts from 0 and increments.
pub(crate) const RANGE_COUNTER: usize = BYTES_VALUES_START + NUM_BYTES;
/// The frequencies column used in logUp.
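
For orientation, the removed `BYTE_VALUES_RANGE` spanned exactly the byte-value columns, and the range-check counter sits immediately after them. A sketch of that layout with illustrative values (`BYTES_VALUES_START` and `NUM_BYTES` are assumptions here, not the crate's real numbers):

```rust
use core::ops::Range;

// Illustrative stand-ins for the constants defined in the byte_packing
// columns module.
const BYTES_VALUES_START: usize = 10;
const NUM_BYTES: usize = 32;

/// Column holding the i-th byte value.
const fn value_bytes(i: usize) -> usize {
    BYTES_VALUES_START + i
}

/// The removed range covered the byte-value columns...
const BYTE_VALUES_RANGE: Range<usize> =
    BYTES_VALUES_START..BYTES_VALUES_START + NUM_BYTES;
/// ...and the counter is the first column after them.
const RANGE_COUNTER: usize = BYTES_VALUES_START + NUM_BYTES;
```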

@ -8,12 +8,9 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use super::columns::ops::OpsColumnsView;
use super::cpu_stark::{disable_unused_channels, disable_unused_channels_circuit};
use super::membus::NUM_GP_CHANNELS;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::CpuColumnsView;
use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
use crate::memory::segments::Segment;
use crate::memory::VALUE_LIMBS;
// If true, the instruction will keep the current context for the next row.
// If false, the next row's context is handled manually.
@ -88,7 +85,7 @@ fn eval_packed_get<P: PackedField>(
// Context is scaled by 2^64, hence stored in the 3rd limb.
yield_constr.constraint(filter * (new_stack_top[2] - lv.context));
for (i, &limb) in new_stack_top.iter().enumerate().filter(|(i, _)| *i != 2) {
for (_, &limb) in new_stack_top.iter().enumerate().filter(|(i, _)| *i != 2) {
yield_constr.constraint(filter * limb);
}
@ -119,7 +116,7 @@ fn eval_ext_circuit_get<F: RichField + Extendable<D>, const D: usize>(
yield_constr.constraint(builder, constr);
}
for (i, &limb) in new_stack_top.iter().enumerate().filter(|(i, _)| *i != 2) {
for (_, &limb) in new_stack_top.iter().enumerate().filter(|(i, _)| *i != 2) {
let constr = builder.mul_extension(filter, limb);
yield_constr.constraint(builder, constr);
}
@ -151,7 +148,7 @@ fn eval_packed_set<P: PackedField>(
// The next row's context is read from stack_top.
yield_constr.constraint(filter * (stack_top[2] - nv.context));
for (i, &limb) in stack_top.iter().enumerate().filter(|(i, _)| *i != 2) {
for (_, &limb) in stack_top.iter().enumerate().filter(|(i, _)| *i != 2) {
yield_constr.constraint(filter * limb);
}
@ -199,7 +196,7 @@ fn eval_ext_circuit_set<F: RichField + Extendable<D>, const D: usize>(
let constr = builder.mul_extension(filter, diff);
yield_constr.constraint(builder, constr);
}
for (i, &limb) in stack_top.iter().enumerate().filter(|(i, _)| *i != 2) {
for (_, &limb) in stack_top.iter().enumerate().filter(|(i, _)| *i != 2) {
let constr = builder.mul_extension(filter, limb);
yield_constr.constraint(builder, constr);
}
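
The four hunks above only drop an unused loop index; the constraint pattern is unchanged. A plain-Rust sketch of what it enforces, assuming eight 32-bit limbs with `u64`s standing in for field elements:

```rust
// The context is scaled by 2^64, so it occupies limb 2 of the stack top;
// every other limb is constrained to zero.
fn context_limbs_ok(new_stack_top: &[u64; 8], context: u64) -> bool {
    new_stack_top[2] == context
        && new_stack_top
            .iter()
            .enumerate()
            .filter(|(i, _)| *i != 2) // skip the context limb, as in the diff
            .all(|(_, &limb)| limb == 0)
}
```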

@ -22,7 +22,7 @@ use crate::cpu::{
};
use crate::cross_table_lookup::TableWithColumns;
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::{Column, Filter, Lookup};
use crate::lookup::{Column, Filter};
use crate::memory::segments::Segment;
use crate::memory::{NUM_CHANNELS, VALUE_LIMBS};
use crate::stark::Stark;

@ -3,7 +3,6 @@ use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::{CpuColumnsView, COL_MAP};

@ -6,7 +6,6 @@ use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use super::membus::NUM_GP_CHANNELS;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::{CpuColumnsView, MemoryChannelView};
use crate::memory::segments::Segment;

@ -431,7 +431,6 @@ fn push_target_size(target: &PushTarget) -> u8 {
#[cfg(test)]
mod tests {
use super::*;
use crate::cpu::kernel::assembler::*;
use crate::cpu::kernel::parser::parse;
#[test]

@ -2,7 +2,6 @@ use std::collections::HashMap;
use ethereum_types::U256;
use hex_literal::hex;
use static_assertions::const_assert;
use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;

@ -1,4 +1,3 @@
use super::opcodes::get_opcode;
use crate::cpu::kernel::assembler::BYTES_PER_OFFSET;
use crate::cpu::kernel::ast::Item;
use crate::cpu::kernel::ast::Item::*;

@ -2,7 +2,7 @@
use core::cmp::Ordering;
use core::ops::Range;
use std::collections::{BTreeSet, HashMap, HashSet};
use std::collections::{BTreeSet, HashMap};
use anyhow::bail;
use eth_trie_utils::partial_trie::PartialTrie;
@ -11,7 +11,6 @@ use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use super::assembler::BYTES_PER_OFFSET;
use super::utils::u256_from_bool;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
@ -1393,70 +1392,6 @@ impl<'a> Interpreter<'a> {
}
}
// Computes the two's complement of the given integer.
fn two_complement(x: U256) -> U256 {
let flipped_bits = x ^ MINUS_ONE;
flipped_bits.overflowing_add(U256::one()).0
}
fn signed_cmp(x: U256, y: U256) -> Ordering {
let x_is_zero = x.is_zero();
let y_is_zero = y.is_zero();
if x_is_zero && y_is_zero {
return Ordering::Equal;
}
let x_is_pos = x.eq(&(x & SIGN_MASK));
let y_is_pos = y.eq(&(y & SIGN_MASK));
if x_is_zero {
if y_is_pos {
return Ordering::Less;
} else {
return Ordering::Greater;
}
};
if y_is_zero {
if x_is_pos {
return Ordering::Greater;
} else {
return Ordering::Less;
}
};
match (x_is_pos, y_is_pos) {
(true, true) => x.cmp(&y),
(true, false) => Ordering::Greater,
(false, true) => Ordering::Less,
(false, false) => x.cmp(&y).reverse(),
}
}
/// -1 in two's complement representation has all bits set to 1.
const MINUS_ONE: U256 = U256([
0xffffffffffffffff,
0xffffffffffffffff,
0xffffffffffffffff,
0xffffffffffffffff,
]);
/// -2^255 in two's complement representation has only the MSB set to 1.
const MIN_VALUE: U256 = U256([
0x0000000000000000,
0x0000000000000000,
0x0000000000000000,
0x8000000000000000,
]);
const SIGN_MASK: U256 = U256([
0xffffffffffffffff,
0xffffffffffffffff,
0xffffffffffffffff,
0x7fffffffffffffff,
]);
fn get_mnemonic(opcode: u8) -> &'static str {
match opcode {
0x00 => "STOP",
@ -1651,7 +1586,6 @@ fn get_mnemonic(opcode: u8) -> &'static str {
}
}
#[macro_use]
macro_rules! unpack_address {
($addr:ident) => {{
let offset = $addr.low_u32() as usize;
@ -1729,8 +1663,8 @@ mod tests {
interpreter.run()?;
// sys_stop returns `success` and `cum_gas_used`, which we need to pop.
interpreter.pop();
interpreter.pop();
interpreter.pop().expect("Stack should not be empty");
interpreter.pop().expect("Stack should not be empty");
assert_eq!(interpreter.stack(), &[0xff.into(), 0xff00.into()]);
assert_eq!(
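
This interpreter diff removes the now-unused signed-arithmetic helpers and makes `pop` fallible. As a quick sanity check of the two's-complement identities the removed code relied on (assuming `ethereum_types::U256`, with `two_complement` reproduced from the deleted helper):

```rust
use ethereum_types::U256;

// Two's complement negation: flip every bit, then add one (wrapping),
// exactly as in the removed two_complement.
fn two_complement(x: U256) -> U256 {
    (x ^ U256::MAX).overflowing_add(U256::one()).0
}

fn main() {
    // -1 has all bits set, so negating 1 yields U256::MAX (the MINUS_ONE
    // constant deleted above).
    assert_eq!(two_complement(U256::one()), U256::MAX);
    // Negation is self-inverse for ordinary values.
    let x = U256::from(0xdeadbeefu64);
    assert_eq!(two_complement(two_complement(x)), x);
}
```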

@ -1,6 +1,6 @@
use std::collections::HashMap;
use anyhow::{anyhow, Result};
use anyhow::Result;
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{Address, BigEndianHash, H256, U256};
@ -327,8 +327,8 @@ fn sstore() -> Result<()> {
// The first two elements on the stack are `success` and `leftover_gas`,
// returned by the `sys_stop` opcode.
interpreter.pop();
interpreter.pop();
interpreter.pop().expect("Stack should not be empty");
interpreter.pop().expect("Stack should not be empty");
// The code should have added an element to the storage of `to_account`. We run
// `mpt_hash_state_trie` to check that.

@ -1,24 +1,18 @@
use std::collections::HashMap;
use std::str::FromStr;
use anyhow::{anyhow, Result};
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, Node, PartialTrie};
use ethereum_types::{Address, BigEndianHash, H256, U256};
use ethereum_types::{Address, BigEndianHash, H256};
use hex_literal::hex;
use keccak_hash::keccak;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cpu::kernel::interpreter::Interpreter;
use crate::cpu::kernel::tests::account_code::initialize_mpts;
use crate::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use crate::generation::rlp::all_rlp_prover_inputs_reversed;
use crate::generation::TrieInputs;
use crate::memory::segments::Segment;
use crate::proof::{BlockHashes, BlockMetadata, TrieRoots};
use crate::util::h2u;
use crate::GenerationInputs;
#[test]

@ -1,4 +1,4 @@
use anyhow::{anyhow, Result};
use anyhow::Result;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{Address, BigEndianHash, H256, U256};
use keccak_hash::keccak;

@ -3,7 +3,7 @@ use ethereum_types::U256;
use rand::{thread_rng, Rng};
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::interpreter::{run, run_interpreter, Interpreter};
use crate::cpu::kernel::interpreter::{run_interpreter, Interpreter};
#[test]
fn test_exp() -> Result<()> {

@ -1,9 +1,6 @@
use anyhow::Result;
use ethereum_types::U256;
use crate::cpu::kernel::aggregator::{combined_kernel, KERNEL};
use crate::cpu::kernel::interpreter::Interpreter;
use crate::memory::segments::Segment;
#[test]
fn test_kernel_code_hash_consistency() -> Result<()> {

@ -1,4 +1,4 @@
use anyhow::{anyhow, Result};
use anyhow::Result;
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{BigEndianHash, H256, U512};

@ -1,4 +1,4 @@
use anyhow::{anyhow, Result};
use anyhow::Result;
use eth_trie_utils::partial_trie::PartialTrie;
use ethereum_types::{BigEndianHash, H256};

@ -1,4 +1,4 @@
use anyhow::{anyhow, Result};
use anyhow::Result;
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{BigEndianHash, H256};

@ -1,12 +1,11 @@
use std::str::FromStr;
use anyhow::{anyhow, Result};
use anyhow::Result;
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::HashedPartialTrie;
use ethereum_types::{BigEndianHash, H256, U256};
use hex_literal::hex;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cpu::kernel::constants::trie_type::PartialTrieType;
use crate::cpu::kernel::interpreter::Interpreter;

@ -1,4 +1,4 @@
use anyhow::{anyhow, Result};
use anyhow::Result;
use ethereum_types::BigEndianHash;
use crate::cpu::kernel::aggregator::KERNEL;

@ -1,4 +1,4 @@
use anyhow::{anyhow, Result};
use anyhow::Result;
use ethereum_types::{Address, U256};
use hex_literal::hex;
use keccak_hash::keccak;
@ -408,9 +408,6 @@ fn test_mpt_insert_receipt() -> Result<()> {
receipt.push(num_logs.into()); // num_logs
receipt.extend(logs_0.clone());
// First, we load all mpts.
let initial_stack: Vec<U256> = vec![retdest];
let mut interpreter = Interpreter::new_with_kernel(0, vec![]);
initialize_mpts(&mut interpreter, &trie_inputs);

@ -7,7 +7,6 @@ use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use static_assertions::const_assert;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::CpuColumnsView;
@ -227,7 +226,7 @@ pub(crate) fn eval_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
{
let diff_syscall =
builder.sub_extension(jumpdest_channel.addr_virtual, opcode_handler_addr_start);
let constr = builder.mul_extension((filter_syscall), diff_syscall);
let constr = builder.mul_extension(filter_syscall, diff_syscall);
yield_constr.constraint(builder, constr);
}
{

@ -27,15 +27,11 @@
//! is similar, but we provide not only `local_values` but also `next_values` -- corresponding to
//! the current and next row values -- when computing the linear combinations.
use core::borrow::Borrow;
use core::cmp::min;
use core::fmt::Debug;
use core::iter::repeat;
use anyhow::{ensure, Result};
use hashbrown::HashMap;
use itertools::Itertools;
use plonky2::field::batch_util::{batch_add_inplace, batch_multiply_inplace};
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
@ -46,13 +42,9 @@ use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::plonk_common::{
reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit,
};
use plonky2::util::ceil_div_usize;
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
use crate::all_stark::Table;
use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkEvaluationFrame;
@ -303,7 +295,7 @@ pub(crate) fn num_ctl_helper_columns_by_table<F: Field, const N: usize>(
for (i, ctl) in ctls.iter().enumerate() {
let CrossTableLookup {
looking_tables,
looked_table,
looked_table: _,
} = ctl;
let mut num_by_table = [0; N];
@ -349,7 +341,7 @@ pub(crate) fn cross_table_lookup_data<'a, F: RichField, const D: usize, const N:
constraint_degree,
);
let mut z_looked = partial_sums(
let z_looked = partial_sums(
&trace_poly_values[looked_table.table],
&[(&looked_table.columns, &looked_table.filter)],
challenge,
@ -412,7 +404,6 @@ fn ctl_helper_zs_cols<F: Field, const N: usize>(
grouped_lookups
.into_iter()
.map(|(table, group)| {
let degree = all_stark_traces[table][0].len();
let columns_filters = group
.map(|table| (&table.columns[..], &table.filter))
.collect::<Vec<(&[Column<F>], &Option<Filter<F>>)>>();
@ -517,7 +508,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
}
// Get all cross-table lookup polynomial openings for each STARK proof.
let mut ctl_zs = proofs
let ctl_zs = proofs
.iter()
.zip(num_lookup_columns)
.map(|(p, &num_lookup)| {
@ -551,7 +542,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
}
}
for (i, &table) in filtered_looking_tables.iter().enumerate() {
for &table in filtered_looking_tables.iter() {
// The helper polynomials come first, followed by the z polynomials.
let (looking_z, looking_z_next) =
ctl_zs[table][total_num_helper_cols_by_table[table] + z_indices[table]];
@ -736,7 +727,7 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<F, D> {
num_helper_ctl_columns: &[usize],
) -> Vec<Self> {
// Get all cross-table lookup polynomial openings for each STARK proof.
let mut ctl_zs = {
let ctl_zs = {
let openings = &proof.openings;
let ctl_zs = openings.auxiliary_polys.iter().skip(num_lookup_columns);
let ctl_zs_next = openings
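
Per the module comment above, a CTL column is a linear combination that may read both the current and the next row. A simplified model of that evaluation (shapes assumed; the real `Column` works over field elements and packed values):

```rust
// Pairs of (column index, coefficient) against the current row, plus
// pairs against the next row, plus a constant; u64 stands in for the
// field type.
struct Column {
    linear_combination: Vec<(usize, u64)>,
    next_row_linear_combination: Vec<(usize, u64)>,
    constant: u64,
}

impl Column {
    fn eval(&self, local_values: &[u64], next_values: &[u64]) -> u64 {
        let local: u64 = self
            .linear_combination
            .iter()
            .map(|&(c, f)| local_values[c] * f)
            .sum();
        let next: u64 = self
            .next_row_linear_combination
            .iter()
            .map(|&(c, f)| next_values[c] * f)
            .sum();
        local + next + self.constant
    }
}
```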

@ -1,7 +1,6 @@
use core::mem::{self, MaybeUninit};
use core::ops::Range;
use std::collections::BTreeMap;
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
@ -25,7 +24,6 @@ use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget};
use plonky2::recursion::cyclic_recursion::check_cyclic_proof_verifier_data;
use plonky2::recursion::dummy_circuit::cyclic_base_proof;
use plonky2::util::serialization::gate_serialization::default;
use plonky2::util::serialization::{
Buffer, GateSerializer, IoResult, Read, WitnessGeneratorSerializer, Write,
};
@ -1100,11 +1098,8 @@ where
/// ```
pub fn prove_root_after_initial_stark(
&self,
all_stark: &AllStark<F, D>,
config: &StarkConfig,
all_proof: AllProof<F, C, D>,
table_circuits: &[(RecursiveCircuitsForTableSize<F, C, D>, u8); NUM_TABLES],
timing: &mut TimingTree,
abort_signal: Option<Arc<AtomicBool>>,
) -> anyhow::Result<(ProofWithPublicInputs<F, C, D>, PublicValues)> {
let mut root_inputs = PartialWitness::new();
@ -1113,7 +1108,6 @@ where
let (table_circuit, index_verifier_data) = &table_circuits[table];
let stark_proof = &all_proof.stark_proofs[table];
let original_degree_bits = stark_proof.proof.recover_degree_bits(config);
let shrunk_proof = table_circuit.shrink(stark_proof, &all_proof.ctl_challenges)?;
root_inputs.set_target(

@ -1,11 +1,8 @@
use std::collections::{BTreeSet, HashMap};
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use anyhow::anyhow;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{Address, BigEndianHash, H256, U256};
use itertools::enumerate;
use plonky2::field::extension::Extendable;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
@ -22,16 +19,12 @@ use crate::all_stark::{AllStark, NUM_TABLES};
use crate::config::StarkConfig;
use crate::cpu::columns::CpuColumnsView;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::assembler::Kernel;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cpu::kernel::opcodes::get_opcode;
use crate::generation::state::GenerationState;
use crate::generation::trie_extractor::{get_receipt_trie, get_state_trie, get_txn_trie};
use crate::memory::segments::Segment;
use crate::proof::{BlockHashes, BlockMetadata, ExtraBlockData, PublicValues, TrieRoots};
use crate::prover::check_abort_signal;
use crate::util::{h2u, u256_to_u8, u256_to_usize};
use crate::witness::errors::{ProgramError, ProverInputError};
use crate::witness::memory::{MemoryAddress, MemoryChannel};
use crate::witness::transition::transition;
@ -41,7 +34,6 @@ pub(crate) mod rlp;
pub(crate) mod state;
mod trie_extractor;
use self::mpt::{load_all_mpts, TrieRootPtrs};
use crate::witness::util::{mem_write_log, stack_peek};
/// Inputs needed for trace generation.
@ -282,7 +274,6 @@ pub fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
let gas_used_after = read_metadata(GlobalMetadata::BlockGasUsedAfter);
let txn_number_after = read_metadata(GlobalMetadata::TxnNumberAfter);
let trie_root_ptrs = state.trie_root_ptrs;
let extra_block_data = ExtraBlockData {
checkpoint_state_trie_root: inputs.checkpoint_state_trie_root,
txn_number_before: inputs.txn_number_before,
@ -372,7 +363,7 @@ fn simulate_cpu_between_labels_and_get_user_jumps<F: Field>(
}
let pc = state.registers.program_counter;
let context = state.registers.context;
let mut halt = state.registers.is_kernel
let halt = state.registers.is_kernel
&& pc == halt_pc
&& state.registers.context == initial_context;
let Ok(opcode) = u256_to_u8(state.memory.get(MemoryAddress::new(
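
In the hunk above, `halt` no longer needs to be `mut`; the condition itself is unchanged. Its shape, with assumed field names:

```rust
// Simulation halts once the CPU is back in kernel mode, at the halt
// label, with the initial context restored.
struct Registers {
    program_counter: usize,
    context: usize,
    is_kernel: bool,
}

fn is_halted(regs: &Registers, halt_pc: usize, initial_context: usize) -> bool {
    regs.is_kernel && regs.program_counter == halt_pc && regs.context == initial_context
}
```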

@ -9,13 +9,10 @@ use keccak_hash::keccak;
use rlp::{Decodable, DecoderError, Encodable, PayloadInfo, Rlp, RlpStream};
use rlp_derive::{RlpDecodable, RlpEncodable};
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cpu::kernel::constants::trie_type::PartialTrieType;
use crate::generation::TrieInputs;
use crate::memory::segments::Segment;
use crate::util::h2u;
use crate::witness::errors::{ProgramError, ProverInputError};
use crate::witness::memory::MemoryAddress;
use crate::Node;
#[derive(RlpEncodable, RlpDecodable, Debug)]

@ -1,20 +1,15 @@
use core::cmp::min;
use core::mem::transmute;
use std::collections::{BTreeSet, HashMap};
use std::str::FromStr;
use anyhow::{bail, Error};
use ethereum_types::{BigEndianHash, H256, U256, U512};
use itertools::{enumerate, Itertools};
use itertools::Itertools;
use num_bigint::BigUint;
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use serde::{Deserialize, Serialize};
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::context_metadata::ContextMetadata;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::extension_tower::{FieldExt, Fp12, BLS381, BN254};
use crate::generation::prover_input::EvmField::{
Bls381Base, Bls381Scalar, Bn254Base, Bn254Scalar, Secp256k1Base, Secp256k1Scalar,
@ -252,7 +247,6 @@ impl<F: Field> GenerationState<F> {
/// Returns the next used jump address.
fn run_next_jumpdest_table_address(&mut self) -> Result<U256, ProgramError> {
let context = u256_to_usize(stack_peek(self, 0)? >> CONTEXT_SCALING_FACTOR)?;
let code_len = u256_to_usize(self.get_current_code_len()?.into());
if self.jumpdest_table.is_none() {
self.generate_jumpdest_table()?;
@ -300,10 +294,6 @@ impl<F: Field> GenerationState<F> {
let checkpoint = self.checkpoint();
let memory = self.memory.clone();
let code = self.get_current_code()?;
// We need to set the simulated jumpdest bits to one as otherwise
// the simulation will fail.
// Simulate the user's code and (unnecessarily) part of the kernel code, skipping the validate table call
let Some(jumpdest_table) = simulate_cpu_between_labels_and_get_user_jumps(
"jumpdest_analysis_end",
@ -343,10 +333,6 @@ impl<F: Field> GenerationState<F> {
)));
}
fn get_current_code(&self) -> Result<Vec<u8>, ProgramError> {
self.get_code(self.registers.context)
}
fn get_code(&self, context: usize) -> Result<Vec<u8>, ProgramError> {
let code_len = self.get_code_len(context)?;
let code = (0..code_len)
@ -360,17 +346,6 @@ impl<F: Field> GenerationState<F> {
Ok(code)
}
fn set_code_len(&mut self, len: usize) {
self.memory.set(
MemoryAddress::new(
self.registers.context,
Segment::ContextMetadata,
ContextMetadata::CodeSize.unscale(),
),
len.into(),
)
}
fn get_code_len(&self, context: usize) -> Result<usize, ProgramError> {
let code_len = u256_to_usize(self.memory.get(MemoryAddress::new(
context,
@ -379,22 +354,6 @@ impl<F: Field> GenerationState<F> {
)))?;
Ok(code_len)
}
fn get_current_code_len(&self) -> Result<usize, ProgramError> {
self.get_code_len(self.registers.context)
}
fn set_jumpdest_bits(&mut self, code: &[u8]) {
const JUMPDEST_OPCODE: u8 = 0x5b;
for (pos, opcode) in CodeIterator::new(code) {
if opcode == JUMPDEST_OPCODE {
self.memory.set(
MemoryAddress::new(self.registers.context, Segment::JumpdestBits, pos),
U256::one(),
);
}
}
}
}
/// For all addresses in `jumpdest_table`, each bounded by `largest_address`,
@ -411,7 +370,7 @@ fn get_proofs_and_jumpdests(
const PUSH32_OPCODE: u8 = 0x7f;
let (proofs, _) = CodeIterator::until(code, largest_address + 1).fold(
(vec![], 0),
|(mut proofs, acc), (pos, opcode)| {
|(mut proofs, acc), (pos, _opcode)| {
let has_prefix = if let Some(prefix_start) = pos.checked_sub(32) {
code[prefix_start..pos]
.iter()
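
The fold above walks the code with a `CodeIterator` that skips PUSH immediates, so a `0x5b` byte inside push data is never mistaken for a JUMPDEST. A self-contained sketch of that traversal (standard EVM opcode values; the helper itself is illustrative):

```rust
// Collect the positions of real JUMPDESTs, skipping PUSH1..PUSH32
// immediate data while walking the bytecode.
fn jumpdest_positions(code: &[u8]) -> Vec<usize> {
    const JUMPDEST_OPCODE: u8 = 0x5b;
    const PUSH1_OPCODE: u8 = 0x60;
    const PUSH32_OPCODE: u8 = 0x7f;

    let mut positions = Vec::new();
    let mut pos = 0;
    while pos < code.len() {
        let opcode = code[pos];
        if opcode == JUMPDEST_OPCODE {
            positions.push(pos);
        }
        // A PUSHn opcode carries n bytes of immediate data.
        let imm = if (PUSH1_OPCODE..=PUSH32_OPCODE).contains(&opcode) {
            (opcode - PUSH1_OPCODE + 1) as usize
        } else {
            0
        };
        pos += 1 + imm;
    }
    positions
}

fn main() {
    // PUSH1 0x5b; JUMPDEST -- only the byte at index 2 is a real JUMPDEST.
    assert_eq!(jumpdest_positions(&[0x60, 0x5b, 0x5b]), vec![2]);
}
```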

@ -1,10 +1,8 @@
use std::collections::{BTreeSet, HashMap};
use std::collections::HashMap;
use ethereum_types::{Address, BigEndianHash, H160, H256, U256};
use keccak_hash::keccak;
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use super::mpt::{load_all_mpts, TrieRootPtrs};
use super::TrieInputs;

@ -4,7 +4,7 @@ use core::iter::repeat;
use itertools::Itertools;
use num_bigint::BigUint;
use plonky2::field::batch_util::{batch_add_inplace, batch_multiply_inplace};
use plonky2::field::batch_util::batch_add_inplace;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
@ -108,14 +108,6 @@ impl<F: Field> Filter<F> {
.map(|col| col.eval_table(table, row))
.sum()
}
pub(crate) fn eval_all_rows(&self, table: &[PolynomialValues<F>]) -> Vec<F> {
let length = table[0].len();
(0..length)
.map(|row| self.eval_table(table, row))
.collect::<Vec<F>>()
}
}
/// Represents two linear combinations of columns, corresponding to the current and next row values.
@ -480,7 +472,6 @@ pub(crate) fn lookup_helper_columns<F: Field>(
assert!(BigUint::from(num_total_logup_entries) < F::characteristic());
let num_helper_columns = lookup.num_helper_columns(constraint_degree);
let mut helper_columns: Vec<PolynomialValues<F>> = Vec::with_capacity(num_helper_columns);
let looking_cols = lookup
.columns
@ -672,7 +663,6 @@ pub(crate) fn get_helper_cols<F: Field>(
let mut helper_columns = Vec::with_capacity(num_helper_columns);
let mut filter_index = 0;
for mut cols_filts in &columns_filters.iter().chunks(constraint_degree - 1) {
let (first_col, first_filter) = cols_filts.next().unwrap();
@ -843,7 +833,6 @@ pub(crate) fn eval_ext_lookups_circuit<
lookup_vars: LookupCheckVarsTarget<D>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();
let degree = stark.constraint_degree();
let lookups = stark.lookups();
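
The removed `eval_all_rows` was a thin wrapper: evaluate the filter at every row index of the table. A simplified model with `u64` columns in place of `PolynomialValues<F>`:

```rust
// A table as columns of values; evaluating per row across the whole
// table is a map over 0..table[0].len(), as the removed helper did.
type Table = Vec<Vec<u64>>; // table[column][row]

fn eval_all_rows(table: &Table, eval_row: impl Fn(&Table, usize) -> u64) -> Vec<u64> {
    let length = table[0].len();
    (0..length).map(|row| eval_row(table, row)).collect()
}

fn main() {
    // Example "filter": the sum of columns 0 and 1 at each row.
    let table: Table = vec![vec![1, 2, 3], vec![10, 20, 30]];
    assert_eq!(eval_all_rows(&table, |t, r| t[0][r] + t[1][r]), vec![11, 22, 33]);
}
```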

@ -1,5 +1,4 @@
use ethereum_types::U256;
use num::traits::AsPrimitive;
pub(crate) const SEGMENT_SCALING_FACTOR: usize = 32;

@ -1,4 +1,4 @@
use ethereum_types::{Address, H160, H256, U256};
use ethereum_types::{Address, H256, U256};
use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::fri::oracle::PolynomialBatch;
@ -19,7 +19,6 @@ use serde::{Deserialize, Serialize};
use crate::all_stark::NUM_TABLES;
use crate::config::StarkConfig;
use crate::cross_table_lookup::GrandProductChallengeSet;
use crate::generation::mpt::TrieRootPtrs;
use crate::util::{get_h160, get_h256, h2u};
/// A STARK proof for each table, plus some metadata used to create recursive wrapper proofs.

@ -35,7 +35,6 @@ use crate::lookup::{lookup_helper_columns, Lookup, LookupCheckVars};
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof, StarkProofWithMetadata};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::witness::errors::ProgramError;
#[cfg(test)]
use crate::{
cross_table_lookup::testutils::check_ctls, verifier::testutils::get_memory_extra_looking_values,

@ -2,7 +2,7 @@ use core::array::from_fn;
use core::fmt::Debug;
use anyhow::Result;
use ethereum_types::{BigEndianHash, H256, U256};
use ethereum_types::{BigEndianHash, U256};
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::fri::witness_util::set_fri_proof_target;
@ -43,7 +43,7 @@ use crate::proof::{
TrieRootsTarget,
};
use crate::stark::Stark;
use crate::util::{h256_limbs, h2u, u256_limbs, u256_to_u32, u256_to_u64};
use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64};
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::witness::errors::ProgramError;
@ -348,7 +348,6 @@ fn verify_stark_proof_with_challenges_circuit<
.iter()
.map(|ctl| ctl.helper_columns.len())
.sum::<usize>();
let num_ctl_z_polys = ctl_vars.len();
let StarkOpeningSetTarget {
local_values,

@ -111,7 +111,6 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
let auxiliary_polys_info =
FriPolynomialInfo::from_range(AUXILIARY_ORACLE_INDEX, 0..num_auxiliary_polys);
let mut start_index = num_lookup_columns;
let ctl_zs_info = FriPolynomialInfo::from_range(
AUXILIARY_ORACLE_INDEX,
num_lookup_columns + num_ctl_helpers..num_auxiliary_polys,

@ -318,7 +318,7 @@ pub(crate) fn verify_stark_proof_with_challenges<
next_values,
auxiliary_polys,
auxiliary_polys_next,
ctl_zs_first,
ctl_zs_first: _,
quotient_polys,
} = &proof.openings;
let vars = S::EvaluationFrame::from_values(local_values, next_values);

@ -928,7 +928,7 @@ pub(crate) fn generate_exception<F: Field>(
row.general.stack_mut().stack_inv_aux = F::ONE;
}
fill_stack_fields(state, &mut row);
fill_stack_fields(state, &mut row)?;
row.general.exception_mut().exc_code_bits = [
F::from_bool(exc_code & 1 != 0),

@ -400,7 +400,7 @@ fn try_perform_instruction<F: Field>(
fill_op_flag(op, &mut row);
fill_stack_fields(state, &mut row);
fill_stack_fields(state, &mut row)?;
// Might write to general CPU columns when it shouldn't, but the correct values
// will overwrite them during op generation.
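
Both hunks above switch the `fill_stack_fields` call sites to `?`, so stack errors now propagate instead of being silently dropped. The minimal shape of that change (types are stand-ins, not the crate's real signatures):

```rust
// Stand-in error type for illustration.
#[derive(Debug)]
enum ProgramError {
    StackUnderflow,
}

// fill_stack_fields is now fallible...
fn fill_stack_fields(stack_len: usize) -> Result<(), ProgramError> {
    if stack_len == 0 {
        return Err(ProgramError::StackUnderflow);
    }
    Ok(())
}

// ...so callers propagate the error with `?` instead of ignoring it.
fn try_perform_instruction(stack_len: usize) -> Result<(), ProgramError> {
    fill_stack_fields(stack_len)?;
    Ok(())
}
```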