Remove some dead_code in EVM crate (#1281)

* Remove unnecessary CpuArithmeticView.

* Remove AllChallengerState

* Remove RecursiveAllProof

* Remove unused generate methods

* Remove dead_code from cpu/columns

* Remove todo

---------

Co-authored-by: Linda Guiga <lindaguiga3@gmail.com>
This commit is contained in:
Robin Salen 2023-10-09 09:07:01 -04:00 committed by GitHub
parent 8a5eed9d1c
commit 41a29f069b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 11 additions and 224 deletions

View File

@@ -6,7 +6,6 @@ use std::mem::{size_of, transmute};
/// operation is occurring at this row.
#[derive(Clone, Copy)]
pub(crate) union CpuGeneralColumnsView<T: Copy> {
arithmetic: CpuArithmeticView<T>,
exception: CpuExceptionView<T>,
logic: CpuLogicView<T>,
jumps: CpuJumpsView<T>,
@@ -14,16 +13,6 @@ pub(crate) union CpuGeneralColumnsView<T: Copy> {
}
impl<T: Copy> CpuGeneralColumnsView<T> {
// SAFETY: Each view is a valid interpretation of the underlying array.
pub(crate) fn arithmetic(&self) -> &CpuArithmeticView<T> {
unsafe { &self.arithmetic }
}
// SAFETY: Each view is a valid interpretation of the underlying array.
pub(crate) fn arithmetic_mut(&mut self) -> &mut CpuArithmeticView<T> {
unsafe { &mut self.arithmetic }
}
// SAFETY: Each view is a valid interpretation of the underlying array.
pub(crate) fn exception(&self) -> &CpuExceptionView<T> {
unsafe { &self.exception }
@@ -94,12 +83,6 @@ impl<T: Copy> BorrowMut<[T; NUM_SHARED_COLUMNS]> for CpuGeneralColumnsView<T> {
}
}
#[derive(Copy, Clone)]
pub(crate) struct CpuArithmeticView<T: Copy> {
// TODO: Add "looking" columns for the arithmetic CTL.
tmp: T, // temporary, to suppress errors
}
#[derive(Copy, Clone)]
pub(crate) struct CpuExceptionView<T: Copy> {
// Exception code as little-endian bits.

View File

@@ -1,6 +1,3 @@
// TODO: remove when possible.
#![allow(dead_code)]
use std::borrow::{Borrow, BorrowMut};
use std::fmt::Debug;
use std::mem::{size_of, transmute};

View File

@@ -2,7 +2,7 @@ use std::borrow::{Borrow, BorrowMut};
use std::mem::{size_of, transmute};
use std::ops::{Deref, DerefMut};
use crate::util::{indices_arr, transmute_no_compile_time_size_checks};
use crate::util::transmute_no_compile_time_size_checks;
#[repr(C)]
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
@@ -73,10 +73,3 @@ impl<T: Copy> DerefMut for OpsColumnsView<T> {
unsafe { transmute(self) }
}
}
const fn make_col_map() -> OpsColumnsView<usize> {
let indices_arr = indices_arr::<NUM_OPS_COLUMNS>();
unsafe { transmute::<[usize; NUM_OPS_COLUMNS], OpsColumnsView<usize>>(indices_arr) }
}
pub const COL_MAP: OpsColumnsView<usize> = make_col_map();

View File

@@ -1,4 +1,4 @@
use std::borrow::{Borrow, BorrowMut};
use std::borrow::Borrow;
use std::iter::repeat;
use std::marker::PhantomData;
@@ -9,10 +9,11 @@ use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use super::columns::CpuColumnsView;
use super::halt;
use crate::all_stark::Table;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::{CpuColumnsView, COL_MAP, NUM_CPU_COLUMNS};
use crate::cpu::columns::{COL_MAP, NUM_CPU_COLUMNS};
use crate::cpu::membus::NUM_GP_CHANNELS;
use crate::cpu::{
bootstrap_kernel, contextops, control_flow, decode, dup_swap, gas, jumps, membus, memio,
@@ -198,15 +199,6 @@ pub struct CpuStark<F, const D: usize> {
pub f: PhantomData<F>,
}
impl<F: RichField, const D: usize> CpuStark<F, D> {
// TODO: Remove?
pub fn generate(&self, local_values: &mut [F; NUM_CPU_COLUMNS]) {
let local_values: &mut CpuColumnsView<_> = local_values.borrow_mut();
decode::generate(local_values);
membus::generate(local_values);
}
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, NUM_CPU_COLUMNS>
where

View File

@@ -61,51 +61,6 @@ const COMBINED_OPCODES: [usize; 6] = [
COL_MAP.op.m_op_general,
];
pub fn generate<F: RichField>(lv: &mut CpuColumnsView<F>) {
let cycle_filter: F = COL_MAP.op.iter().map(|&col_i| lv[col_i]).sum();
// This assert is not _strictly_ necessary, but I include it as a sanity check.
assert_eq!(cycle_filter, F::ONE, "cycle_filter should be 0 or 1");
// Validate all opcode bits.
for bit in lv.opcode_bits.into_iter() {
assert!(bit.to_canonical_u64() <= 1);
}
let opcode = lv
.opcode_bits
.into_iter()
.enumerate()
.map(|(i, bit)| bit.to_canonical_u64() << i)
.sum::<u64>() as u8;
let top_bits: [u8; 9] = [
0,
opcode & 0x80,
opcode & 0xc0,
opcode & 0xe0,
opcode & 0xf0,
opcode & 0xf8,
opcode & 0xfc,
opcode & 0xfe,
opcode,
];
let kernel = lv.is_kernel_mode.to_canonical_u64();
assert!(kernel <= 1);
let kernel = kernel != 0;
for (oc, block_length, kernel_only, col) in OPCODES {
let available = !kernel_only || kernel;
let opcode_match = top_bits[8 - block_length] == oc;
let flag = available && opcode_match;
lv[col] = F::from_bool(flag);
}
if opcode == 0xfb || opcode == 0xfc {
lv.op.m_op_general = F::from_bool(kernel);
}
}
/// Break up an opcode (which is 8 bits long) into its eight bits.
const fn bits_from_opcode(opcode: u8) -> [bool; 8] {
[

View File

@@ -1,10 +1,8 @@
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::PrimeField64;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use super::columns::COL_MAP;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cpu::columns::CpuColumnsView;
@@ -33,18 +31,6 @@ pub mod channel_indices {
/// These limitations save us numerous columns in the CPU table.
pub const NUM_CHANNELS: usize = channel_indices::GP.end;
/// Calculates `lv.stack_len_bounds_aux`. Note that this must be run after decode.
pub fn generate<F: PrimeField64>(lv: &CpuColumnsView<F>) {
let cycle_filter: F = COL_MAP.op.iter().map(|&col_i| lv[col_i]).sum();
if cycle_filter != F::ZERO {
assert!(lv.is_kernel_mode.to_canonical_u64() <= 1);
}
for channel in lv.mem_channels {
assert!(channel.used.to_canonical_u64() <= 1);
}
}
pub fn eval_packed<P: PackedField>(
lv: &CpuColumnsView<P>,
yield_constr: &mut ConstraintConsumer<P>,

View File

@@ -6,7 +6,6 @@ use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use crate::all_stark::{AllStark, NUM_TABLES};
use crate::config::StarkConfig;
use crate::cross_table_lookup::get_grand_product_challenge_set;
use crate::proof::*;
@@ -234,39 +233,6 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
ctl_challenges,
})
}
#[allow(unused)] // TODO: should be used soon
pub(crate) fn get_challenger_states(
&self,
all_stark: &AllStark<F, D>,
config: &StarkConfig,
) -> AllChallengerState<F, C::Hasher, D> {
let mut challenger = Challenger::<F, C::Hasher>::new();
for proof in &self.stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
}
observe_public_values::<F, C, D>(&mut challenger, &self.public_values);
let ctl_challenges =
get_grand_product_challenge_set(&mut challenger, config.num_challenges);
let lookups = all_stark.num_lookups_helper_columns(config);
let mut challenger_states = vec![challenger.compact()];
for i in 0..NUM_TABLES {
self.stark_proofs[i]
.proof
.get_challenges(&mut challenger, config);
challenger_states.push(challenger.compact());
}
AllChallengerState {
states: challenger_states.try_into().unwrap(),
ctl_challenges,
}
}
}
impl<F, C, const D: usize> StarkProof<F, C, D>

View File

@@ -39,14 +39,6 @@ pub(crate) struct AllProofChallenges<F: RichField + Extendable<D>, const D: usiz
pub ctl_challenges: GrandProductChallengeSet<F>,
}
#[allow(unused)] // TODO: should be used soon
pub(crate) struct AllChallengerState<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> {
/// Sponge state of the challenger before starting each proof,
/// along with the final state after all proofs are done. This final state isn't strictly needed.
pub states: [H::Permutation; NUM_TABLES + 1],
pub ctl_challenges: GrandProductChallengeSet<F>,
}
/// Memory values which are public.
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
pub struct PublicValues {
@@ -697,8 +689,7 @@
C: GenericConfig<D, F = F>,
{
pub(crate) init_challenger_state: <C::Hasher as Hasher<F>>::Permutation,
// TODO: set it back to pub(crate) when cpu trace len is a public input
pub proof: StarkProof<F, C, D>,
pub(crate) proof: StarkProof<F, C, D>,
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> StarkProof<F, C, D> {

View File

@@ -1,6 +1,6 @@
use std::fmt::Debug;
use anyhow::{ensure, Result};
use anyhow::Result;
use ethereum_types::{BigEndianHash, U256};
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
@@ -10,13 +10,13 @@ use plonky2::gates::gate::GateRef;
use plonky2::gates::noop::NoopGate;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::challenger::RecursiveChallenger;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::iop::witness::{PartialWitness, Witness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData, VerifierCircuitData};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget};
use plonky2::util::reducing::ReducingFactorTarget;
use plonky2::util::serialization::{
@@ -25,13 +25,12 @@ use plonky2::util::serialization::{
use plonky2::with_context;
use plonky2_util::log2_ceil;
use crate::all_stark::{Table, NUM_TABLES};
use crate::all_stark::Table;
use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{
get_grand_product_challenge_set, verify_cross_table_lookups, CrossTableLookup,
CtlCheckVarsTarget, GrandProductChallenge, GrandProductChallengeSet,
CrossTableLookup, CtlCheckVarsTarget, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::LookupCheckVarsTarget;
@@ -48,15 +47,6 @@ use crate::util::{h256_limbs, u256_limbs, u256_to_u32, u256_to_u64};
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::witness::errors::ProgramError;
/// Table-wise recursive proofs of an `AllProof`.
pub struct RecursiveAllProof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
pub recursive_proofs: [ProofWithPublicInputs<F, C, D>; NUM_TABLES],
}
pub(crate) struct PublicInputs<T: Copy + Default + Eq + PartialEq + Debug, P: PlonkyPermutation<T>>
{
pub(crate) trace_cap: Vec<Vec<T>>,
@@ -98,72 +88,6 @@ impl<T: Copy + Debug + Default + Eq + PartialEq, P: PlonkyPermutation<T>> Public
}
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
RecursiveAllProof<F, C, D>
{
/// Verify every recursive proof.
pub fn verify(
self,
verifier_data: &[VerifierCircuitData<F, C, D>; NUM_TABLES],
cross_table_lookups: Vec<CrossTableLookup<F>>,
inner_config: &StarkConfig,
) -> Result<()> {
let pis: [_; NUM_TABLES] = core::array::from_fn(|i| {
PublicInputs::<F, <C::Hasher as Hasher<F>>::Permutation>::from_vec(
&self.recursive_proofs[i].public_inputs,
inner_config,
)
});
let mut challenger = Challenger::<F, C::Hasher>::new();
for pi in &pis {
for h in &pi.trace_cap {
challenger.observe_elements(h);
}
}
// TODO: Observe public values if the code isn't deprecated.
let ctl_challenges =
get_grand_product_challenge_set(&mut challenger, inner_config.num_challenges);
// Check that the correct CTL challenges are used in every proof.
for pi in &pis {
ensure!(ctl_challenges == pi.ctl_challenges);
}
let state = challenger.compact();
ensure!(state == pis[0].challenger_state_before);
// Check that the challenger state is consistent between proofs.
for i in 1..NUM_TABLES {
ensure!(pis[i].challenger_state_before == pis[i - 1].challenger_state_after);
}
// Dummy values which will make the check fail.
// TODO: Fix this if the code isn't deprecated.
let mut extra_looking_products = Vec::new();
for i in 0..NUM_TABLES {
extra_looking_products.push(Vec::new());
for _ in 0..inner_config.num_challenges {
extra_looking_products[i].push(F::ONE);
}
}
// Verify the CTL checks.
verify_cross_table_lookups::<F, D>(
&cross_table_lookups,
pis.map(|p| p.ctl_zs_first),
extra_looking_products,
inner_config,
)?;
// Verify the proofs.
for (proof, verifier_data) in self.recursive_proofs.into_iter().zip(verifier_data) {
verifier_data.verify(proof)?;
}
Ok(())
}
}
/// Represents a circuit which recursively verifies a STARK proof.
#[derive(Eq, PartialEq, Debug)]
pub(crate) struct StarkWrapperCircuit<F, C, const D: usize>