// plonky2/evm/src/prover.rs

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

use anyhow::{anyhow, Result};
use hashbrown::HashMap;
use itertools::Itertools;
use once_cell::sync::Lazy;
use plonky2::field::extension::Extendable;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericConfig;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use starky::config::StarkConfig;
#[cfg(debug_assertions)]
use starky::cross_table_lookup::debug_utils::check_ctls;
use starky::cross_table_lookup::{get_ctl_data, CtlData};
use starky::lookup::GrandProductChallengeSet;
use starky::proof::{MultiProof, StarkProofWithMetadata};
use starky::prover::prove_with_commitment;
use starky::stark::Stark;

use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::cpu::kernel::aggregator::KERNEL;
use crate::generation::{generate_traces, GenerationInputs};
use crate::get_challenges::observe_public_values;
use crate::proof::{AllProof, PublicValues};
#[cfg(debug_assertions)]
use crate::verifier::debug_utils::get_memory_extra_looking_values;

/// Generate traces, then create all STARK proofs.
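///
/// A minimal usage sketch (not a compiled doctest); the concrete field/config
/// choices and the `inputs` value below are assumptions made purely for
/// illustration:
///
/// ```ignore
/// use plonky2::field::goldilocks_field::GoldilocksField;
/// use plonky2::plonk::config::PoseidonGoldilocksConfig;
///
/// type F = GoldilocksField;
/// type C = PoseidonGoldilocksConfig;
/// const D: usize = 2;
///
/// let all_stark = AllStark::<F, D>::default();
/// let config = StarkConfig::standard_fast_config();
/// let mut timing = TimingTree::new("prove", log::Level::Debug);
/// // `inputs` is a `GenerationInputs` describing the transactions to prove.
/// let proof = prove::<F, C, D>(&all_stark, &config, inputs, &mut timing, None)?;
/// ```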
pub fn prove<F, C, const D: usize>(
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    inputs: GenerationInputs,
    timing: &mut TimingTree,
    abort_signal: Option<Arc<AtomicBool>>,
) -> Result<AllProof<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    timed!(timing, "build kernel", Lazy::force(&KERNEL));

    let (traces, public_values) = timed!(
        timing,
        "generate all traces",
        generate_traces(all_stark, inputs, config, timing)?
    );

    check_abort_signal(abort_signal.clone())?;

    let proof = prove_with_traces(
        all_stark,
        config,
        traces,
        public_values,
        timing,
        abort_signal,
    )?;

    Ok(proof)
}

/// Compute all STARK proofs.
pub(crate) fn prove_with_traces<F, C, const D: usize>(
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    trace_poly_values: [Vec<PolynomialValues<F>>; NUM_TABLES],
    public_values: PublicValues,
    timing: &mut TimingTree,
    abort_signal: Option<Arc<AtomicBool>>,
) -> Result<AllProof<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let rate_bits = config.fri_config.rate_bits;
    let cap_height = config.fri_config.cap_height;

    // For each STARK, we compute the polynomial commitments for the polynomials interpolating its trace.
    let trace_commitments = timed!(
        timing,
        "compute all trace commitments",
        trace_poly_values
            .iter()
            .zip_eq(Table::all())
            .map(|(trace, table)| {
                timed!(
                    timing,
                    &format!("compute trace commitment for {:?}", table),
                    PolynomialBatch::<F, C, D>::from_values(
                        trace.clone(),
                        rate_bits,
                        false,
                        cap_height,
                        timing,
                        None,
                    )
                )
            })
            .collect::<Vec<_>>()
    );

    // Get the Merkle caps for all trace commitments and observe them.
    let trace_caps = trace_commitments
        .iter()
        .map(|c| c.merkle_tree.cap.clone())
        .collect::<Vec<_>>();
    let mut challenger = Challenger::<F, C::Hasher>::new();
    for cap in &trace_caps {
        challenger.observe_cap(cap);
    }

    observe_public_values::<F, C, D>(&mut challenger, &public_values)
        .map_err(|_| anyhow::Error::msg("Invalid conversion of public values."))?;

    // For each STARK, compute its cross-table lookup Z polynomials and get the associated `CtlData`.
    let (ctl_challenges, ctl_data_per_table) = timed!(
        timing,
        "compute CTL data",
        get_ctl_data::<F, C, D, NUM_TABLES>(
            config,
            &trace_poly_values,
            &all_stark.cross_table_lookups,
            &mut challenger,
            all_stark.arithmetic_stark.constraint_degree()
        )
    );

    let stark_proofs = timed!(
        timing,
        "compute all proofs given commitments",
        prove_with_commitments(
            all_stark,
            config,
            &trace_poly_values,
            trace_commitments,
            ctl_data_per_table,
            &mut challenger,
            &ctl_challenges,
            timing,
            abort_signal,
        )?
    );

    // This is an expensive check, hence is only run when `debug_assertions` are enabled.
    #[cfg(debug_assertions)]
    {
        let mut extra_values = HashMap::new();
        extra_values.insert(
            *Table::Memory,
            get_memory_extra_looking_values(&public_values),
        );
        check_ctls(
            &trace_poly_values,
            &all_stark.cross_table_lookups,
            &extra_values,
        );
    }

    Ok(AllProof {
        multi_proof: MultiProof {
            stark_proofs,
            ctl_challenges,
        },
        public_values,
    })
}

/// Generates a proof for each STARK.
///
/// At this stage, we have computed the trace polynomial commitments for the various STARKs,
/// and we have the cross-table lookup data for each table, including the associated challenges.
///
/// - `trace_poly_values` are the trace values for each STARK.
/// - `trace_commitments` are the trace polynomial commitments for each STARK.
/// - `ctl_data_per_table` groups all the cross-table lookup data for each STARK.
///
/// Each STARK uses its associated data to generate a proof.
fn prove_with_commitments<F, C, const D: usize>(
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    trace_poly_values: &[Vec<PolynomialValues<F>>; NUM_TABLES],
    trace_commitments: Vec<PolynomialBatch<F, C, D>>,
    ctl_data_per_table: [CtlData<F>; NUM_TABLES],
    challenger: &mut Challenger<F, C::Hasher>,
    ctl_challenges: &GrandProductChallengeSet<F>,
    timing: &mut TimingTree,
    abort_signal: Option<Arc<AtomicBool>>,
) -> Result<[StarkProofWithMetadata<F, C, D>; NUM_TABLES]>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let arithmetic_proof = timed!(
        timing,
        "prove Arithmetic STARK",
        prove_single_table(
            &all_stark.arithmetic_stark,
            config,
            &trace_poly_values[Table::Arithmetic as usize],
            &trace_commitments[Table::Arithmetic as usize],
            &ctl_data_per_table[Table::Arithmetic as usize],
            ctl_challenges,
            challenger,
            timing,
            abort_signal.clone(),
        )?
    );
    let byte_packing_proof = timed!(
        timing,
        "prove byte packing STARK",
        prove_single_table(
            &all_stark.byte_packing_stark,
            config,
            &trace_poly_values[Table::BytePacking as usize],
            &trace_commitments[Table::BytePacking as usize],
            &ctl_data_per_table[Table::BytePacking as usize],
            ctl_challenges,
            challenger,
            timing,
            abort_signal.clone(),
        )?
    );
    let cpu_proof = timed!(
        timing,
        "prove CPU STARK",
        prove_single_table(
            &all_stark.cpu_stark,
            config,
            &trace_poly_values[Table::Cpu as usize],
            &trace_commitments[Table::Cpu as usize],
            &ctl_data_per_table[Table::Cpu as usize],
            ctl_challenges,
            challenger,
            timing,
            abort_signal.clone(),
        )?
    );
    let keccak_proof = timed!(
        timing,
        "prove Keccak STARK",
        prove_single_table(
            &all_stark.keccak_stark,
            config,
            &trace_poly_values[Table::Keccak as usize],
            &trace_commitments[Table::Keccak as usize],
            &ctl_data_per_table[Table::Keccak as usize],
            ctl_challenges,
            challenger,
            timing,
            abort_signal.clone(),
        )?
    );
    let keccak_sponge_proof = timed!(
        timing,
        "prove Keccak sponge STARK",
        prove_single_table(
            &all_stark.keccak_sponge_stark,
            config,
            &trace_poly_values[Table::KeccakSponge as usize],
            &trace_commitments[Table::KeccakSponge as usize],
            &ctl_data_per_table[Table::KeccakSponge as usize],
            ctl_challenges,
            challenger,
            timing,
            abort_signal.clone(),
        )?
    );
    let logic_proof = timed!(
        timing,
        "prove logic STARK",
        prove_single_table(
            &all_stark.logic_stark,
            config,
            &trace_poly_values[Table::Logic as usize],
            &trace_commitments[Table::Logic as usize],
            &ctl_data_per_table[Table::Logic as usize],
            ctl_challenges,
            challenger,
            timing,
            abort_signal.clone(),
        )?
    );
    let memory_proof = timed!(
        timing,
        "prove memory STARK",
        prove_single_table(
            &all_stark.memory_stark,
            config,
            &trace_poly_values[Table::Memory as usize],
            &trace_commitments[Table::Memory as usize],
            &ctl_data_per_table[Table::Memory as usize],
            ctl_challenges,
            challenger,
            timing,
            abort_signal,
        )?
    );

    Ok([
        arithmetic_proof,
        byte_packing_proof,
        cpu_proof,
        keccak_proof,
        keccak_sponge_proof,
        logic_proof,
        memory_proof,
    ])
}

/// Computes a proof for a single STARK table, including:
/// - the initial state of the challenger,
/// - all the required Merkle caps,
/// - all the required polynomial and FRI argument openings.
pub(crate) fn prove_single_table<F, C, S, const D: usize>(
    stark: &S,
    config: &StarkConfig,
    trace_poly_values: &[PolynomialValues<F>],
    trace_commitment: &PolynomialBatch<F, C, D>,
    ctl_data: &CtlData<F>,
    ctl_challenges: &GrandProductChallengeSet<F>,
    challenger: &mut Challenger<F, C::Hasher>,
    timing: &mut TimingTree,
    abort_signal: Option<Arc<AtomicBool>>,
) -> Result<StarkProofWithMetadata<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    check_abort_signal(abort_signal.clone())?;

    // Clear buffered outputs, and capture the challenger state prior to this proof
    // so that it can be returned alongside the proof as metadata.
    let init_challenger_state = challenger.compact();

    prove_with_commitment(
        stark,
        config,
        trace_poly_values,
        trace_commitment,
        Some(ctl_data),
        Some(ctl_challenges),
        challenger,
        &[],
        timing,
    )
    .map(|proof_with_pis| StarkProofWithMetadata {
        proof: proof_with_pis.proof,
        init_challenger_state,
    })
}
/// Utility method that checks whether a kill signal has been emitted by one of the workers,
/// which will result in an early abort for all the other processes involved in the same set
/// of transactions.
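///
/// A minimal sketch of the intended pattern (hypothetical worker setup, not a
/// compiled doctest): the flag is shared across workers, and any worker can flip
/// it to abort the whole job.
///
/// ```ignore
/// let abort_signal = Arc::new(AtomicBool::new(false));
///
/// // Some other worker decides to abort the job.
/// abort_signal.store(true, Ordering::Relaxed);
///
/// // Provers periodically poll the flag and bail out early.
/// check_abort_signal(Some(abort_signal.clone()))?;
/// ```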
pub fn check_abort_signal(abort_signal: Option<Arc<AtomicBool>>) -> Result<()> {
    if let Some(signal) = abort_signal {
        if signal.load(Ordering::Relaxed) {
            return Err(anyhow!("Stopping job from abort signal."));
        }
    }

    Ok(())
}