use anyhow::{ensure, Result};
use itertools::Itertools;
use once_cell::sync::Lazy;
use plonky2::field::extension::Extendable;
use plonky2::field::packable::Packable;
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::field::types::Field;
use plonky2::field::zero_poly_coset::ZeroPolyOnCoset;
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericConfig;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
use plonky2_maybe_rayon::*;
use plonky2_util::{log2_ceil, log2_strict};

use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cross_table_lookup::{
    cross_table_lookup_data, get_grand_product_challenge_set, CtlCheckVars, CtlData,
    GrandProductChallengeSet,
};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::generation::{generate_traces, GenerationInputs};
use crate::get_challenges::observe_public_values;
use crate::lookup::{lookup_helper_columns, Lookup, LookupCheckVars};
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof, StarkProofWithMetadata};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
#[cfg(test)]
use crate::{
    cross_table_lookup::testutils::check_ctls, verifier::testutils::get_memory_extra_looking_values,
};

/// Generate traces, then create all STARK proofs.
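///
/// # Example (sketch)
///
/// A minimal sketch of how a caller might drive `prove`, assuming the Goldilocks field with
/// `PoseidonGoldilocksConfig`, the crate's `standard_fast_config`, and some
/// `inputs: GenerationInputs` built by the caller; marked `ignore` since it is illustrative only.
///
/// ```ignore
/// use plonky2::field::goldilocks_field::GoldilocksField;
/// use plonky2::plonk::config::PoseidonGoldilocksConfig;
/// use plonky2::util::timing::TimingTree;
///
/// const D: usize = 2;
/// type C = PoseidonGoldilocksConfig;
/// type F = GoldilocksField;
///
/// let all_stark = AllStark::<F, D>::default();
/// let config = StarkConfig::standard_fast_config();
/// let mut timing = TimingTree::default();
/// // `inputs` describes the block/transaction to prove.
/// let proof = prove::<F, C, D>(&all_stark, &config, inputs, &mut timing)?;
/// ```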
pub fn prove<F, C, const D: usize>(
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    inputs: GenerationInputs,
    timing: &mut TimingTree,
) -> Result<AllProof<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    timed!(timing, "build kernel", Lazy::force(&KERNEL));
    let (traces, public_values) = timed!(
        timing,
        "generate all traces",
        generate_traces(all_stark, inputs, config, timing)?
    );
    let proof = prove_with_traces(all_stark, config, traces, public_values, timing)?;
    Ok(proof)
}

/// Compute all STARK proofs.
pub(crate) fn prove_with_traces<F, C, const D: usize>(
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    trace_poly_values: [Vec<PolynomialValues<F>>; NUM_TABLES],
    public_values: PublicValues,
    timing: &mut TimingTree,
) -> Result<AllProof<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let rate_bits = config.fri_config.rate_bits;
    let cap_height = config.fri_config.cap_height;

    // For each STARK, we compute the polynomial commitments for the polynomials interpolating its trace.
    let trace_commitments = timed!(
        timing,
        "compute all trace commitments",
        trace_poly_values
            .iter()
            .zip_eq(Table::all())
            .map(|(trace, table)| {
                timed!(
                    timing,
                    &format!("compute trace commitment for {:?}", table),
                    PolynomialBatch::<F, C, D>::from_values(
                        // TODO: Cloning this isn't great; consider having `from_values` accept a reference,
                        // or having `compute_permutation_z_polys` read trace values from the `PolynomialBatch`.
                        trace.clone(),
                        rate_bits,
                        false,
                        cap_height,
                        timing,
                        None,
                    )
                )
            })
            .collect::<Vec<_>>()
    );

    // Get the Merkle caps for all trace commitments and observe them.
    let trace_caps = trace_commitments
        .iter()
        .map(|c| c.merkle_tree.cap.clone())
        .collect::<Vec<_>>();
    let mut challenger = Challenger::<F, C::Hasher>::new();
    for cap in &trace_caps {
        challenger.observe_cap(cap);
    }

    observe_public_values::<F, C, D>(&mut challenger, &public_values)
        .map_err(|_| anyhow::Error::msg("Invalid conversion of public values."))?;

    // Get challenges for the cross-table lookups.
    let ctl_challenges = get_grand_product_challenge_set(&mut challenger, config.num_challenges);
    // For each STARK, compute its cross-table lookup Z polynomials and get the associated `CtlData`.
    let ctl_data_per_table = timed!(
        timing,
        "compute CTL data",
        cross_table_lookup_data::<F, D>(
            &trace_poly_values,
            &all_stark.cross_table_lookups,
            &ctl_challenges,
        )
    );

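    // Generate a proof for each STARK table, reusing the shared trace commitments, the CTL data,
    // and the running Fiat-Shamir challenger.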
    let stark_proofs = timed!(
        timing,
        "compute all proofs given commitments",
        prove_with_commitments(
            all_stark,
            config,
            &trace_poly_values,
            trace_commitments,
            ctl_data_per_table,
            &mut challenger,
            &ctl_challenges,
            timing
        )?
    );

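    // In tests only: sanity-check that the cross-table lookups actually balance across tables
    // before returning the proof.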
    #[cfg(test)]
    {
        check_ctls(
            &trace_poly_values,
            &all_stark.cross_table_lookups,
            &get_memory_extra_looking_values(&public_values),
        );
    }

    Ok(AllProof {
        stark_proofs,
        ctl_challenges,
        public_values,
    })
}

/// Generates a proof for each STARK.
/// At this stage, we have computed the trace polynomial commitments for the various STARKs,
/// and we have the cross-table lookup data for each table, including the associated challenges.
/// - `trace_poly_values` are the trace values for each STARK.
/// - `trace_commitments` are the trace polynomial commitments for each STARK.
/// - `ctl_data_per_table` groups all the cross-table lookup data for each STARK.
/// Each STARK uses its associated data to generate a proof.
fn prove_with_commitments<F, C, const D: usize>(
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    trace_poly_values: &[Vec<PolynomialValues<F>>; NUM_TABLES],
    trace_commitments: Vec<PolynomialBatch<F, C, D>>,
    ctl_data_per_table: [CtlData<F>; NUM_TABLES],
    challenger: &mut Challenger<F, C::Hasher>,
    ctl_challenges: &GrandProductChallengeSet<F>,
    timing: &mut TimingTree,
) -> Result<[StarkProofWithMetadata<F, C, D>; NUM_TABLES]>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let arithmetic_proof = timed!(
        timing,
        "prove Arithmetic STARK",
        prove_single_table(
            &all_stark.arithmetic_stark,
            config,
            &trace_poly_values[Table::Arithmetic as usize],
            &trace_commitments[Table::Arithmetic as usize],
            &ctl_data_per_table[Table::Arithmetic as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
    );
    let byte_packing_proof = timed!(
        timing,
        "prove byte packing STARK",
        prove_single_table(
            &all_stark.byte_packing_stark,
            config,
            &trace_poly_values[Table::BytePacking as usize],
            &trace_commitments[Table::BytePacking as usize],
            &ctl_data_per_table[Table::BytePacking as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
    );
    let cpu_proof = timed!(
        timing,
        "prove CPU STARK",
        prove_single_table(
            &all_stark.cpu_stark,
            config,
            &trace_poly_values[Table::Cpu as usize],
            &trace_commitments[Table::Cpu as usize],
            &ctl_data_per_table[Table::Cpu as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
    );
    let keccak_proof = timed!(
        timing,
        "prove Keccak STARK",
        prove_single_table(
            &all_stark.keccak_stark,
            config,
            &trace_poly_values[Table::Keccak as usize],
            &trace_commitments[Table::Keccak as usize],
            &ctl_data_per_table[Table::Keccak as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
    );
    let keccak_sponge_proof = timed!(
        timing,
        "prove Keccak sponge STARK",
        prove_single_table(
            &all_stark.keccak_sponge_stark,
            config,
            &trace_poly_values[Table::KeccakSponge as usize],
            &trace_commitments[Table::KeccakSponge as usize],
            &ctl_data_per_table[Table::KeccakSponge as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
    );
    let logic_proof = timed!(
        timing,
        "prove logic STARK",
        prove_single_table(
            &all_stark.logic_stark,
            config,
            &trace_poly_values[Table::Logic as usize],
            &trace_commitments[Table::Logic as usize],
            &ctl_data_per_table[Table::Logic as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
    );
    let memory_proof = timed!(
        timing,
        "prove memory STARK",
        prove_single_table(
            &all_stark.memory_stark,
            config,
            &trace_poly_values[Table::Memory as usize],
            &trace_commitments[Table::Memory as usize],
            &ctl_data_per_table[Table::Memory as usize],
            ctl_challenges,
            challenger,
            timing,
        )?
    );

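    // The proofs are returned in `Table` order, matching how tables are indexed elsewhere.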
    Ok([
        arithmetic_proof,
        byte_packing_proof,
        cpu_proof,
        keccak_proof,
        keccak_sponge_proof,
        logic_proof,
        memory_proof,
    ])
}

/// Computes a proof for a single STARK table, including:
/// - the initial state of the challenger,
/// - all the required Merkle caps,
/// - all the required polynomial and FRI argument openings.
pub(crate) fn prove_single_table<F, C, S, const D: usize>(
    stark: &S,
    config: &StarkConfig,
    trace_poly_values: &[PolynomialValues<F>],
    trace_commitment: &PolynomialBatch<F, C, D>,
    ctl_data: &CtlData<F>,
    ctl_challenges: &GrandProductChallengeSet<F>,
    challenger: &mut Challenger<F, C::Hasher>,
    timing: &mut TimingTree,
) -> Result<StarkProofWithMetadata<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    let degree = trace_poly_values[0].len();
    let degree_bits = log2_strict(degree);
    let fri_params = config.fri_params(degree_bits);
    let rate_bits = config.fri_config.rate_bits;
    let cap_height = config.fri_config.cap_height;
    assert!(
        fri_params.total_arities() <= degree_bits + rate_bits - cap_height,
        "FRI total reduction arity is too large.",
    );

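    // Save the challenger state before this table's proof so it can be stored alongside the
    // proof and the transcript re-derived later.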
    let init_challenger_state = challenger.compact();

    let constraint_degree = stark.constraint_degree();
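    // Reuse the CTL grand-product challenges (their `beta` components) as the challenges for
    // this STARK's lookup argument, if it has one.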
    let lookup_challenges = stark.uses_lookups().then(|| {
        ctl_challenges
            .challenges
            .iter()
            .map(|ch| ch.beta)
            .collect::<Vec<_>>()
    });
    let lookups = stark.lookups();
    let lookup_helper_columns = timed!(
        timing,
        "compute lookup helper columns",
        lookup_challenges.as_ref().map(|challenges| {
            let mut columns = Vec::new();
            for lookup in &lookups {
                for &challenge in challenges {
                    columns.extend(lookup_helper_columns(
                        lookup,
                        trace_poly_values,
                        challenge,
                        constraint_degree,
                    ));
                }
            }
            columns
        })
    );
    let num_lookup_columns = lookup_helper_columns.as_ref().map(|v| v.len()).unwrap_or(0);

    // We add CTLs to the permutation arguments so that we can batch commit to
    // all auxiliary polynomials.
    let auxiliary_polys = match lookup_helper_columns {
        None => ctl_data.z_polys(),
        Some(mut lookup_columns) => {
            lookup_columns.extend(ctl_data.z_polys());
            lookup_columns
        }
    };
    assert!(!auxiliary_polys.is_empty(), "No CTL?");

    // Get the polynomial commitments for all auxiliary polynomials.
    let auxiliary_polys_commitment = timed!(
        timing,
        "compute auxiliary polynomials commitment",
        PolynomialBatch::from_values(
            auxiliary_polys,
            rate_bits,
            false,
            config.fri_config.cap_height,
            timing,
            None,
        )
    );

    let auxiliary_polys_cap = auxiliary_polys_commitment.merkle_tree.cap.clone();
    challenger.observe_cap(&auxiliary_polys_cap);

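    // Draw the random challenges used to combine all constraints into the quotient polynomial.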
    let alphas = challenger.get_n_challenges(config.num_challenges);

    #[cfg(test)]
    {
        check_constraints(
            stark,
            trace_commitment,
            &auxiliary_polys_commitment,
            lookup_challenges.as_ref(),
            &lookups,
            ctl_data,
            alphas.clone(),
            degree_bits,
            num_lookup_columns,
        );
    }

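    // Evaluate the combined constraint polynomial on an extended coset and divide by `Z_H`
    // to obtain the quotient polynomials.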
    let quotient_polys = timed!(
        timing,
        "compute quotient polys",
        compute_quotient_polys::<F, <F as Packable>::Packing, C, S, D>(
            stark,
            trace_commitment,
            &auxiliary_polys_commitment,
            lookup_challenges.as_ref(),
            &lookups,
            ctl_data,
            alphas,
            degree_bits,
            num_lookup_columns,
            config,
        )
    );
    let all_quotient_chunks = timed!(
        timing,
        "split quotient polys",
        quotient_polys
            .into_par_iter()
            .flat_map(|mut quotient_poly| {
                quotient_poly
                    .trim_to_len(degree * stark.quotient_degree_factor())
                    .expect(
                        "Quotient has failed, the vanishing polynomial is not divisible by Z_H",
                    );
                // Split quotient into degree-n chunks.
                quotient_poly.chunks(degree)
            })
            .collect()
    );
    // Commit to the quotient polynomials.
    let quotient_commitment = timed!(
        timing,
        "compute quotient commitment",
        PolynomialBatch::from_coeffs(
            all_quotient_chunks,
            rate_bits,
            false,
            config.fri_config.cap_height,
            timing,
            None,
        )
    );
    // Observe the quotient polynomials Merkle cap.
    let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
    challenger.observe_cap(&quotient_polys_cap);

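    // Draw the out-of-domain opening point `zeta` from the transcript.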
    let zeta = challenger.get_extension_challenge::<D>();
    // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
    // `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since
    // `(g * zeta)^n = zeta^n`, where `n` is the order of `g`.
    let g = F::primitive_root_of_unity(degree_bits);
    ensure!(
        zeta.exp_power_of_2(degree_bits) != F::Extension::ONE,
        "Opening point is in the subgroup."
    );

    // Compute all openings: evaluate all committed polynomials at `zeta` and, when necessary, at `g * zeta`.
    let openings = StarkOpeningSet::new(
        zeta,
        g,
        trace_commitment,
        &auxiliary_polys_commitment,
        &quotient_commitment,
        stark.num_lookup_helper_columns(config),
    );
    // Get the FRI openings and observe them.
    challenger.observe_openings(&openings.to_fri_openings());

    let initial_merkle_trees = vec![
        trace_commitment,
        &auxiliary_polys_commitment,
        &quotient_commitment,
    ];

    let opening_proof = timed!(
        timing,
        "compute openings proof",
        PolynomialBatch::prove_openings(
            &stark.fri_instance(zeta, g, ctl_data.len(), config),
            &initial_merkle_trees,
            challenger,
            &fri_params,
            timing,
        )
    );

    let proof = StarkProof {
        trace_cap: trace_commitment.merkle_tree.cap.clone(),
        auxiliary_polys_cap,
        quotient_polys_cap,
        openings,
        opening_proof,
    };
    Ok(StarkProofWithMetadata {
        init_challenger_state,
        proof,
    })
}

/// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`,
/// where the `C_i`s are the Stark constraints.
fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
    stark: &S,
    trace_commitment: &'a PolynomialBatch<F, C, D>,
    auxiliary_polys_commitment: &'a PolynomialBatch<F, C, D>,
    lookup_challenges: Option<&'a Vec<F>>,
    lookups: &[Lookup],
    ctl_data: &CtlData<F>,
    alphas: Vec<F>,
    degree_bits: usize,
    num_lookup_columns: usize,
    config: &StarkConfig,
) -> Vec<PolynomialCoeffs<F>>
where
    F: RichField + Extendable<D>,
    P: PackedField<Scalar = F>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    let degree = 1 << degree_bits;
    let rate_bits = config.fri_config.rate_bits;

    let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor());
    assert!(
        quotient_degree_bits <= rate_bits,
        "Having constraints of degree higher than the rate is not supported yet."
    );
    let step = 1 << (rate_bits - quotient_degree_bits);
    // When opening the `Z`s polys at the "next" point, need to look at the point `next_step` steps away.
    let next_step = 1 << quotient_degree_bits;

    // Evaluation of the first Lagrange polynomial on the LDE domain.
    let lagrange_first = PolynomialValues::selector(degree, 0).lde_onto_coset(quotient_degree_bits);
    // Evaluation of the last Lagrange polynomial on the LDE domain.
    let lagrange_last =
        PolynomialValues::selector(degree, degree - 1).lde_onto_coset(quotient_degree_bits);

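    // Precompute the evaluations (and inverses) of the vanishing polynomial `Z_H` on the coset.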
    let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, quotient_degree_bits);

    // Retrieve the LDE values at index `i`.
    let get_trace_values_packed =
        |i_start| -> Vec<P> { trace_commitment.get_lde_values_packed(i_start, step) };

    // Last element of the subgroup.
    let last = F::primitive_root_of_unity(degree_bits).inverse();
    let size = degree << quotient_degree_bits;
    let coset = F::cyclic_subgroup_coset_known_order(
        F::primitive_root_of_unity(degree_bits + quotient_degree_bits),
        F::coset_shift(),
        size,
    );

    // We will step by `P::WIDTH`, and in each iteration, evaluate the quotient polynomial at
    // a batch of `P::WIDTH` points.
    let quotient_values = (0..size)
        .into_par_iter()
        .step_by(P::WIDTH)
        .flat_map_iter(|i_start| {
            let i_next_start = (i_start + next_step) % size;
            let i_range = i_start..i_start + P::WIDTH;

            let x = *P::from_slice(&coset[i_range.clone()]);
            let z_last = x - last;
            let lagrange_basis_first = *P::from_slice(&lagrange_first.values[i_range.clone()]);
            let lagrange_basis_last = *P::from_slice(&lagrange_last.values[i_range]);

            let mut consumer = ConstraintConsumer::new(
                alphas.clone(),
                z_last,
                lagrange_basis_first,
                lagrange_basis_last,
            );
            // Get the local and next row evaluations for the current STARK.
            let vars = S::EvaluationFrame::from_values(
                &get_trace_values_packed(i_start),
                &get_trace_values_packed(i_next_start),
            );
            // Get the local and next row evaluations for the permutation argument, as well as the associated challenges.
            let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
                local_values: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
                    [..num_lookup_columns]
                    .to_vec(),
                next_values: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step),
                challenges: challenges.to_vec(),
            });

            // Get all the data for this STARK's CTLs:
            // - the local and next row evaluations for the CTL Z polynomials
            // - the associated challenges.
            // - for each CTL:
            //   - the filter `Column`
            //   - the `Column`s that form the looking/looked table.
            let ctl_vars = ctl_data
                .zs_columns
                .iter()
                .enumerate()
                .map(|(i, zs_columns)| CtlCheckVars::<F, F, P, 1> {
                    local_z: auxiliary_polys_commitment.get_lde_values_packed(i_start, step)
                        [num_lookup_columns + i],
                    next_z: auxiliary_polys_commitment.get_lde_values_packed(i_next_start, step)
                        [num_lookup_columns + i],
                    challenges: zs_columns.challenge,
                    columns: &zs_columns.columns,
                    filter: &zs_columns.filter,
                })
                .collect::<Vec<_>>();

            // Evaluate the polynomial combining all constraints, including those associated
            // to the permutation and CTL arguments.
            eval_vanishing_poly::<F, F, P, S, D, 1>(
                stark,
                &vars,
                lookups,
                lookup_vars,
                &ctl_vars,
                &mut consumer,
            );
            let mut constraints_evals = consumer.accumulators();
            // We divide the constraints evaluations by `Z_H(x)`.
            let denominator_inv: P = z_h_on_coset.eval_inverse_packed(i_start);
            for eval in &mut constraints_evals {
                *eval *= denominator_inv;
            }

            let num_challenges = alphas.len();

            (0..P::WIDTH).map(move |i| {
                (0..num_challenges)
                    .map(|j| constraints_evals[j].as_slice()[i])
                    .collect()
            })
        })
        .collect::<Vec<_>>();

    transpose(&quotient_values)
        .into_par_iter()
        .map(PolynomialValues::new)
        .map(|values| values.coset_ifft(F::coset_shift()))
        .collect()
}

#[cfg(test)]
/// Check that all constraints evaluate to zero on `H`.
/// Can also be used to check the degree of the constraints by evaluating on a larger subgroup.
fn check_constraints<'a, F, C, S, const D: usize>(
    stark: &S,
    trace_commitment: &'a PolynomialBatch<F, C, D>,
    auxiliary_commitment: &'a PolynomialBatch<F, C, D>,
    lookup_challenges: Option<&'a Vec<F>>,
    lookups: &[Lookup],
    ctl_data: &CtlData<F>,
    alphas: Vec<F>,
    degree_bits: usize,
    num_lookup_columns: usize,
) where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    let degree = 1 << degree_bits;
    let rate_bits = 0; // Set this to higher value to check constraint degree.

    let size = degree << rate_bits;
    let step = 1 << rate_bits;

    // Evaluation of the first Lagrange polynomial.
    let lagrange_first = PolynomialValues::selector(degree, 0).lde(rate_bits);
    // Evaluation of the last Lagrange polynomial.
    let lagrange_last = PolynomialValues::selector(degree, degree - 1).lde(rate_bits);

    let subgroup = F::two_adic_subgroup(degree_bits + rate_bits);

    // Get the evaluations of a batch of polynomials over our subgroup.
    let get_subgroup_evals = |comm: &PolynomialBatch<F, C, D>| -> Vec<Vec<F>> {
        let values = comm
            .polynomials
            .par_iter()
            .map(|coeffs| coeffs.clone().fft().values)
            .collect::<Vec<_>>();
        transpose(&values)
    };

    // Get batch evaluations of the trace, permutation and CTL polynomials over our subgroup.
    let trace_subgroup_evals = get_subgroup_evals(trace_commitment);
    let auxiliary_subgroup_evals = get_subgroup_evals(auxiliary_commitment);

    // Last element of the subgroup.
    let last = F::primitive_root_of_unity(degree_bits).inverse();

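    // Evaluate every constraint at each point of the (possibly extended) subgroup.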
    let constraint_values = (0..size)
        .map(|i| {
            let i_next = (i + step) % size;

            let x = subgroup[i];
            let z_last = x - last;
            let lagrange_basis_first = lagrange_first.values[i];
            let lagrange_basis_last = lagrange_last.values[i];

            let mut consumer = ConstraintConsumer::new(
                alphas.clone(),
                z_last,
                lagrange_basis_first,
                lagrange_basis_last,
            );
            // Get the local and next row evaluations for the current STARK's trace.
            let vars = S::EvaluationFrame::from_values(
                &trace_subgroup_evals[i],
                &trace_subgroup_evals[i_next],
            );
            // Get the local and next row evaluations for the current STARK's permutation argument.
            let lookup_vars = lookup_challenges.map(|challenges| LookupCheckVars {
                local_values: auxiliary_subgroup_evals[i][..num_lookup_columns].to_vec(),
                next_values: auxiliary_subgroup_evals[i_next][..num_lookup_columns].to_vec(),
                challenges: challenges.to_vec(),
            });

            // Get the local and next row evaluations for the current STARK's CTL Z polynomials.
            let ctl_vars = ctl_data
                .zs_columns
                .iter()
                .enumerate()
                .map(|(iii, zs_columns)| CtlCheckVars::<F, F, F, 1> {
                    local_z: auxiliary_subgroup_evals[i][num_lookup_columns + iii],
                    next_z: auxiliary_subgroup_evals[i_next][num_lookup_columns + iii],
                    challenges: zs_columns.challenge,
                    columns: &zs_columns.columns,
                    filter: &zs_columns.filter,
                })
                .collect::<Vec<_>>();
            // Evaluate the polynomial combining all constraints, including those associated
            // to the permutation and CTL arguments.
            eval_vanishing_poly::<F, F, F, S, D, 1>(
                stark,
                &vars,
                lookups,
                lookup_vars,
                &ctl_vars,
                &mut consumer,
            );
            consumer.accumulators()
        })
        .collect::<Vec<_>>();

    // Assert that all constraints evaluate to 0 over our subgroup.
    for v in constraint_values {
        assert!(
            v.iter().all(|x| x.is_zero()),
            "Constraint failed in {}",
            std::any::type_name::<S>()
        );
    }
}