mirror of https://github.com/logos-storage/plonky2.git
synced 2026-01-03 14:23:07 +00:00

Merge pull request #541 from mir-protocol/starky_multitables

Start of Starky multitable implementation

This commit is contained in:
commit 17dfa8d713
@@ -1,5 +1,5 @@
 [workspace]
-members = ["field", "insertion", "plonky2", "starky", "system_zero", "util", "waksman", "ecdsa", "u32"]
+members = ["field", "insertion", "plonky2", "starky", "system_zero", "util", "waksman", "ecdsa", "u32", "starky2"]

 [profile.release]
 opt-level = 3
@@ -108,7 +108,7 @@ where
     let permutation_zs_cap = permutation_zs_commitment
         .as_ref()
         .map(|commit| commit.merkle_tree.cap.clone());
-    for cap in &permutation_zs_cap {
+    if let Some(cap) = &permutation_zs_cap {
         challenger.observe_cap(cap);
     }
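The hunk above replaces a `for` loop over an `&Option<_>` with an `if let`. The two forms are equivalent here, since iterating a reference to an `Option` yields at most one item; `if let` just states the intent directly. A minimal standalone sketch of the equivalence (illustrative only, not part of this commit):

    let cap: Option<u32> = Some(7);
    // A `for` over `&Option<T>` runs the body zero or one times...
    for c in &cap {
        println!("observed {c}");
    }
    // ...which is exactly what `if let Some(...)` says explicitly.
    if let Some(c) = &cap {
        println!("observed {c}");
    }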
15 starky2/Cargo.toml Normal file
@@ -0,0 +1,15 @@
[package]
name = "starky2"
description = "Implementation of STARKs 2"
version = "0.1.0"
edition = "2021"

[dependencies]
plonky2 = { path = "../plonky2" }
plonky2_util = { path = "../util" }
anyhow = "1.0.40"
env_logger = "0.9.0"
itertools = "0.10.0"
log = "0.4.14"
rayon = "1.5.1"
rand = "0.8.5"
112 starky2/src/all_stark.rs Normal file
@@ -0,0 +1,112 @@
use plonky2::field::extension_field::Extendable;
use plonky2::hash::hash_types::RichField;

use crate::config::StarkConfig;
use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::CrossTableLookup;
use crate::keccak::keccak_stark::KeccakStark;
use crate::stark::Stark;

#[derive(Clone)]
pub struct AllStark<F: RichField + Extendable<D>, const D: usize> {
    pub cpu_stark: CpuStark<F, D>,
    pub keccak_stark: KeccakStark<F, D>,
    pub cross_table_lookups: Vec<CrossTableLookup<F>>,
}

impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
    pub(crate) fn nums_permutation_zs(&self, config: &StarkConfig) -> Vec<usize> {
        vec![
            self.cpu_stark.num_permutation_batches(config),
            self.keccak_stark.num_permutation_batches(config),
        ]
    }
}

#[derive(Copy, Clone)]
pub enum Table {
    Cpu = 0,
    Keccak = 1,
}

#[cfg(test)]
mod tests {
    use anyhow::Result;
    use plonky2::field::field_types::Field;
    use plonky2::field::polynomial::PolynomialValues;
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
    use plonky2::util::timing::TimingTree;
    use rand::{thread_rng, Rng};

    use crate::all_stark::{AllStark, Table};
    use crate::config::StarkConfig;
    use crate::cpu::cpu_stark::CpuStark;
    use crate::cross_table_lookup::CrossTableLookup;
    use crate::keccak::keccak_stark::KeccakStark;
    use crate::prover::prove;
    use crate::verifier::verify_proof;

    #[test]
    fn test_all_stark() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;

        let config = StarkConfig::standard_fast_config();

        let cpu_stark = CpuStark::<F, D> {
            f: Default::default(),
        };
        let cpu_rows = 1 << 4;

        let keccak_stark = KeccakStark::<F, D> {
            f: Default::default(),
        };
        let keccak_rows = 1 << 3;

        let mut cpu_trace = vec![PolynomialValues::zero(cpu_rows); 10];
        let mut keccak_trace = vec![PolynomialValues::zero(keccak_rows); 7];

        let vs0 = (0..keccak_rows)
            .map(F::from_canonical_usize)
            .collect::<Vec<_>>();
        let vs1 = (1..=keccak_rows)
            .map(F::from_canonical_usize)
            .collect::<Vec<_>>();
        let start = thread_rng().gen_range(0..cpu_rows - keccak_rows);

        let default = vec![F::ONE; 2];

        cpu_trace[2].values = vec![default[0]; cpu_rows];
        cpu_trace[2].values[start..start + keccak_rows].copy_from_slice(&vs0);
        cpu_trace[4].values = vec![default[1]; cpu_rows];
        cpu_trace[4].values[start..start + keccak_rows].copy_from_slice(&vs1);

        keccak_trace[3].values[..].copy_from_slice(&vs0);
        keccak_trace[5].values[..].copy_from_slice(&vs1);

        let cross_table_lookups = vec![CrossTableLookup {
            looking_table: Table::Cpu,
            looking_columns: vec![2, 4],
            looked_table: Table::Keccak,
            looked_columns: vec![3, 5],
            default: vec![F::ONE; 2],
        }];

        let all_stark = AllStark {
            cpu_stark,
            keccak_stark,
            cross_table_lookups,
        };

        let proof = prove::<F, C, D>(
            &all_stark,
            &config,
            vec![cpu_trace, keccak_trace],
            vec![vec![]; 2],
            &mut TimingTree::default(),
        )?;

        verify_proof(all_stark, proof, &config)
    }
}
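How the test wires up the cross-table lookup (a reading of the code above, not text from the original PR): CPU columns 2 and 4 hold the default value 1 everywhere except a random window of `keccak_rows` consecutive rows, which holds exactly the contents of Keccak columns 3 and 5. For example:

    // With cpu_rows = 16, keccak_rows = 8 and start = 3 (say), cpu column 2 is:
    //   [1, 1, 1, 0, 1, 2, 3, 4, 5, 6, 7, 1, 1, 1, 1, 1]
    //             \______________________/
    //              vs0 = 0..8, the looked Keccak column
    // So the multiset of (col 2, col 4) rows in the CPU trace equals the
    // multiset of (col 3, col 5) rows in the Keccak trace plus eight copies
    // of the default pair (1, 1), which is the relation the CTL checks.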
34 starky2/src/config.rs Normal file
@@ -0,0 +1,34 @@
use plonky2::fri::reduction_strategies::FriReductionStrategy;
use plonky2::fri::{FriConfig, FriParams};

pub struct StarkConfig {
    pub security_bits: usize,

    /// The number of challenge points to generate, for IOPs that have soundness errors of (roughly)
    /// `degree / |F|`.
    pub num_challenges: usize,

    pub fri_config: FriConfig,
}

impl StarkConfig {
    /// A typical configuration with a rate of 2, resulting in fast but large proofs.
    /// Targets ~100 bit conjectured security.
    pub fn standard_fast_config() -> Self {
        Self {
            security_bits: 100,
            num_challenges: 2,
            fri_config: FriConfig {
                rate_bits: 1,
                cap_height: 4,
                proof_of_work_bits: 10,
                reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5),
                num_query_rounds: 90,
            },
        }
    }

    pub(crate) fn fri_params(&self, degree_bits: usize) -> FriParams {
        self.fri_config.fri_params(degree_bits, false)
    }
}
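A quick sanity check on the claimed ~100 bits (using the usual conjectured FRI accounting, which is an assumption on my part rather than something stated in this diff):

    // Each FRI query is conjectured to contribute rate_bits bits of soundness,
    // and grinding adds proof_of_work_bits on top:
    //   rate_bits * num_query_rounds + proof_of_work_bits = 1 * 90 + 10 = 100,
    // matching security_bits. Note that rate_bits = 1 means a blowup factor of
    // 2^1 = 2, the "rate of 2" in the doc comment above.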
166 starky2/src/constraint_consumer.rs Normal file
@@ -0,0 +1,166 @@
use std::marker::PhantomData;

use plonky2::field::extension_field::Extendable;
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;

pub struct ConstraintConsumer<P: PackedField> {
    /// Random values used to combine multiple constraints into one.
    pub alphas: Vec<P::Scalar>,

    /// Running sums of constraints that have been emitted so far, scaled by powers of alpha.
    // TODO(JN): This is pub so it can be used in a test. Once we have an API for accessing this
    // result, it should be made private.
    pub constraint_accs: Vec<P>,

    /// The evaluation of `X - g^(n-1)`.
    z_last: P,

    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
    /// with the first trace row, and zero at other points in the subgroup.
    lagrange_basis_first: P,

    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
    /// with the last trace row, and zero at other points in the subgroup.
    lagrange_basis_last: P,
}

impl<P: PackedField> ConstraintConsumer<P> {
    pub fn new(
        alphas: Vec<P::Scalar>,
        z_last: P,
        lagrange_basis_first: P,
        lagrange_basis_last: P,
    ) -> Self {
        Self {
            constraint_accs: vec![P::ZEROS; alphas.len()],
            alphas,
            z_last,
            lagrange_basis_first,
            lagrange_basis_last,
        }
    }

    // TODO: Do this correctly.
    pub fn accumulators(self) -> Vec<P::Scalar> {
        self.constraint_accs
            .into_iter()
            .map(|acc| acc.as_slice()[0])
            .collect()
    }

    /// Add one constraint valid on all rows except the last.
    pub fn constraint_transition(&mut self, constraint: P) {
        self.constraint(constraint * self.z_last);
    }

    /// Add one constraint on all rows.
    pub fn constraint(&mut self, constraint: P) {
        for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) {
            *acc *= alpha;
            *acc += constraint;
        }
    }

    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
    /// first row of the trace.
    pub fn constraint_first_row(&mut self, constraint: P) {
        self.constraint(constraint * self.lagrange_basis_first);
    }

    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
    /// last row of the trace.
    pub fn constraint_last_row(&mut self, constraint: P) {
        self.constraint(constraint * self.lagrange_basis_last);
    }
}

pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> {
    /// A random value used to combine multiple constraints into one.
    alphas: Vec<Target>,

    /// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
    constraint_accs: Vec<ExtensionTarget<D>>,

    /// The evaluation of `X - g^(n-1)`.
    z_last: ExtensionTarget<D>,

    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
    /// with the first trace row, and zero at other points in the subgroup.
    lagrange_basis_first: ExtensionTarget<D>,

    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
    /// with the last trace row, and zero at other points in the subgroup.
    lagrange_basis_last: ExtensionTarget<D>,

    _phantom: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> {
    pub fn new(
        zero: ExtensionTarget<D>,
        alphas: Vec<Target>,
        z_last: ExtensionTarget<D>,
        lagrange_basis_first: ExtensionTarget<D>,
        lagrange_basis_last: ExtensionTarget<D>,
    ) -> Self {
        Self {
            constraint_accs: vec![zero; alphas.len()],
            alphas,
            z_last,
            lagrange_basis_first,
            lagrange_basis_last,
            _phantom: Default::default(),
        }
    }

    pub fn accumulators(self) -> Vec<ExtensionTarget<D>> {
        self.constraint_accs
    }

    /// Add one constraint valid on all rows except the last.
    pub fn constraint_transition(
        &mut self,
        builder: &mut CircuitBuilder<F, D>,
        constraint: ExtensionTarget<D>,
    ) {
        let filtered_constraint = builder.mul_extension(constraint, self.z_last);
        self.constraint(builder, filtered_constraint);
    }

    /// Add one constraint valid on all rows.
    pub fn constraint(
        &mut self,
        builder: &mut CircuitBuilder<F, D>,
        constraint: ExtensionTarget<D>,
    ) {
        for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) {
            *acc = builder.scalar_mul_add_extension(alpha, *acc, constraint);
        }
    }

    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
    /// first row of the trace.
    pub fn constraint_first_row(
        &mut self,
        builder: &mut CircuitBuilder<F, D>,
        constraint: ExtensionTarget<D>,
    ) {
        let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first);
        self.constraint(builder, filtered_constraint);
    }

    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
    /// last row of the trace.
    pub fn constraint_last_row(
        &mut self,
        builder: &mut CircuitBuilder<F, D>,
        constraint: ExtensionTarget<D>,
    ) {
        let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last);
        self.constraint(builder, filtered_constraint);
    }
}
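The accumulation in `constraint` is Horner's rule: each new constraint multiplies the running sum by `alpha` before being added, so after emitting `c_0, ..., c_{n-1}` the accumulator holds `c_0*alpha^(n-1) + c_1*alpha^(n-2) + ... + c_{n-1}`. A small numeric sketch (my own illustration):

    // With alpha = 10 and constraints 3, 4, 5 emitted in that order:
    //   acc = 0
    //   acc = 0  * 10 + 3 = 3
    //   acc = 3  * 10 + 4 = 34
    //   acc = 34 * 10 + 5 = 345    // = 3*10^2 + 4*10 + 5
    // A cheating prover must make this combination vanish for every alpha,
    // which (for random alphas) forces each individual constraint to vanish.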
46 starky2/src/cpu/cpu_stark.rs Normal file
@@ -0,0 +1,46 @@
use std::marker::PhantomData;

use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

#[derive(Copy, Clone)]
pub struct CpuStark<F, const D: usize> {
    pub f: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
    const COLUMNS: usize = 10;
    const PUBLIC_INPUTS: usize = 0;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        _vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        _yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
    }

    fn eval_ext_circuit(
        &self,
        _builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
        _vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        _yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
    }

    fn constraint_degree(&self) -> usize {
        3
    }

    fn permutation_pairs(&self) -> Vec<PermutationPair> {
        vec![PermutationPair::singletons(8, 9)]
    }
}
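Both `CpuStark` here and `KeccakStark` below appear to be placeholder tables: they emit no constraints, and in this commit exist only to carry a permutation argument and to participate in the cross-table lookup. `PermutationPair::singletons(8, 9)` declares that columns 8 and 9 must agree as multisets:

    // The multiset { trace[8][r] : r in rows } must equal the multiset
    // { trace[9][r] : r in rows }; rows may be reordered arbitrarily,
    // which is what the grand-product Z polynomial enforces.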
1 starky2/src/cpu/mod.rs Normal file
@@ -0,0 +1 @@
pub mod cpu_stark;
275 starky2/src/cross_table_lookup.rs Normal file
@@ -0,0 +1,275 @@
use anyhow::{ensure, Result};
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::field::packed_field::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericConfig;

use crate::all_stark::Table;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::permutation::{
    get_grand_product_challenge_set, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::proof::StarkProofWithPublicInputs;
use crate::stark::Stark;
use crate::vars::StarkEvaluationVars;

#[derive(Clone)]
pub struct CrossTableLookup<F: Field> {
    pub looking_table: Table,
    pub looking_columns: Vec<usize>,
    pub looked_table: Table,
    pub looked_columns: Vec<usize>,
    pub default: Vec<F>,
}

impl<F: Field> CrossTableLookup<F> {
    pub fn new(
        looking_table: Table,
        looking_columns: Vec<usize>,
        looked_table: Table,
        looked_columns: Vec<usize>,
        default: Vec<F>,
    ) -> Self {
        assert_eq!(looking_columns.len(), looked_columns.len());
        Self {
            looking_table,
            looking_columns,
            looked_table,
            looked_columns,
            default,
        }
    }
}

/// Cross-table lookup data for one table.
#[derive(Clone)]
pub struct CtlData<F: Field> {
    /// Challenges used in the argument.
    pub(crate) challenges: GrandProductChallengeSet<F>,
    /// Vector of `(Z, columns)` where `Z` is a Z-polynomial for a lookup on columns `columns`.
    pub zs_columns: Vec<(PolynomialValues<F>, Vec<usize>)>,
}

impl<F: Field> CtlData<F> {
    pub(crate) fn new(challenges: GrandProductChallengeSet<F>) -> Self {
        Self {
            challenges,
            zs_columns: vec![],
        }
    }

    pub fn len(&self) -> usize {
        self.zs_columns.len()
    }

    pub fn is_empty(&self) -> bool {
        self.zs_columns.is_empty()
    }

    pub fn z_polys(&self) -> Vec<PolynomialValues<F>> {
        self.zs_columns.iter().map(|(p, _)| p.clone()).collect()
    }
}

pub fn cross_table_lookup_data<F: RichField, C: GenericConfig<D, F = F>, const D: usize>(
    config: &StarkConfig,
    trace_poly_values: &[Vec<PolynomialValues<F>>],
    cross_table_lookups: &[CrossTableLookup<F>],
    challenger: &mut Challenger<F, C::Hasher>,
) -> Vec<CtlData<F>> {
    let challenges = get_grand_product_challenge_set(challenger, config.num_challenges);
    let mut ctl_data_per_table = vec![CtlData::new(challenges.clone()); trace_poly_values.len()];
    for CrossTableLookup {
        looking_table,
        looking_columns,
        looked_table,
        looked_columns,
        default,
    } in cross_table_lookups
    {
        for &challenge in &challenges.challenges {
            let z_looking = partial_products(
                &trace_poly_values[*looking_table as usize],
                looking_columns,
                challenge,
            );
            let z_looked = partial_products(
                &trace_poly_values[*looked_table as usize],
                looked_columns,
                challenge,
            );

            debug_assert_eq!(
                *z_looking.values.last().unwrap(),
                *z_looked.values.last().unwrap()
                    * challenge.combine(default).exp_u64(
                        trace_poly_values[*looking_table as usize][0].len() as u64
                            - trace_poly_values[*looked_table as usize][0].len() as u64
                    )
            );

            ctl_data_per_table[*looking_table as usize]
                .zs_columns
                .push((z_looking, looking_columns.clone()));
            ctl_data_per_table[*looked_table as usize]
                .zs_columns
                .push((z_looked, looked_columns.clone()));
        }
    }
    ctl_data_per_table
}

fn partial_products<F: Field>(
    trace: &[PolynomialValues<F>],
    columns: &[usize],
    challenge: GrandProductChallenge<F>,
) -> PolynomialValues<F> {
    let mut partial_prod = F::ONE;
    let degree = trace[0].len();
    let mut res = Vec::with_capacity(degree);
    for i in 0..degree {
        partial_prod *= challenge.combine(columns.iter().map(|&j| &trace[j].values[i]));
        res.push(partial_prod);
    }
    res.into()
}
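// A sketch of the padding identity that `cross_table_lookup_data` asserts
// above (informal summary, not part of the original commit): writing
// c(row) = gamma + sum_i beta^i * row[col_i] for the combined value of a row,
//
//   Z_looking(last) = product of c(row) over all looking rows
//   Z_looked(last)  = product of c(row) over all looked rows
//
// If the looking rows are exactly the looked rows plus (m - k) rows holding
// `default`, where m and k are the two table lengths, then
//
//   Z_looking(last) = Z_looked(last) * c(default)^(m - k),
//
// which is the equation checked by the `debug_assert_eq!` above and
// re-checked by the verifier in `verify_cross_table_lookups` below.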
#[derive(Clone)]
pub struct CtlCheckVars<'a, F, FE, P, const D2: usize>
where
    F: Field,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
{
    pub(crate) local_z: P,
    pub(crate) next_z: P,
    pub(crate) challenges: GrandProductChallenge<F>,
    pub(crate) columns: &'a [usize],
}

impl<'a, F: RichField + Extendable<D>, const D: usize>
    CtlCheckVars<'a, F, F::Extension, F::Extension, D>
{
    pub(crate) fn from_proofs<C: GenericConfig<D, F = F>>(
        proofs: &[&StarkProofWithPublicInputs<F, C, D>],
        cross_table_lookups: &'a [CrossTableLookup<F>],
        ctl_challenges: &'a GrandProductChallengeSet<F>,
        num_permutation_zs: &[usize],
    ) -> Vec<Vec<Self>> {
        debug_assert_eq!(proofs.len(), num_permutation_zs.len());
        let mut ctl_zs = proofs
            .iter()
            .zip(num_permutation_zs)
            .map(|(p, &num_perms)| {
                let openings = &p.proof.openings;
                let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms);
                let ctl_zs_right = openings.permutation_ctl_zs_right.iter().skip(num_perms);
                ctl_zs.zip(ctl_zs_right)
            })
            .collect::<Vec<_>>();

        let mut ctl_vars_per_table = vec![vec![]; proofs.len()];
        for CrossTableLookup {
            looking_table,
            looking_columns,
            looked_table,
            looked_columns,
            ..
        } in cross_table_lookups
        {
            for &challenges in &ctl_challenges.challenges {
                let (looking_z, looking_z_next) = ctl_zs[*looking_table as usize].next().unwrap();
                ctl_vars_per_table[*looking_table as usize].push(Self {
                    local_z: *looking_z,
                    next_z: *looking_z_next,
                    challenges,
                    columns: looking_columns,
                });

                let (looked_z, looked_z_next) = ctl_zs[*looked_table as usize].next().unwrap();
                ctl_vars_per_table[*looked_table as usize].push(Self {
                    local_z: *looked_z,
                    next_z: *looked_z_next,
                    challenges,
                    columns: looked_columns,
                });
            }
        }
        ctl_vars_per_table
    }
}

pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, C, S, const D: usize, const D2: usize>(
    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
    ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
    consumer: &mut ConstraintConsumer<P>,
) where
    F: RichField + Extendable<D>,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    for lookup_vars in ctl_vars {
        let CtlCheckVars {
            local_z,
            next_z,
            challenges,
            columns,
        } = lookup_vars;
        let combine = |v: &[P]| -> P { challenges.combine(columns.iter().map(|&i| &v[i])) };

        // Check value of `Z(1)`
        consumer.constraint_first_row(*local_z - combine(vars.local_values));
        // Check `Z(gw) = combination * Z(w)`
        consumer.constraint_transition(*next_z - *local_z * combine(vars.next_values));
    }
}

pub(crate) fn verify_cross_table_lookups<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    cross_table_lookups: Vec<CrossTableLookup<F>>,
    proofs: &[&StarkProofWithPublicInputs<F, C, D>],
    challenges: GrandProductChallengeSet<F>,
    config: &StarkConfig,
) -> Result<()> {
    let degrees_bits = proofs
        .iter()
        .map(|p| p.proof.recover_degree_bits(config))
        .collect::<Vec<_>>();
    let mut ctl_zs_openings = proofs
        .iter()
        .map(|p| p.proof.openings.ctl_zs_last.iter())
        .collect::<Vec<_>>();
    for (
        i,
        CrossTableLookup {
            looking_table,
            looked_table,
            default,
            ..
        },
    ) in cross_table_lookups.into_iter().enumerate()
    {
        let looking_degree = 1 << degrees_bits[looking_table as usize];
        let looked_degree = 1 << degrees_bits[looked_table as usize];
        let looking_z = *ctl_zs_openings[looking_table as usize].next().unwrap();
        let looked_z = *ctl_zs_openings[looked_table as usize].next().unwrap();
        let challenge = challenges.challenges[i % config.num_challenges];
        let combined_default = challenge.combine(default.iter());

        ensure!(
            looking_z == looked_z * combined_default.exp_u64(looking_degree - looked_degree),
            "Cross-table lookup verification failed."
        );
    }

    Ok(())
}
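Stated once more from the verifier's side (a paraphrase of the `ensure!` above): since only the final value of each CTL `Z` polynomial is opened (`ctl_zs_last`), the whole cross-table relation collapses to one field equation per lookup:

    // looking_z == looked_z * combine(default)^(looking_degree - looked_degree)
    //
    // If every surplus looking row holds the default tuple, both sides agree;
    // any other multiset of rows can satisfy this only with small probability
    // over the random beta and gamma.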
158 starky2/src/get_challenges.rs Normal file
@@ -0,0 +1,158 @@
use plonky2::field::extension_field::Extendable;
use plonky2::fri::proof::FriProof;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};

use crate::all_stark::AllStark;
use crate::config::StarkConfig;
use crate::permutation::{
    get_grand_product_challenge_set, get_n_grand_product_challenge_sets,
    get_n_permutation_challenge_sets_target,
};
use crate::proof::*;
use crate::stark::Stark;

impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
    /// Computes all Fiat-Shamir challenges used in the STARK proof.
    pub(crate) fn get_challenges(
        &self,
        all_stark: &AllStark<F, D>,
        config: &StarkConfig,
    ) -> AllProofChallenges<F, D> {
        let mut challenger = Challenger::<F, C::Hasher>::new();

        for proof in self.proofs() {
            challenger.observe_cap(&proof.proof.trace_cap);
        }

        let ctl_challenges =
            get_grand_product_challenge_set(&mut challenger, config.num_challenges);

        AllProofChallenges {
            cpu_challenges: self.cpu_proof.get_challenges(
                &mut challenger,
                &all_stark.cpu_stark,
                config,
            ),
            keccak_challenges: self.keccak_proof.get_challenges(
                &mut challenger,
                &all_stark.keccak_stark,
                config,
            ),
            ctl_challenges,
        }
    }
}

impl<F, C, const D: usize> StarkProofWithPublicInputs<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    /// Computes all Fiat-Shamir challenges used in the STARK proof.
    pub(crate) fn get_challenges<S: Stark<F, D>>(
        &self,
        challenger: &mut Challenger<F, C::Hasher>,
        stark: &S,
        config: &StarkConfig,
    ) -> StarkProofChallenges<F, D> {
        let degree_bits = self.proof.recover_degree_bits(config);

        let StarkProof {
            permutation_ctl_zs_cap,
            quotient_polys_cap,
            openings,
            opening_proof:
                FriProof {
                    commit_phase_merkle_caps,
                    final_poly,
                    pow_witness,
                    ..
                },
            ..
        } = &self.proof;

        let num_challenges = config.num_challenges;

        let permutation_challenge_sets = stark.uses_permutation_args().then(|| {
            get_n_grand_product_challenge_sets(
                challenger,
                num_challenges,
                stark.permutation_batch_size(),
            )
        });

        challenger.observe_cap(permutation_ctl_zs_cap);

        let stark_alphas = challenger.get_n_challenges(num_challenges);

        challenger.observe_cap(quotient_polys_cap);
        let stark_zeta = challenger.get_extension_challenge::<D>();

        challenger.observe_openings(&openings.to_fri_openings());

        StarkProofChallenges {
            permutation_challenge_sets,
            stark_alphas,
            stark_zeta,
            fri_challenges: challenger.fri_challenges::<C, D>(
                commit_phase_merkle_caps,
                final_poly,
                *pow_witness,
                degree_bits,
                &config.fri_config,
            ),
        }
    }
}

impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
    pub(crate) fn get_challenges<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        S: Stark<F, D>,
    >(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        stark: &S,
        config: &StarkConfig,
    ) -> StarkProofChallengesTarget<D>
    where
        C::Hasher: AlgebraicHasher<F>,
    {
        let proof = &self.proof;
        let opening_proof = &proof.opening_proof;
        let num_challenges = config.num_challenges;
        let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
        challenger.observe_cap(&proof.trace_cap);
        let permutation_challenge_sets =
            proof.permutation_zs_cap.as_ref().map(|permutation_zs_cap| {
                let tmp = get_n_permutation_challenge_sets_target(
                    builder,
                    &mut challenger,
                    num_challenges,
                    stark.permutation_batch_size(),
                );
                challenger.observe_cap(permutation_zs_cap);
                tmp
            });
        let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
        challenger.observe_cap(&proof.quotient_polys_cap);
        let stark_zeta = challenger.get_extension_challenge(builder);
        challenger.observe_openings(&proof.openings.to_fri_openings());
        StarkProofChallengesTarget {
            permutation_challenge_sets,
            stark_alphas,
            stark_zeta,
            fri_challenges: challenger.fri_challenges::<C>(
                builder,
                &opening_proof.commit_phase_merkle_caps,
                &opening_proof.final_poly,
                opening_proof.pow_witness,
                &config.fri_config,
            ),
        }
    }
}
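The Fiat-Shamir transcript order implied by the code above, written out for reference (a reading of the code, per proof):

    // 1. Observe every table's trace cap        -> derive the CTL challenges
    // 2. Derive permutation challenges (only if the stark uses them)
    // 3. Observe the permutation/CTL Z cap      -> derive the alphas
    // 4. Observe the quotient cap               -> derive zeta
    // 5. Observe the openings                   -> derive the FRI challenges
    //
    // Prover and verifier must replay exactly this order; any divergence
    // produces different challenges and a failing proof.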
46 starky2/src/keccak/keccak_stark.rs Normal file
@@ -0,0 +1,46 @@
use std::marker::PhantomData;

use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

#[derive(Copy, Clone)]
pub struct KeccakStark<F, const D: usize> {
    pub(crate) f: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
    const COLUMNS: usize = 7;
    const PUBLIC_INPUTS: usize = 0;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        _vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        _yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
    }

    fn eval_ext_circuit(
        &self,
        _builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
        _vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        _yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
    }

    fn constraint_degree(&self) -> usize {
        3
    }

    fn permutation_pairs(&self) -> Vec<PermutationPair> {
        vec![PermutationPair::singletons(0, 6)]
    }
}
1 starky2/src/keccak/mod.rs Normal file
@@ -0,0 +1 @@
pub mod keccak_stark;
22 starky2/src/lib.rs Normal file
@@ -0,0 +1,22 @@
#![allow(incomplete_features)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::type_complexity)]
#![feature(generic_const_exprs)]

pub mod all_stark;
pub mod config;
pub mod constraint_consumer;
pub mod cpu;
pub mod cross_table_lookup;
mod get_challenges;
pub mod keccak;
pub mod permutation;
pub mod proof;
pub mod prover;
pub mod recursive_verifier;
pub mod stark;
pub mod stark_testing;
pub mod util;
pub mod vanishing_poly;
pub mod vars;
pub mod verifier;
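One practical note, not stated in the diff itself: `#![feature(generic_const_exprs)]` is an unstable feature gate, so this crate presumably builds only on a nightly Rust toolchain; the `{ Self::COLUMNS }` bounds used throughout depend on it.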
410 starky2/src/permutation.rs Normal file
@@ -0,0 +1,410 @@
//! Permutation arguments.

use itertools::Itertools;
use plonky2::field::batch_util::batch_multiply_inplace;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::field::packed_field::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::plonk_common::reduce_with_powers;
use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget};
use rayon::prelude::*;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another.
/// In particular, there should exist some permutation `pi` such that for any `i`,
/// `trace[lhs[i]] = pi(trace[rhs[i]])`. Here `trace` denotes the trace in column-major form, so
/// `trace[col]` is a column vector.
pub struct PermutationPair {
    /// Each entry contains two column indices, representing two columns which should be
    /// permutations of one another.
    pub column_pairs: Vec<(usize, usize)>,
}

impl PermutationPair {
    pub fn singletons(lhs: usize, rhs: usize) -> Self {
        Self {
            column_pairs: vec![(lhs, rhs)],
        }
    }
}

/// A single instance of a permutation check protocol.
pub(crate) struct PermutationInstance<'a, T: Copy> {
    pub(crate) pair: &'a PermutationPair,
    pub(crate) challenge: GrandProductChallenge<T>,
}

/// Randomness for a single instance of a permutation check protocol.
#[derive(Copy, Clone)]
pub(crate) struct GrandProductChallenge<T: Copy> {
    /// Randomness used to combine multiple columns into one.
    pub(crate) beta: T,
    /// Random offset that's added to the beta-reduced column values.
    pub(crate) gamma: T,
}

impl<F: Field> GrandProductChallenge<F> {
    pub(crate) fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(
        &self,
        terms: T,
    ) -> P
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
        T::IntoIter: DoubleEndedIterator,
    {
        reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma)
    }
}

/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone)]
pub(crate) struct GrandProductChallengeSet<T: Copy> {
    pub(crate) challenges: Vec<GrandProductChallenge<T>>,
}

/// Compute all Z polynomials (for permutation arguments).
pub(crate) fn compute_permutation_z_polys<F, C, S, const D: usize>(
    stark: &S,
    config: &StarkConfig,
    trace_poly_values: &[PolynomialValues<F>],
    permutation_challenge_sets: &[GrandProductChallengeSet<F>],
) -> Vec<PolynomialValues<F>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    let permutation_pairs = stark.permutation_pairs();
    let permutation_batches = get_permutation_batches(
        &permutation_pairs,
        permutation_challenge_sets,
        config.num_challenges,
        stark.permutation_batch_size(),
    );

    permutation_batches
        .into_par_iter()
        .map(|instances| compute_permutation_z_poly(&instances, trace_poly_values))
        .collect()
}

/// Compute a single Z polynomial.
fn compute_permutation_z_poly<F: Field>(
    instances: &[PermutationInstance<F>],
    trace_poly_values: &[PolynomialValues<F>],
) -> PolynomialValues<F> {
    let degree = trace_poly_values[0].len();
    let (reduced_lhs_polys, reduced_rhs_polys): (Vec<_>, Vec<_>) = instances
        .iter()
        .map(|instance| permutation_reduced_polys(instance, trace_poly_values, degree))
        .unzip();

    let numerator = poly_product_elementwise(reduced_lhs_polys.into_iter());
    let denominator = poly_product_elementwise(reduced_rhs_polys.into_iter());

    // Compute the quotients.
    let denominator_inverses = F::batch_multiplicative_inverse(&denominator.values);
    let mut quotients = numerator.values;
    batch_multiply_inplace(&mut quotients, &denominator_inverses);

    // Compute Z, which contains partial products of the quotients.
    let mut partial_products = Vec::with_capacity(degree);
    let mut acc = F::ONE;
    for q in quotients {
        partial_products.push(acc);
        acc *= q;
    }
    PolynomialValues::new(partial_products)
}
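// The shape of the resulting Z polynomial, spelled out informally (a reader's
// sketch, not part of the original commit): Z starts at 1 and accumulates the
// per-row quotient, so with reduced values l_0..l_{n-1} (numerator) and
// r_0..r_{n-1} (denominator),
//
//   Z(g^0) = 1,    Z(g^j) = prod_{i < j} l_i / r_i.
//
// If {l_i} and {r_i} agree as multisets, every factor cancels and the full
// product is 1, so the wrap-around value Z(g^n) = Z(g^0) = 1 is consistent;
// otherwise equality holds only with small probability over beta and gamma.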
/// Computes the reduced polynomial, `\sum beta^i f_i(x) + gamma`, for both the "left" and "right"
/// sides of a given `PermutationPair`.
fn permutation_reduced_polys<F: Field>(
    instance: &PermutationInstance<F>,
    trace_poly_values: &[PolynomialValues<F>],
    degree: usize,
) -> (PolynomialValues<F>, PolynomialValues<F>) {
    let PermutationInstance {
        pair: PermutationPair { column_pairs },
        challenge: GrandProductChallenge { beta, gamma },
    } = instance;

    let mut reduced_lhs = PolynomialValues::constant(*gamma, degree);
    let mut reduced_rhs = PolynomialValues::constant(*gamma, degree);
    for ((lhs, rhs), weight) in column_pairs.iter().zip(beta.powers()) {
        reduced_lhs.add_assign_scaled(&trace_poly_values[*lhs], weight);
        reduced_rhs.add_assign_scaled(&trace_poly_values[*rhs], weight);
    }
    (reduced_lhs, reduced_rhs)
}

/// Computes the elementwise product of a set of polynomials. Assumes that the set is non-empty and
/// that each polynomial has the same length.
fn poly_product_elementwise<F: Field>(
    mut polys: impl Iterator<Item = PolynomialValues<F>>,
) -> PolynomialValues<F> {
    let mut product = polys.next().expect("Expected at least one polynomial");
    for poly in polys {
        batch_multiply_inplace(&mut product.values, &poly.values)
    }
    product
}

fn get_grand_product_challenge<F: RichField, H: Hasher<F>>(
    challenger: &mut Challenger<F, H>,
) -> GrandProductChallenge<F> {
    let beta = challenger.get_challenge();
    let gamma = challenger.get_challenge();
    GrandProductChallenge { beta, gamma }
}

pub(crate) fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
    challenger: &mut Challenger<F, H>,
    num_challenges: usize,
) -> GrandProductChallengeSet<F> {
    let challenges = (0..num_challenges)
        .map(|_| get_grand_product_challenge(challenger))
        .collect();
    GrandProductChallengeSet { challenges }
}

pub(crate) fn get_n_grand_product_challenge_sets<F: RichField, H: Hasher<F>>(
    challenger: &mut Challenger<F, H>,
    num_challenges: usize,
    num_sets: usize,
) -> Vec<GrandProductChallengeSet<F>> {
    (0..num_sets)
        .map(|_| get_grand_product_challenge_set(challenger, num_challenges))
        .collect()
}

fn get_permutation_challenge_target<
    F: RichField + Extendable<D>,
    H: AlgebraicHasher<F>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, H, D>,
) -> GrandProductChallenge<Target> {
    let beta = challenger.get_challenge(builder);
    let gamma = challenger.get_challenge(builder);
    GrandProductChallenge { beta, gamma }
}

fn get_permutation_challenge_set_target<
    F: RichField + Extendable<D>,
    H: AlgebraicHasher<F>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, H, D>,
    num_challenges: usize,
) -> GrandProductChallengeSet<Target> {
    let challenges = (0..num_challenges)
        .map(|_| get_permutation_challenge_target(builder, challenger))
        .collect();
    GrandProductChallengeSet { challenges }
}

pub(crate) fn get_n_permutation_challenge_sets_target<
    F: RichField + Extendable<D>,
    H: AlgebraicHasher<F>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, H, D>,
    num_challenges: usize,
    num_sets: usize,
) -> Vec<GrandProductChallengeSet<Target>> {
    (0..num_sets)
        .map(|_| get_permutation_challenge_set_target(builder, challenger, num_challenges))
        .collect()
}

/// Get a list of instances of our batch-permutation argument. These are permutation arguments
/// where the same `Z(x)` polynomial is used to check more than one permutation.
/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we
/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we
/// chunk these arguments based on our batch size.
pub(crate) fn get_permutation_batches<'a, T: Copy>(
    permutation_pairs: &'a [PermutationPair],
    permutation_challenge_sets: &[GrandProductChallengeSet<T>],
    num_challenges: usize,
    batch_size: usize,
) -> Vec<Vec<PermutationInstance<'a, T>>> {
    permutation_pairs
        .iter()
        .cartesian_product(0..num_challenges)
        .chunks(batch_size)
        .into_iter()
        .map(|batch| {
            batch
                .enumerate()
                .map(|(i, (pair, chal))| {
                    let challenge = permutation_challenge_sets[i].challenges[chal];
                    PermutationInstance { pair, challenge }
                })
                .collect_vec()
        })
        .collect()
}

pub struct PermutationCheckVars<F, FE, P, const D2: usize>
where
    F: Field,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
{
    pub(crate) local_zs: Vec<P>,
    pub(crate) next_zs: Vec<P>,
    pub(crate) permutation_challenge_sets: Vec<GrandProductChallengeSet<F>>,
}

pub(crate) fn eval_permutation_checks<F, FE, P, C, S, const D: usize, const D2: usize>(
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
    permutation_vars: PermutationCheckVars<F, FE, P, D2>,
    consumer: &mut ConstraintConsumer<P>,
) where
    F: RichField + Extendable<D>,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    let PermutationCheckVars {
        local_zs,
        next_zs,
        permutation_challenge_sets,
    } = permutation_vars;

    // Check that Z(1) = 1.
    for &z in &local_zs {
        consumer.constraint_first_row(z - FE::ONE);
    }

    let permutation_pairs = stark.permutation_pairs();

    let permutation_batches = get_permutation_batches(
        &permutation_pairs,
        &permutation_challenge_sets,
        config.num_challenges,
        stark.permutation_batch_size(),
    );

    // Each Z value corresponds to a permutation batch.
    for (i, instances) in permutation_batches.iter().enumerate() {
        // Check that Z(gx) * down = Z(x) * up.
        let (reduced_lhs, reduced_rhs): (Vec<P>, Vec<P>) = instances
            .iter()
            .map(|instance| {
                let PermutationInstance {
                    pair: PermutationPair { column_pairs },
                    challenge: GrandProductChallenge { beta, gamma },
                } = instance;
                let mut factor = ReducingFactor::new(*beta);
                let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
                    .iter()
                    .map(|&(i, j)| (vars.local_values[i], vars.local_values[j]))
                    .unzip();
                (
                    factor.reduce_ext(lhs.into_iter()) + FE::from_basefield(*gamma),
                    factor.reduce_ext(rhs.into_iter()) + FE::from_basefield(*gamma),
                )
            })
            .unzip();
        let constraint = next_zs[i] * reduced_rhs.into_iter().product::<P>()
            - local_zs[i] * reduced_lhs.into_iter().product::<P>();
        consumer.constraint(constraint);
    }
}

pub struct PermutationCheckDataTarget<const D: usize> {
    pub(crate) local_zs: Vec<ExtensionTarget<D>>,
    pub(crate) next_zs: Vec<ExtensionTarget<D>>,
    pub(crate) permutation_challenge_sets: Vec<GrandProductChallengeSet<Target>>,
}

pub(crate) fn eval_permutation_checks_circuit<F, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
    permutation_data: PermutationCheckDataTarget<D>,
    consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    let PermutationCheckDataTarget {
        local_zs,
        next_zs,
        permutation_challenge_sets,
    } = permutation_data;

    let one = builder.one_extension();
    // Check that Z(1) = 1.
    for &z in &local_zs {
        let z_1 = builder.sub_extension(z, one);
        consumer.constraint_first_row(builder, z_1);
    }

    let permutation_pairs = stark.permutation_pairs();

    let permutation_batches = get_permutation_batches(
        &permutation_pairs,
        &permutation_challenge_sets,
        config.num_challenges,
        stark.permutation_batch_size(),
    );

    // Each Z value corresponds to a permutation batch.
    for (i, instances) in permutation_batches.iter().enumerate() {
        let (reduced_lhs, reduced_rhs): (Vec<ExtensionTarget<D>>, Vec<ExtensionTarget<D>>) =
            instances
                .iter()
                .map(|instance| {
                    let PermutationInstance {
                        pair: PermutationPair { column_pairs },
                        challenge: GrandProductChallenge { beta, gamma },
                    } = instance;
                    let beta_ext = builder.convert_to_ext(*beta);
                    let gamma_ext = builder.convert_to_ext(*gamma);
                    let mut factor = ReducingFactorTarget::new(beta_ext);
                    let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
                        .iter()
                        .map(|&(i, j)| (vars.local_values[i], vars.local_values[j]))
                        .unzip();
                    let reduced_lhs = factor.reduce(&lhs, builder);
                    let reduced_rhs = factor.reduce(&rhs, builder);
                    (
                        builder.add_extension(reduced_lhs, gamma_ext),
                        builder.add_extension(reduced_rhs, gamma_ext),
                    )
                })
                .unzip();
        let reduced_lhs_product = builder.mul_many_extension(&reduced_lhs);
        let reduced_rhs_product = builder.mul_many_extension(&reduced_rhs);
        // constraint = next_zs[i] * reduced_rhs_product - local_zs[i] * reduced_lhs_product
        let constraint = {
            let tmp = builder.mul_extension(local_zs[i], reduced_lhs_product);
            builder.mul_sub_extension(next_zs[i], reduced_rhs_product, tmp)
        };
        consumer.constraint(builder, constraint)
    }
}
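The transition constraint emitted in both evaluation paths multiplies the batched sides together, so one `Z` polynomial covers a whole batch of permutation instances:

    // For a batch with reduced sides (l_1, r_1), ..., (l_b, r_b):
    //   Z(gx) * (r_1 * ... * r_b) = Z(x) * (l_1 * ... * l_b)
    // The constraint's degree grows with the batch size b, which is
    // presumably why `permutation_batch_size` has to stay compatible with
    // the stark's `constraint_degree`.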
262 starky2/src/proof.rs Normal file
@@ -0,0 +1,262 @@
use itertools::Itertools;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::fri::proof::{
    CompressedFriProof, FriChallenges, FriChallengesTarget, FriProof, FriProofTarget,
};
use plonky2::fri::structure::{
    FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget,
};
use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::config::GenericConfig;
use rayon::prelude::*;

use crate::config::StarkConfig;
use crate::permutation::GrandProductChallengeSet;

#[derive(Debug, Clone)]
pub struct AllProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
    pub cpu_proof: StarkProofWithPublicInputs<F, C, D>,
    pub keccak_proof: StarkProofWithPublicInputs<F, C, D>,
}

impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
    pub fn proofs(&self) -> [&StarkProofWithPublicInputs<F, C, D>; 2] {
        [&self.cpu_proof, &self.keccak_proof]
    }
}

pub(crate) struct AllProofChallenges<F: RichField + Extendable<D>, const D: usize> {
    pub cpu_challenges: StarkProofChallenges<F, D>,
    pub keccak_challenges: StarkProofChallenges<F, D>,
    pub ctl_challenges: GrandProductChallengeSet<F>,
}

#[derive(Debug, Clone)]
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
    /// Merkle cap of LDEs of trace values.
    pub trace_cap: MerkleCap<F, C::Hasher>,
    /// Merkle cap of LDEs of permutation Z values.
    pub permutation_ctl_zs_cap: MerkleCap<F, C::Hasher>,
    /// Merkle cap of LDEs of quotient polynomial values.
    pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
    /// Purported values of each polynomial at the challenge point.
    pub openings: StarkOpeningSet<F, D>,
    /// A batch FRI argument for all openings.
    pub opening_proof: FriProof<F, C::Hasher, D>,
}

impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> StarkProof<F, C, D> {
    /// Recover the length of the trace from a STARK proof and a STARK config.
    pub fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
        let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
            .initial_trees_proof
            .evals_proofs[0]
            .1;
        let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
        lde_bits - config.fri_config.rate_bits
    }
}

pub struct StarkProofTarget<const D: usize> {
    pub trace_cap: MerkleCapTarget,
    pub permutation_zs_cap: Option<MerkleCapTarget>,
    pub quotient_polys_cap: MerkleCapTarget,
    pub openings: StarkOpeningSetTarget<D>,
    pub opening_proof: FriProofTarget<D>,
}

impl<const D: usize> StarkProofTarget<D> {
    /// Recover the length of the trace from a STARK proof and a STARK config.
    pub fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
        let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
            .initial_trees_proof
            .evals_proofs[0]
            .1;
        let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
        lde_bits - config.fri_config.rate_bits
    }
}

#[derive(Debug, Clone)]
pub struct StarkProofWithPublicInputs<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
> {
    pub proof: StarkProof<F, C, D>,
    // TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`.
    pub public_inputs: Vec<F>,
}

pub struct StarkProofWithPublicInputsTarget<const D: usize> {
    pub proof: StarkProofTarget<D>,
    pub public_inputs: Vec<Target>,
}

pub struct CompressedStarkProof<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
> {
    /// Merkle cap of LDEs of trace values.
    pub trace_cap: MerkleCap<F, C::Hasher>,
    /// Purported values of each polynomial at the challenge point.
    pub openings: StarkOpeningSet<F, D>,
    /// A batch FRI argument for all openings.
    pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}

pub struct CompressedStarkProofWithPublicInputs<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
> {
    pub proof: CompressedStarkProof<F, C, D>,
    pub public_inputs: Vec<F>,
}

pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
    /// Randomness used in any permutation arguments.
    pub permutation_challenge_sets: Option<Vec<GrandProductChallengeSet<F>>>,

    /// Random values used to combine STARK constraints.
    pub stark_alphas: Vec<F>,

    /// Point at which the STARK polynomials are opened.
    pub stark_zeta: F::Extension,

    pub fri_challenges: FriChallenges<F, D>,
}

pub(crate) struct StarkProofChallengesTarget<const D: usize> {
    pub permutation_challenge_sets: Option<Vec<GrandProductChallengeSet<Target>>>,
    pub stark_alphas: Vec<Target>,
    pub stark_zeta: ExtensionTarget<D>,
    pub fri_challenges: FriChallengesTarget<D>,
}

/// Purported values of each polynomial at the challenge point.
#[derive(Debug, Clone)]
pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
    /// Openings of trace polynomials at `zeta`.
    pub local_values: Vec<F::Extension>,
    /// Openings of trace polynomials at `g * zeta`.
    pub next_values: Vec<F::Extension>,
    /// Openings of permutation and cross-table lookup `Z` polynomials at `zeta`.
    pub permutation_ctl_zs: Vec<F::Extension>,
    /// Openings of permutation and cross-table lookup `Z` polynomials at `g * zeta`.
    pub permutation_ctl_zs_right: Vec<F::Extension>,
    /// Openings of cross-table lookup `Z` polynomials at `g^-1`.
    pub ctl_zs_last: Vec<F>,
    /// Openings of quotient polynomials at `zeta`.
    pub quotient_polys: Vec<F::Extension>,
}

impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
    pub fn new<C: GenericConfig<D, F = F>>(
        zeta: F::Extension,
        g: F,
        trace_commitment: &PolynomialBatch<F, C, D>,
        permutation_ctl_zs_commitment: &PolynomialBatch<F, C, D>,
        quotient_commitment: &PolynomialBatch<F, C, D>,
        degree_bits: usize,
        num_permutation_zs: usize,
    ) -> Self {
        let eval_commitment = |z: F::Extension, c: &PolynomialBatch<F, C, D>| {
            c.polynomials
                .par_iter()
                .map(|p| p.to_extension().eval(z))
                .collect::<Vec<_>>()
        };
        let eval_commitment_base = |z: F, c: &PolynomialBatch<F, C, D>| {
            c.polynomials
                .par_iter()
                .map(|p| p.eval(z))
                .collect::<Vec<_>>()
        };
        let zeta_right = zeta.scalar_mul(g);
        Self {
            local_values: eval_commitment(zeta, trace_commitment),
            next_values: eval_commitment(zeta_right, trace_commitment),
            permutation_ctl_zs: eval_commitment(zeta, permutation_ctl_zs_commitment),
            permutation_ctl_zs_right: eval_commitment(zeta_right, permutation_ctl_zs_commitment),
            ctl_zs_last: eval_commitment_base(
                F::primitive_root_of_unity(degree_bits).inverse(),
                permutation_ctl_zs_commitment,
            )[num_permutation_zs..]
                .to_vec(),
            quotient_polys: eval_commitment(zeta, quotient_commitment),
        }
    }

    pub(crate) fn to_fri_openings(&self) -> FriOpenings<F, D> {
        let zeta_batch = FriOpeningBatch {
            values: self
                .local_values
                .iter()
                .chain(&self.permutation_ctl_zs)
                .chain(&self.quotient_polys)
                .copied()
                .collect_vec(),
        };
        let zeta_right_batch = FriOpeningBatch {
            values: self
                .next_values
                .iter()
                .chain(&self.permutation_ctl_zs_right)
                .copied()
                .collect_vec(),
        };
        let mut batches = vec![zeta_batch, zeta_right_batch];

        if !self.ctl_zs_last.is_empty() {
            batches.push(FriOpeningBatch {
                values: self
                    .ctl_zs_last
                    .iter()
                    .copied()
                    .map(F::Extension::from_basefield)
                    .collect(),
            });
        }

        FriOpenings { batches }
    }
}

pub struct StarkOpeningSetTarget<const D: usize> {
    pub local_values: Vec<ExtensionTarget<D>>,
    pub next_values: Vec<ExtensionTarget<D>>,
    pub permutation_zs: Option<Vec<ExtensionTarget<D>>>,
    pub permutation_zs_right: Option<Vec<ExtensionTarget<D>>>,
    pub quotient_polys: Vec<ExtensionTarget<D>>,
}

impl<const D: usize> StarkOpeningSetTarget<D> {
    pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget<D> {
        let zeta_batch = FriOpeningBatchTarget {
            values: self
                .local_values
                .iter()
                .chain(self.permutation_zs.iter().flatten())
                .chain(&self.quotient_polys)
                .copied()
                .collect_vec(),
        };
        let zeta_right_batch = FriOpeningBatchTarget {
            values: self
                .next_values
                .iter()
                .chain(self.permutation_zs_right.iter().flatten())
                .copied()
                .collect_vec(),
        };
        FriOpeningsTarget {
            batches: vec![zeta_batch, zeta_right_batch],
        }
    }
}
534
starky2/src/prover.rs
Normal file
@ -0,0 +1,534 @@
use anyhow::{ensure, Result};
use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
use plonky2::field::packable::Packable;
use plonky2::field::packed_field::PackedField;
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::field::zero_poly_coset::ZeroPolyOnCoset;
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
use plonky2_util::{log2_ceil, log2_strict};
use rayon::prelude::*;

use crate::all_stark::{AllStark, Table};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{cross_table_lookup_data, CtlCheckVars, CtlData};
use crate::keccak::keccak_stark::KeccakStark;
use crate::permutation::PermutationCheckVars;
use crate::permutation::{
    compute_permutation_z_polys, get_n_grand_product_challenge_sets, GrandProductChallengeSet,
};
use crate::proof::{AllProof, StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;

/// Compute all STARK proofs.
pub fn prove<F, C, const D: usize>(
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    trace_poly_values: Vec<Vec<PolynomialValues<F>>>,
    public_inputs: Vec<Vec<F>>,
    timing: &mut TimingTree,
) -> Result<AllProof<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    [(); <<F as Packable>::Packing>::WIDTH]:,
    [(); C::Hasher::HASH_SIZE]:,
    [(); CpuStark::<F, D>::COLUMNS]:,
    [(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
    [(); KeccakStark::<F, D>::COLUMNS]:,
    [(); KeccakStark::<F, D>::PUBLIC_INPUTS]:,
{
    let num_starks = Table::Keccak as usize + 1;
    debug_assert_eq!(num_starks, trace_poly_values.len());
    debug_assert_eq!(num_starks, public_inputs.len());

    let rate_bits = config.fri_config.rate_bits;
    let cap_height = config.fri_config.cap_height;

    let trace_commitments = timed!(
        timing,
        "compute trace commitments",
        trace_poly_values
            .iter()
            .map(|trace| {
                PolynomialBatch::<F, C, D>::from_values(
                    // TODO: Cloning this isn't great; consider having `from_values` accept a reference,
                    // or having `compute_permutation_z_polys` read trace values from the `PolynomialBatch`.
                    trace.clone(),
                    rate_bits,
                    false,
                    cap_height,
                    timing,
                    None,
                )
            })
            .collect::<Vec<_>>()
    );

    let trace_caps = trace_commitments
        .iter()
        .map(|c| c.merkle_tree.cap.clone())
        .collect::<Vec<_>>();
    let mut challenger = Challenger::<F, C::Hasher>::new();
    for cap in &trace_caps {
        challenger.observe_cap(cap);
    }

    let ctl_data_per_table = cross_table_lookup_data::<F, C, D>(
        config,
        &trace_poly_values,
        &all_stark.cross_table_lookups,
        &mut challenger,
    );

    let cpu_proof = prove_single_table(
        &all_stark.cpu_stark,
        config,
        &trace_poly_values[Table::Cpu as usize],
        &trace_commitments[Table::Cpu as usize],
        &ctl_data_per_table[Table::Cpu as usize],
        public_inputs[Table::Cpu as usize]
            .clone()
            .try_into()
            .unwrap(),
        &mut challenger,
        timing,
    )?;
    let keccak_proof = prove_single_table(
        &all_stark.keccak_stark,
        config,
        &trace_poly_values[Table::Keccak as usize],
        &trace_commitments[Table::Keccak as usize],
        &ctl_data_per_table[Table::Keccak as usize],
        public_inputs[Table::Keccak as usize]
            .clone()
            .try_into()
            .unwrap(),
        &mut challenger,
        timing,
    )?;

    Ok(AllProof {
        cpu_proof,
        keccak_proof,
    })
}

/// Compute proof for a single STARK table.
fn prove_single_table<F, C, S, const D: usize>(
    stark: &S,
    config: &StarkConfig,
    trace_poly_values: &[PolynomialValues<F>],
    trace_commitment: &PolynomialBatch<F, C, D>,
    ctl_data: &CtlData<F>,
    public_inputs: [F; S::PUBLIC_INPUTS],
    challenger: &mut Challenger<F, C::Hasher>,
    timing: &mut TimingTree,
) -> Result<StarkProofWithPublicInputs<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); <<F as Packable>::Packing>::WIDTH]:,
    [(); C::Hasher::HASH_SIZE]:,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    let degree = trace_poly_values[0].len();
    let degree_bits = log2_strict(degree);
    let fri_params = config.fri_params(degree_bits);
    let rate_bits = config.fri_config.rate_bits;
    let cap_height = config.fri_config.cap_height;
    assert!(
        fri_params.total_arities() <= degree_bits + rate_bits - cap_height,
        "FRI total reduction arity is too large.",
    );

    // Permutation arguments.
    let permutation_challenges = stark.uses_permutation_args().then(|| {
        get_n_grand_product_challenge_sets(
            challenger,
            config.num_challenges,
            stark.permutation_batch_size(),
        )
    });
    let permutation_zs = permutation_challenges.as_ref().map(|challenges| {
        compute_permutation_z_polys::<F, C, S, D>(stark, config, trace_poly_values, challenges)
    });
    let num_permutation_zs = permutation_zs.as_ref().map(|v| v.len()).unwrap_or(0);

    let z_polys = match permutation_zs {
        None => ctl_data.z_polys(),
        Some(mut permutation_zs) => {
            permutation_zs.extend(ctl_data.z_polys());
            permutation_zs
        }
    };
    assert!(!z_polys.is_empty(), "No CTL?");

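    // Both the permutation `Z`s and the CTL `Z`s go into a single commitment below;
    // everything downstream splits the two groups at index `num_permutation_zs`.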
    let permutation_ctl_zs_commitment = PolynomialBatch::from_values(
        z_polys,
        rate_bits,
        false,
        config.fri_config.cap_height,
        timing,
        None,
    );

    let permutation_ctl_zs_cap = permutation_ctl_zs_commitment.merkle_tree.cap.clone();
    challenger.observe_cap(&permutation_ctl_zs_cap);

    let alphas = challenger.get_n_challenges(config.num_challenges);
    if cfg!(test) {
        check_constraints(
            stark,
            trace_commitment,
            &permutation_ctl_zs_commitment,
            permutation_challenges.as_ref(),
            ctl_data,
            public_inputs,
            alphas.clone(),
            degree_bits,
            num_permutation_zs,
            config,
        );
    }
    let quotient_polys = compute_quotient_polys::<F, <F as Packable>::Packing, C, S, D>(
        stark,
        trace_commitment,
        &permutation_ctl_zs_commitment,
        permutation_challenges.as_ref(),
        ctl_data,
        public_inputs,
        alphas,
        degree_bits,
        num_permutation_zs,
        config,
    );
    let all_quotient_chunks = quotient_polys
        .into_par_iter()
        .flat_map(|mut quotient_poly| {
            quotient_poly
                .trim_to_len(degree * stark.quotient_degree_factor())
                .expect("Quotient division failed; the vanishing polynomial is not divisible by Z_H");
            // Split quotient into degree-n chunks.
            quotient_poly.chunks(degree)
        })
        .collect();
    let quotient_commitment = timed!(
        timing,
        "compute quotient commitment",
        PolynomialBatch::from_coeffs(
            all_quotient_chunks,
            rate_bits,
            false,
            config.fri_config.cap_height,
            timing,
            None,
        )
    );
    let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
    challenger.observe_cap(&quotient_polys_cap);

    let zeta = challenger.get_extension_challenge::<D>();
    // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
    // `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since
    // `(g * zeta)^n = zeta^n`, where `n` is the order of `g`.
    let g = F::primitive_root_of_unity(degree_bits);
    ensure!(
        zeta.exp_power_of_2(degree_bits) != F::Extension::ONE,
        "Opening point is in the subgroup."
    );

    let openings = StarkOpeningSet::new(
        zeta,
        g,
        trace_commitment,
        &permutation_ctl_zs_commitment,
        &quotient_commitment,
        degree_bits,
        stark.num_permutation_batches(config),
    );
    challenger.observe_openings(&openings.to_fri_openings());

    let initial_merkle_trees = vec![
        trace_commitment,
        &permutation_ctl_zs_commitment,
        &quotient_commitment,
    ];

    let opening_proof = timed!(
        timing,
        "compute openings proof",
        PolynomialBatch::prove_openings(
            &stark.fri_instance(zeta, g, degree_bits, ctl_data.len(), config),
            &initial_merkle_trees,
            challenger,
            &fri_params,
            timing,
        )
    );
    let proof = StarkProof {
        trace_cap: trace_commitment.merkle_tree.cap.clone(),
        permutation_ctl_zs_cap,
        quotient_polys_cap,
        openings,
        opening_proof,
    };

    Ok(StarkProofWithPublicInputs {
        proof,
        public_inputs: public_inputs.to_vec(),
    })
}

/// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`,
/// where the `C_i`s are the Stark constraints.
fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
    stark: &S,
    trace_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
    ctl_data: &CtlData<F>,
    public_inputs: [F; S::PUBLIC_INPUTS],
    alphas: Vec<F>,
    degree_bits: usize,
    num_permutation_zs: usize,
    config: &StarkConfig,
) -> Vec<PolynomialCoeffs<F>>
where
    F: RichField + Extendable<D>,
    P: PackedField<Scalar = F>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
    [(); P::WIDTH]:,
{
    let degree = 1 << degree_bits;
    let rate_bits = config.fri_config.rate_bits;

    let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor());
    assert!(
        quotient_degree_bits <= rate_bits,
        "Having constraints of degree higher than the rate is not supported yet."
    );
    let step = 1 << (rate_bits - quotient_degree_bits);
    // When opening the `Z` polynomials at the "next" point, we need to look at the point `next_step` steps away.
    let next_step = 1 << quotient_degree_bits;

    // Evaluation of the first Lagrange polynomial on the LDE domain.
    let lagrange_first = PolynomialValues::selector(degree, 0).lde_onto_coset(quotient_degree_bits);
    // Evaluation of the last Lagrange polynomial on the LDE domain.
    let lagrange_last =
        PolynomialValues::selector(degree, degree - 1).lde_onto_coset(quotient_degree_bits);

    let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, quotient_degree_bits);

    // Retrieve the trace LDE values at index `i_start`.
    let get_trace_values_packed = |i_start| -> [P; S::COLUMNS] {
        trace_commitment
            .get_lde_values_packed(i_start, step)
            .try_into()
            .unwrap()
    };

    // Last element of the subgroup.
    let last = F::primitive_root_of_unity(degree_bits).inverse();
    let size = degree << quotient_degree_bits;
    let coset = F::cyclic_subgroup_coset_known_order(
        F::primitive_root_of_unity(degree_bits + quotient_degree_bits),
        F::coset_shift(),
        size,
    );

    // We will step by `P::WIDTH`, and in each iteration, evaluate the quotient polynomial at
    // a batch of `P::WIDTH` points.
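    // (For instance, with `P::WIDTH == 4`, `i_start` takes the values 0, 4, 8, ..., and each
    // iteration evaluates the quotient at four consecutive points of the evaluation coset.)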
    let quotient_values = (0..size)
        .into_par_iter()
        .step_by(P::WIDTH)
        .map(|i_start| {
            let i_next_start = (i_start + next_step) % size;
            let i_range = i_start..i_start + P::WIDTH;

            let x = *P::from_slice(&coset[i_range.clone()]);
            let z_last = x - last;
            let lagrange_basis_first = *P::from_slice(&lagrange_first.values[i_range.clone()]);
            let lagrange_basis_last = *P::from_slice(&lagrange_last.values[i_range]);

            let mut consumer = ConstraintConsumer::new(
                alphas.clone(),
                z_last,
                lagrange_basis_first,
                lagrange_basis_last,
            );
            let vars = StarkEvaluationVars {
                local_values: &get_trace_values_packed(i_start),
                next_values: &get_trace_values_packed(i_next_start),
                public_inputs: &public_inputs,
            };
            let permutation_check_vars =
                permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
                    local_zs: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
                        [..num_permutation_zs]
                        .to_vec(),
                    next_zs: permutation_ctl_zs_commitment
                        .get_lde_values_packed(i_next_start, step)[..num_permutation_zs]
                        .to_vec(),
                    permutation_challenge_sets: permutation_challenge_sets.to_vec(),
                });
            let ctl_vars = ctl_data
                .zs_columns
                .iter()
                .enumerate()
                .map(|(i, (_, columns))| CtlCheckVars::<F, F, P, 1> {
                    local_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_start, step)
                        [num_permutation_zs + i],
                    next_z: permutation_ctl_zs_commitment.get_lde_values_packed(i_next_start, step)
                        [num_permutation_zs + i],
                    challenges: ctl_data.challenges.challenges[i % config.num_challenges],
                    columns,
                })
                .collect::<Vec<_>>();
            eval_vanishing_poly::<F, F, P, C, S, D, 1>(
                stark,
                config,
                vars,
                permutation_check_vars,
                &ctl_vars,
                &mut consumer,
            );
            let mut constraints_evals = consumer.accumulators();
            // We divide the constraint evaluations by `Z_H(x)`.
            let denominator_inv = z_h_on_coset.eval_inverse_packed(i_start);
            for eval in &mut constraints_evals {
                *eval *= denominator_inv;
            }
            constraints_evals
        })
        .collect::<Vec<_>>();

    transpose(&quotient_values)
        .into_par_iter()
        .map(PolynomialValues::new)
        .map(|values| values.coset_ifft(F::coset_shift()))
        .collect()
}

/// Check that all constraints evaluate to zero on `H`.
/// Can also be used to check the degree of the constraints by evaluating on a larger subgroup.
fn check_constraints<'a, F, C, S, const D: usize>(
    stark: &S,
    trace_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
    ctl_data: &CtlData<F>,
    public_inputs: [F; S::PUBLIC_INPUTS],
    alphas: Vec<F>,
    degree_bits: usize,
    num_permutation_zs: usize,
    config: &StarkConfig,
) where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    let degree = 1 << degree_bits;
    let rate_bits = 0; // Set this to a higher value to check the constraint degree.

    let size = degree << rate_bits;
    let step = 1 << rate_bits;

    // Evaluation of the first Lagrange polynomial.
    let lagrange_first = PolynomialValues::selector(degree, 0).lde(rate_bits);
    // Evaluation of the last Lagrange polynomial.
    let lagrange_last = PolynomialValues::selector(degree, degree - 1).lde(rate_bits);

    let subgroup = F::two_adic_subgroup(degree_bits + rate_bits);

    // Retrieve the polynomial values at index `i`.
    let get_comm_values = |comm: &PolynomialBatch<F, C, D>, i| -> Vec<F> {
        comm.polynomials
            .iter()
            .map(|poly| poly.eval(subgroup[i])) // O(n^2) FTW
            .collect()
    };

    // Last element of the subgroup.
    let last = F::primitive_root_of_unity(degree_bits).inverse();

    let constraint_values = (0..size)
        .map(|i| {
            let i_next = (i + step) % size;

            let x = subgroup[i];
            let z_last = x - last;
            let lagrange_basis_first = lagrange_first.values[i];
            let lagrange_basis_last = lagrange_last.values[i];

            let mut consumer = ConstraintConsumer::new(
                alphas.clone(),
                z_last,
                lagrange_basis_first,
                lagrange_basis_last,
            );
            let vars = StarkEvaluationVars {
                local_values: &get_comm_values(trace_commitment, i).try_into().unwrap(),
                next_values: &get_comm_values(trace_commitment, i_next)
                    .try_into()
                    .unwrap(),
                public_inputs: &public_inputs,
            };
            let permutation_check_vars =
                permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
                    local_zs: permutation_ctl_zs_commitment.get_lde_values_packed(i, step)
                        [..num_permutation_zs]
                        .to_vec(),
                    next_zs: permutation_ctl_zs_commitment.get_lde_values_packed(i_next, step)
                        [..num_permutation_zs]
                        .to_vec(),
                    permutation_challenge_sets: permutation_challenge_sets.to_vec(),
                });

            let ctl_vars = ctl_data
                .zs_columns
                .iter()
                .enumerate()
                .map(|(ctl_index, (_, columns))| CtlCheckVars::<F, F, F, 1> {
                    local_z: get_comm_values(permutation_ctl_zs_commitment, i)
                        [num_permutation_zs + ctl_index],
                    next_z: get_comm_values(permutation_ctl_zs_commitment, i_next)
                        [num_permutation_zs + ctl_index],
                    challenges: ctl_data.challenges.challenges[ctl_index % config.num_challenges],
                    columns,
                })
                .collect::<Vec<_>>();
            eval_vanishing_poly::<F, F, F, C, S, D, 1>(
                stark,
                config,
                vars,
                permutation_check_vars,
                &ctl_vars,
                &mut consumer,
            );
            consumer.accumulators()
        })
        .collect::<Vec<_>>();

    let values = transpose(&constraint_values);
    for v in values {
        assert!(v.iter().all(|x| x.is_zero()));
    }
}
331
starky2/src/recursive_verifier.rs
Normal file
@ -0,0 +1,331 @@
use std::iter::once;

use anyhow::{ensure, Result};
use itertools::Itertools;
use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
use plonky2::fri::witness_util::set_fri_proof_target;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::witness::Witness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::util::reducing::ReducingFactorTarget;
use plonky2::with_context;

use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::permutation::PermutationCheckDataTarget;
use crate::proof::{
    StarkOpeningSetTarget, StarkProof, StarkProofChallengesTarget, StarkProofTarget,
    StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::vars::StarkEvaluationTargets;

pub fn verify_stark_proof_circuit<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    stark: S,
    proof_with_pis: StarkProofWithPublicInputsTarget<D>,
    inner_config: &StarkConfig,
) where
    C::Hasher: AlgebraicHasher<F>,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS);
    let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config);
    let challenges = with_context!(
        builder,
        "compute challenges",
        proof_with_pis.get_challenges::<F, C, S>(builder, &stark, inner_config)
    );

    verify_stark_proof_with_challenges_circuit::<F, C, S, D>(
        builder,
        stark,
        proof_with_pis,
        challenges,
        inner_config,
        degree_bits,
    );
}

/// Recursively verifies an inner proof.
fn verify_stark_proof_with_challenges_circuit<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    stark: S,
    proof_with_pis: StarkProofWithPublicInputsTarget<D>,
    challenges: StarkProofChallengesTarget<D>,
    inner_config: &StarkConfig,
    degree_bits: usize,
) where
    C::Hasher: AlgebraicHasher<F>,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    check_permutation_options(&stark, &proof_with_pis, &challenges).unwrap();
    let one = builder.one_extension();

    let StarkProofWithPublicInputsTarget {
        proof,
        public_inputs,
    } = proof_with_pis;
    let StarkOpeningSetTarget {
        local_values,
        next_values,
        permutation_zs,
        permutation_zs_right,
        quotient_polys,
    } = &proof.openings;
    let vars = StarkEvaluationTargets {
        local_values: &local_values.to_vec().try_into().unwrap(),
        next_values: &next_values.to_vec().try_into().unwrap(),
        public_inputs: &public_inputs
            .into_iter()
            .map(|t| builder.convert_to_ext(t))
            .collect::<Vec<_>>()
            .try_into()
            .unwrap(),
    };

    let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits);
    let z_h_zeta = builder.sub_extension(zeta_pow_deg, one);
    let (l_1, l_last) =
        eval_l_1_and_l_last_circuit(builder, degree_bits, challenges.stark_zeta, z_h_zeta);
    let last =
        builder.constant_extension(F::Extension::primitive_root_of_unity(degree_bits).inverse());
    let z_last = builder.sub_extension(challenges.stark_zeta, last);

    let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
        builder.zero_extension(),
        challenges.stark_alphas,
        z_last,
        l_1,
        l_last,
    );

    let permutation_data = stark
        .uses_permutation_args()
        .then(|| PermutationCheckDataTarget {
            local_zs: permutation_zs.as_ref().unwrap().clone(),
            next_zs: permutation_zs_right.as_ref().unwrap().clone(),
            permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(),
        });

    with_context!(
        builder,
        "evaluate vanishing polynomial",
        eval_vanishing_poly_circuit::<F, C, S, D>(
            builder,
            &stark,
            inner_config,
            vars,
            permutation_data,
            &mut consumer,
        )
    );
    let vanishing_polys_zeta = consumer.accumulators();

    // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
    let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
    for (i, chunk) in quotient_polys
        .chunks(stark.quotient_degree_factor())
        .enumerate()
    {
        let recombined_quotient = scale.reduce(chunk, builder);
        let computed_vanishing_poly = builder.mul_extension(z_h_zeta, recombined_quotient);
        builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly);
    }

    let merkle_caps = once(proof.trace_cap)
        .chain(proof.permutation_zs_cap)
        .chain(once(proof.quotient_polys_cap))
        .collect_vec();

    let fri_instance = stark.fri_instance_target(
        builder,
        challenges.stark_zeta,
        F::primitive_root_of_unity(degree_bits),
        inner_config,
    );
    builder.verify_fri_proof::<C>(
        &fri_instance,
        &proof.openings.to_fri_openings(),
        &challenges.fri_challenges,
        &merkle_caps,
        &proof.opening_proof,
        &inner_config.fri_params(degree_bits),
    );
}

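/// Circuit analogue of `eval_l_1_and_l_last` in `verifier.rs`: given `z_x = Z_H(x) = x^n - 1`,
/// computes `Z_H(x) / (n * (x - 1))` and `Z_H(x) / (n * (g * x - 1))`.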
fn eval_l_1_and_l_last_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    log_n: usize,
    x: ExtensionTarget<D>,
    z_x: ExtensionTarget<D>,
) -> (ExtensionTarget<D>, ExtensionTarget<D>) {
    let n = builder.constant_extension(F::Extension::from_canonical_usize(1 << log_n));
    let g = builder.constant_extension(F::Extension::primitive_root_of_unity(log_n));
    let one = builder.one_extension();
    let l_1_deno = builder.mul_sub_extension(n, x, n);
    let l_last_deno = builder.mul_sub_extension(g, x, one);
    let l_last_deno = builder.mul_extension(n, l_last_deno);

    (
        builder.div_extension(z_x, l_1_deno),
        builder.div_extension(z_x, l_last_deno),
    )
}

pub fn add_virtual_stark_proof_with_pis<
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
    const D: usize,
>(
    builder: &mut CircuitBuilder<F, D>,
    stark: S,
    config: &StarkConfig,
    degree_bits: usize,
) -> StarkProofWithPublicInputsTarget<D> {
    let proof = add_virtual_stark_proof::<F, S, D>(builder, stark, config, degree_bits);
    let public_inputs = builder.add_virtual_targets(S::PUBLIC_INPUTS);
    StarkProofWithPublicInputsTarget {
        proof,
        public_inputs,
    }
}

pub fn add_virtual_stark_proof<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: S,
    config: &StarkConfig,
    degree_bits: usize,
) -> StarkProofTarget<D> {
    let fri_params = config.fri_params(degree_bits);
    let cap_height = fri_params.config.cap_height;

    let num_leaves_per_oracle = once(S::COLUMNS)
        .chain(
            stark
                .uses_permutation_args()
                .then(|| stark.num_permutation_batches(config)),
        )
        .chain(once(stark.quotient_degree_factor() * config.num_challenges))
        .collect_vec();

    let permutation_zs_cap = stark
        .uses_permutation_args()
        .then(|| builder.add_virtual_cap(cap_height));

    StarkProofTarget {
        trace_cap: builder.add_virtual_cap(cap_height),
        permutation_zs_cap,
        quotient_polys_cap: builder.add_virtual_cap(cap_height),
        openings: add_stark_opening_set::<F, S, D>(builder, stark, config),
        opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params),
    }
}

fn add_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: S,
    config: &StarkConfig,
) -> StarkOpeningSetTarget<D> {
    let num_challenges = config.num_challenges;
    StarkOpeningSetTarget {
        local_values: builder.add_virtual_extension_targets(S::COLUMNS),
        next_values: builder.add_virtual_extension_targets(S::COLUMNS),
        permutation_zs: stark
            .uses_permutation_args()
            .then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
        permutation_zs_right: stark
            .uses_permutation_args()
            .then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
        quotient_polys: builder
            .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
    }
}

pub fn set_stark_proof_with_pis_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
    witness: &mut W,
    stark_proof_with_pis_target: &StarkProofWithPublicInputsTarget<D>,
    stark_proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
) where
    F: RichField + Extendable<D>,
    C::Hasher: AlgebraicHasher<F>,
    W: Witness<F>,
{
    let StarkProofWithPublicInputs {
        proof,
        public_inputs,
    } = stark_proof_with_pis;
    let StarkProofWithPublicInputsTarget {
        proof: pt,
        public_inputs: pi_targets,
    } = stark_proof_with_pis_target;

    // Set public inputs.
    for (&pi_t, &pi) in pi_targets.iter().zip_eq(public_inputs) {
        witness.set_target(pi_t, pi);
    }

    set_stark_proof_target(witness, pt, proof);
}

pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
    witness: &mut W,
    proof_target: &StarkProofTarget<D>,
    proof: &StarkProof<F, C, D>,
) where
    F: RichField + Extendable<D>,
    C::Hasher: AlgebraicHasher<F>,
    W: Witness<F>,
{
    witness.set_cap_target(&proof_target.trace_cap, &proof.trace_cap);
    witness.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap);

    witness.set_fri_openings(
        &proof_target.openings.to_fri_openings(),
        &proof.openings.to_fri_openings(),
    );

    if let Some(permutation_zs_cap_target) = &proof_target.permutation_zs_cap {
        witness.set_cap_target(permutation_zs_cap_target, &proof.permutation_ctl_zs_cap);
    }

    set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);
}

/// Utility function to check that all permutation data wrapped in `Option`s are `Some` iff
/// the Stark uses a permutation argument.
fn check_permutation_options<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
    stark: &S,
    proof_with_pis: &StarkProofWithPublicInputsTarget<D>,
    challenges: &StarkProofChallengesTarget<D>,
) -> Result<()> {
    let options_is_some = [
        proof_with_pis.proof.permutation_zs_cap.is_some(),
        proof_with_pis.proof.openings.permutation_zs.is_some(),
        proof_with_pis.proof.openings.permutation_zs_right.is_some(),
        challenges.permutation_challenge_sets.is_some(),
    ];
    ensure!(
        options_is_some
            .into_iter()
            .all(|b| b == stark.uses_permutation_args()),
        "Permutation data doesn't match the Stark configuration."
    );
    Ok(())
}
229
starky2/src/stark.rs
Normal file
@ -0,0 +1,229 @@
use std::iter::once;

use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::field::packed_field::PackedField;
use plonky2::fri::structure::{
    FriBatchInfo, FriBatchInfoTarget, FriInstanceInfo, FriInstanceInfoTarget, FriOracleInfo,
    FriPolynomialInfo,
};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2_util::ceil_div_usize;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;

/// Represents a STARK system.
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
    /// The total number of columns in the trace.
    const COLUMNS: usize;
    /// The number of public inputs.
    const PUBLIC_INPUTS: usize;

    /// Evaluate constraints at a vector of points.
    ///
    /// The points are elements of a field `FE`, a degree `D2` extension of `F`. This lets us
    /// evaluate constraints over a larger domain if desired. This can also be called with `FE = F`
    /// and `D2 = 1`, in which case we are using the trivial extension, i.e. just evaluating
    /// constraints over `F`.
    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    /// Evaluate constraints at a vector of points from the base field `F`.
    fn eval_packed_base<P: PackedField<Scalar = F>>(
        &self,
        vars: StarkEvaluationVars<F, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) {
        self.eval_packed_generic(vars, yield_constr)
    }

    /// Evaluate constraints at a single point from the degree `D` extension field.
    fn eval_ext(
        &self,
        vars: StarkEvaluationVars<
            F::Extension,
            F::Extension,
            { Self::COLUMNS },
            { Self::PUBLIC_INPUTS },
        >,
        yield_constr: &mut ConstraintConsumer<F::Extension>,
    ) {
        self.eval_packed_generic(vars, yield_constr)
    }

    /// Evaluate constraints at a single point from the degree `D` extension field. This is like
    /// `eval_ext`, except in the context of a recursive circuit.
    /// Note: constraints must be added through `yield_constr.constraint(builder, constraint)` in the
    /// same order as they are given in `eval_packed_generic`.
    fn eval_ext_circuit(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    );

    /// The maximum constraint degree.
    fn constraint_degree(&self) -> usize;

    /// The degree of the quotient polynomial as a multiple of the trace length: one less than
    /// the maximum constraint degree, but at least 1.
    fn quotient_degree_factor(&self) -> usize {
        1.max(self.constraint_degree() - 1)
    }
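
    // Informally: a degree-`d` constraint composed with trace polynomials of degree `n - 1`
    // yields a vanishing polynomial of degree about `d * n`; dividing by `Z_H` (degree `n`)
    // leaves a quotient of degree about `(d - 1) * n`, i.e. `d - 1` chunks of length `n`.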

    /// Computes the FRI instance used to prove this Stark.
    fn fri_instance(
        &self,
        zeta: F::Extension,
        g: F,
        degree_bits: usize,
        num_ctl_zs: usize,
        config: &StarkConfig,
    ) -> FriInstanceInfo<F, D> {
        let no_blinding_oracle = FriOracleInfo { blinding: false };
        let mut oracle_indices = 0..;
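        // Each call to `oracle_indices.next()` allocates the next oracle index; the final call,
        // in `FriInstanceInfo` below, doubles as a count of how many oracles were allocated.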

        let trace_info =
            FriPolynomialInfo::from_range(oracle_indices.next().unwrap(), 0..Self::COLUMNS);

        let num_permutation_batches = self.num_permutation_batches(config);
        let permutation_ctl_zs_info = (num_permutation_batches + num_ctl_zs > 0).then(|| {
            let permutation_ctl_index = oracle_indices.next().unwrap();
            FriPolynomialInfo::from_range(
                permutation_ctl_index,
                0..num_permutation_batches + num_ctl_zs,
            )
        });

        let ctl_zs_info = (num_ctl_zs > 0).then(|| {
            let index = permutation_ctl_zs_info
                .as_ref()
                .map(|info| info[0].oracle_index)
                .unwrap_or_else(|| oracle_indices.next().unwrap());
            FriPolynomialInfo::from_range(
                index,
                num_permutation_batches..num_permutation_batches + num_ctl_zs,
            )
        });

        let quotient_info = FriPolynomialInfo::from_range(
            oracle_indices.next().unwrap(),
            0..self.quotient_degree_factor() * config.num_challenges,
        );

        let zeta_batch = FriBatchInfo {
            point: zeta,
            polynomials: once(trace_info.clone())
                .chain(permutation_ctl_zs_info.clone())
                .chain(once(quotient_info))
                .collect::<Vec<_>>()
                .concat(),
        };
        let zeta_right_batch = FriBatchInfo {
            point: zeta.scalar_mul(g),
            polynomials: once(trace_info)
                .chain(permutation_ctl_zs_info)
                .collect::<Vec<_>>()
                .concat(),
        };
        let ctl_last_batch = ctl_zs_info.map(|info| FriBatchInfo {
            point: F::Extension::primitive_root_of_unity(degree_bits).inverse(),
            polynomials: info,
        });
        FriInstanceInfo {
            oracles: vec![no_blinding_oracle; oracle_indices.next().unwrap()],
            batches: once(zeta_batch)
                .chain(once(zeta_right_batch))
                .chain(ctl_last_batch)
                .collect::<Vec<_>>(),
        }
    }

    /// Circuit version of `fri_instance`, for use in recursive verification.
    fn fri_instance_target(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        zeta: ExtensionTarget<D>,
        g: F,
        config: &StarkConfig,
    ) -> FriInstanceInfoTarget<D> {
        let no_blinding_oracle = FriOracleInfo { blinding: false };
        let mut oracle_indices = 0..;

        let trace_info =
            FriPolynomialInfo::from_range(oracle_indices.next().unwrap(), 0..Self::COLUMNS);

        let permutation_zs_info = if self.uses_permutation_args() {
            FriPolynomialInfo::from_range(
                oracle_indices.next().unwrap(),
                0..self.num_permutation_batches(config),
            )
        } else {
            vec![]
        };

        let quotient_info = FriPolynomialInfo::from_range(
            oracle_indices.next().unwrap(),
            0..self.quotient_degree_factor() * config.num_challenges,
        );

        let zeta_batch = FriBatchInfoTarget {
            point: zeta,
            polynomials: [
                trace_info.clone(),
                permutation_zs_info.clone(),
                quotient_info,
            ]
            .concat(),
        };
        let zeta_right = builder.mul_const_extension(g, zeta);
        let zeta_right_batch = FriBatchInfoTarget {
            point: zeta_right,
            polynomials: [trace_info, permutation_zs_info].concat(),
        };
        FriInstanceInfoTarget {
            oracles: vec![no_blinding_oracle; oracle_indices.next().unwrap()],
            batches: vec![zeta_batch, zeta_right_batch],
        }
    }

    /// Pairs of lists of columns that should be permutations of one another. A permutation argument
    /// will be used for each such pair. Empty by default.
    fn permutation_pairs(&self) -> Vec<PermutationPair> {
        vec![]
    }

    fn uses_permutation_args(&self) -> bool {
        !self.permutation_pairs().is_empty()
    }

    /// The number of permutation argument instances that can be combined into a single constraint.
    fn permutation_batch_size(&self) -> usize {
        // The permutation argument constraints look like
        //     Z(x) \prod(...) = Z(g x) \prod(...)
        // where each product has a number of terms equal to the batch size. So our batch size
        // should be one less than our constraint degree, which happens to be our quotient degree.
        self.quotient_degree_factor()
    }
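
    // Example: with `constraint_degree() == 3`, the batch size is 2, so a batched check
    //     Z(x) p_0(x) p_1(x) = Z(g x) q_0(x) q_1(x)
    // has degree 3 on each side, staying within the maximum constraint degree.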

    fn num_permutation_instances(&self, config: &StarkConfig) -> usize {
        self.permutation_pairs().len() * config.num_challenges
    }

    fn num_permutation_batches(&self, config: &StarkConfig) -> usize {
        ceil_div_usize(
            self.num_permutation_instances(config),
            self.permutation_batch_size(),
        )
    }
}
87
starky2/src/stark_testing.rs
Normal file
@ -0,0 +1,87 @@
use anyhow::{ensure, Result};
use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::hash::hash_types::RichField;
use plonky2::util::transpose;
use plonky2_util::{log2_ceil, log2_strict};

use crate::constraint_consumer::ConstraintConsumer;
use crate::stark::Stark;
use crate::vars::StarkEvaluationVars;

const WITNESS_SIZE: usize = 1 << 5;

/// Tests that the constraints imposed by the given STARK are low-degree by applying them to random
/// low-degree witness polynomials.
pub fn test_stark_low_degree<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
    stark: S,
) -> Result<()>
where
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    let rate_bits = log2_ceil(stark.constraint_degree() + 1);

    let trace_ldes = random_low_degree_matrix::<F>(S::COLUMNS, rate_bits);
    let size = trace_ldes.len();
    let public_inputs = F::rand_arr::<{ S::PUBLIC_INPUTS }>();

    let lagrange_first = PolynomialValues::selector(WITNESS_SIZE, 0).lde(rate_bits);
    let lagrange_last = PolynomialValues::selector(WITNESS_SIZE, WITNESS_SIZE - 1).lde(rate_bits);

    let last = F::primitive_root_of_unity(log2_strict(WITNESS_SIZE)).inverse();
    let subgroup =
        F::cyclic_subgroup_known_order(F::primitive_root_of_unity(log2_strict(size)), size);
    let alpha = F::rand();
    let constraint_evals = (0..size)
        .map(|i| {
            let vars = StarkEvaluationVars {
                local_values: &trace_ldes[i].clone().try_into().unwrap(),
                next_values: &trace_ldes[(i + (1 << rate_bits)) % size]
                    .clone()
                    .try_into()
                    .unwrap(),
                public_inputs: &public_inputs,
            };

            let mut consumer = ConstraintConsumer::<F>::new(
                vec![alpha],
                subgroup[i] - last,
                lagrange_first.values[i],
                lagrange_last.values[i],
            );
            stark.eval_packed_base(vars, &mut consumer);
            consumer.accumulators()[0]
        })
        .collect::<Vec<_>>();

    let constraint_eval_degree = PolynomialValues::new(constraint_evals).degree();
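    // A degree-`d` constraint applied to witness polynomials of degree `WITNESS_SIZE - 1`
    // yields a composition of degree at most `d * (WITNESS_SIZE - 1) <= d * WITNESS_SIZE - 1`.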
    let maximum_degree = WITNESS_SIZE * stark.constraint_degree() - 1;

    ensure!(
        constraint_eval_degree <= maximum_degree,
        "Expected degree at most {} * {} - 1 = {}, actual {:?}",
        WITNESS_SIZE,
        stark.constraint_degree(),
        maximum_degree,
        constraint_eval_degree
    );

    Ok(())
}

fn random_low_degree_matrix<F: Field>(num_polys: usize, rate_bits: usize) -> Vec<Vec<F>> {
    let polys = (0..num_polys)
        .map(|_| random_low_degree_values(rate_bits))
        .collect::<Vec<_>>();

    transpose(&polys)
}

fn random_low_degree_values<F: Field>(rate_bits: usize) -> Vec<F> {
    PolynomialCoeffs::new(F::rand_vec(WITNESS_SIZE))
        .lde(rate_bits)
        .fft()
        .values
}
16
starky2/src/util.rs
Normal file
@ -0,0 +1,16 @@
use itertools::Itertools;
use plonky2::field::field_types::Field;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::util::transpose;

/// A helper function to transpose a row-wise trace and put it in the format that `prove` expects.
pub fn trace_rows_to_poly_values<F: Field, const COLUMNS: usize>(
    trace_rows: Vec<[F; COLUMNS]>,
) -> Vec<PolynomialValues<F>> {
    let trace_row_vecs = trace_rows.into_iter().map(|row| row.to_vec()).collect_vec();
    let trace_col_vecs: Vec<Vec<F>> = transpose(&trace_row_vecs);
    trace_col_vecs
        .into_iter()
        .map(PolynomialValues::new)
        .collect()
}
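
// Usage sketch (hypothetical two-column trace; not part of this crate's API):
//
//     let rows: Vec<[F; 2]> = vec![[F::ZERO, F::ONE], [F::ONE, F::TWO]];
//     let columns = trace_rows_to_poly_values(rows);
//     assert_eq!(columns.len(), 2); // one `PolynomialValues` per column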
69
starky2/src/vanishing_poly.rs
Normal file
@ -0,0 +1,69 @@
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::GenericConfig;

use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{eval_cross_table_lookup_checks, CtlCheckVars};
use crate::permutation::{
    eval_permutation_checks, eval_permutation_checks_circuit, PermutationCheckDataTarget,
    PermutationCheckVars,
};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

pub(crate) fn eval_vanishing_poly<F, FE, P, C, S, const D: usize, const D2: usize>(
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
    permutation_vars: Option<PermutationCheckVars<F, FE, P, D2>>,
    ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
    consumer: &mut ConstraintConsumer<P>,
) where
    F: RichField + Extendable<D>,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    stark.eval_packed_generic(vars, consumer);
    if let Some(permutation_vars) = permutation_vars {
        eval_permutation_checks::<F, FE, P, C, S, D, D2>(
            stark,
            config,
            vars,
            permutation_vars,
            consumer,
        );
    }
    eval_cross_table_lookup_checks::<F, FE, P, C, S, D, D2>(vars, ctl_vars, consumer);
}

pub(crate) fn eval_vanishing_poly_circuit<F, C, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
    permutation_data: Option<PermutationCheckDataTarget<D>>,
    consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    stark.eval_ext_circuit(builder, vars, consumer);
    if let Some(permutation_data) = permutation_data {
        eval_permutation_checks_circuit::<F, S, D>(
            builder,
            stark,
            config,
            vars,
            permutation_data,
            consumer,
        );
    }
}
26
starky2/src/vars.rs
Normal file
@ -0,0 +1,26 @@
use plonky2::field::field_types::Field;
use plonky2::field::packed_field::PackedField;
use plonky2::iop::ext_target::ExtensionTarget;

#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize, const PUBLIC_INPUTS: usize>
where
    F: Field,
    P: PackedField<Scalar = F>,
{
    pub local_values: &'a [P; COLUMNS],
    pub next_values: &'a [P; COLUMNS],
    pub public_inputs: &'a [P::Scalar; PUBLIC_INPUTS],
}

#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationTargets<
    'a,
    const D: usize,
    const COLUMNS: usize,
    const PUBLIC_INPUTS: usize,
> {
    pub local_values: &'a [ExtensionTarget<D>; COLUMNS],
    pub next_values: &'a [ExtensionTarget<D>; COLUMNS],
    pub public_inputs: &'a [ExtensionTarget<D>; PUBLIC_INPUTS],
}
225
starky2/src/verifier.rs
Normal file
@ -0,0 +1,225 @@
use anyhow::{ensure, Result};
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::fri::verifier::verify_fri_proof;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::plonk::plonk_common::reduce_with_powers;

use crate::all_stark::{AllStark, Table};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars};
use crate::keccak::keccak_stark::KeccakStark;
use crate::permutation::PermutationCheckVars;
use crate::proof::{
    AllProof, AllProofChallenges, StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;

pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
    all_stark: AllStark<F, D>,
    all_proof: AllProof<F, C, D>,
    config: &StarkConfig,
) -> Result<()>
where
    [(); CpuStark::<F, D>::COLUMNS]:,
    [(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
    [(); KeccakStark::<F, D>::COLUMNS]:,
    [(); C::Hasher::HASH_SIZE]:,
{
    let AllProofChallenges {
        cpu_challenges,
        keccak_challenges,
        ctl_challenges,
    } = all_proof.get_challenges(&all_stark, config);

    let nums_permutation_zs = all_stark.nums_permutation_zs(config);

    let AllStark {
        cpu_stark,
        keccak_stark,
        cross_table_lookups,
    } = all_stark;

    let ctl_vars_per_table = CtlCheckVars::from_proofs(
        &all_proof.proofs(),
        &cross_table_lookups,
        &ctl_challenges,
        &nums_permutation_zs,
    );

    verify_stark_proof_with_challenges(
        cpu_stark,
        &all_proof.cpu_proof,
        cpu_challenges,
        &ctl_vars_per_table[Table::Cpu as usize],
        config,
    )?;
    verify_stark_proof_with_challenges(
        keccak_stark,
        &all_proof.keccak_proof,
        keccak_challenges,
        &ctl_vars_per_table[Table::Keccak as usize],
        config,
    )?;

    verify_cross_table_lookups(
        cross_table_lookups,
        &all_proof.proofs(),
        ctl_challenges,
        config,
    )
}

pub(crate) fn verify_stark_proof_with_challenges<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    const D: usize,
>(
    stark: S,
    proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
    challenges: StarkProofChallenges<F, D>,
    ctl_vars: &[CtlCheckVars<F, F::Extension, F::Extension, D>],
    config: &StarkConfig,
) -> Result<()>
where
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
    [(); C::Hasher::HASH_SIZE]:,
{
    let StarkProofWithPublicInputs {
        proof,
        public_inputs,
    } = proof_with_pis;
    let StarkOpeningSet {
        local_values,
        next_values,
        permutation_ctl_zs,
        permutation_ctl_zs_right,
        ctl_zs_last,
        quotient_polys,
    } = &proof.openings;
    let vars = StarkEvaluationVars {
        local_values: &local_values.to_vec().try_into().unwrap(),
        next_values: &next_values.to_vec().try_into().unwrap(),
        public_inputs: &public_inputs
            .iter()
            .copied()
            .map(F::Extension::from_basefield)
            .collect::<Vec<_>>()
            .try_into()
            .unwrap(),
    };

    let degree_bits = proof.recover_degree_bits(config);
    let (l_1, l_last) = eval_l_1_and_l_last(degree_bits, challenges.stark_zeta);
    let last = F::primitive_root_of_unity(degree_bits).inverse();
    let z_last = challenges.stark_zeta - last.into();
    let mut consumer = ConstraintConsumer::<F::Extension>::new(
        challenges
            .stark_alphas
            .iter()
            .map(|&alpha| F::Extension::from_basefield(alpha))
            .collect::<Vec<_>>(),
        z_last,
        l_1,
        l_last,
    );
    let num_permutation_zs = stark.num_permutation_batches(config);
    let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckVars {
        local_zs: permutation_ctl_zs[..num_permutation_zs].to_vec(),
        next_zs: permutation_ctl_zs_right[..num_permutation_zs].to_vec(),
        permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(),
    });
    eval_vanishing_poly::<F, F::Extension, F::Extension, C, S, D, D>(
        &stark,
        config,
        vars,
        permutation_data,
        ctl_vars,
        &mut consumer,
    );
    let vanishing_polys_zeta = consumer.accumulators();

    // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
    let zeta_pow_deg = challenges.stark_zeta.exp_power_of_2(degree_bits);
    let z_h_zeta = zeta_pow_deg - F::Extension::ONE;
    // `quotient_polys_zeta` holds `num_challenges * quotient_degree_factor` evaluations.
    // Each chunk of `quotient_degree_factor` holds the evaluations of `t_0(zeta),...,t_{quotient_degree_factor-1}(zeta)`
    // where the "real" quotient polynomial is `t(X) = t_0(X) + t_1(X)*X^n + t_2(X)*X^{2n} + ...`.
    // So to reconstruct `t(zeta)` we can compute `reduce_with_powers(chunk, zeta^n)` for each
    // `quotient_degree_factor`-sized chunk of the original evaluations.
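    // Concretely, with `quotient_degree_factor() == 2`, a chunk is `[t_0(zeta), t_1(zeta)]` and
    // `t(zeta) = t_0(zeta) + zeta^n * t_1(zeta)`, i.e. `reduce_with_powers(chunk, zeta_pow_deg)`.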
    for (i, chunk) in quotient_polys
        .chunks(stark.quotient_degree_factor())
        .enumerate()
    {
        ensure!(
            vanishing_polys_zeta[i] == z_h_zeta * reduce_with_powers(chunk, zeta_pow_deg),
            "Mismatch between evaluation and opening of quotient polynomial"
        );
    }

    let merkle_caps = vec![
        proof.trace_cap.clone(),
        proof.permutation_ctl_zs_cap.clone(),
        proof.quotient_polys_cap.clone(),
    ];

    verify_fri_proof::<F, C, D>(
        &stark.fri_instance(
            challenges.stark_zeta,
            F::primitive_root_of_unity(degree_bits),
            degree_bits,
            ctl_zs_last.len(),
            config,
        ),
        &proof.openings.to_fri_openings(),
        &challenges.fri_challenges,
        &merkle_caps,
        &proof.opening_proof,
        &config.fri_params(degree_bits),
    )?;

    Ok(())
}

/// Evaluate the Lagrange polynomials `L_1` and `L_n` at a point `x`.
/// `L_1(x) = (x^n - 1)/(n * (x - 1))`
/// `L_n(x) = (x^n - 1)/(n * (g * x - 1))`, with `g` a generator of the subgroup.
fn eval_l_1_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
    let n = F::from_canonical_usize(1 << log_n);
    let g = F::primitive_root_of_unity(log_n);
    let z_x = x.exp_power_of_2(log_n) - F::ONE;
    let invs = F::batch_multiplicative_inverse(&[n * (x - F::ONE), n * (g * x - F::ONE)]);

    (z_x * invs[0], z_x * invs[1])
}

#[cfg(test)]
mod tests {
    use plonky2::field::field_types::Field;
    use plonky2::field::goldilocks_field::GoldilocksField;
    use plonky2::field::polynomial::PolynomialValues;

    use crate::verifier::eval_l_1_and_l_last;

    #[test]
    fn test_eval_l_1_and_l_last() {
        type F = GoldilocksField;
        let log_n = 5;
        let n = 1 << log_n;

        let x = F::rand(); // challenge point
        let expected_l_first_x = PolynomialValues::selector(n, 0).ifft().eval(x);
        let expected_l_last_x = PolynomialValues::selector(n, n - 1).ifft().eval(x);

        let (l_first_x, l_last_x) = eval_l_1_and_l_last(log_n, x);
        assert_eq!(l_first_x, expected_l_first_x);
        assert_eq!(l_last_x, expected_l_last_x);
    }
}