Merge pull request #499 from mir-protocol/stark_permutation_checks

Stark verifier permutation checks
This commit is contained in:
wborgeaud 2022-02-23 09:56:02 +01:00 committed by GitHub
commit 517d26e4c1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 566 additions and 124 deletions

View File

@ -208,7 +208,7 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
}
}
pub(crate) fn get_challenge(&mut self, builder: &mut CircuitBuilder<F, D>) -> Target {
pub fn get_challenge(&mut self, builder: &mut CircuitBuilder<F, D>) -> Target {
self.absorb_buffered_inputs(builder);
if self.output_buffer.is_empty() {

View File

@ -1,6 +1,6 @@
use std::borrow::Borrow;
use plonky2_field::extension_field::Extendable;
use plonky2_field::extension_field::{Extendable, FieldExtension};
use plonky2_field::field_types::Field;
use plonky2_field::polynomial::PolynomialCoeffs;
@ -35,6 +35,11 @@ impl<F: Field> ReducingFactor<F> {
self.base * x
}
/// Multiply an extension-field element by the base-field reducing factor,
/// bumping the internal power count. Extension-field counterpart of `mul`.
fn mul_ext<FE: FieldExtension<D, BaseField = F>, const D: usize>(&mut self, x: FE) -> FE {
self.count += 1;
// `scalar_mul` multiplies every coefficient of `x` by the base-field `self.base`.
x.scalar_mul(self.base)
}
fn mul_poly(&mut self, p: &mut PolynomialCoeffs<F>) {
self.count += 1;
*p *= self.base;
@ -45,6 +50,14 @@ impl<F: Field> ReducingFactor<F> {
.fold(F::ZERO, |acc, x| self.mul(acc) + *x.borrow())
}
/// Reduce a sequence of extension-field values by the base-field factor,
/// Horner-style: folds from the last element down, so each earlier element is
/// multiplied by one more power of the factor. Extension-field counterpart of
/// `reduce`.
pub fn reduce_ext<FE: FieldExtension<D, BaseField = F>, const D: usize>(
&mut self,
iter: impl DoubleEndedIterator<Item = impl Borrow<FE>>,
) -> FE {
iter.rev()
.fold(FE::ZERO, |acc, x| self.mul_ext(acc) + *x.borrow())
}
pub fn reduce_polys(
&mut self,
polys: impl DoubleEndedIterator<Item = impl Borrow<PolynomialCoeffs<F>>>,

View File

@ -6,12 +6,14 @@ use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
/// Toy STARK system used for testing.
/// Computes a Fibonacci sequence with state `[x0, x1]` using the state transition
/// `x0 <- x1, x1 <- x0 + x1`.
/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition
/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`.
/// Note: The `i, j` columns are only used to test the permutation argument.
#[derive(Copy, Clone)]
struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
num_rows: usize,
@ -34,21 +36,25 @@ impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
}
}
/// Generate the trace using `x0, x1` as inital state values.
/// Generate the trace using `x0, x1, 0, 1` as initial state values.
fn generate_trace(&self, x0: F, x1: F) -> Vec<[F; Self::COLUMNS]> {
(0..self.num_rows)
.scan([x0, x1], |acc, _| {
let mut trace = (0..self.num_rows)
.scan([x0, x1, F::ZERO, F::ONE], |acc, _| {
let tmp = *acc;
acc[0] = tmp[1];
acc[1] = tmp[0] + tmp[1];
acc[2] = tmp[2] + F::ONE;
acc[3] = tmp[3] + F::ONE;
Some(tmp)
})
.collect()
.collect::<Vec<_>>();
trace[self.num_rows - 1][3] = F::ZERO; // So that column 2 and 3 are permutation of one another.
trace
}
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
const COLUMNS: usize = 2;
const COLUMNS: usize = 4;
const PUBLIC_INPUTS: usize = 3;
fn eval_packed_generic<FE, P, const D2: usize>(
@ -105,6 +111,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStar
fn constraint_degree(&self) -> usize {
2
}
/// Declare that columns 2 and 3 are permutations of one another (the `i`/`j`
/// helper columns), purely to exercise the permutation argument in tests.
fn permutation_pairs(&self) -> Vec<PermutationPair> {
vec![PermutationPair {
column_pairs: vec![(2, 3)],
}]
}
}
#[cfg(test)]

View File

@ -1,4 +1,3 @@
use anyhow::Result;
use plonky2::field::extension_field::Extendable;
use plonky2::field::polynomial::PolynomialCoeffs;
use plonky2::fri::proof::{FriProof, FriProofTarget};
@ -11,7 +10,9 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use crate::config::StarkConfig;
use crate::permutation::get_n_permutation_challenge_sets;
use crate::permutation::{
get_n_permutation_challenge_sets, get_n_permutation_challenge_sets_target,
};
use crate::proof::*;
use crate::stark::Stark;
@ -26,7 +27,7 @@ fn get_challenges<F, C, S, const D: usize>(
pow_witness: F,
config: &StarkConfig,
degree_bits: usize,
) -> Result<StarkProofChallenges<F, D>>
) -> StarkProofChallenges<F, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -38,20 +39,15 @@ where
challenger.observe_cap(trace_cap);
let permutation_challenge_sets = if stark.uses_permutation_args() {
get_n_permutation_challenge_sets(
let permutation_challenge_sets = permutation_zs_cap.map(|permutation_zs_cap| {
let tmp = get_n_permutation_challenge_sets(
&mut challenger,
num_challenges,
stark.permutation_batch_size(),
)
} else {
vec![]
};
if stark.uses_permutation_args() {
let cap =
permutation_zs_cap.ok_or_else(|| anyhow::Error::msg("expected permutation_zs_cap"));
challenger.observe_cap(cap?);
}
);
challenger.observe_cap(permutation_zs_cap);
tmp
});
let stark_alphas = challenger.get_n_challenges(num_challenges);
@ -60,7 +56,7 @@ where
challenger.observe_openings(&openings.to_fri_openings());
Ok(StarkProofChallenges {
StarkProofChallenges {
permutation_challenge_sets,
stark_alphas,
stark_zeta,
@ -71,7 +67,7 @@ where
degree_bits,
&config.fri_config,
),
})
}
}
impl<F, C, const D: usize> StarkProofWithPublicInputs<F, C, D>
@ -84,11 +80,10 @@ where
stark: &S,
config: &StarkConfig,
degree_bits: usize,
) -> anyhow::Result<Vec<usize>> {
Ok(self
.get_challenges(stark, config, degree_bits)?
) -> Vec<usize> {
self.get_challenges(stark, config, degree_bits)
.fri_challenges
.fri_query_indices)
.fri_query_indices
}
/// Computes all Fiat-Shamir challenges used in the STARK proof.
@ -97,7 +92,7 @@ where
stark: &S,
config: &StarkConfig,
degree_bits: usize,
) -> Result<StarkProofChallenges<F, D>> {
) -> StarkProofChallenges<F, D> {
let StarkProof {
trace_cap,
permutation_zs_cap,
@ -131,9 +126,11 @@ where
pub(crate) fn get_challenges_target<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
trace_cap: &MerkleCapTarget,
permutation_zs_cap: Option<&MerkleCapTarget>,
quotient_polys_cap: &MerkleCapTarget,
@ -151,6 +148,18 @@ where
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
challenger.observe_cap(trace_cap);
let permutation_challenge_sets = permutation_zs_cap.map(|permutation_zs_cap| {
let tmp = get_n_permutation_challenge_sets_target(
builder,
&mut challenger,
num_challenges,
stark.permutation_batch_size(),
);
challenger.observe_cap(permutation_zs_cap);
tmp
});
let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
challenger.observe_cap(quotient_polys_cap);
@ -159,6 +168,7 @@ where
challenger.observe_openings(&openings.to_fri_openings());
StarkProofChallengesTarget {
permutation_challenge_sets,
stark_alphas,
stark_zeta,
fri_challenges: challenger.fri_challenges::<C>(
@ -172,9 +182,14 @@ where
}
impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
pub(crate) fn get_challenges<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
>(
&self,
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
where
@ -194,8 +209,9 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
},
} = &self.proof;
get_challenges_target::<F, C, D>(
get_challenges_target::<F, C, S, D>(
builder,
stark,
trace_cap,
permutation_zs_cap.as_ref(),
quotient_polys_cap,

View File

@ -3,6 +3,7 @@
#![allow(unused_variables)]
#![allow(incomplete_features)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::type_complexity)]
#![feature(generic_const_exprs)]
pub mod config;
@ -14,6 +15,7 @@ pub mod prover;
pub mod recursive_verifier;
pub mod stark;
pub mod stark_testing;
pub mod vanishing_poly;
pub mod vars;
pub mod verifier;

View File

@ -2,16 +2,23 @@
use itertools::Itertools;
use plonky2::field::batch_util::batch_multiply_inplace;
use plonky2::field::extension_field::Extendable;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::field::packed_field::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget};
use rayon::prelude::*;
use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another.
/// In particular, there should exist some permutation `pi` such that for any `i`,
@ -24,31 +31,32 @@ pub struct PermutationPair {
}
/// A single instance of a permutation check protocol.
pub(crate) struct PermutationInstance<'a, F: Field> {
pub(crate) struct PermutationInstance<'a, T: Copy> {
pub(crate) pair: &'a PermutationPair,
pub(crate) challenge: PermutationChallenge<F>,
pub(crate) challenge: PermutationChallenge<T>,
}
/// Randomness for a single instance of a permutation check protocol.
#[derive(Copy, Clone)]
pub(crate) struct PermutationChallenge<F: Field> {
pub(crate) struct PermutationChallenge<T: Copy> {
/// Randomness used to combine multiple columns into one.
pub(crate) beta: F,
pub(crate) beta: T,
/// Random offset that's added to the beta-reduced column values.
pub(crate) gamma: F,
pub(crate) gamma: T,
}
/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
pub(crate) struct PermutationChallengeSet<F: Field> {
pub(crate) challenges: Vec<PermutationChallenge<F>>,
#[derive(Clone)]
pub(crate) struct PermutationChallengeSet<T: Copy> {
pub(crate) challenges: Vec<PermutationChallenge<T>>,
}
/// Compute all Z polynomials (for permutation arguments).
pub(crate) fn compute_permutation_z_polys<F, C, S, const D: usize>(
stark: &S,
config: &StarkConfig,
challenger: &mut Challenger<F, C::Hasher>,
trace_poly_values: &[PolynomialValues<F>],
permutation_challenge_sets: &[PermutationChallengeSet<F>],
) -> Vec<PolynomialValues<F>>
where
F: RichField + Extendable<D>,
@ -56,33 +64,13 @@ where
S: Stark<F, D>,
{
let permutation_pairs = stark.permutation_pairs();
let permutation_challenge_sets = get_n_permutation_challenge_sets(
challenger,
let permutation_batches = get_permutation_batches(
&permutation_pairs,
permutation_challenge_sets,
config.num_challenges,
stark.permutation_batch_size(),
);
// Get a list of instances of our batch-permutation argument. These are permutation arguments
// where the same `Z(x)` polynomial is used to check more than one permutation.
// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we
// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we
// chunk these arguments based on our batch size.
let permutation_batches = permutation_pairs
.iter()
.cartesian_product(0..config.num_challenges)
.chunks(stark.permutation_batch_size())
.into_iter()
.map(|batch| {
batch
.enumerate()
.map(|(i, (pair, chal))| {
let challenge = permutation_challenge_sets[i].challenges[chal];
PermutationInstance { pair, challenge }
})
.collect_vec()
})
.collect_vec();
permutation_batches
.into_par_iter()
.map(|instances| compute_permutation_z_poly(&instances, trace_poly_values))
@ -178,3 +166,221 @@ pub(crate) fn get_n_permutation_challenge_sets<F: RichField, H: Hasher<F>>(
.map(|_| get_permutation_challenge_set(challenger, num_challenges))
.collect()
}
/// Recursive-circuit analogue of `get_permutation_challenge`: draws one
/// `(beta, gamma)` challenge pair as circuit targets from the recursive
/// challenger.
fn get_permutation_challenge_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
) -> PermutationChallenge<Target> {
let beta = challenger.get_challenge(builder);
let gamma = challenger.get_challenge(builder);
PermutationChallenge { beta, gamma }
}
/// Recursive-circuit analogue of `get_permutation_challenge_set`: draws
/// `num_challenges` independent `(beta, gamma)` target pairs to boost
/// soundness.
fn get_permutation_challenge_set_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
) -> PermutationChallengeSet<Target> {
let challenges = (0..num_challenges)
.map(|_| get_permutation_challenge_target(builder, challenger))
.collect();
PermutationChallengeSet { challenges }
}
/// Recursive-circuit analogue of `get_n_permutation_challenge_sets`: draws
/// `num_sets` challenge sets, each containing `num_challenges` pairs.
pub(crate) fn get_n_permutation_challenge_sets_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
num_sets: usize,
) -> Vec<PermutationChallengeSet<Target>> {
(0..num_sets)
.map(|_| get_permutation_challenge_set_target(builder, challenger, num_challenges))
.collect()
}
/// Get a list of instances of our batch-permutation argument. These are permutation arguments
/// where the same `Z(x)` polynomial is used to check more than one permutation.
/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we
/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we
/// chunk these arguments based on our batch size.
pub(crate) fn get_permutation_batches<'a, T: Copy>(
    permutation_pairs: &'a [PermutationPair],
    permutation_challenge_sets: &[PermutationChallengeSet<T>],
    num_challenges: usize,
    batch_size: usize,
) -> Vec<Vec<PermutationInstance<'a, T>>> {
    // Walk the (pair, challenge-index) cartesian product in order, slicing it
    // into chunks of `batch_size`. The k-th instance within a batch takes its
    // challenge from the k-th challenge set.
    let mut batches = Vec::new();
    let mut batch: Vec<PermutationInstance<'a, T>> = Vec::with_capacity(batch_size);
    for pair in permutation_pairs {
        for chal in 0..num_challenges {
            let slot = batch.len();
            batch.push(PermutationInstance {
                pair,
                challenge: permutation_challenge_sets[slot].challenges[chal],
            });
            if batch.len() == batch_size {
                batches.push(std::mem::take(&mut batch));
            }
        }
    }
    // A trailing partial chunk is still a valid batch.
    if !batch.is_empty() {
        batches.push(batch);
    }
    batches
}
// TODO: Use slices.
/// Openings of the permutation `Z` polynomials together with the challenges
/// needed to evaluate the permutation-check constraints at a point.
pub struct PermutationCheckVars<F: Field, FE: FieldExtension<D2, BaseField = F>, const D2: usize> {
// Z polynomials evaluated at the current point `x`.
pub(crate) local_zs: Vec<FE>,
// Z polynomials evaluated at `g x` (used in the transition constraint).
pub(crate) next_zs: Vec<FE>,
// One challenge set per batch instance; see `get_permutation_batches`.
pub(crate) permutation_challenge_sets: Vec<PermutationChallengeSet<F>>,
}
/// Evaluate the permutation-argument constraints and feed them to `consumer`:
/// the boundary constraint `Z(1) = 1` for every batch, and the running-product
/// transition `Z(gx) * prod(reduced_rhs) - Z(x) * prod(reduced_lhs) = 0`.
pub(crate) fn eval_permutation_checks<F, FE, P, C, S, const D: usize, const D2: usize>(
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationVars<FE, FE, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
permutation_data: PermutationCheckVars<F, FE, D2>,
consumer: &mut ConstraintConsumer<FE>,
) where
F: RichField + Extendable<D>,
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let PermutationCheckVars {
local_zs,
next_zs,
permutation_challenge_sets,
} = permutation_data;
// Boundary constraint: every Z polynomial starts at 1, i.e. Z(1) = 1.
for &z in &local_zs {
consumer.constraint_first_row(z - FE::ONE);
}
let permutation_pairs = stark.permutation_pairs();
let permutation_batches = get_permutation_batches(
&permutation_pairs,
&permutation_challenge_sets,
config.num_challenges,
stark.permutation_batch_size(),
);
// Each zs value corresponds to a permutation batch.
for (i, instances) in permutation_batches.iter().enumerate() {
// Z(gx) * down = Z x * up
let (reduced_lhs, reduced_rhs): (Vec<FE>, Vec<FE>) = instances
.iter()
.map(|instance| {
let PermutationInstance {
pair: PermutationPair { column_pairs },
challenge: PermutationChallenge { beta, gamma },
} = instance;
// Combine the columns of each side with powers of `beta`, then
// shift by `gamma` so zero values don't break the product.
let mut factor = ReducingFactor::new(*beta);
let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
.iter()
.map(|&(i, j)| (vars.local_values[i], vars.local_values[j]))
.unzip();
(
factor.reduce_ext(lhs.into_iter()) + FE::from_basefield(*gamma),
factor.reduce_ext(rhs.into_iter()) + FE::from_basefield(*gamma),
)
})
.unzip();
let constraint = next_zs[i] * reduced_rhs.into_iter().product()
- local_zs[i] * reduced_lhs.into_iter().product();
consumer.constraint(constraint);
}
}
// TODO: Use slices.
/// Circuit-target version of `PermutationCheckVars`: openings of the
/// permutation `Z` polynomials and the challenge targets, used when the
/// permutation checks are evaluated inside a recursive circuit.
pub struct PermutationCheckDataTarget<const D: usize> {
// Z polynomials opened at the current point.
pub(crate) local_zs: Vec<ExtensionTarget<D>>,
// Z polynomials opened at the "next" point (see `eval_permutation_checks_recursively`).
pub(crate) next_zs: Vec<ExtensionTarget<D>>,
// One challenge set per batch instance; see `get_permutation_batches`.
pub(crate) permutation_challenge_sets: Vec<PermutationChallengeSet<Target>>,
}
/// Recursive-circuit analogue of `eval_permutation_checks`: builds the same
/// boundary and running-product constraints as circuit gates and feeds them to
/// the recursive consumer.
pub(crate) fn eval_permutation_checks_recursively<F, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
permutation_data: PermutationCheckDataTarget<D>,
consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
F: RichField + Extendable<D>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let PermutationCheckDataTarget {
local_zs,
next_zs,
permutation_challenge_sets,
} = permutation_data;
let one = builder.one_extension();
// Boundary constraint: every Z polynomial starts at 1, i.e. Z(1) = 1.
for &z in &local_zs {
let z_1 = builder.sub_extension(z, one);
consumer.constraint_first_row(builder, z_1);
}
let permutation_pairs = stark.permutation_pairs();
let permutation_batches = get_permutation_batches(
&permutation_pairs,
&permutation_challenge_sets,
config.num_challenges,
stark.permutation_batch_size(),
);
// Each zs value corresponds to a permutation batch.
for (i, instances) in permutation_batches.iter().enumerate() {
let (reduced_lhs, reduced_rhs): (Vec<ExtensionTarget<D>>, Vec<ExtensionTarget<D>>) =
instances
.iter()
.map(|instance| {
let PermutationInstance {
pair: PermutationPair { column_pairs },
challenge: PermutationChallenge { beta, gamma },
} = instance;
// Lift the base-field challenge targets into the extension field
// before combining columns with powers of `beta` and offsetting
// by `gamma`.
let beta_ext = builder.convert_to_ext(*beta);
let gamma_ext = builder.convert_to_ext(*gamma);
let mut factor = ReducingFactorTarget::new(beta_ext);
let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
.iter()
.map(|&(i, j)| (vars.local_values[i], vars.local_values[j]))
.unzip();
let reduced_lhs = factor.reduce(&lhs, builder);
let reduced_rhs = factor.reduce(&rhs, builder);
(
builder.add_extension(reduced_lhs, gamma_ext),
builder.add_extension(reduced_rhs, gamma_ext),
)
})
.unzip();
let reduced_lhs_product = builder.mul_many_extension(&reduced_lhs);
let reduced_rhs_product = builder.mul_many_extension(&reduced_rhs);
// constraint = next_zs[i] * reduced_rhs_product - local_zs[i] * reduced_lhs_product
let constraint = {
let tmp = builder.mul_extension(local_zs[i], reduced_lhs_product);
builder.mul_sub_extension(next_zs[i], reduced_rhs_product, tmp)
};
consumer.constraint(builder, constraint)
}
}

View File

@ -32,6 +32,7 @@ pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>,
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> StarkProof<F, C, D> {
/// Recover the length of the trace from a STARK proof and a STARK config.
pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
.initial_trees_proof
@ -51,6 +52,7 @@ pub struct StarkProofTarget<const D: usize> {
}
impl<const D: usize> StarkProofTarget<D> {
/// Recover the length of the trace from a STARK proof and a STARK config.
pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
.initial_trees_proof
@ -101,7 +103,7 @@ pub struct CompressedStarkProofWithPublicInputs<
pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
/// Randomness used in any permutation arguments.
pub permutation_challenge_sets: Vec<PermutationChallengeSet<F>>,
pub permutation_challenge_sets: Option<Vec<PermutationChallengeSet<F>>>,
/// Random values used to combine STARK constraints.
pub stark_alphas: Vec<F>,
@ -113,6 +115,7 @@ pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: us
}
pub(crate) struct StarkProofChallengesTarget<const D: usize> {
pub permutation_challenge_sets: Option<Vec<PermutationChallengeSet<Target>>>,
pub stark_alphas: Vec<Target>,
pub stark_zeta: ExtensionTarget<D>,
pub fri_challenges: FriChallengesTarget<D>,
@ -179,27 +182,29 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
pub struct StarkOpeningSetTarget<const D: usize> {
pub local_values: Vec<ExtensionTarget<D>>,
pub next_values: Vec<ExtensionTarget<D>>,
pub permutation_zs: Vec<ExtensionTarget<D>>,
pub permutation_zs_right: Vec<ExtensionTarget<D>>,
pub permutation_zs: Option<Vec<ExtensionTarget<D>>>,
pub permutation_zs_right: Option<Vec<ExtensionTarget<D>>>,
pub quotient_polys: Vec<ExtensionTarget<D>>,
}
impl<const D: usize> StarkOpeningSetTarget<D> {
pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget<D> {
let zeta_batch = FriOpeningBatchTarget {
values: [
self.local_values.as_slice(),
self.quotient_polys.as_slice(),
self.permutation_zs.as_slice(),
]
.concat(),
values: self
.local_values
.iter()
.chain(self.permutation_zs.iter().flatten())
.chain(&self.quotient_polys)
.copied()
.collect_vec(),
};
let zeta_right_batch = FriOpeningBatchTarget {
values: [
self.next_values.as_slice(),
self.permutation_zs_right.as_slice(),
]
.concat(),
values: self
.next_values
.iter()
.chain(self.permutation_zs_right.iter().flatten())
.copied()
.collect_vec(),
};
FriOpeningsTarget {
batches: vec![zeta_batch, zeta_right_batch],

View File

@ -18,9 +18,13 @@ use rayon::prelude::*;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::permutation::compute_permutation_z_polys;
use crate::permutation::PermutationCheckVars;
use crate::permutation::{
compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet,
};
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;
pub fn prove<F, C, S, const D: usize>(
@ -80,28 +84,36 @@ where
challenger.observe_cap(&trace_cap);
// Permutation arguments.
let permutation_zs_commitment = if stark.uses_permutation_args() {
let permutation_zs_commitment_challenges = stark.uses_permutation_args().then(|| {
let permutation_challenge_sets = get_n_permutation_challenge_sets(
&mut challenger,
config.num_challenges,
stark.permutation_batch_size(),
);
let permutation_z_polys = compute_permutation_z_polys::<F, C, S, D>(
&stark,
config,
&mut challenger,
&trace_poly_values,
&permutation_challenge_sets,
);
timed!(
let permutation_zs_commitment = timed!(
timing,
"compute permutation Z commitments",
Some(PolynomialBatch::from_values(
PolynomialBatch::from_values(
permutation_z_polys,
rate_bits,
false,
config.fri_config.cap_height,
timing,
None,
))
)
} else {
None
};
)
);
(permutation_zs_commitment, permutation_challenge_sets)
});
let permutation_zs_commitment = permutation_zs_commitment_challenges
.as_ref()
.map(|(comm, _)| comm);
let permutation_zs_cap = permutation_zs_commitment
.as_ref()
.map(|commit| commit.merkle_tree.cap.clone());
@ -113,10 +125,11 @@ where
let quotient_polys = compute_quotient_polys::<F, C, S, D>(
&stark,
&trace_commitment,
&permutation_zs_commitment_challenges,
public_inputs,
alphas,
degree_bits,
rate_bits,
config,
);
let all_quotient_chunks = quotient_polys
.into_par_iter()
@ -156,13 +169,13 @@ where
zeta,
g,
&trace_commitment,
permutation_zs_commitment.as_ref(),
permutation_zs_commitment,
&quotient_commitment,
);
challenger.observe_openings(&openings.to_fri_openings());
let initial_merkle_trees = once(&trace_commitment)
.chain(permutation_zs_commitment.as_ref())
.chain(permutation_zs_commitment)
.chain(once(&quotient_commitment))
.collect_vec();
@ -193,13 +206,17 @@ where
/// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`,
/// where the `C_i`s are the Stark constraints.
fn compute_quotient_polys<F, C, S, const D: usize>(
fn compute_quotient_polys<'a, F, C, S, const D: usize>(
stark: &S,
trace_commitment: &PolynomialBatch<F, C, D>,
trace_commitment: &'a PolynomialBatch<F, C, D>,
permutation_zs_commitment_challenges: &'a Option<(
PolynomialBatch<F, C, D>,
Vec<PermutationChallengeSet<F>>,
)>,
public_inputs: [F; S::PUBLIC_INPUTS],
alphas: Vec<F>,
degree_bits: usize,
rate_bits: usize,
config: &StarkConfig,
) -> Vec<PolynomialCoeffs<F>>
where
F: RichField + Extendable<D>,
@ -209,6 +226,7 @@ where
[(); S::PUBLIC_INPUTS]:,
{
let degree = 1 << degree_bits;
let rate_bits = config.fri_config.rate_bits;
let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor());
assert!(
@ -228,9 +246,10 @@ where
let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, quotient_degree_bits);
// Retrieve the LDE values at index `i`.
let get_at_index = |comm: &PolynomialBatch<F, C, D>, i: usize| -> [F; S::COLUMNS] {
comm.get_lde_values(i * step).try_into().unwrap()
};
let get_at_index =
|comm: &'a PolynomialBatch<F, C, D>, i: usize| -> &'a [F] { comm.get_lde_values(i * step) };
let get_trace_at_index = |i| get_at_index(trace_commitment, i).try_into().unwrap();
// Last element of the subgroup.
let last = F::primitive_root_of_unity(degree_bits).inverse();
let size = degree << quotient_degree_bits;
@ -251,12 +270,26 @@ where
lagrange_last.values[i],
);
let vars = StarkEvaluationVars::<F, F, { S::COLUMNS }, { S::PUBLIC_INPUTS }> {
local_values: &get_at_index(trace_commitment, i),
next_values: &get_at_index(trace_commitment, (i + next_step) % size),
local_values: &get_trace_at_index(i),
next_values: &get_trace_at_index((i + next_step) % size),
public_inputs: &public_inputs,
};
stark.eval_packed_base(vars, &mut consumer);
// TODO: Add in constraints for permutation arguments.
let permutation_check_data = permutation_zs_commitment_challenges.as_ref().map(
|(permutation_zs_commitment, permutation_challenge_sets)| PermutationCheckVars {
local_zs: get_at_index(permutation_zs_commitment, i).to_vec(),
next_zs: get_at_index(permutation_zs_commitment, (i + next_step) % size)
.to_vec(),
permutation_challenge_sets: permutation_challenge_sets.to_vec(),
},
);
// TODO: Use packed field for F.
eval_vanishing_poly::<F, F, F, C, S, D, 1>(
stark,
config,
vars,
permutation_check_data,
&mut consumer,
);
// TODO: Fix this once we use a genuine `PackedField`.
let mut constraints_evals = consumer.accumulators();
// We divide the constraints evaluations by `Z_H(x)`.

View File

@ -1,5 +1,6 @@
use std::iter::once;
use anyhow::{ensure, Result};
use itertools::Itertools;
use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
@ -13,11 +14,13 @@ use plonky2::util::reducing::ReducingFactorTarget;
use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::permutation::PermutationCheckDataTarget;
use crate::proof::{
StarkOpeningSetTarget, StarkProof, StarkProofChallengesTarget, StarkProofTarget,
StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly_recursively;
use crate::vars::StarkEvaluationTargets;
pub fn recursively_verify_stark_proof<
@ -37,7 +40,7 @@ pub fn recursively_verify_stark_proof<
{
assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS);
let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config);
let challenges = proof_with_pis.get_challenges::<F, C>(builder, inner_config);
let challenges = proof_with_pis.get_challenges::<F, C, S>(builder, &stark, inner_config);
recursively_verify_stark_proof_with_challenges::<F, C, S, D>(
builder,
@ -67,6 +70,7 @@ fn recursively_verify_stark_proof_with_challenges<
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
check_permutation_options(&stark, &proof_with_pis, &challenges).unwrap();
let one = builder.one_extension();
let StarkProofWithPublicInputsTarget {
@ -104,8 +108,21 @@ fn recursively_verify_stark_proof_with_challenges<
l_1,
l_last,
);
stark.eval_ext_recursively(builder, vars, &mut consumer);
// TODO: Add in constraints for permutation arguments.
let permutation_data = stark
.uses_permutation_args()
.then(|| PermutationCheckDataTarget {
local_zs: permutation_zs.as_ref().unwrap().clone(),
next_zs: permutation_zs_right.as_ref().unwrap().clone(),
permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(),
});
eval_vanishing_poly_recursively::<F, C, S, D>(
builder,
&stark,
inner_config,
vars,
permutation_data,
&mut consumer,
);
let vanishing_polys_zeta = consumer.accumulators();
// Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
@ -187,24 +204,25 @@ pub fn add_virtual_stark_proof<F: RichField + Extendable<D>, S: Stark<F, D>, con
let fri_params = config.fri_params(degree_bits);
let cap_height = fri_params.config.cap_height;
let num_leaves_per_oracle = &[
S::COLUMNS,
// TODO: permutation polys
stark.quotient_degree_factor() * config.num_challenges,
];
let num_leaves_per_oracle = once(S::COLUMNS)
.chain(
stark
.uses_permutation_args()
.then(|| stark.num_permutation_batches(config)),
)
.chain(once(stark.quotient_degree_factor() * config.num_challenges))
.collect_vec();
let permutation_zs_cap = if stark.uses_permutation_args() {
Some(builder.add_virtual_cap(cap_height))
} else {
None
};
let permutation_zs_cap = stark
.uses_permutation_args()
.then(|| builder.add_virtual_cap(cap_height));
StarkProofTarget {
trace_cap: builder.add_virtual_cap(cap_height),
permutation_zs_cap,
quotient_polys_cap: builder.add_virtual_cap(cap_height),
openings: add_stark_opening_set::<F, S, D>(builder, stark, config),
opening_proof: builder.add_virtual_fri_proof(num_leaves_per_oracle, &fri_params),
opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params),
}
}
@ -217,8 +235,12 @@ fn add_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D:
StarkOpeningSetTarget {
local_values: builder.add_virtual_extension_targets(S::COLUMNS),
next_values: builder.add_virtual_extension_targets(S::COLUMNS),
permutation_zs: vec![/*TODO*/],
permutation_zs_right: vec![/*TODO*/],
permutation_zs: stark
.uses_permutation_args()
.then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
permutation_zs_right: stark
.uses_permutation_args()
.then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
quotient_polys: builder
.add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
}
@ -267,5 +289,33 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
&proof.openings.to_fri_openings(),
);
if let (Some(permutation_zs_cap_target), Some(permutation_zs_cap)) =
(&proof_target.permutation_zs_cap, &proof.permutation_zs_cap)
{
witness.set_cap_target(permutation_zs_cap_target, permutation_zs_cap);
}
set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);
}
/// Utility function to check that all permutation data wrapped in `Option`s are `Some` iff
/// the Stark uses a permutation argument.
fn check_permutation_options<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
    stark: &S,
    proof_with_pis: &StarkProofWithPublicInputsTarget<D>,
    challenges: &StarkProofChallengesTarget<D>,
) -> Result<()> {
    let uses_permutations = stark.uses_permutation_args();
    // Every optional piece of permutation data must agree with the Stark's
    // configuration: all `Some` when the argument is used, all `None` otherwise.
    let consistent = proof_with_pis.proof.permutation_zs_cap.is_some() == uses_permutations
        && proof_with_pis.proof.openings.permutation_zs.is_some() == uses_permutations
        && proof_with_pis.proof.openings.permutation_zs_right.is_some() == uses_permutations
        && challenges.permutation_challenge_sets.is_some() == uses_permutations;
    ensure!(
        consistent,
        "Permutation data doesn't match with Stark configuration."
    );
    Ok(())
}

View File

@ -16,7 +16,6 @@ use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
/// Represents a STARK system.
// TODO: Add a `constraint_degree` fn that returns the maximum constraint degree.
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// The total number of columns in the trace.
const COLUMNS: usize;

View File

@ -0,0 +1,68 @@
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::GenericConfig;
use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::{
eval_permutation_checks, eval_permutation_checks_recursively, PermutationCheckDataTarget,
PermutationCheckVars,
};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
/// Evaluates all constraints that must vanish on the trace domain: the Stark's own
/// constraints plus, when the Stark uses a permutation argument, the permutation checks.
///
/// `permutation_data` is expected to be `Some` iff the Stark uses permutation arguments;
/// when present, the corresponding checks are folded into `consumer` as well.
pub(crate) fn eval_vanishing_poly<F, FE, P, C, S, const D: usize, const D2: usize>(
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationVars<FE, FE, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
    permutation_data: Option<PermutationCheckVars<F, FE, D2>>,
    consumer: &mut ConstraintConsumer<FE>,
) where
    F: RichField + Extendable<D>,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    // The Stark's native constraints are always evaluated.
    stark.eval_packed_generic(vars, consumer);
    // Permutation-argument checks only apply when permutation data was supplied.
    if let Some(data) = permutation_data {
        eval_permutation_checks::<F, FE, P, C, S, D, D2>(stark, config, vars, data, consumer);
    }
}
/// Recursive (in-circuit) counterpart of `eval_vanishing_poly`: adds gates evaluating the
/// Stark's own constraints plus, when the Stark uses a permutation argument, the
/// permutation checks.
///
/// `permutation_data` is expected to be `Some` iff the Stark uses permutation arguments.
pub(crate) fn eval_vanishing_poly_recursively<F, C, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
    permutation_data: Option<PermutationCheckDataTarget<D>>,
    consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
    [(); S::PUBLIC_INPUTS]:,
{
    // The Stark's native constraints are always evaluated in-circuit.
    stark.eval_ext_recursively(builder, vars, consumer);
    // Permutation-argument checks only apply when permutation data was supplied.
    if let Some(data) = permutation_data {
        eval_permutation_checks_recursively::<F, S, D>(builder, stark, config, vars, data, consumer);
    }
}

View File

@ -11,8 +11,10 @@ use plonky2::plonk::plonk_common::reduce_with_powers;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::permutation::PermutationCheckVars;
use crate::proof::{StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;
pub fn verify_stark_proof<
@ -32,7 +34,7 @@ where
{
ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS);
let degree_bits = proof_with_pis.proof.recover_degree_bits(config);
let challenges = proof_with_pis.get_challenges(&stark, config, degree_bits)?;
let challenges = proof_with_pis.get_challenges(&stark, config, degree_bits);
verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, degree_bits, config)
}
@ -53,6 +55,7 @@ where
[(); S::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
check_permutation_options(&stark, &proof_with_pis, &challenges)?;
let StarkProofWithPublicInputs {
proof,
public_inputs,
@ -88,8 +91,18 @@ where
l_1,
l_last,
);
stark.eval_ext(vars, &mut consumer);
// TODO: Add in constraints for permutation arguments.
let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckVars {
local_zs: permutation_zs.as_ref().unwrap().clone(),
next_zs: permutation_zs_right.as_ref().unwrap().clone(),
permutation_challenge_sets: challenges.permutation_challenge_sets.unwrap(),
});
eval_vanishing_poly::<F, F::Extension, F::Extension, C, S, D, D>(
&stark,
config,
vars,
permutation_data,
&mut consumer,
);
let vanishing_polys_zeta = consumer.accumulators();
// Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
@ -141,7 +154,32 @@ fn eval_l_1_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
(z_x * invs[0], z_x * invs[1])
}
/// Recover the length of the trace from a STARK proof and a STARK config.
/// Utility function to check that all permutation data wrapped in `Option`s are `Some` iff
/// the Stark uses a permutation argument.
fn check_permutation_options<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    const D: usize,
>(
    stark: &S,
    proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
    challenges: &StarkProofChallenges<F, D>,
) -> Result<()> {
    // Every piece of permutation data must be present exactly when the Stark
    // is configured to use permutation arguments.
    let expected = stark.uses_permutation_args();
    let proof = &proof_with_pis.proof;
    let consistent = proof.permutation_zs_cap.is_some() == expected
        && proof.openings.permutation_zs.is_some() == expected
        && proof.openings.permutation_zs_right.is_some() == expected
        && challenges.permutation_challenge_sets.is_some() == expected;
    ensure!(
        consistent,
        "Permutation data doesn't match with Stark configuration."
    );
    Ok(())
}
#[cfg(test)]
mod tests {