Should work (does not)

Author: wborgeaud
Date:   2022-02-22 11:44:24 +01:00
parent ed4aef0fa0
commit 6cd2fc62b5
8 changed files with 260 additions and 61 deletions

View File

@@ -208,7 +208,7 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
}
}
pub(crate) fn get_challenge(&mut self, builder: &mut CircuitBuilder<F, D>) -> Target {
pub fn get_challenge(&mut self, builder: &mut CircuitBuilder<F, D>) -> Target {
self.absorb_buffered_inputs(builder);
if self.output_buffer.is_empty() {

View File

@@ -234,7 +234,7 @@ mod tests {
let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits);
set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof);
recursively_verify_stark_proof::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);
recursively_verify_stark_proof::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config)?;
if print_gate_counts {
builder.print_gate_counts(0);

View File

@@ -11,7 +11,9 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use crate::config::StarkConfig;
use crate::permutation::get_n_permutation_challenge_sets;
use crate::permutation::{
get_n_permutation_challenge_sets, get_n_permutation_challenge_sets_target,
};
use crate::proof::*;
use crate::stark::Stark;
@@ -131,9 +133,11 @@ where
pub(crate) fn get_challenges_target<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
trace_cap: &MerkleCapTarget,
permutation_zs_cap: Option<&MerkleCapTarget>,
quotient_polys_cap: &MerkleCapTarget,
@@ -142,7 +146,7 @@ pub(crate) fn get_challenges_target<
final_poly: &PolynomialCoeffsExtTarget<D>,
pow_witness: Target,
config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
) -> Result<StarkProofChallengesTarget<D>>
where
C::Hasher: AlgebraicHasher<F>,
{
@@ -151,6 +155,23 @@ where
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
challenger.observe_cap(trace_cap);
let permutation_challenge_sets = if stark.uses_permutation_args() {
get_n_permutation_challenge_sets_target(
builder,
&mut challenger,
num_challenges,
stark.permutation_batch_size(),
)
} else {
vec![]
};
if stark.uses_permutation_args() {
let cap =
permutation_zs_cap.ok_or_else(|| anyhow::Error::msg("expected permutation_zs_cap"));
challenger.observe_cap(cap?);
}
let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
challenger.observe_cap(quotient_polys_cap);
@@ -158,7 +179,8 @@ where
challenger.observe_openings(&openings.to_fri_openings());
StarkProofChallengesTarget {
Ok(StarkProofChallengesTarget {
permutation_challenge_sets,
stark_alphas,
stark_zeta,
fri_challenges: challenger.fri_challenges::<C>(
@@ -168,15 +190,20 @@ where
pow_witness,
&config.fri_config,
),
}
})
}
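A note on the structure here: the recursive challenger must observe caps and squeeze challenges in exactly the same order as the native prover-side transcript, otherwise every later challenge (permutation betas/gammas, alphas, zeta, FRI challenges) diverges. A minimal standalone sketch of that invariant, using a toy hasher rather than the plonky2 challenger API (illustrative names only):

// Illustrative only: a toy transcript showing why the recursive verifier must
// observe caps and squeeze challenges in the prover's exact order. Any mismatch
// changes every subsequent challenge. (Hypothetical helper; not the plonky2 API.)
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct ToyChallenger {
    state: u64,
}

impl ToyChallenger {
    fn new() -> Self {
        Self { state: 0 }
    }
    fn observe(&mut self, data: u64) {
        let mut h = DefaultHasher::new();
        (self.state, data).hash(&mut h);
        self.state = h.finish();
    }
    fn get_challenge(&mut self) -> u64 {
        let mut h = DefaultHasher::new();
        self.state.hash(&mut h);
        self.state = h.finish();
        self.state
    }
}

fn main() {
    let (trace_cap, permutation_zs_cap) = (11u64, 22u64);

    let mut prover = ToyChallenger::new();
    prover.observe(trace_cap);
    let beta = prover.get_challenge(); // permutation challenge
    prover.observe(permutation_zs_cap);
    let alpha = prover.get_challenge(); // constraint-combining challenge

    // The verifier replays the transcript and recovers identical challenges.
    let mut verifier = ToyChallenger::new();
    verifier.observe(trace_cap);
    assert_eq!(verifier.get_challenge(), beta);
    verifier.observe(permutation_zs_cap);
    assert_eq!(verifier.get_challenge(), alpha);
}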
impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
pub(crate) fn get_challenges<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
>(
&self,
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
) -> Result<StarkProofChallengesTarget<D>>
where
C::Hasher: AlgebraicHasher<F>,
{
@@ -194,8 +221,9 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
},
} = &self.proof;
get_challenges_target::<F, C, D>(
get_challenges_target::<F, C, S, D>(
builder,
stark,
trace_cap,
permutation_zs_cap.as_ref(),
quotient_polys_cap,

View File

@@ -6,14 +6,17 @@ use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use rayon::prelude::*;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::stark::Stark;
use crate::vars::StarkEvaluationVars;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another.
/// In particular, there should exist some permutation `pi` such that for any `i`,
@@ -26,24 +29,24 @@ pub struct PermutationPair {
}
/// A single instance of a permutation check protocol.
pub(crate) struct PermutationInstance<'a, F: Field> {
pub(crate) struct PermutationInstance<'a, T: Copy> {
pub(crate) pair: &'a PermutationPair,
pub(crate) challenge: PermutationChallenge<F>,
pub(crate) challenge: PermutationChallenge<T>,
}
/// Randomness for a single instance of a permutation check protocol.
#[derive(Copy, Clone)]
pub(crate) struct PermutationChallenge<F: Field> {
pub(crate) struct PermutationChallenge<T: Copy> {
/// Randomness used to combine multiple columns into one.
pub(crate) beta: F,
pub(crate) beta: T,
/// Random offset that's added to the beta-reduced column values.
pub(crate) gamma: F,
pub(crate) gamma: T,
}
/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone)]
pub(crate) struct PermutationChallengeSet<F: Field> {
pub(crate) challenges: Vec<PermutationChallenge<F>>,
pub(crate) struct PermutationChallengeSet<T: Copy> {
pub(crate) challenges: Vec<PermutationChallenge<T>>,
}
/// Compute all Z polynomials (for permutation arguments).
@@ -163,17 +166,60 @@ pub(crate) fn get_n_permutation_challenge_sets<F: RichField, H: Hasher<F>>(
.collect()
}
fn get_permutation_challenge_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
) -> PermutationChallenge<Target> {
let beta = challenger.get_challenge(builder);
let gamma = challenger.get_challenge(builder);
PermutationChallenge { beta, gamma }
}
fn get_permutation_challenge_set_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
) -> PermutationChallengeSet<Target> {
let challenges = (0..num_challenges)
.map(|_| get_permutation_challenge_target(builder, challenger))
.collect();
PermutationChallengeSet { challenges }
}
pub(crate) fn get_n_permutation_challenge_sets_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
num_sets: usize,
) -> Vec<PermutationChallengeSet<Target>> {
(0..num_sets)
.map(|_| get_permutation_challenge_set_target(builder, challenger, num_challenges))
.collect()
}
/// Get a list of instances of our batch-permutation argument. These are permutation arguments
/// where the same `Z(x)` polynomial is used to check more than one permutation.
/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we
/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we
/// chunk these arguments based on our batch size.
pub(crate) fn get_permutation_batches<'a, F: Field>(
pub(crate) fn get_permutation_batches<'a, T: Copy>(
permutation_pairs: &'a [PermutationPair],
permutation_challenge_sets: &[PermutationChallengeSet<F>],
permutation_challenge_sets: &[PermutationChallengeSet<T>],
num_challenges: usize,
batch_size: usize,
) -> Vec<Vec<PermutationInstance<'a, F>>> {
) -> Vec<Vec<PermutationInstance<'a, T>>> {
permutation_pairs
.iter()
.cartesian_product(0..num_challenges)
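To make the batching described above concrete, here is a standalone sketch of the counting, with made-up sizes (`itertools` is already in use in this file): the cartesian product of pairs and challenge indices is chunked, and each chunk is covered by a single `Z(x)` polynomial.

// Standalone illustration of the batching arithmetic, with hypothetical sizes.
use itertools::Itertools;

fn main() {
    let num_pairs = 3; // permutation pairs declared by the STARK
    let num_challenges = 2; // soundness copies
    let batch_size = 2; // instances checked by a single Z(x)

    // Cartesian product: one (pair, challenge) instance per combination.
    let instances: Vec<(usize, usize)> = (0..num_pairs)
        .cartesian_product(0..num_challenges)
        .collect();
    assert_eq!(instances.len(), num_pairs * num_challenges);

    // Chunking: each chunk of `batch_size` instances shares one Z(x) polynomial.
    let batches: Vec<Vec<(usize, usize)>> = instances
        .chunks(batch_size)
        .map(|chunk| chunk.to_vec())
        .collect();
    assert_eq!(batches.len(), 3); // ceil(6 / 2)
}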
@@ -202,10 +248,8 @@ pub(crate) fn eval_permutation_checks<F, FE, C, S, const D: usize, const D2: usi
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationVars<FE, FE, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
local_zs: &[FE],
next_zs: &[FE],
permutation_data: PermutationCheckData<F, FE, D2>,
consumer: &mut ConstraintConsumer<FE>,
permutation_challenge_sets: &[PermutationChallengeSet<F>],
) where
F: RichField + Extendable<D>,
FE: FieldExtension<D2, BaseField = F>,
@@ -214,12 +258,17 @@ pub(crate) fn eval_permutation_checks<F, FE, C, S, const D: usize, const D2: usi
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let PermutationCheckData {
local_zs,
next_zs,
permutation_challenge_sets,
} = permutation_data;
// TODO: Z_1 check.
let permutation_pairs = stark.permutation_pairs();
let permutation_batches = get_permutation_batches(
&permutation_pairs,
permutation_challenge_sets,
&permutation_challenge_sets,
config.num_challenges,
stark.permutation_batch_size(),
);
@@ -254,3 +303,78 @@ pub(crate) fn eval_permutation_checks<F, FE, C, S, const D: usize, const D2: usi
consumer.constraint(constraint);
}
}
// TODO: Use slices.
pub struct PermutationCheckDataTarget<const D: usize> {
pub(crate) local_zs: Vec<ExtensionTarget<D>>,
pub(crate) next_zs: Vec<ExtensionTarget<D>>,
pub(crate) permutation_challenge_sets: Vec<PermutationChallengeSet<Target>>,
}
pub(crate) fn eval_permutation_checks_recursively<F, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
permutation_data: PermutationCheckDataTarget<D>,
consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
F: RichField + Extendable<D>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let PermutationCheckDataTarget {
local_zs,
next_zs,
permutation_challenge_sets,
} = permutation_data;
// TODO: Z_1 check.
let permutation_pairs = stark.permutation_pairs();
let permutation_batches = get_permutation_batches(
&permutation_pairs,
&permutation_challenge_sets,
config.num_challenges,
stark.permutation_batch_size(),
);
// Each zs value corresponds to a permutation batch.
for (i, instances) in permutation_batches.iter().enumerate() {
// Constrain Z(gx) * reduced_rhs_product = Z(x) * reduced_lhs_product for this batch.
let (reduced_lhs, reduced_rhs): (Vec<ExtensionTarget<D>>, Vec<ExtensionTarget<D>>) =
instances
.iter()
.map(|instance| {
let PermutationInstance {
pair: PermutationPair { column_pairs },
challenge: PermutationChallenge { beta, gamma },
} = instance;
let zero = builder.zero_extension();
let beta_ext = builder.convert_to_ext(*beta);
let gamma_ext = builder.convert_to_ext(*gamma);
let mut reduced =
column_pairs
.iter()
.rev()
.fold((zero, zero), |(lhs, rhs), &(i, j)| {
(
builder.mul_add_extension(lhs, beta_ext, vars.local_values[i]),
builder.mul_add_extension(rhs, beta_ext, vars.local_values[j]),
)
});
reduced.0 = builder.add_extension(reduced.0, gamma_ext);
reduced.1 = builder.add_extension(reduced.1, gamma_ext);
reduced
})
.unzip();
let reduced_lhs_product = builder.mul_many_extension(&reduced_lhs);
let reduced_rhs_product = builder.mul_many_extension(&reduced_rhs);
// constraint = next_zs[i] * reduced_rhs_product - local_zs[i] * reduced_lhs_product
let constraint = {
let tmp = builder.mul_extension(local_zs[i], reduced_lhs_product);
builder.mul_sub_extension(next_zs[i], reduced_rhs_product, tmp)
};
consumer.constraint(builder, constraint)
}
}
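The fold above is a Horner-style reduction: each batch entry contributes `gamma + sum_k beta^k * value_k` on both sides, and the constraint then enforces `Z(gx) * reduced_rhs_product = Z(x) * reduced_lhs_product`. A plain, non-circuit sketch of one side of that reduction, over a toy prime field with made-up data:

// Plain (non-circuit) version of the reduction above: a Horner evaluation in
// beta over the selected column values, plus gamma. Toy modulus, made-up data.
const P: u128 = (1 << 61) - 1;

fn reduce(columns: &[usize], row: &[u128], beta: u128, gamma: u128) -> u128 {
    // rev() + fold computes gamma + sum_k beta^k * row[columns[k]].
    let acc = columns
        .iter()
        .rev()
        .fold(0u128, |acc, &c| (acc * beta + row[c]) % P);
    (acc + gamma) % P
}

fn main() {
    let row = [5u128, 7, 11, 13];
    let (beta, gamma) = (3u128, 9u128);
    // Reducing columns [0, 2]: gamma + row[0] + beta * row[2].
    assert_eq!(reduce(&[0, 2], &row, beta, gamma), (9 + 5 + 3 * 11) % P);
}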

View File

@@ -113,6 +113,7 @@ pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: us
}
pub(crate) struct StarkProofChallengesTarget<const D: usize> {
pub permutation_challenge_sets: Vec<PermutationChallengeSet<Target>>,
pub stark_alphas: Vec<Target>,
pub stark_zeta: ExtensionTarget<D>,
pub fri_challenges: FriChallengesTarget<D>,
@@ -179,27 +180,29 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
pub struct StarkOpeningSetTarget<const D: usize> {
pub local_values: Vec<ExtensionTarget<D>>,
pub next_values: Vec<ExtensionTarget<D>>,
pub permutation_zs: Vec<ExtensionTarget<D>>,
pub permutation_zs_right: Vec<ExtensionTarget<D>>,
pub permutation_zs: Option<Vec<ExtensionTarget<D>>>,
pub permutation_zs_right: Option<Vec<ExtensionTarget<D>>>,
pub quotient_polys: Vec<ExtensionTarget<D>>,
}
impl<const D: usize> StarkOpeningSetTarget<D> {
pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget<D> {
let zeta_batch = FriOpeningBatchTarget {
values: [
self.local_values.as_slice(),
self.quotient_polys.as_slice(),
self.permutation_zs.as_slice(),
]
.concat(),
values: self
.local_values
.iter()
.chain(self.permutation_zs.iter().flatten())
.chain(&self.quotient_polys)
.copied()
.collect_vec(),
};
let zeta_right_batch = FriOpeningBatchTarget {
values: [
self.next_values.as_slice(),
self.permutation_zs_right.as_slice(),
]
.concat(),
values: self
.next_values
.iter()
.chain(self.permutation_zs_right.iter().flatten())
.copied()
.collect_vec(),
};
FriOpeningsTarget {
batches: vec![zeta_batch, zeta_right_batch],
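The `Option` plus `.iter().flatten()` pattern is what lets a STARK without permutation arguments contribute no extra openings: iterating an `Option<Vec<_>>` by reference yields the inner items for `Some` and nothing for `None`. A tiny standalone check of that behaviour:

// `Option<Vec<T>>::iter().flatten()` yields the inner items when Some and
// nothing when None, so absent permutation openings simply disappear here.
fn main() {
    let some_zs: Option<Vec<u32>> = Some(vec![1, 2, 3]);
    let no_zs: Option<Vec<u32>> = None;

    let opened: Vec<u32> = [10u32, 20]
        .iter()
        .chain(some_zs.iter().flatten())
        .copied()
        .collect();
    assert_eq!(opened, vec![10, 20, 1, 2, 3]);

    let opened: Vec<u32> = [10u32, 20]
        .iter()
        .chain(no_zs.iter().flatten())
        .copied()
        .collect();
    assert_eq!(opened, vec![10, 20]);
}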

View File

@@ -1,5 +1,6 @@
use std::iter::once;
use anyhow::Result;
use itertools::Itertools;
use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
@@ -13,11 +14,13 @@ use plonky2::util::reducing::ReducingFactorTarget;
use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::permutation::PermutationCheckDataTarget;
use crate::proof::{
StarkOpeningSetTarget, StarkProof, StarkProofChallengesTarget, StarkProofTarget,
StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly_recursively;
use crate::vars::StarkEvaluationTargets;
pub fn recursively_verify_stark_proof<
@@ -30,14 +33,15 @@ pub fn recursively_verify_stark_proof<
stark: S,
proof_with_pis: StarkProofWithPublicInputsTarget<D>,
inner_config: &StarkConfig,
) where
) -> Result<()>
where
C::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS);
let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config);
let challenges = proof_with_pis.get_challenges::<F, C>(builder, inner_config);
let challenges = proof_with_pis.get_challenges::<F, C, S>(builder, &stark, inner_config)?;
recursively_verify_stark_proof_with_challenges::<F, C, S, D>(
builder,
@@ -47,6 +51,8 @@ pub fn recursively_verify_stark_proof<
inner_config,
degree_bits,
);
Ok(())
}
/// Recursively verifies an inner proof.
@@ -104,8 +110,21 @@ fn recursively_verify_stark_proof_with_challenges<
l_1,
l_last,
);
stark.eval_ext_recursively(builder, vars, &mut consumer);
// TODO: Add in constraints for permutation arguments.
let permutation_data = stark
.uses_permutation_args()
.then(|| PermutationCheckDataTarget {
local_zs: permutation_zs.as_ref().unwrap().clone(),
next_zs: permutation_zs_right.as_ref().unwrap().clone(),
permutation_challenge_sets: challenges.permutation_challenge_sets,
});
eval_vanishing_poly_recursively::<F, C, S, D>(
builder,
&stark,
inner_config,
vars,
permutation_data,
&mut consumer,
);
let vanishing_polys_zeta = consumer.accumulators();
// Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
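On the identity in the comment above: `Z_H(x) = x^n - 1` vanishes on the size-`n` evaluation domain, so the check is only meaningful at a point `zeta` outside it. A toy check over a small prime field (made-up modulus and generator, `n = 4`):

// Toy check of the vanishing polynomial Z_H(x) = x^n - 1 on a size-n subgroup H,
// over the (made-up) prime field F_97 with n = 4.
const P: u64 = 97;

fn pow_mod(mut base: u64, mut exp: u64) -> u64 {
    let mut acc = 1;
    while exp > 0 {
        if exp & 1 == 1 {
            acc = acc * base % P;
        }
        base = base * base % P;
        exp >>= 1;
    }
    acc
}

fn main() {
    // 22 has order 4 mod 97 (22^2 = 484 ≡ -1), so it generates a size-4 subgroup H.
    let (g, n) = (22u64, 4u64);
    // Z_H vanishes at every point of H...
    for k in 0..n {
        assert_eq!((pow_mod(pow_mod(g, k), n) + P - 1) % P, 0);
    }
    // ...but not at a point outside H, so dividing by Z_H(zeta) is well defined.
    let zeta = 5u64;
    assert_ne!((pow_mod(zeta, n) + P - 1) % P, 0);
}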
@@ -222,10 +241,12 @@ fn add_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D:
StarkOpeningSetTarget {
local_values: builder.add_virtual_extension_targets(S::COLUMNS),
next_values: builder.add_virtual_extension_targets(S::COLUMNS),
permutation_zs: builder
.add_virtual_extension_targets(stark.num_permutation_batches(config)),
permutation_zs_right: builder
.add_virtual_extension_targets(stark.num_permutation_batches(config)),
permutation_zs: stark
.uses_permutation_args()
.then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
permutation_zs_right: stark
.uses_permutation_args()
.then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
quotient_polys: builder
.add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
}
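`bool::then` is what makes the permutation-related targets (and the check data earlier in this file) optional: it evaluates the closure and wraps the result in `Some` only when the receiver is `true`. A minimal standalone illustration:

// `bool::then` returns Some(closure()) when true and None when false.
fn main() {
    let uses_permutation_args = false;
    let zs: Option<Vec<u64>> = uses_permutation_args.then(|| vec![0u64; 4]);
    assert!(zs.is_none());

    let uses_permutation_args = true;
    let zs: Option<Vec<u64>> = uses_permutation_args.then(|| vec![0u64; 4]);
    assert_eq!(zs.map(|v| v.len()), Some(4));
}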

View File

@@ -1,12 +1,16 @@
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::GenericConfig;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::permutation::{eval_permutation_checks, PermutationCheckData};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::{
eval_permutation_checks, eval_permutation_checks_recursively, PermutationCheckData,
PermutationCheckDataTarget,
};
use crate::stark::Stark;
use crate::vars::StarkEvaluationVars;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
pub(crate) fn eval_vanishing_poly<F, FE, C, S, const D: usize, const D2: usize>(
stark: &S,
@@ -23,20 +27,40 @@ pub(crate) fn eval_vanishing_poly<F, FE, C, S, const D: usize, const D2: usize>(
[(); S::PUBLIC_INPUTS]:,
{
stark.eval_packed_generic(vars, consumer);
if let Some(PermutationCheckData {
local_zs,
next_zs,
permutation_challenge_sets,
}) = permutation_data
{
if let Some(permutation_data) = permutation_data {
eval_permutation_checks::<F, FE, C, S, D, D2>(
stark,
config,
vars,
&local_zs,
&next_zs,
permutation_data,
consumer,
);
}
}
pub(crate) fn eval_vanishing_poly_recursively<F, C, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
permutation_data: Option<PermutationCheckDataTarget<D>>,
consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
stark.eval_ext_recursively(builder, vars, consumer);
if let Some(permutation_data) = permutation_data {
eval_permutation_checks_recursively::<F, S, D>(
builder,
stark,
config,
vars,
permutation_data,
consumer,
&permutation_challenge_sets,
);
}
}

View File

@@ -90,7 +90,6 @@ where
l_1,
l_last,
);
// stark.eval_ext(vars, &mut consumer);
let permutation_data = stark.uses_permutation_args().then(|| PermutationCheckData {
local_zs: permutation_zs.as_ref().unwrap().clone(),
next_zs: permutation_zs_right.as_ref().unwrap().clone(),