wborgeaud 2022-02-21 16:05:24 +01:00
parent 79ba85eb08
commit 5c1173379e
3 changed files with 139 additions and 86 deletions

View File

@@ -2,7 +2,7 @@
use itertools::Itertools;
use plonky2::field::batch_util::batch_multiply_inplace;
use plonky2::field::extension_field::Extendable;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
@@ -11,7 +11,9 @@ use plonky2::plonk::config::{GenericConfig, Hasher};
use rayon::prelude::*;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::stark::Stark;
use crate::vars::StarkEvaluationVars;
/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another.
/// In particular, there should exist some permutation `pi` such that for any `i`,
@@ -39,6 +41,7 @@ pub(crate) struct PermutationChallenge<F: Field> {
}
/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone)]
pub(crate) struct PermutationChallengeSet<F: Field> {
pub(crate) challenges: Vec<PermutationChallenge<F>>,
}
@@ -49,6 +52,7 @@ pub(crate) fn compute_permutation_z_polys<F, C, S, const D: usize>(
config: &StarkConfig,
challenger: &mut Challenger<F, C::Hasher>,
trace_poly_values: &[PolynomialValues<F>],
permutation_challenge_sets: &[PermutationChallengeSet<F>],
) -> Vec<PolynomialValues<F>>
where
F: RichField + Extendable<D>,
@@ -56,12 +60,6 @@ where
S: Stark<F, D>,
{
let permutation_pairs = stark.permutation_pairs();
let permutation_challenge_sets = get_n_permutation_challenge_sets(
challenger,
config.num_challenges,
stark.permutation_batch_size(),
);
let permutation_batches = get_permutation_batches(
&permutation_pairs,
&permutation_challenge_sets,
@@ -192,3 +190,63 @@ pub(crate) fn get_permutation_batches<'a, F: Field>(
})
.collect()
}
// TODO: Use slices.
pub struct PermutationCheckData<F: Field, FE: FieldExtension<D2, BaseField = F>, const D2: usize> {
pub(crate) local_zs: Vec<FE>,
pub(crate) next_zs: Vec<FE>,
pub(crate) permutation_challenge_sets: Vec<PermutationChallengeSet<F>>,
}
pub(crate) fn eval_permutation_checks<F, FE, C, S, const D: usize, const D2: usize>(
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationVars<FE, FE, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
local_zs: &[FE],
next_zs: &[FE],
consumer: &mut ConstraintConsumer<FE>,
permutation_challenge_sets: &[PermutationChallengeSet<F>],
) where
F: RichField + Extendable<D>,
FE: FieldExtension<D2, BaseField = F>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
// TODO: Z_1 check.
let permutation_pairs = stark.permutation_pairs();
let permutation_batches = get_permutation_batches(
&permutation_pairs,
permutation_challenge_sets,
config.num_challenges,
stark.permutation_batch_size(),
);
// Each zs value corresponds to a permutation batch.
for (i, instances) in permutation_batches.iter().enumerate() {
// Check Z(gx) * Π(reduced_rhs) = Z(x) * Π(reduced_lhs)
let (reduced_lhs, reduced_rhs): (Vec<FE>, Vec<FE>) = instances
.iter()
.map(|instance| {
let PermutationInstance {
pair: PermutationPair { column_pairs },
challenge: PermutationChallenge { beta, gamma },
} = instance;
column_pairs.iter().rev().fold(
(FE::from_basefield(*gamma), FE::from_basefield(*gamma)),
|(lhs, rhs), &(i, j)| {
(
lhs.scalar_mul(*beta) + vars.local_values[i],
rhs.scalar_mul(*beta) + vars.local_values[j],
)
},
)
})
.unzip();
let constraint = next_zs[i] * reduced_rhs.into_iter().product()
- local_zs[i] * reduced_lhs.into_iter().product();
consumer.constraint(constraint);
}
}
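
For readers following the new `eval_permutation_checks` above: the fold compresses each batch of column pairs with the challenges `beta` and `gamma`, and the resulting products are compared across one step of the trace. Below is a minimal, self-contained sketch of that reduction and of the per-batch constraint, using a toy 31-bit prime field instead of plonky2's field types; every name in it is invented for the illustration and is not part of the crate.

// Toy stand-in for the base field: arithmetic modulo the Mersenne prime 2^31 - 1.
const P: u64 = (1 << 31) - 1;

fn add(a: u64, b: u64) -> u64 { (a + b) % P }
fn mul(a: u64, b: u64) -> u64 { ((a as u128 * b as u128) % P as u128) as u64 }

// Mirrors the `fold` over `column_pairs`: starting from gamma, multiply by beta and add
// the next column value, so (v_0, ..., v_{k-1}) becomes
// gamma * beta^k + v_0 * beta^(k-1) + ... + v_{k-1}.
fn reduce(values: &[u64], beta: u64, gamma: u64) -> u64 {
    values.iter().fold(gamma, |acc, &v| add(mul(acc, beta), v))
}

// The quantity pushed into the constraint consumer, shown here for a batch containing a
// single instance: Z(gx) * reduce(rhs) - Z(x) * reduce(lhs), which must vanish on the
// trace domain.
fn permutation_constraint(
    local_z: u64, // Z(x)
    next_z: u64,  // Z(gx)
    lhs_values: &[u64],
    rhs_values: &[u64],
    beta: u64,
    gamma: u64,
) -> u64 {
    let lhs = reduce(lhs_values, beta, gamma);
    let rhs = reduce(rhs_values, beta, gamma);
    (mul(next_z, rhs) + P - mul(local_z, lhs)) % P
}

fn main() {
    let (beta, gamma) = (7, 11);
    // One batch with two column pairs; the Z values here are arbitrary placeholders.
    let c = permutation_constraint(1, 1, &[3, 5], &[5, 3], beta, gamma);
    println!("constraint evaluation: {c}");
}

The real code folds over `column_pairs.iter().rev()`, which only changes which column is paired with which power of `beta`; the shape of the constraint is the same.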

View File

@@ -18,9 +18,13 @@ use rayon::prelude::*;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::permutation::compute_permutation_z_polys;
use crate::permutation::PermutationCheckData;
use crate::permutation::{
compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet,
};
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;
pub fn prove<F, C, S, const D: usize>(
@@ -80,28 +84,41 @@ where
challenger.observe_cap(&trace_cap);
// Permutation arguments.
let permutation_zs_commitment = if stark.uses_permutation_args() {
let permutation_zs_commitment_challenges = if stark.uses_permutation_args() {
let permutation_challenge_sets = get_n_permutation_challenge_sets(
&mut challenger,
config.num_challenges,
stark.permutation_batch_size(),
);
let permutation_z_polys = compute_permutation_z_polys::<F, C, S, D>(
&stark,
config,
&mut challenger,
&trace_poly_values,
&permutation_challenge_sets,
);
timed!(
timing,
"compute permutation Z commitments",
Some(PolynomialBatch::from_values(
permutation_z_polys,
rate_bits,
false,
config.fri_config.cap_height,
timing,
None,
Some((
PolynomialBatch::from_values(
permutation_z_polys,
rate_bits,
false,
config.fri_config.cap_height,
timing,
None,
),
permutation_challenge_sets
))
)
} else {
None
};
let permutation_zs_commitment = permutation_zs_commitment_challenges
.as_ref()
.map(|(comm, _)| comm);
let permutation_zs_cap = permutation_zs_commitment
.as_ref()
.map(|commit| commit.merkle_tree.cap.clone());
@@ -113,10 +130,11 @@ where
let quotient_polys = compute_quotient_polys::<F, C, S, D>(
&stark,
&trace_commitment,
&permutation_zs_commitment_challenges,
public_inputs,
alphas,
degree_bits,
rate_bits,
config,
);
let all_quotient_chunks = quotient_polys
.into_par_iter()
@@ -156,13 +174,13 @@ where
zeta,
g,
&trace_commitment,
permutation_zs_commitment.as_ref(),
permutation_zs_commitment,
&quotient_commitment,
);
challenger.observe_openings(&openings.to_fri_openings());
let initial_merkle_trees = once(&trace_commitment)
.chain(permutation_zs_commitment.as_ref())
.chain(permutation_zs_commitment)
.chain(once(&quotient_commitment))
.collect_vec();
@@ -196,10 +214,14 @@ where
fn compute_quotient_polys<F, C, S, const D: usize>(
stark: &S,
trace_commitment: &PolynomialBatch<F, C, D>,
permutation_zs_commitment_challenges: &Option<(
PolynomialBatch<F, C, D>,
Vec<PermutationChallengeSet<F>>,
)>,
public_inputs: [F; S::PUBLIC_INPUTS],
alphas: Vec<F>,
degree_bits: usize,
rate_bits: usize,
config: &StarkConfig,
) -> Vec<PolynomialCoeffs<F>>
where
F: RichField + Extendable<D>,
@@ -209,6 +231,7 @@ where
[(); S::PUBLIC_INPUTS]:,
{
let degree = 1 << degree_bits;
let rate_bits = config.fri_config.rate_bits;
let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor());
assert!(
@@ -255,7 +278,22 @@ where
next_values: &get_at_index(trace_commitment, (i + next_step) % size),
public_inputs: &public_inputs,
};
stark.eval_packed_base(vars, &mut consumer);
let permutation_check_data = permutation_zs_commitment_challenges.as_ref().map(
|(permutation_zs_commitment, permutation_challenge_sets)| PermutationCheckData {
local_zs: get_at_index(&permutation_zs_commitment, i).to_vec(),
next_zs: get_at_index(&permutation_zs_commitment, (i + next_step) % size)
.to_vec(),
permutation_challenge_sets: permutation_challenge_sets.to_vec(),
},
);
eval_vanishing_poly::<F, F, C, S, D, 1>(
stark,
config,
vars,
permutation_check_data,
&mut consumer,
);
// stark.eval_packed_base(vars, &mut consumer);
// TODO: Add in constraints for permutation arguments.
// TODO: Fix this once we use a genuine `PackedField`.
let mut constraints_evals = consumer.accumulators();
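
The constraint evaluated here vanishes because of how the Z values are constructed. Its body is not shown in this diff, but `compute_permutation_z_polys` presumably builds Z as a running product of `reduce(lhs) / reduce(rhs)` over the rows. The toy below (same 31-bit field stand-in as the sketch under the first file; not plonky2 code) shows why the per-row relation Z(gx) * reduce(rhs) = Z(x) * reduce(lhs) then holds, and why Z wraps back to 1 when the two column sets carry the same multiset of values.

const P: u64 = (1 << 31) - 1;

fn add(a: u64, b: u64) -> u64 { (a + b) % P }
fn mul(a: u64, b: u64) -> u64 { ((a as u128 * b as u128) % P as u128) as u64 }
fn pow(mut b: u64, mut e: u64) -> u64 {
    let mut acc = 1;
    while e > 0 {
        if e & 1 == 1 { acc = mul(acc, b); }
        b = mul(b, b);
        e >>= 1;
    }
    acc
}
// Modular inverse via Fermat's little theorem (P is prime).
fn inv(a: u64) -> u64 { pow(a, P - 2) }
fn reduce(values: &[u64], beta: u64, gamma: u64) -> u64 {
    values.iter().fold(gamma, |acc, &v| add(mul(acc, beta), v))
}

fn main() {
    let (beta, gamma) = (7, 11);
    // Two single-column traces holding the same multiset of values.
    let lhs_col = [4, 9, 2, 6];
    let rhs_col = [2, 6, 9, 4];
    let rows = lhs_col.len();

    // Z starts at 1 and accumulates reduce(lhs) / reduce(rhs) row by row.
    let mut z = vec![1u64];
    for r in 0..rows {
        let num = reduce(&[lhs_col[r]], beta, gamma);
        let den = reduce(&[rhs_col[r]], beta, gamma);
        let next = mul(z[r], mul(num, inv(den)));
        z.push(next);
    }

    // Because both columns hold the same multiset, the product telescopes back to 1,
    // which is the wrap-around condition on the cyclic trace domain.
    assert_eq!(z[rows], 1);

    // The per-row relation evaluated by `eval_permutation_checks` holds at every row.
    for r in 0..rows {
        let lhs = reduce(&[lhs_col[r]], beta, gamma);
        let rhs = reduce(&[rhs_col[r]], beta, gamma);
        assert_eq!(mul(z[r + 1], rhs), mul(z[r], lhs));
    }
    println!("toy permutation Z argument checks out");
}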

View File

@@ -4,82 +4,39 @@ use plonky2::plonk::config::GenericConfig;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::permutation::{
get_permutation_batches, PermutationChallenge, PermutationChallengeSet, PermutationInstance,
PermutationPair,
};
use crate::permutation::{eval_permutation_checks, PermutationCheckData};
use crate::stark::Stark;
use crate::vars::StarkEvaluationVars;
pub(crate) fn eval_vanishing_poly<F, C, S, const D: usize>(
stark: S,
pub(crate) fn eval_vanishing_poly<F, FE, C, S, const D: usize, const D2: usize>(
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationVars<F::Extension, F::Extension, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
local_zs: &[F::Extension],
next_zs: &[F::Extension],
mut consumer: ConstraintConsumer<F::Extension>,
permutation_challenge_sets: &[PermutationChallengeSet<F>],
vars: StarkEvaluationVars<FE, FE, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
permutation_data: Option<PermutationCheckData<F, FE, D2>>,
consumer: &mut ConstraintConsumer<FE>,
) where
F: RichField + Extendable<D>,
FE: FieldExtension<D2, BaseField = F>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
stark.eval_packed_generic(vars, &mut consumer);
}
fn eval_permutation_checks<F, C, S, const D: usize>(
stark: S,
config: &StarkConfig,
vars: StarkEvaluationVars<F::Extension, F::Extension, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
local_zs: &[F::Extension],
next_zs: &[F::Extension],
consumer: &mut ConstraintConsumer<F::Extension>,
permutation_challenge_sets: &[PermutationChallengeSet<F>],
) where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
// TODO: Z_1 check.
let permutation_pairs = stark.permutation_pairs();
let permutation_batches = get_permutation_batches(
&permutation_pairs,
stark.eval_packed_generic(vars, consumer);
if let Some(PermutationCheckData {
local_zs,
next_zs,
permutation_challenge_sets,
config.num_challenges,
stark.permutation_batch_size(),
);
// Each zs value corresponds to a permutation batch.
for (i, instances) in permutation_batches.iter().enumerate() {
// Z(gx) * down = Z x * up
let (reduced_lhs, reduced_rhs): (Vec<F::Extension>, Vec<F::Extension>) = instances
.iter()
.map(|instance| {
let PermutationInstance {
pair: PermutationPair { column_pairs },
challenge: PermutationChallenge { beta, gamma },
} = instance;
column_pairs.iter().rev().fold(
(
F::Extension::from_basefield(*gamma),
F::Extension::from_basefield(*gamma),
),
|(lhs, rhs), &(i, j)| {
(
lhs.scalar_mul(*beta) + vars.local_values[i],
rhs.scalar_mul(*beta) + vars.local_values[j],
)
},
)
})
.unzip();
let constraint = next_zs[i] * reduced_rhs.into_iter().product()
- local_zs[i] * reduced_lhs.into_iter().product();
consumer.constraint(constraint);
}) = permutation_data
{
eval_permutation_checks::<F, FE, C, S, D, D2>(
stark,
config,
vars,
&local_zs,
&next_zs,
consumer,
&permutation_challenge_sets,
);
}
}
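
Taken together, the three files make one refactor: permutation challenges are sampled from the transcript once in `prove`, kept alongside the Z commitment, and passed into `eval_vanishing_poly` as optional `PermutationCheckData` instead of being re-derived inside `compute_permutation_z_polys`. Below is a schematic sketch of that data flow, using simplified stand-in types rather than the real plonky2/starky API.

// Stand-ins for the real types; the point is the data flow, not the exact signatures.
#[allow(dead_code)]
#[derive(Clone)]
struct ChallengeSet { beta: u64, gamma: u64 }      // ~ PermutationChallengeSet<F>
struct ZCommitment { z_values: Vec<u64> }          // ~ PolynomialBatch<F, C, D>
struct PermutationCheckData { local_z: u64, next_z: u64, challenges: Vec<ChallengeSet> }

// In the real prover these come from the Fiat-Shamir challenger and a Merkle commitment.
fn sample_challenge_sets(n: usize) -> Vec<ChallengeSet> {
    (1..=n as u64).map(|i| ChallengeSet { beta: 2 * i, gamma: 2 * i + 1 }).collect()
}
fn commit_z_polys(_challenges: &[ChallengeSet]) -> ZCommitment {
    ZCommitment { z_values: vec![1, 1, 1, 1] }
}

// ~ eval_vanishing_poly: always evaluate the stark's own constraints; additionally run the
// permutation checks whenever permutation data was supplied.
fn eval_vanishing_poly(permutation_data: Option<PermutationCheckData>) {
    // stark.eval_packed_generic(vars, consumer) would run here.
    if let Some(data) = permutation_data {
        // eval_permutation_checks(...) would run here.
        println!(
            "permutation check with {} challenge set(s), Z: {} -> {}",
            data.challenges.len(),
            data.local_z,
            data.next_z
        );
    }
}

fn main() {
    let uses_permutation_args = true;

    // Sample once, and keep the challenges next to the commitment so both reach the
    // quotient computation unchanged.
    let permutation_zs_commitment_challenges = if uses_permutation_args {
        let challenge_sets = sample_challenge_sets(2);
        let commitment = commit_z_polys(&challenge_sets);
        Some((commitment, challenge_sets))
    } else {
        None
    };

    // Per evaluation point, the bundle is turned into optional PermutationCheckData.
    let row = 0;
    let permutation_check_data = permutation_zs_commitment_challenges
        .as_ref()
        .map(|(commitment, challenge_sets)| PermutationCheckData {
            local_z: commitment.z_values[row],
            next_z: commitment.z_values[(row + 1) % commitment.z_values.len()],
            challenges: challenge_sets.clone(),
        });

    eval_vanishing_poly(permutation_check_data);
}

Because the challenges come out of the Fiat-Shamir transcript, the verifier can recompute the same sets on its side, which is consistent with the proof itself only carrying `permutation_zs_cap`.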