2022-05-04 20:57:07 +02:00
|
|
|
//! Permutation arguments.
|
|
|
|
|
|
2022-09-23 13:41:14 +02:00
|
|
|
use std::fmt::Debug;
|
|
|
|
|
|
2022-05-04 20:57:07 +02:00
|
|
|
use itertools::Itertools;
|
|
|
|
|
use plonky2::field::batch_util::batch_multiply_inplace;
|
2022-06-27 07:18:21 -07:00
|
|
|
use plonky2::field::extension::{Extendable, FieldExtension};
|
2022-06-27 15:07:52 -07:00
|
|
|
use plonky2::field::packed::PackedField;
|
2022-05-04 20:57:07 +02:00
|
|
|
use plonky2::field::polynomial::PolynomialValues;
|
2022-06-27 12:24:09 -07:00
|
|
|
use plonky2::field::types::Field;
|
2022-05-04 20:57:07 +02:00
|
|
|
use plonky2::hash::hash_types::RichField;
|
|
|
|
|
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
|
|
|
|
|
use plonky2::iop::ext_target::ExtensionTarget;
|
|
|
|
|
use plonky2::iop::target::Target;
|
|
|
|
|
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
2023-02-10 21:47:51 -08:00
|
|
|
use plonky2::plonk::config::{AlgebraicHasher, Hasher};
|
Shrink STARK proofs to a constant degree
The goal here is to end up with a single "root" circuit representing any EVM proof. I.e. it must verify each STARK, but be general enough to work with any combination of STARK sizes (within some range of sizes that we chose to support). This root circuit can then be plugged into our aggregation circuit.
In particular, for each STARK, and for each initial `degree_bits` (within a range that we choose to support), this adds a "shrinking chain" of circuits. Such a chain shrinks a STARK proof from that initial `degree_bits` down to a constant, `THRESHOLD_DEGREE_BITS`.
The root circuit then combines these shrunk-to-constant proofs for each table. It's similar to `RecursiveAllProof::verify_circuit`; I adapted the code from there and I think we can remove it after. The main difference is that now instead of having one verification key per STARK, we have several possible VKs, one per initial `degree_bits`. We bake the list of possible VKs into the root circuit, and have the prover indicate the index of the VK they're actually using.
This also partially removes the default feature of CTLs. So far we've used filters instead of defaults. Until now it was easy to keep supporting defaults just in case, but here maintaining support would require some more work. E.g. we couldn't use `exp_u64` any more, since the size delta is now dynamic, it can't be hardcoded. If there are no concerns, I'll fully remove the feature after.
2022-12-27 18:15:18 -08:00
|
|
|
use plonky2::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit};
|
2022-05-04 20:57:07 +02:00
|
|
|
use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget};
|
2023-01-30 08:51:33 -08:00
|
|
|
use plonky2_maybe_rayon::*;
|
2022-05-04 20:57:07 +02:00
|
|
|
|
|
|
|
|
use crate::config::StarkConfig;
|
|
|
|
|
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
|
|
|
|
|
use crate::stark::Stark;
|
|
|
|
|
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
|
|
|
|
|
|
|
|
|
|
/// A pair of lists of columns, `lhs` and `rhs`, that should be permutations of one another.
/// In particular, there should exist some permutation `pi` such that for any `i`,
/// `trace[lhs[i]] = pi(trace[rhs[i]])`. Here `trace` denotes the trace in column-major form, so
/// `trace[col]` is a column vector.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct PermutationPair {
    /// Each entry contains two column indices, representing two columns which should be
    /// permutations of one another.
    pub column_pairs: Vec<(usize, usize)>,
}

impl PermutationPair {
    /// Convenience constructor for a pair containing a single `(lhs, rhs)` column pair.
    pub fn singletons(lhs: usize, rhs: usize) -> Self {
        Self {
            column_pairs: vec![(lhs, rhs)],
        }
    }
}
|
|
|
|
|
|
|
|
|
|
/// A single instance of a permutation check protocol.
///
/// Couples one `PermutationPair` (the columns being checked) with the challenge used to
/// combine and offset those columns in the grand-product argument.
pub(crate) struct PermutationInstance<'a, T: Copy + Eq + PartialEq + Debug> {
    /// The columns whose values should be permutations of one another.
    pub(crate) pair: &'a PermutationPair,
    /// The `(beta, gamma)` randomness used to reduce this pair's columns.
    pub(crate) challenge: GrandProductChallenge<T>,
}
|
|
|
|
|
|
|
|
|
|
/// Randomness for a single instance of a permutation check protocol.
///
/// Generic over `T` so the same type serves both native evaluation (`T = F`) and recursive
/// circuits (`T = Target`).
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
    /// Randomness used to combine multiple columns into one.
    pub(crate) beta: T,
    /// Random offset that's added to the beta-reduced column values.
    pub(crate) gamma: T,
}
|
|
|
|
|
|
2022-05-18 09:22:58 +02:00
|
|
|
impl<F: Field> GrandProductChallenge<F> {
|
|
|
|
|
pub(crate) fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(
|
|
|
|
|
&self,
|
|
|
|
|
terms: T,
|
|
|
|
|
) -> P
|
|
|
|
|
where
|
|
|
|
|
FE: FieldExtension<D2, BaseField = F>,
|
|
|
|
|
P: PackedField<Scalar = FE>,
|
|
|
|
|
T::IntoIter: DoubleEndedIterator,
|
|
|
|
|
{
|
|
|
|
|
reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2022-05-24 16:24:52 +02:00
|
|
|
impl GrandProductChallenge<Target> {
|
2022-05-26 20:44:59 +02:00
|
|
|
pub(crate) fn combine_circuit<F: RichField + Extendable<D>, const D: usize>(
|
2022-05-24 16:24:52 +02:00
|
|
|
&self,
|
|
|
|
|
builder: &mut CircuitBuilder<F, D>,
|
|
|
|
|
terms: &[ExtensionTarget<D>],
|
|
|
|
|
) -> ExtensionTarget<D> {
|
2022-05-30 20:13:25 +02:00
|
|
|
let reduced = reduce_with_powers_ext_circuit(builder, terms, self.beta);
|
|
|
|
|
let gamma = builder.convert_to_ext(self.gamma);
|
|
|
|
|
builder.add_extension(reduced, gamma)
|
2022-05-24 16:24:52 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2022-05-04 20:57:07 +02:00
|
|
|
/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
|
2022-09-23 13:41:14 +02:00
|
|
|
#[derive(Clone, Eq, PartialEq, Debug)]
|
|
|
|
|
pub(crate) struct GrandProductChallengeSet<T: Copy + Eq + PartialEq + Debug> {
|
2022-05-12 13:51:02 +02:00
|
|
|
pub(crate) challenges: Vec<GrandProductChallenge<T>>,
|
2022-05-04 20:57:07 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Compute all Z polynomials (for permutation arguments).
|
2023-02-10 21:47:51 -08:00
|
|
|
pub(crate) fn compute_permutation_z_polys<F, S, const D: usize>(
|
2022-05-04 20:57:07 +02:00
|
|
|
stark: &S,
|
|
|
|
|
config: &StarkConfig,
|
|
|
|
|
trace_poly_values: &[PolynomialValues<F>],
|
2022-05-12 13:51:02 +02:00
|
|
|
permutation_challenge_sets: &[GrandProductChallengeSet<F>],
|
2022-05-04 20:57:07 +02:00
|
|
|
) -> Vec<PolynomialValues<F>>
|
|
|
|
|
where
|
|
|
|
|
F: RichField + Extendable<D>,
|
|
|
|
|
S: Stark<F, D>,
|
|
|
|
|
{
|
|
|
|
|
let permutation_pairs = stark.permutation_pairs();
|
|
|
|
|
let permutation_batches = get_permutation_batches(
|
|
|
|
|
&permutation_pairs,
|
|
|
|
|
permutation_challenge_sets,
|
|
|
|
|
config.num_challenges,
|
|
|
|
|
stark.permutation_batch_size(),
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
permutation_batches
|
|
|
|
|
.into_par_iter()
|
|
|
|
|
.map(|instances| compute_permutation_z_poly(&instances, trace_poly_values))
|
|
|
|
|
.collect()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Compute a single Z polynomial.
|
|
|
|
|
fn compute_permutation_z_poly<F: Field>(
|
|
|
|
|
instances: &[PermutationInstance<F>],
|
|
|
|
|
trace_poly_values: &[PolynomialValues<F>],
|
|
|
|
|
) -> PolynomialValues<F> {
|
|
|
|
|
let degree = trace_poly_values[0].len();
|
|
|
|
|
let (reduced_lhs_polys, reduced_rhs_polys): (Vec<_>, Vec<_>) = instances
|
|
|
|
|
.iter()
|
|
|
|
|
.map(|instance| permutation_reduced_polys(instance, trace_poly_values, degree))
|
|
|
|
|
.unzip();
|
|
|
|
|
|
|
|
|
|
let numerator = poly_product_elementwise(reduced_lhs_polys.into_iter());
|
|
|
|
|
let denominator = poly_product_elementwise(reduced_rhs_polys.into_iter());
|
|
|
|
|
|
|
|
|
|
// Compute the quotients.
|
|
|
|
|
let denominator_inverses = F::batch_multiplicative_inverse(&denominator.values);
|
|
|
|
|
let mut quotients = numerator.values;
|
|
|
|
|
batch_multiply_inplace(&mut quotients, &denominator_inverses);
|
|
|
|
|
|
|
|
|
|
// Compute Z, which contains partial products of the quotients.
|
|
|
|
|
let mut partial_products = Vec::with_capacity(degree);
|
|
|
|
|
let mut acc = F::ONE;
|
|
|
|
|
for q in quotients {
|
|
|
|
|
partial_products.push(acc);
|
|
|
|
|
acc *= q;
|
|
|
|
|
}
|
|
|
|
|
PolynomialValues::new(partial_products)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Computes the reduced polynomial, `\sum beta^i f_i(x) + gamma`, for both the "left" and "right"
|
|
|
|
|
/// sides of a given `PermutationPair`.
|
|
|
|
|
fn permutation_reduced_polys<F: Field>(
|
|
|
|
|
instance: &PermutationInstance<F>,
|
|
|
|
|
trace_poly_values: &[PolynomialValues<F>],
|
|
|
|
|
degree: usize,
|
|
|
|
|
) -> (PolynomialValues<F>, PolynomialValues<F>) {
|
|
|
|
|
let PermutationInstance {
|
|
|
|
|
pair: PermutationPair { column_pairs },
|
2022-05-12 13:51:02 +02:00
|
|
|
challenge: GrandProductChallenge { beta, gamma },
|
2022-05-04 20:57:07 +02:00
|
|
|
} = instance;
|
|
|
|
|
|
|
|
|
|
let mut reduced_lhs = PolynomialValues::constant(*gamma, degree);
|
|
|
|
|
let mut reduced_rhs = PolynomialValues::constant(*gamma, degree);
|
|
|
|
|
for ((lhs, rhs), weight) in column_pairs.iter().zip(beta.powers()) {
|
|
|
|
|
reduced_lhs.add_assign_scaled(&trace_poly_values[*lhs], weight);
|
|
|
|
|
reduced_rhs.add_assign_scaled(&trace_poly_values[*rhs], weight);
|
|
|
|
|
}
|
|
|
|
|
(reduced_lhs, reduced_rhs)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Computes the elementwise product of a set of polynomials. Assumes that the set is non-empty and
|
|
|
|
|
/// that each polynomial has the same length.
|
|
|
|
|
fn poly_product_elementwise<F: Field>(
|
|
|
|
|
mut polys: impl Iterator<Item = PolynomialValues<F>>,
|
|
|
|
|
) -> PolynomialValues<F> {
|
|
|
|
|
let mut product = polys.next().expect("Expected at least one polynomial");
|
|
|
|
|
for poly in polys {
|
|
|
|
|
batch_multiply_inplace(&mut product.values, &poly.values)
|
|
|
|
|
}
|
|
|
|
|
product
|
|
|
|
|
}
|
|
|
|
|
|
2022-05-12 13:51:02 +02:00
|
|
|
fn get_grand_product_challenge<F: RichField, H: Hasher<F>>(
|
2022-05-04 20:57:07 +02:00
|
|
|
challenger: &mut Challenger<F, H>,
|
2022-05-12 13:51:02 +02:00
|
|
|
) -> GrandProductChallenge<F> {
|
2022-05-04 20:57:07 +02:00
|
|
|
let beta = challenger.get_challenge();
|
|
|
|
|
let gamma = challenger.get_challenge();
|
2022-05-12 13:51:02 +02:00
|
|
|
GrandProductChallenge { beta, gamma }
|
2022-05-04 20:57:07 +02:00
|
|
|
}
|
|
|
|
|
|
2022-05-12 13:51:02 +02:00
|
|
|
pub(crate) fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
|
2022-05-04 20:57:07 +02:00
|
|
|
challenger: &mut Challenger<F, H>,
|
|
|
|
|
num_challenges: usize,
|
2022-05-12 13:51:02 +02:00
|
|
|
) -> GrandProductChallengeSet<F> {
|
2022-05-04 20:57:07 +02:00
|
|
|
let challenges = (0..num_challenges)
|
2022-05-12 13:51:02 +02:00
|
|
|
.map(|_| get_grand_product_challenge(challenger))
|
2022-05-04 20:57:07 +02:00
|
|
|
.collect();
|
2022-05-12 13:51:02 +02:00
|
|
|
GrandProductChallengeSet { challenges }
|
2022-05-04 20:57:07 +02:00
|
|
|
}
|
|
|
|
|
|
2022-05-12 13:51:02 +02:00
|
|
|
pub(crate) fn get_n_grand_product_challenge_sets<F: RichField, H: Hasher<F>>(
|
2022-05-04 20:57:07 +02:00
|
|
|
challenger: &mut Challenger<F, H>,
|
|
|
|
|
num_challenges: usize,
|
|
|
|
|
num_sets: usize,
|
2022-05-12 13:51:02 +02:00
|
|
|
) -> Vec<GrandProductChallengeSet<F>> {
|
2022-05-04 20:57:07 +02:00
|
|
|
(0..num_sets)
|
2022-05-12 13:51:02 +02:00
|
|
|
.map(|_| get_grand_product_challenge_set(challenger, num_challenges))
|
2022-05-04 20:57:07 +02:00
|
|
|
.collect()
|
|
|
|
|
}
|
|
|
|
|
|
2022-05-20 11:21:13 +02:00
|
|
|
fn get_grand_product_challenge_target<
|
2022-05-04 20:57:07 +02:00
|
|
|
F: RichField + Extendable<D>,
|
|
|
|
|
H: AlgebraicHasher<F>,
|
|
|
|
|
const D: usize,
|
|
|
|
|
>(
|
|
|
|
|
builder: &mut CircuitBuilder<F, D>,
|
|
|
|
|
challenger: &mut RecursiveChallenger<F, H, D>,
|
2022-05-12 13:51:02 +02:00
|
|
|
) -> GrandProductChallenge<Target> {
|
2022-05-04 20:57:07 +02:00
|
|
|
let beta = challenger.get_challenge(builder);
|
|
|
|
|
let gamma = challenger.get_challenge(builder);
|
2022-05-12 13:51:02 +02:00
|
|
|
GrandProductChallenge { beta, gamma }
|
2022-05-04 20:57:07 +02:00
|
|
|
}
|
|
|
|
|
|
2022-05-20 11:21:13 +02:00
|
|
|
pub(crate) fn get_grand_product_challenge_set_target<
|
2022-05-04 20:57:07 +02:00
|
|
|
F: RichField + Extendable<D>,
|
|
|
|
|
H: AlgebraicHasher<F>,
|
|
|
|
|
const D: usize,
|
|
|
|
|
>(
|
|
|
|
|
builder: &mut CircuitBuilder<F, D>,
|
|
|
|
|
challenger: &mut RecursiveChallenger<F, H, D>,
|
|
|
|
|
num_challenges: usize,
|
2022-05-12 13:51:02 +02:00
|
|
|
) -> GrandProductChallengeSet<Target> {
|
2022-05-04 20:57:07 +02:00
|
|
|
let challenges = (0..num_challenges)
|
2022-05-20 11:21:13 +02:00
|
|
|
.map(|_| get_grand_product_challenge_target(builder, challenger))
|
2022-05-04 20:57:07 +02:00
|
|
|
.collect();
|
2022-05-12 13:51:02 +02:00
|
|
|
GrandProductChallengeSet { challenges }
|
2022-05-04 20:57:07 +02:00
|
|
|
}
|
|
|
|
|
|
2022-05-20 11:21:13 +02:00
|
|
|
pub(crate) fn get_n_grand_product_challenge_sets_target<
|
2022-05-04 20:57:07 +02:00
|
|
|
F: RichField + Extendable<D>,
|
|
|
|
|
H: AlgebraicHasher<F>,
|
|
|
|
|
const D: usize,
|
|
|
|
|
>(
|
|
|
|
|
builder: &mut CircuitBuilder<F, D>,
|
|
|
|
|
challenger: &mut RecursiveChallenger<F, H, D>,
|
|
|
|
|
num_challenges: usize,
|
|
|
|
|
num_sets: usize,
|
2022-05-12 13:51:02 +02:00
|
|
|
) -> Vec<GrandProductChallengeSet<Target>> {
|
2022-05-04 20:57:07 +02:00
|
|
|
(0..num_sets)
|
2022-05-20 11:21:13 +02:00
|
|
|
.map(|_| get_grand_product_challenge_set_target(builder, challenger, num_challenges))
|
2022-05-04 20:57:07 +02:00
|
|
|
.collect()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Get a list of instances of our batch-permutation argument. These are permutation arguments
|
|
|
|
|
/// where the same `Z(x)` polynomial is used to check more than one permutation.
|
|
|
|
|
/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we
|
|
|
|
|
/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we
|
|
|
|
|
/// chunk these arguments based on our batch size.
|
2022-09-23 13:41:14 +02:00
|
|
|
pub(crate) fn get_permutation_batches<'a, T: Copy + Eq + PartialEq + Debug>(
|
2022-05-04 20:57:07 +02:00
|
|
|
permutation_pairs: &'a [PermutationPair],
|
2022-05-12 13:51:02 +02:00
|
|
|
permutation_challenge_sets: &[GrandProductChallengeSet<T>],
|
2022-05-04 20:57:07 +02:00
|
|
|
num_challenges: usize,
|
|
|
|
|
batch_size: usize,
|
|
|
|
|
) -> Vec<Vec<PermutationInstance<'a, T>>> {
|
|
|
|
|
permutation_pairs
|
|
|
|
|
.iter()
|
|
|
|
|
.cartesian_product(0..num_challenges)
|
|
|
|
|
.chunks(batch_size)
|
|
|
|
|
.into_iter()
|
|
|
|
|
.map(|batch| {
|
|
|
|
|
batch
|
|
|
|
|
.enumerate()
|
|
|
|
|
.map(|(i, (pair, chal))| {
|
|
|
|
|
let challenge = permutation_challenge_sets[i].challenges[chal];
|
|
|
|
|
PermutationInstance { pair, challenge }
|
|
|
|
|
})
|
|
|
|
|
.collect_vec()
|
|
|
|
|
})
|
|
|
|
|
.collect()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Data needed to evaluate the permutation-argument constraints at one (packed) evaluation point.
pub struct PermutationCheckVars<F, FE, P, const D2: usize>
where
    F: Field,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
{
    // Z-polynomial values at the current row (one per permutation batch).
    pub(crate) local_zs: Vec<P>,
    // Z-polynomial values at the next row, used for the transition constraint.
    pub(crate) next_zs: Vec<P>,
    // The same challenge sets that were used when computing the Z polynomials.
    pub(crate) permutation_challenge_sets: Vec<GrandProductChallengeSet<F>>,
}
|
|
|
|
|
|
2023-02-10 21:47:51 -08:00
|
|
|
/// Evaluates all permutation-argument constraints and feeds them to `consumer`:
/// the boundary constraint `Z(1) = 1` for each batch, and the grand-product transition
/// constraint relating `Z(x)` and `Z(gx)` via the reduced lhs/rhs column combinations.
pub(crate) fn eval_permutation_checks<F, FE, P, S, const D: usize, const D2: usize>(
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
    permutation_vars: PermutationCheckVars<F, FE, P, D2>,
    consumer: &mut ConstraintConsumer<P>,
) where
    F: RichField + Extendable<D>,
    FE: FieldExtension<D2, BaseField = F>,
    P: PackedField<Scalar = FE>,
    S: Stark<F, D>,
{
    let PermutationCheckVars {
        local_zs,
        next_zs,
        permutation_challenge_sets,
    } = permutation_vars;

    // Check that Z(1) = 1;
    for &z in &local_zs {
        consumer.constraint_first_row(z - FE::ONE);
    }

    let permutation_pairs = stark.permutation_pairs();

    // Must reproduce exactly the batching used when the Z polynomials were computed, so that
    // zs[i] lines up with batch i.
    let permutation_batches = get_permutation_batches(
        &permutation_pairs,
        &permutation_challenge_sets,
        config.num_challenges,
        stark.permutation_batch_size(),
    );

    // Each zs value corresponds to a permutation batch.
    for (i, instances) in permutation_batches.iter().enumerate() {
        // Transition constraint: Z(gx) * prod(reduced_rhs) = Z(x) * prod(reduced_lhs).
        let (reduced_lhs, reduced_rhs): (Vec<P>, Vec<P>) = instances
            .iter()
            .map(|instance| {
                let PermutationInstance {
                    pair: PermutationPair { column_pairs },
                    challenge: GrandProductChallenge { beta, gamma },
                } = instance;
                // NOTE(review): `factor` is reused for both reductions; this assumes
                // `ReducingFactor::reduce_ext` restarts its power sequence on each call —
                // confirm against the `ReducingFactor` implementation.
                let mut factor = ReducingFactor::new(*beta);
                let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
                    .iter()
                    .map(|&(i, j)| (vars.local_values[i], vars.local_values[j]))
                    .unzip();
                (
                    factor.reduce_ext(lhs.into_iter()) + FE::from_basefield(*gamma),
                    factor.reduce_ext(rhs.into_iter()) + FE::from_basefield(*gamma),
                )
            })
            .unzip();
        let constraint = next_zs[i] * reduced_rhs.into_iter().product::<P>()
            - local_zs[i] * reduced_lhs.into_iter().product::<P>();
        consumer.constraint(constraint);
    }
}
|
|
|
|
|
|
|
|
|
|
/// Circuit counterpart of `PermutationCheckVars`: the openings are extension targets rather
/// than field values.
pub struct PermutationCheckDataTarget<const D: usize> {
    // Z-polynomial openings at the current row (one per permutation batch).
    pub(crate) local_zs: Vec<ExtensionTarget<D>>,
    // Z-polynomial openings at the next row.
    pub(crate) next_zs: Vec<ExtensionTarget<D>>,
    // Challenge sets (as targets) matching those used when the Z polynomials were computed.
    pub(crate) permutation_challenge_sets: Vec<GrandProductChallengeSet<Target>>,
}
|
|
|
|
|
|
2022-05-18 09:22:58 +02:00
|
|
|
/// In-circuit version of `eval_permutation_checks`: builds the same boundary and transition
/// constraints as gates via `builder` and feeds them to the recursive consumer.
pub(crate) fn eval_permutation_checks_circuit<F, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    config: &StarkConfig,
    vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
    permutation_data: PermutationCheckDataTarget<D>,
    consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
    [(); S::COLUMNS]:,
{
    let PermutationCheckDataTarget {
        local_zs,
        next_zs,
        permutation_challenge_sets,
    } = permutation_data;

    let one = builder.one_extension();
    // Check that Z(1) = 1;
    for &z in &local_zs {
        let z_1 = builder.sub_extension(z, one);
        consumer.constraint_first_row(builder, z_1);
    }

    let permutation_pairs = stark.permutation_pairs();

    // Must reproduce exactly the batching used when the Z polynomials were computed, so that
    // zs[i] lines up with batch i.
    let permutation_batches = get_permutation_batches(
        &permutation_pairs,
        &permutation_challenge_sets,
        config.num_challenges,
        stark.permutation_batch_size(),
    );

    // Each zs value corresponds to a permutation batch.
    for (i, instances) in permutation_batches.iter().enumerate() {
        let (reduced_lhs, reduced_rhs): (Vec<ExtensionTarget<D>>, Vec<ExtensionTarget<D>>) =
            instances
                .iter()
                .map(|instance| {
                    let PermutationInstance {
                        pair: PermutationPair { column_pairs },
                        challenge: GrandProductChallenge { beta, gamma },
                    } = instance;
                    let beta_ext = builder.convert_to_ext(*beta);
                    let gamma_ext = builder.convert_to_ext(*gamma);
                    // NOTE(review): `factor` is reused for both reductions; assumes
                    // `ReducingFactorTarget::reduce` restarts its power sequence per call —
                    // confirm against its implementation.
                    let mut factor = ReducingFactorTarget::new(beta_ext);
                    let (lhs, rhs): (Vec<_>, Vec<_>) = column_pairs
                        .iter()
                        .map(|&(i, j)| (vars.local_values[i], vars.local_values[j]))
                        .unzip();
                    let reduced_lhs = factor.reduce(&lhs, builder);
                    let reduced_rhs = factor.reduce(&rhs, builder);
                    (
                        builder.add_extension(reduced_lhs, gamma_ext),
                        builder.add_extension(reduced_rhs, gamma_ext),
                    )
                })
                .unzip();
        let reduced_lhs_product = builder.mul_many_extension(reduced_lhs);
        let reduced_rhs_product = builder.mul_many_extension(reduced_rhs);
        // constraint = next_zs[i] * reduced_rhs_product - local_zs[i] * reduced_lhs_product
        let constraint = {
            let tmp = builder.mul_extension(local_zs[i], reduced_lhs_product);
            builder.mul_sub_extension(next_zs[i], reduced_rhs_product, tmp)
        };
        consumer.constraint(builder, constraint)
    }
}
|