Mirror of https://github.com/logos-storage/plonky2.git (synced 2026-01-09 09:13:09 +00:00)

Merge pull request #468 from mir-protocol/stark_constraint_degree

Stark constraint degree

This commit is contained in: commit 9eb9bac0db
@@ -333,28 +333,28 @@ fn compute_quotient_polys<
     alphas: &[F],
 ) -> Vec<PolynomialCoeffs<F>> {
     let num_challenges = common_data.config.num_challenges;
-    let max_degree_bits = log2_ceil(common_data.quotient_degree_factor);
+    let quotient_degree_bits = log2_ceil(common_data.quotient_degree_factor);
     assert!(
-        max_degree_bits <= common_data.config.fri_config.rate_bits,
+        quotient_degree_bits <= common_data.config.fri_config.rate_bits,
         "Having constraints of degree higher than the rate is not supported yet. \
-        If we need this in the future, we can precompute the larger LDE before computing the `ListPolynomialCommitment`s."
+        If we need this in the future, we can precompute the larger LDE before computing the `PolynomialBatch`s."
     );

-    // We reuse the LDE computed in `ListPolynomialCommitment` and extract every `step` points to get
+    // We reuse the LDE computed in `PolynomialBatch` and extract every `step` points to get
     // an LDE matching `max_filtered_constraint_degree`.
-    let step = 1 << (common_data.config.fri_config.rate_bits - max_degree_bits);
+    let step = 1 << (common_data.config.fri_config.rate_bits - quotient_degree_bits);
     // When opening the `Z`s polys at the "next" point in Plonk, need to look at the point `next_step`
     // steps away since we work on an LDE of degree `max_filtered_constraint_degree`.
-    let next_step = 1 << max_degree_bits;
+    let next_step = 1 << quotient_degree_bits;

-    let points = F::two_adic_subgroup(common_data.degree_bits + max_degree_bits);
+    let points = F::two_adic_subgroup(common_data.degree_bits + quotient_degree_bits);
     let lde_size = points.len();

     // Retrieve the LDE values at index `i`.
     let get_at_index =
         |comm: &'a PolynomialBatch<F, C, D>, i: usize| -> &'a [F] { comm.get_lde_values(i * step) };

-    let z_h_on_coset = ZeroPolyOnCoset::new(common_data.degree_bits, max_degree_bits);
+    let z_h_on_coset = ZeroPolyOnCoset::new(common_data.degree_bits, quotient_degree_bits);

     let points_batches = points.par_chunks(BATCH_SIZE);
     let quotient_values: Vec<Vec<F>> = points_batches
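For context on the index arithmetic above: the trace commitment was built with a blowup of 2^rate_bits, while the quotient computation only needs a blowup of 2^quotient_degree_bits, so the prover reads every `step`-th point of the committed LDE and treats the next trace row as sitting `next_step` sampled points away. A minimal standalone sketch of that arithmetic, using toy parameter values that are not taken from the diff:

fn main() {
    let degree_bits = 3usize;           // 2^3 = 8 trace rows (toy value)
    let rate_bits = 3usize;             // committed LDE has 8x blowup (toy value)
    let quotient_degree_bits = 1usize;  // quotient computation only needs 2x blowup (toy value)

    let step = 1 << (rate_bits - quotient_degree_bits);   // read every 4th committed point
    let next_step = 1 << quotient_degree_bits;            // "next row" is 2 small-LDE points away
    let size = 1 << (degree_bits + quotient_degree_bits); // 16 points in the small LDE

    // Taking every `step`-th of the 2^(degree_bits + rate_bits) committed points yields
    // exactly the smaller LDE.
    assert_eq!(size * step, 1 << (degree_bits + rate_bits));

    // On the smaller LDE, the row after point `i` sits `next_step` points further on,
    // wrapping around the evaluation domain.
    let i = size - 1;
    println!("step = {step}, next row of point {i} is point {}", (i + next_step) % size);
}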
@@ -81,6 +81,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStar
     ) {
         todo!()
     }
+
+    fn constraint_degree(&self) -> usize {
+        2
+    }
 }

 #[cfg(test)]
@@ -93,10 +97,11 @@ mod tests {
     use crate::config::StarkConfig;
     use crate::fibonacci_stark::FibonacciStark;
     use crate::prover::prove;
+    use crate::stark_testing::test_stark_low_degree;
     use crate::verifier::verify;

-    fn fibonacci(n: usize, x0: usize, x1: usize) -> usize {
-        (0..n).fold((0, 1), |x, _| (x.1, x.0 + x.1)).1
+    fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
+        (0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
     }

     #[test]
@@ -108,11 +113,7 @@ mod tests {

         let config = StarkConfig::standard_fast_config();
         let num_rows = 1 << 5;
-        let public_inputs = [
-            F::ZERO,
-            F::ONE,
-            F::from_canonical_usize(fibonacci(num_rows - 1, 0, 1)),
-        ];
+        let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
         let stark = S::new(num_rows);
         let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
         let proof = prove::<F, C, S, D>(
@@ -125,4 +126,17 @@ mod tests {

         verify(stark, proof, &config)
     }
+
+    #[test]
+    fn test_fibonacci_stark_degree() -> Result<()> {
+        const D: usize = 2;
+        type C = PoseidonGoldilocksConfig;
+        type F = <C as GenericConfig<D>>::F;
+        type S = FibonacciStark<F, D>;
+
+        let config = StarkConfig::standard_fast_config();
+        let num_rows = 1 << 5;
+        let stark = S::new(num_rows);
+        test_stark_low_degree(stark)
+    }
 }
@@ -12,6 +12,7 @@ mod get_challenges;
 pub mod proof;
 pub mod prover;
 pub mod stark;
+pub mod stark_testing;
 pub mod vars;
 pub mod verifier;

@@ -11,7 +11,7 @@ use plonky2::plonk::config::GenericConfig;
 use plonky2::timed;
 use plonky2::util::timing::TimingTree;
 use plonky2::util::transpose;
-use plonky2_util::log2_strict;
+use plonky2_util::{log2_ceil, log2_strict};
 use rayon::prelude::*;

 use crate::config::StarkConfig;
@@ -82,7 +82,7 @@ where
         .flat_map(|mut quotient_poly| {
             quotient_poly.trim();
             quotient_poly
-                .pad(degree << rate_bits)
+                .pad(degree * stark.quotient_degree_factor())
                 .expect("Quotient has failed, the vanishing polynomial is not divisible by `Z_H");
             // Split quotient into degree-n chunks.
             quotient_poly.chunks(degree)
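The "split quotient into degree-n chunks" step above relies on the identity t(x) = sum_i x^(n*i) * t_i(x), where the t_i are the degree-n chunks; padding to degree * quotient_degree_factor() makes the chunk count exactly quotient_degree_factor(). A standalone sketch of that identity, with wrapping u64 arithmetic standing in for field elements and toy sizes not taken from the diff:

fn main() {
    let degree = 8usize;                 // n, the trace length (toy value)
    let quotient_degree_factor = 3usize; // number of chunks (toy value)
    let coeffs: Vec<u64> = (0..(degree * quotient_degree_factor) as u64).collect();

    // Split the coefficient vector into `quotient_degree_factor` chunks of length `degree`,
    // mirroring what `quotient_poly.chunks(degree)` does.
    let chunks: Vec<&[u64]> = coeffs.chunks(degree).collect();
    assert_eq!(chunks.len(), quotient_degree_factor);

    // Horner evaluation; coefficient 0 is the constant term.
    let x = 5u64;
    let eval = |c: &[u64]| c.iter().rev().fold(0u64, |acc, &a| acc.wrapping_mul(x).wrapping_add(a));

    // Evaluating the whole quotient equals combining the chunk evaluations with powers of x^n,
    // which is what the verifier later undoes with `reduce_with_powers(chunk, zeta^n)`.
    let x_n = x.wrapping_pow(degree as u32);
    let via_chunks = chunks
        .iter()
        .rev()
        .fold(0u64, |acc, c| acc.wrapping_mul(x_n).wrapping_add(eval(c)));
    assert_eq!(eval(&coeffs), via_chunks);
    println!("t(x) reconstructed from {} chunks of length {}", chunks.len(), degree);
}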
@@ -123,7 +123,7 @@ where
         timing,
         "compute openings proof",
         PolynomialBatch::prove_openings(
-            &S::fri_instance(zeta, g, rate_bits, config.num_challenges),
+            &stark.fri_instance(zeta, g, rate_bits, config.num_challenges),
             initial_merkle_trees,
             &mut challenger,
             &fri_params,
@@ -145,8 +145,6 @@ where

 /// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`,
 /// where the `C_i`s are the Stark constraints.
-// TODO: This won't work for the Fibonacci example because the constraints wrap around the subgroup.
-// The denominator should be the vanishing polynomial of `H` without its last element.
 fn compute_quotient_polys<F, C, S, const D: usize>(
     stark: &S,
     trace_commitment: &PolynomialBatch<F, C, D>,
@@ -164,34 +162,44 @@ where
 {
     let degree = 1 << degree_bits;

+    let quotient_degree_bits = log2_ceil(stark.quotient_degree_factor());
+    assert!(
+        quotient_degree_bits <= rate_bits,
+        "Having constraints of degree higher than the rate is not supported yet."
+    );
+    let step = 1 << (rate_bits - quotient_degree_bits);
+    // When opening the `Z`s polys at the "next" point, need to look at the point `next_step` steps away.
+    let next_step = 1 << quotient_degree_bits;
+
     // Evaluation of the first Lagrange polynomial on the LDE domain.
     let lagrange_first = {
         let mut evals = PolynomialValues::new(vec![F::ZERO; degree]);
         evals.values[0] = F::ONE;
-        evals.lde_onto_coset(rate_bits)
+        evals.lde_onto_coset(quotient_degree_bits)
     };
     // Evaluation of the last Lagrange polynomial on the LDE domain.
     let lagrange_last = {
         let mut evals = PolynomialValues::new(vec![F::ZERO; degree]);
         evals.values[degree - 1] = F::ONE;
-        evals.lde_onto_coset(rate_bits)
+        evals.lde_onto_coset(quotient_degree_bits)
     };

-    let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, rate_bits);
+    let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, quotient_degree_bits);

     // Retrieve the LDE values at index `i`.
     let get_at_index = |comm: &PolynomialBatch<F, C, D>, i: usize| -> [F; S::COLUMNS] {
-        comm.get_lde_values(i).try_into().unwrap()
+        comm.get_lde_values(i * step).try_into().unwrap()
     };
     // Last element of the subgroup.
     let last = F::primitive_root_of_unity(degree_bits).inverse();
+    let size = degree << quotient_degree_bits;
     let coset = F::cyclic_subgroup_coset_known_order(
-        F::primitive_root_of_unity(degree_bits + rate_bits),
+        F::primitive_root_of_unity(degree_bits + quotient_degree_bits),
         F::coset_shift(),
-        degree << rate_bits,
+        size,
     );

-    let quotient_values = (0..degree << rate_bits)
+    let quotient_values = (0..size)
         .into_par_iter()
         .map(|i| {
             // TODO: Set `P` to a genuine `PackedField` here.
@@ -203,10 +211,7 @@ where
             );
             let vars = StarkEvaluationVars::<F, F, { S::COLUMNS }, { S::PUBLIC_INPUTS }> {
                 local_values: &get_at_index(trace_commitment, i),
-                next_values: &get_at_index(
-                    trace_commitment,
-                    (i + (1 << rate_bits)) % (degree << rate_bits),
-                ),
+                next_values: &get_at_index(trace_commitment, (i + next_step) % size),
                 public_inputs: &public_inputs,
             };
             stark.eval_packed_base(vars, &mut consumer);
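One piece of background the hunk above leans on: `lde_onto_coset` and `ZeroPolyOnCoset` evaluate everything on a coset g*H of the larger subgroup rather than on the subgroup itself, because the vanishing polynomial Z_H(x) = x^n - 1 is zero on H but nonzero everywhere on the shifted coset, so the division by Z_H is always well defined there. A toy standalone check of that fact over the prime field with p = 17 (values chosen purely for illustration, not taken from plonky2):

fn main() {
    const P: u64 = 17;
    // Modular exponentiation by square-and-multiply.
    let pow = |b: u64, e: u64| -> u64 {
        let (mut acc, mut b, mut e) = (1u64, b % P, e);
        while e > 0 {
            if e & 1 == 1 {
                acc = acc * b % P;
            }
            b = b * b % P;
            e >>= 1;
        }
        acc
    };

    let n = 4u64;
    let h: Vec<u64> = (0..n).map(|i| pow(4, i)).collect(); // H = {1, 4, 16, 13}, order 4
    let shift = 3u64;                                      // coset shift, not an element of H

    // Z_H(x) = x^n - 1 vanishes at every point of H ...
    assert!(h.iter().all(|&x| (pow(x, n) + P - 1) % P == 0));
    // ... but at no point of the coset shift * H, so dividing by Z_H there is safe.
    assert!(h.iter().all(|&x| (pow(shift * x % P, n) + P - 1) % P != 0));
    println!("Z_H vanishes on H and is nonzero on the coset {shift} * H");
}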
@@ -62,9 +62,18 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         yield_constr: &mut RecursiveConstraintConsumer<F, D>,
     );
+
+    /// The maximum constraint degree.
+    fn constraint_degree(&self) -> usize;
+
+    /// The maximum constraint degree.
+    fn quotient_degree_factor(&self) -> usize {
+        1.max(self.constraint_degree() - 1)
+    }
+
     /// Computes the FRI instance used to prove this Stark.
     // TODO: Permutation polynomials.
     fn fri_instance(
         &self,
         zeta: F::Extension,
         g: F::Extension,
         rate_bits: usize,
@@ -72,7 +81,8 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
     ) -> FriInstanceInfo<F, D> {
         let no_blinding_oracle = FriOracleInfo { blinding: false };
         let trace_info = FriPolynomialInfo::from_range(0, 0..Self::COLUMNS);
-        let quotient_info = FriPolynomialInfo::from_range(1, 0..(1 << rate_bits) * num_challenges);
+        let quotient_info =
+            FriPolynomialInfo::from_range(1, 0..self.quotient_degree_factor() * num_challenges);
         let zeta_batch = FriBatchInfo {
             point: zeta,
             polynomials: [trace_info.clone(), quotient_info].concat(),
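The default `quotient_degree_factor` and the new `quotient_info` range follow from a degree count: constraints of degree d evaluated on trace polynomials of degree n - 1 give a composition polynomial of degree at most d(n - 1); dividing by Z_H (degree n) leaves degree below (d - 1)n, so the quotient splits into at most max(1, d - 1) chunks of degree below n, and the second FRI oracle holds that many chunks per challenge. A standalone sanity check of the bookkeeping (toy trace length, not taken from the diff):

fn main() {
    let n: u64 = 1 << 10; // toy trace length
    for constraint_degree in 1..=5u64 {
        // Degree of `sum alpha^i C_i` when each C_i has degree `constraint_degree`
        // in trace polynomials of degree n - 1.
        let composition_degree = constraint_degree * (n - 1);
        // Dividing by Z_H, which has degree n.
        let quotient_degree = composition_degree.saturating_sub(n);
        let quotient_degree_factor = 1u64.max(constraint_degree.saturating_sub(1));
        // The quotient always fits into `quotient_degree_factor` chunks of degree < n.
        assert!(quotient_degree < quotient_degree_factor * n);
        println!(
            "constraint degree {constraint_degree}: quotient degree {quotient_degree} fits in {quotient_degree_factor} chunk(s)"
        );
    }
}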
starky/src/stark_testing.rs (new file, 95 lines)
@@ -0,0 +1,95 @@
+use anyhow::{ensure, Result};
+use plonky2::field::extension_field::Extendable;
+use plonky2::field::field_types::Field;
+use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
+use plonky2::hash::hash_types::RichField;
+use plonky2::util::transpose;
+use plonky2_util::{log2_ceil, log2_strict};
+
+use crate::constraint_consumer::ConstraintConsumer;
+use crate::stark::Stark;
+use crate::vars::StarkEvaluationVars;
+
+const WITNESS_SIZE: usize = 1 << 5;
+
+/// Tests that the constraints imposed by the given STARK are low-degree by applying them to random
+/// low-degree witness polynomials.
+pub fn test_stark_low_degree<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
+    stark: S,
+) -> Result<()>
+where
+    [(); S::COLUMNS]:,
+    [(); S::PUBLIC_INPUTS]:,
+{
+    let rate_bits = log2_ceil(stark.constraint_degree() + 1);
+
+    let trace_ldes = random_low_degree_matrix::<F>(S::COLUMNS, rate_bits);
+    let size = trace_ldes.len();
+    let public_inputs = F::rand_arr::<{ S::PUBLIC_INPUTS }>();
+
+    let lagrange_first = {
+        let mut evals = PolynomialValues::new(vec![F::ZERO; WITNESS_SIZE]);
+        evals.values[0] = F::ONE;
+        evals.lde(rate_bits)
+    };
+    let lagrange_last = {
+        let mut evals = PolynomialValues::new(vec![F::ZERO; WITNESS_SIZE]);
+        evals.values[WITNESS_SIZE - 1] = F::ONE;
+        evals.lde(rate_bits)
+    };
+
+    let last = F::primitive_root_of_unity(log2_strict(WITNESS_SIZE)).inverse();
+    let subgroup =
+        F::cyclic_subgroup_known_order(F::primitive_root_of_unity(log2_strict(size)), size);
+    let alpha = F::rand();
+    let constraint_evals = (0..size)
+        .map(|i| {
+            let vars = StarkEvaluationVars {
+                local_values: &trace_ldes[i].clone().try_into().unwrap(),
+                next_values: &trace_ldes[(i + (1 << rate_bits)) % size]
+                    .clone()
+                    .try_into()
+                    .unwrap(),
+                public_inputs: &public_inputs,
+            };
+
+            let mut consumer = ConstraintConsumer::<F>::new(
+                vec![alpha],
+                subgroup[i] - last,
+                lagrange_first.values[i],
+                lagrange_last.values[i],
+            );
+            stark.eval_packed_base(vars, &mut consumer);
+            consumer.accumulators()[0]
+        })
+        .collect::<Vec<_>>();
+
+    let constraint_eval_degree = PolynomialValues::new(constraint_evals).degree();
+    let maximum_degree = WITNESS_SIZE * stark.constraint_degree() - 1;
+
+    ensure!(
+        constraint_eval_degree <= maximum_degree,
+        "Expected degrees at most {} * {} - 1 = {}, actual {:?}",
+        WITNESS_SIZE,
+        stark.constraint_degree(),
+        maximum_degree,
+        constraint_eval_degree
+    );
+
+    Ok(())
+}
+
+fn random_low_degree_matrix<F: Field>(num_polys: usize, rate_bits: usize) -> Vec<Vec<F>> {
+    let polys = (0..num_polys)
+        .map(|_| random_low_degree_values(rate_bits))
+        .collect::<Vec<_>>();
+
+    transpose(&polys)
+}
+
+fn random_low_degree_values<F: Field>(rate_bits: usize) -> Vec<F> {
+    PolynomialCoeffs::new(F::rand_vec(WITNESS_SIZE))
+        .lde(rate_bits)
+        .fft()
+        .values
+}
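The degree bound that `test_stark_low_degree` checks works because evaluating degree-d constraints on witness polynomials of degree below WITNESS_SIZE yields a polynomial of degree at most WITNESS_SIZE * d - 1, and the LDE with rate_bits = log2_ceil(d + 1) provides at least one more evaluation point than that degree, enough to interpolate it exactly. A standalone check of the point count (assumes nothing beyond the constants visible in the new file):

fn main() {
    let witness_size = 1usize << 5; // WITNESS_SIZE in the new file
    for constraint_degree in 1..=8usize {
        // log2_ceil(constraint_degree + 1), spelled with std only.
        let rate_bits = (constraint_degree + 1).next_power_of_two().trailing_zeros() as usize;
        let lde_points = witness_size << rate_bits;
        let max_allowed_degree = witness_size * constraint_degree - 1;
        // A polynomial of degree <= max_allowed_degree is pinned down by
        // max_allowed_degree + 1 evaluations, and the LDE supplies at least that many.
        assert!(lde_points >= max_allowed_degree + 1);
        println!(
            "degree {constraint_degree}: {lde_points} LDE points >= {} required",
            max_allowed_degree + 1
        );
    }
}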
@@ -98,7 +98,7 @@ where
     // So to reconstruct `t(zeta)` we can compute `reduce_with_powers(chunk, zeta^n)` for each
     // `quotient_degree_factor`-sized chunk of the original evaluations.
     for (i, chunk) in quotient_polys_zeta
-        .chunks(1 << config.fri_config.rate_bits)
+        .chunks(stark.quotient_degree_factor())
         .enumerate()
     {
         ensure!(vanishing_polys_zeta[i] == z_h_zeta * reduce_with_powers(chunk, zeta_pow_deg));
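For the reconstruction mentioned in the comment above: with `num_challenges` random combinations, the flat list `quotient_polys_zeta` holds `quotient_degree_factor * num_challenges` openings laid out chunk-by-chunk per challenge, which is why `.chunks(stark.quotient_degree_factor())` recovers exactly the chunks belonging to challenge i. A standalone sketch of that layout (toy counts, not taken from the diff):

fn main() {
    let quotient_degree_factor = 3usize; // toy value
    let num_challenges = 2usize;         // toy value

    // Tag each opening with (challenge index, chunk index) to make the flat layout visible.
    let quotient_polys_zeta: Vec<(usize, usize)> = (0..num_challenges)
        .flat_map(|c| (0..quotient_degree_factor).map(move |j| (c, j)))
        .collect();
    assert_eq!(quotient_polys_zeta.len(), quotient_degree_factor * num_challenges);

    // Chunking by `quotient_degree_factor` groups the openings of one challenge together,
    // ready to be recombined with powers of zeta^n.
    for (i, chunk) in quotient_polys_zeta.chunks(quotient_degree_factor).enumerate() {
        assert!(chunk.iter().all(|&(c, _)| c == i));
    }
    println!("each chunk holds the openings of a single challenge");
}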
@@ -108,7 +108,7 @@ where
     let merkle_caps = &[proof.trace_cap, proof.quotient_polys_cap];

     verify_fri_proof::<F, C, D>(
-        &S::fri_instance(
+        &stark.fri_instance(
             challenges.stark_zeta,
             F::primitive_root_of_unity(degree_bits).into(),
             config.fri_config.rate_bits,
@@ -80,6 +80,10 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for SystemZero<F,
         self.eval_permutation_unit_recursively(builder, vars, yield_constr);
         todo!()
     }
+
+    fn constraint_degree(&self) -> usize {
+        3
+    }
 }

 #[cfg(test)]
@@ -93,6 +97,7 @@ mod tests {
     use starky::config::StarkConfig;
     use starky::prover::prove;
    use starky::stark::Stark;
+    use starky::stark_testing::test_stark_low_degree;
     use starky::verifier::verify;

     use crate::system_zero::SystemZero;
@@ -114,4 +119,16 @@ mod tests {

         verify(system, proof, &config)
     }
+
+    #[test]
+    #[ignore] // TODO
+    fn degree() -> Result<()> {
+        type F = GoldilocksField;
+        type C = PoseidonGoldilocksConfig;
+        const D: usize = 2;
+
+        type S = SystemZero<F, D>;
+        let system = S::default();
+        test_stark_low_degree(system)
+    }
 }