mirror of
https://github.com/logos-storage/plonky2.git
synced 2026-01-05 07:13:08 +00:00
Prove Starks without constraints (#1552)
* Enable starks without constraints
* Clippy
* Add test stark without constraints
* Missing file
* Missing changes in the recursive side
* Fix bug with recursion
* Missing import
* Clippy
* Apply suggestions from code review

Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com>

* Address reviews
* Fix TODO
* Apply suggestions from code review

Co-authored-by: Linda Guiga <101227802+LindaGuiga@users.noreply.github.com>

* More reviews
* Fix bug in eval_helper_columns
* Apply suggestions from code review

Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com>

* Address reviews
* Allow <= blowup_factor + 1 constraints + reviews
* Add unconstrained Stark
* Missing file
* Remove asserts

---------

Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com>
Co-authored-by: Linda Guiga <101227802+LindaGuiga@users.noreply.github.com>
This commit is contained in:
parent
2a2becc415
commit
4f8e631550
@@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## Unreleased

 - Fix CTLs with exactly two looking tables ([#1555](https://github.com/0xPolygonZero/plonky2/pull/1555))
+- Make Starks without constraints provable ([#1552](https://github.com/0xPolygonZero/plonky2/pull/1552))

 ## [0.2.1] - 2024-03-01 (`starky` crate only)
@@ -88,9 +88,7 @@ impl<F: Field> PolynomialValues<F> {
     }

     pub fn degree(&self) -> usize {
-        self.degree_plus_one()
-            .checked_sub(1)
-            .expect("deg(0) is undefined")
+        self.degree_plus_one().saturating_sub(1)
     }

     pub fn degree_plus_one(&self) -> usize {
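Note on the hunk above: `degree_plus_one` returns 0 for the zero polynomial, so the old `checked_sub(1).expect("deg(0) is undefined")` panicked on it. Constraint-free Starks can legitimately produce all-zero evaluation vectors, so `deg(0)` is now clamped to 0 instead. A minimal standalone sketch of the new convention (not the upstream type, just the arithmetic):

```rust
// Standalone sketch: saturating_sub clamps the zero polynomial's degree at 0
// instead of panicking on underflow.
fn degree(degree_plus_one: usize) -> usize {
    degree_plus_one.saturating_sub(1)
}

fn main() {
    assert_eq!(degree(0), 0); // zero polynomial: previously a panic
    assert_eq!(degree(4), 3); // ordinary cubic
}
```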
@@ -1,3 +1,6 @@
+#[cfg(not(feature = "std"))]
+use alloc::vec;
+
 use crate::field::extension::Extendable;
 use crate::hash::hash_types::{HashOutTarget, RichField};
 use crate::plonk::circuit_builder::CircuitBuilder;
@@ -149,13 +152,16 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
         let cap_height = fri_params.config.cap_height;

         let salt = salt_size(common_data.fri_params.hiding);
-        let num_leaves_per_oracle = &[
+        let num_leaves_per_oracle = &mut vec![
             common_data.num_preprocessed_polys(),
             config.num_wires + salt,
             common_data.num_zs_partial_products_polys() + common_data.num_all_lookup_polys() + salt,
-            common_data.num_quotient_polys() + salt,
         ];

+        if common_data.num_quotient_polys() > 0 {
+            num_leaves_per_oracle.push(common_data.num_quotient_polys() + salt);
+        }
+
         ProofTarget {
             wires_cap: self.add_virtual_cap(cap_height),
             plonk_zs_partial_products_cap: self.add_virtual_cap(cap_height),
@@ -15,7 +15,6 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;

 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
 use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
-use crate::lookup::{Column, Lookup};
 use crate::stark::Stark;
 use crate::util::trace_rows_to_poly_values;
@@ -132,135 +131,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark
     }
 }

-/// Similar system than above, but with extra columns to illustrate the permutation argument.
-/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition
-/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`.
-/// Note: The `i, j` columns are the columns used to test the permutation argument.
-#[derive(Copy, Clone)]
-struct FibonacciWithPermutationStark<F: RichField + Extendable<D>, const D: usize> {
-    num_rows: usize,
-    _phantom: PhantomData<F>,
-}
-
-impl<F: RichField + Extendable<D>, const D: usize> FibonacciWithPermutationStark<F, D> {
-    // The first public input is `x0`.
-    const PI_INDEX_X0: usize = 0;
-    // The second public input is `x1`.
-    const PI_INDEX_X1: usize = 1;
-    // The third public input is the second element of the last row, which should be equal to the
-    // `num_rows`-th Fibonacci number.
-    const PI_INDEX_RES: usize = 2;
-
-    const fn new(num_rows: usize) -> Self {
-        Self {
-            num_rows,
-            _phantom: PhantomData,
-        }
-    }
-
-    /// Generate the trace using `x0, x1, 0, 1, 1` as initial state values.
-    fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
-        let mut trace_rows = (0..self.num_rows)
-            .scan([x0, x1, F::ZERO, F::ONE, F::ONE], |acc, _| {
-                let tmp = *acc;
-                acc[0] = tmp[1];
-                acc[1] = tmp[0] + tmp[1];
-                acc[2] = tmp[2] + F::ONE;
-                acc[3] = tmp[3] + F::ONE;
-                // acc[4] (i.e. frequency column) remains unchanged, as we're permuting a strictly monotonous sequence.
-                Some(tmp)
-            })
-            .collect::<Vec<_>>();
-        trace_rows[self.num_rows - 1][3] = F::ZERO; // So that column 2 and 3 are permutation of one another.
-        trace_rows_to_poly_values(trace_rows)
-    }
-}
-
-const FIBONACCI_PERM_COLUMNS: usize = 5;
-const FIBONACCI_PERM_PUBLIC_INPUTS: usize = 3;
-
-impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D>
-    for FibonacciWithPermutationStark<F, D>
-{
-    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, FIBONACCI_PERM_COLUMNS, FIBONACCI_PERM_PUBLIC_INPUTS>
-    where
-        FE: FieldExtension<D2, BaseField = F>,
-        P: PackedField<Scalar = FE>;
-
-    type EvaluationFrameTarget = StarkFrame<
-        ExtensionTarget<D>,
-        ExtensionTarget<D>,
-        FIBONACCI_PERM_COLUMNS,
-        FIBONACCI_PERM_PUBLIC_INPUTS,
-    >;
-
-    fn eval_packed_generic<FE, P, const D2: usize>(
-        &self,
-        vars: &Self::EvaluationFrame<FE, P, D2>,
-        yield_constr: &mut ConstraintConsumer<P>,
-    ) where
-        FE: FieldExtension<D2, BaseField = F>,
-        P: PackedField<Scalar = FE>,
-    {
-        let local_values = vars.get_local_values();
-        let next_values = vars.get_next_values();
-        let public_inputs = vars.get_public_inputs();
-
-        // Check public inputs.
-        yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
-        yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
-        yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);
-
-        // x0' <- x1
-        yield_constr.constraint_transition(next_values[0] - local_values[1]);
-        // x1' <- x0 + x1
-        yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
-    }
-
-    fn eval_ext_circuit(
-        &self,
-        builder: &mut CircuitBuilder<F, D>,
-        vars: &Self::EvaluationFrameTarget,
-        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
-    ) {
-        let local_values = vars.get_local_values();
-        let next_values = vars.get_next_values();
-        let public_inputs = vars.get_public_inputs();
-        // Check public inputs.
-        let pis_constraints = [
-            builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
-            builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
-            builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
-        ];
-        yield_constr.constraint_first_row(builder, pis_constraints[0]);
-        yield_constr.constraint_first_row(builder, pis_constraints[1]);
-        yield_constr.constraint_last_row(builder, pis_constraints[2]);
-
-        // x0' <- x1
-        let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
-        yield_constr.constraint_transition(builder, first_col_constraint);
-        // x1' <- x0 + x1
-        let second_col_constraint = {
-            let tmp = builder.sub_extension(next_values[1], local_values[0]);
-            builder.sub_extension(tmp, local_values[1])
-        };
-        yield_constr.constraint_transition(builder, second_col_constraint);
-    }
-
-    fn constraint_degree(&self) -> usize {
-        2
-    }
-
-    fn lookups(&self) -> Vec<Lookup<F>> {
-        vec![Lookup {
-            columns: vec![Column::single(2)],
-            table_column: Column::single(3),
-            frequencies_column: Column::single(4),
-            filter_columns: vec![None; 1],
-        }]
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use anyhow::Result;
@@ -274,7 +144,7 @@ mod tests {
     use plonky2::util::timing::TimingTree;

     use crate::config::StarkConfig;
-    use crate::fibonacci_stark::{FibonacciStark, FibonacciWithPermutationStark};
+    use crate::fibonacci_stark::FibonacciStark;
     use crate::proof::StarkProofWithPublicInputs;
     use crate::prover::prove;
     use crate::recursive_verifier::{
@@ -294,30 +164,15 @@ mod tests {
         const D: usize = 2;
         type C = PoseidonGoldilocksConfig;
         type F = <C as GenericConfig<D>>::F;
-        type S1 = FibonacciStark<F, D>;
-        type S2 = FibonacciWithPermutationStark<F, D>;
+        type S = FibonacciStark<F, D>;

         let config = StarkConfig::standard_fast_config();
         let num_rows = 1 << 5;
         let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];

-        // Test first STARK
-        let stark = S1::new(num_rows);
+        let stark = S::new(num_rows);
         let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
-        let proof = prove::<F, C, S1, D>(
-            stark,
-            &config,
-            trace,
-            &public_inputs,
-            &mut TimingTree::default(),
-        )?;
-
-        verify_stark_proof(stark, proof, &config)?;
-
-        // Test second STARK
-        let stark = S2::new(num_rows);
-        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
-        let proof = prove::<F, C, S2, D>(
+        let proof = prove::<F, C, S, D>(
             stark,
             &config,
             trace,
@@ -333,14 +188,10 @@ mod tests {
         const D: usize = 2;
         type C = PoseidonGoldilocksConfig;
         type F = <C as GenericConfig<D>>::F;
-        type S1 = FibonacciStark<F, D>;
-        type S2 = FibonacciWithPermutationStark<F, D>;
+        type S = FibonacciStark<F, D>;

         let num_rows = 1 << 5;
-        let stark = S1::new(num_rows);
-        test_stark_low_degree(stark)?;
-
-        let stark = S2::new(num_rows);
+        let stark = S::new(num_rows);
         test_stark_low_degree(stark)
     }
@@ -349,14 +200,11 @@ mod tests {
         const D: usize = 2;
         type C = PoseidonGoldilocksConfig;
         type F = <C as GenericConfig<D>>::F;
-        type S1 = FibonacciStark<F, D>;
-        type S2 = FibonacciWithPermutationStark<F, D>;
+        type S = FibonacciStark<F, D>;

         let num_rows = 1 << 5;
-        let stark = S1::new(num_rows);
-        test_stark_circuit_constraints::<F, C, S1, D>(stark)?;
-        let stark = S2::new(num_rows);
-        test_stark_circuit_constraints::<F, C, S2, D>(stark)
+        let stark = S::new(num_rows);
+        test_stark_circuit_constraints::<F, C, S, D>(stark)
     }

     #[test]
@@ -365,17 +213,16 @@ mod tests {
         const D: usize = 2;
         type C = PoseidonGoldilocksConfig;
         type F = <C as GenericConfig<D>>::F;
-        type S1 = FibonacciStark<F, D>;
-        type S2 = FibonacciWithPermutationStark<F, D>;
+        type S = FibonacciStark<F, D>;

         let config = StarkConfig::standard_fast_config();
         let num_rows = 1 << 5;
         let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];

-        // Test first STARK
-        let stark = S1::new(num_rows);
+        let stark = S::new(num_rows);
         let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
-        let proof = prove::<F, C, S1, D>(
+        let proof = prove::<F, C, S, D>(
             stark,
             &config,
             trace,
@@ -384,21 +231,7 @@ mod tests {
         )?;
         verify_stark_proof(stark, proof.clone(), &config)?;

-        recursive_proof::<F, C, S1, C, D>(stark, proof, &config, true)?;
-
-        // Test second STARK
-        let stark = S2::new(num_rows);
-        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
-        let proof = prove::<F, C, S2, D>(
-            stark,
-            &config,
-            trace,
-            &public_inputs,
-            &mut TimingTree::default(),
-        )?;
-        verify_stark_proof(stark, proof.clone(), &config)?;
-
-        recursive_proof::<F, C, S2, C, D>(stark, proof, &config, true)
+        recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
     }

     fn recursive_proof<
@@ -28,7 +28,7 @@ fn get_challenges<F, C, const D: usize>(
     challenges: Option<&GrandProductChallengeSet<F>>,
     trace_cap: Option<&MerkleCap<F, C::Hasher>>,
     auxiliary_polys_cap: Option<&MerkleCap<F, C::Hasher>>,
-    quotient_polys_cap: &MerkleCap<F, C::Hasher>,
+    quotient_polys_cap: Option<&MerkleCap<F, C::Hasher>>,
     openings: &StarkOpeningSet<F, D>,
     commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
     final_poly: &PolynomialCoeffs<F::Extension>,
@@ -60,7 +62,9 @@ where

     let stark_alphas = challenger.get_n_challenges(num_challenges);

-    challenger.observe_cap(quotient_polys_cap);
+    if let Some(quotient_polys_cap) = quotient_polys_cap {
+        challenger.observe_cap(quotient_polys_cap);
+    }
     let stark_zeta = challenger.get_extension_challenge::<D>();

     challenger.observe_openings(&openings.to_fri_openings());
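The conditional `observe_cap` above is sound for Fiat-Shamir because the cap's presence is a function of the Stark's shape (whether it has any constraints), which prover and verifier both know: both sides take the same branch, so their transcripts stay identical. A toy model of that invariant (hypothetical `absorb` helper, not the real challenger API):

```rust
// Toy model: a transcript only absorbs the cap when it exists. As long as
// prover and verifier derive the cap's presence from the same public data
// (here, the Stark's constraint degree), their challenge streams agree.
fn absorb(transcript: &mut Vec<u64>, cap: Option<u64>) {
    if let Some(c) = cap {
        transcript.push(c);
    }
}

fn main() {
    let constraint_free = true; // both sides know the Stark's shape
    let cap = if constraint_free { None } else { Some(42) };
    let (mut prover, mut verifier) = (Vec::new(), Vec::new());
    absorb(&mut prover, cap);
    absorb(&mut verifier, cap);
    assert_eq!(prover, verifier); // transcripts stay in sync
}
```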
@@ -125,7 +127,7 @@ where
         challenges,
         trace_cap,
         auxiliary_polys_cap.as_ref(),
-        quotient_polys_cap,
+        quotient_polys_cap.as_ref(),
         openings,
         commit_phase_merkle_caps,
         final_poly,
@@ -168,7 +170,7 @@ fn get_challenges_target<F, C, const D: usize>(
     challenges: Option<&GrandProductChallengeSet<Target>>,
     trace_cap: Option<&MerkleCapTarget>,
     auxiliary_polys_cap: Option<&MerkleCapTarget>,
-    quotient_polys_cap: &MerkleCapTarget,
+    quotient_polys_cap: Option<&MerkleCapTarget>,
     openings: &StarkOpeningSetTarget<D>,
     commit_phase_merkle_caps: &[MerkleCapTarget],
     final_poly: &PolynomialCoeffsExtTarget<D>,
@@ -200,7 +202,10 @@ where

     let stark_alphas = challenger.get_n_challenges(builder, num_challenges);

-    challenger.observe_cap(quotient_polys_cap);
+    if let Some(cap) = quotient_polys_cap {
+        challenger.observe_cap(cap);
+    }
+
     let stark_zeta = challenger.get_extension_challenge(builder);

     challenger.observe_openings(&openings.to_fri_openings(builder.zero()));
@@ -266,7 +271,7 @@ impl<const D: usize> StarkProofTarget<D> {
             challenges,
             trace_cap,
             auxiliary_polys_cap.as_ref(),
-            quotient_polys_cap,
+            quotient_polys_cap.as_ref(),
             openings,
             commit_phase_merkle_caps,
             final_poly,
@@ -340,3 +340,7 @@ pub mod verifier;

 #[cfg(test)]
 pub mod fibonacci_stark;
+#[cfg(test)]
+pub mod permutation_stark;
+#[cfg(test)]
+pub mod unconstrained_stark;
@@ -431,7 +431,10 @@ impl<F: Field> Lookup<F> {
     pub fn num_helper_columns(&self, constraint_degree: usize) -> usize {
         // One helper column for each column batch of size `constraint_degree-1`,
         // then one column for the inverse of `table + challenge` and one for the `Z` polynomial.
-        ceil_div_usize(self.columns.len(), constraint_degree - 1) + 1
+        ceil_div_usize(
+            self.columns.len(),
+            constraint_degree.checked_sub(1).unwrap_or(1),
+        ) + 1
     }
 }
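The `checked_sub(1).unwrap_or(1)` guard above keeps the batch size at least 1 when `constraint_degree` is 0 or 1, where `constraint_degree - 1` would underflow. A self-contained sketch of the counting (hypothetical `ceil_div`, mirroring `ceil_div_usize`):

```rust
// Helper-column count for a lookup: one helper per batch of
// `constraint_degree - 1` looked columns, clamped to batches of 1 for
// degenerate degrees, plus one extra running-sum column.
fn ceil_div(a: usize, b: usize) -> usize {
    (a + b - 1) / b
}

fn num_helper_columns(num_columns: usize, constraint_degree: usize) -> usize {
    let batch = constraint_degree.checked_sub(1).unwrap_or(1);
    ceil_div(num_columns, batch) + 1
}

fn main() {
    assert_eq!(num_helper_columns(4, 3), 3); // batches of 2: 2 helpers + 1
    assert_eq!(num_helper_columns(4, 0), 5); // degree 0: batches of 1, no underflow
}
```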
@@ -576,11 +579,6 @@ pub(crate) fn lookup_helper_columns<F: Field>(
     challenge: F,
     constraint_degree: usize,
 ) -> Vec<PolynomialValues<F>> {
-    assert!(
-        constraint_degree == 2 || constraint_degree == 3,
-        "TODO: Allow other constraint degrees."
-    );
-
     assert_eq!(lookup.columns.len(), lookup.filter_columns.len());

     let num_total_logup_entries = trace_poly_values[0].values.len() * lookup.columns.len();
@@ -666,11 +664,11 @@ pub(crate) fn eval_helper_columns<F, FE, P, const D: usize, const D2: usize>(
     P: PackedField<Scalar = FE>,
 {
     if !helper_columns.is_empty() {
-        for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() {
-            let fs =
-                &filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()];
-            let h = helper_columns[j];
-
+        let chunk_size = constraint_degree.checked_sub(1).unwrap_or(1);
+        for (chunk, (fs, &h)) in columns
+            .chunks(chunk_size)
+            .zip(filter.chunks(chunk_size).zip(helper_columns))
+        {
             match chunk.len() {
                 2 => {
                     let combin0 = challenges.combine(&chunk[0]);
@@ -719,11 +717,11 @@ pub(crate) fn eval_helper_columns_circuit<F: RichField + Extendable<D>, const D:
     consumer: &mut RecursiveConstraintConsumer<F, D>,
 ) {
     if !helper_columns.is_empty() {
-        for (j, chunk) in columns.chunks(constraint_degree - 1).enumerate() {
-            let fs =
-                &filter[(constraint_degree - 1) * j..(constraint_degree - 1) * j + chunk.len()];
-            let h = helper_columns[j];
-
+        let chunk_size = constraint_degree.checked_sub(1).unwrap_or(1);
+        for (chunk, (fs, &h)) in columns
+            .chunks(chunk_size)
+            .zip(filter.chunks(chunk_size).zip(helper_columns))
+        {
             let one = builder.one_extension();
             match chunk.len() {
                 2 => {
@@ -774,11 +772,17 @@ pub(crate) fn get_helper_cols<F: Field>(
     challenge: GrandProductChallenge<F>,
     constraint_degree: usize,
 ) -> Vec<PolynomialValues<F>> {
-    let num_helper_columns = ceil_div_usize(columns_filters.len(), constraint_degree - 1);
+    let num_helper_columns = ceil_div_usize(
+        columns_filters.len(),
+        constraint_degree.checked_sub(1).unwrap_or(1),
+    );

     let mut helper_columns = Vec::with_capacity(num_helper_columns);

-    for mut cols_filts in &columns_filters.iter().chunks(constraint_degree - 1) {
+    for mut cols_filts in &columns_filters
+        .iter()
+        .chunks(constraint_degree.checked_sub(1).unwrap_or(1))
+    {
         let (first_col, first_filter) = cols_filts.next().unwrap();

         let mut filter_col = Vec::with_capacity(degree);
@@ -885,10 +889,6 @@ pub(crate) fn eval_packed_lookups_generic<F, FE, P, S, const D: usize, const D2:
     let local_values = vars.get_local_values();
     let next_values = vars.get_next_values();
     let degree = stark.constraint_degree();
-    assert!(
-        degree == 2 || degree == 3,
-        "TODO: Allow other constraint degrees."
-    );
     let mut start = 0;
     for lookup in lookups {
         let num_helper_columns = lookup.num_helper_columns(degree);
@@ -958,10 +958,6 @@ pub(crate) fn eval_ext_lookups_circuit<

     let local_values = vars.get_local_values();
     let next_values = vars.get_next_values();
-    assert!(
-        degree == 2 || degree == 3,
-        "TODO: Allow other constraint degrees."
-    );
     let mut start = 0;
     for lookup in lookups {
         let num_helper_columns = lookup.num_helper_columns(degree);
starky/src/permutation_stark.rs (new file, 236 lines)
//! An example of generating and verifying a STARK to highlight the use of the
//! permutation argument with logUp.

#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::marker::PhantomData;

use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkFrame;
use crate::lookup::{Column, Lookup};
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;

/// Computes a sequence with state `[i, j]` using the state transition
/// `i' <- i+1, j' <- j+1`.
/// Note: The `0, 1` columns are the columns used to test the permutation argument.
#[derive(Copy, Clone)]
struct PermutationStark<F: RichField + Extendable<D>, const D: usize> {
    num_rows: usize,
    _phantom: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> PermutationStark<F, D> {
    const fn new(num_rows: usize) -> Self {
        Self {
            num_rows,
            _phantom: PhantomData,
        }
    }

    /// Generate the trace using `x0, x0+1, 1` as initial state values.
    fn generate_trace(&self, x0: F) -> Vec<PolynomialValues<F>> {
        let mut trace_rows = (0..self.num_rows)
            .scan([x0, x0 + F::ONE, F::ONE], |acc, _| {
                let tmp = *acc;
                acc[0] = tmp[0] + F::ONE;
                acc[1] = tmp[1] + F::ONE;
                // acc[2] (i.e. frequency column) remains unchanged, as we're permuting a strictly monotonous sequence.
                Some(tmp)
            })
            .collect::<Vec<_>>();
        trace_rows[self.num_rows - 1][1] = x0; // So that column 0 and 1 are permutation of one another.
        trace_rows_to_poly_values(trace_rows)
    }
}

const PERM_COLUMNS: usize = 3;
const PERM_PUBLIC_INPUTS: usize = 1;

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for PermutationStark<F, D> {
    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, PERM_COLUMNS, PERM_PUBLIC_INPUTS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    type EvaluationFrameTarget =
        StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, PERM_COLUMNS, PERM_PUBLIC_INPUTS>;

    fn constraint_degree(&self) -> usize {
        0
    }

    fn lookups(&self) -> Vec<Lookup<F>> {
        vec![Lookup {
            columns: vec![Column::single(0)],
            table_column: Column::single(1),
            frequencies_column: Column::single(2),
            filter_columns: vec![None; 1],
        }]
    }

    // We don't constrain any register, for the sake of highlighting the permutation argument only.
    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        _vars: &Self::EvaluationFrame<FE, P, D2>,
        _yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
    }

    // We don't constrain any register, for the sake of highlighting the permutation argument only.
    fn eval_ext_circuit(
        &self,
        _builder: &mut CircuitBuilder<F, D>,
        _vars: &Self::EvaluationFrameTarget,
        _yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
    }
}

#[cfg(test)]
mod tests {
    use anyhow::Result;
    use plonky2::field::extension::Extendable;
    use plonky2::field::types::Field;
    use plonky2::hash::hash_types::RichField;
    use plonky2::iop::witness::PartialWitness;
    use plonky2::plonk::circuit_builder::CircuitBuilder;
    use plonky2::plonk::circuit_data::CircuitConfig;
    use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, PoseidonGoldilocksConfig};
    use plonky2::util::timing::TimingTree;

    use crate::config::StarkConfig;
    use crate::permutation_stark::PermutationStark;
    use crate::proof::StarkProofWithPublicInputs;
    use crate::prover::prove;
    use crate::recursive_verifier::{
        add_virtual_stark_proof_with_pis, set_stark_proof_with_pis_target,
        verify_stark_proof_circuit,
    };
    use crate::stark::Stark;
    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
    use crate::verifier::verify_stark_proof;

    #[test]
    fn test_permutation_stark() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = PermutationStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;

        let public_input = F::ZERO;

        let stark = S::new(num_rows);
        let trace = stark.generate_trace(public_input);
        let proof = prove::<F, C, S, D>(
            stark,
            &config,
            trace,
            &[public_input],
            &mut TimingTree::default(),
        )?;

        verify_stark_proof(stark, proof, &config)
    }

    #[test]
    fn test_permutation_stark_degree() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = PermutationStark<F, D>;

        let num_rows = 1 << 5;
        let stark = S::new(num_rows);
        test_stark_low_degree(stark)
    }

    #[test]
    fn test_permutation_stark_circuit() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = PermutationStark<F, D>;

        let num_rows = 1 << 5;
        let stark = S::new(num_rows);
        test_stark_circuit_constraints::<F, C, S, D>(stark)
    }

    #[test]
    fn test_recursive_stark_verifier() -> Result<()> {
        init_logger();
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = PermutationStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;
        let public_input = F::ZERO;

        let stark = S::new(num_rows);
        let trace = stark.generate_trace(public_input);
        let proof = prove::<F, C, S, D>(
            stark,
            &config,
            trace,
            &[public_input],
            &mut TimingTree::default(),
        )?;
        verify_stark_proof(stark, proof.clone(), &config)?;

        recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
    }

    fn recursive_proof<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        S: Stark<F, D> + Copy,
        InnerC: GenericConfig<D, F = F>,
        const D: usize,
    >(
        stark: S,
        inner_proof: StarkProofWithPublicInputs<F, InnerC, D>,
        inner_config: &StarkConfig,
        print_gate_counts: bool,
    ) -> Result<()>
    where
        InnerC::Hasher: AlgebraicHasher<F>,
    {
        let circuit_config = CircuitConfig::standard_recursion_config();
        let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
        let mut pw = PartialWitness::new();
        let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
        let pt =
            add_virtual_stark_proof_with_pis(&mut builder, &stark, inner_config, degree_bits, 0, 0);
        set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof, builder.zero());

        verify_stark_proof_circuit::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);

        if print_gate_counts {
            builder.print_gate_counts(0);
        }

        let data = builder.build::<C>();
        let proof = data.prove(pw)?;
        data.verify(proof)
    }

    fn init_logger() {
        let _ = env_logger::builder().format_timestamp(None).try_init();
    }
}
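A note on the permutation check this new file exercises: with `columns = [Column::single(0)]`, `table_column = Column::single(1)`, and `frequencies_column = Column::single(2)`, logUp verifies the multiset equality between columns 0 and 1 through a rational identity at a random challenge $\alpha$. A sketch of the identity, where $a_i$ are the looked values, $t_j$ the table values, and $m_j$ the frequencies:

```latex
\sum_i \frac{1}{\alpha + a_i} \;=\; \sum_j \frac{m_j}{\alpha + t_j}
```

Here the trace is built so that column 0 is a permutation of column 1, which is why the frequency column is identically 1.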
@@ -33,7 +33,7 @@ pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>,
     /// Optional merkle cap of LDEs of permutation Z values, if any.
     pub auxiliary_polys_cap: Option<MerkleCap<F, C::Hasher>>,
     /// Merkle cap of LDEs of trace values.
-    pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
+    pub quotient_polys_cap: Option<MerkleCap<F, C::Hasher>>,
     /// Purported values of each polynomial at the challenge point.
     pub openings: StarkOpeningSet<F, D>,
     /// A batch FRI argument for all openings.
@@ -61,7 +61,7 @@ pub struct StarkProofTarget<const D: usize> {
     /// Optional `Target` for the Merkle cap of lookup helper and CTL columns LDEs, if any.
     pub auxiliary_polys_cap: Option<MerkleCapTarget>,
     /// `Target` for the Merkle cap of quotient polynomial evaluations LDEs.
-    pub quotient_polys_cap: MerkleCapTarget,
+    pub quotient_polys_cap: Option<MerkleCapTarget>,
     /// `Target`s for the purported values of each polynomial at the challenge point.
     pub openings: StarkOpeningSetTarget<D>,
     /// `Target`s for the batch FRI argument for all openings.
@@ -76,7 +76,10 @@ impl<const D: usize> StarkProofTarget<D> {
         if let Some(poly) = &self.auxiliary_polys_cap {
             buffer.write_target_merkle_cap(poly)?;
         }
-        buffer.write_target_merkle_cap(&self.quotient_polys_cap)?;
+        buffer.write_bool(self.quotient_polys_cap.is_some())?;
+        if let Some(poly) = &self.quotient_polys_cap {
+            buffer.write_target_merkle_cap(poly)?;
+        }
         buffer.write_target_fri_proof(&self.opening_proof)?;
         self.openings.to_buffer(buffer)?;
         Ok(())
@@ -90,7 +93,11 @@ impl<const D: usize> StarkProofTarget<D> {
         } else {
             None
         };
-        let quotient_polys_cap = buffer.read_target_merkle_cap()?;
+        let quotient_polys_cap = if buffer.read_bool()? {
+            Some(buffer.read_target_merkle_cap()?)
+        } else {
+            None
+        };
         let opening_proof = buffer.read_target_fri_proof()?;
         let openings = StarkOpeningSetTarget::from_buffer(buffer)?;
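The `write_bool`/`read_bool` pair above is the presence-flag pattern now used for the optional quotient data: a boolean tag precedes the payload, so an absent cap round-trips unambiguously. A minimal standalone model of the pattern (plain byte buffers, not the real `Buffer` API):

```rust
// Flag-then-payload encoding of an Option, mirroring the to_buffer /
// from_buffer change: write `is_some()` first, then the value if present.
fn write_opt(buf: &mut Vec<u8>, value: Option<u32>) {
    buf.push(value.is_some() as u8);
    if let Some(v) = value {
        buf.extend_from_slice(&v.to_le_bytes());
    }
}

fn read_opt(buf: &[u8]) -> (Option<u32>, &[u8]) {
    match buf[0] {
        1 => {
            let v = u32::from_le_bytes(buf[1..5].try_into().unwrap());
            (Some(v), &buf[5..])
        }
        _ => (None, &buf[1..]),
    }
}

fn main() {
    let mut buf = Vec::new();
    write_opt(&mut buf, None); // constraint-free Stark: no quotient cap
    write_opt(&mut buf, Some(7));
    let (a, rest) = read_opt(&buf);
    let (b, _) = read_opt(rest);
    assert_eq!((a, b), (None, Some(7)));
}
```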
@@ -253,7 +260,7 @@ pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
     /// Openings of cross-table lookups `Z` polynomials at `1`.
     pub ctl_zs_first: Option<Vec<F>>,
     /// Openings of quotient polynomials at `zeta`.
-    pub quotient_polys: Vec<F::Extension>,
+    pub quotient_polys: Option<Vec<F::Extension>>,
 }

 impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
@@ -266,7 +273,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
         g: F,
         trace_commitment: &PolynomialBatch<F, C, D>,
         auxiliary_polys_commitment: Option<&PolynomialBatch<F, C, D>>,
-        quotient_commitment: &PolynomialBatch<F, C, D>,
+        quotient_commitment: Option<&PolynomialBatch<F, C, D>>,
         num_lookup_columns: usize,
         requires_ctl: bool,
         num_ctl_polys: &[usize],
@@ -298,7 +305,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
                 let total_num_helper_cols: usize = num_ctl_polys.iter().sum();
                 auxiliary_first.unwrap()[num_lookup_columns + total_num_helper_cols..].to_vec()
             }),
-            quotient_polys: eval_commitment(zeta, quotient_commitment),
+            quotient_polys: quotient_commitment.map(|c| eval_commitment(zeta, c)),
         }
     }
@@ -310,7 +317,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
                 .local_values
                 .iter()
                 .chain(self.auxiliary_polys.iter().flatten())
-                .chain(&self.quotient_polys)
+                .chain(self.quotient_polys.iter().flatten())
                 .copied()
                 .collect_vec(),
         };
@@ -360,7 +367,7 @@ pub struct StarkOpeningSetTarget<const D: usize> {
     /// `ExtensionTarget`s for the opening of lookups and cross-table lookups `Z` polynomials at 1.
     pub ctl_zs_first: Option<Vec<Target>>,
     /// `ExtensionTarget`s for the opening of quotient polynomials at `zeta`.
-    pub quotient_polys: Vec<ExtensionTarget<D>>,
+    pub quotient_polys: Option<Vec<ExtensionTarget<D>>>,
 }

 impl<const D: usize> StarkOpeningSetTarget<D> {
@@ -386,7 +393,10 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
         } else {
             buffer.write_bool(false)?;
         }
-        buffer.write_target_ext_vec(&self.quotient_polys)?;
+        buffer.write_bool(self.quotient_polys.is_some())?;
+        if let Some(quotient_polys) = &self.quotient_polys {
+            buffer.write_target_ext_vec(quotient_polys)?;
+        }
         Ok(())
     }
@@ -409,7 +419,11 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
         } else {
             None
         };
-        let quotient_polys = buffer.read_target_ext_vec::<D>()?;
+        let quotient_polys = if buffer.read_bool()? {
+            Some(buffer.read_target_ext_vec::<D>()?)
+        } else {
+            None
+        };

         Ok(Self {
             local_values,
@@ -428,7 +442,7 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
             .local_values
             .iter()
             .chain(self.auxiliary_polys.iter().flatten())
-            .chain(&self.quotient_polys)
+            .chain(self.quotient_polys.iter().flatten())
             .copied()
             .collect_vec(),
         };
@@ -119,6 +119,12 @@ where
         "FRI total reduction arity is too large.",
     );

+    let constraint_degree = stark.constraint_degree();
+    assert!(
+        constraint_degree <= (1 << rate_bits) + 1,
+        "The degree of the Stark constraints must be <= blowup_factor + 1"
+    );
+
     // Permutation arguments.

     let constraint_degree = stark.constraint_degree();
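Context for the new assertion: with trace length $n$ and constraints of degree $d$, the composed constraint polynomial has degree about $d \cdot n$, so after dividing by the vanishing polynomial $Z_H$ (degree $n$) the quotient has degree roughly $(d-1)\,n$. Its evaluations live on the low-degree extension of size $2^{\text{rate\_bits}} \cdot n$, which caps the supported degree. A rough counting sketch:

```latex
(d - 1)\,n \le 2^{\text{rate\_bits}} \cdot n
\quad\Longleftrightarrow\quad
d \le 2^{\text{rate\_bits}} + 1 = \text{blowup\_factor} + 1 .
```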
@@ -238,38 +244,43 @@ where
             config,
         )
     );
-    let all_quotient_chunks = timed!(
-        timing,
-        "split quotient polys",
-        quotient_polys
-            .into_par_iter()
-            .flat_map(|mut quotient_poly| {
-                quotient_poly
-                    .trim_to_len(degree * stark.quotient_degree_factor())
-                    .expect(
-                        "Quotient has failed, the vanishing polynomial is not divisible by Z_H",
-                    );
-                // Split quotient into degree-n chunks.
-                quotient_poly.chunks(degree)
-            })
-            .collect()
-    );
-    // Commit to the quotient polynomials.
-    let quotient_commitment = timed!(
-        timing,
-        "compute quotient commitment",
-        PolynomialBatch::from_coeffs(
-            all_quotient_chunks,
-            rate_bits,
-            false,
-            config.fri_config.cap_height,
-            timing,
-            None,
-        )
-    );
-    // Observe the quotient polynomials Merkle cap.
-    let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
-    challenger.observe_cap(&quotient_polys_cap);
+    let (quotient_commitment, quotient_polys_cap) = if let Some(quotient_polys) = quotient_polys {
+        let all_quotient_chunks = timed!(
+            timing,
+            "split quotient polys",
+            quotient_polys
+                .into_par_iter()
+                .flat_map(|mut quotient_poly| {
+                    quotient_poly
+                        .trim_to_len(degree * stark.quotient_degree_factor())
+                        .expect(
+                            "Quotient has failed, the vanishing polynomial is not divisible by Z_H",
+                        );
+                    // Split quotient into degree-n chunks.
+                    quotient_poly.chunks(degree)
+                })
+                .collect()
+        );
+        // Commit to the quotient polynomials.
+        let quotient_commitment = timed!(
+            timing,
+            "compute quotient commitment",
+            PolynomialBatch::from_coeffs(
+                all_quotient_chunks,
+                rate_bits,
+                false,
+                config.fri_config.cap_height,
+                timing,
+                None,
+            )
+        );
+        // Observe the quotient polynomials Merkle cap.
+        let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
+        challenger.observe_cap(&quotient_polys_cap);
+        (Some(quotient_commitment), Some(quotient_polys_cap))
+    } else {
+        (None, None)
+    };

     let zeta = challenger.get_extension_challenge::<D>();
@@ -288,7 +299,7 @@ where
         g,
         trace_commitment,
         auxiliary_polys_commitment.as_ref(),
-        &quotient_commitment,
+        quotient_commitment.as_ref(),
         stark.num_lookup_helper_columns(config),
         stark.requires_ctls(),
         &num_ctl_polys,
@@ -298,7 +309,7 @@ where

     let initial_merkle_trees = once(trace_commitment)
         .chain(&auxiliary_polys_commitment)
-        .chain(once(&quotient_commitment))
+        .chain(&quotient_commitment)
         .collect_vec();

     let opening_proof = timed!(
@@ -342,13 +353,17 @@ fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
     num_lookup_columns: usize,
     num_ctl_columns: &[usize],
     config: &StarkConfig,
-) -> Vec<PolynomialCoeffs<F>>
+) -> Option<Vec<PolynomialCoeffs<F>>>
 where
     F: RichField + Extendable<D>,
     P: PackedField<Scalar = F>,
     C: GenericConfig<D, F = F>,
     S: Stark<F, D>,
 {
+    if stark.quotient_degree_factor() == 0 {
+        return None;
+    }
+
     let degree = 1 << degree_bits;
     let rate_bits = config.fri_config.rate_bits;
     let total_num_helper_cols: usize = num_ctl_columns.iter().sum();
@@ -501,11 +516,13 @@ where
         })
         .collect::<Vec<_>>();

-    transpose(&quotient_values)
-        .into_par_iter()
-        .map(PolynomialValues::new)
-        .map(|values| values.coset_ifft(F::coset_shift()))
-        .collect()
+    Some(
+        transpose(&quotient_values)
+            .into_par_iter()
+            .map(PolynomialValues::new)
+            .map(|values| values.coset_ifft(F::coset_shift()))
+            .collect(),
+    )
 }

 /// Check that all constraints evaluate to zero on `H`.
@@ -162,18 +162,20 @@ pub fn verify_stark_proof_with_challenges_circuit<

     // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
     let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
-    for (i, chunk) in quotient_polys
-        .chunks(stark.quotient_degree_factor())
-        .enumerate()
-    {
-        let recombined_quotient = scale.reduce(chunk, builder);
-        let computed_vanishing_poly = builder.mul_extension(z_h_zeta, recombined_quotient);
-        builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly);
+    if let Some(quotient_polys) = quotient_polys {
+        for (i, chunk) in quotient_polys
+            .chunks(stark.quotient_degree_factor())
+            .enumerate()
+        {
+            let recombined_quotient = scale.reduce(chunk, builder);
+            let computed_vanishing_poly = builder.mul_extension(z_h_zeta, recombined_quotient);
+            builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly);
+        }
     }

     let merkle_caps = once(proof.trace_cap.clone())
         .chain(proof.auxiliary_polys_cap.clone())
-        .chain(once(proof.quotient_polys_cap.clone()))
+        .chain(proof.quotient_polys_cap.clone())
         .collect_vec();

     let fri_instance = stark.fri_instance_target(
@@ -258,16 +260,22 @@ pub fn add_virtual_stark_proof<F: RichField + Extendable<D>, S: Stark<F, D>, con
         (stark.uses_lookups() || stark.requires_ctls())
             .then(|| stark.num_lookup_helper_columns(config) + num_ctl_helper_zs),
     )
-    .chain(once(stark.quotient_degree_factor() * config.num_challenges))
+    .chain(
+        (stark.quotient_degree_factor() > 0)
+            .then(|| stark.quotient_degree_factor() * config.num_challenges),
+    )
     .collect_vec();

     let auxiliary_polys_cap = (stark.uses_lookups() || stark.requires_ctls())
         .then(|| builder.add_virtual_cap(cap_height));

+    let quotient_polys_cap =
+        (stark.constraint_degree() > 0).then(|| builder.add_virtual_cap(cap_height));
+
     StarkProofTarget {
         trace_cap: builder.add_virtual_cap(cap_height),
         auxiliary_polys_cap,
-        quotient_polys_cap: builder.add_virtual_cap(cap_height),
+        quotient_polys_cap,
         openings: add_virtual_stark_opening_set::<F, S, D>(
             builder,
             stark,
@@ -302,8 +310,11 @@ fn add_virtual_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, c
         ctl_zs_first: stark
             .requires_ctls()
             .then(|| builder.add_virtual_targets(num_ctl_zs)),
-        quotient_polys: builder
-            .add_virtual_extension_targets(stark.quotient_degree_factor() * config.num_challenges),
+        quotient_polys: (stark.constraint_degree() > 0).then(|| {
+            builder.add_virtual_extension_targets(
+                stark.quotient_degree_factor() * config.num_challenges,
+            )
+        }),
     }
 }
@@ -349,7 +360,11 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
     W: Witness<F>,
 {
     witness.set_cap_target(&proof_target.trace_cap, &proof.trace_cap);
-    witness.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap);
+    if let (Some(quotient_polys_cap_target), Some(quotient_polys_cap)) =
+        (&proof_target.quotient_polys_cap, &proof.quotient_polys_cap)
+    {
+        witness.set_cap_target(quotient_polys_cap_target, quotient_polys_cap);
+    }

     witness.set_fri_openings(
         &proof_target.openings.to_fri_openings(zero),
@@ -84,7 +84,10 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {

     /// Outputs the maximum quotient polynomial's degree factor of this [`Stark`].
     fn quotient_degree_factor(&self) -> usize {
-        1.max(self.constraint_degree() - 1)
+        match self.constraint_degree().checked_sub(1) {
+            Some(v) => 1.max(v),
+            None => 0,
+        }
     }
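This function is the hinge of the whole change: a degree-0 (constraint-free) Stark reports a quotient degree factor of 0, and every downstream step (quotient commitment, cap, openings, FRI oracles) gates itself on that. A standalone sketch of the new arithmetic:

```rust
// Quotient degree factor under the new rule: degree-d constraints, divided
// by Z_H, leave a quotient of roughly degree (d-1)*n, split into
// max(1, d-1) trace-sized chunks; degree 0 yields no quotient at all.
fn quotient_degree_factor(constraint_degree: usize) -> usize {
    match constraint_degree.checked_sub(1) {
        Some(v) => 1.max(v),
        None => 0, // constraint-free Stark: nothing to commit
    }
}

fn main() {
    assert_eq!(quotient_degree_factor(3), 2); // typical Stark
    assert_eq!(quotient_degree_factor(1), 1); // linear constraints still get one chunk
    assert_eq!(quotient_degree_factor(0), 0); // unconstrained / lookup-only
}
```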

     /// Outputs the number of quotient polynomials this [`Stark`] would require with
@@ -123,11 +126,17 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         };

         let num_quotient_polys = self.num_quotient_polys(config);
-        let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
-        oracles.push(FriOracleInfo {
-            num_polys: num_quotient_polys,
-            blinding: false,
-        });
+        let quotient_info = if num_quotient_polys > 0 {
+            let quotient_polys =
+                FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
+            oracles.push(FriOracleInfo {
+                num_polys: num_quotient_polys,
+                blinding: false,
+            });
+            quotient_polys
+        } else {
+            vec![]
+        };

         let zeta_batch = FriBatchInfo {
             point: zeta,
@@ -192,11 +201,17 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         };

         let num_quotient_polys = self.num_quotient_polys(config);
-        let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
-        oracles.push(FriOracleInfo {
-            num_polys: num_quotient_polys,
-            blinding: false,
-        });
+        let quotient_info = if num_quotient_polys > 0 {
+            let quotient_polys =
+                FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys);
+            oracles.push(FriOracleInfo {
+                num_polys: num_quotient_polys,
+                blinding: false,
+            });
+            quotient_polys
+        } else {
+            vec![]
+        };

         let zeta_batch = FriBatchInfoTarget {
             point: zeta,
@@ -58,7 +58,7 @@ pub fn test_stark_low_degree<F: RichField + Extendable<D>, S: Stark<F, D>, const
     .collect::<Vec<_>>();

     let constraint_eval_degree = PolynomialValues::new(constraint_evals).degree();
-    let maximum_degree = WITNESS_SIZE * stark.constraint_degree() - 1;
+    let maximum_degree = (WITNESS_SIZE * stark.constraint_degree()).saturating_sub(1);

     ensure!(
         constraint_eval_degree <= maximum_degree,
starky/src/unconstrained_stark.rs (new file, 201 lines)
//! An example of proving and verifying an empty STARK (that is,
//! a proof of knowledge of the trace).

#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::marker::PhantomData;

use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::StarkFrame;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;

/// A trace with arbitrary values.
#[derive(Copy, Clone)]
struct UnconstrainedStark<F: RichField + Extendable<D>, const D: usize> {
    num_rows: usize,
    _phantom: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> UnconstrainedStark<F, D> {
    const fn new(num_rows: usize) -> Self {
        Self {
            num_rows,
            _phantom: PhantomData,
        }
    }

    /// Generate the trace using two columns of random values.
    fn generate_trace(&self) -> Vec<PolynomialValues<F>> {
        let trace_rows = (0..self.num_rows)
            .map(|_| [F::rand(), F::rand()])
            .collect::<Vec<_>>();
        trace_rows_to_poly_values(trace_rows)
    }
}

const COLUMNS: usize = 2;
const PUBLIC_INPUTS: usize = 0;

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for UnconstrainedStark<F, D> {
    type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, COLUMNS, PUBLIC_INPUTS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;

    type EvaluationFrameTarget =
        StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, COLUMNS, PUBLIC_INPUTS>;

    fn constraint_degree(&self) -> usize {
        0
    }

    // We don't constrain any register.
    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        _vars: &Self::EvaluationFrame<FE, P, D2>,
        _yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
    }

    // We don't constrain any register.
    fn eval_ext_circuit(
        &self,
        _builder: &mut CircuitBuilder<F, D>,
        _vars: &Self::EvaluationFrameTarget,
        _yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
    }
}

#[cfg(test)]
mod tests {
    use anyhow::Result;
    use plonky2::field::extension::Extendable;
    use plonky2::hash::hash_types::RichField;
    use plonky2::iop::witness::PartialWitness;
    use plonky2::plonk::circuit_builder::CircuitBuilder;
    use plonky2::plonk::circuit_data::CircuitConfig;
    use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, PoseidonGoldilocksConfig};
    use plonky2::util::timing::TimingTree;

    use crate::config::StarkConfig;
    use crate::proof::StarkProofWithPublicInputs;
    use crate::prover::prove;
    use crate::recursive_verifier::{
        add_virtual_stark_proof_with_pis, set_stark_proof_with_pis_target,
        verify_stark_proof_circuit,
    };
    use crate::stark::Stark;
    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
    use crate::unconstrained_stark::UnconstrainedStark;
    use crate::verifier::verify_stark_proof;

    #[test]
    fn test_unconstrained_stark() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = UnconstrainedStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;

        let stark = S::new(num_rows);
        let trace = stark.generate_trace();
        let proof = prove::<F, C, S, D>(stark, &config, trace, &[], &mut TimingTree::default())?;

        verify_stark_proof(stark, proof, &config)
    }

    #[test]
    fn test_unconstrained_stark_degree() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = UnconstrainedStark<F, D>;

        let num_rows = 1 << 5;
        let stark = S::new(num_rows);
        test_stark_low_degree(stark)
    }

    #[test]
    fn test_unconstrained_stark_circuit() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = UnconstrainedStark<F, D>;

        let num_rows = 1 << 5;
        let stark = S::new(num_rows);
        test_stark_circuit_constraints::<F, C, S, D>(stark)
    }

    #[test]
    fn test_recursive_stark_verifier() -> Result<()> {
        init_logger();
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = UnconstrainedStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;

        let stark = S::new(num_rows);
        let trace = stark.generate_trace();
        let proof = prove::<F, C, S, D>(stark, &config, trace, &[], &mut TimingTree::default())?;
        verify_stark_proof(stark, proof.clone(), &config)?;

        recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
    }

    fn recursive_proof<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        S: Stark<F, D> + Copy,
        InnerC: GenericConfig<D, F = F>,
        const D: usize,
    >(
        stark: S,
        inner_proof: StarkProofWithPublicInputs<F, InnerC, D>,
        inner_config: &StarkConfig,
        print_gate_counts: bool,
    ) -> Result<()>
    where
        InnerC::Hasher: AlgebraicHasher<F>,
    {
        let circuit_config = CircuitConfig::standard_recursion_config();
        let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
        let mut pw = PartialWitness::new();
        let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
        let pt =
            add_virtual_stark_proof_with_pis(&mut builder, &stark, inner_config, degree_bits, 0, 0);
        set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof, builder.zero());

        verify_stark_proof_circuit::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);

        if print_gate_counts {
            builder.print_gate_counts(0);
        }

        let data = builder.build::<C>();
        let proof = data.prove(pw)?;
        data.verify(proof)
    }

    fn init_logger() {
        let _ = env_logger::builder().format_timestamp(None).try_init();
    }
}
@@ -164,8 +164,10 @@ where
     // where the "real" quotient polynomial is `t(X) = t_0(X) + t_1(X)*X^n + t_2(X)*X^{2n} + ...`.
     // So to reconstruct `t(zeta)` we can compute `reduce_with_powers(chunk, zeta^n)` for each
     // `quotient_degree_factor`-sized chunk of the original evaluations.
+
     for (i, chunk) in quotient_polys
-        .chunks(stark.quotient_degree_factor())
+        .iter()
+        .flat_map(|x| x.chunks(stark.quotient_degree_factor()))
         .enumerate()
     {
         ensure!(
@@ -176,7 +178,7 @@ where

     let merkle_caps = once(proof.trace_cap.clone())
         .chain(proof.auxiliary_polys_cap.clone())
-        .chain(once(proof.quotient_polys_cap.clone()))
+        .chain(proof.quotient_polys_cap.clone())
         .collect_vec();

     let num_ctl_zs = ctl_vars
@@ -245,11 +247,18 @@ where
     let cap_height = fri_params.config.cap_height;

     ensure!(trace_cap.height() == cap_height);
-    ensure!(quotient_polys_cap.height() == cap_height);
+    ensure!(
+        quotient_polys_cap.is_none()
+            || quotient_polys_cap.as_ref().map(|q| q.height()) == Some(cap_height)
+    );

     ensure!(local_values.len() == S::COLUMNS);
     ensure!(next_values.len() == S::COLUMNS);
-    ensure!(quotient_polys.len() == stark.num_quotient_polys(config));
+    ensure!(if let Some(quotient_polys) = quotient_polys {
+        quotient_polys.len() == stark.num_quotient_polys(config)
+    } else {
+        stark.num_quotient_polys(config) == 0
+    });

     check_lookup_options::<F, C, S, D>(
         stark,