diff --git a/field/src/polynomial/mod.rs b/field/src/polynomial/mod.rs
index 4264c914..7fccb46e 100644
--- a/field/src/polynomial/mod.rs
+++ b/field/src/polynomial/mod.rs
@@ -57,6 +57,12 @@ impl<F: Field> PolynomialValues<F> {
         fft_with_options(coeffs, Some(rate_bits), None)
     }
 
+    /// Low-degree extend `Self` (seen as evaluations over the subgroup) onto a coset.
+    pub fn lde_onto_coset(self, rate_bits: usize) -> Self {
+        let coeffs = ifft(self).lde(rate_bits);
+        coeffs.coset_fft_with_options(F::coset_shift(), Some(rate_bits), None)
+    }
+
     pub fn degree(&self) -> usize {
         self.degree_plus_one()
             .checked_sub(1)
diff --git a/plonky2/src/fri/proof.rs b/plonky2/src/fri/proof.rs
index f96db781..1f9e6b16 100644
--- a/plonky2/src/fri/proof.rs
+++ b/plonky2/src/fri/proof.rs
@@ -253,10 +253,10 @@ impl<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> CompressedFriPr
             pow_witness,
             ..
         } = self;
-        let ProofChallenges {
+        let FriChallenges {
             fri_query_indices: indices,
             ..
-        } = challenges;
+        } = &challenges.fri_challenges;
         let mut fri_inferred_elements = fri_inferred_elements.0.into_iter();
         let cap_height = params.config.cap_height;
         let reduction_arity_bits = &params.reduction_arity_bits;
@@ -362,3 +362,16 @@ impl<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> CompressedFriPr
         }
     }
 }
+
+pub struct FriChallenges<F: RichField + Extendable<D>, const D: usize> {
+    // Scaling factor to combine polynomials.
+    pub fri_alpha: F::Extension,
+
+    // Betas used in the FRI commit phase reductions.
+    pub fri_betas: Vec<F::Extension>,
+
+    pub fri_pow_response: F,
+
+    // Indices at which the oracle is queried in FRI.
+    pub fri_query_indices: Vec<usize>,
+}
diff --git a/plonky2/src/fri/verifier.rs b/plonky2/src/fri/verifier.rs
index 40d1ab25..49cfa053 100644
--- a/plonky2/src/fri/verifier.rs
+++ b/plonky2/src/fri/verifier.rs
@@ -4,14 +4,13 @@ use plonky2_field::field_types::Field;
 use plonky2_field::interpolation::{barycentric_weights, interpolate};
 use plonky2_util::{log2_strict, reverse_index_bits_in_place};
 
-use crate::fri::proof::{FriInitialTreeProof, FriProof, FriQueryRound};
+use crate::fri::proof::{FriChallenges, FriInitialTreeProof, FriProof, FriQueryRound};
 use crate::fri::structure::{FriBatchInfo, FriInstanceInfo, FriOpenings};
 use crate::fri::{FriConfig, FriParams};
 use crate::hash::hash_types::RichField;
 use crate::hash::merkle_proofs::verify_merkle_proof;
 use crate::hash::merkle_tree::MerkleCap;
 use crate::plonk::config::{GenericConfig, Hasher};
-use crate::plonk::proof::{OpeningSet, ProofChallenges};
 use crate::util::reducing::ReducingFactor;
 use crate::util::reverse_bits;
 
@@ -57,15 +56,14 @@ pub(crate) fn fri_verify_proof_of_work<F: RichField + Extendable<D>, const D: us
     Ok(())
 }
 
-pub(crate) fn verify_fri_proof<
+pub fn verify_fri_proof<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     const D: usize,
 >(
     instance: &FriInstanceInfo<F, D>,
-    // Openings of the PLONK polynomials.
-    os: &OpeningSet<F, D>,
-    challenges: &ProofChallenges<F, D>,
+    openings: &FriOpenings<F, D>,
+    challenges: &FriChallenges<F, D>,
     initial_merkle_caps: &[MerkleCap<F, C::Hasher>],
     proof: &FriProof<F, C::Hasher, D>,
     params: &FriParams,
@@ -88,7 +86,7 @@ pub(crate) fn verify_fri_proof<
     );
 
     let precomputed_reduced_evals =
-        PrecomputedReducedOpenings::from_os_and_alpha(&os.to_fri_openings(), challenges.fri_alpha);
+        PrecomputedReducedOpenings::from_os_and_alpha(openings, challenges.fri_alpha);
     for (&x_index, round_proof) in challenges
         .fri_query_indices
         .iter()
@@ -171,7 +169,7 @@ fn fri_verifier_query_round<
     const D: usize,
 >(
     instance: &FriInstanceInfo<F, D>,
-    challenges: &ProofChallenges<F, D>,
+    challenges: &FriChallenges<F, D>,
     precomputed_reduced_evals: &PrecomputedReducedOpenings<F, D>,
     initial_merkle_caps: &[MerkleCap<F, C::Hasher>],
     proof: &FriProof<F, C::Hasher, D>,
diff --git a/plonky2/src/iop/challenger.rs b/plonky2/src/iop/challenger.rs
index b8ca4fb7..d7583646 100644
--- a/plonky2/src/iop/challenger.rs
+++ b/plonky2/src/iop/challenger.rs
@@ -2,7 +2,10 @@ use std::convert::TryInto;
 use std::marker::PhantomData;
 
 use plonky2_field::extension_field::{Extendable, FieldExtension};
+use plonky2_field::polynomial::PolynomialCoeffs;
 
+use crate::fri::proof::FriChallenges;
+use crate::fri::FriConfig;
 use crate::hash::hash_types::RichField;
 use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget};
 use crate::hash::hashing::{PlonkyPermutation, SPONGE_RATE, SPONGE_WIDTH};
@@ -10,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap;
 use crate::iop::ext_target::ExtensionTarget;
 use crate::iop::target::Target;
 use crate::plonk::circuit_builder::CircuitBuilder;
-use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher};
+use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericHashOut, Hasher};
 use crate::plonk::proof::{OpeningSet, OpeningSetTarget};
 
 /// Observes prover messages, and generates challenges by hashing the transcript, a la Fiat-Shamir.
@@ -152,6 +155,57 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
             .collect()
     }
 
+    pub fn fri_challenges<C: GenericConfig<D, F = F>, const D: usize>(
+        &mut self,
+        commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
+        final_poly: &PolynomialCoeffs<F::Extension>,
+        pow_witness: F,
+        degree_bits: usize,
+        config: &FriConfig,
+    ) -> FriChallenges<F, D>
+    where
+        F: RichField + Extendable<D>,
+    {
+        let num_fri_queries = config.num_query_rounds;
+        let lde_size = 1 << (degree_bits + config.rate_bits);
+        // Scaling factor to combine polynomials.
+        let fri_alpha = self.get_extension_challenge::<D>();
+
+        // Recover the random betas used in the FRI reductions.
+        let fri_betas = commit_phase_merkle_caps
+            .iter()
+            .map(|cap| {
+                self.observe_cap(cap);
+                self.get_extension_challenge::<D>()
+            })
+            .collect();
+
+        self.observe_extension_elements(&final_poly.coeffs);
+
+        let fri_pow_response = C::InnerHasher::hash(
+            &self
+                .get_hash()
+                .elements
+                .iter()
+                .copied()
+                .chain(Some(pow_witness))
+                .collect::<Vec<_>>(),
+            false,
+        )
+        .elements[0];
+
+        let fri_query_indices = (0..num_fri_queries)
+            .map(|_| self.get_challenge().to_canonical_u64() as usize % lde_size)
+            .collect();
+
+        FriChallenges {
+            fri_alpha,
+            fri_betas,
+            fri_pow_response,
+            fri_query_indices,
+        }
+    }
+
     /// Absorb any buffered inputs. After calling this, the input buffer will be empty.
     fn absorb_buffered_inputs(&mut self) {
         if self.input_buffer.is_empty() {
diff --git a/plonky2/src/plonk/get_challenges.rs b/plonky2/src/plonk/get_challenges.rs
index d28f29da..3167fef7 100644
--- a/plonky2/src/plonk/get_challenges.rs
+++ b/plonky2/src/plonk/get_challenges.rs
@@ -1,10 +1,9 @@
 use std::collections::HashSet;
 
-use itertools::Itertools;
 use plonky2_field::extension_field::Extendable;
 use plonky2_field::polynomial::PolynomialCoeffs;
 
-use crate::fri::proof::{CompressedFriProof, FriProof};
+use crate::fri::proof::{CompressedFriProof, FriChallenges, FriProof};
 use crate::fri::verifier::{compute_evaluation, fri_combine_initial, PrecomputedReducedOpenings};
 use crate::hash::hash_types::RichField;
 use crate::hash::merkle_tree::MerkleCap;
@@ -30,8 +29,6 @@ fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, cons
 ) -> anyhow::Result<ProofChallenges<F, D>> {
     let config = &common_data.config;
     let num_challenges = config.num_challenges;
-    let num_fri_queries = config.fri_config.num_query_rounds;
-    let lde_size = common_data.lde_size();
 
     let mut challenger = Challenger::<F, C::Hasher>::new();
 
@@ -51,45 +48,18 @@ fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, cons
 
     challenger.observe_opening_set(openings);
 
-    // Scaling factor to combine polynomials.
-    let fri_alpha = challenger.get_extension_challenge::<D>();
-
-    // Recover the random betas used in the FRI reductions.
-    let fri_betas = commit_phase_merkle_caps
-        .iter()
-        .map(|cap| {
-            challenger.observe_cap(cap);
-            challenger.get_extension_challenge::<D>()
-        })
-        .collect();
-
-    challenger.observe_extension_elements(&final_poly.coeffs);
-
-    let fri_pow_response = C::InnerHasher::hash(
-        &challenger
-            .get_hash()
-            .elements
-            .iter()
-            .copied()
-            .chain(Some(pow_witness))
-            .collect_vec(),
-        false,
-    )
-    .elements[0];
-
-    let fri_query_indices = (0..num_fri_queries)
-        .map(|_| challenger.get_challenge().to_canonical_u64() as usize % lde_size)
-        .collect();
-
     Ok(ProofChallenges {
         plonk_betas,
         plonk_gammas,
         plonk_alphas,
         plonk_zeta,
-        fri_alpha,
-        fri_betas,
-        fri_pow_response,
-        fri_query_indices,
+        fri_challenges: challenger.fri_challenges::<C, D>(
+            commit_phase_merkle_caps,
+            final_poly,
+            pow_witness,
+            common_data.degree_bits,
+            &config.fri_config,
+        ),
     })
 }
 
@@ -100,7 +70,10 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
         &self,
         common_data: &CommonCircuitData<F, C, D>,
     ) -> anyhow::Result<Vec<usize>> {
-        Ok(self.get_challenges(common_data)?.fri_query_indices)
+        Ok(self
+            .get_challenges(common_data)?
+            .fri_challenges
+            .fri_query_indices)
     }
 
     /// Computes all Fiat-Shamir challenges used in the Plonk proof.
@@ -179,9 +152,13 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
     ) -> FriInferredElements<F, D> {
         let ProofChallenges {
             plonk_zeta,
-            fri_alpha,
-            fri_betas,
-            fri_query_indices,
+            fri_challenges:
+                FriChallenges {
+                    fri_alpha,
+                    fri_betas,
+                    fri_query_indices,
+                    ..
+                },
             ..
         } = challenges;
         let mut fri_inferred_elements = Vec::new();
diff --git a/plonky2/src/plonk/mod.rs b/plonky2/src/plonk/mod.rs
index b2d1ed03..4f2fa4e1 100644
--- a/plonky2/src/plonk/mod.rs
+++ b/plonky2/src/plonk/mod.rs
@@ -4,7 +4,7 @@ pub mod config;
 pub(crate) mod copy_constraint;
 mod get_challenges;
 pub(crate) mod permutation_argument;
-pub(crate) mod plonk_common;
+pub mod plonk_common;
 pub mod proof;
 pub mod prover;
 pub mod recursive_verifier;
diff --git a/plonky2/src/plonk/plonk_common.rs b/plonky2/src/plonk/plonk_common.rs
index 74495198..519593b3 100644
--- a/plonky2/src/plonk/plonk_common.rs
+++ b/plonky2/src/plonk/plonk_common.rs
@@ -124,7 +124,7 @@ pub(crate) fn reduce_with_powers_multi<
     cumul
 }
 
-pub(crate) fn reduce_with_powers<'a, P: PackedField, T: IntoIterator<Item = &'a P>>(
+pub fn reduce_with_powers<'a, P: PackedField, T: IntoIterator<Item = &'a P>>(
     terms: T,
     alpha: P::Scalar,
 ) -> P
diff --git a/plonky2/src/plonk/proof.rs b/plonky2/src/plonk/proof.rs
index 7fbdc671..803e64d4 100644
--- a/plonky2/src/plonk/proof.rs
+++ b/plonky2/src/plonk/proof.rs
@@ -3,7 +3,7 @@ use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
 
 use crate::fri::oracle::PolynomialBatch;
-use crate::fri::proof::{CompressedFriProof, FriProof, FriProofTarget};
+use crate::fri::proof::{CompressedFriProof, FriChallenges, FriProof, FriProofTarget};
 use crate::fri::structure::{
     FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget,
 };
@@ -227,28 +227,19 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
 }
 
 pub(crate) struct ProofChallenges<F: RichField + Extendable<D>, const D: usize> {
-    // Random values used in Plonk's permutation argument.
+    /// Random values used in Plonk's permutation argument.
     pub plonk_betas: Vec<F>,
 
-    // Random values used in Plonk's permutation argument.
+    /// Random values used in Plonk's permutation argument.
    pub plonk_gammas: Vec<F>,
 
-    // Random values used to combine PLONK constraints.
+    /// Random values used to combine PLONK constraints.
     pub plonk_alphas: Vec<F>,
 
-    // Point at which the PLONK polynomials are opened.
+    /// Point at which the PLONK polynomials are opened.
     pub plonk_zeta: F::Extension,
 
-    // Scaling factor to combine polynomials.
-    pub fri_alpha: F::Extension,
-
-    // Betas used in the FRI commit phase reductions.
-    pub fri_betas: Vec<F::Extension>,
-
-    pub fri_pow_response: F,
-
-    // Indices at which the oracle is queried in FRI.
-    pub fri_query_indices: Vec<usize>,
+    pub fri_challenges: FriChallenges<F, D>,
 }
 
 /// Coset elements that can be inferred in the FRI reduction steps.
diff --git a/plonky2/src/plonk/verifier.rs b/plonky2/src/plonk/verifier.rs
index cbaec6d9..46d41bfe 100644
--- a/plonky2/src/plonk/verifier.rs
+++ b/plonky2/src/plonk/verifier.rs
@@ -91,8 +91,8 @@ pub(crate) fn verify_with_challenges<
 
     verify_fri_proof::<F, C, D>(
         &common_data.get_fri_instance(challenges.plonk_zeta),
-        &proof.openings,
-        &challenges,
+        &proof.openings.to_fri_openings(),
+        &challenges.fri_challenges,
         merkle_caps,
         &proof.opening_proof,
         &common_data.fri_params,
diff --git a/starky/src/constraint_consumer.rs b/starky/src/constraint_consumer.rs
index adb88e41..b7c9f399 100644
--- a/starky/src/constraint_consumer.rs
+++ b/starky/src/constraint_consumer.rs
@@ -14,6 +14,9 @@ pub struct ConstraintConsumer<P: PackedField> {
     /// Running sums of constraints that have been emitted so far, scaled by powers of alpha.
     constraint_accs: Vec<P>,
 
+    /// The evaluation of `X - g^(n-1)`.
+    z_last: P,
+
     /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
     /// with the first trace row, and zero at other points in the subgroup.
     lagrange_basis_first: P,
@@ -24,10 +27,16 @@
 }
 
 impl<P: PackedField> ConstraintConsumer<P> {
-    pub fn new(alphas: Vec<P::Scalar>, lagrange_basis_first: P, lagrange_basis_last: P) -> Self {
+    pub fn new(
+        alphas: Vec<P::Scalar>,
+        z_last: P,
+        lagrange_basis_first: P,
+        lagrange_basis_last: P,
+    ) -> Self {
         Self {
             constraint_accs: vec![P::ZEROS; alphas.len()],
             alphas,
+            z_last,
             lagrange_basis_first,
             lagrange_basis_last,
         }
@@ -41,31 +50,29 @@ impl<P: PackedField> ConstraintConsumer<P> {
         .collect()
     }
 
-    /// Add one constraint.
-    pub fn one(&mut self, constraint: P) {
+    /// Add one constraint valid on all rows except the last.
+    pub fn constraint(&mut self, constraint: P) {
+        self.constraint_wrapping(constraint * self.z_last);
+    }
+
+    /// Add one constraint on all rows.
+    pub fn constraint_wrapping(&mut self, constraint: P) {
         for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) {
             *acc *= alpha;
             *acc += constraint;
         }
     }
 
-    /// Add a series of constraints.
-    pub fn many(&mut self, constraints: impl IntoIterator<Item = P>) {
-        constraints
-            .into_iter()
-            .for_each(|constraint| self.one(constraint));
-    }
-
     /// Add one constraint, but first multiply it by a filter such that it will only apply to the
     /// first row of the trace.
-    pub fn one_first_row(&mut self, constraint: P) {
-        self.one(constraint * self.lagrange_basis_first);
+    pub fn constraint_first_row(&mut self, constraint: P) {
+        self.constraint_wrapping(constraint * self.lagrange_basis_first);
     }
 
     /// Add one constraint, but first multiply it by a filter such that it will only apply to the
     /// last row of the trace.
-    pub fn one_last_row(&mut self, constraint: P) {
-        self.one(constraint * self.lagrange_basis_last);
+    pub fn constraint_last_row(&mut self, constraint: P) {
+        self.constraint_wrapping(constraint * self.lagrange_basis_last);
     }
 }
 
@@ -76,6 +83,9 @@ pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: us
     /// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
     constraint_acc: ExtensionTarget<D>,
 
+    /// The evaluation of `X - g^(n-1)`.
+    z_last: ExtensionTarget<D>,
+
     /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
     /// with the first trace row, and zero at other points in the subgroup.
     lagrange_basis_first: ExtensionTarget<D>,
@@ -88,42 +98,45 @@ pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: us
 }
 
 impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> {
-    /// Add one constraint.
-    pub fn one(&mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>) {
+    /// Add one constraint valid on all rows except the last.
+    pub fn constraint(
+        &mut self,
+        builder: &mut CircuitBuilder<F, D>,
+        constraint: ExtensionTarget<D>,
+    ) {
+        let filtered_constraint = builder.mul_extension(constraint, self.z_last);
+        self.constraint_wrapping(builder, filtered_constraint);
+    }
+
+    /// Add one constraint valid on all rows.
+    pub fn constraint_wrapping(
+        &mut self,
+        builder: &mut CircuitBuilder<F, D>,
+        constraint: ExtensionTarget<D>,
+    ) {
         self.constraint_acc =
             builder.scalar_mul_add_extension(self.alpha, self.constraint_acc, constraint);
     }
 
-    /// Add a series of constraints.
-    pub fn many(
-        &mut self,
-        builder: &mut CircuitBuilder<F, D>,
-        constraints: impl IntoIterator<Item = ExtensionTarget<D>>,
-    ) {
-        constraints
-            .into_iter()
-            .for_each(|constraint| self.one(builder, constraint));
-    }
-
     /// Add one constraint, but first multiply it by a filter such that it will only apply to the
     /// first row of the trace.
-    pub fn one_first_row(
+    pub fn constraint_first_row(
         &mut self,
        builder: &mut CircuitBuilder<F, D>,
         constraint: ExtensionTarget<D>,
     ) {
         let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first);
-        self.one(builder, filtered_constraint);
+        self.constraint_wrapping(builder, filtered_constraint);
     }
 
     /// Add one constraint, but first multiply it by a filter such that it will only apply to the
     /// last row of the trace.
-    pub fn one_last_row(
+    pub fn constraint_last_row(
         &mut self,
         builder: &mut CircuitBuilder<F, D>,
         constraint: ExtensionTarget<D>,
     ) {
         let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last);
-        self.one(builder, filtered_constraint);
+        self.constraint_wrapping(builder, filtered_constraint);
     }
 }
diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs
index ea834e99..f3ffd8a2 100644
--- a/starky/src/fibonacci_stark.rs
+++ b/starky/src/fibonacci_stark.rs
@@ -12,6 +12,7 @@ use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
 /// Toy STARK system used for testing.
 /// Computes a Fibonacci sequence with state `[x0, x1]` using the state transition
 /// `x0 <- x1, x1 <- x0 + x1`.
+#[derive(Copy, Clone)]
 struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
     num_rows: usize,
     _phantom: PhantomData<F>,
@@ -59,14 +60,17 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStar
         P: PackedField<Scalar = FE>,
     {
         // Check public inputs.
-        yield_constr.one_first_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_X0]);
-        yield_constr.one_first_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_X1]);
-        yield_constr.one_last_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_RES]);
+        yield_constr
+            .constraint_first_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_X0]);
+        yield_constr
+            .constraint_first_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_X1]);
+        yield_constr
+            .constraint_last_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_RES]);
 
         // x0 <- x1
-        yield_constr.one(vars.next_values[0] - vars.local_values[1]);
+        yield_constr.constraint(vars.next_values[0] - vars.local_values[1]);
         // x1 <- x0 + x1
-        yield_constr.one(vars.next_values[1] - vars.local_values[0] - vars.local_values[1]);
+        yield_constr.constraint(vars.next_values[1] - vars.local_values[0] - vars.local_values[1]);
     }
 
     fn eval_ext_recursively(
@@ -89,6 +93,7 @@ mod tests {
     use crate::config::StarkConfig;
     use crate::fibonacci_stark::FibonacciStark;
     use crate::prover::prove;
+    use crate::verifier::verify;
 
     fn fibonacci(n: usize, x0: usize, x1: usize) -> usize {
         (0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
     }
@@ -110,14 +115,14 @@ mod tests {
         ];
         let stark = S::new(num_rows);
         let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
-        prove::<F, C, S, D>(
+        let proof = prove::<F, C, S, D>(
             stark,
-            config,
+            &config,
             trace,
             public_inputs,
             &mut TimingTree::default(),
         )?;
 
-        Ok(())
+        verify(stark, proof, &config)
     }
 }
diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs
new file mode 100644
index 00000000..79e1c032
--- /dev/null
+++ b/starky/src/get_challenges.rs
@@ -0,0 +1,200 @@
+use anyhow::Result;
+use plonky2::field::extension_field::Extendable;
+use plonky2::field::polynomial::PolynomialCoeffs;
+use plonky2::fri::proof::FriProof;
+use plonky2::hash::hash_types::RichField;
+use plonky2::hash::merkle_tree::MerkleCap;
+use plonky2::iop::challenger::Challenger;
+use plonky2::plonk::config::GenericConfig;
+
+use crate::config::StarkConfig;
+use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
+
+#[allow(clippy::too_many_arguments)]
+fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
+    trace_cap: &MerkleCap<F, C::Hasher>,
+    quotient_polys_cap: &MerkleCap<F, C::Hasher>,
+    openings: &StarkOpeningSet<F, D>,
+    commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
+    final_poly: &PolynomialCoeffs<F::Extension>,
+    pow_witness: F,
+    config: &StarkConfig,
+    degree_bits: usize,
+) -> Result<StarkProofChallenges<F, D>> {
+    let num_challenges = config.num_challenges;
+
+    let mut challenger = Challenger::<F, C::Hasher>::new();
+
+    challenger.observe_cap(trace_cap);
+    let stark_alphas = challenger.get_n_challenges(num_challenges);
+
+    challenger.observe_cap(quotient_polys_cap);
+    let stark_zeta = challenger.get_extension_challenge::<D>();
+
+    openings.observe(&mut challenger);
+
+    Ok(StarkProofChallenges {
+        stark_alphas,
+        stark_zeta,
+        fri_challenges: challenger.fri_challenges::<C, D>(
+            commit_phase_merkle_caps,
+            final_poly,
+            pow_witness,
+            degree_bits,
+            &config.fri_config,
+        ),
+    })
+}
+
+impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
+    StarkProofWithPublicInputs<F, C, D>
+{
+    pub(crate) fn fri_query_indices(
+        &self,
+        config: &StarkConfig,
+        degree_bits: usize,
+    ) -> anyhow::Result<Vec<usize>> {
+        Ok(self
+            .get_challenges(config, degree_bits)?
+            .fri_challenges
+            .fri_query_indices)
+    }
+
+    /// Computes all Fiat-Shamir challenges used in the STARK proof.
+    pub(crate) fn get_challenges(
+        &self,
+        config: &StarkConfig,
+        degree_bits: usize,
+    ) -> Result<StarkProofChallenges<F, D>> {
+        let StarkProof {
+            trace_cap,
+            quotient_polys_cap,
+            openings,
+            opening_proof:
+                FriProof {
+                    commit_phase_merkle_caps,
+                    final_poly,
+                    pow_witness,
+                    ..
+                },
+        } = &self.proof;
+
+        get_challenges::<F, C, D>(
+            trace_cap,
+            quotient_polys_cap,
+            openings,
+            commit_phase_merkle_caps,
+            final_poly,
+            *pow_witness,
+            config,
+            degree_bits,
+        )
+    }
+}
+
+// TODO: Deal with the compressed stuff.
+// impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
+//     CompressedProofWithPublicInputs<F, C, D>
+// {
+//     /// Computes all Fiat-Shamir challenges used in the Plonk proof.
+//     pub(crate) fn get_challenges(
+//         &self,
+//         common_data: &CommonCircuitData<F, C, D>,
+//     ) -> anyhow::Result<ProofChallenges<F, D>> {
+//         let CompressedProof {
+//             wires_cap,
+//             plonk_zs_partial_products_cap,
+//             quotient_polys_cap,
+//             openings,
+//             opening_proof:
+//                 CompressedFriProof {
+//                     commit_phase_merkle_caps,
+//                     final_poly,
+//                     pow_witness,
+//                     ..
+//                 },
+//         } = &self.proof;
+//
+//         get_challenges(
+//             self.get_public_inputs_hash(),
+//             wires_cap,
+//             plonk_zs_partial_products_cap,
+//             quotient_polys_cap,
+//             openings,
+//             commit_phase_merkle_caps,
+//             final_poly,
+//             *pow_witness,
+//             common_data,
+//         )
+//     }
+//
+//     /// Computes all coset elements that can be inferred in the FRI reduction steps.
+//     pub(crate) fn get_inferred_elements(
+//         &self,
+//         challenges: &ProofChallenges<F, D>,
+//         common_data: &CommonCircuitData<F, C, D>,
+//     ) -> FriInferredElements<F, D> {
+//         let ProofChallenges {
+//             plonk_zeta,
+//             fri_alpha,
+//             fri_betas,
+//             fri_query_indices,
+//             ..
+//         } = challenges;
+//         let mut fri_inferred_elements = Vec::new();
+//         // Holds the indices that have already been seen at each reduction depth.
+//         let mut seen_indices_by_depth =
+//             vec![HashSet::new(); common_data.fri_params.reduction_arity_bits.len()];
+//         let precomputed_reduced_evals = PrecomputedReducedOpenings::from_os_and_alpha(
+//             &self.proof.openings.to_fri_openings(),
+//             *fri_alpha,
+//         );
+//         let log_n = common_data.degree_bits + common_data.config.fri_config.rate_bits;
+//         // Simulate the proof verification and collect the inferred elements.
+//         // The content of the loop is basically the same as the `fri_verifier_query_round` function.
+//         for &(mut x_index) in fri_query_indices {
+//             let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR
+//                 * F::primitive_root_of_unity(log_n).exp_u64(reverse_bits(x_index, log_n) as u64);
+//             let mut old_eval = fri_combine_initial::<F, C, D>(
+//                 &common_data.get_fri_instance(*plonk_zeta),
+//                 &self
+//                     .proof
+//                     .opening_proof
+//                     .query_round_proofs
+//                     .initial_trees_proofs[&x_index],
+//                 *fri_alpha,
+//                 subgroup_x,
+//                 &precomputed_reduced_evals,
+//                 &common_data.fri_params,
+//             );
+//             for (i, &arity_bits) in common_data
+//                 .fri_params
+//                 .reduction_arity_bits
+//                 .iter()
+//                 .enumerate()
+//             {
+//                 let coset_index = x_index >> arity_bits;
+//                 if !seen_indices_by_depth[i].insert(coset_index) {
+//                     // If this index has already been seen, we can skip the rest of the reductions.
+//                     break;
+//                 }
+//                 fri_inferred_elements.push(old_eval);
+//                 let arity = 1 << arity_bits;
+//                 let mut evals = self.proof.opening_proof.query_round_proofs.steps[i][&coset_index]
+//                     .evals
+//                     .clone();
+//                 let x_index_within_coset = x_index & (arity - 1);
+//                 evals.insert(x_index_within_coset, old_eval);
+//                 old_eval = compute_evaluation(
+//                     subgroup_x,
+//                     x_index_within_coset,
+//                     arity_bits,
+//                     &evals,
+//                     fri_betas[i],
+//                 );
+//                 subgroup_x = subgroup_x.exp_power_of_2(arity_bits);
+//                 x_index = coset_index;
+//             }
+//         }
+//         FriInferredElements(fri_inferred_elements)
+//     }
+// }
diff --git a/starky/src/lib.rs b/starky/src/lib.rs
index 541950ab..e56c0ef6 100644
--- a/starky/src/lib.rs
+++ b/starky/src/lib.rs
@@ -8,10 +8,12 @@
 
 pub mod config;
 pub mod constraint_consumer;
+mod get_challenges;
 pub mod proof;
 pub mod prover;
 pub mod stark;
 pub mod vars;
+pub mod verifier;
 
 #[cfg(test)]
 pub mod fibonacci_stark;
diff --git a/starky/src/proof.rs b/starky/src/proof.rs
index 4218e71f..5f96f1f4 100644
--- a/starky/src/proof.rs
+++ b/starky/src/proof.rs
@@ -1,20 +1,34 @@
 use plonky2::field::extension_field::Extendable;
 use plonky2::fri::oracle::PolynomialBatch;
-use plonky2::fri::proof::{CompressedFriProof, FriProof};
+use plonky2::fri::proof::{CompressedFriProof, FriChallenges, FriProof};
+use plonky2::fri::structure::{FriOpeningBatch, FriOpenings};
 use plonky2::hash::hash_types::RichField;
 use plonky2::hash::merkle_tree::MerkleCap;
-use plonky2::plonk::config::GenericConfig;
+use plonky2::iop::challenger::Challenger;
+use plonky2::plonk::config::{GenericConfig, Hasher};
 use rayon::prelude::*;
 
 pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
     /// Merkle cap of LDEs of trace values.
     pub trace_cap: MerkleCap<F, C::Hasher>,
+    /// Merkle cap of LDEs of quotient polynomial values.
+    pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
     /// Purported values of each polynomial at the challenge point.
     pub openings: StarkOpeningSet<F, D>,
     /// A batch FRI argument for all openings.
     pub opening_proof: FriProof<F, C::Hasher, D>,
 }
 
+pub struct StarkProofWithPublicInputs<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    const D: usize,
+> {
+    pub proof: StarkProof<F, C, D>,
+    // TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`.
+    pub public_inputs: Vec<F>,
+}
+
 pub struct CompressedStarkProof<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     const D: usize,
@@ -28,11 +42,31 @@ pub struct CompressedStarkProof<
     pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
 }
 
+pub struct CompressedStarkProofWithPublicInputs<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    const D: usize,
+> {
+    pub proof: CompressedStarkProof<F, C, D>,
+    pub public_inputs: Vec<F>,
+}
+
+pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
+    /// Random values used to combine STARK constraints.
+    pub stark_alphas: Vec<F>,
+
+    /// Point at which the STARK polynomials are opened.
+    pub stark_zeta: F::Extension,
+
+    pub fri_challenges: FriChallenges<F, D>,
+}
+
 /// Purported values of each polynomial at the challenge point.
 pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
     pub local_values: Vec<F::Extension>,
     pub next_values: Vec<F::Extension>,
     pub permutation_zs: Vec<F::Extension>,
+    pub permutation_zs_right: Vec<F::Extension>,
     pub quotient_polys: Vec<F::Extension>,
 }
 
@@ -53,7 +87,50 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
             local_values: eval_commitment(zeta, trace_commitment),
             next_values: eval_commitment(zeta * g, trace_commitment),
             permutation_zs: vec![/*TODO*/],
+            permutation_zs_right: vec![/*TODO*/],
             quotient_polys: eval_commitment(zeta, quotient_commitment),
         }
     }
+
+    // TODO: Replace with an `observe_fri_openings` function.
+    // Note: Can't implement this directly on `Challenger` as it's in a different crate.
+    pub fn observe<H: Hasher<F>>(&self, challenger: &mut Challenger<F, H>) {
+        let StarkOpeningSet {
+            local_values,
+            next_values,
+            permutation_zs,
+            permutation_zs_right,
+            quotient_polys,
+        } = self;
+        for v in &[
+            local_values,
+            next_values,
+            permutation_zs,
+            permutation_zs_right,
+            quotient_polys,
+        ] {
+            challenger.observe_extension_elements(v);
+        }
+    }
+
+    pub(crate) fn to_fri_openings(&self) -> FriOpenings<F, D> {
+        let zeta_batch = FriOpeningBatch {
+            values: [
+                self.local_values.as_slice(),
+                self.quotient_polys.as_slice(),
+                self.permutation_zs.as_slice(),
+            ]
+            .concat(),
+        };
+        let zeta_right_batch = FriOpeningBatch {
+            values: [
+                self.next_values.as_slice(),
+                self.permutation_zs_right.as_slice(),
+            ]
+            .concat(),
+        };
+        FriOpenings {
+            batches: vec![zeta_batch, zeta_right_batch],
+        }
+    }
 }
diff --git a/starky/src/prover.rs b/starky/src/prover.rs
index e0652b24..d2c63e02 100644
--- a/starky/src/prover.rs
+++ b/starky/src/prover.rs
@@ -16,18 +16,17 @@ use rayon::prelude::*;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::ConstraintConsumer;
-use crate::proof::{StarkOpeningSet, StarkProof};
+use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
 use crate::stark::Stark;
 use crate::vars::StarkEvaluationVars;
 
-// TODO: Deal with public inputs.
 pub fn prove<F, C, S, const D: usize>(
     stark: S,
-    config: StarkConfig,
+    config: &StarkConfig,
     trace: Vec<[F; S::COLUMNS]>,
     public_inputs: [F; S::PUBLIC_INPUTS],
     timing: &mut TimingTree,
-) -> Result<StarkProof<F, C, D>>
+) -> Result<StarkProofWithPublicInputs<F, C, D>>
 where
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
@@ -101,7 +100,8 @@ where
             None,
         )
     );
-    challenger.observe_cap(&quotient_commitment.merkle_tree.cap);
+    let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
+    challenger.observe_cap(&quotient_polys_cap);
     let zeta = challenger.get_extension_challenge::<D>();
 
     // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
@@ -113,6 +113,7 @@ where
         "Opening point is in the subgroup."
     );
     let openings = StarkOpeningSet::new(zeta, g, &trace_commitment, &quotient_commitment);
+    openings.observe(&mut challenger);
 
     // TODO: Add permutation checks
     let initial_merkle_trees = &[&trace_commitment, &quotient_commitment];
 
@@ -122,18 +123,23 @@ where
         timing,
         "compute openings proof",
         PolynomialBatch::prove_openings(
-            &S::fri_instance(zeta, g, rate_bits),
+            &S::fri_instance(zeta, g, rate_bits, config.num_challenges),
             initial_merkle_trees,
             &mut challenger,
             &fri_params,
             timing,
         )
     );
-
-    Ok(StarkProof {
+    let proof = StarkProof {
         trace_cap,
+        quotient_polys_cap,
         openings,
         opening_proof,
+    };
+
+    Ok(StarkProofWithPublicInputs {
+        proof,
+        public_inputs: public_inputs.to_vec(),
     })
 }
 
@@ -157,27 +163,33 @@ where
     [(); S::PUBLIC_INPUTS]:,
 {
     let degree = 1 << degree_bits;
-    let points = F::two_adic_subgroup(degree_bits + rate_bits);
 
     // Evaluation of the first Lagrange polynomial on the LDE domain.
     let lagrange_first = {
         let mut evals = PolynomialValues::new(vec![F::ZERO; degree]);
         evals.values[0] = F::ONE;
-        evals.lde(rate_bits)
+        evals.lde_onto_coset(rate_bits)
     };
     // Evaluation of the last Lagrange polynomial on the LDE domain.
     let lagrange_last = {
         let mut evals = PolynomialValues::new(vec![F::ZERO; degree]);
         evals.values[degree - 1] = F::ONE;
-        evals.lde(rate_bits)
+        evals.lde_onto_coset(rate_bits)
     };
 
-    let z_h_on_coset = ZeroPolyOnCoset::new(degree_bits, rate_bits);
+    let z_h_on_coset = ZeroPolyOnCoset::<F>::new(degree_bits, rate_bits);
 
     // Retrieve the LDE values at index `i`.
     let get_at_index = |comm: &PolynomialBatch<F, C, D>, i: usize| -> [F; S::COLUMNS] {
         comm.get_lde_values(i).try_into().unwrap()
     };
 
+    // Last element of the subgroup.
+    let last = F::primitive_root_of_unity(degree_bits).inverse();
+    let coset = F::cyclic_subgroup_coset_known_order(
+        F::primitive_root_of_unity(degree_bits + rate_bits),
+        F::coset_shift(),
+        degree << rate_bits,
+    );
 
     let quotient_values = (0..degree << rate_bits)
         .into_par_iter()
@@ -185,17 +197,22 @@
             // TODO: Set `P` to a genuine `PackedField` here.
             let mut consumer = ConstraintConsumer::<F>::new(
                 alphas.clone(),
+                coset[i] - last,
                 lagrange_first.values[i],
                 lagrange_last.values[i],
             );
             let vars = StarkEvaluationVars::<F, F, { S::COLUMNS }, { S::PUBLIC_INPUTS }> {
                 local_values: &get_at_index(trace_commitment, i),
-                next_values: &get_at_index(trace_commitment, (i + 1) % (degree << rate_bits)),
+                next_values: &get_at_index(
+                    trace_commitment,
+                    (i + (1 << rate_bits)) % (degree << rate_bits),
+                ),
                 public_inputs: &public_inputs,
             };
             stark.eval_packed_base(vars, &mut consumer);
-            // TODO: Fix this once we a genuine `PackedField`.
+            // TODO: Fix this once we use a genuine `PackedField`.
             let mut constraints_evals = consumer.accumulators();
+            // We divide the constraint evaluations by `Z_H(x)`.
             let denominator_inv = z_h_on_coset.eval_inverse(i);
             for eval in &mut constraints_evals {
                 *eval *= denominator_inv;
diff --git a/starky/src/stark.rs b/starky/src/stark.rs
index f91d4fdd..00441240 100644
--- a/starky/src/stark.rs
+++ b/starky/src/stark.rs
@@ -9,6 +9,7 @@ use crate::vars::StarkEvaluationTargets;
 use crate::vars::StarkEvaluationVars;
 
 /// Represents a STARK system.
+// TODO: Add a `constraint_degree` fn that returns the maximum constraint degree.
 pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
     /// The total number of columns in the trace.
     const COLUMNS: usize;
@@ -67,10 +68,11 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
         zeta: F::Extension,
         g: F::Extension,
         rate_bits: usize,
+        num_challenges: usize,
     ) -> FriInstanceInfo<F, D> {
         let no_blinding_oracle = FriOracleInfo { blinding: false };
         let trace_info = FriPolynomialInfo::from_range(0, 0..Self::COLUMNS);
-        let quotient_info = FriPolynomialInfo::from_range(1, 0..1 << rate_bits);
+        let quotient_info = FriPolynomialInfo::from_range(1, 0..(1 << rate_bits) * num_challenges);
         let zeta_batch = FriBatchInfo {
             point: zeta,
             polynomials: [trace_info.clone(), quotient_info].concat(),
diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs
new file mode 100644
index 00000000..b91fe457
--- /dev/null
+++ b/starky/src/verifier.rs
@@ -0,0 +1,148 @@
+use anyhow::{ensure, Result};
+use plonky2::field::extension_field::{Extendable, FieldExtension};
+use plonky2::field::field_types::Field;
+use plonky2::fri::verifier::verify_fri_proof;
+use plonky2::hash::hash_types::RichField;
+use plonky2::plonk::config::GenericConfig;
+use plonky2::plonk::plonk_common::reduce_with_powers;
+use plonky2_util::log2_strict;
+
+use crate::config::StarkConfig;
+use crate::constraint_consumer::ConstraintConsumer;
+use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
+use crate::stark::Stark;
+use crate::vars::StarkEvaluationVars;
+
+pub fn verify<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+    const D: usize,
+>(
+    stark: S,
+    proof_with_pis: StarkProofWithPublicInputs<F, C, D>,
+    config: &StarkConfig,
+) -> Result<()>
+where
+    [(); S::COLUMNS]:,
+    [(); S::PUBLIC_INPUTS]:,
+{
+    let degree_bits = log2_strict(recover_degree(&proof_with_pis.proof, config));
+    let challenges = proof_with_pis.get_challenges(config, degree_bits)?;
+    verify_with_challenges(stark, proof_with_pis, challenges, degree_bits, config)
+}
+
+pub(crate) fn verify_with_challenges<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+    const D: usize,
+>(
+    stark: S,
+    proof_with_pis: StarkProofWithPublicInputs<F, C, D>,
+    challenges: StarkProofChallenges<F, D>,
+    degree_bits: usize,
+    config: &StarkConfig,
+) -> Result<()>
+where
+    [(); S::COLUMNS]:,
+    [(); S::PUBLIC_INPUTS]:,
+{
+    let StarkProofWithPublicInputs {
+        proof,
+        public_inputs,
+    } = proof_with_pis;
+    let StarkOpeningSet {
+        local_values,
+        next_values,
+        permutation_zs,
+        permutation_zs_right,
+        quotient_polys,
+    } = &proof.openings;
+    let vars = StarkEvaluationVars {
+        local_values: &local_values.to_vec().try_into().unwrap(),
+        next_values: &next_values.to_vec().try_into().unwrap(),
+        public_inputs: &public_inputs
+            .into_iter()
+            .map(F::Extension::from_basefield)
+            .collect::<Vec<_>>()
+            .try_into()
+            .unwrap(),
+    };
+
+    let (l_1, l_last) = eval_l_1_and_l_last(degree_bits, challenges.stark_zeta);
+    let last = F::primitive_root_of_unity(degree_bits).inverse();
+    let z_last = challenges.stark_zeta - last.into();
+    let mut consumer = ConstraintConsumer::<F::Extension>::new(
+        challenges
+            .stark_alphas
+            .iter()
+            .map(|&alpha| F::Extension::from_basefield(alpha))
+            .collect::<Vec<_>>(),
+        z_last,
+        l_1,
+        l_last,
+    );
+    stark.eval_ext(vars, &mut consumer);
+    let vanishing_polys_zeta = consumer.accumulators();
+
+    // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
+    let quotient_polys_zeta = &proof.openings.quotient_polys;
+    let zeta_pow_deg = challenges.stark_zeta.exp_power_of_2(degree_bits);
+    let z_h_zeta = zeta_pow_deg - F::Extension::ONE;
+    // `quotient_polys_zeta` holds `num_challenges * quotient_degree_factor` evaluations.
+    // Each chunk of `quotient_degree_factor` holds the evaluations of `t_0(zeta),...,t_{quotient_degree_factor-1}(zeta)`
+    // where the "real" quotient polynomial is `t(X) = t_0(X) + t_1(X)*X^n + t_2(X)*X^{2n} + ...`.
+    // So to reconstruct `t(zeta)` we can compute `reduce_with_powers(chunk, zeta^n)` for each
+    // `quotient_degree_factor`-sized chunk of the original evaluations.
+    for (i, chunk) in quotient_polys_zeta
+        .chunks(1 << config.fri_config.rate_bits)
+        .enumerate()
+    {
+        ensure!(vanishing_polys_zeta[i] == z_h_zeta * reduce_with_powers(chunk, zeta_pow_deg));
+    }
+
+    // TODO: Permutation polynomials.
+    let merkle_caps = &[proof.trace_cap, proof.quotient_polys_cap];
+
+    verify_fri_proof::<F, C, D>(
+        &S::fri_instance(
+            challenges.stark_zeta,
+            F::primitive_root_of_unity(degree_bits).into(),
+            config.fri_config.rate_bits,
+            config.num_challenges,
+        ),
+        &proof.openings.to_fri_openings(),
+        &challenges.fri_challenges,
+        merkle_caps,
+        &proof.opening_proof,
+        &config.fri_params(degree_bits),
+    )?;
+
+    Ok(())
+}
+
+/// Evaluate the Lagrange polynomials `L_1` and `L_n` at a point `x`.
+/// `L_1(x) = (x^n - 1)/(n * (x - 1))`
+/// `L_n(x) = (x^n - 1)/(n * (g * x - 1))`, with `g` a generator of the subgroup.
+fn eval_l_1_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
+    let n = F::from_canonical_usize(1 << log_n);
+    let g = F::primitive_root_of_unity(log_n);
+    let z_x = x.exp_power_of_2(log_n) - F::ONE;
+    let invs = F::batch_multiplicative_inverse(&[n * (x - F::ONE), n * (g * x - F::ONE)]);
+
+    (z_x * invs[0], z_x * invs[1])
+}
+
+/// Recover the length of the trace from a STARK proof and a STARK config.
+fn recover_degree<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
+    proof: &StarkProof<F, C, D>,
+    config: &StarkConfig,
+) -> usize {
+    let initial_merkle_proof = &proof.opening_proof.query_round_proofs[0]
+        .initial_trees_proof
+        .evals_proofs[0]
+        .1;
+    let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
+    1 << (lde_bits - config.fri_config.rate_bits)
+}
diff --git a/system_zero/src/core_registers.rs b/system_zero/src/core_registers.rs
index 249c16a3..21faa288 100644
--- a/system_zero/src/core_registers.rs
+++ b/system_zero/src/core_registers.rs
@@ -55,16 +55,16 @@ impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
         let local_clock = vars.local_values[COL_CLOCK];
         let next_clock = vars.next_values[COL_CLOCK];
         let delta_clock = next_clock - local_clock;
-        yield_constr.one_first_row(local_clock);
-        yield_constr.one(delta_clock - FE::ONE);
+        yield_constr.constraint_first_row(local_clock);
+        yield_constr.constraint(delta_clock - FE::ONE);
 
         // The 16-bit table must start with 0, end with 2^16 - 1, and increment by 0 or 1.
         let local_range_16 = vars.local_values[COL_RANGE_16];
         let next_range_16 = vars.next_values[COL_RANGE_16];
         let delta_range_16 = next_range_16 - local_range_16;
-        yield_constr.one_first_row(local_range_16);
-        yield_constr.one_last_row(local_range_16 - FE::from_canonical_u64((1 << 16) - 1));
-        yield_constr.one(delta_range_16 * (delta_range_16 - FE::ONE));
+        yield_constr.constraint_first_row(local_range_16);
+        yield_constr.constraint_last_row(local_range_16 - FE::from_canonical_u64((1 << 16) - 1));
+        yield_constr.constraint(delta_range_16 * (delta_range_16 - FE::ONE));
 
         todo!()
     }
diff --git a/system_zero/src/permutation_unit.rs b/system_zero/src/permutation_unit.rs
index a490b49d..43883fca 100644
--- a/system_zero/src/permutation_unit.rs
+++ b/system_zero/src/permutation_unit.rs
@@ -53,7 +53,7 @@ impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
         // Assert that the computed output matches the outputs in the trace.
         for i in 0..SPONGE_WIDTH {
             let out = local_values[col_permutation_output(i)];
-            yield_constr.one(state[i] - out);
+            yield_constr.constraint(state[i] - out);
         }
     }
 
@@ -80,7 +80,7 @@ impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
         for i in 0..SPONGE_WIDTH {
             let out = local_values[col_permutation_output(i)];
             let diff = builder.sub_extension(state[i], out);
-            yield_constr.one(builder, diff);
+            yield_constr.constraint(builder, diff);
         }
     }
 }
diff --git a/system_zero/src/system_zero.rs b/system_zero/src/system_zero.rs
index 49e25e6c..31b8434f 100644
--- a/system_zero/src/system_zero.rs
+++ b/system_zero/src/system_zero.rs
@@ -16,6 +16,7 @@ use crate::public_input_layout::NUM_PUBLIC_INPUTS;
 /// We require at least 2^16 rows as it helps support efficient 16-bit range checks.
 const MIN_TRACE_ROWS: usize = 1 << 16;
 
+#[derive(Copy, Clone)]
 pub struct SystemZero<F: RichField + Extendable<D>, const D: usize> {
     _phantom: PhantomData<F>,
 }
@@ -92,6 +93,7 @@ mod tests {
     use starky::config::StarkConfig;
     use starky::prover::prove;
     use starky::stark::Stark;
+    use starky::verifier::verify;
 
     use crate::system_zero::SystemZero;
 
@@ -108,8 +110,8 @@ mod tests {
         let config = StarkConfig::standard_fast_config();
         let mut timing = TimingTree::new("prove", Level::Debug);
         let trace = system.generate_trace();
-        prove::<F, C, S, D>(system, config, trace, public_inputs, &mut timing)?;
+        let proof = prove::<F, C, S, D>(system, &config, trace, public_inputs, &mut timing)?;
 
-        Ok(())
+        verify(system, proof, &config)
     }
 }
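
Reviewer note (editorial, not part of the patch): the closed forms used by `eval_l_1_and_l_last` in starky/src/verifier.rs, `L_1(x) = (x^n - 1)/(n(x - 1))` and `L_n(x) = (x^n - 1)/(n(gx - 1))`, can be sanity-checked against a direct evaluation of the Lagrange basis. The standalone Rust sketch below does this over the toy field F_257 with an order-8 subgroup; `modpow`, `inv`, and `lagrange` are ad-hoc helpers written for this sketch only and are not plonky2 APIs.

// Sanity check for the L_1 / L_n closed forms over F_257 (subgroup of order n = 8).
const P: u64 = 257;

fn modpow(mut b: u64, mut e: u64) -> u64 {
    // Square-and-multiply exponentiation mod P.
    let mut acc = 1;
    b %= P;
    while e > 0 {
        if e & 1 == 1 {
            acc = acc * b % P;
        }
        b = b * b % P;
        e >>= 1;
    }
    acc
}

// Fermat inverse; valid because P is prime.
fn inv(x: u64) -> u64 {
    modpow(x, P - 2)
}

fn main() {
    let n = 8u64;
    let g = modpow(3, (P - 1) / n); // 3 generates F_257^*, so g generates the order-8 subgroup.
    let x = 5u64; // arbitrary point outside the subgroup

    // Direct evaluation: L_i(x) = prod_{j != i} (x - g^j) / (g^i - g^j).
    let lagrange = |i: u64| -> u64 {
        let mut num = 1u64;
        let mut den = 1u64;
        for j in 0..n {
            if j != i {
                num = num * ((x + P - modpow(g, j)) % P) % P;
                den = den * ((modpow(g, i) + P - modpow(g, j)) % P) % P;
            }
        }
        num * inv(den) % P
    };

    let z_x = (modpow(x, n) + P - 1) % P; // Z(x) = x^n - 1

    // L_1(x) = (x^n - 1) / (n (x - 1)): the basis polynomial at g^0 = 1.
    let l_1 = z_x * inv(n * ((x + P - 1) % P) % P) % P;
    assert_eq!(l_1, lagrange(0));

    // L_n(x) = (x^n - 1) / (n (g x - 1)): the basis polynomial at g^(n-1) = g^(-1).
    let l_n = z_x * inv(n * ((g * x % P + P - 1) % P) % P) % P;
    assert_eq!(l_n, lagrange(n - 1));

    println!("closed forms match: L_1({x}) = {l_1}, L_n({x}) = {l_n}");
}

This is also why the verifier needs no interpolation: both Lagrange values come from a single evaluation of `Z(x) = x^n - 1` plus one batched inversion, which is exactly how `F::batch_multiplicative_inverse` is used in `eval_l_1_and_l_last`.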