From 8993270f80b8bad808c109b15807849ef42e5bf5 Mon Sep 17 00:00:00 2001
From: wborgeaud
Date: Sat, 29 Jan 2022 12:49:00 +0100
Subject: [PATCH] Progress

---
 starky/src/get_challenges.rs | 219 +++++++++++++++++++++++++++++++++++
 starky/src/lib.rs            |   2 +
 starky/src/proof.rs          |  57 ++++++++-
 starky/src/prover.rs         |  17 ++-
 starky/src/verifier.rs       | 105 +++++++++++++++++
 5 files changed, 394 insertions(+), 6 deletions(-)
 create mode 100644 starky/src/get_challenges.rs
 create mode 100644 starky/src/verifier.rs

diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs
new file mode 100644
index 00000000..f3c78701
--- /dev/null
+++ b/starky/src/get_challenges.rs
@@ -0,0 +1,219 @@
+use anyhow::Result;
+use plonky2::field::extension_field::Extendable;
+use plonky2::field::polynomial::PolynomialCoeffs;
+use plonky2::fri::proof::FriProof;
+use plonky2::hash::hash_types::RichField;
+use plonky2::hash::merkle_tree::MerkleCap;
+use plonky2::iop::challenger::Challenger;
+use plonky2::plonk::config::{GenericConfig, Hasher};
+
+use crate::config::StarkConfig;
+use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
+
+fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
+    trace_cap: &MerkleCap<F, C::Hasher>,
+    quotient_polys_cap: &MerkleCap<F, C::Hasher>,
+    openings: &StarkOpeningSet<F, D>,
+    commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
+    final_poly: &PolynomialCoeffs<F::Extension>,
+    pow_witness: F,
+    config: &StarkConfig,
+    degree_bits: usize,
+) -> Result<StarkProofChallenges<F, D>> {
+    let num_challenges = config.num_challenges;
+    let num_fri_queries = config.fri_config.num_query_rounds;
+    let lde_size = 1 << (degree_bits + config.fri_config.rate_bits);
+
+    let mut challenger = Challenger::<F, C::Hasher>::new();
+
+    challenger.observe_cap(trace_cap);
+    let stark_alphas = challenger.get_n_challenges(num_challenges);
+
+    challenger.observe_cap(quotient_polys_cap);
+    let stark_zeta = challenger.get_extension_challenge::<D>();
+
+    openings.observe(&mut challenger);
+
+    // Scaling factor to combine polynomials.
+    let fri_alpha = challenger.get_extension_challenge::<D>();
+
+    // Recover the random betas used in the FRI reductions.
+    let fri_betas = commit_phase_merkle_caps
+        .iter()
+        .map(|cap| {
+            challenger.observe_cap(cap);
+            challenger.get_extension_challenge::<D>()
+        })
+        .collect();
+
+    challenger.observe_extension_elements(&final_poly.coeffs);
+
+    let fri_pow_response = C::InnerHasher::hash(
+        &challenger
+            .get_hash()
+            .elements
+            .iter()
+            .copied()
+            .chain(Some(pow_witness))
+            .collect::<Vec<F>>(),
+        false,
+    )
+    .elements[0];
+
+    let fri_query_indices = (0..num_fri_queries)
+        .map(|_| challenger.get_challenge().to_canonical_u64() as usize % lde_size)
+        .collect();
+
+    Ok(StarkProofChallenges {
+        stark_alphas,
+        stark_zeta,
+        fri_alpha,
+        fri_betas,
+        fri_pow_response,
+        fri_query_indices,
+    })
+}
+
+impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
+    StarkProofWithPublicInputs<F, C, D>
+{
+    pub(crate) fn fri_query_indices(
+        &self,
+        config: &StarkConfig,
+        degree_bits: usize,
+    ) -> anyhow::Result<Vec<usize>> {
+        Ok(self.get_challenges(config, degree_bits)?.fri_query_indices)
+    }
+
+    /// Computes all Fiat-Shamir challenges used in the STARK proof.
+    pub(crate) fn get_challenges(
+        &self,
+        config: &StarkConfig,
+        degree_bits: usize,
+    ) -> Result<StarkProofChallenges<F, D>> {
+        let StarkProof {
+            trace_cap,
+            quotient_polys_cap,
+            openings,
+            opening_proof:
+                FriProof {
+                    commit_phase_merkle_caps,
+                    final_poly,
+                    pow_witness,
+                    ..
+                },
+        } = &self.proof;
+
+        get_challenges(
+            trace_cap,
+            quotient_polys_cap,
+            openings,
+            commit_phase_merkle_caps,
+            final_poly,
+            *pow_witness,
+            config,
+            degree_bits,
+        )
+    }
+}
+
+// impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
+//     CompressedProofWithPublicInputs<F, C, D>
+// {
+//     /// Computes all Fiat-Shamir challenges used in the Plonk proof.
+//     pub(crate) fn get_challenges(
+//         &self,
+//         common_data: &CommonCircuitData<F, C, D>,
+//     ) -> anyhow::Result<ProofChallenges<F, D>> {
+//         let CompressedProof {
+//             wires_cap,
+//             plonk_zs_partial_products_cap,
+//             quotient_polys_cap,
+//             openings,
+//             opening_proof:
+//                 CompressedFriProof {
+//                     commit_phase_merkle_caps,
+//                     final_poly,
+//                     pow_witness,
+//                     ..
+//                 },
+//         } = &self.proof;
+//
+//         get_challenges(
+//             self.get_public_inputs_hash(),
+//             wires_cap,
+//             plonk_zs_partial_products_cap,
+//             quotient_polys_cap,
+//             openings,
+//             commit_phase_merkle_caps,
+//             final_poly,
+//             *pow_witness,
+//             common_data,
+//         )
+//     }
+//
+//     /// Computes all coset elements that can be inferred in the FRI reduction steps.
+//     pub(crate) fn get_inferred_elements(
+//         &self,
+//         challenges: &ProofChallenges<F, D>,
+//         common_data: &CommonCircuitData<F, C, D>,
+//     ) -> FriInferredElements<F, D> {
+//         let ProofChallenges {
+//             plonk_zeta,
+//             fri_alpha,
+//             fri_betas,
+//             fri_query_indices,
+//             ..
+//         } = challenges;
+//         let mut fri_inferred_elements = Vec::new();
+//         // Holds the indices that have already been seen at each reduction depth.
+//         let mut seen_indices_by_depth =
+//             vec![HashSet::new(); common_data.fri_params.reduction_arity_bits.len()];
+//         let precomputed_reduced_evals = PrecomputedReducedOpenings::from_os_and_alpha(
+//             &self.proof.openings.to_fri_openings(),
+//             *fri_alpha,
+//         );
+//         let log_n = common_data.degree_bits + common_data.config.fri_config.rate_bits;
+//         // Simulate the proof verification and collect the inferred elements.
+//         // The content of the loop is basically the same as the `fri_verifier_query_round` function.
+//         for &(mut x_index) in fri_query_indices {
+//             let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR
+//                 * F::primitive_root_of_unity(log_n).exp_u64(reverse_bits(x_index, log_n) as u64);
+//             let mut old_eval = fri_combine_initial::<F, C, D>(
+//                 &common_data.get_fri_instance(*plonk_zeta),
+//                 &self
+//                     .proof
+//                     .opening_proof
+//                     .query_round_proofs
+//                     .initial_trees_proofs[&x_index],
+//                 *fri_alpha,
+//                 subgroup_x,
+//                 &precomputed_reduced_evals,
+//                 &common_data.fri_params,
+//             );
+//             for (i, &arity_bits) in common_data
+//                 .fri_params
+//                 .reduction_arity_bits
+//                 .iter()
+//                 .enumerate()
+//             {
+//                 let coset_index = x_index >> arity_bits;
+//                 if !seen_indices_by_depth[i].insert(coset_index) {
+//                     // If this index has already been seen, we can skip the rest of the reductions.
+//                     break;
+//                 }
+//                 fri_inferred_elements.push(old_eval);
+//                 let arity = 1 << arity_bits;
+//                 let mut evals = self.proof.opening_proof.query_round_proofs.steps[i][&coset_index]
+//                     .evals
+//                     .clone();
+//                 let x_index_within_coset = x_index & (arity - 1);
+//                 evals.insert(x_index_within_coset, old_eval);
+//                 old_eval = compute_evaluation(
+//                     subgroup_x,
+//                     x_index_within_coset,
+//                     arity_bits,
+//                     &evals,
+//                     fri_betas[i],
+//                 );
+//                 subgroup_x = subgroup_x.exp_power_of_2(arity_bits);
+//                 x_index = coset_index;
+//             }
+//         }
+//         FriInferredElements(fri_inferred_elements)
+//     }
+// }
diff --git a/starky/src/lib.rs b/starky/src/lib.rs
index 541950ab..e56c0ef6 100644
--- a/starky/src/lib.rs
+++ b/starky/src/lib.rs
@@ -8,10 +8,12 @@
 pub mod config;
 pub mod constraint_consumer;
+mod get_challenges;
 pub mod proof;
 pub mod prover;
 pub mod stark;
 pub mod vars;
+pub mod verifier;
 
 #[cfg(test)]
 pub mod fibonacci_stark;
diff --git a/starky/src/proof.rs b/starky/src/proof.rs
index 4218e71f..4d81793e 100644
--- a/starky/src/proof.rs
+++ b/starky/src/proof.rs
@@ -1,20 +1,34 @@
 use plonky2::field::extension_field::Extendable;
+use plonky2::field::field_types::Field;
 use plonky2::fri::oracle::PolynomialBatch;
 use plonky2::fri::proof::{CompressedFriProof, FriProof};
 use plonky2::hash::hash_types::RichField;
 use plonky2::hash::merkle_tree::MerkleCap;
-use plonky2::plonk::config::GenericConfig;
+use plonky2::iop::challenger::Challenger;
+use plonky2::plonk::config::{GenericConfig, Hasher};
 use rayon::prelude::*;
 
 pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
     /// Merkle cap of LDEs of trace values.
     pub trace_cap: MerkleCap<F, C::Hasher>,
+    /// Merkle cap of LDEs of quotient polynomial values.
+    pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
     /// Purported values of each polynomial at the challenge point.
     pub openings: StarkOpeningSet<F, D>,
     /// A batch FRI argument for all openings.
     pub opening_proof: FriProof<F, C::Hasher, D>,
 }
 
+pub struct StarkProofWithPublicInputs<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    const D: usize,
+> {
+    pub proof: StarkProof<F, C, D>,
+    // TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`.
+    pub public_inputs: Vec<F>,
+}
+
 pub struct CompressedStarkProof<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     const D: usize,
@@ -28,6 +42,34 @@ pub struct CompressedStarkProof<
     pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
 }
 
+pub struct CompressedStarkProofWithPublicInputs<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    const D: usize,
+> {
+    pub proof: CompressedStarkProof<F, C, D>,
+    pub public_inputs: Vec<F>,
+}
+
+pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
+    // Random values used to combine STARK constraints.
+    pub stark_alphas: Vec<F>,
+
+    // Point at which the STARK polynomials are opened.
+    pub stark_zeta: F::Extension,
+
+    // Scaling factor to combine polynomials.
+    pub fri_alpha: F::Extension,
+
+    // Betas used in the FRI commit phase reductions.
+    pub fri_betas: Vec<F::Extension>,
+
+    pub fri_pow_response: F,
+
+    // Indices at which the oracle is queried in FRI.
+    pub fri_query_indices: Vec<usize>,
+}
+
 /// Purported values of each polynomial at the challenge point.
 pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
     pub local_values: Vec<F::Extension>,
@@ -56,4 +98,17 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
             quotient_polys: eval_commitment(zeta, quotient_commitment),
         }
     }
+
+    // Note: Can't implement this directly on `Challenger` as it's in a different crate.
+    pub fn observe<H: Hasher<F>>(&self, challenger: &mut Challenger<F, H>) {
+        let StarkOpeningSet {
+            local_values,
+            next_values,
+            permutation_zs,
+            quotient_polys,
+        } = self;
+        for v in &[local_values, next_values, permutation_zs, quotient_polys] {
+            challenger.observe_extension_elements(v);
+        }
+    }
 }
diff --git a/starky/src/prover.rs b/starky/src/prover.rs
index e0652b24..4ba09e22 100644
--- a/starky/src/prover.rs
+++ b/starky/src/prover.rs
@@ -16,7 +16,7 @@ use rayon::prelude::*;
 
 use crate::config::StarkConfig;
 use crate::constraint_consumer::ConstraintConsumer;
-use crate::proof::{StarkOpeningSet, StarkProof};
+use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
 use crate::stark::Stark;
 use crate::vars::StarkEvaluationVars;
 
@@ -27,7 +27,7 @@ pub fn prove<F, C, S, const D: usize>(
     trace: Vec<[F; S::COLUMNS]>,
     public_inputs: [F; S::PUBLIC_INPUTS],
     timing: &mut TimingTree,
-) -> Result<StarkProof<F, C, D>>
+) -> Result<StarkProofWithPublicInputs<F, C, D>>
 where
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
@@ -101,7 +101,8 @@ where
             None,
         )
     );
-    challenger.observe_cap(&quotient_commitment.merkle_tree.cap);
+    let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
+    challenger.observe_cap(&quotient_polys_cap);
     let zeta = challenger.get_extension_challenge::<D>();
 
     // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
@@ -113,6 +114,7 @@ where
         "Opening point is in the subgroup."
    );
    let openings = StarkOpeningSet::new(zeta, g, &trace_commitment, &quotient_commitment);
+    openings.observe(&mut challenger);
 
    // TODO: Add permutation checks
    let initial_merkle_trees = &[&trace_commitment, &quotient_commitment];
@@ -129,11 +131,16 @@ where
             timing,
         )
     );
-
-    Ok(StarkProof {
+    let proof = StarkProof {
         trace_cap,
+        quotient_polys_cap,
         openings,
         opening_proof,
+    };
+
+    Ok(StarkProofWithPublicInputs {
+        proof,
+        public_inputs: public_inputs.to_vec(),
     })
 }
diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs
new file mode 100644
index 00000000..d9c8c309
--- /dev/null
+++ b/starky/src/verifier.rs
@@ -0,0 +1,105 @@
+use anyhow::{ensure, Result};
+use plonky2::field::extension_field::Extendable;
+use plonky2::hash::hash_types::RichField;
+use plonky2::plonk::circuit_data::CommonCircuitData;
+use plonky2::plonk::config::GenericConfig;
+use plonky2::plonk::proof::ProofWithPublicInputs;
+
+use crate::config::StarkConfig;
+use crate::proof::{StarkProof, StarkProofWithPublicInputs};
+use crate::stark::Stark;
+
+pub(crate) fn verify<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+    const D: usize,
+>(
+    proof_with_pis: StarkProofWithPublicInputs<F, C, D>,
+    config: &StarkConfig,
+    degree_bits: usize,
+) -> Result<()> {
+    let challenges = proof_with_pis.get_challenges(config, degree_bits)?;
+    // TODO: Everything below is still the Plonk verifier, kept as a starting point;
+    // `verifier_data` and `common_data` have no STARK equivalents here yet.
+    verify_with_challenges(proof_with_pis, challenges, verifier_data, common_data)
+}
+
+pub(crate) fn verify_with_challenges<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+    const D: usize,
+>(
+    proof_with_pis: StarkProofWithPublicInputs<F, C, D>,
+    challenges: ProofChallenges<F, D>,
+    verifier_data: &VerifierOnlyCircuitData<C, D>,
+    common_data: &CommonCircuitData<F, C, D>,
+) -> Result<()> {
+    assert_eq!(
+        proof_with_pis.public_inputs.len(),
+        common_data.num_public_inputs
+    );
+    let public_inputs_hash = &proof_with_pis.get_public_inputs_hash();
+
+    let ProofWithPublicInputs { proof, .. } = proof_with_pis;
+
+    let local_constants = &proof.openings.constants;
+    let local_wires = &proof.openings.wires;
+    let vars = EvaluationVars {
+        local_constants,
+        local_wires,
+        public_inputs_hash,
+    };
+    let local_zs = &proof.openings.plonk_zs;
+    let next_zs = &proof.openings.plonk_zs_right;
+    let s_sigmas = &proof.openings.plonk_sigmas;
+    let partial_products = &proof.openings.partial_products;
+
+    // Evaluate the vanishing polynomial at our challenge point, zeta.
+    let vanishing_polys_zeta = eval_vanishing_poly(
+        common_data,
+        challenges.plonk_zeta,
+        vars,
+        local_zs,
+        next_zs,
+        partial_products,
+        s_sigmas,
+        &challenges.plonk_betas,
+        &challenges.plonk_gammas,
+        &challenges.plonk_alphas,
+    );
+
+    // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
+    let quotient_polys_zeta = &proof.openings.quotient_polys;
+    let zeta_pow_deg = challenges
+        .plonk_zeta
+        .exp_power_of_2(common_data.degree_bits);
+    let z_h_zeta = zeta_pow_deg - F::Extension::ONE;
+    // `quotient_polys_zeta` holds `num_challenges * quotient_degree_factor` evaluations.
+    // Each chunk of `quotient_degree_factor` holds the evaluations of `t_0(zeta),...,t_{quotient_degree_factor-1}(zeta)`
+    // where the "real" quotient polynomial is `t(X) = t_0(X) + t_1(X)*X^n + t_2(X)*X^{2n} + ...`.
+    // So to reconstruct `t(zeta)` we can compute `reduce_with_powers(chunk, zeta^n)` for each
+    // `quotient_degree_factor`-sized chunk of the original evaluations.
+    for (i, chunk) in quotient_polys_zeta
+        .chunks(common_data.quotient_degree_factor)
+        .enumerate()
+    {
+        ensure!(vanishing_polys_zeta[i] == z_h_zeta * reduce_with_powers(chunk, zeta_pow_deg));
+    }
+
+    let merkle_caps = &[
+        verifier_data.constants_sigmas_cap.clone(),
+        proof.wires_cap,
+        proof.plonk_zs_partial_products_cap,
+        proof.quotient_polys_cap,
+    ];
+
+    verify_fri_proof::<F, C, D>(
+        &common_data.get_fri_instance(challenges.plonk_zeta),
+        &proof.openings,
+        &challenges,
+        merkle_caps,
+        &proof.opening_proof,
+        &common_data.fri_params,
+    )?;
+
+    Ok(())
+}
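
For reference, a standalone sketch of the quotient recombination that the verifier comment above describes: `t(zeta)` is rebuilt from the chunked evaluations `t_0(zeta), t_1(zeta), ...` by a Horner evaluation in `zeta^n`. This toy uses plain `u64` arithmetic and a locally defined helper, not plonky2's field types or its actual `reduce_with_powers`, so it is illustrative only.

// Illustrative only: mirrors the role of `reduce_with_powers` in the verifier comment,
// reconstructing t(zeta) = t_0 + t_1 * zeta^n + t_2 * zeta^{2n} + ... via Horner's rule.
fn reduce_with_powers_u64(chunk: &[u64], zeta_pow_deg: u64) -> u64 {
    chunk
        .iter()
        .rev()
        .fold(0u64, |acc, &eval| acc.wrapping_mul(zeta_pow_deg).wrapping_add(eval))
}

fn main() {
    // Toy chunk of three quotient-piece evaluations, with zeta^n = 7.
    let chunk = [3u64, 5, 2];
    // 3 + 5*7 + 2*49 = 136
    assert_eq!(reduce_with_powers_u64(&chunk, 7), 136);
}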