wborgeaud 2022-01-29 12:49:00 +01:00
parent 20a3683e09
commit 8993270f80
5 changed files with 394 additions and 6 deletions

starky/src/get_challenges.rs Normal file

@@ -0,0 +1,219 @@
use anyhow::Result;
use plonky2::field::extension_field::Extendable;
use plonky2::field::polynomial::PolynomialCoeffs;
use plonky2::fri::proof::FriProof;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, Hasher};
use crate::config::StarkConfig;
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
trace_cap: &MerkleCap<F, C::Hasher>,
quotient_polys_cap: &MerkleCap<F, C::Hasher>,
openings: &StarkOpeningSet<F, D>,
commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
final_poly: &PolynomialCoeffs<F::Extension>,
pow_witness: F,
config: &StarkConfig,
degree_bits: usize,
) -> Result<StarkProofChallenges<F, D>> {
let num_challenges = config.num_challenges;
let num_fri_queries = config.fri_config.num_query_rounds;
let lde_size = 1 << (degree_bits + config.fri_config.rate_bits);
let mut challenger = Challenger::<F, C::Hasher>::new();
challenger.observe_cap(trace_cap);
let stark_alphas = challenger.get_n_challenges(num_challenges);
challenger.observe_cap(quotient_polys_cap);
let stark_zeta = challenger.get_extension_challenge::<D>();
openings.observe(&mut challenger);
// Scaling factor to combine polynomials.
let fri_alpha = challenger.get_extension_challenge::<D>();
// Recover the random betas used in the FRI reductions.
let fri_betas = commit_phase_merkle_caps
.iter()
.map(|cap| {
challenger.observe_cap(cap);
challenger.get_extension_challenge::<D>()
})
.collect();
challenger.observe_extension_elements(&final_poly.coeffs);
let fri_pow_response = C::InnerHasher::hash(
        challenger
.get_hash()
.elements
.iter()
.copied()
.chain(Some(pow_witness))
.collect::<Vec<_>>(),
false,
)
.elements[0];
let fri_query_indices = (0..num_fri_queries)
.map(|_| challenger.get_challenge().to_canonical_u64() as usize % lde_size)
.collect();
Ok(StarkProofChallenges {
stark_alphas,
stark_zeta,
fri_alpha,
fri_betas,
fri_pow_response,
fri_query_indices,
})
}
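Each FRI query index above is obtained by reducing a base-field challenge modulo the LDE domain size. A standalone sketch of that sampling (`sample_query_index` is a hypothetical helper for illustration, not part of this commit):

fn sample_query_index(challenge: u64, degree_bits: usize, rate_bits: usize) -> usize {
    // Mirrors `lde_size = 1 << (degree_bits + config.fri_config.rate_bits)` above.
    let lde_size = 1usize << (degree_bits + rate_bits);
    // Mirrors `challenger.get_challenge().to_canonical_u64() as usize % lde_size`;
    // `lde_size` is a power of two, so this just keeps the low bits of the challenge.
    challenge as usize % lde_size
}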
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
StarkProofWithPublicInputs<F, C, D>
{
    pub(crate) fn fri_query_indices(
        &self,
        config: &StarkConfig,
        degree_bits: usize,
    ) -> Result<Vec<usize>> {
        Ok(self.get_challenges(config, degree_bits)?.fri_query_indices)
    }
    /// Computes all Fiat-Shamir challenges used in the STARK proof.
    pub(crate) fn get_challenges(
        &self,
        config: &StarkConfig,
        degree_bits: usize,
    ) -> Result<StarkProofChallenges<F, D>> {
let StarkProof {
trace_cap,
quotient_polys_cap,
openings,
opening_proof:
FriProof {
commit_phase_merkle_caps,
final_poly,
pow_witness,
..
},
} = &self.proof;
get_challenges(
trace_cap,
quotient_polys_cap,
openings,
commit_phase_merkle_caps,
final_poly,
*pow_witness,
            config,
            degree_bits,
        )
}
}
// impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
// CompressedProofWithPublicInputs<F, C, D>
// {
// /// Computes all Fiat-Shamir challenges used in the Plonk proof.
// pub(crate) fn get_challenges(
// &self,
// common_data: &CommonCircuitData<F, C, D>,
// ) -> anyhow::Result<ProofChallenges<F, D>> {
// let CompressedProof {
// wires_cap,
// plonk_zs_partial_products_cap,
// quotient_polys_cap,
// openings,
// opening_proof:
// CompressedFriProof {
// commit_phase_merkle_caps,
// final_poly,
// pow_witness,
// ..
// },
// } = &self.proof;
//
// get_challenges(
// self.get_public_inputs_hash(),
// wires_cap,
// plonk_zs_partial_products_cap,
// quotient_polys_cap,
// openings,
// commit_phase_merkle_caps,
// final_poly,
// *pow_witness,
// common_data,
// )
// }
//
// /// Computes all coset elements that can be inferred in the FRI reduction steps.
// pub(crate) fn get_inferred_elements(
// &self,
// challenges: &ProofChallenges<F, D>,
// common_data: &CommonCircuitData<F, C, D>,
// ) -> FriInferredElements<F, D> {
// let ProofChallenges {
// plonk_zeta,
// fri_alpha,
// fri_betas,
// fri_query_indices,
// ..
// } = challenges;
// let mut fri_inferred_elements = Vec::new();
// // Holds the indices that have already been seen at each reduction depth.
// let mut seen_indices_by_depth =
// vec![HashSet::new(); common_data.fri_params.reduction_arity_bits.len()];
// let precomputed_reduced_evals = PrecomputedReducedOpenings::from_os_and_alpha(
// &self.proof.openings.to_fri_openings(),
// *fri_alpha,
// );
// let log_n = common_data.degree_bits + common_data.config.fri_config.rate_bits;
// // Simulate the proof verification and collect the inferred elements.
// // The content of the loop is basically the same as the `fri_verifier_query_round` function.
// for &(mut x_index) in fri_query_indices {
// let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR
// * F::primitive_root_of_unity(log_n).exp_u64(reverse_bits(x_index, log_n) as u64);
// let mut old_eval = fri_combine_initial::<F, C, D>(
// &common_data.get_fri_instance(*plonk_zeta),
// &self
// .proof
// .opening_proof
// .query_round_proofs
// .initial_trees_proofs[&x_index],
// *fri_alpha,
// subgroup_x,
// &precomputed_reduced_evals,
// &common_data.fri_params,
// );
// for (i, &arity_bits) in common_data
// .fri_params
// .reduction_arity_bits
// .iter()
// .enumerate()
// {
// let coset_index = x_index >> arity_bits;
// if !seen_indices_by_depth[i].insert(coset_index) {
// // If this index has already been seen, we can skip the rest of the reductions.
// break;
// }
// fri_inferred_elements.push(old_eval);
// let arity = 1 << arity_bits;
// let mut evals = self.proof.opening_proof.query_round_proofs.steps[i][&coset_index]
// .evals
// .clone();
// let x_index_within_coset = x_index & (arity - 1);
// evals.insert(x_index_within_coset, old_eval);
// old_eval = compute_evaluation(
// subgroup_x,
// x_index_within_coset,
// arity_bits,
// &evals,
// fri_betas[i],
// );
// subgroup_x = subgroup_x.exp_power_of_2(arity_bits);
// x_index = coset_index;
// }
// }
// FriInferredElements(fri_inferred_elements)
// }
// }
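To illustrate the Fiat-Shamir symmetry that `get_challenges` relies on: the verifier re-derives the prover's challenges by replaying the same observations into a fresh `Challenger`. A minimal sketch using plonky2's `Challenger` as above (the concrete `PoseidonGoldilocksConfig` choice and the dummy transcript values are illustrative, not part of this commit):

use plonky2::field::field_types::Field;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type H = <C as GenericConfig<D>>::Hasher;

fn fiat_shamir_replay() {
    // Stand-ins for the observed commitment data (caps, openings, ...).
    let transcript = [F::from_canonical_u64(1), F::from_canonical_u64(2)];

    // Prover side: observe the transcript, then squeeze a challenge.
    let mut prover_challenger = Challenger::<F, H>::new();
    prover_challenger.observe_elements(&transcript);
    let alpha = prover_challenger.get_challenge();

    // Verifier side: replaying the same observations in the same order
    // reproduces exactly the same challenge.
    let mut verifier_challenger = Challenger::<F, H>::new();
    verifier_challenger.observe_elements(&transcript);
    assert_eq!(alpha, verifier_challenger.get_challenge());
}

This is why `get_challenges` must observe caps and openings in exactly the order `prove` does.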

starky/src/lib.rs

@@ -8,10 +8,12 @@
pub mod config;
pub mod constraint_consumer;
mod get_challenges;
pub mod proof;
pub mod prover;
pub mod stark;
pub mod vars;
pub mod verifier;
#[cfg(test)]
pub mod fibonacci_stark;

starky/src/proof.rs

@@ -1,20 +1,34 @@
use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::fri::proof::{CompressedFriProof, FriProof};
use plonky2::hash::hash_types::RichField;
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::plonk::config::GenericConfig;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, Hasher};
use rayon::prelude::*;
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::Hasher>,
    /// Merkle cap of LDEs of quotient polynomial values.
pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: FriProof<F, C::Hasher, D>,
}
pub struct StarkProofWithPublicInputs<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
pub proof: StarkProof<F, C, D>,
    // TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`.
pub public_inputs: Vec<F>,
}
pub struct CompressedStarkProof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@@ -28,6 +42,34 @@ pub struct CompressedStarkProof<
pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}
pub struct CompressedStarkProofWithPublicInputs<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
pub proof: CompressedStarkProof<F, C, D>,
pub public_inputs: Vec<F>,
}
pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
    // Random values used to combine STARK constraints.
    pub stark_alphas: Vec<F>,
    // Point at which the STARK polynomials are opened.
pub stark_zeta: F::Extension,
// Scaling factor to combine polynomials.
pub fri_alpha: F::Extension,
// Betas used in the FRI commit phase reductions.
pub fri_betas: Vec<F::Extension>,
pub fri_pow_response: F,
// Indices at which the oracle is queried in FRI.
pub fri_query_indices: Vec<usize>,
}
/// Purported values of each polynomial at the challenge point.
pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
pub local_values: Vec<F::Extension>,
@@ -56,4 +98,17 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
quotient_polys: eval_commitment(zeta, quotient_commitment),
}
}
// Note: Can't implement this directly on `Challenger` as it's in a different crate.
pub fn observe<H: Hasher<F>>(&self, challenger: &mut Challenger<F, H>) {
let StarkOpeningSet {
local_values,
next_values,
permutation_zs,
quotient_polys,
} = self;
        for v in &[local_values, next_values, permutation_zs, quotient_polys] {
            challenger.observe_extension_elements(v);
        }
}
}
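Since challenges are a function of the transcript, the order in which `observe` feeds the opening values is part of the protocol: swapping two observations changes all subsequent challenges with overwhelming probability. A small illustration, reusing the illustrative `F`/`H` aliases from the sketch after get_challenges.rs above:

fn observation_order_matters() {
    let (x, y) = (F::from_canonical_u64(1), F::from_canonical_u64(2));

    let mut ab = Challenger::<F, H>::new();
    ab.observe_element(x);
    ab.observe_element(y);

    let mut ba = Challenger::<F, H>::new();
    ba.observe_element(y);
    ba.observe_element(x);

    // Distinct transcripts give distinct challenges (with overwhelming probability),
    // so the canonical field order fixed by `StarkOpeningSet::observe` matters.
    assert_ne!(ab.get_challenge(), ba.get_challenge());
}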

starky/src/prover.rs

@@ -16,7 +16,7 @@ use rayon::prelude::*;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::proof::{StarkOpeningSet, StarkProof};
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vars::StarkEvaluationVars;
@@ -27,7 +27,7 @@ pub fn prove<F, C, S, const D: usize>(
trace: Vec<[F; S::COLUMNS]>,
public_inputs: [F; S::PUBLIC_INPUTS],
timing: &mut TimingTree,
) -> Result<StarkProof<F, C, D>>
) -> Result<StarkProofWithPublicInputs<F, C, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@@ -101,7 +102,8 @@ where
None,
)
);
challenger.observe_cap(&quotient_commitment.merkle_tree.cap);
    let quotient_polys_cap = quotient_commitment.merkle_tree.cap.clone();
    challenger.observe_cap(&quotient_polys_cap);
let zeta = challenger.get_extension_challenge::<D>();
// To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
@@ -113,6 +114,7 @@
"Opening point is in the subgroup."
);
let openings = StarkOpeningSet::new(zeta, g, &trace_commitment, &quotient_commitment);
openings.observe(&mut challenger);
    // TODO: Add permutation checks.
let initial_merkle_trees = &[&trace_commitment, &quotient_commitment];
@@ -129,11 +131,16 @@
timing,
)
);
Ok(StarkProof {
let proof = StarkProof {
trace_cap,
quotient_polys_cap,
openings,
opening_proof,
};
Ok(StarkProofWithPublicInputs {
proof,
public_inputs: public_inputs.to_vec(),
})
}
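The `ensure!` above rejects a `zeta` that lands in the trace subgroup, since opening there could leak witness data. The test is plain subgroup membership by exponentiation: a point lies in the order-`2^degree_bits` subgroup iff raising it to `2^degree_bits` gives one. A sketch over the base field (the prover applies the same check to the extension-field `zeta`; this helper is illustrative, not part of the commit):

use plonky2::field::field_types::Field;
use plonky2::field::goldilocks_field::GoldilocksField;

// `zeta` is in the multiplicative subgroup of order 2^degree_bits iff
// zeta^(2^degree_bits) == 1.
fn in_trace_subgroup(zeta: GoldilocksField, degree_bits: usize) -> bool {
    zeta.exp_power_of_2(degree_bits) == GoldilocksField::ONE
}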

starky/src/verifier.rs Normal file

@@ -0,0 +1,105 @@
use anyhow::{ensure, Result};
use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::plonk_common::reduce_with_powers;
use crate::config::StarkConfig;
use crate::proof::{StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs};
use crate::stark::Stark;
pub(crate) fn verify<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    const D: usize,
>(
    proof_with_pis: StarkProofWithPublicInputs<F, C, D>,
    config: &StarkConfig,
    degree_bits: usize,
) -> Result<()> {
    let challenges = proof_with_pis.get_challenges(config, degree_bits)?;
    verify_with_challenges::<F, C, S, D>(proof_with_pis, challenges, config, degree_bits)
}
pub(crate) fn verify_with_challenges<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
    const D: usize,
>(
    proof_with_pis: StarkProofWithPublicInputs<F, C, D>,
    challenges: StarkProofChallenges<F, D>,
    config: &StarkConfig,
    degree_bits: usize,
) -> Result<()> {
    ensure!(
        proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS,
        "Number of public inputs doesn't match."
    );
    let StarkProofWithPublicInputs { proof, .. } = proof_with_pis;
    let StarkOpeningSet {
        local_values,
        next_values,
        quotient_polys,
        ..
    } = &proof.openings;
    // Evaluate the vanishing polynomial at our challenge point, zeta. For a STARK this means
    // running the constraints on `local_values`/`next_values` through a `ConstraintConsumer`
    // built from `challenges.stark_alphas`, mirroring the prover.
    // TODO: Port `eval_vanishing_poly` to the STARK setting; placeholder until then.
    let vanishing_polys_zeta: Vec<F::Extension> = todo!();
    // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
    let zeta_pow_deg = challenges.stark_zeta.exp_power_of_2(degree_bits);
    let z_h_zeta = zeta_pow_deg - F::Extension::ONE;
    // `quotient_polys` holds `num_challenges * quotient_degree_factor` evaluations. Each chunk of
    // `quotient_degree_factor` holds the evaluations of `t_0(zeta),...,t_{quotient_degree_factor-1}(zeta)`,
    // where the "real" quotient polynomial is `t(X) = t_0(X) + t_1(X)*X^n + t_2(X)*X^{2n} + ...`.
    // So to reconstruct `t(zeta)` we can compute `reduce_with_powers(chunk, zeta^n)` for each
    // `quotient_degree_factor`-sized chunk of the original evaluations.
    let quotient_degree_factor = quotient_polys.len() / config.num_challenges;
    for (i, chunk) in quotient_polys.chunks(quotient_degree_factor).enumerate() {
        ensure!(vanishing_polys_zeta[i] == z_h_zeta * reduce_with_powers(chunk, zeta_pow_deg));
    }
    let merkle_caps = &[proof.trace_cap, proof.quotient_polys_cap];
    // TODO: Starky has no analogue of `CommonCircuitData::get_fri_instance` or its `FriParams`
    // yet, so the batch FRI check is left as a template until those are defined:
    // verify_fri_proof::<F, C, D>(
    //     &fri_instance,
    //     &proof.openings,
    //     &challenges,
    //     merkle_caps,
    //     &proof.opening_proof,
    //     &fri_params,
    // )?;
    Ok(())
}
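To make the chunked-quotient comment concrete: for `t(X) = t_0(X) + t_1(X)*X^n`, the opened chunk recombines through powers of `zeta^n`, which is what `reduce_with_powers` computes (Horner evaluation in `zeta^n`). A minimal test-style sketch with toy values, assuming plonky2's `reduce_with_powers` is importable as below:

#[cfg(test)]
mod quotient_reconstruction_sketch {
    use plonky2::field::field_types::Field;
    use plonky2::field::goldilocks_field::GoldilocksField as F;
    use plonky2::plonk::plonk_common::reduce_with_powers;

    #[test]
    fn reduce_with_powers_reconstructs_t_at_zeta() {
        // Suppose t(X) = t_0(X) + t_1(X) * X^n, with openings t_0(zeta) = 3,
        // t_1(zeta) = 4 and zeta^n = 5 (toy values in the Goldilocks field).
        let zeta_pow_n = F::from_canonical_u64(5);
        let chunk = [F::from_canonical_u64(3), F::from_canonical_u64(4)];
        // t(zeta) = t_0(zeta) + t_1(zeta) * zeta^n.
        let expected = chunk[0] + chunk[1] * zeta_pow_n;
        assert_eq!(reduce_with_powers(&chunk, zeta_pow_n), expected);
    }
}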