diff --git a/plonky2/src/fri/challenges.rs b/plonky2/src/fri/challenges.rs index 82438383..9222ebc8 100644 --- a/plonky2/src/fri/challenges.rs +++ b/plonky2/src/fri/challenges.rs @@ -89,9 +89,9 @@ impl, H: AlgebraicHasher, const D: usize> commit_phase_merkle_caps: &[MerkleCapTarget], final_poly: &PolynomialCoeffsExtTarget, pow_witness: Target, - inner_common_data: &CommonCircuitData, + inner_fri_config: &FriConfig, ) -> FriChallengesTarget { - let num_fri_queries = inner_common_data.config.fri_config.num_query_rounds; + let num_fri_queries = inner_fri_config.num_query_rounds; // Scaling factor to combine polynomials. let fri_alpha = self.get_extension_challenge(builder); diff --git a/plonky2/src/iop/challenger.rs b/plonky2/src/iop/challenger.rs index 1519f6ec..c3a4403a 100644 --- a/plonky2/src/iop/challenger.rs +++ b/plonky2/src/iop/challenger.rs @@ -166,7 +166,7 @@ pub struct RecursiveChallenger, H: AlgebraicHasher< impl, H: AlgebraicHasher, const D: usize> RecursiveChallenger { - pub(crate) fn new(builder: &mut CircuitBuilder) -> Self { + pub fn new(builder: &mut CircuitBuilder) -> Self { let zero = builder.zero(); RecursiveChallenger { sponge_state: [zero; SPONGE_WIDTH], @@ -222,7 +222,7 @@ impl, H: AlgebraicHasher, const D: usize> .expect("Output buffer should be non-empty") } - pub(crate) fn get_n_challenges( + pub fn get_n_challenges( &mut self, builder: &mut CircuitBuilder, n: usize, diff --git a/plonky2/src/plonk/get_challenges.rs b/plonky2/src/plonk/get_challenges.rs index a67a6207..e8fd3933 100644 --- a/plonky2/src/plonk/get_challenges.rs +++ b/plonky2/src/plonk/get_challenges.rs @@ -275,7 +275,7 @@ impl, const D: usize> CircuitBuilder { commit_phase_merkle_caps, final_poly, pow_witness, - inner_common_data, + &inner_common_data.config.fri_config, ), } } diff --git a/starky/src/constraint_consumer.rs b/starky/src/constraint_consumer.rs index c909b520..932b2db8 100644 --- a/starky/src/constraint_consumer.rs +++ b/starky/src/constraint_consumer.rs @@ -80,10 
+80,10 @@ impl ConstraintConsumer

{ pub struct RecursiveConstraintConsumer, const D: usize> { /// A random value used to combine multiple constraints into one. - alpha: Target, + alphas: Vec, /// A running sum of constraints that have been emitted so far, scaled by powers of alpha. - constraint_acc: ExtensionTarget, + constraint_accs: Vec>, /// The evaluation of `X - g^(n-1)`. z_last: ExtensionTarget, @@ -100,6 +100,27 @@ pub struct RecursiveConstraintConsumer, const D: us } impl, const D: usize> RecursiveConstraintConsumer { + pub fn new( + zero: ExtensionTarget, + alphas: Vec, + z_last: ExtensionTarget, + lagrange_basis_first: ExtensionTarget, + lagrange_basis_last: ExtensionTarget, + ) -> Self { + Self { + constraint_accs: vec![zero; alphas.len()], + alphas, + z_last, + lagrange_basis_first, + lagrange_basis_last, + _phantom: Default::default(), + } + } + + pub fn accumulators(self) -> Vec> { + self.constraint_accs + } + /// Add one constraint valid on all rows except the last. pub fn constraint( &mut self, @@ -116,8 +137,9 @@ impl, const D: usize> RecursiveConstraintConsumer, constraint: ExtensionTarget, ) { - self.constraint_acc = - builder.scalar_mul_add_extension(self.alpha, self.constraint_acc, constraint); + for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) { + *acc = builder.scalar_mul_add_extension(alpha, *acc, constraint); + } } /// Add one constraint, but first multiply it by a filter such that it will only apply to the diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs index 7e89ca3e..d44075e7 100644 --- a/starky/src/get_challenges.rs +++ b/starky/src/get_challenges.rs @@ -1,14 +1,21 @@ use anyhow::Result; use plonky2::field::extension_field::Extendable; use plonky2::field::polynomial::PolynomialCoeffs; -use plonky2::fri::proof::FriProof; -use plonky2::hash::hash_types::RichField; +use plonky2::fri::proof::{FriProof, FriProofTarget}; +use plonky2::gadgets::polynomial::PolynomialCoeffsExtTarget; +use 
plonky2::hash::hash_types::{MerkleCapTarget, RichField}; use plonky2::hash::merkle_tree::MerkleCap; -use plonky2::iop::challenger::Challenger; -use plonky2::plonk::config::GenericConfig; +use plonky2::iop::challenger::{Challenger, RecursiveChallenger}; +use plonky2::iop::target::Target; +use plonky2::plonk::circuit_builder::CircuitBuilder; +use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; use crate::config::StarkConfig; -use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs}; +use crate::proof::{ + StarkOpeningSet, StarkOpeningSetTarget, StarkProof, StarkProofChallenges, + StarkProofChallengesTarget, StarkProofTarget, StarkProofWithPublicInputs, + StarkProofWithPublicInputsTarget, +}; #[allow(clippy::too_many_arguments)] fn get_challenges, C: GenericConfig, const D: usize>( @@ -94,6 +101,88 @@ impl, C: GenericConfig, const D: usize> } } +pub(crate) fn get_challenges_target< + F: RichField + Extendable, + C: GenericConfig, + const D: usize, +>( + builder: &mut CircuitBuilder, + trace_cap: &MerkleCapTarget, + quotient_polys_cap: &MerkleCapTarget, + openings: &StarkOpeningSetTarget, + commit_phase_merkle_caps: &[MerkleCapTarget], + final_poly: &PolynomialCoeffsExtTarget, + pow_witness: Target, + config: &StarkConfig, + degree_bits: usize, +) -> StarkProofChallengesTarget +where + C::Hasher: AlgebraicHasher, +{ + let num_challenges = config.num_challenges; + let num_fri_queries = config.fri_config.num_query_rounds; + let lde_size = 1 << (degree_bits + config.fri_config.rate_bits); + + let mut challenger = RecursiveChallenger::::new(builder); + + challenger.observe_cap(trace_cap); + let stark_alphas = challenger.get_n_challenges(builder, num_challenges); + + challenger.observe_cap(quotient_polys_cap); + let stark_zeta = challenger.get_extension_challenge(builder); + + challenger.observe_openings(&openings.to_fri_openings()); + + StarkProofChallengesTarget { + stark_alphas, + stark_zeta, + fri_challenges: 
challenger.fri_challenges::( + builder, + commit_phase_merkle_caps, + final_poly, + pow_witness, + &config.fri_config, + ), + } +} + +impl StarkProofWithPublicInputsTarget { + pub(crate) fn get_challenges, C: GenericConfig>( + &self, + builder: &mut CircuitBuilder, + config: &StarkConfig, + degree_bits: usize, + ) -> StarkProofChallengesTarget + where + C::Hasher: AlgebraicHasher, + { + let StarkProofTarget { + trace_cap, + quotient_polys_cap, + openings, + opening_proof: + FriProofTarget { + commit_phase_merkle_caps, + final_poly, + pow_witness, + .. + }, + } = &self.proof; + + get_challenges_target::( + builder, + trace_cap, + quotient_polys_cap, + openings, + commit_phase_merkle_caps, + final_poly, + *pow_witness, + config, + degree_bits, + ) + } +} + // TODO: Deal with the compressed stuff. // impl, C: GenericConfig, const D: usize> // CompressedProofWithPublicInputs diff --git a/starky/src/proof.rs b/starky/src/proof.rs index 4a6de0bb..3e01621f 100644 --- a/starky/src/proof.rs +++ b/starky/src/proof.rs @@ -1,7 +1,11 @@ -use plonky2::field::extension_field::Extendable; +use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::fri::oracle::PolynomialBatch; -use plonky2::fri::proof::{CompressedFriProof, FriChallenges, FriProof, FriProofTarget}; -use plonky2::fri::structure::{FriOpeningBatch, FriOpenings}; +use plonky2::fri::proof::{ + CompressedFriProof, FriChallenges, FriChallengesTarget, FriProof, FriProofTarget, +}; +use plonky2::fri::structure::{ + FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget, +}; use plonky2::hash::hash_types::{MerkleCapTarget, RichField}; use plonky2::hash::merkle_tree::MerkleCap; use plonky2::iop::ext_target::ExtensionTarget; @@ -9,6 +13,8 @@ use plonky2::iop::target::Target; use plonky2::plonk::config::GenericConfig; use rayon::prelude::*; +use crate::config::StarkConfig; + pub struct StarkProof, C: GenericConfig, const D: usize> { /// Merkle cap of LDEs of trace values. 
pub trace_cap: MerkleCap, @@ -20,6 +26,17 @@ pub struct StarkProof, C: GenericConfig, pub opening_proof: FriProof, } +impl, C: GenericConfig, const D: usize> StarkProof { + pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize { + let initial_merkle_proof = &self.opening_proof.query_round_proofs[0] + .initial_trees_proof + .evals_proofs[0] + .1; + let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len(); + lde_bits - config.fri_config.rate_bits + } +} + pub struct StarkProofTarget { pub trace_cap: MerkleCapTarget, pub quotient_polys_cap: MerkleCapTarget, @@ -27,6 +44,17 @@ pub struct StarkProofTarget { pub opening_proof: FriProofTarget, } +impl StarkProofTarget { + pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize { + let initial_merkle_proof = &self.opening_proof.query_round_proofs[0] + .initial_trees_proof + .evals_proofs[0] + .1; + let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len(); + lde_bits - config.fri_config.rate_bits + } +} + pub struct StarkProofWithPublicInputs< F: RichField + Extendable, C: GenericConfig, @@ -74,6 +102,12 @@ pub(crate) struct StarkProofChallenges, const D: us pub fri_challenges: FriChallenges, } +pub(crate) struct StarkProofChallengesTarget { + pub stark_alphas: Vec, + pub stark_zeta: ExtensionTarget, + pub fri_challenges: FriChallengesTarget, +} + /// Purported values of each polynomial at the challenge point. 
pub struct StarkOpeningSet, const D: usize> { pub local_values: Vec, @@ -86,7 +120,7 @@ pub struct StarkOpeningSet, const D: usize> { impl, const D: usize> StarkOpeningSet { pub fn new>( zeta: F::Extension, - g: F::Extension, + g: F, trace_commitment: &PolynomialBatch, quotient_commitment: &PolynomialBatch, ) -> Self { @@ -98,7 +132,7 @@ impl, const D: usize> StarkOpeningSet { }; Self { local_values: eval_commitment(zeta, trace_commitment), - next_values: eval_commitment(zeta * g, trace_commitment), + next_values: eval_commitment(zeta.scalar_mul(g), trace_commitment), permutation_zs: vec![/*TODO*/], permutation_zs_right: vec![/*TODO*/], quotient_polys: eval_commitment(zeta, quotient_commitment), @@ -134,3 +168,26 @@ pub struct StarkOpeningSetTarget { pub permutation_zs_right: Vec>, pub quotient_polys: Vec>, } + +impl StarkOpeningSetTarget { + pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget { + let zeta_batch = FriOpeningBatchTarget { + values: [ + self.local_values.as_slice(), + self.quotient_polys.as_slice(), + self.permutation_zs.as_slice(), + ] + .concat(), + }; + let zeta_right_batch = FriOpeningBatchTarget { + values: [ + self.next_values.as_slice(), + self.permutation_zs_right.as_slice(), + ] + .concat(), + }; + FriOpeningsTarget { + batches: vec![zeta_batch, zeta_right_batch], + } + } +} diff --git a/starky/src/prover.rs b/starky/src/prover.rs index de97ecce..93e01677 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -106,7 +106,7 @@ where // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and // `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since // `(g * zeta)^n = zeta^n`, where `n` is the order of `g`. - let g = F::Extension::primitive_root_of_unity(degree_bits); + let g = F::primitive_root_of_unity(degree_bits); ensure!( zeta.exp_power_of_2(degree_bits) != F::Extension::ONE, "Opening point is in the subgroup." 
@@ -122,7 +122,7 @@ where timing, "compute openings proof", PolynomialBatch::prove_openings( - &stark.fri_instance(zeta, g, rate_bits, config.num_challenges), + &stark.fri_instance(zeta, g, config.num_challenges), initial_merkle_trees, &mut challenger, &fri_params, diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index 5b8e9a8a..8f7f6d8f 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -1,195 +1,210 @@ use plonky2::field::extension_field::Extendable; +use plonky2::field::field_types::Field; use plonky2::hash::hash_types::RichField; +use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::config::GenericConfig; -use crate::config::StarkConfig; -use crate::proof::StarkProofWithPublicInputsTarget; -use crate::stark::Stark; +use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; +use plonky2::util::reducing::ReducingFactorTarget; +use plonky2::with_context; -pub fn verify_stark_proof< F: RichField + Extendable, +use crate::config::StarkConfig; +use crate::constraint_consumer::RecursiveConstraintConsumer; +use crate::proof::{ + StarkOpeningSetTarget, StarkProofChallengesTarget, StarkProofWithPublicInputsTarget, +}; +use crate::stark::Stark; +use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; + +pub fn verify_stark_proof< + F: RichField + Extendable, C: GenericConfig, S: Stark, const D: usize, - >( - builder: &mut CircuitBuilder, - stark: S, - proof_with_pis: StarkProofWithPublicInputsTarget, - inner_config: &StarkConfig - ) - { - let StarkProofWithPublicInputsTarget { - proof, - public_inputs, - } = proof_with_pis; +>( + builder: &mut CircuitBuilder, + stark: S, + proof_with_pis: StarkProofWithPublicInputsTarget, + inner_config: &StarkConfig, +) where + C::Hasher: AlgebraicHasher, + [(); { S::COLUMNS }]:, + [(); { S::PUBLIC_INPUTS }]:, +{ + assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS); + let 
degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config); + let challenges = proof_with_pis.get_challenges::(builder, inner_config, degree_bits); - assert_eq!(public_inputs.len(), inner_common_data.num_public_inputs); - let public_inputs_hash = self.hash_n_to_hash_no_pad::(public_inputs); - - self.verify_proof( - proof, - public_inputs_hash, - inner_verifier_data, - inner_common_data, - ); - } - - /// Recursively verifies an inner proof. - pub fn verify_proof>( - &mut self, - proof: ProofTarget, - public_inputs_hash: HashOutTarget, - inner_verifier_data: &VerifierCircuitTarget, - inner_common_data: &CommonCircuitData, - ) where - C::Hasher: AlgebraicHasher, - { - let one = self.one_extension(); - - let num_challenges = inner_common_data.config.num_challenges; - - let mut challenger = RecursiveChallenger::::new(self); - - let (betas, gammas, alphas, zeta) = - with_context!(self, "observe proof and generates challenges", { - // Observe the instance. - let digest = HashOutTarget::from_vec( - self.constants(&inner_common_data.circuit_digest.elements), - ); - challenger.observe_hash(&digest); - challenger.observe_hash(&public_inputs_hash); - - challenger.observe_cap(&proof.wires_cap); - let betas = challenger.get_n_challenges(self, num_challenges); - let gammas = challenger.get_n_challenges(self, num_challenges); - - challenger.observe_cap(&proof.plonk_zs_partial_products_cap); - let alphas = challenger.get_n_challenges(self, num_challenges); - - challenger.observe_cap(&proof.quotient_polys_cap); - let zeta = challenger.get_extension_challenge(self); - - (betas, gammas, alphas, zeta) - }); - - let local_constants = &proof.openings.constants; - let local_wires = &proof.openings.wires; - let vars = EvaluationTargets { - local_constants, - local_wires, - public_inputs_hash: &public_inputs_hash, - }; - let local_zs = &proof.openings.plonk_zs; - let next_zs = &proof.openings.plonk_zs_right; - let s_sigmas = &proof.openings.plonk_sigmas; - let partial_products = 
&proof.openings.partial_products; - - let zeta_pow_deg = self.exp_power_of_2_extension(zeta, inner_common_data.degree_bits); - let vanishing_polys_zeta = with_context!( - self, - "evaluate the vanishing polynomial at our challenge point, zeta.", - eval_vanishing_poly_recursively( - self, - inner_common_data, - zeta, - zeta_pow_deg, - vars, - local_zs, - next_zs, - partial_products, - s_sigmas, - &betas, - &gammas, - &alphas, - ) - ); - - with_context!(self, "check vanishing and quotient polynomials.", { - let quotient_polys_zeta = &proof.openings.quotient_polys; - let mut scale = ReducingFactorTarget::new(zeta_pow_deg); - let z_h_zeta = self.sub_extension(zeta_pow_deg, one); - for (i, chunk) in quotient_polys_zeta - .chunks(inner_common_data.quotient_degree_factor) - .enumerate() - { - let recombined_quotient = scale.reduce(chunk, self); - let computed_vanishing_poly = self.mul_extension(z_h_zeta, recombined_quotient); - self.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly); - } - }); - - let merkle_caps = &[ - inner_verifier_data.constants_sigmas_cap.clone(), - proof.wires_cap, - proof.plonk_zs_partial_products_cap, - proof.quotient_polys_cap, - ]; - - let fri_instance = inner_common_data.get_fri_instance_target(self, zeta); - with_context!( - self, - "verify FRI proof", - self.verify_fri_proof::( - &fri_instance, - &proof.openings, - merkle_caps, - &proof.opening_proof, - &mut challenger, - &inner_common_data.fri_params, - ) - ); - } - - pub fn add_virtual_proof_with_pis>( - &mut self, - common_data: &CommonCircuitData, - ) -> ProofWithPublicInputsTarget { - let proof = self.add_virtual_proof(common_data); - let public_inputs = self.add_virtual_targets(common_data.num_public_inputs); - ProofWithPublicInputsTarget { - proof, - public_inputs, - } - } - - fn add_virtual_proof>( - &mut self, - common_data: &CommonCircuitData, - ) -> ProofTarget { - let config = &common_data.config; - let fri_params = &common_data.fri_params; - let cap_height = 
fri_params.config.cap_height; - - let num_leaves_per_oracle = &[ - common_data.num_preprocessed_polys(), - config.num_wires, - common_data.num_zs_partial_products_polys(), - common_data.num_quotient_polys(), - ]; - - ProofTarget { - wires_cap: self.add_virtual_cap(cap_height), - plonk_zs_partial_products_cap: self.add_virtual_cap(cap_height), - quotient_polys_cap: self.add_virtual_cap(cap_height), - openings: self.add_opening_set(common_data), - opening_proof: self.add_virtual_fri_proof(num_leaves_per_oracle, fri_params), - } - } - - fn add_opening_set>( - &mut self, - common_data: &CommonCircuitData, - ) -> OpeningSetTarget { - let config = &common_data.config; - let num_challenges = config.num_challenges; - let total_partial_products = num_challenges * common_data.num_partial_products; - OpeningSetTarget { - constants: self.add_virtual_extension_targets(common_data.num_constants), - plonk_sigmas: self.add_virtual_extension_targets(config.num_routed_wires), - wires: self.add_virtual_extension_targets(config.num_wires), - plonk_zs: self.add_virtual_extension_targets(num_challenges), - plonk_zs_right: self.add_virtual_extension_targets(num_challenges), - partial_products: self.add_virtual_extension_targets(total_partial_products), - quotient_polys: self.add_virtual_extension_targets(common_data.num_quotient_polys()), - } - } + verify_stark_proof_with_challenges::( + builder, + stark, + proof_with_pis, + challenges, + inner_config, + degree_bits, + ); } + +/// Recursively verifies an inner proof. 
+fn verify_stark_proof_with_challenges< + F: RichField + Extendable, + C: GenericConfig, + S: Stark, + const D: usize, +>( + builder: &mut CircuitBuilder, + stark: S, + proof_with_pis: StarkProofWithPublicInputsTarget, + challenges: StarkProofChallengesTarget, + inner_config: &StarkConfig, + degree_bits: usize, +) where + C::Hasher: AlgebraicHasher, + [(); { S::COLUMNS }]:, + [(); { S::PUBLIC_INPUTS }]:, +{ + let one = builder.one_extension(); + + let StarkProofWithPublicInputsTarget { + proof, + public_inputs, + } = proof_with_pis; + let local_values = &proof.openings.local_values; + let next_values = &proof.openings.next_values; + let StarkOpeningSetTarget { + local_values, + next_values, + permutation_zs, + permutation_zs_right, + quotient_polys, + } = &proof.openings; + let vars = StarkEvaluationTargets { + local_values: &local_values.to_vec().try_into().unwrap(), + next_values: &next_values.to_vec().try_into().unwrap(), + public_inputs: &public_inputs + .into_iter() + .map(|t| builder.convert_to_ext(t)) + .collect::>() + .try_into() + .unwrap(), + }; + let (l_1, l_last) = + eval_l_1_and_l_last_recursively(builder, degree_bits, challenges.stark_zeta); + let last = + builder.constant_extension(F::Extension::primitive_root_of_unity(degree_bits).inverse()); + let z_last = builder.sub_extension(challenges.stark_zeta, last); + let mut consumer = RecursiveConstraintConsumer::::new( + builder.zero_extension(), + challenges.stark_alphas, + z_last, + l_1, + l_last, + ); + stark.eval_ext_recursively(builder, vars, &mut consumer); + let vanishing_polys_zeta = consumer.accumulators(); + + // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta. 
+ let quotient_polys_zeta = &proof.openings.quotient_polys; + let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits); + let mut scale = ReducingFactorTarget::new(zeta_pow_deg); + let z_h_zeta = builder.sub_extension(zeta_pow_deg, one); + for (i, chunk) in quotient_polys_zeta + .chunks(stark.quotient_degree_factor()) + .enumerate() + { + let recombined_quotient = scale.reduce(chunk, builder); + let computed_vanishing_poly = builder.mul_extension(z_h_zeta, recombined_quotient); + builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly); + } + + // TODO: Permutation polynomials. + let merkle_caps = &[proof.trace_cap, proof.quotient_polys_cap]; + + let fri_instance = stark.fri_instance_target( + builder, + challenges.stark_zeta, + F::primitive_root_of_unity(degree_bits), + inner_config.num_challenges, + ); + builder.verify_fri_proof::( + &fri_instance, + &proof.openings.to_fri_openings(), + &challenges.fri_challenges, + merkle_caps, + &proof.opening_proof, + &inner_config.fri_params(degree_bits), + ); +} + +fn eval_l_1_and_l_last_recursively, const D: usize>( + builder: &mut CircuitBuilder, + log_n: usize, + x: ExtensionTarget, +) -> (ExtensionTarget, ExtensionTarget) { + let n = builder.constant_extension(F::Extension::from_canonical_usize(1 << log_n)); + let g = builder.constant_extension(F::Extension::primitive_root_of_unity(log_n)); + let x_pow_n = builder.exp_power_of_2_extension(x, log_n); + let one = builder.one_extension(); + let z_x = builder.sub_extension(x_pow_n, one); + let l_1_deno = builder.mul_sub_extension(n, x, n); + let l_last_deno = builder.mul_sub_extension(g, x, one); + let l_last_deno = builder.mul_extension(n, l_last_deno); + + ( + builder.div_extension(z_x, l_1_deno), + builder.div_extension(z_x, l_last_deno), + ) +} + +// pub fn add_virtual_proof_with_pis>( +// &mut self, +// common_data: &CommonCircuitData, +// ) -> ProofWithPublicInputsTarget { +// let proof = 
self.add_virtual_proof(common_data); +// let public_inputs = self.add_virtual_targets(common_data.num_public_inputs); +// ProofWithPublicInputsTarget { +// proof, +// public_inputs, +// } +// } +// +// fn add_virtual_proof>( +// &mut self, +// common_data: &CommonCircuitData, +// ) -> ProofTarget { +// let config = &common_data.config; +// let fri_params = &common_data.fri_params; +// let cap_height = fri_params.config.cap_height; +// +// let num_leaves_per_oracle = &[ +// common_data.num_preprocessed_polys(), +// config.num_wires, +// common_data.num_zs_partial_products_polys(), +// common_data.num_quotient_polys(), +// ]; +// +// ProofTarget { +// wires_cap: self.add_virtual_cap(cap_height), +// plonk_zs_partial_products_cap: self.add_virtual_cap(cap_height), +// quotient_polys_cap: self.add_virtual_cap(cap_height), +// openings: self.add_opening_set(common_data), +// opening_proof: self.add_virtual_fri_proof(num_leaves_per_oracle, fri_params), +// } +// } +// +// fn add_opening_set>( +// &mut self, +// common_data: &CommonCircuitData, +// ) -> OpeningSetTarget { +// let config = &common_data.config; +// let num_challenges = config.num_challenges; +// let total_partial_products = num_challenges * common_data.num_partial_products; +// OpeningSetTarget { +// constants: self.add_virtual_extension_targets(common_data.num_constants), +// plonk_sigmas: self.add_virtual_extension_targets(config.num_routed_wires), +// wires: self.add_virtual_extension_targets(config.num_wires), +// plonk_zs: self.add_virtual_extension_targets(num_challenges), +// plonk_zs_right: self.add_virtual_extension_targets(num_challenges), +// partial_products: self.add_virtual_extension_targets(total_partial_products), +// quotient_polys: self.add_virtual_extension_targets(common_data.num_quotient_polys()), +// } +// } diff --git a/starky/src/stark.rs b/starky/src/stark.rs index 4b20553e..3ef976e0 100644 --- a/starky/src/stark.rs +++ b/starky/src/stark.rs @@ -1,7 +1,11 @@ use 
plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::field::packed_field::PackedField; -use plonky2::fri::structure::{FriBatchInfo, FriInstanceInfo, FriOracleInfo, FriPolynomialInfo}; +use plonky2::fri::structure::{ + FriBatchInfo, FriBatchInfoTarget, FriInstanceInfo, FriInstanceInfoTarget, FriOracleInfo, + FriPolynomialInfo, +}; use plonky2::hash::hash_types::RichField; +use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; @@ -75,8 +79,7 @@ pub trait Stark, const D: usize>: Sync { fn fri_instance( &self, zeta: F::Extension, - g: F::Extension, - rate_bits: usize, + g: F, num_challenges: usize, ) -> FriInstanceInfo { let no_blinding_oracle = FriOracleInfo { blinding: false }; @@ -88,7 +91,7 @@ pub trait Stark, const D: usize>: Sync { polynomials: [trace_info.clone(), quotient_info].concat(), }; let zeta_right_batch = FriBatchInfo:: { - point: zeta * g, + point: zeta.scalar_mul(g), polynomials: trace_info, }; FriInstanceInfo { @@ -96,4 +99,32 @@ pub trait Stark, const D: usize>: Sync { batches: vec![zeta_batch], } } + + /// Computes the FRI instance used to prove this Stark. + // TODO: Permutation polynomials. 
+ fn fri_instance_target( + &self, + builder: &mut CircuitBuilder, + zeta: ExtensionTarget, + g: F, + num_challenges: usize, + ) -> FriInstanceInfoTarget { + let no_blinding_oracle = FriOracleInfo { blinding: false }; + let trace_info = FriPolynomialInfo::from_range(0, 0..Self::COLUMNS); + let quotient_info = + FriPolynomialInfo::from_range(1, 0..self.quotient_degree_factor() * num_challenges); + let zeta_batch = FriBatchInfoTarget { + point: zeta, + polynomials: [trace_info.clone(), quotient_info].concat(), + }; + let zeta_right = builder.mul_const_extension(g, zeta); + let zeta_right_batch = FriBatchInfoTarget { + point: zeta_right, + polynomials: trace_info, + }; + FriInstanceInfoTarget { + oracles: vec![no_blinding_oracle; 3], + batches: vec![zeta_batch, zeta_right_batch], + } + } } diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 91a51bed..f02a5619 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -27,7 +27,8 @@ where [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, { - let degree_bits = log2_strict(recover_degree(&proof_with_pis.proof, config)); + ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS); + let degree_bits = proof_with_pis.proof.recover_degree_bits(config); let challenges = proof_with_pis.get_challenges(config, degree_bits)?; verify_with_challenges(stark, proof_with_pis, challenges, degree_bits, config) } @@ -110,8 +111,7 @@ where verify_fri_proof::( &stark.fri_instance( challenges.stark_zeta, - F::primitive_root_of_unity(degree_bits).into(), - config.fri_config.rate_bits, + F::primitive_root_of_unity(degree_bits), config.num_challenges, ), &proof.openings.to_fri_openings(), @@ -137,17 +137,6 @@ fn eval_l_1_and_l_last(log_n: usize, x: F) -> (F, F) { } /// Recover the length of the trace from a STARK proof and a STARK config. 
-fn recover_degree, C: GenericConfig, const D: usize>( - proof: &StarkProof, - config: &StarkConfig, -) -> usize { - let initial_merkle_proof = &proof.opening_proof.query_round_proofs[0] - .initial_trees_proof - .evals_proofs[0] - .1; - let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len(); - 1 << (lde_bits - config.fri_config.rate_bits) -} #[cfg(test)] mod tests {