Mirror of https://github.com/logos-storage/plonky2.git, synced 2026-01-10 01:33:07 +00:00
Compiles
This commit is contained in: parent a2dbcf2ff1, commit ba63a37b7d
@@ -89,9 +89,9 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
         commit_phase_merkle_caps: &[MerkleCapTarget],
         final_poly: &PolynomialCoeffsExtTarget<D>,
         pow_witness: Target,
-        inner_common_data: &CommonCircuitData<F, C, D>,
+        inner_fri_config: &FriConfig,
     ) -> FriChallengesTarget<D> {
-        let num_fri_queries = inner_common_data.config.fri_config.num_query_rounds;
+        let num_fri_queries = inner_fri_config.num_query_rounds;
         // Scaling factor to combine polynomials.
         let fri_alpha = self.get_extension_challenge(builder);

@@ -166,7 +166,7 @@ pub struct RecursiveChallenger<F: RichField + Extendable<D>, H: AlgebraicHasher<
 impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
     RecursiveChallenger<F, H, D>
 {
-    pub(crate) fn new(builder: &mut CircuitBuilder<F, D>) -> Self {
+    pub fn new(builder: &mut CircuitBuilder<F, D>) -> Self {
         let zero = builder.zero();
         RecursiveChallenger {
             sponge_state: [zero; SPONGE_WIDTH],
@@ -222,7 +222,7 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
             .expect("Output buffer should be non-empty")
     }

-    pub(crate) fn get_n_challenges(
+    pub fn get_n_challenges(
         &mut self,
         builder: &mut CircuitBuilder<F, D>,
         n: usize,
@@ -275,7 +275,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
                 commit_phase_merkle_caps,
                 final_poly,
                 pow_witness,
-                inner_common_data,
+                &inner_common_data.config.fri_config,
             ),
         }
     }
@@ -80,10 +80,10 @@ impl<P: PackedField> ConstraintConsumer<P> {

 pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> {
     /// A random value used to combine multiple constraints into one.
-    alpha: Target,
+    alphas: Vec<Target>,

     /// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
-    constraint_acc: ExtensionTarget<D>,
+    constraint_accs: Vec<ExtensionTarget<D>>,

     /// The evaluation of `X - g^(n-1)`.
     z_last: ExtensionTarget<D>,
@@ -100,6 +100,27 @@ pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: us
 }

 impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> {
+    pub fn new(
+        zero: ExtensionTarget<D>,
+        alphas: Vec<Target>,
+        z_last: ExtensionTarget<D>,
+        lagrange_basis_first: ExtensionTarget<D>,
+        lagrange_basis_last: ExtensionTarget<D>,
+    ) -> Self {
+        Self {
+            constraint_accs: vec![zero; alphas.len()],
+            alphas,
+            z_last,
+            lagrange_basis_first,
+            lagrange_basis_last,
+            _phantom: Default::default(),
+        }
+    }
+
+    pub fn accumulators(self) -> Vec<ExtensionTarget<D>> {
+        self.constraint_accs
+    }
+
     /// Add one constraint valid on all rows except the last.
     pub fn constraint(
         &mut self,
@@ -116,8 +137,9 @@ impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F
         builder: &mut CircuitBuilder<F, D>,
         constraint: ExtensionTarget<D>,
     ) {
-        self.constraint_acc =
-            builder.scalar_mul_add_extension(self.alpha, self.constraint_acc, constraint);
+        for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) {
+            *acc = builder.scalar_mul_add_extension(alpha, *acc, constraint);
+        }
     }

     /// Add one constraint, but first multiply it by a filter such that it will only apply to the
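A side note on the change above: the consumer now keeps one running sum per challenge, each folded as `acc = alpha * acc + constraint`. Below is a minimal standalone sketch of that folding using plain `u128` arithmetic modulo the Goldilocks prime instead of circuit targets; the names and the toy field are illustrative, not the plonky2 API.

```rust
// Running-sum folding as in RecursiveConstraintConsumer::constraint,
// one accumulator per challenge alpha. Toy modular arithmetic, illustration only.
const P: u128 = 0xFFFF_FFFF_0000_0001; // Goldilocks prime

fn scalar_mul_add(alpha: u128, acc: u128, c: u128) -> u128 {
    (alpha * acc + c) % P
}

fn fold_constraints(alphas: &[u128], constraints: &[u128]) -> Vec<u128> {
    let mut accs = vec![0u128; alphas.len()];
    for &c in constraints {
        for (acc, &alpha) in accs.iter_mut().zip(alphas) {
            *acc = scalar_mul_add(alpha, *acc, c);
        }
    }
    accs
}

fn main() {
    // After folding c0, c1, c2 each accumulator holds alpha^2*c0 + alpha*c1 + c2 (mod P).
    let accs = fold_constraints(&[7, 11], &[3, 5, 9]);
    assert_eq!(accs[0], 7 * 7 * 3 + 7 * 5 + 9);
    println!("{accs:?}");
}
```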
@@ -1,14 +1,21 @@
 use anyhow::Result;
 use plonky2::field::extension_field::Extendable;
 use plonky2::field::polynomial::PolynomialCoeffs;
-use plonky2::fri::proof::FriProof;
-use plonky2::hash::hash_types::RichField;
+use plonky2::fri::proof::{FriProof, FriProofTarget};
+use plonky2::gadgets::polynomial::PolynomialCoeffsExtTarget;
+use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
 use plonky2::hash::merkle_tree::MerkleCap;
-use plonky2::iop::challenger::Challenger;
-use plonky2::plonk::config::GenericConfig;
+use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
+use plonky2::iop::target::Target;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};

 use crate::config::StarkConfig;
-use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
+use crate::proof::{
+    StarkOpeningSet, StarkOpeningSetTarget, StarkProof, StarkProofChallenges,
+    StarkProofChallengesTarget, StarkProofTarget, StarkProofWithPublicInputs,
+    StarkProofWithPublicInputsTarget,
+};

 #[allow(clippy::too_many_arguments)]
 fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
@@ -94,6 +101,88 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
     }
 }

+pub(crate) fn get_challenges_target<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    const D: usize,
+>(
+    builder: &mut CircuitBuilder<F, D>,
+    trace_cap: &MerkleCapTarget,
+    quotient_polys_cap: &MerkleCapTarget,
+    openings: &StarkOpeningSetTarget<D>,
+    commit_phase_merkle_caps: &[MerkleCapTarget],
+    final_poly: &PolynomialCoeffsExtTarget<D>,
+    pow_witness: Target,
+    config: &StarkConfig,
+    degree_bits: usize,
+) -> StarkProofChallengesTarget<D>
+where
+    C::Hasher: AlgebraicHasher<F>,
+{
+    let num_challenges = config.num_challenges;
+    let num_fri_queries = config.fri_config.num_query_rounds;
+    let lde_size = 1 << (degree_bits + config.fri_config.rate_bits);
+
+    let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
+
+    challenger.observe_cap(trace_cap);
+    let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
+
+    challenger.observe_cap(quotient_polys_cap);
+    let stark_zeta = challenger.get_extension_challenge(builder);
+
+    challenger.observe_openings(&openings.to_fri_openings());
+
+    StarkProofChallengesTarget {
+        stark_alphas,
+        stark_zeta,
+        fri_challenges: challenger.fri_challenges::<C>(
+            builder,
+            commit_phase_merkle_caps,
+            final_poly,
+            pow_witness,
+            &config.fri_config,
+        ),
+    }
+}
+
+impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
+    pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        config: &StarkConfig,
+        degree_bits: usize,
+    ) -> StarkProofChallengesTarget<D>
+    where
+        C::Hasher: AlgebraicHasher<F>,
+    {
+        let StarkProofTarget {
+            trace_cap,
+            quotient_polys_cap,
+            openings,
+            opening_proof:
+                FriProofTarget {
+                    commit_phase_merkle_caps,
+                    final_poly,
+                    pow_witness,
+                    ..
+                },
+        } = &self.proof;
+
+        get_challenges_target::<F, C, D>(
+            builder,
+            trace_cap,
+            quotient_polys_cap,
+            openings,
+            commit_phase_merkle_caps,
+            final_poly,
+            *pow_witness,
+            config,
+            degree_bits,
+        )
+    }
+}
+
 // TODO: Deal with the compressed stuff.
 // impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
 //     CompressedProofWithPublicInputs<F, C, D>
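The function added above fixes the Fiat-Shamir order for the recursive verifier: observe the trace cap, squeeze the constraint-combination challenges, observe the quotient cap, squeeze `zeta`, observe the openings, then derive the FRI challenges. The toy transcript below only illustrates that ordering; the real challenger is a Poseidon-based duplex sponge over field targets, and `Transcript`, its hashing, and the dummy observed values here are made up for this sketch.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Illustrative transcript: everything observed so far determines the next challenge.
struct Transcript(Vec<u64>);

impl Transcript {
    fn observe(&mut self, data: &[u64]) {
        self.0.extend_from_slice(data);
    }
    fn challenge(&mut self) -> u64 {
        let mut h = DefaultHasher::new();
        self.0.hash(&mut h);
        let c = h.finish();
        self.0.push(c); // chain the challenge back in, like squeezing a sponge
        c
    }
}

fn main() {
    let mut t = Transcript(Vec::new());
    t.observe(&[1, 2, 3]); // trace Merkle cap
    let stark_alphas: Vec<u64> = (0..2).map(|_| t.challenge()).collect();
    t.observe(&[4, 5, 6]); // quotient polynomials cap
    let stark_zeta = t.challenge();
    t.observe(&[7, 8]); // polynomial openings
    let fri_alpha = t.challenge(); // first of the FRI challenges
    println!("{stark_alphas:?} {stark_zeta} {fri_alpha}");
}
```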
@@ -1,7 +1,11 @@
-use plonky2::field::extension_field::Extendable;
+use plonky2::field::extension_field::{Extendable, FieldExtension};
 use plonky2::fri::oracle::PolynomialBatch;
-use plonky2::fri::proof::{CompressedFriProof, FriChallenges, FriProof, FriProofTarget};
-use plonky2::fri::structure::{FriOpeningBatch, FriOpenings};
+use plonky2::fri::proof::{
+    CompressedFriProof, FriChallenges, FriChallengesTarget, FriProof, FriProofTarget,
+};
+use plonky2::fri::structure::{
+    FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget,
+};
 use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
 use plonky2::hash::merkle_tree::MerkleCap;
 use plonky2::iop::ext_target::ExtensionTarget;
@@ -9,6 +13,8 @@ use plonky2::iop::target::Target;
 use plonky2::plonk::config::GenericConfig;
 use rayon::prelude::*;

+use crate::config::StarkConfig;
+
 pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
     /// Merkle cap of LDEs of trace values.
     pub trace_cap: MerkleCap<F, C::Hasher>,
@@ -20,6 +26,17 @@ pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>,
     pub opening_proof: FriProof<F, C::Hasher, D>,
 }

+impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> StarkProof<F, C, D> {
+    pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
+        let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
+            .initial_trees_proof
+            .evals_proofs[0]
+            .1;
+        let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
+        1 << (lde_bits - config.fri_config.rate_bits)
+    }
+}
+
 pub struct StarkProofTarget<const D: usize> {
     pub trace_cap: MerkleCapTarget,
     pub quotient_polys_cap: MerkleCapTarget,
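The arithmetic behind `recover_degree_bits` above: a Merkle cap of height `cap_height` over a tree with `2^lde_bits` leaves leaves `lde_bits - cap_height` siblings in each query path, and the LDE is `2^rate_bits` times longer than the trace. A small standalone sketch of the same computation follows; the function and variable names are illustrative, not the plonky2 API.

```rust
/// Recover the trace length from one FRI query path, as in `recover_degree_bits`.
fn recover_degree(cap_height: usize, siblings_len: usize, rate_bits: usize) -> usize {
    let lde_bits = cap_height + siblings_len; // log2 of the LDE length
    1 << (lde_bits - rate_bits) // trace length = LDE length / 2^rate_bits
}

fn main() {
    // Cap height 4, 16 siblings in the Merkle proof, rate 1/4 (rate_bits = 2):
    assert_eq!(recover_degree(4, 16, 2), 1 << 18);
}
```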
@@ -27,6 +44,17 @@ pub struct StarkProofTarget<const D: usize> {
     pub opening_proof: FriProofTarget<D>,
 }

+impl<const D: usize> StarkProofTarget<D> {
+    pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
+        let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
+            .initial_trees_proof
+            .evals_proofs[0]
+            .1;
+        let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
+        1 << (lde_bits - config.fri_config.rate_bits)
+    }
+}
+
 pub struct StarkProofWithPublicInputs<
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
@@ -74,6 +102,12 @@ pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: us
     pub fri_challenges: FriChallenges<F, D>,
 }

+pub(crate) struct StarkProofChallengesTarget<const D: usize> {
+    pub stark_alphas: Vec<Target>,
+    pub stark_zeta: ExtensionTarget<D>,
+    pub fri_challenges: FriChallengesTarget<D>,
+}
+
 /// Purported values of each polynomial at the challenge point.
 pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
     pub local_values: Vec<F::Extension>,
@@ -86,7 +120,7 @@ pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
 impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
     pub fn new<C: GenericConfig<D, F = F>>(
         zeta: F::Extension,
-        g: F::Extension,
+        g: F,
         trace_commitment: &PolynomialBatch<F, C, D>,
         quotient_commitment: &PolynomialBatch<F, C, D>,
     ) -> Self {
@@ -98,7 +132,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
         };
         Self {
             local_values: eval_commitment(zeta, trace_commitment),
-            next_values: eval_commitment(zeta * g, trace_commitment),
+            next_values: eval_commitment(zeta.scalar_mul(g), trace_commitment),
             permutation_zs: vec![/*TODO*/],
             permutation_zs_right: vec![/*TODO*/],
             quotient_polys: eval_commitment(zeta, quotient_commitment),
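On the `zeta * g` to `zeta.scalar_mul(g)` change above: once `g` is a base-field element rather than an extension element, multiplying `zeta` by it just scales each coefficient of the extension element. A toy sketch with a degree-2 extension stored as two `u64` coefficients; the `Ext2` type and the modulus here are illustrative, not plonky2's field types.

```rust
const P: u64 = 0xFFFF_FFFF_0000_0001; // Goldilocks prime, illustration only

#[derive(Clone, Copy, Debug, PartialEq)]
struct Ext2(u64, u64); // a0 + a1 * X

impl Ext2 {
    /// Multiply by a base-field scalar: scale each coefficient.
    fn scalar_mul(self, g: u64) -> Ext2 {
        let m = |a: u64| ((a as u128 * g as u128) % P as u128) as u64;
        Ext2(m(self.0), m(self.1))
    }
}

fn main() {
    let zeta = Ext2(3, 5);
    assert_eq!(zeta.scalar_mul(7), Ext2(21, 35));
}
```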
@@ -134,3 +168,26 @@ pub struct StarkOpeningSetTarget<const D: usize> {
     pub permutation_zs_right: Vec<ExtensionTarget<D>>,
     pub quotient_polys: Vec<ExtensionTarget<D>>,
 }
+
+impl<const D: usize> StarkOpeningSetTarget<D> {
+    pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget<D> {
+        let zeta_batch = FriOpeningBatchTarget {
+            values: [
+                self.local_values.as_slice(),
+                self.quotient_polys.as_slice(),
+                self.permutation_zs.as_slice(),
+            ]
+            .concat(),
+        };
+        let zeta_right_batch = FriOpeningBatchTarget {
+            values: [
+                self.next_values.as_slice(),
+                self.permutation_zs_right.as_slice(),
+            ]
+            .concat(),
+        };
+        FriOpeningsTarget {
+            batches: vec![zeta_batch, zeta_right_batch],
+        }
+    }
+}
@@ -106,7 +106,7 @@ where
     // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
     // `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since
     // `(g * zeta)^n = zeta^n`, where `n` is the order of `g`.
-    let g = F::Extension::primitive_root_of_unity(degree_bits);
+    let g = F::primitive_root_of_unity(degree_bits);
     ensure!(
         zeta.exp_power_of_2(degree_bits) != F::Extension::ONE,
         "Opening point is in the subgroup."
@@ -122,7 +122,7 @@ where
         timing,
         "compute openings proof",
         PolynomialBatch::prove_openings(
-            &stark.fri_instance(zeta, g, rate_bits, config.num_challenges),
+            &stark.fri_instance(zeta, g, config.num_challenges),
             initial_merkle_trees,
             &mut challenger,
             &fri_params,
@@ -1,195 +1,210 @@
 use plonky2::field::extension_field::Extendable;
+use plonky2::field::field_types::Field;
 use plonky2::hash::hash_types::RichField;
+use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
-use plonky2::plonk::config::GenericConfig;
-use crate::config::StarkConfig;
-use crate::proof::StarkProofWithPublicInputsTarget;
-use crate::stark::Stark;
+use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
+use plonky2::util::reducing::ReducingFactorTarget;
+use plonky2::with_context;

-pub fn verify_stark_proof< F: RichField + Extendable<D>,
+use crate::config::StarkConfig;
+use crate::constraint_consumer::RecursiveConstraintConsumer;
+use crate::proof::{
+    StarkOpeningSetTarget, StarkProofChallengesTarget, StarkProofWithPublicInputsTarget,
+};
+use crate::stark::Stark;
+use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
+
+pub fn verify_stark_proof<
+    F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
     S: Stark<F, D>,
     const D: usize,
->(
-    builder: &mut CircuitBuilder<F, D>,
-    stark: S,
-    proof_with_pis: StarkProofWithPublicInputsTarget<D>,
-    inner_config: &StarkConfig
-)
-{
-    let StarkProofWithPublicInputsTarget {
-        proof,
-        public_inputs,
-    } = proof_with_pis;
+>(
+    builder: &mut CircuitBuilder<F, D>,
+    stark: S,
+    proof_with_pis: StarkProofWithPublicInputsTarget<D>,
+    inner_config: &StarkConfig,
+) where
+    C::Hasher: AlgebraicHasher<F>,
+    [(); { S::COLUMNS }]:,
+    [(); { S::PUBLIC_INPUTS }]:,
+{
+    assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS);
+    let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config);
+    let challenges = proof_with_pis.get_challenges::<F, C>(builder, inner_config, degree_bits);

-    assert_eq!(public_inputs.len(), inner_common_data.num_public_inputs);
-    let public_inputs_hash = self.hash_n_to_hash_no_pad::<C::InnerHasher>(public_inputs);
-
-    self.verify_proof(
-        proof,
-        public_inputs_hash,
-        inner_verifier_data,
-        inner_common_data,
-    );
-}
-
-/// Recursively verifies an inner proof.
-pub fn verify_proof<C: GenericConfig<D, F = F>>(
-    &mut self,
-    proof: ProofTarget<D>,
-    public_inputs_hash: HashOutTarget,
-    inner_verifier_data: &VerifierCircuitTarget,
-    inner_common_data: &CommonCircuitData<F, C, D>,
-) where
-    C::Hasher: AlgebraicHasher<F>,
-{
-    let one = self.one_extension();
-
-    let num_challenges = inner_common_data.config.num_challenges;
-
-    let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(self);
-
-    let (betas, gammas, alphas, zeta) =
-        with_context!(self, "observe proof and generates challenges", {
-            // Observe the instance.
-            let digest = HashOutTarget::from_vec(
-                self.constants(&inner_common_data.circuit_digest.elements),
-            );
-            challenger.observe_hash(&digest);
-            challenger.observe_hash(&public_inputs_hash);
-
-            challenger.observe_cap(&proof.wires_cap);
-            let betas = challenger.get_n_challenges(self, num_challenges);
-            let gammas = challenger.get_n_challenges(self, num_challenges);
-
-            challenger.observe_cap(&proof.plonk_zs_partial_products_cap);
-            let alphas = challenger.get_n_challenges(self, num_challenges);
-
-            challenger.observe_cap(&proof.quotient_polys_cap);
-            let zeta = challenger.get_extension_challenge(self);
-
-            (betas, gammas, alphas, zeta)
-        });
-
-    let local_constants = &proof.openings.constants;
-    let local_wires = &proof.openings.wires;
-    let vars = EvaluationTargets {
-        local_constants,
-        local_wires,
-        public_inputs_hash: &public_inputs_hash,
-    };
-    let local_zs = &proof.openings.plonk_zs;
-    let next_zs = &proof.openings.plonk_zs_right;
-    let s_sigmas = &proof.openings.plonk_sigmas;
-    let partial_products = &proof.openings.partial_products;
-
-    let zeta_pow_deg = self.exp_power_of_2_extension(zeta, inner_common_data.degree_bits);
-    let vanishing_polys_zeta = with_context!(
-        self,
-        "evaluate the vanishing polynomial at our challenge point, zeta.",
-        eval_vanishing_poly_recursively(
-            self,
-            inner_common_data,
-            zeta,
-            zeta_pow_deg,
-            vars,
-            local_zs,
-            next_zs,
-            partial_products,
-            s_sigmas,
-            &betas,
-            &gammas,
-            &alphas,
-        )
-    );
-
-    with_context!(self, "check vanishing and quotient polynomials.", {
-        let quotient_polys_zeta = &proof.openings.quotient_polys;
-        let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
-        let z_h_zeta = self.sub_extension(zeta_pow_deg, one);
-        for (i, chunk) in quotient_polys_zeta
-            .chunks(inner_common_data.quotient_degree_factor)
-            .enumerate()
-        {
-            let recombined_quotient = scale.reduce(chunk, self);
-            let computed_vanishing_poly = self.mul_extension(z_h_zeta, recombined_quotient);
-            self.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly);
-        }
-    });
-
-    let merkle_caps = &[
-        inner_verifier_data.constants_sigmas_cap.clone(),
-        proof.wires_cap,
-        proof.plonk_zs_partial_products_cap,
-        proof.quotient_polys_cap,
-    ];
-
-    let fri_instance = inner_common_data.get_fri_instance_target(self, zeta);
-    with_context!(
-        self,
-        "verify FRI proof",
-        self.verify_fri_proof::<C>(
-            &fri_instance,
-            &proof.openings,
-            merkle_caps,
-            &proof.opening_proof,
-            &mut challenger,
-            &inner_common_data.fri_params,
-        )
-    );
-}
-
-pub fn add_virtual_proof_with_pis<InnerC: GenericConfig<D, F = F>>(
-    &mut self,
-    common_data: &CommonCircuitData<F, InnerC, D>,
-) -> ProofWithPublicInputsTarget<D> {
-    let proof = self.add_virtual_proof(common_data);
-    let public_inputs = self.add_virtual_targets(common_data.num_public_inputs);
-    ProofWithPublicInputsTarget {
-        proof,
-        public_inputs,
-    }
-}
-
-fn add_virtual_proof<InnerC: GenericConfig<D, F = F>>(
-    &mut self,
-    common_data: &CommonCircuitData<F, InnerC, D>,
-) -> ProofTarget<D> {
-    let config = &common_data.config;
-    let fri_params = &common_data.fri_params;
-    let cap_height = fri_params.config.cap_height;
-
-    let num_leaves_per_oracle = &[
-        common_data.num_preprocessed_polys(),
-        config.num_wires,
-        common_data.num_zs_partial_products_polys(),
-        common_data.num_quotient_polys(),
-    ];
-
-    ProofTarget {
-        wires_cap: self.add_virtual_cap(cap_height),
-        plonk_zs_partial_products_cap: self.add_virtual_cap(cap_height),
-        quotient_polys_cap: self.add_virtual_cap(cap_height),
-        openings: self.add_opening_set(common_data),
-        opening_proof: self.add_virtual_fri_proof(num_leaves_per_oracle, fri_params),
-    }
-}
-
-fn add_opening_set<InnerC: GenericConfig<D, F = F>>(
-    &mut self,
-    common_data: &CommonCircuitData<F, InnerC, D>,
-) -> OpeningSetTarget<D> {
-    let config = &common_data.config;
-    let num_challenges = config.num_challenges;
-    let total_partial_products = num_challenges * common_data.num_partial_products;
-    OpeningSetTarget {
-        constants: self.add_virtual_extension_targets(common_data.num_constants),
-        plonk_sigmas: self.add_virtual_extension_targets(config.num_routed_wires),
-        wires: self.add_virtual_extension_targets(config.num_wires),
-        plonk_zs: self.add_virtual_extension_targets(num_challenges),
-        plonk_zs_right: self.add_virtual_extension_targets(num_challenges),
-        partial_products: self.add_virtual_extension_targets(total_partial_products),
-        quotient_polys: self.add_virtual_extension_targets(common_data.num_quotient_polys()),
-    }
-}
+    verify_stark_proof_with_challenges::<F, C, S, D>(
+        builder,
+        stark,
+        proof_with_pis,
+        challenges,
+        inner_config,
+        degree_bits,
+    );
+}
+
+/// Recursively verifies an inner proof.
+fn verify_stark_proof_with_challenges<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+    const D: usize,
+>(
+    builder: &mut CircuitBuilder<F, D>,
+    stark: S,
+    proof_with_pis: StarkProofWithPublicInputsTarget<D>,
+    challenges: StarkProofChallengesTarget<D>,
+    inner_config: &StarkConfig,
+    degree_bits: usize,
+) where
+    C::Hasher: AlgebraicHasher<F>,
+    [(); { S::COLUMNS }]:,
+    [(); { S::PUBLIC_INPUTS }]:,
+{
+    let one = builder.one_extension();
+
+    let StarkProofWithPublicInputsTarget {
+        proof,
+        public_inputs,
+    } = proof_with_pis;
+    let local_values = &proof.openings.local_values;
+    let next_values = &proof.openings.local_values;
+    let StarkOpeningSetTarget {
+        local_values,
+        next_values,
+        permutation_zs,
+        permutation_zs_right,
+        quotient_polys,
+    } = &proof.openings;
+    let vars = StarkEvaluationTargets {
+        local_values: &local_values.to_vec().try_into().unwrap(),
+        next_values: &next_values.to_vec().try_into().unwrap(),
+        public_inputs: &public_inputs
+            .into_iter()
+            .map(|t| builder.convert_to_ext(t))
+            .collect::<Vec<_>>()
+            .try_into()
+            .unwrap(),
+    };
+    let (l_1, l_last) =
+        eval_l_1_and_l_last_recursively(builder, degree_bits, challenges.stark_zeta);
+    let last =
+        builder.constant_extension(F::Extension::primitive_root_of_unity(degree_bits).inverse());
+    let z_last = builder.sub_extension(challenges.stark_zeta, last);
+    let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
+        builder.zero_extension(),
+        challenges.stark_alphas,
+        z_last,
+        l_1,
+        l_last,
+    );
+    stark.eval_ext_recursively(builder, vars, &mut consumer);
+    let vanishing_polys_zeta = consumer.accumulators();
+
+    // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
+    let quotient_polys_zeta = &proof.openings.quotient_polys;
+    let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits);
+    let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
+    let z_h_zeta = builder.sub_extension(zeta_pow_deg, one);
+    for (i, chunk) in quotient_polys_zeta
+        .chunks(stark.quotient_degree_factor())
+        .enumerate()
+    {
+        let recombined_quotient = scale.reduce(chunk, builder);
+        let computed_vanishing_poly = builder.mul_extension(z_h_zeta, recombined_quotient);
+        builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly);
+    }
+
+    // TODO: Permutation polynomials.
+    let merkle_caps = &[proof.trace_cap, proof.quotient_polys_cap];
+
+    let fri_instance = stark.fri_instance_target(
+        builder,
+        challenges.stark_zeta,
+        F::primitive_root_of_unity(degree_bits),
+        inner_config.num_challenges,
+    );
+    builder.verify_fri_proof::<C>(
+        &fri_instance,
+        &proof.openings.to_fri_openings(),
+        &challenges.fri_challenges,
+        merkle_caps,
+        &proof.opening_proof,
+        &inner_config.fri_params(degree_bits),
+    );
+}
+
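For reference, the identity enforced by the quotient check above can be written per challenge index $i$ (a restatement under the usual splitting of each quotient polynomial into `quotient_degree_factor` chunks of degree below $n = 2^{\text{degree\_bits}}$, not text from the commit):

$$\mathrm{vanishing}_i(\zeta) \;=\; Z_H(\zeta)\cdot\sum_{j}\zeta^{\,nj}\,q_{i,j}(\zeta), \qquad Z_H(\zeta)=\zeta^{\,n}-1.$$

Here `zeta_pow_deg` holds $\zeta^{\,n}$, `z_h_zeta` holds $Z_H(\zeta)$, and `scale.reduce` recombines the chunk openings with powers of $\zeta^{\,n}$.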
+fn eval_l_1_and_l_last_recursively<F: RichField + Extendable<D>, const D: usize>(
+    builder: &mut CircuitBuilder<F, D>,
+    log_n: usize,
+    x: ExtensionTarget<D>,
+) -> (ExtensionTarget<D>, ExtensionTarget<D>) {
+    let n = builder.constant_extension(F::Extension::from_canonical_usize(1 << log_n));
+    let g = builder.constant_extension(F::Extension::primitive_root_of_unity(log_n));
+    let x_pow_n = builder.exp_power_of_2_extension(x, log_n);
+    let one = builder.one_extension();
+    let z_x = builder.sub_extension(x_pow_n, one);
+    let l_1_deno = builder.mul_sub_extension(n, x, n);
+    let l_last_deno = builder.mul_sub_extension(g, x, one);
+    let l_last_deno = builder.mul_extension(n, l_last_deno);
+
+    (
+        builder.div_extension(z_x, l_1_deno),
+        builder.div_extension(z_x, l_last_deno),
+    )
+}
+
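A note on the two values computed by `eval_l_1_and_l_last_recursively` (standard Lagrange-basis formulas, assuming the normalization $L_i(x) = g^i\,(x^n-1)/(n\,(x-g^i))$; not part of the commit): with $n = 2^{\log_2 n}$ and $g$ a primitive $n$-th root of unity, the basis polynomials at the first point $g^0 = 1$ and the last point $g^{n-1} = g^{-1}$ are

$$L_{\text{first}}(x)=\frac{x^n-1}{n\,(x-1)}, \qquad L_{\text{last}}(x)=\frac{x^n-1}{n\,(g\,x-1)},$$

which is why the code builds the denominators as `n * x - n` and `n * (g * x - 1)`.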
+// pub fn add_virtual_proof_with_pis<InnerC: GenericConfig<D, F = F>>(
+//     &mut self,
+//     common_data: &CommonCircuitData<F, InnerC, D>,
+// ) -> ProofWithPublicInputsTarget<D> {
+//     let proof = self.add_virtual_proof(common_data);
+//     let public_inputs = self.add_virtual_targets(common_data.num_public_inputs);
+//     ProofWithPublicInputsTarget {
+//         proof,
+//         public_inputs,
+//     }
+// }
+//
+// fn add_virtual_proof<InnerC: GenericConfig<D, F = F>>(
+//     &mut self,
+//     common_data: &CommonCircuitData<F, InnerC, D>,
+// ) -> ProofTarget<D> {
+//     let config = &common_data.config;
+//     let fri_params = &common_data.fri_params;
+//     let cap_height = fri_params.config.cap_height;
+//
+//     let num_leaves_per_oracle = &[
+//         common_data.num_preprocessed_polys(),
+//         config.num_wires,
+//         common_data.num_zs_partial_products_polys(),
+//         common_data.num_quotient_polys(),
+//     ];
+//
+//     ProofTarget {
+//         wires_cap: self.add_virtual_cap(cap_height),
+//         plonk_zs_partial_products_cap: self.add_virtual_cap(cap_height),
+//         quotient_polys_cap: self.add_virtual_cap(cap_height),
+//         openings: self.add_opening_set(common_data),
+//         opening_proof: self.add_virtual_fri_proof(num_leaves_per_oracle, fri_params),
+//     }
+// }
+//
+// fn add_opening_set<InnerC: GenericConfig<D, F = F>>(
+//     &mut self,
+//     common_data: &CommonCircuitData<F, InnerC, D>,
+// ) -> OpeningSetTarget<D> {
+//     let config = &common_data.config;
+//     let num_challenges = config.num_challenges;
+//     let total_partial_products = num_challenges * common_data.num_partial_products;
+//     OpeningSetTarget {
+//         constants: self.add_virtual_extension_targets(common_data.num_constants),
+//         plonk_sigmas: self.add_virtual_extension_targets(config.num_routed_wires),
+//         wires: self.add_virtual_extension_targets(config.num_wires),
+//         plonk_zs: self.add_virtual_extension_targets(num_challenges),
+//         plonk_zs_right: self.add_virtual_extension_targets(num_challenges),
+//         partial_products: self.add_virtual_extension_targets(total_partial_products),
+//         quotient_polys: self.add_virtual_extension_targets(common_data.num_quotient_polys()),
+//     }
+// }
@@ -1,7 +1,11 @@
 use plonky2::field::extension_field::{Extendable, FieldExtension};
 use plonky2::field::packed_field::PackedField;
-use plonky2::fri::structure::{FriBatchInfo, FriInstanceInfo, FriOracleInfo, FriPolynomialInfo};
+use plonky2::fri::structure::{
+    FriBatchInfo, FriBatchInfoTarget, FriInstanceInfo, FriInstanceInfoTarget, FriOracleInfo,
+    FriPolynomialInfo,
+};
 use plonky2::hash::hash_types::RichField;
+use plonky2::iop::ext_target::ExtensionTarget;
 use plonky2::plonk::circuit_builder::CircuitBuilder;

 use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
@@ -75,8 +79,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
     fn fri_instance(
         &self,
         zeta: F::Extension,
-        g: F::Extension,
-        rate_bits: usize,
+        g: F,
         num_challenges: usize,
     ) -> FriInstanceInfo<F, D> {
         let no_blinding_oracle = FriOracleInfo { blinding: false };
@@ -88,7 +91,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
             polynomials: [trace_info.clone(), quotient_info].concat(),
         };
         let zeta_right_batch = FriBatchInfo::<F, D> {
-            point: zeta * g,
+            point: zeta.scalar_mul(g),
             polynomials: trace_info,
         };
         FriInstanceInfo {
@@ -96,4 +99,32 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
             batches: vec![zeta_batch],
         }
     }
+
+    /// Computes the FRI instance used to prove this Stark.
+    // TODO: Permutation polynomials.
+    fn fri_instance_target(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        zeta: ExtensionTarget<D>,
+        g: F,
+        num_challenges: usize,
+    ) -> FriInstanceInfoTarget<D> {
+        let no_blinding_oracle = FriOracleInfo { blinding: false };
+        let trace_info = FriPolynomialInfo::from_range(0, 0..Self::COLUMNS);
+        let quotient_info =
+            FriPolynomialInfo::from_range(1, 0..self.quotient_degree_factor() * num_challenges);
+        let zeta_batch = FriBatchInfoTarget {
+            point: zeta,
+            polynomials: [trace_info.clone(), quotient_info].concat(),
+        };
+        let zeta_right = builder.mul_const_extension(g, zeta);
+        let zeta_right_batch = FriBatchInfoTarget {
+            point: zeta_right,
+            polynomials: trace_info,
+        };
+        FriInstanceInfoTarget {
+            oracles: vec![no_blinding_oracle; 3],
+            batches: vec![zeta_batch],
+        }
+    }
 }
@@ -27,7 +27,8 @@ where
     [(); S::COLUMNS]:,
     [(); S::PUBLIC_INPUTS]:,
 {
-    let degree_bits = log2_strict(recover_degree(&proof_with_pis.proof, config));
+    ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS);
+    let degree_bits = proof_with_pis.proof.recover_degree_bits(config);
     let challenges = proof_with_pis.get_challenges(config, degree_bits)?;
     verify_with_challenges(stark, proof_with_pis, challenges, degree_bits, config)
 }
@@ -110,8 +111,7 @@ where
     verify_fri_proof::<F, C, D>(
         &stark.fri_instance(
             challenges.stark_zeta,
-            F::primitive_root_of_unity(degree_bits).into(),
-            config.fri_config.rate_bits,
+            F::primitive_root_of_unity(degree_bits),
             config.num_challenges,
         ),
         &proof.openings.to_fri_openings(),
@@ -137,17 +137,6 @@ fn eval_l_1_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
 }

-/// Recover the length of the trace from a STARK proof and a STARK config.
-fn recover_degree<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
-    proof: &StarkProof<F, C, D>,
-    config: &StarkConfig,
-) -> usize {
-    let initial_merkle_proof = &proof.opening_proof.query_round_proofs[0]
-        .initial_trees_proof
-        .evals_proofs[0]
-        .1;
-    let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
-    1 << (lde_bits - config.fri_config.rate_bits)
-}

 #[cfg(test)]
 mod tests {