Start of impl

wborgeaud 2022-05-20 11:21:13 +02:00
parent 7d7851aba5
commit 8e8e4daa9c
8 changed files with 232 additions and 85 deletions

View File

@@ -1,10 +1,12 @@
use anyhow::{ensure, Result};
use env_logger::Target;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::field_types::Field;
use plonky2::field::packed_field::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::config::GenericConfig;
use crate::all_stark::Table;

View File

@@ -1,6 +1,6 @@
use itertools::izip;
use plonky2::field::extension_field::Extendable;
use plonky2::fri::proof::FriProof;
use plonky2::fri::proof::{FriProof, FriProofTarget};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::plonk::circuit_builder::CircuitBuilder;
@@ -9,8 +9,8 @@ use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use crate::all_stark::AllStark;
use crate::config::StarkConfig;
use crate::permutation::{
get_grand_product_challenge_set, get_n_grand_product_challenge_sets,
get_n_permutation_challenge_sets_target,
get_grand_product_challenge_set, get_grand_product_challenge_set_target,
get_n_grand_product_challenge_sets, get_n_grand_product_challenge_sets_target,
};
use crate::proof::*;
use crate::stark::Stark;
@@ -46,6 +46,50 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
}
}
impl<const D: usize> AllProofTarget<D> {
pub(crate) fn get_challenges<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
>(
&self,
builder: &mut CircuitBuilder<F, D>,
all_stark: &AllStark<F, D>,
config: &StarkConfig,
) -> AllProofChallengesTarget<D>
where
C::Hasher: AlgebraicHasher<F>,
{
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
for proof in &self.stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
}
let ctl_challenges =
get_grand_product_challenge_set_target(builder, &mut challenger, config.num_challenges);
AllProofChallengesTarget {
stark_challenges: izip!(
&self.stark_proofs,
all_stark.nums_permutation_zs(config),
all_stark.permutation_batch_sizes()
)
.map(|(proof, num_perm, batch_size)| {
proof.get_challenges::<F, C>(
builder,
&mut challenger,
num_perm > 0,
batch_size,
config,
)
})
.collect(),
ctl_challenges,
}
}
}
impl<F, C, const D: usize> StarkProofWithPublicInputs<F, C, D>
where
F: RichField + Extendable<D>,
@@ -110,50 +154,94 @@ where
}
impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
pub(crate) fn get_challenges<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
>(
pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
&self,
builder: &mut CircuitBuilder<F, D>,
stark: &S,
challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
stark_use_permutation: bool,
stark_permutation_batch_size: usize,
config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
where
C::Hasher: AlgebraicHasher<F>,
{
let proof = &self.proof;
let opening_proof = &proof.opening_proof;
let degree_bits = self.proof.recover_degree_bits(config);
let StarkProofTarget {
permutation_ctl_zs_cap,
quotient_polys_cap,
openings,
opening_proof:
FriProofTarget {
commit_phase_merkle_caps,
final_poly,
pow_witness,
..
},
..
} = &self.proof;
let num_challenges = config.num_challenges;
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
challenger.observe_cap(&proof.trace_cap);
let permutation_challenge_sets =
proof.permutation_zs_cap.as_ref().map(|permutation_zs_cap| {
let tmp = get_n_permutation_challenge_sets_target(
builder,
&mut challenger,
num_challenges,
stark.permutation_batch_size(),
);
challenger.observe_cap(permutation_zs_cap);
tmp
});
let permutation_challenge_sets = stark_use_permutation.then(|| {
get_n_grand_product_challenge_sets_target(
builder,
challenger,
num_challenges,
stark_permutation_batch_size,
)
});
challenger.observe_cap(permutation_ctl_zs_cap);
let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
challenger.observe_cap(&proof.quotient_polys_cap);
challenger.observe_cap(quotient_polys_cap);
let stark_zeta = challenger.get_extension_challenge(builder);
challenger.observe_openings(&proof.openings.to_fri_openings());
challenger.observe_openings(&openings.to_fri_openings(builder.zero()));
StarkProofChallengesTarget {
permutation_challenge_sets,
stark_alphas,
stark_zeta,
fri_challenges: challenger.fri_challenges::<C>(
builder,
&opening_proof.commit_phase_merkle_caps,
&opening_proof.final_poly,
opening_proof.pow_witness,
commit_phase_merkle_caps,
final_poly,
*pow_witness,
&config.fri_config,
),
}
// let proof = &self.proof;
// let opening_proof = &proof.opening_proof;
// let num_challenges = config.num_challenges;
// let permutation_challenge_sets =
// proof.permutation_zs_cap.as_ref().map(|permutation_zs_cap| {
// let tmp = get_n_grand_product_challenge_sets_target(
// builder,
// &mut challenger,
// num_challenges,
// stark.permutation_batch_size(),
// );
// challenger.observe_cap(permutation_zs_cap);
// tmp
// });
// let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
// challenger.observe_cap(&proof.quotient_polys_cap);
// let stark_zeta = challenger.get_extension_challenge(builder);
// challenger.observe_openings(&proof.openings.to_fri_openings());
// StarkProofChallengesTarget {
// permutation_challenge_sets,
// stark_alphas,
// stark_zeta,
// fri_challenges: challenger.fri_challenges::<C>(
// builder,
// &opening_proof.commit_phase_merkle_caps,
// &opening_proof.final_poly,
// opening_proof.pow_witness,
// &config.fri_config,
// ),
// }
}
}

View File

@@ -190,7 +190,7 @@ pub(crate) fn get_n_grand_product_challenge_sets<F: RichField, H: Hasher<F>>(
.collect()
}
fn get_permutation_challenge_target<
fn get_grand_product_challenge_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
@@ -203,7 +203,7 @@ fn get_permutation_challenge_target<
GrandProductChallenge { beta, gamma }
}
fn get_permutation_challenge_set_target<
pub(crate) fn get_grand_product_challenge_set_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
@@ -213,12 +213,12 @@ fn get_permutation_challenge_set_target<
num_challenges: usize,
) -> GrandProductChallengeSet<Target> {
let challenges = (0..num_challenges)
.map(|_| get_permutation_challenge_target(builder, challenger))
.map(|_| get_grand_product_challenge_target(builder, challenger))
.collect();
GrandProductChallengeSet { challenges }
}
pub(crate) fn get_n_permutation_challenge_sets_target<
pub(crate) fn get_n_grand_product_challenge_sets_target<
F: RichField + Extendable<D>,
H: AlgebraicHasher<F>,
const D: usize,
@@ -229,7 +229,7 @@ pub(crate) fn get_n_permutation_challenge_sets_target<
num_sets: usize,
) -> Vec<GrandProductChallengeSet<Target>> {
(0..num_sets)
.map(|_| get_permutation_challenge_set_target(builder, challenger, num_challenges))
.map(|_| get_grand_product_challenge_set_target(builder, challenger, num_challenges))
.collect()
}

View File

@@ -27,6 +27,15 @@ pub(crate) struct AllProofChallenges<F: RichField + Extendable<D>, const D: usiz
pub ctl_challenges: GrandProductChallengeSet<F>,
}
pub struct AllProofTarget<const D: usize> {
pub stark_proofs: Vec<StarkProofWithPublicInputsTarget<D>>,
}
pub(crate) struct AllProofChallengesTarget<const D: usize> {
pub stark_challenges: Vec<StarkProofChallengesTarget<D>>,
pub ctl_challenges: GrandProductChallengeSet<Target>,
}
#[derive(Debug, Clone)]
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Merkle cap of LDEs of trace values.
@@ -55,7 +64,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> S
pub struct StarkProofTarget<const D: usize> {
pub trace_cap: MerkleCapTarget,
pub permutation_zs_cap: Option<MerkleCapTarget>,
pub permutation_ctl_zs_cap: MerkleCapTarget,
pub quotient_polys_cap: MerkleCapTarget,
pub openings: StarkOpeningSetTarget<D>,
pub opening_proof: FriProofTarget<D>,
@@ -203,38 +212,38 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
.copied()
.collect_vec(),
};
let mut batches = vec![zeta_batch, zeta_right_batch];
debug_assert!(!self.ctl_zs_last.is_empty());
let ctl_last_batch = FriOpeningBatch {
values: self
.ctl_zs_last
.iter()
.copied()
.map(F::Extension::from_basefield)
.collect(),
};
if !self.ctl_zs_last.is_empty() {
batches.push(FriOpeningBatch {
values: self
.ctl_zs_last
.iter()
.copied()
.map(F::Extension::from_basefield)
.collect(),
});
FriOpenings {
batches: vec![zeta_batch, zeta_right_batch, ctl_last_batch],
}
FriOpenings { batches }
}
}
pub struct StarkOpeningSetTarget<const D: usize> {
pub local_values: Vec<ExtensionTarget<D>>,
pub next_values: Vec<ExtensionTarget<D>>,
pub permutation_zs: Option<Vec<ExtensionTarget<D>>>,
pub permutation_zs_right: Option<Vec<ExtensionTarget<D>>>,
pub permutation_ctl_zs: Vec<ExtensionTarget<D>>,
pub permutation_ctl_zs_right: Vec<ExtensionTarget<D>>,
pub ctl_zs_last: Vec<Target>,
pub quotient_polys: Vec<ExtensionTarget<D>>,
}
impl<const D: usize> StarkOpeningSetTarget<D> {
pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget<D> {
pub(crate) fn to_fri_openings(&self, zero: Target) -> FriOpeningsTarget<D> {
let zeta_batch = FriOpeningBatchTarget {
values: self
.local_values
.iter()
.chain(self.permutation_zs.iter().flatten())
.chain(&self.permutation_ctl_zs)
.chain(&self.quotient_polys)
.copied()
.collect_vec(),
@@ -243,12 +252,22 @@ impl<const D: usize> StarkOpeningSetTarget<D> {
values: self
.next_values
.iter()
.chain(self.permutation_zs_right.iter().flatten())
.chain(&self.permutation_ctl_zs_right)
.copied()
.collect_vec(),
};
debug_assert!(!self.ctl_zs_last.is_empty());
let ctl_last_batch = FriOpeningBatchTarget {
values: self
.ctl_zs_last
.iter()
.copied()
.map(|t| t.to_ext_target(zero))
.collect(),
};
FriOpeningsTarget {
batches: vec![zeta_batch, zeta_right_batch],
batches: vec![zeta_batch, zeta_right_batch, ctl_last_batch],
}
}
}
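A minimal sketch of the new `zero` argument to `to_fri_openings` (not part of this diff), mirroring the challenger call in get_challenges above; a CircuitBuilder `builder` and a StarkProofTarget `proof` are assumed in scope:
let zero = builder.zero();
// `zero` pads each base-field ctl_zs_last target into coordinate 0 of an ExtensionTarget<D>.
let fri_openings = proof.openings.to_fri_openings(zero);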

View File

@@ -13,48 +13,75 @@ use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::util::reducing::ReducingFactorTarget;
use plonky2::with_context;
use crate::all_stark::AllStark;
use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::permutation::PermutationCheckDataTarget;
use crate::proof::{
StarkOpeningSetTarget, StarkProof, StarkProofChallengesTarget, StarkProofTarget,
StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget,
AllProof, AllProofChallengesTarget, AllProofTarget, StarkOpeningSetTarget, StarkProof,
StarkProofChallengesTarget, StarkProofTarget, StarkProofWithPublicInputs,
StarkProofWithPublicInputsTarget,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::vars::StarkEvaluationTargets;
pub fn verify_stark_proof_circuit<
pub fn verify_proof_circuit<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
proof_with_pis: StarkProofWithPublicInputsTarget<D>,
all_stark: AllStark<F, D>,
all_proof: AllProofTarget<D>,
inner_config: &StarkConfig,
) where
C::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS);
let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config);
let challenges = with_context!(
builder,
"compute challenges",
proof_with_pis.get_challenges::<F, C, S>(builder, &stark, inner_config)
let AllProofChallengesTarget {
stark_challenges,
ctl_challenges,
} = all_proof.get_challenges(builder, &all_stark, inner_config);
let nums_permutation_zs = all_stark.nums_permutation_zs(inner_config);
let AllStark {
cpu_stark,
keccak_stark,
cross_table_lookups,
} = all_stark;
let ctl_vars_per_table = CtlCheckVars::from_proofs(
&all_proof.stark_proofs,
&cross_table_lookups,
&ctl_challenges,
&nums_permutation_zs,
);
verify_stark_proof_with_challenges_circuit::<F, C, S, D>(
builder,
stark,
proof_with_pis,
challenges,
inner_config,
degree_bits,
);
verify_stark_proof_with_challenges(
cpu_stark,
&all_proof.stark_proofs[Table::Cpu as usize],
&stark_challenges[Table::Cpu as usize],
&ctl_vars_per_table[Table::Cpu as usize],
config,
)?;
verify_stark_proof_with_challenges(
keccak_stark,
&all_proof.stark_proofs[Table::Keccak as usize],
&stark_challenges[Table::Keccak as usize],
&ctl_vars_per_table[Table::Keccak as usize],
config,
)?;
verify_cross_table_lookups(
cross_table_lookups,
&all_proof.stark_proofs,
ctl_challenges,
config,
)
}
/// Recursively verifies an inner proof.
@@ -85,8 +112,8 @@ fn verify_stark_proof_with_challenges_circuit<
let StarkOpeningSetTarget {
local_values,
next_values,
permutation_zs,
permutation_zs_right,
permutation_ctl_zs: permutation_zs,
permutation_ctl_zs_right: permutation_zs_right,
quotient_polys,
} = &proof.openings;
let vars = StarkEvaluationTargets {
@@ -150,7 +177,7 @@
}
let merkle_caps = once(proof.trace_cap)
.chain(proof.permutation_zs_cap)
.chain(proof.permutation_ctl_zs_cap)
.chain(once(proof.quotient_polys_cap))
.collect_vec();
@@ -231,7 +258,7 @@ pub fn add_virtual_stark_proof<F: RichField + Extendable<D>, S: Stark<F, D>, con
StarkProofTarget {
trace_cap: builder.add_virtual_cap(cap_height),
permutation_zs_cap,
permutation_ctl_zs_cap: permutation_zs_cap,
quotient_polys_cap: builder.add_virtual_cap(cap_height),
openings: add_stark_opening_set::<F, S, D>(builder, stark, config),
opening_proof: builder.add_virtual_fri_proof(&num_leaves_per_oracle, &fri_params),
@@ -247,10 +274,10 @@ fn add_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D:
StarkOpeningSetTarget {
local_values: builder.add_virtual_extension_targets(S::COLUMNS),
next_values: builder.add_virtual_extension_targets(S::COLUMNS),
permutation_zs: stark
permutation_ctl_zs: stark
.uses_permutation_args()
.then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
permutation_zs_right: stark
permutation_ctl_zs_right: stark
.uses_permutation_args()
.then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),
quotient_polys: builder
@@ -301,7 +328,7 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
&proof.openings.to_fri_openings(),
);
if let Some(permutation_zs_cap_target) = &proof_target.permutation_zs_cap {
if let Some(permutation_zs_cap_target) = &proof_target.permutation_ctl_zs_cap {
witness.set_cap_target(permutation_zs_cap_target, &proof.permutation_ctl_zs_cap);
}
@@ -316,9 +343,13 @@ fn check_permutation_options<F: RichField + Extendable<D>, S: Stark<F, D>, const
challenges: &StarkProofChallengesTarget<D>,
) -> Result<()> {
let options_is_some = [
proof_with_pis.proof.permutation_zs_cap.is_some(),
proof_with_pis.proof.openings.permutation_zs.is_some(),
proof_with_pis.proof.openings.permutation_zs_right.is_some(),
proof_with_pis.proof.permutation_ctl_zs_cap.is_some(),
proof_with_pis.proof.openings.permutation_ctl_zs.is_some(),
proof_with_pis
.proof
.openings
.permutation_ctl_zs_right
.is_some(),
challenges.permutation_challenge_sets.is_some(),
];
ensure!(

View File

@@ -95,6 +95,7 @@ where
proof,
public_inputs,
} = proof_with_pis;
ensure!(public_inputs.len() == S::PUBLIC_INPUTS);
let StarkOpeningSet {
local_values,
next_values,

View File

@@ -120,9 +120,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
pub fn convert_to_ext(&mut self, t: Target) -> ExtensionTarget<D> {
let zero = self.zero();
let mut arr = [zero; D];
arr[0] = t;
ExtensionTarget(arr)
t.to_ext_target(zero)
}
pub fn convert_to_ext_algebra(&mut self, et: ExtensionTarget<D>) -> ExtensionAlgebraTarget<D> {

View File

@@ -1,5 +1,6 @@
use std::ops::Range;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::wire::Wire;
use crate::plonk::circuit_data::CircuitConfig;
@@ -37,6 +38,13 @@ impl Target {
Target::VirtualTarget { index } => degree * num_wires + index,
}
}
/// Conversion to an `ExtensionTarget`.
pub fn to_ext_target<const D: usize>(self, zero: Self) -> ExtensionTarget<D> {
let mut arr = [zero; D];
arr[0] = self;
ExtensionTarget(arr)
}
}
/// A `Target` which has already been constrained such that it can only be 0 or 1.
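A minimal sketch of the new helper (not part of this diff), assuming a CircuitBuilder `builder` and a Target `t` are in scope; CircuitBuilder::convert_to_ext above now just delegates to it:
let zero = builder.zero();
let et: ExtensionTarget<D> = t.to_ext_target(zero); // wires `t` into coordinate 0, `zero` elsewhere
let same = builder.convert_to_ext(t); // equivalent, via the wrapper shown earlier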