PR feedback

wborgeaud 2022-05-18 09:22:58 +02:00
parent 2ffd22acf5
commit f8b51743a9
15 changed files with 280 additions and 841 deletions

starky2/src/all_stark.rs

@@ -1,15 +1,11 @@
-use std::marker::PhantomData;
-use plonky2::field::extension_field::{Extendable, FieldExtension};
-use plonky2::field::packed_field::PackedField;
+use plonky2::field::extension_field::Extendable;
use plonky2::hash::hash_types::RichField;
use crate::config::StarkConfig;
-use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::CrossTableLookup;
-use crate::permutation::PermutationPair;
+use crate::keccak::keccak_stark::KeccakStark;
use crate::stark::Stark;
-use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
#[derive(Clone)]
pub struct AllStark<F: RichField + Extendable<D>, const D: usize> {
@@ -27,88 +23,12 @@ impl<F: RichField + Extendable<D>, const D: usize> AllStark<F, D> {
}
}
-#[derive(Copy, Clone)]
-pub struct CpuStark<F, const D: usize> {
-    #[allow(dead_code)]
-    num_rows: usize,
-    f: PhantomData<F>,
-}
-#[derive(Copy, Clone)]
-pub struct KeccakStark<F, const D: usize> {
-    #[allow(dead_code)]
-    num_rows: usize,
-    f: PhantomData<F>,
-}
#[derive(Copy, Clone)]
pub enum Table {
    Cpu = 0,
    Keccak = 1,
}
-impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
-    const COLUMNS: usize = 10;
-    const PUBLIC_INPUTS: usize = 0;
-    fn eval_packed_generic<FE, P, const D2: usize>(
-        &self,
-        _vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
-        _yield_constr: &mut ConstraintConsumer<P>,
-    ) where
-        FE: FieldExtension<D2, BaseField = F>,
-        P: PackedField<Scalar = FE>,
-    {
-    }
-    fn eval_ext_recursively(
-        &self,
-        _builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
-        _vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
-        _yield_constr: &mut RecursiveConstraintConsumer<F, D>,
-    ) {
-    }
-    fn constraint_degree(&self) -> usize {
-        3
-    }
-    fn permutation_pairs(&self) -> Vec<PermutationPair> {
-        vec![PermutationPair::singletons(8, 9)]
-    }
-}
-impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
-    const COLUMNS: usize = 7;
-    const PUBLIC_INPUTS: usize = 0;
-    fn eval_packed_generic<FE, P, const D2: usize>(
-        &self,
-        _vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
-        _yield_constr: &mut ConstraintConsumer<P>,
-    ) where
-        FE: FieldExtension<D2, BaseField = F>,
-        P: PackedField<Scalar = FE>,
-    {
-    }
-    fn eval_ext_recursively(
-        &self,
-        _builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
-        _vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
-        _yield_constr: &mut RecursiveConstraintConsumer<F, D>,
-    ) {
-    }
-    fn constraint_degree(&self) -> usize {
-        3
-    }
-    fn permutation_pairs(&self) -> Vec<PermutationPair> {
-        vec![PermutationPair::singletons(0, 6)]
-    }
-}
#[cfg(test)]
mod tests {
use anyhow::Result;
@@ -118,9 +38,11 @@ mod tests {
    use plonky2::util::timing::TimingTree;
    use rand::{thread_rng, Rng};
-    use crate::all_stark::{AllStark, CpuStark, KeccakStark, Table};
+    use crate::all_stark::{AllStark, Table};
    use crate::config::StarkConfig;
+    use crate::cpu::cpu_stark::CpuStark;
    use crate::cross_table_lookup::CrossTableLookup;
+    use crate::keccak::keccak_stark::KeccakStark;
    use crate::prover::prove;
    use crate::verifier::verify_proof;
@@ -133,34 +55,32 @@ mod tests {
        let config = StarkConfig::standard_fast_config();
-        let cpu_stark = CpuStark::<F, D> {
-            num_rows: 1 << 4,
-            f: Default::default(),
-        };
+        let cpu_rows = 1 << 4;
-        let keccak_stark = KeccakStark::<F, D> {
-            num_rows: 1 << 3,
-            f: Default::default(),
-        };
+        let keccak_rows = 1 << 3;
-        // let mut cpu_trace = vec![PolynomialValues::zero(cpu_stark.num_rows); CpuStark::COLUMNS];
-        let mut cpu_trace = vec![PolynomialValues::zero(cpu_stark.num_rows); 10];
-        // let mut keccak_trace =
-        //     vec![PolynomialValues::zero(keccak_stark.num_rows); KeccakStark::COLUMNS];
-        let mut keccak_trace = vec![PolynomialValues::zero(keccak_stark.num_rows); 7];
+        let mut cpu_trace = vec![PolynomialValues::zero(cpu_rows); 10];
+        let mut keccak_trace = vec![PolynomialValues::zero(keccak_rows); 7];
-        let vs0 = (0..keccak_stark.num_rows)
+        let vs0 = (0..keccak_rows)
            .map(F::from_canonical_usize)
            .collect::<Vec<_>>();
-        let vs1 = (1..=keccak_stark.num_rows)
+        let vs1 = (1..=keccak_rows)
            .map(F::from_canonical_usize)
            .collect::<Vec<_>>();
-        let start = thread_rng().gen_range(0..cpu_stark.num_rows - keccak_stark.num_rows);
+        let start = thread_rng().gen_range(0..cpu_rows - keccak_rows);
        let default = vec![F::ONE; 2];
-        cpu_trace[2].values = vec![default[0]; cpu_stark.num_rows];
-        cpu_trace[2].values[start..start + keccak_stark.num_rows].copy_from_slice(&vs0);
-        cpu_trace[4].values = vec![default[1]; cpu_stark.num_rows];
-        cpu_trace[4].values[start..start + keccak_stark.num_rows].copy_from_slice(&vs1);
+        cpu_trace[2].values = vec![default[0]; cpu_rows];
+        cpu_trace[2].values[start..start + keccak_rows].copy_from_slice(&vs0);
+        cpu_trace[4].values = vec![default[1]; cpu_rows];
+        cpu_trace[4].values[start..start + keccak_rows].copy_from_slice(&vs1);
        keccak_trace[3].values[..].copy_from_slice(&vs0);
        keccak_trace[5].values[..].copy_from_slice(&vs1);
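Aside: the window copy above is what makes the cross-table lookup satisfiable — outside [start, start + keccak_rows) the CPU columns hold the default value 1. A minimal, self-contained sketch of the same slice wiring (plain u64 values; the concrete start is hypothetical, the test draws it at random):

fn main() {
    let (cpu_rows, keccak_rows, start) = (8usize, 4usize, 2usize);
    let vs0: Vec<u64> = (0..keccak_rows as u64).collect(); // [0, 1, 2, 3]
    let mut col = vec![1u64; cpu_rows]; // default-filled CPU column
    col[start..start + keccak_rows].copy_from_slice(&vs0);
    assert_eq!(col, [1, 1, 0, 1, 2, 3, 1, 1]);
}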

starky2/src/cpu/cpu_stark.rs Normal file

@@ -0,0 +1,46 @@
use std::marker::PhantomData;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
#[derive(Copy, Clone)]
pub struct CpuStark<F, const D: usize> {
pub f: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
const COLUMNS: usize = 10;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
_vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
_yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
}
fn eval_ext_circuit(
&self,
_builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
_vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
_yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
}
fn constraint_degree(&self) -> usize {
3
}
fn permutation_pairs(&self) -> Vec<PermutationPair> {
vec![PermutationPair::singletons(8, 9)]
}
}
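Aside: `PermutationPair::singletons(8, 9)` asserts that columns 8 and 9 of the trace contain the same multiset of values. A rough sketch of the property being enforced, with sorting standing in for the grand-product argument (`is_permutation` is an illustrative helper, not crate API):

fn is_permutation(a: &[u64], b: &[u64]) -> bool {
    let (mut a, mut b) = (a.to_vec(), b.to_vec());
    a.sort_unstable();
    b.sort_unstable();
    a == b // equal as multisets
}

fn main() {
    assert!(is_permutation(&[3, 1, 2], &[1, 2, 3]));
    assert!(!is_permutation(&[3, 1, 1], &[1, 2, 3]));
}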

starky2/src/cpu/mod.rs Normal file

@@ -0,0 +1 @@
pub mod cpu_stark;

starky2/src/cross_table_lookup.rs

@@ -6,8 +6,6 @@ use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericConfig;
-use plonky2::plonk::plonk_common::reduce_with_powers;
-use plonky2::util::reducing::ReducingFactor;
use crate::all_stark::Table;
use crate::config::StarkConfig;
@@ -84,64 +82,57 @@ pub fn cross_table_lookup_data<F: RichField, C: GenericConfig<D, F = F>, const D
    challenger: &mut Challenger<F, C::Hasher>,
) -> Vec<CtlData<F>> {
    let challenges = get_grand_product_challenge_set(challenger, config.num_challenges);
-    cross_table_lookups.iter().fold(
-        vec![CtlData::new(challenges.clone()); trace_poly_values.len()],
-        |mut acc, cross_table_lookup| {
-            let CrossTableLookup {
-                looking_table,
-                looking_columns,
-                looked_table,
-                looked_columns,
-                default,
-            } = cross_table_lookup;
-            for &GrandProductChallenge { beta, gamma } in &challenges.challenges {
-                let z_looking = partial_products(
-                    &trace_poly_values[*looking_table as usize],
-                    looking_columns,
-                    beta,
-                    gamma,
-                );
-                let z_looked = partial_products(
-                    &trace_poly_values[*looked_table as usize],
-                    looked_columns,
-                    beta,
-                    gamma,
-                );
-                debug_assert_eq!(
-                    *z_looking.values.last().unwrap(),
-                    *z_looked.values.last().unwrap()
-                        * (gamma + reduce_with_powers(default.iter(), beta)).exp_u64(
-                            trace_poly_values[*looking_table as usize][0].len() as u64
-                                - trace_poly_values[*looked_table as usize][0].len() as u64
-                        )
-                );
-                acc[*looking_table as usize]
-                    .zs_columns
-                    .push((z_looking, looking_columns.clone()));
-                acc[*looked_table as usize]
-                    .zs_columns
-                    .push((z_looked, looked_columns.clone()));
-            }
-            acc
-        },
-    )
+    let mut ctl_data_per_table = vec![CtlData::new(challenges.clone()); trace_poly_values.len()];
+    for CrossTableLookup {
+        looking_table,
+        looking_columns,
+        looked_table,
+        looked_columns,
+        default,
+    } in cross_table_lookups
+    {
+        for &challenge in &challenges.challenges {
+            let z_looking = partial_products(
+                &trace_poly_values[*looking_table as usize],
+                looking_columns,
+                challenge,
+            );
+            let z_looked = partial_products(
+                &trace_poly_values[*looked_table as usize],
+                looked_columns,
+                challenge,
+            );
+            debug_assert_eq!(
+                *z_looking.values.last().unwrap(),
+                *z_looked.values.last().unwrap()
+                    * challenge.combine(default).exp_u64(
+                        trace_poly_values[*looking_table as usize][0].len() as u64
+                            - trace_poly_values[*looked_table as usize][0].len() as u64
+                    )
+            );
+            ctl_data_per_table[*looking_table as usize]
+                .zs_columns
+                .push((z_looking, looking_columns.clone()));
+            ctl_data_per_table[*looked_table as usize]
+                .zs_columns
+                .push((z_looked, looked_columns.clone()));
+        }
+    }
+    ctl_data_per_table
}
fn partial_products<F: Field>(
    trace: &[PolynomialValues<F>],
    columns: &[usize],
-    beta: F,
-    gamma: F,
+    challenge: GrandProductChallenge<F>,
) -> PolynomialValues<F> {
    let mut partial_prod = F::ONE;
    let degree = trace[0].len();
    let mut res = Vec::with_capacity(degree);
    for i in 0..degree {
-        partial_prod *=
-            gamma + reduce_with_powers(columns.iter().map(|&j| &trace[j].values[i]), beta);
+        partial_prod *= challenge.combine(columns.iter().map(|&j| &trace[j].values[i]));
        res.push(partial_prod);
    }
    res.into()
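Aside: each entry res[i] is the running product Z(g^(i+1)) = prod_{k<=i} (gamma + sum_j c_{j,k} * beta^j) over the selected columns. A minimal sketch of that recurrence, with u128 integers standing in for field elements (no modular reduction; names are illustrative only):

fn partial_products_sketch(rows: &[Vec<u128>], beta: u128, gamma: u128) -> Vec<u128> {
    let mut acc = 1u128;
    rows.iter()
        .map(|row| {
            // combine(row) = gamma + sum_i row[i] * beta^i (Horner from the top term)
            let combined = gamma + row.iter().rev().fold(0u128, |s, &v| s * beta + v);
            acc *= combined;
            acc
        })
        .collect()
}

fn main() {
    // Rows [1, 2] and [1, 2, 3] with beta = 10, gamma = 1 combine to 22 and 322.
    assert_eq!(partial_products_sketch(&[vec![1, 2], vec![1, 2, 3]], 10, 1), [22, 22 * 322]);
}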
@@ -173,53 +164,42 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
    let mut ctl_zs = proofs
        .iter()
        .zip(num_permutation_zs)
-        .map(|(p, &num_permutation)| {
-            p.proof
-                .openings
-                .permutation_ctl_zs
-                .iter()
-                .skip(num_permutation)
-                .zip(
-                    p.proof
-                        .openings
-                        .permutation_ctl_zs_right
-                        .iter()
-                        .skip(num_permutation),
-                )
+        .map(|(p, &num_perms)| {
+            let openings = &p.proof.openings;
+            let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms);
+            let ctl_zs_right = openings.permutation_ctl_zs_right.iter().skip(num_perms);
+            ctl_zs.zip(ctl_zs_right)
        })
        .collect::<Vec<_>>();
-    cross_table_lookups
-        .iter()
-        .fold(vec![vec![]; proofs.len()], |mut acc, ctl| {
-            let CrossTableLookup {
-                looking_table,
-                looking_columns,
-                looked_table,
-                looked_columns,
-                ..
-            } = ctl;
-            for &challenges in &ctl_challenges.challenges {
-                let (looking_z, looking_z_next) =
-                    ctl_zs[*looking_table as usize].next().unwrap();
-                acc[*looking_table as usize].push(Self {
-                    local_z: *looking_z,
-                    next_z: *looking_z_next,
-                    challenges,
-                    columns: looking_columns,
-                });
-                let (looked_z, looked_z_next) = ctl_zs[*looked_table as usize].next().unwrap();
-                acc[*looked_table as usize].push(Self {
-                    local_z: *looked_z,
-                    next_z: *looked_z_next,
-                    challenges,
-                    columns: looked_columns,
-                });
-            }
-            acc
-        })
+    let mut ctl_vars_per_table = vec![vec![]; proofs.len()];
+    for CrossTableLookup {
+        looking_table,
+        looking_columns,
+        looked_table,
+        looked_columns,
+        ..
+    } in cross_table_lookups
+    {
+        for &challenges in &ctl_challenges.challenges {
+            let (looking_z, looking_z_next) = ctl_zs[*looking_table as usize].next().unwrap();
+            ctl_vars_per_table[*looking_table as usize].push(Self {
+                local_z: *looking_z,
+                next_z: *looking_z_next,
+                challenges,
+                columns: looking_columns,
+            });
+            let (looked_z, looked_z_next) = ctl_zs[*looked_table as usize].next().unwrap();
+            ctl_vars_per_table[*looked_table as usize].push(Self {
+                local_z: *looked_z,
+                next_z: *looked_z_next,
+                challenges,
+                columns: looked_columns,
+            });
+        }
+    }
+    ctl_vars_per_table
}
}
@@ -241,10 +221,7 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, C, S, const D: usize, con
        challenges,
        columns,
    } = lookup_vars;
-    let mut factor = ReducingFactor::new(challenges.beta);
-    let mut combine = |v: &[P]| -> P {
-        factor.reduce_ext(columns.iter().map(|&i| v[i])) + FE::from_basefield(challenges.gamma)
-    };
+    let combine = |v: &[P]| -> P { challenges.combine(columns.iter().map(|&i| &v[i])) };
    // Check value of `Z(1)`
    consumer.constraint_first_row(*local_z - combine(vars.local_values));
@@ -285,9 +262,8 @@ pub(crate) fn verify_cross_table_lookups<
    let looked_degree = 1 << degrees_bits[looked_table as usize];
    let looking_z = *ctl_zs_openings[looking_table as usize].next().unwrap();
    let looked_z = *ctl_zs_openings[looked_table as usize].next().unwrap();
-    let GrandProductChallenge { beta, gamma } =
-        challenges.challenges[i % config.num_challenges];
-    let combined_default = gamma + reduce_with_powers(default.iter(), beta);
+    let challenge = challenges.challenges[i % config.num_challenges];
+    let combined_default = challenge.combine(default.iter());
    ensure!(
        looking_z == looked_z * combined_default.exp_u64(looking_degree - looked_degree),
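Aside: the `ensure!` enforces that the two grand products differ by exactly one factor of combined_default per extra looking row. Toy illustration with plain integers (hypothetical row values; 8 looking rows vs. 4 looked rows):

fn main() {
    let looked_rows = [2u128, 5, 7, 11]; // combined values of the looked table's rows
    let combined_default = 3u128;
    // The looking table repeats those rows plus 4 default-filled ones.
    let looking_rows: Vec<u128> = looked_rows
        .iter()
        .copied()
        .chain(std::iter::repeat(combined_default).take(4))
        .collect();
    let looking_z: u128 = looking_rows.iter().product();
    let looked_z: u128 = looked_rows.iter().product();
    assert_eq!(looking_z, looked_z * combined_default.pow(8 - 4));
}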

starky2/src/get_challenges.rs

@@ -1,11 +1,7 @@
use plonky2::field::extension_field::Extendable;
-use plonky2::field::polynomial::PolynomialCoeffs;
-use plonky2::fri::proof::{FriProof, FriProofTarget};
-use plonky2::gadgets::polynomial::PolynomialCoeffsExtTarget;
-use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
-use plonky2::hash::merkle_tree::MerkleCap;
+use plonky2::fri::proof::FriProof;
+use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
@@ -18,56 +14,6 @@ use crate::permutation::{
use crate::proof::*;
use crate::stark::Stark;
-fn get_challenges<F, C, S, const D: usize>(
-    challenger: &mut Challenger<F, C::Hasher>,
-    stark: &S,
-    permutation_ctl_zs_cap: &MerkleCap<F, C::Hasher>,
-    quotient_polys_cap: &MerkleCap<F, C::Hasher>,
-    openings: &StarkOpeningSet<F, D>,
-    commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
-    final_poly: &PolynomialCoeffs<F::Extension>,
-    pow_witness: F,
-    config: &StarkConfig,
-    degree_bits: usize,
-) -> StarkProofChallenges<F, D>
-where
-    F: RichField + Extendable<D>,
-    C: GenericConfig<D, F = F>,
-    S: Stark<F, D>,
-{
-    let num_challenges = config.num_challenges;
-    let permutation_challenge_sets = stark.uses_permutation_args().then(|| {
-        get_n_grand_product_challenge_sets(
-            challenger,
-            num_challenges,
-            stark.permutation_batch_size(),
-        )
-    });
-    challenger.observe_cap(permutation_ctl_zs_cap);
-    let stark_alphas = challenger.get_n_challenges(num_challenges);
-    challenger.observe_cap(quotient_polys_cap);
-    let stark_zeta = challenger.get_extension_challenge::<D>();
-    challenger.observe_openings(&openings.to_fri_openings());
-    StarkProofChallenges {
-        permutation_challenge_sets,
-        stark_alphas,
-        stark_zeta,
-        fri_challenges: challenger.fri_challenges::<C, D>(
-            commit_phase_merkle_caps,
-            final_poly,
-            pow_witness,
-            degree_bits,
-            &config.fri_config,
-        ),
-    }
-}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
/// Computes all Fiat-Shamir challenges used in the STARK proof.
pub(crate) fn get_challenges(
@@ -105,19 +51,6 @@ where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
-    // TODO: Should be used later in compression?
-    #![allow(dead_code)]
-    pub(crate) fn fri_query_indices<S: Stark<F, D>>(
-        &self,
-        stark: &S,
-        config: &StarkConfig,
-    ) -> Vec<usize> {
-        let mut challenger = Challenger::new();
-        self.get_challenges(&mut challenger, stark, config)
-            .fri_challenges
-            .fri_query_indices
-    }
    /// Computes all Fiat-Shamir challenges used in the STARK proof.
    pub(crate) fn get_challenges<S: Stark<F, D>>(
        &self,
@@ -141,77 +74,37 @@
            ..
        } = &self.proof;
-        get_challenges::<F, C, S, D>(
-            challenger,
-            stark,
-            permutation_ctl_zs_cap,
-            quotient_polys_cap,
-            openings,
-            commit_phase_merkle_caps,
-            final_poly,
-            *pow_witness,
-            config,
-            degree_bits,
-        )
-    }
-}
-#[allow(clippy::too_many_arguments)]
-pub(crate) fn get_challenges_target<
-    F: RichField + Extendable<D>,
-    C: GenericConfig<D, F = F>,
-    S: Stark<F, D>,
-    const D: usize,
->(
-    builder: &mut CircuitBuilder<F, D>,
-    stark: &S,
-    trace_cap: &MerkleCapTarget,
-    permutation_zs_cap: Option<&MerkleCapTarget>,
-    quotient_polys_cap: &MerkleCapTarget,
-    openings: &StarkOpeningSetTarget<D>,
-    commit_phase_merkle_caps: &[MerkleCapTarget],
-    final_poly: &PolynomialCoeffsExtTarget<D>,
-    pow_witness: Target,
-    config: &StarkConfig,
-) -> StarkProofChallengesTarget<D>
-where
-    C::Hasher: AlgebraicHasher<F>,
-{
-    let num_challenges = config.num_challenges;
-    let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
-    challenger.observe_cap(trace_cap);
-    let permutation_challenge_sets = permutation_zs_cap.map(|permutation_zs_cap| {
-        let tmp = get_n_permutation_challenge_sets_target(
-            builder,
-            &mut challenger,
-            num_challenges,
-            stark.permutation_batch_size(),
-        );
-        challenger.observe_cap(permutation_zs_cap);
-        tmp
-    });
-    let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
-    challenger.observe_cap(quotient_polys_cap);
-    let stark_zeta = challenger.get_extension_challenge(builder);
-    challenger.observe_openings(&openings.to_fri_openings());
-    StarkProofChallengesTarget {
-        permutation_challenge_sets,
-        stark_alphas,
-        stark_zeta,
-        fri_challenges: challenger.fri_challenges::<C>(
-            builder,
-            commit_phase_merkle_caps,
-            final_poly,
-            pow_witness,
-            &config.fri_config,
-        ),
-    }
-}
+        let num_challenges = config.num_challenges;
+        let permutation_challenge_sets = stark.uses_permutation_args().then(|| {
+            get_n_grand_product_challenge_sets(
+                challenger,
+                num_challenges,
+                stark.permutation_batch_size(),
+            )
+        });
+        challenger.observe_cap(permutation_ctl_zs_cap);
+        let stark_alphas = challenger.get_n_challenges(num_challenges);
+        challenger.observe_cap(quotient_polys_cap);
+        let stark_zeta = challenger.get_extension_challenge::<D>();
+        challenger.observe_openings(&openings.to_fri_openings());
+        StarkProofChallenges {
+            permutation_challenge_sets,
+            stark_alphas,
+            stark_zeta,
+            fri_challenges: challenger.fri_challenges::<C, D>(
+                commit_phase_merkle_caps,
+                final_poly,
+                *pow_witness,
+                degree_bits,
+                &config.fri_config,
+            ),
+        }
    }
}
@@ -229,139 +122,37 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
    where
        C::Hasher: AlgebraicHasher<F>,
    {
-        let StarkProofTarget {
-            trace_cap,
-            permutation_zs_cap,
-            quotient_polys_cap,
-            openings,
-            opening_proof:
-                FriProofTarget {
-                    commit_phase_merkle_caps,
-                    final_poly,
-                    pow_witness,
-                    ..
-                },
-        } = &self.proof;
-        get_challenges_target::<F, C, S, D>(
-            builder,
-            stark,
-            trace_cap,
-            permutation_zs_cap.as_ref(),
-            quotient_polys_cap,
-            openings,
-            commit_phase_merkle_caps,
-            final_poly,
-            *pow_witness,
-            config,
-        )
+        let proof = &self.proof;
+        let opening_proof = &proof.opening_proof;
+        let num_challenges = config.num_challenges;
+        let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
+        challenger.observe_cap(&proof.trace_cap);
+        let permutation_challenge_sets =
+            proof.permutation_zs_cap.as_ref().map(|permutation_zs_cap| {
+                let tmp = get_n_permutation_challenge_sets_target(
+                    builder,
+                    &mut challenger,
+                    num_challenges,
+                    stark.permutation_batch_size(),
+                );
+                challenger.observe_cap(permutation_zs_cap);
+                tmp
+            });
+        let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
+        challenger.observe_cap(&proof.quotient_polys_cap);
+        let stark_zeta = challenger.get_extension_challenge(builder);
+        challenger.observe_openings(&proof.openings.to_fri_openings());
+        StarkProofChallengesTarget {
+            permutation_challenge_sets,
+            stark_alphas,
+            stark_zeta,
+            fri_challenges: challenger.fri_challenges::<C>(
+                builder,
+                &opening_proof.commit_phase_merkle_caps,
+                &opening_proof.final_poly,
+                opening_proof.pow_witness,
+                &config.fri_config,
+            ),
+        }
}
}
// TODO: Deal with the compressed stuff.
// impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
// CompressedProofWithPublicInputs<F, C, D>
// {
// /// Computes all Fiat-Shamir challenges used in the Plonk proof.
// pub(crate) fn get_challenges(
// &self,
// common_data: &CommonCircuitData<F, C, D>,
// ) -> anyhow::Result<ProofChallenges<F, D>> {
// let CompressedProof {
// wires_cap,
// plonk_zs_partial_products_cap,
// quotient_polys_cap,
// openings,
// opening_proof:
// CompressedFriProof {
// commit_phase_merkle_caps,
// final_poly,
// pow_witness,
// ..
// },
// } = &self.proof;
//
// get_challenges(
// self.get_public_inputs_hash(),
// wires_cap,
// plonk_zs_partial_products_cap,
// quotient_polys_cap,
// openings,
// commit_phase_merkle_caps,
// final_poly,
// *pow_witness,
// common_data,
// )
// }
//
// /// Computes all coset elements that can be inferred in the FRI reduction steps.
// pub(crate) fn get_inferred_elements(
// &self,
// challenges: &ProofChallenges<F, D>,
// common_data: &CommonCircuitData<F, C, D>,
// ) -> FriInferredElements<F, D> {
// let ProofChallenges {
// plonk_zeta,
// fri_alpha,
// fri_betas,
// fri_query_indices,
// ..
// } = challenges;
// let mut fri_inferred_elements = Vec::new();
// // Holds the indices that have already been seen at each reduction depth.
// let mut seen_indices_by_depth =
// vec![HashSet::new(); common_data.fri_params.reduction_arity_bits.len()];
// let precomputed_reduced_evals = PrecomputedReducedOpenings::from_os_and_alpha(
// &self.proof.openings.to_fri_openings(),
// *fri_alpha,
// );
// let log_n = common_data.degree_bits + common_data.config.fri_config.rate_bits;
// // Simulate the proof verification and collect the inferred elements.
// // The content of the loop is basically the same as the `fri_verifier_query_round` function.
// for &(mut x_index) in fri_query_indices {
// let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR
// * F::primitive_root_of_unity(log_n).exp_u64(reverse_bits(x_index, log_n) as u64);
// let mut old_eval = fri_combine_initial::<F, C, D>(
// &common_data.get_fri_instance(*plonk_zeta),
// &self
// .proof
// .opening_proof
// .query_round_proofs
// .initial_trees_proofs[&x_index],
// *fri_alpha,
// subgroup_x,
// &precomputed_reduced_evals,
// &common_data.fri_params,
// );
// for (i, &arity_bits) in common_data
// .fri_params
// .reduction_arity_bits
// .iter()
// .enumerate()
// {
// let coset_index = x_index >> arity_bits;
// if !seen_indices_by_depth[i].insert(coset_index) {
// // If this index has already been seen, we can skip the rest of the reductions.
// break;
// }
// fri_inferred_elements.push(old_eval);
// let arity = 1 << arity_bits;
// let mut evals = self.proof.opening_proof.query_round_proofs.steps[i][&coset_index]
// .evals
// .clone();
// let x_index_within_coset = x_index & (arity - 1);
// evals.insert(x_index_within_coset, old_eval);
// old_eval = compute_evaluation(
// subgroup_x,
// x_index_within_coset,
// arity_bits,
// &evals,
// fri_betas[i],
// );
// subgroup_x = subgroup_x.exp_power_of_2(arity_bits);
// x_index = coset_index;
// }
// }
// FriInferredElements(fri_inferred_elements)
// }
// }

starky2/src/keccak/keccak_stark.rs Normal file

@@ -0,0 +1,46 @@
use std::marker::PhantomData;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
#[derive(Copy, Clone)]
pub struct KeccakStark<F, const D: usize> {
pub(crate) f: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
const COLUMNS: usize = 7;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
_vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
_yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
}
fn eval_ext_circuit(
&self,
_builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
_vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
_yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
}
fn constraint_degree(&self) -> usize {
3
}
fn permutation_pairs(&self) -> Vec<PermutationPair> {
vec![PermutationPair::singletons(0, 6)]
}
}

starky2/src/keccak/mod.rs Normal file

@@ -0,0 +1 @@
pub mod keccak_stark;

starky2/src/lib.rs

@@ -3,12 +3,13 @@
#![allow(clippy::type_complexity)]
#![feature(generic_const_exprs)]
+pub mod all_stark;
pub mod config;
pub mod constraint_consumer;
+pub mod cpu;
pub mod cross_table_lookup;
mod get_challenges;
-// pub mod mock_stark;
-pub mod all_stark;
+pub mod keccak;
pub mod permutation;
pub mod proof;
pub mod prover;

starky2/src/mock_stark.rs

@@ -1,362 +0,0 @@
use std::marker::PhantomData;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
struct AllStarks<F: RichField + Extendable<D>, const D: usize> {
fibonacci: FibonacciStark<F, D>,
multiplications: MultiplicationStark<F, D, 4>,
}
/// Toy STARK system used for testing.
/// Computes a Fibonacci sequence with state `[x0, x1, i, j]` using the state transition
/// `x0' <- x1, x1' <- x0 + x1, i' <- i+1, j' <- j+1`.
/// Note: The `i, j` columns are only used to test the permutation argument.
#[derive(Copy, Clone)]
struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
num_rows: usize,
_phantom: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
// The first public input is `x0`.
const PI_INDEX_X0: usize = 0;
// The second public input is `x1`.
const PI_INDEX_X1: usize = 1;
// The third public input is the second element of the last row, which should be equal to the
// `num_rows`-th Fibonacci number.
const PI_INDEX_RES: usize = 2;
fn new(num_rows: usize) -> Self {
Self {
num_rows,
_phantom: PhantomData,
}
}
/// Generate the trace using `x0, x1, 0, 1` as initial state values.
fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
let mut trace_rows = (0..self.num_rows)
.scan([x0, x1, F::ZERO, F::ONE], |acc, _| {
let tmp = *acc;
acc[0] = tmp[1];
acc[1] = tmp[0] + tmp[1];
acc[2] = tmp[2] + F::ONE;
acc[3] = tmp[3] + F::ONE;
Some(tmp)
})
.collect::<Vec<_>>();
trace_rows[self.num_rows - 1][3] = F::ZERO; // So that columns 2 and 3 are permutations of one another.
trace_rows_to_poly_values(trace_rows)
}
}
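Aside: `scan` yields each pre-transition state, so row i holds the state before step i. The same recurrence over plain u64, without the final-row fixup (a minimal sketch):

fn main() {
    let rows: Vec<[u64; 4]> = (0..4)
        .scan([0u64, 1, 0, 1], |acc, _| {
            let tmp = *acc; // emit the current state...
            acc[0] = tmp[1];
            acc[1] = tmp[0] + tmp[1];
            acc[2] = tmp[2] + 1;
            acc[3] = tmp[3] + 1; // ...then advance it
            Some(tmp)
        })
        .collect();
    assert_eq!(rows, [[0, 1, 0, 1], [1, 1, 1, 2], [1, 2, 2, 3], [2, 3, 3, 4]]);
}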
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
const COLUMNS: usize = 4;
const PUBLIC_INPUTS: usize = 3;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
// Check public inputs.
yield_constr
.constraint_first_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_X0]);
yield_constr
.constraint_first_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_X1]);
yield_constr
.constraint_last_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_RES]);
// x0' <- x1
yield_constr.constraint_transition(vars.next_values[0] - vars.local_values[1]);
// x1' <- x0 + x1
yield_constr.constraint_transition(
vars.next_values[1] - vars.local_values[0] - vars.local_values[1],
);
}
fn eval_ext_recursively(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
// Check public inputs.
let pis_constraints = [
builder.sub_extension(vars.local_values[0], vars.public_inputs[Self::PI_INDEX_X0]),
builder.sub_extension(vars.local_values[1], vars.public_inputs[Self::PI_INDEX_X1]),
builder.sub_extension(vars.local_values[1], vars.public_inputs[Self::PI_INDEX_RES]),
];
yield_constr.constraint_first_row(builder, pis_constraints[0]);
yield_constr.constraint_first_row(builder, pis_constraints[1]);
yield_constr.constraint_last_row(builder, pis_constraints[2]);
// x0' <- x1
let first_col_constraint = builder.sub_extension(vars.next_values[0], vars.local_values[1]);
yield_constr.constraint_transition(builder, first_col_constraint);
// x1' <- x0 + x1
let second_col_constraint = {
let tmp = builder.sub_extension(vars.next_values[1], vars.local_values[0]);
builder.sub_extension(tmp, vars.local_values[1])
};
yield_constr.constraint_transition(builder, second_col_constraint);
}
fn constraint_degree(&self) -> usize {
2
}
fn permutation_pairs(&self) -> Vec<PermutationPair> {
vec![PermutationPair::singletons(2, 3)]
}
}
#[derive(Copy, Clone)]
struct MultiplicationStark<
F: RichField + Extendable<D>,
const D: usize,
const NUM_MULTIPLICANDS: usize,
> {
num_rows: usize,
_phantom: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize, const W: usize> MultiplicationStark<F, D, W> {
fn multiplicand(&self, i: usize) -> usize {
debug_assert!(i < W);
i
}
// Column holding the product of the first `i + 1` multiplicands.
fn intermediate_product(&self, i: usize) -> usize {
debug_assert!(i < W && i > 0);
W + i - 1
}
fn product(&self) -> usize {
2 * W - 2
}
const fn num_columns() -> usize {
2 * W - 1
}
fn new(num_rows: usize) -> Self {
Self {
num_rows,
_phantom: PhantomData,
}
}
fn generate_trace(&self, multiplicands: &[Vec<F>]) -> Vec<PolynomialValues<F>>
where
[(); Self::num_columns()]:,
{
debug_assert_eq!(multiplicands.len(), self.num_rows);
let mut trace_rows = multiplicands
.iter()
.map(|row| {
debug_assert_eq!(row.len(), W);
let mut result = [F::ZERO; Self::num_columns()];
for i in 0..W {
result[self.multiplicand(i)] = row[i];
}
let mut acc = row[0] * row[1];
for i in 1..W - 1 {
result[self.intermediate_product(i)] = acc;
acc *= row[i + 1];
}
result[self.product()] = acc;
result
})
.collect::<Vec<_>>();
trace_rows_to_poly_values(trace_rows)
}
}
impl<F: RichField + Extendable<D>, const D: usize, const W: usize> Stark<F, D>
for MultiplicationStark<F, D, W>
{
const COLUMNS: usize = 2 * W - 1;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
yield_constr.constraint(
vars.local_values[self.intermediate_product(1)]
- vars.local_values[self.multiplicand(0)] * vars.local_values[self.multiplicand(1)],
);
for i in 2..W - 1 {
yield_constr.constraint(
vars.local_values[self.intermediate_product(i)]
- vars.local_values[self.intermediate_product(i - 1)]
* vars.local_values[self.multiplicand(i)],
)
}
}
fn eval_ext_recursively(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
todo!()
}
fn constraint_degree(&self) -> usize {
2
}
}
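Aside: walking the accessors above for a hypothetical W = 4: multiplicands sit at columns 0..=3, intermediate products at 4..=5, and the final product at 6, so num_columns() = 2*4 - 1 = 7. One trace row:

fn main() {
    // [m0, m1, m2, m3, m0*m1, m0*m1*m2, m0*m1*m2*m3]
    let row: [u64; 7] = [2, 3, 5, 7, 6, 30, 210];
    assert_eq!(row[4], row[0] * row[1]); // intermediate_product(1)
    assert_eq!(row[5], row[4] * row[2]); // intermediate_product(2)
    assert_eq!(row[6], row[5] * row[3]); // product()
}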
#[cfg(test)]
mod tests {
use anyhow::Result;
use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{
AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig,
};
use plonky2::util::timing::TimingTree;
use crate::config::StarkConfig;
use crate::mock_stark::FibonacciStark;
use crate::proof::StarkProofWithPublicInputs;
use crate::prover::prove;
use crate::recursive_verifier::{
add_virtual_stark_proof_with_pis, recursively_verify_stark_proof,
set_stark_proof_with_pis_target,
};
use crate::stark::Stark;
use crate::stark_testing::test_stark_low_degree;
use crate::verifier::verify_stark_proof;
fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
(0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
}
#[test]
fn test_fibonacci_stark() -> Result<()> {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = FibonacciStark<F, D>;
let config = StarkConfig::standard_fast_config();
let num_rows = 1 << 5;
let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
let stark = S::new(num_rows);
let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
// let proof = prove::<F, C, S, D>(
// stark,
// &config,
// trace,
// public_inputs,
// &mut TimingTree::default(),
// )?;
// verify_stark_proof(stark, proof, &config)
Ok(())
}
#[test]
fn test_fibonacci_stark_degree() -> Result<()> {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = FibonacciStark<F, D>;
let num_rows = 1 << 5;
let stark = S::new(num_rows);
test_stark_low_degree(stark)
}
#[test]
fn test_recursive_stark_verifier() -> Result<()> {
init_logger();
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = FibonacciStark<F, D>;
let config = StarkConfig::standard_fast_config();
let num_rows = 1 << 5;
let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
let stark = S::new(num_rows);
let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
// let proof = prove::<F, C, S, D>(
// stark,
// &config,
// trace,
// public_inputs,
// &mut TimingTree::default(),
// )?;
// verify_stark_proof(stark, proof.clone(), &config)?;
//
// recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
Ok(())
}
fn recursive_proof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D> + Copy,
InnerC: GenericConfig<D, F = F>,
const D: usize,
>(
stark: S,
inner_proof: StarkProofWithPublicInputs<F, InnerC, D>,
inner_config: &StarkConfig,
print_gate_counts: bool,
) -> Result<()>
where
InnerC::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
let circuit_config = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
let mut pw = PartialWitness::new();
let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits);
set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof);
recursively_verify_stark_proof::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);
if print_gate_counts {
builder.print_gate_counts(0);
}
let data = builder.build::<C>();
let proof = data.prove(pw)?;
data.verify(proof)
}
fn init_logger() {
let _ = env_logger::builder().format_timestamp(None).try_init();
}
}

starky2/src/permutation.rs

@@ -12,6 +12,7 @@ use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
+use plonky2::plonk::plonk_common::reduce_with_powers;
use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget};
use rayon::prelude::*;
@@ -53,6 +54,20 @@ pub(crate) struct GrandProductChallenge<T: Copy> {
    pub(crate) gamma: T,
}
+impl<F: Field> GrandProductChallenge<F> {
+    pub(crate) fn combine<'a, FE, P, T: IntoIterator<Item = &'a P>, const D2: usize>(
+        &self,
+        terms: T,
+    ) -> P
+    where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>,
+        T::IntoIter: DoubleEndedIterator,
+    {
+        reduce_with_powers(terms, FE::from_basefield(self.beta)) + FE::from_basefield(self.gamma)
+    }
+}
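Aside: `combine` folds the beta-power reduction and the additive gamma shift into one call (lifting both into the packed/extension field via `from_basefield`), which is what lets the call sites above stop destructuring beta/gamma. What it computes, sketched with u64 integers in place of field elements:

// combine([t0, t1, t2]) = gamma + t0 + t1*beta + t2*beta^2
fn combine_sketch(terms: &[u64], beta: u64, gamma: u64) -> u64 {
    terms.iter().rev().fold(0, |acc, &t| acc * beta + t) + gamma
}

fn main() {
    assert_eq!(combine_sketch(&[1, 2, 3], 10, 7), 1 + 2 * 10 + 3 * 100 + 7);
}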
/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone)]
pub(crate) struct GrandProductChallengeSet<T: Copy> {
@@ -323,7 +338,7 @@ pub struct PermutationCheckDataTarget<const D: usize> {
pub(crate) permutation_challenge_sets: Vec<GrandProductChallengeSet<Target>>,
}
-pub(crate) fn eval_permutation_checks_recursively<F, S, const D: usize>(
+pub(crate) fn eval_permutation_checks_circuit<F, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,

starky2/src/prover.rs

@@ -15,10 +15,12 @@ use plonky2::util::transpose;
use plonky2_util::{log2_ceil, log2_strict};
use rayon::prelude::*;
-use crate::all_stark::{AllStark, CpuStark, KeccakStark, Table};
+use crate::all_stark::{AllStark, Table};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
+use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{cross_table_lookup_data, CtlCheckVars, CtlData};
+use crate::keccak::keccak_stark::KeccakStark;
use crate::permutation::PermutationCheckVars;
use crate::permutation::{
compute_permutation_z_polys, get_n_grand_product_challenge_sets, GrandProductChallengeSet,

starky2/src/recursive_verifier.rs

@@ -21,10 +21,10 @@ use crate::proof::{
StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget,
};
use crate::stark::Stark;
-use crate::vanishing_poly::eval_vanishing_poly_recursively;
+use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::vars::StarkEvaluationTargets;
-pub fn recursively_verify_stark_proof<
+pub fn verify_stark_proof_circuit<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
@@ -47,7 +47,7 @@ pub fn recursively_verify_stark_proof<
proof_with_pis.get_challenges::<F, C, S>(builder, &stark, inner_config)
);
-    recursively_verify_stark_proof_with_challenges::<F, C, S, D>(
+    verify_stark_proof_with_challenges_circuit::<F, C, S, D>(
builder,
stark,
proof_with_pis,
@@ -58,7 +58,7 @@ pub fn recursively_verify_stark_proof<
}
/// Recursively verifies an inner proof.
-fn recursively_verify_stark_proof_with_challenges<
+fn verify_stark_proof_with_challenges_circuit<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
@@ -103,7 +103,7 @@ fn recursively_verify_stark_proof_with_challenges<
let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits);
let z_h_zeta = builder.sub_extension(zeta_pow_deg, one);
let (l_1, l_last) =
-        eval_l_1_and_l_last_recursively(builder, degree_bits, challenges.stark_zeta, z_h_zeta);
+        eval_l_1_and_l_last_circuit(builder, degree_bits, challenges.stark_zeta, z_h_zeta);
let last =
builder.constant_extension(F::Extension::primitive_root_of_unity(degree_bits).inverse());
let z_last = builder.sub_extension(challenges.stark_zeta, last);
@@ -127,7 +127,7 @@ fn recursively_verify_stark_proof_with_challenges<
with_context!(
builder,
"evaluate vanishing polynomial",
-        eval_vanishing_poly_recursively::<F, C, S, D>(
+        eval_vanishing_poly_circuit::<F, C, S, D>(
builder,
&stark,
inner_config,
@@ -170,7 +170,7 @@ fn recursively_verify_stark_proof_with_challenges<
);
}
-fn eval_l_1_and_l_last_recursively<F: RichField + Extendable<D>, const D: usize>(
+fn eval_l_1_and_l_last_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
log_n: usize,
x: ExtensionTarget<D>,

starky2/src/stark.rs

@@ -66,7 +66,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// `eval_ext`, except in the context of a recursive circuit.
/// Note: constraints must be added through `yield_constr.constraint(builder, constraint)` in the
/// same order as they are given in `eval_packed_generic`.
-    fn eval_ext_recursively(
+    fn eval_ext_circuit(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,

starky2/src/vanishing_poly.rs

@@ -8,7 +8,7 @@ use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{eval_cross_table_lookup_checks, CtlCheckVars};
use crate::permutation::{
-    eval_permutation_checks, eval_permutation_checks_recursively, PermutationCheckDataTarget,
+    eval_permutation_checks, eval_permutation_checks_circuit, PermutationCheckDataTarget,
PermutationCheckVars,
};
use crate::stark::Stark;
@@ -41,7 +41,7 @@ pub(crate) fn eval_vanishing_poly<F, FE, P, C, S, const D: usize, const D2: usiz
eval_cross_table_lookup_checks::<F, FE, P, C, S, D, D2>(vars, ctl_vars, consumer);
}
-pub(crate) fn eval_vanishing_poly_recursively<F, C, S, const D: usize>(
+pub(crate) fn eval_vanishing_poly_circuit<F, C, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
@@ -55,9 +55,9 @@ pub(crate) fn eval_vanishing_poly_recursively<F, C, S, const D: usize>(
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
-    stark.eval_ext_recursively(builder, vars, consumer);
+    stark.eval_ext_circuit(builder, vars, consumer);
if let Some(permutation_data) = permutation_data {
-        eval_permutation_checks_recursively::<F, S, D>(
+        eval_permutation_checks_circuit::<F, S, D>(
builder,
stark,
config,

starky2/src/verifier.rs

@@ -6,10 +6,12 @@ use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::plonk::plonk_common::reduce_with_powers;
-use crate::all_stark::{AllStark, CpuStark, KeccakStark, Table};
+use crate::all_stark::{AllStark, Table};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
+use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars};
+use crate::keccak::keccak_stark::KeccakStark;
use crate::permutation::PermutationCheckVars;
use crate::proof::{
AllProof, AllProofChallenges, StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs,