Add mock test (doesn't work)

This commit is contained in:
wborgeaud 2022-05-12 20:38:11 +02:00
parent 3359ee708c
commit 17ba468e3a
7 changed files with 247 additions and 57 deletions

View File

@ -12,3 +12,4 @@ env_logger = "0.9.0"
itertools = "0.10.0"
log = "0.4.14"
rayon = "1.5.1"
rand = "0.8.5"

View File

@ -9,17 +9,22 @@ use crate::cross_table_lookup::CrossTableLookup;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
/// Bundles every STARK table in the system together with the cross-table
/// lookup arguments that link columns of one table to columns of another.
#[derive(Clone)]
pub struct AllStark<F: RichField + Extendable<D>, const D: usize> {
// STARK for the (mock) CPU trace table.
pub cpu_stark: CpuStark<F, D>,
// STARK for the (mock) Keccak trace table.
pub keccak_stark: KeccakStark<F, D>,
// Lookups relating selected columns of the looking table to columns of
// the looked table (see `CrossTableLookup`).
pub cross_table_lookups: Vec<CrossTableLookup>,
}
/// Mock CPU-table STARK used by the prototype cross-table-lookup test.
#[derive(Copy, Clone)]
pub struct CpuStark<F, const D: usize> {
// Number of rows in the trace this instance expects.
num_rows: usize,
// Zero-sized marker tying the struct to the field type `F`.
f: PhantomData<F>,
}
/// Mock Keccak-table STARK used by the prototype cross-table-lookup test.
#[derive(Copy, Clone)]
pub struct KeccakStark<F, const D: usize> {
// Number of rows in the trace this instance expects.
num_rows: usize,
// Zero-sized marker tying the struct to the field type `F`.
f: PhantomData<F>,
}
@ -30,7 +35,7 @@ pub enum Table {
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
const COLUMNS: usize = 0;
const COLUMNS: usize = 10;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
@ -41,7 +46,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
todo!()
}
fn eval_ext_recursively(
@ -50,16 +54,15 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
_vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
_yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
todo!()
}
fn constraint_degree(&self) -> usize {
todo!()
3
}
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
const COLUMNS: usize = 0;
const COLUMNS: usize = 7;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
@ -70,7 +73,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
todo!()
}
fn eval_ext_recursively(
@ -79,10 +81,89 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
_vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
_yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
todo!()
}
fn constraint_degree(&self) -> usize {
todo!()
3
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use plonky2::field::field_types::Field;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::util::timing::TimingTree;
use rand::{thread_rng, Rng};
use crate::all_stark::{AllStark, CpuStark, KeccakStark, Table};
use crate::config::StarkConfig;
use crate::cross_table_lookup::CrossTableLookup;
use crate::prover::prove;
use crate::stark::Stark;
use crate::verifier::verify_proof;
// End-to-end mock test: builds a 16-row CPU trace and an 8-row Keccak
// trace whose columns (2, 4) and (3, 5) respectively carry matching value
// sequences, declares a cross-table lookup between those column pairs,
// proves all tables, then verifies the combined proof.
// (Per the commit message, this test does not yet pass.)
#[test]
fn test_all_stark() -> Result<()> {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
let config = StarkConfig::standard_fast_config();
let cpu_stark = CpuStark::<F, D> {
num_rows: 1 << 4,
f: Default::default(),
};
let keccak_stark = KeccakStark::<F, D> {
num_rows: 1 << 3,
f: Default::default(),
};
// Hard-coded column counts (10 and 7) must stay in sync with
// CpuStark::COLUMNS and KeccakStark::COLUMNS; the commented-out lines
// show the intended const-based form.
// let mut cpu_trace = vec![PolynomialValues::zero(cpu_stark.num_rows); CpuStark::COLUMNS];
let mut cpu_trace = vec![PolynomialValues::zero(cpu_stark.num_rows); 10];
// let mut keccak_trace =
// vec![PolynomialValues::zero(keccak_stark.num_rows); KeccakStark::COLUMNS];
let mut keccak_trace = vec![PolynomialValues::zero(keccak_stark.num_rows); 7];
// Two value sequences shared between the tables: 0..8 and 1..=8.
let vs0 = (0..keccak_stark.num_rows)
.map(F::from_canonical_usize)
.collect::<Vec<_>>();
let vs1 = (1..=keccak_stark.num_rows)
.map(F::from_canonical_usize)
.collect::<Vec<_>>();
let start = thread_rng().gen_range(0..cpu_stark.num_rows - keccak_stark.num_rows);
// NOTE(review): this shadowing pins the offset to 0, making the random
// `thread_rng` line above dead code — presumably kept for debugging.
let start = 0;
// Write the shared values into CPU columns 2 and 4 at `start`, and into
// Keccak columns 3 and 5 over the whole (shorter) table.
cpu_trace[2].values[start..start + keccak_stark.num_rows].copy_from_slice(&vs0);
cpu_trace[4].values[start..start + keccak_stark.num_rows].copy_from_slice(&vs1);
keccak_trace[3].values[..].copy_from_slice(&vs0);
keccak_trace[5].values[..].copy_from_slice(&vs1);
// Lookup intended to relate CPU columns (2, 4) to Keccak columns (3, 5).
let cross_table_lookups = vec![CrossTableLookup {
looking_table: Table::Cpu,
looking_columns: vec![2, 4],
looked_table: Table::Keccak,
looked_columns: vec![3, 5],
}];
let all_stark = AllStark {
cpu_stark,
keccak_stark,
cross_table_lookups,
};
// Neither table has public inputs, hence `vec![vec![]; 2]`.
let proof = prove::<F, C, D>(
&all_stark,
&config,
vec![cpu_trace, keccak_trace],
vec![vec![]; 2],
&mut TimingTree::default(),
)?;
verify_proof(all_stark, proof, &config)
}
}

View File

@ -17,7 +17,7 @@ impl StarkConfig {
pub fn standard_fast_config() -> Self {
Self {
security_bits: 100,
num_challenges: 2,
num_challenges: 1,
fri_config: FriConfig {
rate_bits: 1,
cap_height: 4,

View File

@ -122,8 +122,9 @@ fn partial_products<F: Field>(
gamma: F,
) -> PolynomialValues<F> {
let mut partial_prod = F::ONE;
let mut res = Vec::new();
for i in 0..trace[0].len() {
let degree = trace[0].len();
let mut res = Vec::with_capacity(degree);
for i in 0..degree {
partial_prod *=
gamma + reduce_with_powers(columns.iter().map(|&j| &trace[j].values[i]), beta);
res.push(partial_prod);

View File

@ -1,5 +1,7 @@
//! Permutation arguments.
use std::fmt::Debug;
use itertools::Itertools;
use plonky2::field::batch_util::batch_multiply_inplace;
use plonky2::field::extension_field::{Extendable, FieldExtension};

View File

@ -31,8 +31,8 @@ use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;
pub fn prove<F, C, S, const D: usize>(
all_stark: AllStark<F, D>,
pub fn prove<F, C, const D: usize>(
all_stark: &AllStark<F, D>,
config: &StarkConfig,
trace_poly_values: Vec<Vec<PolynomialValues<F>>>,
public_inputs: Vec<Vec<F>>,
@ -48,15 +48,8 @@ where
debug_assert_eq!(num_starks, trace_poly_values.len());
debug_assert_eq!(num_starks, public_inputs.len());
let degree = trace_poly_values[0].len();
let degree_bits = log2_strict(degree);
let fri_params = config.fri_params(degree_bits);
let rate_bits = config.fri_config.rate_bits;
let cap_height = config.fri_config.cap_height;
assert!(
fri_params.total_arities() <= degree_bits + rate_bits - cap_height,
"FRI total reduction arity is too large.",
);
let trace_commitments = timed!(
timing,
@ -144,7 +137,11 @@ where
let degree_bits = log2_strict(degree);
let fri_params = config.fri_params(degree_bits);
let rate_bits = config.fri_config.rate_bits;
let _cap_height = config.fri_config.cap_height;
let cap_height = config.fri_config.cap_height;
assert!(
fri_params.total_arities() <= degree_bits + rate_bits - cap_height,
"FRI total reduction arity is too large.",
);
// Permutation arguments.
let permutation_challenges = stark.uses_permutation_args().then(|| {
@ -167,7 +164,7 @@ where
}
};
let permutation_lookup_zs_commitment = (!z_polys.is_empty()).then(|| {
let permutation_ctl_zs_commitment = (!z_polys.is_empty()).then(|| {
PolynomialBatch::from_values(
z_polys,
rate_bits,
@ -177,28 +174,31 @@ where
None,
)
});
let permutation_zs_cap = permutation_lookup_zs_commitment
let permutation_zs_cap = permutation_ctl_zs_commitment
.as_ref()
.map(|commit| commit.merkle_tree.cap.clone());
if let Some(cap) = &permutation_zs_cap {
challenger.observe_cap(cap);
}
// TODO: if no permutation but lookup, this is wrong.
let zipped = if let (Some(x), Some(y)) = (
permutation_lookup_zs_commitment.as_ref(),
permutation_challenges.as_ref(),
) {
Some((x, y))
} else {
None
};
let alphas = challenger.get_n_challenges(config.num_challenges);
test_it(
stark,
trace_commitment,
permutation_ctl_zs_commitment.as_ref(),
permutation_challenges.as_ref(),
lookup_data,
public_inputs,
alphas.clone(),
degree_bits,
num_permutation_zs,
config,
);
let quotient_polys = compute_quotient_polys::<F, <F as Packable>::Packing, C, S, D>(
stark,
trace_commitment,
zipped,
permutation_ctl_zs_commitment.as_ref(),
permutation_challenges.as_ref(),
lookup_data,
public_inputs,
alphas,
@ -246,7 +246,7 @@ where
zeta,
g,
trace_commitment,
permutation_lookup_zs_commitment.as_ref(),
permutation_ctl_zs_commitment.as_ref(),
&quotient_commitment,
degree_bits,
stark.num_permutation_batches(config),
@ -254,7 +254,7 @@ where
challenger.observe_openings(&openings.to_fri_openings());
let initial_merkle_trees = once(trace_commitment)
.chain(&permutation_lookup_zs_commitment)
.chain(&permutation_ctl_zs_commitment)
.chain(once(&quotient_commitment))
.collect_vec();
@ -288,10 +288,8 @@ where
fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
stark: &S,
trace_commitment: &'a PolynomialBatch<F, C, D>,
permutation_zs_commitment_challenges: Option<(
&'a PolynomialBatch<F, C, D>,
&'a Vec<GrandProductChallengeSet<F>>,
)>,
permutation_ctl_zs_commitment: Option<&'a PolynomialBatch<F, C, D>>,
permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
lookup_data: &LookupData<F>,
public_inputs: &[F],
alphas: Vec<F>,
@ -363,29 +361,32 @@ where
next_values: &get_trace_values_packed(i_next_start),
public_inputs,
};
let permutation_check_data = permutation_zs_commitment_challenges.as_ref().map(
|(permutation_zs_commitment, permutation_challenge_sets)| PermutationCheckVars {
local_zs: permutation_zs_commitment.get_lde_values_packed(i_start, step)
[..num_permutation_zs]
.to_vec(),
next_zs: permutation_zs_commitment.get_lde_values_packed(i_next_start, step)
[..num_permutation_zs]
.to_vec(),
permutation_challenge_sets: permutation_challenge_sets.to_vec(),
},
);
let permutation_check_data =
if let (Some(permutation_zs_commitment), Some(permutation_challenge_sets)) =
(permutation_ctl_zs_commitment, permutation_challenges)
{
Some(PermutationCheckVars {
local_zs: permutation_zs_commitment.get_lde_values_packed(i_start, step)
[..num_permutation_zs]
.to_vec(),
next_zs: permutation_zs_commitment
.get_lde_values_packed(i_next_start, step)[..num_permutation_zs]
.to_vec(),
permutation_challenge_sets: permutation_challenge_sets.to_vec(),
})
} else {
None
};
let lookup_check_data = lookup_data
.zs_columns
.iter()
.enumerate()
.map(|(i, (_, columns))| CTLCheckVars::<F, F, P, 1> {
local_z: permutation_zs_commitment_challenges
local_z: permutation_ctl_zs_commitment
.unwrap()
.0
.get_lde_values_packed(i_start, step)[num_permutation_zs + i],
next_z: permutation_zs_commitment_challenges
next_z: permutation_ctl_zs_commitment
.unwrap()
.0
.get_lde_values_packed(i_next_start, step)[num_permutation_zs + i],
challenges: lookup_data.challenges.challenges[i % config.num_challenges],
columns,
@ -415,3 +416,104 @@ where
.map(|values| values.coset_ifft(F::coset_shift()))
.collect()
}
/// Debug helper: evaluates the STARK's vanishing polynomial directly at every
/// point of the trace subgroup (NOT on the coset LDE, unlike
/// `compute_quotient_polys`) and `dbg!`-prints the per-row constraint
/// accumulators, so a violated constraint can be located row by row.
///
/// Takes the same inputs as `compute_quotient_polys`; `alphas` are the
/// challenge powers used to combine constraints, `num_permutation_zs` is the
/// number of leading Z-polynomials belonging to permutation arguments (the
/// remainder belong to cross-table lookups).
fn test_it<'a, F, C, S, const D: usize>(
    stark: &S,
    trace_commitment: &'a PolynomialBatch<F, C, D>,
    permutation_ctl_zs_commitment: Option<&'a PolynomialBatch<F, C, D>>,
    permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
    lookup_data: &LookupData<F>,
    public_inputs: &[F],
    alphas: Vec<F>,
    degree_bits: usize,
    num_permutation_zs: usize,
    config: &StarkConfig,
) where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    S: Stark<F, D>,
{
    let degree = 1 << degree_bits;
    // Evaluations of the first/last Lagrange basis polynomials on the trace
    // subgroup itself (size `degree`), used for boundary constraints.
    let lagrange_first = PolynomialValues::selector(degree, 0);
    let lagrange_last = PolynomialValues::selector(degree, degree - 1);
    let subgroup = F::two_adic_subgroup(degree_bits);
    // Evaluate every committed polynomial of `comm` at the i-th subgroup point.
    // (These are plain polynomial evaluations, not reads from the LDE.)
    let get_comm_values = |comm: &PolynomialBatch<F, C, D>, i| -> Vec<F> {
        comm.polynomials
            .iter()
            .map(|poly| poly.eval(subgroup[i]))
            .collect()
    };
    // Last element of the subgroup, i.e. g^{-1}.
    let last = F::primitive_root_of_unity(degree_bits).inverse();
    let constraint_values = (0..degree)
        .map(|i| {
            let i_next = (i + 1) % degree;
            let x = subgroup[i];
            let z_last = x - last;
            let lagrange_basis_first = lagrange_first.values[i];
            let lagrange_basis_last = lagrange_last.values[i];
            let mut consumer = ConstraintConsumer::new(
                alphas.clone(),
                z_last,
                lagrange_basis_first,
                lagrange_basis_last,
            );
            let vars = StarkEvaluationVars {
                local_values: &get_comm_values(trace_commitment, i),
                next_values: &get_comm_values(trace_commitment, i_next),
                public_inputs,
            };
            // Evaluate the Z-polynomial batch once per row; both the
            // permutation and CTL data below index into these, instead of
            // re-evaluating the whole batch for every lookup column.
            let zs_local = permutation_ctl_zs_commitment.map(|c| get_comm_values(c, i));
            let zs_next = permutation_ctl_zs_commitment.map(|c| get_comm_values(c, i_next));
            let permutation_check_data = match (
                zs_local.as_ref(),
                zs_next.as_ref(),
                permutation_challenges,
            ) {
                (Some(local), Some(next), Some(challenge_sets)) => Some(PermutationCheckVars {
                    local_zs: local[..num_permutation_zs].to_vec(),
                    next_zs: next[..num_permutation_zs].to_vec(),
                    permutation_challenge_sets: challenge_sets.to_vec(),
                }),
                _ => None,
            };
            // CTL Z-polynomials follow the permutation ones in the batch.
            let lookup_check_data = lookup_data
                .zs_columns
                .iter()
                .enumerate()
                .map(|(zs_index, (_, columns))| CTLCheckVars::<F, F, F, 1> {
                    local_z: zs_local.as_ref().unwrap()[num_permutation_zs + zs_index],
                    next_z: zs_next.as_ref().unwrap()[num_permutation_zs + zs_index],
                    challenges: lookup_data.challenges.challenges
                        [zs_index % config.num_challenges],
                    columns,
                })
                .collect::<Vec<_>>();
            eval_vanishing_poly::<F, F, F, C, S, D, 1>(
                stark,
                config,
                vars,
                permutation_check_data,
                &lookup_check_data,
                &mut consumer,
            );
            consumer.accumulators()
        })
        .collect::<Vec<_>>();
    dbg!(constraint_values);
}

View File

@ -9,7 +9,7 @@ use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::plonk::plonk_common::reduce_with_powers;
use crate::all_stark::{AllStark, KeccakStark, Table};
use crate::all_stark::{AllStark, CpuStark, KeccakStark, Table};
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cross_table_lookup::{verify_cross_table_lookups, CTLCheckVars};
@ -27,6 +27,8 @@ pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, co
config: &StarkConfig,
) -> Result<()>
where
[(); CpuStark::<F, D>::COLUMNS]:,
[(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); C::Hasher::HASH_SIZE]:,
{
@ -106,7 +108,8 @@ where
[(); S::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
check_permutation_options(&stark, proof_with_pis, &challenges)?;
// TODO: Fix this to take CTLs into account
// check_permutation_options(&stark, proof_with_pis, &challenges)?;
let StarkProofWithPublicInputs {
proof,
public_inputs,