Challenger state works

wborgeaud 2022-09-23 13:41:14 +02:00
parent 2c43da801e
commit e712986a92
7 changed files with 133 additions and 26 deletions

View File

@ -755,12 +755,13 @@ mod tests {
let circuit_config = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
let mut pw = PartialWitness::new();
recursive_all_proof.verify_circuit(&mut builder, &mut pw);
let data = builder.build::<C>();
let proof = data.prove(pw)?;
data.verify(proof)
// let mut pw = PartialWitness::new();
recursive_all_proof.verify(inner_config)
// recursive_all_proof.verify_circuit(&mut builder, &mut pw);
//
// let data = builder.build::<C>();
// let proof = data.prove(pw)?;
// data.verify(proof)
}
fn init_logger() {

View File

@ -229,6 +229,7 @@ impl<const D: usize> StarkProofTarget<D> {
challenger.observe_cap(quotient_polys_cap);
let stark_zeta = challenger.get_extension_challenge(builder);
dbg!(stark_zeta);
challenger.observe_openings(&openings.to_fri_openings(builder.zero()));
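The dbg!(stark_zeta) above appears to be a debugging aid for comparing the in-circuit challenge against the native verifier's; the two can only agree if both challengers observe the same data in the same order before squeezing. A minimal standalone sketch of that Fiat-Shamir property follows, using a toy transcript and Rust's DefaultHasher purely for illustration (none of this is plonky2 API):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct ToyTranscript { state: u64 }

impl ToyTranscript {
    fn new() -> Self { Self { state: 0 } }
    fn observe(&mut self, x: u64) {
        let mut h = DefaultHasher::new();
        (self.state, x).hash(&mut h);
        self.state = h.finish(); // absorb: the state depends on all prior observations
    }
    fn challenge(&mut self) -> u64 {
        let mut h = DefaultHasher::new();
        self.state.hash(&mut h);
        self.state = h.finish(); // squeeze: drawing a challenge also advances the state
        self.state
    }
}

fn main() {
    let (mut a, mut b) = (ToyTranscript::new(), ToyTranscript::new());
    for x in [1u64, 2, 3] { a.observe(x); }
    for x in [1u64, 2, 3] { b.observe(x); }
    // Same observations in the same order => same challenge.
    assert_eq!(a.challenge(), b.challenge());
}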

View File

@ -1,5 +1,7 @@
//! Permutation arguments.
use std::fmt::Debug;
use itertools::Itertools;
use maybe_rayon::*;
use plonky2::field::batch_util::batch_multiply_inplace;
@ -42,14 +44,14 @@ impl PermutationPair {
}
/// A single instance of a permutation check protocol.
pub(crate) struct PermutationInstance<'a, T: Copy> {
pub(crate) struct PermutationInstance<'a, T: Copy + Eq + PartialEq + Debug> {
pub(crate) pair: &'a PermutationPair,
pub(crate) challenge: GrandProductChallenge<T>,
}
/// Randomness for a single instance of a permutation check protocol.
#[derive(Copy, Clone)]
pub(crate) struct GrandProductChallenge<T: Copy> {
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallenge<T: Copy + Eq + PartialEq + Debug> {
/// Randomness used to combine multiple columns into one.
pub(crate) beta: T,
/// Random offset that's added to the beta-reduced column values.
@ -92,8 +94,8 @@ impl GrandProductChallenge<Target> {
}
/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone)]
pub(crate) struct GrandProductChallengeSet<T: Copy> {
#[derive(Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallengeSet<T: Copy + Eq + PartialEq + Debug> {
pub(crate) challenges: Vec<GrandProductChallenge<T>>,
}
@ -261,7 +263,7 @@ pub(crate) fn get_n_grand_product_challenge_sets_target<
/// Before batching, each permutation pair leads to `num_challenges` permutation arguments, so we
/// start with the cartesian product of `permutation_pairs` and `0..num_challenges`. Then we
/// chunk these arguments based on our batch size.
pub(crate) fn get_permutation_batches<'a, T: Copy>(
pub(crate) fn get_permutation_batches<'a, T: Copy + Eq + PartialEq + Debug>(
permutation_pairs: &'a [PermutationPair],
permutation_challenge_sets: &[GrandProductChallengeSet<T>],
num_challenges: usize,
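The new Eq + PartialEq + Debug bounds let the challenge types be compared and printed by the native cross-table verification added in this commit. Separately, the doc comment above describes how permutation arguments are batched; below is a minimal standalone sketch of that chunking, with a hypothetical Pair stand-in for PermutationPair and an invented batches helper rather than the crate's get_permutation_batches:

// A hypothetical stand-in for PermutationPair; only its identity matters here.
#[derive(Clone, Debug)]
struct Pair(usize);

// Cartesian product of pairs and challenge indices, then chunked by batch size,
// mirroring the doc comment above (illustrative only).
fn batches(pairs: &[Pair], num_challenges: usize, batch_size: usize) -> Vec<Vec<(Pair, usize)>> {
    let product: Vec<(Pair, usize)> = pairs
        .iter()
        .flat_map(|p| (0..num_challenges).map(move |c| (p.clone(), c)))
        .collect();
    product.chunks(batch_size).map(|chunk| chunk.to_vec()).collect()
}

fn main() {
    let pairs = vec![Pair(0), Pair(1), Pair(2)];
    // 3 pairs x 2 challenges = 6 arguments, chunked into batches of at most 4 => sizes [4, 2].
    let sizes: Vec<usize> = batches(&pairs, 2, 4).iter().map(|b| b.len()).collect();
    assert_eq!(sizes, vec![4, 2]);
}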

View File

@ -1,16 +1,19 @@
use anyhow::Result;
use anyhow::{ensure, Result};
use itertools::Itertools;
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::fri::witness_util::set_fri_proof_target;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hash_types::{HashOut, RichField};
use plonky2::hash::hashing::SPONGE_WIDTH;
use plonky2::iop::challenger::RecursiveChallenger;
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::iop::witness::{PartialWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, VerifierCircuitData, VerifierCircuitTarget};
use plonky2::plonk::config::GenericHashOut;
use plonky2::plonk::config::Hasher;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::plonk::proof::ProofWithPublicInputs;
@ -29,7 +32,8 @@ use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::{
GrandProductChallenge, GrandProductChallengeSet, PermutationCheckDataTarget,
get_grand_product_challenge_set, GrandProductChallenge, GrandProductChallengeSet,
PermutationCheckDataTarget,
};
use crate::proof::{
AllChallengerState, AllProof, AllProofChallengesTarget, AllProofTarget, BlockMetadata,
@ -52,14 +56,88 @@ pub struct RecursiveAllProof<
[(ProofWithPublicInputs<F, C, D>, VerifierCircuitData<F, C, D>); NUM_TABLES],
}
struct PublicInputs<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
trace_cap: MerkleCap<F, C::Hasher>,
ctl_zs_last: Vec<F>,
ctl_challenges: GrandProductChallengeSet<F>,
challenger_state_before: [F; SPONGE_WIDTH],
challenger_state_after: [F; SPONGE_WIDTH],
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
PublicInputs<F, C, D>
{
fn from_vec(v: &[F], config: &StarkConfig) -> Self {
let mut start = 0;
let trace_cap = MerkleCap(
v[start..4 * (1 << config.fri_config.cap_height)]
.chunks(4)
.map(|chunk| <C::Hasher as Hasher<F>>::Hash::from_vec(chunk))
.collect(),
);
start += 4 * (1 << config.fri_config.cap_height);
let ctl_challenges = GrandProductChallengeSet {
challenges: (0..config.num_challenges)
.map(|i| GrandProductChallenge {
beta: v[start + 2 * i],
gamma: v[start + 2 * i + 1],
})
.collect(),
};
start += 2 * config.num_challenges;
let challenger_state_before = v[start..start + SPONGE_WIDTH].try_into().unwrap();
let challenger_state_after = v[start + SPONGE_WIDTH..start + 2 * SPONGE_WIDTH]
.try_into()
.unwrap();
start += 2 * SPONGE_WIDTH;
let ctl_zs_last = v[start..].to_vec();
Self {
trace_cap,
ctl_zs_last,
ctl_challenges,
challenger_state_before,
challenger_state_after,
}
}
}
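from_vec walks a flat public-input vector in a fixed order: the trace cap (4 field elements per Poseidon digest, 2^cap_height digests, and relying on start still being 0 at that point), then a (beta, gamma) pair per CTL challenge, then the challenger sponge state before and after this table, with everything remaining taken as ctl_zs_last. The wrapper circuit below registers its public inputs in the same order. A standalone sketch with a hypothetical length helper (not in the commit):

// Hypothetical helper computing the fixed-size prefix that precedes ctl_zs_last
// in a single table's public inputs.
const SPONGE_WIDTH: usize = 12; // plonky2's Poseidon sponge width

fn fixed_prefix_len(cap_height: usize, num_challenges: usize) -> usize {
    let trace_cap = 4 * (1 << cap_height); // 2^cap_height digests, 4 field elements each
    let ctl_challenges = 2 * num_challenges; // one (beta, gamma) pair per challenge
    let challenger_states = 2 * SPONGE_WIDTH; // sponge state before and after this table
    trace_cap + ctl_challenges + challenger_states
    // every element past this prefix belongs to ctl_zs_last
}

fn main() {
    // e.g. cap_height = 4, num_challenges = 2 => 64 + 4 + 24 = 92 leading elements
    assert_eq!(fixed_prefix_len(4, 2), 92);
}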
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
RecursiveAllProof<F, C, D>
{
/// Verify every recursive proof.
pub fn verify(self) -> Result<()>
pub fn verify(self, inner_config: &StarkConfig) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
let pis: [_; NUM_TABLES] = std::array::from_fn(|i| {
PublicInputs::<F, C, D>::from_vec(
&self.recursive_proofs[i].0.public_inputs,
inner_config,
)
});
let mut challenger = Challenger::<F, C::Hasher>::new();
for pi in &pis {
challenger.observe_cap(&pi.trace_cap);
}
let ctl_challenges =
get_grand_product_challenge_set(&mut challenger, inner_config.num_challenges);
for pi in &pis {
ensure!(ctl_challenges == pi.ctl_challenges);
}
challenger.duplexing();
let state = challenger.state();
ensure!(state == pis[0].challenger_state_before);
for i in 1..NUM_TABLES {
dbg!(i);
dbg!(
pis[i].challenger_state_before,
pis[i - 1].challenger_state_after
);
ensure!(pis[i].challenger_state_before == pis[i - 1].challenger_state_after);
}
for (proof, verifier_data) in self.recursive_proofs {
verifier_data.verify(proof)?;
}
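The loop above enforces the core invariant of this commit: each table's recursive proof exposes its challenger state before and after its own observations, and those states must chain across tables, with the first one matching the state the native verifier reaches after observing every trace cap and drawing the CTL challenges. A minimal standalone sketch of that chaining check, with a toy State in place of [F; SPONGE_WIDTH]:

use anyhow::{ensure, Result};

// Toy stand-in for the challenger's sponge state.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct State([u64; 4]);

struct TablePi { before: State, after: State }

fn check_chaining(initial: State, tables: &[TablePi]) -> Result<()> {
    ensure!(tables[0].before == initial, "first table must start from the shared state");
    for w in tables.windows(2) {
        ensure!(w[0].after == w[1].before, "challenger state must be handed off unchanged");
    }
    Ok(())
}

fn main() -> Result<()> {
    let s = |x: u64| State([x; 4]);
    let tables = [
        TablePi { before: s(1), after: s(2) },
        TablePi { before: s(2), after: s(3) },
    ];
    check_chaining(s(1), &tables)
}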
@ -124,6 +202,14 @@ where
proof.num_ctl_zs(),
);
set_stark_proof_target(&mut pw, &proof_target, proof, builder.zero());
builder.register_public_inputs(
&proof_target
.trace_cap
.0
.iter()
.flat_map(|h| h.elements)
.collect::<Vec<_>>(),
);
let ctl_challenges_target = GrandProductChallengeSet {
challenges: (0..inner_config.num_challenges)
@ -166,6 +252,8 @@ where
let challenger_state = challenger.state();
builder.register_public_inputs(&challenger_state);
builder.register_public_inputs(&proof_target.openings.ctl_zs_last);
verify_stark_proof_with_challenges_circuit::<F, C, _, D>(
&mut builder,
&stark,

View File

@ -78,6 +78,12 @@ impl<F: RichField> GenericHashOut<F> for HashOut<F> {
fn to_vec(&self) -> Vec<F> {
self.elements.to_vec()
}
fn from_vec(v: &[F]) -> Self {
Self {
elements: v.try_into().unwrap(),
}
}
}
impl<F: Field> Default for HashOut<F> {
@ -148,6 +154,10 @@ impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
})
.collect()
}
fn from_vec(_v: &[F]) -> Self {
todo!()
}
}
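A standalone round-trip sketch of the new from_vec/to_vec pair, using toy types rather than plonky2's HashOut: from_vec is the inverse of to_vec and panics (via try_into) if the slice length does not match the hash width, just like the unwrap above. BytesHash::from_vec is left as todo!().

// Toy hash type; 4 limbs mirror HashOut's four field elements.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ToyHash { elements: [u64; 4] }

impl ToyHash {
    fn to_vec(&self) -> Vec<u64> { self.elements.to_vec() }
    fn from_vec(v: &[u64]) -> Self {
        Self { elements: v.try_into().expect("expected exactly 4 field elements") }
    }
}

fn main() {
    let h = ToyHash { elements: [1, 2, 3, 4] };
    assert_eq!(ToyHash::from_vec(&h.to_vec()), h); // round trip
}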
impl<const N: usize> Serialize for BytesHash<N> {

View File

@ -287,6 +287,9 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
}
pub fn duplexing(&mut self, builder: &mut CircuitBuilder<F, D>) {
if self.input_buffer.is_empty() {
self.sponge_state = builder.permute::<H>(self.sponge_state);
} else {
for input_chunk in self.input_buffer.chunks(SPONGE_RATE) {
// Overwrite the first r elements with the inputs. This differs from a standard sponge,
// where we would xor or add in the inputs. This is a well-known variant, though,
@ -298,6 +301,7 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
// Apply the permutation.
self.sponge_state = builder.permute::<H>(self.sponge_state);
}
}
self.output_buffer = self.sponge_state[0..SPONGE_RATE].to_vec();
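The new branch above makes duplexing apply the permutation even when nothing was absorbed, so the output buffer is always refreshed from a newly permuted sponge state, presumably to keep the recursive transcript in step with the native challenger used by the new verify. A minimal standalone sketch of this overwrite-mode duplex step, with a toy permutation standing in for Poseidon:

const WIDTH: usize = 12;
const RATE: usize = 8;

// Stand-in for the Poseidon permutation; any state-mixing function works for the sketch.
fn toy_permute(state: &mut [u64; WIDTH]) {
    for (i, lane) in state.iter_mut().enumerate() {
        *lane = lane.wrapping_mul(6364136223846793005).wrapping_add(i as u64 + 1);
    }
}

fn duplex(state: &mut [u64; WIDTH], inputs: &[u64]) -> Vec<u64> {
    if inputs.is_empty() {
        toy_permute(state); // new case: still advance the sponge when nothing was absorbed
    } else {
        for chunk in inputs.chunks(RATE) {
            state[..chunk.len()].copy_from_slice(chunk); // overwrite mode, not xor/add
            toy_permute(state);
        }
    }
    state[..RATE].to_vec() // refreshed output buffer
}

fn main() {
    let mut state = [0u64; WIDTH];
    let out = duplex(&mut state, &[]);
    assert_eq!(out.len(), RATE);
}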

View File

@ -20,6 +20,7 @@ pub trait GenericHashOut<F: RichField>:
fn from_bytes(bytes: &[u8]) -> Self;
fn to_vec(&self) -> Vec<F>;
fn from_vec(v: &[F]) -> Self;
}
/// Trait for hash functions.