Fix compile time problems and generic hash implementation (#1024)

* Fix egregious magic number.

* Remove generic consts from core permutations.

* Remove redundant `where` clauses.

* Remove HashConfig and friends.

* Refactor Permutation code.

* Remove redundant `where` clauses and `use`s.

* Introduce AlgebraicPermutation to wrap `[Target; WIDTH]`s.

* Remove `generic_const_expr` feature from plonky2!

* Remove `generic_const_expr` feature from plonky2!

* Compile time fixed! Start removing `generic_const_expr` from evm.

* Remove redundant `where` clauses from Starky.

* Remove `generic_const_expr`s from benchmarks.

* Remove redundant HASH_SIZE `where` clause.

* Clippy.

* Fix unrelated OsRng issue in `bench_recursion`.

* Fix function doc.
This commit is contained in:
Hamish Ivey-Law 2023-05-11 02:59:02 +10:00 committed by GitHub
parent 5dc043aa4c
commit 2c5f6fd62a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
54 changed files with 761 additions and 1187 deletions

View File

@ -8,7 +8,6 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
@ -328,10 +327,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
cross_table_lookups: &'a [CrossTableLookup<F>],
ctl_challenges: &'a GrandProductChallengeSet<F>,
num_permutation_zs: &[usize; NUM_TABLES],
) -> [Vec<Self>; NUM_TABLES]
where
[(); C::HCO::WIDTH]:,
{
) -> [Vec<Self>; NUM_TABLES] {
let mut ctl_zs = proofs
.iter()
.zip(num_permutation_zs)

View File

@ -3,12 +3,11 @@ use std::collections::BTreeMap;
use std::ops::Range;
use hashbrown::HashMap;
use itertools::Itertools;
use itertools::{zip_eq, Itertools};
use plonky2::field::extension::Extendable;
use plonky2::fri::FriParams;
use plonky2::gates::noop::NoopGate;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::challenger::RecursiveChallenger;
use plonky2::iop::target::{BoolTarget, Target};
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
@ -16,7 +15,7 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{
CircuitConfig, CircuitData, CommonCircuitData, VerifierCircuitTarget,
};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget};
use plonky2::recursion::cyclic_recursion::check_cyclic_proof_verifier_data;
use plonky2::recursion::dummy_circuit::cyclic_base_proof;
@ -56,7 +55,7 @@ pub struct AllRecursiveCircuits<F, C, const D: usize>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::HCO::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
/// The EVM root circuit, which aggregates the (shrunk) per-table recursive proofs.
pub root: RootCircuitData<F, C, D>,
@ -265,15 +264,12 @@ impl<F, C, const D: usize> AllRecursiveCircuits<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F> + 'static,
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::Hasher::HASH_SIZE]:,
C::Hasher: AlgebraicHasher<F>,
[(); CpuStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
pub fn to_bytes(
&self,
@ -401,14 +397,14 @@ where
let recursive_proofs =
core::array::from_fn(|i| builder.add_virtual_proof_with_pis(inner_common_data[i]));
let pis: [_; NUM_TABLES] = core::array::from_fn(|i| {
PublicInputs::<Target, C::HCO>::from_vec(
PublicInputs::<Target, <C::Hasher as AlgebraicHasher<F>>::AlgebraicPermutation>::from_vec(
&recursive_proofs[i].public_inputs,
stark_config,
)
});
let index_verifier_data = core::array::from_fn(|_i| builder.add_virtual_target());
let mut challenger = RecursiveChallenger::<F, C::HCO, C::Hasher, D>::new(&mut builder);
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(&mut builder);
for pi in &pis {
for h in &pi.trace_cap {
challenger.observe_elements(h);
@ -434,16 +430,16 @@ where
}
let state = challenger.compact(&mut builder);
for k in 0..C::HCO::WIDTH {
builder.connect(state[k], pis[0].challenger_state_before[k]);
for (&before, &s) in zip_eq(state.as_ref(), pis[0].challenger_state_before.as_ref()) {
builder.connect(before, s);
}
// Check that the challenger state is consistent between proofs.
for i in 1..NUM_TABLES {
for k in 0..C::HCO::WIDTH {
builder.connect(
pis[i].challenger_state_before[k],
pis[i - 1].challenger_state_after[k],
);
for (&before, &after) in zip_eq(
pis[i].challenger_state_before.as_ref(),
pis[i - 1].challenger_state_after.as_ref(),
) {
builder.connect(before, after);
}
}
@ -701,7 +697,7 @@ pub struct RecursiveCircuitsForTable<F, C, const D: usize>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::HCO::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
/// A map from `log_2(height)` to a chain of shrinking recursion circuits starting at that
/// height.
@ -712,10 +708,7 @@ impl<F, C, const D: usize> RecursiveCircuitsForTable<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::Hasher::HASH_SIZE]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
pub fn to_buffer(
&self,
@ -800,7 +793,7 @@ struct RecursiveCircuitsForTableSize<F, C, const D: usize>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::HCO::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
initial_wrapper: StarkWrapperCircuit<F, C, D>,
shrinking_wrappers: Vec<PlonkWrapperCircuit<F, C, D>>,
@ -810,10 +803,7 @@ impl<F, C, const D: usize> RecursiveCircuitsForTableSize<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::Hasher::HASH_SIZE]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
pub fn to_buffer(
&self,

View File

@ -1,7 +1,6 @@
use plonky2::field::extension::Extendable;
use plonky2::fri::proof::{FriProof, FriProofTarget};
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
@ -14,21 +13,14 @@ use crate::permutation::{
};
use crate::proof::*;
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D>
where
[(); C::HCO::WIDTH]:,
{
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
/// Computes all Fiat-Shamir challenges used in the STARK proof.
pub(crate) fn get_challenges(
&self,
all_stark: &AllStark<F, D>,
config: &StarkConfig,
) -> AllProofChallenges<F, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let mut challenger = Challenger::<F, C::HCO, C::Hasher>::new();
) -> AllProofChallenges<F, D> {
let mut challenger = Challenger::<F, C::Hasher>::new();
for proof in &self.stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
@ -61,12 +53,8 @@ where
&self,
all_stark: &AllStark<F, D>,
config: &StarkConfig,
) -> AllChallengerState<F, C::HCO, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let mut challenger = Challenger::<F, C::HCO, C::Hasher>::new();
) -> AllChallengerState<F, C::Hasher, D> {
let mut challenger = Challenger::<F, C::Hasher>::new();
for proof in &self.stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
@ -106,15 +94,11 @@ where
/// Computes all Fiat-Shamir challenges used in the STARK proof.
pub(crate) fn get_challenges(
&self,
challenger: &mut Challenger<F, C::HCO, C::Hasher>,
challenger: &mut Challenger<F, C::Hasher>,
stark_use_permutation: bool,
stark_permutation_batch_size: usize,
config: &StarkConfig,
) -> StarkProofChallenges<F, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> StarkProofChallenges<F, D> {
let degree_bits = self.recover_degree_bits(config);
let StarkProof {
@ -169,15 +153,13 @@ impl<const D: usize> StarkProofTarget<D> {
pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
&self,
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, C::HCO, C::Hasher, D>,
challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
stark_use_permutation: bool,
stark_permutation_batch_size: usize,
config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let StarkProofTarget {
permutation_ctl_zs_cap,

View File

@ -9,7 +9,6 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
@ -201,38 +200,29 @@ fn poly_product_elementwise<F: Field>(
product
}
fn get_grand_product_challenge<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
challenger: &mut Challenger<F, HC, H>,
) -> GrandProductChallenge<F>
where
[(); HC::WIDTH]:,
{
fn get_grand_product_challenge<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
) -> GrandProductChallenge<F> {
let beta = challenger.get_challenge();
let gamma = challenger.get_challenge();
GrandProductChallenge { beta, gamma }
}
pub(crate) fn get_grand_product_challenge_set<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
challenger: &mut Challenger<F, HC, H>,
pub(crate) fn get_grand_product_challenge_set<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
num_challenges: usize,
) -> GrandProductChallengeSet<F>
where
[(); HC::WIDTH]:,
{
) -> GrandProductChallengeSet<F> {
let challenges = (0..num_challenges)
.map(|_| get_grand_product_challenge(challenger))
.collect();
GrandProductChallengeSet { challenges }
}
pub(crate) fn get_n_grand_product_challenge_sets<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
challenger: &mut Challenger<F, HC, H>,
pub(crate) fn get_n_grand_product_challenge_sets<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
num_challenges: usize,
num_sets: usize,
) -> Vec<GrandProductChallengeSet<F>>
where
[(); HC::WIDTH]:,
{
) -> Vec<GrandProductChallengeSet<F>> {
(0..num_sets)
.map(|_| get_grand_product_challenge_set(challenger, num_challenges))
.collect()
@ -240,16 +230,12 @@ where
fn get_grand_product_challenge_target<
F: RichField + Extendable<D>,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, HC, H, D>,
) -> GrandProductChallenge<Target>
where
[(); HC::WIDTH]:,
{
challenger: &mut RecursiveChallenger<F, H, D>,
) -> GrandProductChallenge<Target> {
let beta = challenger.get_challenge(builder);
let gamma = challenger.get_challenge(builder);
GrandProductChallenge { beta, gamma }
@ -257,17 +243,13 @@ where
pub(crate) fn get_grand_product_challenge_set_target<
F: RichField + Extendable<D>,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, HC, H, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
) -> GrandProductChallengeSet<Target>
where
[(); HC::WIDTH]:,
{
) -> GrandProductChallengeSet<Target> {
let challenges = (0..num_challenges)
.map(|_| get_grand_product_challenge_target(builder, challenger))
.collect();
@ -276,18 +258,14 @@ where
pub(crate) fn get_n_grand_product_challenge_sets_target<
F: RichField + Extendable<D>,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, HC, H, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
num_sets: usize,
) -> Vec<GrandProductChallengeSet<Target>>
where
[(); HC::WIDTH]:,
{
) -> Vec<GrandProductChallengeSet<Target>> {
(0..num_sets)
.map(|_| get_grand_product_challenge_set_target(builder, challenger, num_challenges))
.collect()

View File

@ -7,11 +7,10 @@ use plonky2::fri::structure::{
FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget,
};
use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
use plonky2::hash::hashing::HashConfig;
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
use plonky2_maybe_rayon::*;
use serde::{Deserialize, Serialize};
@ -22,19 +21,13 @@ use crate::permutation::GrandProductChallengeSet;
/// A STARK proof for each table, plus some metadata used to create recursive wrapper proofs.
#[derive(Debug, Clone)]
pub struct AllProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
where
[(); C::HCO::WIDTH]:,
{
pub struct AllProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
pub stark_proofs: [StarkProofWithMetadata<F, C, D>; NUM_TABLES],
pub(crate) ctl_challenges: GrandProductChallengeSet<F>,
pub public_values: PublicValues,
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D>
where
[(); C::HCO::WIDTH]:,
{
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
pub fn degree_bits(&self, config: &StarkConfig) -> [usize; NUM_TABLES] {
core::array::from_fn(|i| self.stark_proofs[i].proof.recover_degree_bits(config))
}
@ -46,13 +39,10 @@ pub(crate) struct AllProofChallenges<F: RichField + Extendable<D>, const D: usiz
}
#[allow(unused)] // TODO: should be used soon
pub(crate) struct AllChallengerState<F: RichField + Extendable<D>, HC: HashConfig, const D: usize>
where
[(); HC::WIDTH]:,
{
pub(crate) struct AllChallengerState<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> {
/// Sponge state of the challenger before starting each proof,
/// along with the final state after all proofs are done. This final state isn't strictly needed.
pub states: [[F; HC::WIDTH]; NUM_TABLES + 1],
pub states: [H::Permutation; NUM_TABLES + 1],
pub ctl_challenges: GrandProductChallengeSet<F>,
}
@ -109,15 +99,15 @@ pub struct BlockMetadataTarget {
#[derive(Debug, Clone)]
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of permutation Z values.
pub permutation_ctl_zs_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub permutation_ctl_zs_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of trace values.
pub quotient_polys_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: FriProof<F, C::HCO, C::Hasher, D>,
pub opening_proof: FriProof<F, C::Hasher, D>,
}
/// A `StarkProof` along with some metadata about the initial Fiat-Shamir state, which is used when
@ -127,9 +117,8 @@ pub struct StarkProofWithMetadata<F, C, const D: usize>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::HCO::WIDTH]:,
{
pub(crate) init_challenger_state: [F; C::HCO::WIDTH],
pub(crate) init_challenger_state: <C::Hasher as Hasher<F>>::Permutation,
pub(crate) proof: StarkProof<F, C, D>,
}

View File

@ -11,9 +11,8 @@ use plonky2::field::types::Field;
use plonky2::field::zero_poly_coset::ZeroPolyOnCoset;
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::plonk::config::GenericConfig;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
@ -56,9 +55,6 @@ where
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); C::Hasher::HASH_SIZE]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let (proof, _outputs) = prove_with_outputs(all_stark, config, inputs, timing)?;
Ok(proof)
@ -75,14 +71,11 @@ pub fn prove_with_outputs<F, C, const D: usize>(
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::Hasher::HASH_SIZE]:,
[(); CpuStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
timed!(timing, "build kernel", Lazy::force(&KERNEL));
let (traces, public_values, outputs) = timed!(
@ -110,9 +103,6 @@ where
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); C::Hasher::HASH_SIZE]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let rate_bits = config.fri_config.rate_bits;
let cap_height = config.fri_config.cap_height;
@ -146,7 +136,7 @@ where
.iter()
.map(|c| c.merkle_tree.cap.clone())
.collect::<Vec<_>>();
let mut challenger = Challenger::<F, C::HCO, C::Hasher>::new();
let mut challenger = Challenger::<F, C::Hasher>::new();
for cap in &trace_caps {
challenger.observe_cap(cap);
}
@ -189,20 +179,17 @@ fn prove_with_commitments<F, C, const D: usize>(
trace_poly_values: [Vec<PolynomialValues<F>>; NUM_TABLES],
trace_commitments: Vec<PolynomialBatch<F, C, D>>,
ctl_data_per_table: [CtlData<F>; NUM_TABLES],
challenger: &mut Challenger<F, C::HCO, C::Hasher>,
challenger: &mut Challenger<F, C::Hasher>,
timing: &mut TimingTree,
) -> Result<[StarkProofWithMetadata<F, C, D>; NUM_TABLES]>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::Hasher::HASH_SIZE]:,
[(); CpuStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let cpu_proof = timed!(
timing,
@ -285,7 +272,7 @@ pub(crate) fn prove_single_table<F, C, S, const D: usize>(
trace_poly_values: &[PolynomialValues<F>],
trace_commitment: &PolynomialBatch<F, C, D>,
ctl_data: &CtlData<F>,
challenger: &mut Challenger<F, C::HCO, C::Hasher>,
challenger: &mut Challenger<F, C::Hasher>,
timing: &mut TimingTree,
) -> Result<StarkProofWithMetadata<F, C, D>>
where
@ -293,8 +280,6 @@ where
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let degree = trace_poly_values[0].len();
let degree_bits = log2_strict(degree);

View File

@ -1,7 +1,6 @@
use std::fmt::Debug;
use anyhow::{ensure, Result};
use itertools::Itertools;
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::fri::witness_util::set_fri_proof_target;
@ -9,14 +8,14 @@ use plonky2::gates::exponentiation::ExponentiationGate;
use plonky2::gates::gate::GateRef;
use plonky2::gates::noop::NoopGate;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::iop::witness::{PartialWitness, Witness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, CircuitData, VerifierCircuitData};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget};
use plonky2::util::reducing::ReducingFactorTarget;
use plonky2::util::serialization::{
@ -52,35 +51,25 @@ pub struct RecursiveAllProof<
pub recursive_proofs: [ProofWithPublicInputs<F, C, D>; NUM_TABLES],
}
pub(crate) struct PublicInputs<T: Copy + Eq + PartialEq + Debug, HC: HashConfig>
where
[(); HC::WIDTH]:,
pub(crate) struct PublicInputs<T: Copy + Default + Eq + PartialEq + Debug, P: PlonkyPermutation<T>>
{
pub(crate) trace_cap: Vec<Vec<T>>,
pub(crate) ctl_zs_last: Vec<T>,
pub(crate) ctl_challenges: GrandProductChallengeSet<T>,
pub(crate) challenger_state_before: [T; HC::WIDTH],
pub(crate) challenger_state_after: [T; HC::WIDTH],
pub(crate) challenger_state_before: P,
pub(crate) challenger_state_after: P,
}
/// Similar to the unstable `Iterator::next_chunk`. Could be replaced with that when it's stable.
fn next_chunk<T: Debug, const N: usize>(iter: &mut impl Iterator<Item = T>) -> [T; N] {
(0..N)
.flat_map(|_| iter.next())
.collect_vec()
.try_into()
.expect("Not enough elements")
}
impl<T: Copy + Eq + PartialEq + Debug, HC: HashConfig> PublicInputs<T, HC>
where
[(); HC::WIDTH]:,
{
impl<T: Copy + Debug + Default + Eq + PartialEq, P: PlonkyPermutation<T>> PublicInputs<T, P> {
pub(crate) fn from_vec(v: &[T], config: &StarkConfig) -> Self {
let mut iter = v.iter().copied();
let trace_cap = (0..config.fri_config.num_cap_elements())
.map(|_| next_chunk::<_, 4>(&mut iter).to_vec())
.collect();
// TODO: Document magic number 4; probably comes from
// Ethereum 256 bits = 4 * Goldilocks 64 bits
let nelts = config.fri_config.num_cap_elements();
let mut trace_cap = Vec::with_capacity(nelts);
for i in 0..nelts {
trace_cap.push(v[4 * i..4 * (i + 1)].to_vec());
}
let mut iter = v.iter().copied().skip(4 * nelts);
let ctl_challenges = GrandProductChallengeSet {
challenges: (0..config.num_challenges)
.map(|_| GrandProductChallenge {
@ -89,8 +78,8 @@ where
})
.collect(),
};
let challenger_state_before = next_chunk(&mut iter);
let challenger_state_after = next_chunk(&mut iter);
let challenger_state_before = P::new(&mut iter);
let challenger_state_after = P::new(&mut iter);
let ctl_zs_last: Vec<_> = iter.collect();
Self {
@ -112,19 +101,15 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
verifier_data: &[VerifierCircuitData<F, C, D>; NUM_TABLES],
cross_table_lookups: Vec<CrossTableLookup<F>>,
inner_config: &StarkConfig,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<()> {
let pis: [_; NUM_TABLES] = core::array::from_fn(|i| {
PublicInputs::<F, C::HCO>::from_vec(
PublicInputs::<F, <C::Hasher as Hasher<F>>::Permutation>::from_vec(
&self.recursive_proofs[i].public_inputs,
inner_config,
)
});
let mut challenger = Challenger::<F, C::HCO, C::Hasher>::new();
let mut challenger = Challenger::<F, C::Hasher>::new();
for pi in &pis {
for h in &pi.trace_cap {
challenger.observe_elements(h);
@ -165,12 +150,13 @@ pub(crate) struct StarkWrapperCircuit<F, C, const D: usize>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::HCO::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
pub(crate) circuit: CircuitData<F, C, D>,
pub(crate) stark_proof_target: StarkProofTarget<D>,
pub(crate) ctl_challenges_target: GrandProductChallengeSet<Target>,
pub(crate) init_challenger_state_target: [Target; C::HCO::WIDTH],
pub(crate) init_challenger_state_target:
<C::Hasher as AlgebraicHasher<F>>::AlgebraicPermutation,
pub(crate) zero_target: Target,
}
@ -178,9 +164,7 @@ impl<F, C, const D: usize> StarkWrapperCircuit<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
pub fn to_buffer(
&self,
@ -189,7 +173,7 @@ where
generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
) -> IoResult<()> {
buffer.write_circuit_data(&self.circuit, gate_serializer, generator_serializer)?;
buffer.write_target_vec(&self.init_challenger_state_target)?;
buffer.write_target_vec(self.init_challenger_state_target.as_ref())?;
buffer.write_target(self.zero_target)?;
self.stark_proof_target.to_buffer(buffer)?;
self.ctl_challenges_target.to_buffer(buffer)?;
@ -202,7 +186,9 @@ where
generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
) -> IoResult<Self> {
let circuit = buffer.read_circuit_data(gate_serializer, generator_serializer)?;
let init_challenger_state_target = buffer.read_target_vec()?;
let target_vec = buffer.read_target_vec()?;
let init_challenger_state_target =
<C::Hasher as AlgebraicHasher<F>>::AlgebraicPermutation::new(target_vec.into_iter());
let zero_target = buffer.read_target()?;
let stark_proof_target = StarkProofTarget::from_buffer(buffer)?;
let ctl_challenges_target = GrandProductChallengeSet::from_buffer(buffer)?;
@ -210,7 +196,7 @@ where
circuit,
stark_proof_target,
ctl_challenges_target,
init_challenger_state_target: init_challenger_state_target.try_into().unwrap(),
init_challenger_state_target,
zero_target,
})
}
@ -240,8 +226,8 @@ where
}
inputs.set_target_arr(
self.init_challenger_state_target,
proof_with_metadata.init_challenger_state,
self.init_challenger_state_target.as_ref(),
proof_with_metadata.init_challenger_state.as_ref(),
);
self.circuit.prove(inputs)
@ -263,9 +249,7 @@ impl<F, C, const D: usize> PlonkWrapperCircuit<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
pub(crate) fn prove(
&self,
@ -294,9 +278,7 @@ pub(crate) fn recursive_stark_circuit<
) -> StarkWrapperCircuit<F, C, D>
where
[(); S::COLUMNS]:,
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let mut builder = CircuitBuilder::<F, D>::new(circuit_config.clone());
let zero_target = builder.zero();
@ -333,9 +315,12 @@ where
num_permutation_zs,
);
let init_challenger_state_target = core::array::from_fn(|_| builder.add_virtual_public_input());
let init_challenger_state_target =
<C::Hasher as AlgebraicHasher<F>>::AlgebraicPermutation::new(std::iter::from_fn(|| {
Some(builder.add_virtual_public_input())
}));
let mut challenger =
RecursiveChallenger::<F, C::HCO, C::Hasher, D>::from_state(init_challenger_state_target);
RecursiveChallenger::<F, C::Hasher, D>::from_state(init_challenger_state_target);
let challenges = proof_target.get_challenges::<F, C>(
&mut builder,
&mut challenger,
@ -344,7 +329,7 @@ where
inner_config,
);
let challenger_state = challenger.compact(&mut builder);
builder.register_public_inputs(&challenger_state);
builder.register_public_inputs(challenger_state.as_ref());
builder.register_public_inputs(&proof_target.openings.ctl_zs_last);
@ -399,9 +384,8 @@ fn verify_stark_proof_with_challenges_circuit<
ctl_vars: &[CtlCheckVarsTarget<F, D>],
inner_config: &StarkConfig,
) where
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); C::HCO::WIDTH]:,
{
let zero = builder.zero();
let one = builder.one_extension();
@ -620,7 +604,7 @@ pub(crate) fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D:
zero: Target,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
W: Witness<F>,
{
witness.set_cap_target(&proof_target.trace_cap, &proof.trace_cap);
@ -674,16 +658,16 @@ pub(crate) fn set_trie_roots_target<F, W, const D: usize>(
W: Witness<F>,
{
witness.set_target_arr(
trie_roots_target.state_root,
h256_limbs(trie_roots.state_root),
&trie_roots_target.state_root,
&h256_limbs(trie_roots.state_root),
);
witness.set_target_arr(
trie_roots_target.transactions_root,
h256_limbs(trie_roots.transactions_root),
&trie_roots_target.transactions_root,
&h256_limbs(trie_roots.transactions_root),
);
witness.set_target_arr(
trie_roots_target.receipts_root,
h256_limbs(trie_roots.receipts_root),
&trie_roots_target.receipts_root,
&h256_limbs(trie_roots.receipts_root),
);
}
@ -696,8 +680,8 @@ pub(crate) fn set_block_metadata_target<F, W, const D: usize>(
W: Witness<F>,
{
witness.set_target_arr(
block_metadata_target.block_beneficiary,
h160_limbs(block_metadata.block_beneficiary),
&block_metadata_target.block_beneficiary,
&h160_limbs(block_metadata.block_beneficiary),
);
witness.set_target(
block_metadata_target.block_timestamp,

View File

@ -3,11 +3,11 @@ use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::field::types::{Field, Sample};
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::hash::hashing::PlonkyPermutation;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::util::transpose;
use plonky2_util::{log2_ceil, log2_strict};
@ -87,8 +87,8 @@ pub fn test_stark_circuit_constraints<
) -> Result<()>
where
[(); S::COLUMNS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
[(); <C::Hasher as Hasher<F>>::Permutation::WIDTH]:,
[(); <C::InnerHasher as Hasher<F>>::Permutation::WIDTH]:,
{
// Compute native constraint evaluation on random values.
let vars = StarkEvaluationVars {

View File

@ -5,7 +5,6 @@ use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::types::Field;
use plonky2::fri::verifier::verify_fri_proof;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::plonk_common::reduce_with_powers;
@ -37,8 +36,6 @@ where
[(); KeccakSpongeStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let AllProofChallenges {
stark_challenges,
@ -120,7 +117,6 @@ pub(crate) fn verify_stark_proof_with_challenges<
) -> Result<()>
where
[(); S::COLUMNS]:,
[(); C::HCO::WIDTH]:,
{
log::debug!("Checking proof: {}", type_name::<S>());
validate_proof_shape(stark, proof, config, ctl_vars.len())?;

View File

@ -6,16 +6,14 @@ use plonky2::field::types::Sample;
use plonky2::hash::hash_types::{BytesHash, RichField};
use plonky2::hash::keccak::KeccakHash;
use plonky2::hash::poseidon::{Poseidon, SPONGE_WIDTH};
use plonky2::plonk::config::{Hasher, KeccakHashConfig};
use plonky2::plonk::config::Hasher;
use tynm::type_name;
pub(crate) fn bench_keccak<F: RichField>(c: &mut Criterion) {
c.bench_function("keccak256", |b| {
b.iter_batched(
|| (BytesHash::<32>::rand(), BytesHash::<32>::rand()),
|(left, right)| {
<KeccakHash<32> as Hasher<F, KeccakHashConfig>>::two_to_one(left, right)
},
|(left, right)| <KeccakHash<32> as Hasher<F>>::two_to_one(left, right),
BatchSize::SmallInput,
)
});

View File

@ -1,23 +1,17 @@
#![feature(generic_const_exprs)]
mod allocator;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::hash::keccak::KeccakHash;
use plonky2::hash::merkle_tree::MerkleTree;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::plonk::config::{Hasher, KeccakHashConfig, PoseidonHashConfig};
use plonky2::plonk::config::Hasher;
use tynm::type_name;
const ELEMS_PER_LEAF: usize = 135;
pub(crate) fn bench_merkle_tree<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(c: &mut Criterion)
where
[(); HC::WIDTH]:,
{
pub(crate) fn bench_merkle_tree<F: RichField, H: Hasher<F>>(c: &mut Criterion) {
let mut group = c.benchmark_group(&format!(
"merkle-tree<{}, {}>",
type_name::<F>(),
@ -29,14 +23,14 @@ where
let size = 1 << size_log;
group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, _| {
let leaves = vec![F::rand_vec(ELEMS_PER_LEAF); size];
b.iter(|| MerkleTree::<F, HC, H>::new(leaves.clone(), 0));
b.iter(|| MerkleTree::<F, H>::new(leaves.clone(), 0));
});
}
}
fn criterion_benchmark(c: &mut Criterion) {
bench_merkle_tree::<GoldilocksField, PoseidonHashConfig, PoseidonHash>(c);
bench_merkle_tree::<GoldilocksField, KeccakHashConfig, KeccakHash<25>>(c);
bench_merkle_tree::<GoldilocksField, PoseidonHash>(c);
bench_merkle_tree::<GoldilocksField, KeccakHash<25>>(c);
}
criterion_group!(benches, criterion_benchmark);

View File

@ -3,8 +3,6 @@
// put it in `src/bin/`, but then we wouldn't have access to
// `[dev-dependencies]`.
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
#![allow(clippy::upper_case_acronyms)]
use core::num::ParseIntError;
@ -15,7 +13,6 @@ use anyhow::{anyhow, Context as _, Result};
use log::{info, Level, LevelFilter};
use plonky2::gates::noop::NoopGate;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, CommonCircuitData, VerifierOnlyCircuitData};
@ -68,11 +65,7 @@ struct Options {
fn dummy_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
config: &CircuitConfig,
log2_size: usize,
) -> Result<ProofTuple<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<ProofTuple<F, C, D>> {
// 'size' is in degree, but we want number of noop gates. A non-zero amount of padding will be added and size will be rounded to the next power of two. To hit our target size, we go just under the previous power of two and hope padding is less than half the proof.
let num_dummy_gates = match log2_size {
0 => return Err(anyhow!("size must be at least 1")),
@ -109,11 +102,7 @@ fn recursive_proof<
min_degree_bits: Option<usize>,
) -> Result<ProofTuple<F, C, D>>
where
InnerC::Hasher: AlgebraicHasher<F, InnerC::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
[(); InnerC::HCO::WIDTH]:,
[(); InnerC::HCI::WIDTH]:,
InnerC::Hasher: AlgebraicHasher<F>,
{
let (inner_proof, inner_vd, inner_cd) = inner;
let mut builder = CircuitBuilder::<F, D>::new(config.clone());
@ -155,11 +144,7 @@ fn test_serialization<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>,
proof: &ProofWithPublicInputs<F, C, D>,
vd: &VerifierOnlyCircuitData<C, D>,
cd: &CommonCircuitData<F, D>,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<()> {
let proof_bytes = proof.to_bytes();
info!("Proof length: {} bytes", proof_bytes.len());
let proof_from_bytes = ProofWithPublicInputs::from_bytes(proof_bytes, cd)?;
@ -252,8 +237,7 @@ fn main() -> Result<()> {
builder.try_init()?;
// Initialize randomness source
let mut rng = OsRng;
let rng_seed = options.seed.unwrap_or_else(|| rng.next_u64());
let rng_seed = options.seed.unwrap_or_else(|| OsRng.next_u64());
info!("Using random seed {rng_seed:16x}");
let _rng = ChaCha8Rng::seed_from_u64(rng_seed);
// TODO: Use `rng` to create deterministic runs

View File

@ -76,7 +76,7 @@ impl<F, C, const D: usize> WitnessGeneratorSerializer<F, D> for CustomGeneratorS
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F> + 'static,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
{
impl_generator_serializer! {
CustomGeneratorSerializer,

View File

@ -5,21 +5,16 @@ use crate::fri::structure::{FriOpenings, FriOpeningsTarget};
use crate::fri::FriConfig;
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::hash::hash_types::{MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::challenger::{Challenger, RecursiveChallenger};
use crate::iop::target::Target;
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
impl<F: RichField, HCO: HashConfig, H: Hasher<F, HCO>> Challenger<F, HCO, H>
where
[(); HCO::WIDTH]:,
{
impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
pub fn observe_openings<const D: usize>(&mut self, openings: &FriOpenings<F, D>)
where
F: RichField + Extendable<D>,
[(); HCO::WIDTH]:,
{
for v in &openings.batches {
self.observe_extension_elements(&v.values);
@ -28,7 +23,7 @@ where
pub fn fri_challenges<C: GenericConfig<D, F = F>, const D: usize>(
&mut self,
commit_phase_merkle_caps: &[MerkleCap<F, C::HCO, C::Hasher>],
commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
final_poly: &PolynomialCoeffs<F::Extension>,
pow_witness: F,
degree_bits: usize,
@ -36,8 +31,6 @@ where
) -> FriChallenges<F, D>
where
F: RichField + Extendable<D>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let num_fri_queries = config.num_query_rounds;
let lde_size = 1 << (degree_bits + config.rate_bits);
@ -48,7 +41,7 @@ where
let fri_betas = commit_phase_merkle_caps
.iter()
.map(|cap| {
self.observe_cap::<C::HCO, C::Hasher>(cap);
self.observe_cap::<C::Hasher>(cap);
self.get_extension_challenge::<D>()
})
.collect();
@ -71,15 +64,10 @@ where
}
}
impl<F: RichField + Extendable<D>, HCO: HashConfig, H: AlgebraicHasher<F, HCO>, const D: usize>
RecursiveChallenger<F, HCO, H, D>
where
[(); HCO::WIDTH]:,
impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
RecursiveChallenger<F, H, D>
{
pub fn observe_openings(&mut self, openings: &FriOpeningsTarget<D>)
where
[(); HCO::WIDTH]:,
{
pub fn observe_openings(&mut self, openings: &FriOpeningsTarget<D>) {
for v in &openings.batches {
self.observe_extension_elements(&v.values);
}

View File

@ -14,7 +14,6 @@ use crate::fri::prover::fri_proof;
use crate::fri::structure::{FriBatchInfo, FriInstanceInfo};
use crate::fri::FriParams;
use crate::hash::hash_types::RichField;
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_tree::MerkleTree;
use crate::iop::challenger::Challenger;
use crate::plonk::config::GenericConfig;
@ -31,7 +30,7 @@ pub const SALT_SIZE: usize = 4;
pub struct PolynomialBatch<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
{
pub polynomials: Vec<PolynomialCoeffs<F>>,
pub merkle_tree: MerkleTree<F, C::HCO, C::Hasher>,
pub merkle_tree: MerkleTree<F, C::Hasher>,
pub degree_log: usize,
pub rate_bits: usize,
pub blinding: bool,
@ -48,10 +47,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
cap_height: usize,
timing: &mut TimingTree,
fft_root_table: Option<&FftRootTable<F>>,
) -> Self
where
[(); C::HCO::WIDTH]:,
{
) -> Self {
let coeffs = timed!(
timing,
"IFFT",
@ -76,10 +72,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
cap_height: usize,
timing: &mut TimingTree,
fft_root_table: Option<&FftRootTable<F>>,
) -> Self
where
[(); C::HCO::WIDTH]:,
{
) -> Self {
let degree = polynomials[0].len();
let lde_values = timed!(
timing,
@ -169,14 +162,10 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
pub fn prove_openings(
instance: &FriInstanceInfo<F, D>,
oracles: &[&Self],
challenger: &mut Challenger<F, C::HCO, C::Hasher>,
challenger: &mut Challenger<F, C::Hasher>,
fri_params: &FriParams,
timing: &mut TimingTree,
) -> FriProof<F, C::HCO, C::Hasher, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> FriProof<F, C::Hasher, D> {
assert!(D > 1, "Not implemented for D=1.");
let alpha = challenger.get_extension_challenge::<D>();
let mut alpha = ReducingFactor::new(alpha);

View File

@ -10,7 +10,6 @@ use crate::field::polynomial::PolynomialCoeffs;
use crate::fri::FriParams;
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::hash::hash_types::{MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_proofs::{MerkleProof, MerkleProofTarget};
use crate::hash::merkle_tree::MerkleCap;
use crate::hash::path_compression::{compress_merkle_proofs, decompress_merkle_proofs};
@ -23,14 +22,9 @@ use crate::plonk::proof::{FriInferredElements, ProofChallenges};
/// Evaluations and Merkle proof produced by the prover in a FRI query step.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriQueryStep<
F: RichField + Extendable<D>,
HC: HashConfig,
H: Hasher<F, HC>,
const D: usize,
> {
pub struct FriQueryStep<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> {
pub evals: Vec<F::Extension>,
pub merkle_proof: MerkleProof<F, HC, H>,
pub merkle_proof: MerkleProof<F, H>,
}
#[derive(Clone, Debug, Eq, PartialEq)]
@ -43,11 +37,11 @@ pub struct FriQueryStepTarget<const D: usize> {
/// before they are combined into a composition polynomial.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriInitialTreeProof<F: RichField, HC: HashConfig, H: Hasher<F, HC>> {
pub evals_proofs: Vec<(Vec<F>, MerkleProof<F, HC, H>)>,
pub struct FriInitialTreeProof<F: RichField, H: Hasher<F>> {
pub evals_proofs: Vec<(Vec<F>, MerkleProof<F, H>)>,
}
impl<F: RichField, HC: HashConfig, H: Hasher<F, HC>> FriInitialTreeProof<F, HC, H> {
impl<F: RichField, H: Hasher<F>> FriInitialTreeProof<F, H> {
pub(crate) fn unsalted_eval(&self, oracle_index: usize, poly_index: usize, salted: bool) -> F {
self.unsalted_evals(oracle_index, salted)[poly_index]
}
@ -82,14 +76,9 @@ impl FriInitialTreeProofTarget {
/// Proof for a FRI query round.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriQueryRound<
F: RichField + Extendable<D>,
HC: HashConfig,
H: Hasher<F, HC>,
const D: usize,
> {
pub initial_trees_proof: FriInitialTreeProof<F, HC, H>,
pub steps: Vec<FriQueryStep<F, HC, H, D>>,
pub struct FriQueryRound<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> {
pub initial_trees_proof: FriInitialTreeProof<F, H>,
pub steps: Vec<FriQueryStep<F, H, D>>,
}
#[derive(Clone, Debug, Eq, PartialEq)]
@ -101,28 +90,22 @@ pub struct FriQueryRoundTarget<const D: usize> {
/// Compressed proof of the FRI query rounds.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct CompressedFriQueryRounds<
F: RichField + Extendable<D>,
HC: HashConfig,
H: Hasher<F, HC>,
const D: usize,
> {
pub struct CompressedFriQueryRounds<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> {
/// Query indices.
pub indices: Vec<usize>,
/// Map from initial indices `i` to the `FriInitialProof` for the `i`th leaf.
pub initial_trees_proofs: HashMap<usize, FriInitialTreeProof<F, HC, H>>,
pub initial_trees_proofs: HashMap<usize, FriInitialTreeProof<F, H>>,
/// For each FRI query step, a map from indices `i` to the `FriQueryStep` for the `i`th leaf.
pub steps: Vec<HashMap<usize, FriQueryStep<F, HC, H, D>>>,
pub steps: Vec<HashMap<usize, FriQueryStep<F, H, D>>>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriProof<F: RichField + Extendable<D>, HC: HashConfig, H: Hasher<F, HC>, const D: usize>
{
pub struct FriProof<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> {
/// A Merkle cap for each reduced polynomial in the commit phase.
pub commit_phase_merkle_caps: Vec<MerkleCap<F, HC, H>>,
pub commit_phase_merkle_caps: Vec<MerkleCap<F, H>>,
/// Query rounds proofs
pub query_round_proofs: Vec<FriQueryRound<F, HC, H, D>>,
pub query_round_proofs: Vec<FriQueryRound<F, H, D>>,
/// The final polynomial in coefficient form.
pub final_poly: PolynomialCoeffs<F::Extension>,
/// Witness showing that the prover did PoW.
@ -139,31 +122,20 @@ pub struct FriProofTarget<const D: usize> {
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct CompressedFriProof<
F: RichField + Extendable<D>,
HC: HashConfig,
H: Hasher<F, HC>,
const D: usize,
> {
pub struct CompressedFriProof<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> {
/// A Merkle cap for each reduced polynomial in the commit phase.
pub commit_phase_merkle_caps: Vec<MerkleCap<F, HC, H>>,
pub commit_phase_merkle_caps: Vec<MerkleCap<F, H>>,
/// Compressed query rounds proof.
pub query_round_proofs: CompressedFriQueryRounds<F, HC, H, D>,
pub query_round_proofs: CompressedFriQueryRounds<F, H, D>,
/// The final polynomial in coefficient form.
pub final_poly: PolynomialCoeffs<F::Extension>,
/// Witness showing that the prover did PoW.
pub pow_witness: F,
}
impl<F: RichField + Extendable<D>, HCO: HashConfig, H: Hasher<F, HCO>, const D: usize>
FriProof<F, HCO, H, D>
{
impl<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> FriProof<F, H, D> {
/// Compress all the Merkle paths in the FRI proof and remove duplicate indices.
pub fn compress(
self,
indices: &[usize],
params: &FriParams,
) -> CompressedFriProof<F, HCO, H, D> {
pub fn compress(self, indices: &[usize], params: &FriParams) -> CompressedFriProof<F, H, D> {
let FriProof {
commit_phase_merkle_caps,
query_round_proofs,
@ -263,19 +235,14 @@ impl<F: RichField + Extendable<D>, HCO: HashConfig, H: Hasher<F, HCO>, const D:
}
}
impl<F: RichField + Extendable<D>, HCO: HashConfig, H: Hasher<F, HCO>, const D: usize>
CompressedFriProof<F, HCO, H, D>
{
impl<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> CompressedFriProof<F, H, D> {
/// Decompress all the Merkle paths in the FRI proof and reinsert duplicate indices.
pub(crate) fn decompress(
self,
challenges: &ProofChallenges<F, D>,
fri_inferred_elements: FriInferredElements<F, D>,
params: &FriParams,
) -> FriProof<F, HCO, H, D>
where
[(); HCO::WIDTH]:,
{
) -> FriProof<F, H, D> {
let CompressedFriProof {
commit_phase_merkle_caps,
query_round_proofs,

View File

@ -7,10 +7,10 @@ use crate::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use crate::fri::proof::{FriInitialTreeProof, FriProof, FriQueryRound, FriQueryStep};
use crate::fri::{FriConfig, FriParams};
use crate::hash::hash_types::RichField;
use crate::hash::hashing::{HashConfig, PlonkyPermutation};
use crate::hash::hashing::PlonkyPermutation;
use crate::hash::merkle_tree::MerkleTree;
use crate::iop::challenger::Challenger;
use crate::plonk::config::{GenericConfig, Hasher};
use crate::plonk::config::GenericConfig;
use crate::plonk::plonk_common::reduce_with_powers;
use crate::timed;
use crate::util::reverse_index_bits_in_place;
@ -18,19 +18,15 @@ use crate::util::timing::TimingTree;
/// Builds a FRI proof.
pub fn fri_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
initial_merkle_trees: &[&MerkleTree<F, C::HCO, C::Hasher>],
initial_merkle_trees: &[&MerkleTree<F, C::Hasher>],
// Coefficients of the polynomial on which the LDT is performed. Only the first `1/rate` coefficients are non-zero.
lde_polynomial_coeffs: PolynomialCoeffs<F::Extension>,
// Evaluation of the polynomial on the large domain.
lde_polynomial_values: PolynomialValues<F::Extension>,
challenger: &mut Challenger<F, C::HCO, C::Hasher>,
challenger: &mut Challenger<F, C::Hasher>,
fri_params: &FriParams,
timing: &mut TimingTree,
) -> FriProof<F, C::HCO, C::Hasher, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> FriProof<F, C::Hasher, D> {
let n = lde_polynomial_values.len();
assert_eq!(lde_polynomial_coeffs.len(), n);
@ -66,19 +62,16 @@ where
}
type FriCommitedTrees<F, C, const D: usize> = (
Vec<MerkleTree<F, <C as GenericConfig<D>>::HCO, <C as GenericConfig<D>>::Hasher>>,
Vec<MerkleTree<F, <C as GenericConfig<D>>::Hasher>>,
PolynomialCoeffs<<F as Extendable<D>>::Extension>,
);
fn fri_committed_trees<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
mut coeffs: PolynomialCoeffs<F::Extension>,
mut values: PolynomialValues<F::Extension>,
challenger: &mut Challenger<F, C::HCO, C::Hasher>,
challenger: &mut Challenger<F, C::Hasher>,
fri_params: &FriParams,
) -> FriCommitedTrees<F, C, D>
where
[(); C::HCO::WIDTH]:,
{
) -> FriCommitedTrees<F, C, D> {
let mut trees = Vec::new();
let mut shift = F::MULTIPLICATIVE_GROUP_GENERATOR;
@ -91,8 +84,7 @@ where
.par_chunks(arity)
.map(|chunk: &[F::Extension]| flatten(chunk))
.collect();
let tree =
MerkleTree::<F, C::HCO, C::Hasher>::new(chunked_values, fri_params.config.cap_height);
let tree = MerkleTree::<F, C::Hasher>::new(chunked_values, fri_params.config.cap_height);
challenger.observe_cap(&tree.cap);
trees.push(tree);
@ -121,13 +113,9 @@ where
/// Performs the proof-of-work (a.k.a. grinding) step of the FRI protocol. Returns the PoW witness.
fn fri_proof_of_work<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
challenger: &mut Challenger<F, C::HCO, C::Hasher>,
challenger: &mut Challenger<F, C::Hasher>,
config: &FriConfig,
) -> F
where
[(); C::HCI::WIDTH]:,
[(); C::HCO::WIDTH]:,
{
) -> F {
let min_leading_zeros = config.proof_of_work_bits + (64 - F::order().bits()) as u32;
// The easiest implementation would be repeatedly clone our Challenger. With each clone, we'd
@ -138,8 +126,8 @@ where
// since it stores vectors, which means allocations. We'd like a more compact state to clone.
//
// We know that a duplex will be performed right after we send the PoW witness, so we can ignore
// any output_buffer, which will be invalidated. We also know input_buffer.len() < HCO::WIDTH,
// an invariant of Challenger.
// any output_buffer, which will be invalidated. We also know
// input_buffer.len() < H::Permutation::WIDTH, an invariant of Challenger.
//
// We separate the duplex operation into two steps, one which can be performed now, and the
// other which depends on the PoW witness candidate. The first step is the overwrite our sponge
@ -148,20 +136,15 @@ where
// obtaining our duplex's post-state which contains the PoW response.
let mut duplex_intermediate_state = challenger.sponge_state;
let witness_input_pos = challenger.input_buffer.len();
for (i, input) in challenger.input_buffer.iter().enumerate() {
duplex_intermediate_state[i] = *input;
}
duplex_intermediate_state.set_from_iter(challenger.input_buffer.clone().into_iter(), 0);
let pow_witness = (0..=F::NEG_ONE.to_canonical_u64())
.into_par_iter()
.find_any(|&candidate| {
let mut duplex_state = duplex_intermediate_state;
duplex_state[witness_input_pos] = F::from_canonical_u64(candidate);
duplex_state =
<<C as GenericConfig<D>>::Hasher as Hasher<F, C::HCO>>::Permutation::permute(
duplex_state,
);
let pow_response = duplex_state[C::HCO::RATE - 1];
duplex_state.set_elt(F::from_canonical_u64(candidate), witness_input_pos);
duplex_state.permute();
let pow_response = duplex_state.squeeze().iter().last().unwrap();
let leading_zeros = pow_response.to_canonical_u64().leading_zeros();
leading_zeros >= min_leading_zeros
})
@ -181,15 +164,12 @@ fn fri_prover_query_rounds<
C: GenericConfig<D, F = F>,
const D: usize,
>(
initial_merkle_trees: &[&MerkleTree<F, C::HCO, C::Hasher>],
trees: &[MerkleTree<F, C::HCO, C::Hasher>],
challenger: &mut Challenger<F, C::HCO, C::Hasher>,
initial_merkle_trees: &[&MerkleTree<F, C::Hasher>],
trees: &[MerkleTree<F, C::Hasher>],
challenger: &mut Challenger<F, C::Hasher>,
n: usize,
fri_params: &FriParams,
) -> Vec<FriQueryRound<F, C::HCO, C::Hasher, D>>
where
[(); C::HCO::WIDTH]:,
{
) -> Vec<FriQueryRound<F, C::Hasher, D>> {
challenger
.get_n_challenges(fri_params.config.num_query_rounds)
.into_par_iter()
@ -205,14 +185,11 @@ fn fri_prover_query_round<
C: GenericConfig<D, F = F>,
const D: usize,
>(
initial_merkle_trees: &[&MerkleTree<F, C::HCO, C::Hasher>],
trees: &[MerkleTree<F, C::HCO, C::Hasher>],
initial_merkle_trees: &[&MerkleTree<F, C::Hasher>],
trees: &[MerkleTree<F, C::Hasher>],
mut x_index: usize,
fri_params: &FriParams,
) -> FriQueryRound<F, C::HCO, C::Hasher, D>
where
[(); C::HCO::WIDTH]:,
{
) -> FriQueryRound<F, C::Hasher, D> {
let mut query_steps = Vec::new();
let initial_proof = initial_merkle_trees
.iter()

View File

@ -14,7 +14,6 @@ use crate::gates::coset_interpolation::CosetInterpolationGate;
use crate::gates::gate::Gate;
use crate::gates::random_access::RandomAccessGate;
use crate::hash::hash_types::{MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::iop::ext_target::{flatten_target, ExtensionTarget};
use crate::iop::target::{BoolTarget, Target};
use crate::plonk::circuit_builder::CircuitBuilder;
@ -108,8 +107,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
proof: &FriProofTarget<D>,
params: &FriParams,
) where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
if let Some(max_arity_bits) = params.max_arity_bits() {
self.check_recursion_config(max_arity_bits);
@ -177,15 +175,13 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}
}
fn fri_verify_initial_proof<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
fn fri_verify_initial_proof<H: AlgebraicHasher<F>>(
&mut self,
x_index_bits: &[BoolTarget],
proof: &FriInitialTreeProofTarget,
initial_merkle_caps: &[MerkleCapTarget],
cap_index: Target,
) where
[(); HC::WIDTH]:,
{
) {
for (i, ((evals, merkle_proof), cap)) in proof
.evals_proofs
.iter()
@ -195,7 +191,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
with_context!(
self,
&format!("verify {i}'th initial Merkle proof"),
self.verify_merkle_proof_to_cap_with_cap_index::<HC, H>(
self.verify_merkle_proof_to_cap_with_cap_index::<H>(
evals.clone(),
x_index_bits,
cap_index,
@ -262,8 +258,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
round_proof: &FriQueryRoundTarget<D>,
params: &FriParams,
) where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let n_log = log2_strict(n);
@ -277,7 +272,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
with_context!(
self,
"check FRI initial proof",
self.fri_verify_initial_proof::<C::HCO, C::Hasher>(
self.fri_verify_initial_proof::<C::Hasher>(
&x_index_bits,
&round_proof.initial_trees_proof,
initial_merkle_caps,
@ -337,7 +332,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
with_context!(
self,
"verify FRI round Merkle proof.",
self.verify_merkle_proof_to_cap_with_cap_index::<C::HCO, C::Hasher>(
self.verify_merkle_proof_to_cap_with_cap_index::<C::Hasher>(
flatten_target(evals),
&coset_index_bits,
cap_index,

View File

@ -9,7 +9,7 @@ use crate::plonk::config::GenericConfig;
use crate::plonk::plonk_common::salt_size;
pub(crate) fn validate_fri_proof_shape<F, C, const D: usize>(
proof: &FriProof<F, C::HCO, C::Hasher, D>,
proof: &FriProof<F, C::Hasher, D>,
instance: &FriInstanceInfo<F, D>,
params: &FriParams,
) -> anyhow::Result<()>

View File

@ -10,7 +10,6 @@ use crate::fri::structure::{FriBatchInfo, FriInstanceInfo, FriOpenings};
use crate::fri::validate_shape::validate_fri_proof_shape;
use crate::fri::{FriConfig, FriParams};
use crate::hash::hash_types::RichField;
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_proofs::verify_merkle_proof_to_cap;
use crate::hash::merkle_tree::MerkleCap;
use crate::plonk::config::{GenericConfig, Hasher};
@ -59,17 +58,18 @@ pub(crate) fn fri_verify_proof_of_work<F: RichField + Extendable<D>, const D: us
Ok(())
}
pub fn verify_fri_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
pub fn verify_fri_proof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
>(
instance: &FriInstanceInfo<F, D>,
openings: &FriOpenings<F, D>,
challenges: &FriChallenges<F, D>,
initial_merkle_caps: &[MerkleCap<F, C::HCO, C::Hasher>],
proof: &FriProof<F, C::HCO, C::Hasher, D>,
initial_merkle_caps: &[MerkleCap<F, C::Hasher>],
proof: &FriProof<F, C::Hasher, D>,
params: &FriParams,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
{
) -> Result<()> {
validate_fri_proof_shape::<F, C, D>(proof, instance, params)?;
// Size of the LDE domain.
@ -107,16 +107,13 @@ where
Ok(())
}
fn fri_verify_initial_proof<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
fn fri_verify_initial_proof<F: RichField, H: Hasher<F>>(
x_index: usize,
proof: &FriInitialTreeProof<F, HC, H>,
initial_merkle_caps: &[MerkleCap<F, HC, H>],
) -> Result<()>
where
[(); HC::WIDTH]:,
{
proof: &FriInitialTreeProof<F, H>,
initial_merkle_caps: &[MerkleCap<F, H>],
) -> Result<()> {
for ((evals, merkle_proof), cap) in proof.evals_proofs.iter().zip(initial_merkle_caps) {
verify_merkle_proof_to_cap::<F, HC, H>(evals.clone(), x_index, cap, merkle_proof)?;
verify_merkle_proof_to_cap::<F, H>(evals.clone(), x_index, cap, merkle_proof)?;
}
Ok(())
@ -128,7 +125,7 @@ pub(crate) fn fri_combine_initial<
const D: usize,
>(
instance: &FriInstanceInfo<F, D>,
proof: &FriInitialTreeProof<F, C::HCO, C::Hasher>,
proof: &FriInitialTreeProof<F, C::Hasher>,
alpha: F::Extension,
subgroup_x: F,
precomputed_reduced_evals: &PrecomputedReducedOpenings<F, D>,
@ -171,17 +168,14 @@ fn fri_verifier_query_round<
instance: &FriInstanceInfo<F, D>,
challenges: &FriChallenges<F, D>,
precomputed_reduced_evals: &PrecomputedReducedOpenings<F, D>,
initial_merkle_caps: &[MerkleCap<F, C::HCO, C::Hasher>],
proof: &FriProof<F, C::HCO, C::Hasher, D>,
initial_merkle_caps: &[MerkleCap<F, C::Hasher>],
proof: &FriProof<F, C::Hasher, D>,
mut x_index: usize,
n: usize,
round_proof: &FriQueryRound<F, C::HCO, C::Hasher, D>,
round_proof: &FriQueryRound<F, C::Hasher, D>,
params: &FriParams,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
{
fri_verify_initial_proof::<F, C::HCO, C::Hasher>(
) -> Result<()> {
fri_verify_initial_proof::<F, C::Hasher>(
x_index,
&round_proof.initial_trees_proof,
initial_merkle_caps,
@ -222,7 +216,7 @@ where
challenges.fri_betas[i],
);
verify_merkle_proof_to_cap::<F, C::HCO, C::Hasher>(
verify_merkle_proof_to_cap::<F, C::Hasher>(
flatten(evals),
coset_index,
&proof.commit_phase_merkle_caps[i],

View File

@ -3,20 +3,18 @@ use itertools::Itertools;
use crate::field::extension::Extendable;
use crate::fri::proof::{FriProof, FriProofTarget};
use crate::hash::hash_types::RichField;
use crate::hash::hashing::HashConfig;
use crate::iop::witness::WitnessWrite;
use crate::plonk::config::AlgebraicHasher;
/// Set the targets in a `FriProofTarget` to their corresponding values in a `FriProof`.
pub fn set_fri_proof_target<F, W, HC, H, const D: usize>(
pub fn set_fri_proof_target<F, W, H, const D: usize>(
witness: &mut W,
fri_proof_target: &FriProofTarget<D>,
fri_proof: &FriProof<F, HC, H, D>,
fri_proof: &FriProof<F, H, D>,
) where
F: RichField + Extendable<D>,
W: WitnessWrite<F> + ?Sized,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
H: AlgebraicHasher<F>,
{
witness.set_target(fri_proof_target.pow_witness, fri_proof.pow_witness);

View File

@ -1,27 +1,26 @@
use crate::field::extension::Extendable;
use crate::hash::hash_types::RichField;
use crate::hash::hashing::HashConfig;
use crate::iop::target::{BoolTarget, Target};
use crate::iop::target::BoolTarget;
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::config::AlgebraicHasher;
impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
pub fn permute<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
pub fn permute<H: AlgebraicHasher<F>>(
&mut self,
inputs: [Target; HC::WIDTH],
) -> [Target; HC::WIDTH] {
inputs: H::AlgebraicPermutation,
) -> H::AlgebraicPermutation {
// We don't want to swap any inputs, so set that wire to 0.
let _false = self._false();
self.permute_swapped::<HC, H>(inputs, _false)
self.permute_swapped::<H>(inputs, _false)
}
/// Conditionally swap two chunks of the inputs (useful in verifying Merkle proofs), then apply
/// a cryptographic permutation.
pub(crate) fn permute_swapped<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
pub(crate) fn permute_swapped<H: AlgebraicHasher<F>>(
&mut self,
inputs: [Target; HC::WIDTH],
inputs: H::AlgebraicPermutation,
swap: BoolTarget,
) -> [Target; HC::WIDTH] {
) -> H::AlgebraicPermutation {
H::permute_swapped(inputs, swap, self)
}
}

View File

@ -8,7 +8,6 @@ use crate::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use crate::field::types::{Field, Sample};
use crate::gates::gate::Gate;
use crate::hash::hash_types::{HashOut, RichField};
use crate::hash::hashing::HashConfig;
use crate::iop::witness::{PartialWitness, WitnessWrite};
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::circuit_data::CircuitConfig;
@ -94,11 +93,7 @@ pub fn test_eval_fns<
const D: usize,
>(
gate: G,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<()> {
// Test that `eval_unfiltered` and `eval_unfiltered_base` are coherent.
let wires_base = F::rand_vec(gate.num_wires());
let constants_base = F::rand_vec(gate.num_constants());

View File

@ -14,35 +14,37 @@ pub trait RichField: PrimeField64 + Poseidon {}
impl RichField for GoldilocksField {}
pub const NUM_HASH_OUT_ELTS: usize = 4;
/// Represents a ~256 bit hash output.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[serde(bound = "")]
pub struct HashOut<F: Field> {
pub elements: [F; 4],
pub elements: [F; NUM_HASH_OUT_ELTS],
}
impl<F: Field> HashOut<F> {
pub const ZERO: Self = Self {
elements: [F::ZERO; 4],
elements: [F::ZERO; NUM_HASH_OUT_ELTS],
};
// TODO: Switch to a TryFrom impl.
pub fn from_vec(elements: Vec<F>) -> Self {
debug_assert!(elements.len() == 4);
debug_assert!(elements.len() == NUM_HASH_OUT_ELTS);
Self {
elements: elements.try_into().unwrap(),
}
}
pub fn from_partial(elements_in: &[F]) -> Self {
let mut elements = [F::ZERO; 4];
let mut elements = [F::ZERO; NUM_HASH_OUT_ELTS];
elements[0..elements_in.len()].copy_from_slice(elements_in);
Self { elements }
}
}
impl<F: Field> From<[F; 4]> for HashOut<F> {
fn from(elements: [F; 4]) -> Self {
impl<F: Field> From<[F; NUM_HASH_OUT_ELTS]> for HashOut<F> {
fn from(elements: [F; NUM_HASH_OUT_ELTS]) -> Self {
Self { elements }
}
}
@ -51,7 +53,7 @@ impl<F: Field> TryFrom<&[F]> for HashOut<F> {
type Error = anyhow::Error;
fn try_from(elements: &[F]) -> Result<Self, Self::Error> {
ensure!(elements.len() == 4);
ensure!(elements.len() == NUM_HASH_OUT_ELTS);
Ok(Self {
elements: elements.try_into().unwrap(),
})
@ -90,7 +92,7 @@ impl<F: RichField> GenericHashOut<F> for HashOut<F> {
HashOut {
elements: bytes
.chunks(8)
.take(4)
.take(NUM_HASH_OUT_ELTS)
.map(|x| F::from_canonical_u64(u64::from_le_bytes(x.try_into().unwrap())))
.collect::<Vec<_>>()
.try_into()
@ -112,27 +114,27 @@ impl<F: Field> Default for HashOut<F> {
/// Represents a ~256 bit hash output.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct HashOutTarget {
pub elements: [Target; 4],
pub elements: [Target; NUM_HASH_OUT_ELTS],
}
impl HashOutTarget {
// TODO: Switch to a TryFrom impl.
pub fn from_vec(elements: Vec<Target>) -> Self {
debug_assert!(elements.len() == 4);
debug_assert!(elements.len() == NUM_HASH_OUT_ELTS);
Self {
elements: elements.try_into().unwrap(),
}
}
pub fn from_partial(elements_in: &[Target], zero: Target) -> Self {
let mut elements = [zero; 4];
let mut elements = [zero; NUM_HASH_OUT_ELTS];
elements[0..elements_in.len()].copy_from_slice(elements_in);
Self { elements }
}
}
impl From<[Target; 4]> for HashOutTarget {
fn from(elements: [Target; 4]) -> Self {
impl From<[Target; NUM_HASH_OUT_ELTS]> for HashOutTarget {
fn from(elements: [Target; NUM_HASH_OUT_ELTS]) -> Self {
Self { elements }
}
}
@ -141,7 +143,7 @@ impl TryFrom<&[Target]> for HashOutTarget {
type Error = anyhow::Error;
fn try_from(elements: &[Target]) -> Result<Self, Self::Error> {
ensure!(elements.len() == 4);
ensure!(elements.len() == NUM_HASH_OUT_ELTS);
Ok(Self {
elements: elements.try_into().unwrap(),
})

View File

@ -2,137 +2,145 @@
use alloc::vec::Vec;
use core::fmt::Debug;
use std::iter::repeat;
use crate::field::extension::Extendable;
use crate::hash::hash_types::{HashOut, HashOutTarget, RichField};
use crate::field::types::Field;
use crate::hash::hash_types::{HashOut, HashOutTarget, RichField, NUM_HASH_OUT_ELTS};
use crate::iop::target::Target;
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::config::AlgebraicHasher;
pub trait HashConfig: Clone + Debug + Eq + PartialEq {
const RATE: usize;
const WIDTH: usize;
}
impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
pub fn hash_or_noop<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
&mut self,
inputs: Vec<Target>,
) -> HashOutTarget
where
[(); HC::WIDTH]:,
{
pub fn hash_or_noop<H: AlgebraicHasher<F>>(&mut self, inputs: Vec<Target>) -> HashOutTarget {
let zero = self.zero();
if inputs.len() <= 4 {
if inputs.len() <= NUM_HASH_OUT_ELTS {
HashOutTarget::from_partial(&inputs, zero)
} else {
self.hash_n_to_hash_no_pad::<HC, H>(inputs)
self.hash_n_to_hash_no_pad::<H>(inputs)
}
}
pub fn hash_n_to_hash_no_pad<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
pub fn hash_n_to_hash_no_pad<H: AlgebraicHasher<F>>(
&mut self,
inputs: Vec<Target>,
) -> HashOutTarget
where
[(); HC::WIDTH]:,
{
HashOutTarget::from_vec(self.hash_n_to_m_no_pad::<HC, H>(inputs, 4))
) -> HashOutTarget {
HashOutTarget::from_vec(self.hash_n_to_m_no_pad::<H>(inputs, NUM_HASH_OUT_ELTS))
}
pub fn hash_n_to_m_no_pad<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
pub fn hash_n_to_m_no_pad<H: AlgebraicHasher<F>>(
&mut self,
inputs: Vec<Target>,
num_outputs: usize,
) -> Vec<Target>
where
[(); HC::WIDTH]:,
{
) -> Vec<Target> {
let zero = self.zero();
let mut state = [zero; HC::WIDTH];
let mut state = H::AlgebraicPermutation::new(std::iter::repeat(zero));
// Absorb all input chunks.
for input_chunk in inputs.chunks(HC::RATE) {
for input_chunk in inputs.chunks(H::AlgebraicPermutation::RATE) {
// Overwrite the first r elements with the inputs. This differs from a standard sponge,
// where we would xor or add in the inputs. This is a well-known variant, though,
// sometimes called "overwrite mode".
state[..input_chunk.len()].copy_from_slice(input_chunk);
state = self.permute::<HC, H>(state);
state.set_from_slice(input_chunk, 0);
state = self.permute::<H>(state);
}
// Squeeze until we have the desired number of outputs.
let mut outputs = Vec::with_capacity(num_outputs);
loop {
for i in 0..HC::RATE {
outputs.push(state[i]);
for &s in state.squeeze() {
outputs.push(s);
if outputs.len() == num_outputs {
return outputs;
}
}
state = self.permute::<HC, H>(state);
state = self.permute::<H>(state);
}
}
}
/// A one-way compression function which takes two ~256 bit inputs and returns a ~256 bit output.
pub fn compress<F: RichField, HC: HashConfig, P: PlonkyPermutation<F, HC>>(
x: HashOut<F>,
y: HashOut<F>,
) -> HashOut<F>
where
[(); HC::WIDTH]:,
/// Permutation that can be used in the sponge construction for an algebraic hash.
pub trait PlonkyPermutation<T: Copy + Default>:
AsRef<[T]> + Copy + Debug + Default + Eq + Sync + Send
{
let mut perm_inputs = [F::ZERO; HC::WIDTH];
perm_inputs[..4].copy_from_slice(&x.elements);
perm_inputs[4..8].copy_from_slice(&y.elements);
HashOut {
elements: P::permute(perm_inputs)[..4].try_into().unwrap(),
}
const RATE: usize;
const WIDTH: usize;
/// Initialises internal state with values from `iter` until
/// `iter` is exhausted or `Self::WIDTH` values have been
/// received; remaining state (if any) initialised with
/// `T::default()`. To initialise remaining elements with a
/// different value, instead of your original `iter` pass
/// `iter.chain(std::iter::repeat(F::from_canonical_u64(12345)))`
/// or similar.
fn new<I: IntoIterator<Item = T>>(iter: I) -> Self;
/// Set idx-th state element to be `elt`. Panics if `idx >= WIDTH`.
fn set_elt(&mut self, elt: T, idx: usize);
/// Set state element `i` to be `elts[i] for i =
/// start_idx..start_idx + n` where `n = min(elts.len(),
/// WIDTH-start_idx)`. Panics if `start_idx > WIDTH`.
fn set_from_iter<I: IntoIterator<Item = T>>(&mut self, elts: I, start_idx: usize);
/// Same semantics as for `set_from_iter` but probably faster than
/// just calling `set_from_iter(elts.iter())`.
fn set_from_slice(&mut self, elts: &[T], start_idx: usize);
/// Apply permutation to internal state
fn permute(&mut self);
/// Return a slice of `RATE` elements
fn squeeze(&self) -> &[T];
}
/// Permutation that can be used in the sponge construction for an algebraic hash.
pub trait PlonkyPermutation<F: RichField, HC: HashConfig> {
fn permute(input: [F; HC::WIDTH]) -> [F; HC::WIDTH]
where
[(); HC::WIDTH]:;
/// A one-way compression function which takes two ~256 bit inputs and returns a ~256 bit output.
pub fn compress<F: Field, P: PlonkyPermutation<F>>(x: HashOut<F>, y: HashOut<F>) -> HashOut<F> {
    // TODO: With some refactoring, this function could be implemented as
    // hash_n_to_m_no_pad(chain(x.elements, y.elements), NUM_HASH_OUT_ELTS).
    debug_assert_eq!(x.elements.len(), NUM_HASH_OUT_ELTS);
    debug_assert_eq!(y.elements.len(), NUM_HASH_OUT_ELTS);
    debug_assert!(P::RATE >= NUM_HASH_OUT_ELTS);

    // Zero-initialise the sponge state, then overwrite the first two
    // NUM_HASH_OUT_ELTS-sized windows with the two input digests.
    let mut state = P::new(repeat(F::ZERO));
    state.set_from_slice(&x.elements, 0);
    state.set_from_slice(&y.elements, NUM_HASH_OUT_ELTS);
    state.permute();

    // The output digest is the first NUM_HASH_OUT_ELTS squeezed elements.
    let digest: [F; NUM_HASH_OUT_ELTS] = state.squeeze()[..NUM_HASH_OUT_ELTS].try_into().unwrap();
    HashOut { elements: digest }
}
/// Hash a message without any padding step. Note that this can enable length-extension attacks.
/// However, it is still collision-resistant in cases where the input has a fixed length.
pub fn hash_n_to_m_no_pad<F: RichField, HC: HashConfig, P: PlonkyPermutation<F, HC>>(
pub fn hash_n_to_m_no_pad<F: RichField, P: PlonkyPermutation<F>>(
inputs: &[F],
num_outputs: usize,
) -> Vec<F>
where
[(); HC::WIDTH]:,
{
let mut state = [F::ZERO; HC::WIDTH];
) -> Vec<F> {
let mut perm = P::new(repeat(F::ZERO));
// Absorb all input chunks.
for input_chunk in inputs.chunks(HC::RATE) {
state[..input_chunk.len()].copy_from_slice(input_chunk);
state = P::permute(state);
for input_chunk in inputs.chunks(P::RATE) {
perm.set_from_slice(input_chunk, 0);
perm.permute();
}
// Squeeze until we have the desired number of outputs.
let mut outputs = Vec::new();
loop {
for &item in state.iter().take(HC::RATE) {
for &item in perm.squeeze() {
outputs.push(item);
if outputs.len() == num_outputs {
return outputs;
}
}
state = P::permute(state);
perm.permute();
}
}
pub fn hash_n_to_hash_no_pad<F: RichField, HC: HashConfig, P: PlonkyPermutation<F, HC>>(
inputs: &[F],
) -> HashOut<F>
where
[(); HC::WIDTH]:,
{
HashOut::from_vec(hash_n_to_m_no_pad::<F, HC, P>(inputs, 4))
/// Hash a message to a `HashOut` without any padding step. Note that this can
/// enable length-extension attacks; it remains collision-resistant when the
/// input length is fixed.
pub fn hash_n_to_hash_no_pad<F: RichField, P: PlonkyPermutation<F>>(inputs: &[F]) -> HashOut<F> {
    let digest_elts = hash_n_to_m_no_pad::<F, P>(inputs, NUM_HASH_OUT_ELTS);
    HashOut::from_vec(digest_elts)
}

View File

@ -8,7 +8,7 @@ use keccak_hash::keccak;
use crate::hash::hash_types::{BytesHash, RichField};
use crate::hash::hashing::PlonkyPermutation;
use crate::plonk::config::{Hasher, KeccakHashConfig};
use crate::plonk::config::Hasher;
use crate::util::serialization::Write;
pub const SPONGE_RATE: usize = 8;
@ -18,21 +18,59 @@ pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;
/// Keccak-256 pseudo-permutation (not necessarily one-to-one) used in the challenger.
/// A state `input: [F; 12]` is sent to the field representation of `H(input) || H(H(input)) || H(H(H(input)))`
/// where `H` is the Keccak-256 hash.
pub struct KeccakPermutation;
impl<F: RichField> PlonkyPermutation<F, KeccakHashConfig> for KeccakPermutation {
fn permute(input: [F; SPONGE_WIDTH]) -> [F; SPONGE_WIDTH]
where
[(); SPONGE_WIDTH]:,
{
let mut state = vec![0u8; SPONGE_WIDTH * size_of::<u64>()];
/// Sponge state for the Keccak-256 pseudo-permutation over field elements.
#[derive(Copy, Clone, Default, Debug, PartialEq)]
pub struct KeccakPermutation<F: RichField> {
    // Internal sponge state: `SPONGE_WIDTH` field elements.
    state: [F; SPONGE_WIDTH],
}

// `Eq` is implemented manually rather than derived.
// NOTE(review): presumably this avoids an unwanted derive-generated bound on
// `F`; `RichField` should already guarantee total equality — confirm.
impl<F: RichField> Eq for KeccakPermutation<F> {}

// Expose the whole state as a slice, as required by `PlonkyPermutation`'s
// `AsRef<[T]>` supertrait.
impl<F: RichField> AsRef<[F]> for KeccakPermutation<F> {
    fn as_ref(&self) -> &[F] {
        &self.state
    }
}
// TODO: Several implementations here are copied from
// PoseidonPermutation; they should be refactored.
impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
const RATE: usize = SPONGE_RATE;
const WIDTH: usize = SPONGE_WIDTH;
fn new<I: IntoIterator<Item = F>>(elts: I) -> Self {
    // Default-initialise the full state, then absorb at most `WIDTH`
    // initial values from `elts` (remaining slots keep `F::default()`).
    let mut p = Self {
        state: [F::default(); SPONGE_WIDTH],
    };
    p.set_from_iter(elts, 0);
    p
}
fn set_elt(&mut self, elt: F, idx: usize) {
    // Panics if `idx >= WIDTH`, per the trait contract.
    self.state[idx] = elt;
}
fn set_from_slice(&mut self, elts: &[F], start_idx: usize) {
    // Overwrite `state[start_idx..start_idx + elts.len()]` with `elts`;
    // the slicing panics if the range runs past `WIDTH`.
    self.state[start_idx..start_idx + elts.len()].copy_from_slice(elts);
}
fn set_from_iter<I: IntoIterator<Item = F>>(&mut self, elts: I, start_idx: usize) {
    // Copy values from `elts` into the state starting at `start_idx`,
    // stopping when either the iterator or the state is exhausted.
    // (The slice itself panics if `start_idx > WIDTH`.)
    self.state[start_idx..]
        .iter_mut()
        .zip(elts)
        .for_each(|(slot, val)| *slot = val);
}
fn permute(&mut self) {
let mut state_bytes = vec![0u8; SPONGE_WIDTH * size_of::<u64>()];
for i in 0..SPONGE_WIDTH {
state[i * size_of::<u64>()..(i + 1) * size_of::<u64>()]
.copy_from_slice(&input[i].to_canonical_u64().to_le_bytes());
state_bytes[i * size_of::<u64>()..(i + 1) * size_of::<u64>()]
.copy_from_slice(&self.state[i].to_canonical_u64().to_le_bytes());
}
let hash_onion = iter::repeat_with(|| {
let output = keccak(state.clone()).to_fixed_bytes();
state = output.to_vec();
let output = keccak(state_bytes.clone()).to_fixed_bytes();
state_bytes = output.to_vec();
output
});
@ -49,21 +87,25 @@ impl<F: RichField> PlonkyPermutation<F, KeccakHashConfig> for KeccakPermutation
.filter(|&word| word < F::ORDER)
.map(F::from_canonical_u64);
hash_onion_elems
self.state = hash_onion_elems
.take(SPONGE_WIDTH)
.collect_vec()
.try_into()
.unwrap()
.unwrap();
}
fn squeeze(&self) -> &[F] {
    // The squeezed output is the rate portion (first `RATE` elements) of
    // the state.
    &self.state[..Self::RATE]
}
}
/// Keccak-256 hash function.
///
/// The const parameter `N` is the digest size in bytes (it becomes
/// `Hasher::HASH_SIZE` in the `Hasher` impl).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct KeccakHash<const N: usize>;
impl<F: RichField, const N: usize> Hasher<F, KeccakHashConfig> for KeccakHash<N> {
impl<F: RichField, const N: usize> Hasher<F> for KeccakHash<N> {
const HASH_SIZE: usize = N;
type Hash = BytesHash<N>;
type Permutation = KeccakPermutation;
type Permutation = KeccakPermutation<F>;
fn hash_no_pad(input: &[F]) -> Self::Hash {
let mut buffer = Vec::new();

View File

@ -6,8 +6,8 @@ use itertools::Itertools;
use serde::{Deserialize, Serialize};
use crate::field::extension::Extendable;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField, NUM_HASH_OUT_ELTS};
use crate::hash::hashing::PlonkyPermutation;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::{BoolTarget, Target};
use crate::plonk::circuit_builder::CircuitBuilder;
@ -16,12 +16,12 @@ use crate::plonk::config::{AlgebraicHasher, Hasher};
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
#[serde(bound = "")]
pub struct MerkleProof<F: RichField, HC: HashConfig, H: Hasher<F, HC>> {
pub struct MerkleProof<F: RichField, H: Hasher<F>> {
/// The Merkle digest of each sibling subtree, staying from the bottommost layer.
pub siblings: Vec<H::Hash>,
}
impl<F: RichField, HC: HashConfig, H: Hasher<F, HC>> MerkleProof<F, HC, H> {
impl<F: RichField, H: Hasher<F>> MerkleProof<F, H> {
pub fn len(&self) -> usize {
self.siblings.len()
}
@ -39,30 +39,24 @@ pub struct MerkleProofTarget {
/// Verifies that the given leaf data is present at the given index in the Merkle tree with the
/// given root.
pub fn verify_merkle_proof<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
pub fn verify_merkle_proof<F: RichField, H: Hasher<F>>(
leaf_data: Vec<F>,
leaf_index: usize,
merkle_root: H::Hash,
proof: &MerkleProof<F, HC, H>,
) -> Result<()>
where
[(); HC::WIDTH]:,
{
proof: &MerkleProof<F, H>,
) -> Result<()> {
let merkle_cap = MerkleCap(vec![merkle_root]);
verify_merkle_proof_to_cap(leaf_data, leaf_index, &merkle_cap, proof)
}
/// Verifies that the given leaf data is present at the given index in the Merkle tree with the
/// given cap.
pub fn verify_merkle_proof_to_cap<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
pub fn verify_merkle_proof_to_cap<F: RichField, H: Hasher<F>>(
leaf_data: Vec<F>,
leaf_index: usize,
merkle_cap: &MerkleCap<F, HC, H>,
proof: &MerkleProof<F, HC, H>,
) -> Result<()>
where
[(); HC::WIDTH]:,
{
merkle_cap: &MerkleCap<F, H>,
proof: &MerkleProof<F, H>,
) -> Result<()> {
let mut index = leaf_index;
let mut current_digest = H::hash_or_noop(&leaf_data);
for &sibling_digest in proof.siblings.iter() {
@ -85,32 +79,28 @@ where
impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
/// Verifies that the given leaf data is present at the given index in the Merkle tree with the
/// given root. The index is given by its little-endian bits.
pub fn verify_merkle_proof<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
pub fn verify_merkle_proof<H: AlgebraicHasher<F>>(
&mut self,
leaf_data: Vec<Target>,
leaf_index_bits: &[BoolTarget],
merkle_root: HashOutTarget,
proof: &MerkleProofTarget,
) where
[(); HC::WIDTH]:,
{
) {
let merkle_cap = MerkleCapTarget(vec![merkle_root]);
self.verify_merkle_proof_to_cap::<HC, H>(leaf_data, leaf_index_bits, &merkle_cap, proof);
self.verify_merkle_proof_to_cap::<H>(leaf_data, leaf_index_bits, &merkle_cap, proof);
}
/// Verifies that the given leaf data is present at the given index in the Merkle tree with the
/// given cap. The index is given by its little-endian bits.
pub fn verify_merkle_proof_to_cap<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
pub fn verify_merkle_proof_to_cap<H: AlgebraicHasher<F>>(
&mut self,
leaf_data: Vec<Target>,
leaf_index_bits: &[BoolTarget],
merkle_cap: &MerkleCapTarget,
proof: &MerkleProofTarget,
) where
[(); HC::WIDTH]:,
{
) {
let cap_index = self.le_sum(leaf_index_bits[proof.siblings.len()..].iter().copied());
self.verify_merkle_proof_to_cap_with_cap_index::<HC, H>(
self.verify_merkle_proof_to_cap_with_cap_index::<H>(
leaf_data,
leaf_index_bits,
cap_index,
@ -121,34 +111,38 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
/// Same as `verify_merkle_proof_to_cap`, except with the final "cap index" as separate parameter,
/// rather than being contained in `leaf_index_bits`.
pub(crate) fn verify_merkle_proof_to_cap_with_cap_index<
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
>(
pub(crate) fn verify_merkle_proof_to_cap_with_cap_index<H: AlgebraicHasher<F>>(
&mut self,
leaf_data: Vec<Target>,
leaf_index_bits: &[BoolTarget],
cap_index: Target,
merkle_cap: &MerkleCapTarget,
proof: &MerkleProofTarget,
) where
[(); HC::WIDTH]:,
{
) {
debug_assert!(H::AlgebraicPermutation::RATE >= NUM_HASH_OUT_ELTS);
let zero = self.zero();
let mut state: HashOutTarget = self.hash_or_noop::<HC, H>(leaf_data);
let mut state: HashOutTarget = self.hash_or_noop::<H>(leaf_data);
debug_assert_eq!(state.elements.len(), NUM_HASH_OUT_ELTS);
for (&bit, &sibling) in leaf_index_bits.iter().zip(&proof.siblings) {
let mut perm_inputs = [zero; HC::WIDTH];
perm_inputs[..4].copy_from_slice(&state.elements);
perm_inputs[4..8].copy_from_slice(&sibling.elements);
let perm_outs = self.permute_swapped::<HC, H>(perm_inputs, bit);
let hash_outs = perm_outs[0..4].try_into().unwrap();
debug_assert_eq!(sibling.elements.len(), NUM_HASH_OUT_ELTS);
let mut perm_inputs = H::AlgebraicPermutation::default();
perm_inputs.set_from_slice(&state.elements, 0);
perm_inputs.set_from_slice(&sibling.elements, NUM_HASH_OUT_ELTS);
// Ensure the rest of the state, if any, is zero:
perm_inputs.set_from_iter(std::iter::repeat(zero), 2 * NUM_HASH_OUT_ELTS);
let perm_outs = self.permute_swapped::<H>(perm_inputs, bit);
let hash_outs = perm_outs.squeeze()[0..NUM_HASH_OUT_ELTS]
.try_into()
.unwrap();
state = HashOutTarget {
elements: hash_outs,
};
}
for i in 0..4 {
for i in 0..NUM_HASH_OUT_ELTS {
let result = self.random_access(
cap_index,
merkle_cap.0.iter().map(|h| h.elements[i]).collect(),
@ -158,7 +152,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}
pub fn connect_hashes(&mut self, x: HashOutTarget, y: HashOutTarget) {
for i in 0..4 {
for i in 0..NUM_HASH_OUT_ELTS {
self.connect(x.elements[i], y.elements[i]);
}
}
@ -207,10 +201,7 @@ mod tests {
let n = 1 << log_n;
let cap_height = 1;
let leaves = random_data::<F>(n, 7);
let tree =
MerkleTree::<F, <C as GenericConfig<D>>::HCO, <C as GenericConfig<D>>::Hasher>::new(
leaves, cap_height,
);
let tree = MerkleTree::<F, <C as GenericConfig<D>>::Hasher>::new(leaves, cap_height);
let i: usize = OsRng.gen_range(0..n);
let proof = tree.prove(i);
@ -232,7 +223,7 @@ mod tests {
pw.set_target(data[j], tree.leaves[i][j]);
}
builder.verify_merkle_proof_to_cap::<<C as GenericConfig<D>>::HCI, <C as GenericConfig<D>>::InnerHasher>(
builder.verify_merkle_proof_to_cap::<<C as GenericConfig<D>>::InnerHasher>(
data, &i_bits, &cap_t, &proof_t,
);

View File

@ -6,7 +6,6 @@ use plonky2_maybe_rayon::*;
use serde::{Deserialize, Serialize};
use crate::hash::hash_types::RichField;
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_proofs::MerkleProof;
use crate::plonk::config::{GenericHashOut, Hasher};
use crate::util::log2_strict;
@ -16,9 +15,9 @@ use crate::util::log2_strict;
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
#[serde(bound = "")]
// TODO: Change H to GenericHashOut<F>, since this only cares about the hash, not the hasher.
pub struct MerkleCap<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(pub Vec<H::Hash>);
pub struct MerkleCap<F: RichField, H: Hasher<F>>(pub Vec<H::Hash>);
impl<F: RichField, HC: HashConfig, H: Hasher<F, HC>> MerkleCap<F, HC, H> {
impl<F: RichField, H: Hasher<F>> MerkleCap<F, H> {
pub fn len(&self) -> usize {
self.0.len()
}
@ -37,7 +36,7 @@ impl<F: RichField, HC: HashConfig, H: Hasher<F, HC>> MerkleCap<F, HC, H> {
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct MerkleTree<F: RichField, HC: HashConfig, H: Hasher<F, HC>> {
pub struct MerkleTree<F: RichField, H: Hasher<F>> {
/// The data in the leaves of the Merkle tree.
pub leaves: Vec<Vec<F>>,
@ -52,7 +51,7 @@ pub struct MerkleTree<F: RichField, HC: HashConfig, H: Hasher<F, HC>> {
pub digests: Vec<H::Hash>,
/// The Merkle cap.
pub cap: MerkleCap<F, HC, H>,
pub cap: MerkleCap<F, H>,
}
fn capacity_up_to_mut<T>(v: &mut Vec<T>, len: usize) -> &mut [MaybeUninit<T>] {
@ -67,13 +66,10 @@ fn capacity_up_to_mut<T>(v: &mut Vec<T>, len: usize) -> &mut [MaybeUninit<T>] {
}
}
fn fill_subtree<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
fn fill_subtree<F: RichField, H: Hasher<F>>(
digests_buf: &mut [MaybeUninit<H::Hash>],
leaves: &[Vec<F>],
) -> H::Hash
where
[(); HC::WIDTH]:,
{
) -> H::Hash {
assert_eq!(leaves.len(), digests_buf.len() / 2 + 1);
if digests_buf.is_empty() {
H::hash_or_noop(&leaves[0])
@ -89,8 +85,8 @@ where
let (left_leaves, right_leaves) = leaves.split_at(leaves.len() / 2);
let (left_digest, right_digest) = plonky2_maybe_rayon::join(
|| fill_subtree::<F, HC, H>(left_digests_buf, left_leaves),
|| fill_subtree::<F, HC, H>(right_digests_buf, right_leaves),
|| fill_subtree::<F, H>(left_digests_buf, left_leaves),
|| fill_subtree::<F, H>(right_digests_buf, right_leaves),
);
left_digest_mem.write(left_digest);
@ -99,14 +95,12 @@ where
}
}
fn fill_digests_buf<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
fn fill_digests_buf<F: RichField, H: Hasher<F>>(
digests_buf: &mut [MaybeUninit<H::Hash>],
cap_buf: &mut [MaybeUninit<H::Hash>],
leaves: &[Vec<F>],
cap_height: usize,
) where
[(); HC::WIDTH]:,
{
) {
// Special case of a tree that's all cap. The usual case will panic because we'll try to split
// an empty slice into chunks of `0`. (We would not need this if there was a way to split into
// `blah` chunks as opposed to chunks _of_ `blah`.)
@ -132,15 +126,12 @@ fn fill_digests_buf<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
// We have `1 << cap_height` sub-trees, one for each entry in `cap`. They are totally
// independent, so we schedule one task for each. `digests_buf` and `leaves` are split
// into `1 << cap_height` slices, one for each sub-tree.
subtree_cap.write(fill_subtree::<F, HC, H>(subtree_digests, subtree_leaves));
subtree_cap.write(fill_subtree::<F, H>(subtree_digests, subtree_leaves));
},
);
}
impl<F: RichField, HC: HashConfig, H: Hasher<F, HC>> MerkleTree<F, HC, H>
where
[(); HC::WIDTH]:,
{
impl<F: RichField, H: Hasher<F>> MerkleTree<F, H> {
pub fn new(leaves: Vec<Vec<F>>, cap_height: usize) -> Self {
let log2_leaves_len = log2_strict(leaves.len());
assert!(
@ -158,7 +149,7 @@ where
let digests_buf = capacity_up_to_mut(&mut digests, num_digests);
let cap_buf = capacity_up_to_mut(&mut cap, len_cap);
fill_digests_buf::<F, HC, H>(digests_buf, cap_buf, &leaves[..], cap_height);
fill_digests_buf::<F, H>(digests_buf, cap_buf, &leaves[..], cap_height);
unsafe {
// SAFETY: `fill_digests_buf` and `cap` initialized the spare capacity up to
@ -179,7 +170,7 @@ where
}
/// Create a Merkle proof from a leaf index.
pub fn prove(&self, leaf_index: usize) -> MerkleProof<F, HC, H> {
pub fn prove(&self, leaf_index: usize) -> MerkleProof<F, H> {
let cap_height = log2_strict(self.cap.len());
let num_layers = log2_strict(self.leaves.len()) - cap_height;
debug_assert_eq!(leaf_index >> (cap_height + num_layers), 0);
@ -229,14 +220,15 @@ mod tests {
(0..n).map(|_| F::rand_vec(k)).collect()
}
fn verify_all_leaves<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
fn verify_all_leaves<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
>(
leaves: Vec<Vec<F>>,
cap_height: usize,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
{
let tree = MerkleTree::<F, C::HCO, C::Hasher>::new(leaves.clone(), cap_height);
) -> Result<()> {
let tree = MerkleTree::<F, C::Hasher>::new(leaves.clone(), cap_height);
for (i, leaf) in leaves.into_iter().enumerate() {
let proof = tree.prove(i);
verify_merkle_proof_to_cap(leaf, i, &tree.cap, &proof)?;
@ -255,9 +247,7 @@ mod tests {
let cap_height = log_n + 1; // Should panic if `cap_height > len_n`.
let leaves = random_data::<F>(1 << log_n, 7);
let _ = MerkleTree::<F, <C as GenericConfig<D>>::HCO, <C as GenericConfig<D>>::Hasher>::new(
leaves, cap_height,
);
let _ = MerkleTree::<F, <C as GenericConfig<D>>::Hasher>::new(leaves, cap_height);
}
#[test]

View File

@ -5,16 +5,15 @@ use hashbrown::HashMap;
use num::Integer;
use crate::hash::hash_types::RichField;
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_proofs::MerkleProof;
use crate::plonk::config::Hasher;
/// Compress multiple Merkle proofs on the same tree by removing redundancy in the Merkle paths.
pub(crate) fn compress_merkle_proofs<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
pub(crate) fn compress_merkle_proofs<F: RichField, H: Hasher<F>>(
cap_height: usize,
indices: &[usize],
proofs: &[MerkleProof<F, HC, H>],
) -> Vec<MerkleProof<F, HC, H>> {
proofs: &[MerkleProof<F, H>],
) -> Vec<MerkleProof<F, H>> {
assert!(!proofs.is_empty());
let height = cap_height + proofs[0].siblings.len();
let num_leaves = 1 << height;
@ -54,16 +53,13 @@ pub(crate) fn compress_merkle_proofs<F: RichField, HC: HashConfig, H: Hasher<F,
/// Decompress compressed Merkle proofs.
/// Note: The data and indices must be in the same order as in `compress_merkle_proofs`.
pub(crate) fn decompress_merkle_proofs<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
pub(crate) fn decompress_merkle_proofs<F: RichField, H: Hasher<F>>(
leaves_data: &[Vec<F>],
leaves_indices: &[usize],
compressed_proofs: &[MerkleProof<F, HC, H>],
compressed_proofs: &[MerkleProof<F, H>],
height: usize,
cap_height: usize,
) -> Vec<MerkleProof<F, HC, H>>
where
[(); HC::WIDTH]:,
{
) -> Vec<MerkleProof<F, H>> {
let num_leaves = 1 << height;
let compressed_proofs = compressed_proofs.to_vec();
let mut decompressed_proofs = Vec::with_capacity(compressed_proofs.len());
@ -134,11 +130,7 @@ mod tests {
let h = 10;
let cap_height = 3;
let vs = (0..1 << h).map(|_| vec![F::rand()]).collect::<Vec<_>>();
let mt =
MerkleTree::<F, <C as GenericConfig<D>>::HCO, <C as GenericConfig<D>>::Hasher>::new(
vs.clone(),
cap_height,
);
let mt = MerkleTree::<F, <C as GenericConfig<D>>::Hasher>::new(vs.clone(), cap_height);
let mut rng = OsRng;
let k = rng.gen_range(1..=1 << h);

View File

@ -3,6 +3,7 @@
use alloc::vec;
use alloc::vec::Vec;
use std::fmt::Debug;
use unroll::unroll_for_loops;
@ -16,7 +17,7 @@ use crate::hash::hashing::{compress, hash_n_to_hash_no_pad, PlonkyPermutation};
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::{BoolTarget, Target};
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::config::{AlgebraicHasher, Hasher, PoseidonHashConfig};
use crate::plonk::config::{AlgebraicHasher, Hasher};
pub const SPONGE_RATE: usize = 8;
pub const SPONGE_CAPACITY: usize = 4;
@ -632,36 +633,99 @@ pub trait Poseidon: PrimeField64 {
}
}
pub struct PoseidonPermutation;
impl<F: RichField> PlonkyPermutation<F, PoseidonHashConfig> for PoseidonPermutation {
fn permute(input: [F; SPONGE_WIDTH]) -> [F; SPONGE_WIDTH] {
F::poseidon(input)
/// Sponge state for the Poseidon permutation, generic over the element type
/// (a field element for native hashing, or `Target` for in-circuit hashing).
#[derive(Copy, Clone, Default, Debug, PartialEq)]
pub struct PoseidonPermutation<T> {
    // Internal sponge state: `SPONGE_WIDTH` elements.
    state: [T; SPONGE_WIDTH],
}

// `Eq` holds whenever the element type has total equality.
impl<T: Eq> Eq for PoseidonPermutation<T> {}

// Expose the whole state as a slice, as required by `PlonkyPermutation`'s
// `AsRef<[T]>` supertrait.
impl<T> AsRef<[T]> for PoseidonPermutation<T> {
    fn as_ref(&self) -> &[T] {
        &self.state
    }
}
/// Internal helper trait unifying the two element types a
/// `PoseidonPermutation` can hold: field elements (permuted natively) and
/// circuit `Target`s (which must be permuted via a gate, not directly).
trait Permuter: Sized {
    fn permute(input: [Self; SPONGE_WIDTH]) -> [Self; SPONGE_WIDTH];
}

// Field elements delegate to the concrete Poseidon permutation.
impl<F: Poseidon> Permuter for F {
    fn permute(input: [Self; SPONGE_WIDTH]) -> [Self; SPONGE_WIDTH] {
        <F as Poseidon>::poseidon(input)
    }
}

// `Target`s cannot be permuted outside a circuit; in-circuit callers must go
// through `permute_swapped()` instead, which routes the state through a gate.
impl Permuter for Target {
    fn permute(_input: [Self; SPONGE_WIDTH]) -> [Self; SPONGE_WIDTH] {
        panic!("Call `permute_swapped()` instead of `permute()`");
    }
}
impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<T>
    for PoseidonPermutation<T>
{
    const RATE: usize = SPONGE_RATE;
    const WIDTH: usize = SPONGE_WIDTH;

    fn new<I: IntoIterator<Item = T>>(elts: I) -> Self {
        // Default-initialise the full state, then absorb at most `WIDTH`
        // initial values from `elts`.
        let mut p = Self {
            state: [T::default(); SPONGE_WIDTH],
        };
        p.set_from_iter(elts, 0);
        p
    }

    fn set_elt(&mut self, elt: T, idx: usize) {
        // Panics if `idx >= WIDTH`, per the trait contract.
        self.state[idx] = elt;
    }

    fn set_from_slice(&mut self, elts: &[T], start_idx: usize) {
        // Overwrite `state[start_idx..start_idx + elts.len()]`; the slicing
        // panics if the range runs past `WIDTH`.
        self.state[start_idx..start_idx + elts.len()].copy_from_slice(elts);
    }

    fn set_from_iter<I: IntoIterator<Item = T>>(&mut self, elts: I, start_idx: usize) {
        // Copy from `elts` starting at `start_idx`, stopping when either the
        // iterator or the state runs out. Slicing panics if `start_idx > WIDTH`.
        self.state[start_idx..]
            .iter_mut()
            .zip(elts)
            .for_each(|(slot, val)| *slot = val);
    }

    fn permute(&mut self) {
        // Delegates to the element type's `Permuter` impl: the native
        // Poseidon permutation for fields, a panic for bare `Target`s.
        self.state = T::permute(self.state);
    }

    fn squeeze(&self) -> &[T] {
        // The squeezed output is the rate portion of the state.
        &self.state[..Self::RATE]
    }
}
/// Poseidon hash function.
///
/// Stateless marker type; the hashing behaviour lives in this file's
/// `Hasher` and `AlgebraicHasher` implementations.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct PoseidonHash;
impl<F: RichField> Hasher<F, PoseidonHashConfig> for PoseidonHash {
impl<F: RichField> Hasher<F> for PoseidonHash {
const HASH_SIZE: usize = 4 * 8;
type Hash = HashOut<F>;
type Permutation = PoseidonPermutation;
type Permutation = PoseidonPermutation<F>;
fn hash_no_pad(input: &[F]) -> Self::Hash {
hash_n_to_hash_no_pad::<F, PoseidonHashConfig, Self::Permutation>(input)
hash_n_to_hash_no_pad::<F, Self::Permutation>(input)
}
fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
compress::<F, PoseidonHashConfig, Self::Permutation>(left, right)
compress::<F, Self::Permutation>(left, right)
}
}
impl<F: RichField> AlgebraicHasher<F, PoseidonHashConfig> for PoseidonHash {
impl<F: RichField> AlgebraicHasher<F> for PoseidonHash {
type AlgebraicPermutation = PoseidonPermutation<Target>;
fn permute_swapped<const D: usize>(
inputs: [Target; SPONGE_WIDTH],
inputs: Self::AlgebraicPermutation,
swap: BoolTarget,
builder: &mut CircuitBuilder<F, D>,
) -> [Target; SPONGE_WIDTH]
) -> Self::AlgebraicPermutation
where
F: RichField + Extendable<D>,
{
@ -673,6 +737,7 @@ impl<F: RichField> AlgebraicHasher<F, PoseidonHashConfig> for PoseidonHash {
builder.connect(swap.target, swap_wire);
// Route input wires.
let inputs = inputs.as_ref();
for i in 0..SPONGE_WIDTH {
let in_wire = PoseidonGate::<F, D>::wire_input(i);
let in_wire = Target::wire(gate, in_wire);
@ -680,11 +745,9 @@ impl<F: RichField> AlgebraicHasher<F, PoseidonHashConfig> for PoseidonHash {
}
// Collect output wires.
(0..SPONGE_WIDTH)
.map(|i| Target::wire(gate, PoseidonGate::<F, D>::wire_output(i)))
.collect::<Vec<_>>()
.try_into()
.unwrap()
Self::AlgebraicPermutation::new(
(0..SPONGE_WIDTH).map(|i| Target::wire(gate, PoseidonGate::<F, D>::wire_output(i))),
)
}
}

View File

@ -4,7 +4,7 @@ use core::marker::PhantomData;
use crate::field::extension::{Extendable, FieldExtension};
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::{HashConfig, PlonkyPermutation};
use crate::hash::hashing::PlonkyPermutation;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::Target;
@ -13,14 +13,10 @@ use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher};
/// Observes prover messages, and generates challenges by hashing the transcript, a la Fiat-Shamir.
#[derive(Clone)]
pub struct Challenger<F: RichField, HC: HashConfig, H: Hasher<F, HC>>
where
[(); HC::WIDTH]:,
{
pub(crate) sponge_state: [F; HC::WIDTH],
pub struct Challenger<F: RichField, H: Hasher<F>> {
pub(crate) sponge_state: H::Permutation,
pub(crate) input_buffer: Vec<F>,
output_buffer: Vec<F>,
_phantom: PhantomData<H>,
}
/// Observes prover messages, and generates verifier challenges based on the transcript.
@ -31,16 +27,12 @@ where
/// design, but it can be viewed as a duplex sponge whose inputs are sometimes zero (when we perform
/// multiple squeezes) and whose outputs are sometimes ignored (when we perform multiple
/// absorptions). Thus the security properties of a duplex sponge still apply to our design.
impl<F: RichField, HC: HashConfig, H: Hasher<F, HC>> Challenger<F, HC, H>
where
[(); HC::WIDTH]:,
{
pub fn new() -> Challenger<F, HC, H> {
impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
pub fn new() -> Challenger<F, H> {
Challenger {
sponge_state: [F::ZERO; HC::WIDTH],
input_buffer: Vec::with_capacity(HC::RATE),
output_buffer: Vec::with_capacity(HC::RATE),
_phantom: Default::default(),
sponge_state: H::Permutation::new(std::iter::repeat(F::ZERO)),
input_buffer: Vec::with_capacity(H::Permutation::RATE),
output_buffer: Vec::with_capacity(H::Permutation::RATE),
}
}
@ -50,7 +42,7 @@ where
self.input_buffer.push(element);
if self.input_buffer.len() == HC::RATE {
if self.input_buffer.len() == H::Permutation::RATE {
self.duplexing();
}
}
@ -71,23 +63,19 @@ where
pub fn observe_extension_elements<const D: usize>(&mut self, elements: &[F::Extension])
where
F: RichField + Extendable<D>,
[(); HC::WIDTH]:,
{
for element in elements {
self.observe_extension_element(element);
}
}
pub fn observe_hash<OHC: HashConfig, OH: Hasher<F, OHC>>(&mut self, hash: OH::Hash) {
pub fn observe_hash<OH: Hasher<F>>(&mut self, hash: OH::Hash) {
self.observe_elements(&hash.to_vec())
}
pub fn observe_cap<OHC: HashConfig, OH: Hasher<F, OHC>>(
&mut self,
cap: &MerkleCap<F, OHC, OH>,
) {
pub fn observe_cap<OH: Hasher<F>>(&mut self, cap: &MerkleCap<F, OH>) {
for &hash in &cap.0 {
self.observe_hash::<OHC, OH>(hash);
self.observe_hash::<OH>(hash);
}
}
@ -139,24 +127,23 @@ where
/// Absorb any buffered inputs. After calling this, the input buffer will be empty, and the
/// output buffer will be full.
fn duplexing(&mut self) {
assert!(self.input_buffer.len() <= HC::RATE);
assert!(self.input_buffer.len() <= H::Permutation::RATE);
// Overwrite the first r elements with the inputs. This differs from a standard sponge,
// where we would xor or add in the inputs. This is a well-known variant, though,
// sometimes called "overwrite mode".
for (i, input) in self.input_buffer.drain(..).enumerate() {
self.sponge_state[i] = input;
}
self.sponge_state
.set_from_iter(self.input_buffer.drain(..), 0);
// Apply the permutation.
self.sponge_state = H::Permutation::permute(self.sponge_state);
self.sponge_state.permute();
self.output_buffer.clear();
self.output_buffer
.extend_from_slice(&self.sponge_state[0..HC::RATE]);
.extend_from_slice(self.sponge_state.squeeze());
}
pub fn compact(&mut self) -> [F; HC::WIDTH] {
pub fn compact(&mut self) -> H::Permutation {
if !self.input_buffer.is_empty() {
self.duplexing();
}
@ -165,48 +152,37 @@ where
}
}
impl<F: RichField, HC: HashConfig, H: AlgebraicHasher<F, HC>> Default for Challenger<F, HC, H>
where
[(); HC::WIDTH]:,
{
impl<F: RichField, H: AlgebraicHasher<F>> Default for Challenger<F, H> {
fn default() -> Self {
Self::new()
}
}
/// A recursive version of `Challenger`. The main difference is that `RecursiveChallenger`'s input
/// buffer can grow beyond `HC::RATE`. This is so that `observe_element` etc do not need access
/// buffer can grow beyond `H::Permutation::RATE`. This is so that `observe_element` etc do not need access
/// to the `CircuitBuilder`.
pub struct RecursiveChallenger<
F: RichField + Extendable<D>,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
const D: usize,
> where
[(); HC::WIDTH]:,
pub struct RecursiveChallenger<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
{
sponge_state: [Target; HC::WIDTH],
sponge_state: H::AlgebraicPermutation,
input_buffer: Vec<Target>,
output_buffer: Vec<Target>,
__: PhantomData<(F, H)>,
}
impl<F: RichField + Extendable<D>, HC: HashConfig, H: AlgebraicHasher<F, HC>, const D: usize>
RecursiveChallenger<F, HC, H, D>
where
[(); HC::WIDTH]:,
impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
RecursiveChallenger<F, H, D>
{
/// Creates a new recursive challenger whose sponge state is initialized to all zeros.
pub fn new(builder: &mut CircuitBuilder<F, D>) -> Self {
    let zero = builder.zero();
    Self {
        sponge_state: H::AlgebraicPermutation::new(std::iter::repeat(zero)),
        input_buffer: Vec::new(),
        output_buffer: Vec::new(),
        __: PhantomData,
    }
}
pub fn from_state(sponge_state: [Target; HC::WIDTH]) -> Self {
pub fn from_state(sponge_state: H::AlgebraicPermutation) -> Self {
Self {
sponge_state,
input_buffer: vec![],
@ -253,8 +229,8 @@ where
if self.output_buffer.is_empty() {
// Evaluate the permutation to produce `r` new outputs.
self.sponge_state = builder.permute::<HC, H>(self.sponge_state);
self.output_buffer = self.sponge_state[0..HC::RATE].to_vec();
self.sponge_state = builder.permute::<H>(self.sponge_state);
self.output_buffer = self.sponge_state.squeeze().to_vec();
}
self.output_buffer
@ -295,24 +271,20 @@ where
return;
}
for input_chunk in self.input_buffer.chunks(HC::RATE) {
for input_chunk in self.input_buffer.chunks(H::AlgebraicPermutation::RATE) {
// Overwrite the first r elements with the inputs. This differs from a standard sponge,
// where we would xor or add in the inputs. This is a well-known variant, though,
// sometimes called "overwrite mode".
for (i, &input) in input_chunk.iter().enumerate() {
self.sponge_state[i] = input;
}
// Apply the permutation.
self.sponge_state = builder.permute::<HC, H>(self.sponge_state);
self.sponge_state.set_from_slice(input_chunk, 0);
self.sponge_state = builder.permute::<H>(self.sponge_state);
}
self.output_buffer = self.sponge_state[0..HC::RATE].to_vec();
self.output_buffer = self.sponge_state.squeeze().to_vec();
self.input_buffer.clear();
}
pub fn compact(&mut self, builder: &mut CircuitBuilder<F, D>) -> [Target; HC::WIDTH] {
pub fn compact(&mut self, builder: &mut CircuitBuilder<F, D>) -> H::AlgebraicPermutation {
self.absorb_buffered_inputs(builder);
self.output_buffer.clear();
self.sponge_state
@ -335,11 +307,7 @@ mod tests {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
let mut challenger = Challenger::<
F,
<C as GenericConfig<D>>::HCI,
<C as GenericConfig<D>>::InnerHasher,
>::new();
let mut challenger = Challenger::<F, <C as GenericConfig<D>>::InnerHasher>::new();
let mut challenges = Vec::new();
for i in 1..10 {
@ -373,11 +341,7 @@ mod tests {
.map(|&n| F::rand_vec(n))
.collect();
let mut challenger = Challenger::<
F,
<C as GenericConfig<D>>::HCI,
<C as GenericConfig<D>>::InnerHasher,
>::new();
let mut challenger = Challenger::<F, <C as GenericConfig<D>>::InnerHasher>::new();
let mut outputs_per_round: Vec<Vec<F>> = Vec::new();
for (r, inputs) in inputs_per_round.iter().enumerate() {
challenger.observe_elements(inputs);
@ -386,12 +350,8 @@ mod tests {
let config = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(config);
let mut recursive_challenger = RecursiveChallenger::<
F,
<C as GenericConfig<D>>::HCI,
<C as GenericConfig<D>>::InnerHasher,
D,
>::new(&mut builder);
let mut recursive_challenger =
RecursiveChallenger::<F, <C as GenericConfig<D>>::InnerHasher, D>::new(&mut builder);
let mut recursive_outputs_per_round: Vec<Vec<Target>> = Vec::new();
for (r, inputs) in inputs_per_round.iter().enumerate() {
recursive_challenger.observe_elements(&builder.constants(inputs));

View File

@ -2,14 +2,13 @@ use alloc::vec;
use alloc::vec::Vec;
use hashbrown::HashMap;
use itertools::Itertools;
use itertools::{zip_eq, Itertools};
use crate::field::extension::{Extendable, FieldExtension};
use crate::field::types::Field;
use crate::fri::structure::{FriOpenings, FriOpeningsTarget};
use crate::fri::witness_util::set_fri_proof_target;
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::{BoolTarget, Target};
@ -28,10 +27,10 @@ pub trait WitnessWrite<F: Field> {
.for_each(|(&t, x)| self.set_target(t, x));
}
fn set_cap_target<HC: HashConfig, H: AlgebraicHasher<F, HC>>(
fn set_cap_target<H: AlgebraicHasher<F>>(
&mut self,
ct: &MerkleCapTarget,
value: &MerkleCap<F, HC, H>,
value: &MerkleCap<F, H>,
) where
F: RichField,
{
@ -44,13 +43,11 @@ pub trait WitnessWrite<F: Field> {
where
F: RichField + Extendable<D>,
{
self.set_target_arr(et.0, value.to_basefield_array());
self.set_target_arr(&et.0, &value.to_basefield_array());
}
fn set_target_arr<const N: usize>(&mut self, targets: [Target; N], values: [F; N]) {
(0..N).for_each(|i| {
self.set_target(targets[i], values[i]);
});
fn set_target_arr(&mut self, targets: &[Target], values: &[F]) {
zip_eq(targets, values).for_each(|(&target, &value)| self.set_target(target, value));
}
fn set_extension_targets<const D: usize>(
@ -78,7 +75,7 @@ pub trait WitnessWrite<F: Field> {
proof_with_pis: &ProofWithPublicInputs<F, C, D>,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
{
let ProofWithPublicInputs {
proof,
@ -104,7 +101,7 @@ pub trait WitnessWrite<F: Field> {
proof: &Proof<F, C, D>,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
{
self.set_cap_target(&proof_target.wires_cap, &proof.wires_cap);
self.set_cap_target(
@ -143,7 +140,7 @@ pub trait WitnessWrite<F: Field> {
vd: &VerifierOnlyCircuitData<C, D>,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
{
self.set_cap_target(&vdt.constants_sigmas_cap, &vd.constants_sigmas_cap);
self.set_hash_target(vdt.circuit_digest, vd.circuit_digest);
@ -225,14 +222,10 @@ pub trait Witness<F: Field>: WitnessWrite<F> {
}
}
fn get_merkle_cap_target<HC, H: Hasher<F, HC>>(
&self,
cap_target: MerkleCapTarget,
) -> MerkleCap<F, HC, H>
fn get_merkle_cap_target<H: Hasher<F>>(&self, cap_target: MerkleCapTarget) -> MerkleCap<F, H>
where
F: RichField,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
H: AlgebraicHasher<F>,
{
let cap = cap_target
.0

View File

@ -1,8 +1,6 @@
#![allow(clippy::too_many_arguments)]
#![allow(clippy::needless_range_loop)]
#![allow(clippy::upper_case_acronyms)]
#![allow(incomplete_features)]
#![feature(generic_const_exprs)]
#![cfg_attr(not(feature = "std"), no_std)]
extern crate alloc;

View File

@ -27,7 +27,6 @@ use crate::gates::noop::NoopGate;
use crate::gates::public_input::PublicInputGate;
use crate::gates::selectors::selector_polynomials;
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_proofs::MerkleProofTarget;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;
@ -435,9 +434,9 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}
}
pub fn constant_merkle_cap<HC: HashConfig, H: Hasher<F, HC, Hash = HashOut<F>>>(
pub fn constant_merkle_cap<H: Hasher<F, Hash = HashOut<F>>>(
&mut self,
cap: &MerkleCap<F, HC, H>,
cap: &MerkleCap<F, H>,
) -> MerkleCapTarget {
MerkleCapTarget(cap.0.iter().map(|h| self.constant_hash(*h)).collect())
}
@ -447,7 +446,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
verifier_data: &VerifierOnlyCircuitData<C, D>,
) -> VerifierCircuitTarget
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
{
VerifierCircuitTarget {
constants_sigmas_cap: self.constant_merkle_cap(&verifier_data.constants_sigmas_cap),
@ -738,11 +737,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}
/// Builds a "full circuit", with both prover and verifier data.
pub fn build<C: GenericConfig<D, F = F>>(mut self) -> CircuitData<F, C, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
pub fn build<C: GenericConfig<D, F = F>>(mut self) -> CircuitData<F, C, D> {
let mut timing = TimingTree::new("preprocess", Level::Trace);
#[cfg(feature = "std")]
let start = Instant::now();
@ -753,7 +748,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
// those hash wires match the claimed public inputs.
let num_public_inputs = self.public_inputs.len();
let public_inputs_hash =
self.hash_n_to_hash_no_pad::<C::HCI, C::InnerHasher>(self.public_inputs.clone());
self.hash_n_to_hash_no_pad::<C::InnerHasher>(self.public_inputs.clone());
let pi_gate = self.add_gate(PublicInputGate, vec![]);
for (&hash_part, wire) in public_inputs_hash
.elements
@ -946,22 +941,14 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}
/// Builds a "prover circuit", with data needed to generate proofs but not verify them.
pub fn build_prover<C: GenericConfig<D, F = F>>(self) -> ProverCircuitData<F, C, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
pub fn build_prover<C: GenericConfig<D, F = F>>(self) -> ProverCircuitData<F, C, D> {
// TODO: Can skip parts of this.
let circuit_data = self.build::<C>();
circuit_data.prover_data()
}
/// Builds a "verifier circuit", with data needed to verify proofs but not generate them.
pub fn build_verifier<C: GenericConfig<D, F = F>>(self) -> VerifierCircuitData<F, C, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
pub fn build_verifier<C: GenericConfig<D, F = F>>(self) -> VerifierCircuitData<F, C, D> {
// TODO: Can skip parts of this.
let circuit_data = self.build::<C>();
circuit_data.verifier_data()

View File

@ -19,7 +19,6 @@ use crate::fri::{FriConfig, FriParams};
use crate::gates::gate::GateRef;
use crate::gates::selectors::SelectorsInfo;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::generator::WitnessGeneratorRef;
@ -139,11 +138,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
buffer.read_circuit_data(gate_serializer, generator_serializer)
}
pub fn prove(&self, inputs: PartialWitness<F>) -> Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
pub fn prove(&self, inputs: PartialWitness<F>) -> Result<ProofWithPublicInputs<F, C, D>> {
prove::<F, C, D>(
&self.prover_only,
&self.common,
@ -152,44 +147,28 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
)
}
pub fn verify(&self, proof_with_pis: ProofWithPublicInputs<F, C, D>) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
pub fn verify(&self, proof_with_pis: ProofWithPublicInputs<F, C, D>) -> Result<()> {
verify::<F, C, D>(proof_with_pis, &self.verifier_only, &self.common)
}
pub fn verify_compressed(
&self,
compressed_proof_with_pis: CompressedProofWithPublicInputs<F, C, D>,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<()> {
compressed_proof_with_pis.verify(&self.verifier_only, &self.common)
}
pub fn compress(
&self,
proof: ProofWithPublicInputs<F, C, D>,
) -> Result<CompressedProofWithPublicInputs<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<CompressedProofWithPublicInputs<F, C, D>> {
proof.compress(&self.verifier_only.circuit_digest, &self.common)
}
pub fn decompress(
&self,
proof: CompressedProofWithPublicInputs<F, C, D>,
) -> Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<ProofWithPublicInputs<F, C, D>> {
proof.decompress(&self.verifier_only.circuit_digest, &self.common)
}
@ -256,11 +235,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
buffer.read_prover_circuit_data(gate_serializer, generator_serializer)
}
pub fn prove(&self, inputs: PartialWitness<F>) -> Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
pub fn prove(&self, inputs: PartialWitness<F>) -> Result<ProofWithPublicInputs<F, C, D>> {
prove::<F, C, D>(
&self.prover_only,
&self.common,
@ -298,22 +273,14 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
buffer.read_verifier_circuit_data(gate_serializer)
}
pub fn verify(&self, proof_with_pis: ProofWithPublicInputs<F, C, D>) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
pub fn verify(&self, proof_with_pis: ProofWithPublicInputs<F, C, D>) -> Result<()> {
verify::<F, C, D>(proof_with_pis, &self.verifier_only, &self.common)
}
pub fn verify_compressed(
&self,
compressed_proof_with_pis: CompressedProofWithPublicInputs<F, C, D>,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<()> {
compressed_proof_with_pis.verify(&self.verifier_only, &self.common)
}
}
@ -344,17 +311,17 @@ pub struct ProverOnlyCircuitData<
pub fft_root_table: Option<FftRootTable<F>>,
/// A digest of the "circuit" (i.e. the instance, minus public inputs), which can be used to
/// seed Fiat-Shamir.
pub circuit_digest: <<C as GenericConfig<D>>::Hasher as Hasher<F, C::HCO>>::Hash,
pub circuit_digest: <<C as GenericConfig<D>>::Hasher as Hasher<F>>::Hash,
}
/// Circuit data required by the verifier, but not the prover.
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub struct VerifierOnlyCircuitData<C: GenericConfig<D>, const D: usize> {
/// A commitment to each constant polynomial and each permutation polynomial.
pub constants_sigmas_cap: MerkleCap<C::F, C::HCO, C::Hasher>,
pub constants_sigmas_cap: MerkleCap<C::F, C::Hasher>,
/// A digest of the "circuit" (i.e. the instance, minus public inputs), which can be used to
/// seed Fiat-Shamir.
pub circuit_digest: <<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash,
pub circuit_digest: <<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash,
}
impl<C: GenericConfig<D>, const D: usize> VerifierOnlyCircuitData<C, D> {

View File

@ -9,7 +9,7 @@ use crate::field::extension::quadratic::QuadraticExtension;
use crate::field::extension::{Extendable, FieldExtension};
use crate::field::goldilocks_field::GoldilocksField;
use crate::hash::hash_types::{HashOut, RichField};
use crate::hash::hashing::{HashConfig, PlonkyPermutation};
use crate::hash::hashing::PlonkyPermutation;
use crate::hash::keccak::KeccakHash;
use crate::hash::poseidon::PoseidonHash;
use crate::iop::target::{BoolTarget, Target};
@ -25,7 +25,7 @@ pub trait GenericHashOut<F: RichField>:
}
/// Trait for hash functions.
pub trait Hasher<F: RichField, HC: HashConfig>: Sized + Clone + Debug + Eq + PartialEq {
pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
/// Size of `Hash` in bytes.
const HASH_SIZE: usize;
@ -33,22 +33,17 @@ pub trait Hasher<F: RichField, HC: HashConfig>: Sized + Clone + Debug + Eq + Par
type Hash: GenericHashOut<F>;
/// Permutation used in the sponge construction.
type Permutation: PlonkyPermutation<F, HC>;
type Permutation: PlonkyPermutation<F>;
/// Hash a message without any padding step. Note that this can enable length-extension attacks.
/// However, it is still collision-resistant in cases where the input has a fixed length.
fn hash_no_pad(input: &[F]) -> Self::Hash
where
[(); HC::WIDTH]:;
fn hash_no_pad(input: &[F]) -> Self::Hash;
/// Pad the message using the `pad10*1` rule, then hash it.
fn hash_pad(input: &[F]) -> Self::Hash
where
[(); HC::WIDTH]:,
{
fn hash_pad(input: &[F]) -> Self::Hash {
let mut padded_input = input.to_vec();
padded_input.push(F::ONE);
while (padded_input.len() + 1) % HC::WIDTH != 0 {
while (padded_input.len() + 1) % Self::Permutation::WIDTH != 0 {
padded_input.push(F::ZERO);
}
padded_input.push(F::ONE);
@ -57,10 +52,7 @@ pub trait Hasher<F: RichField, HC: HashConfig>: Sized + Clone + Debug + Eq + Par
/// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a
/// no-op.
fn hash_or_noop(inputs: &[F]) -> Self::Hash
where
[(); HC::WIDTH]:,
{
fn hash_or_noop(inputs: &[F]) -> Self::Hash {
if inputs.len() * 8 <= Self::HASH_SIZE {
let mut inputs_bytes = vec![0u8; Self::HASH_SIZE];
for i in 0..inputs.len() {
@ -73,22 +65,21 @@ pub trait Hasher<F: RichField, HC: HashConfig>: Sized + Clone + Debug + Eq + Par
}
}
fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash
where
[(); HC::WIDTH]:;
fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash;
}
/// Trait for algebraic hash functions, built from a permutation using the sponge construction.
pub trait AlgebraicHasher<F: RichField, HC: HashConfig>: Hasher<F, HC, Hash = HashOut<F>> {
pub trait AlgebraicHasher<F: RichField>: Hasher<F, Hash = HashOut<F>> {
type AlgebraicPermutation: PlonkyPermutation<Target>;
/// Circuit to conditionally swap two chunks of the inputs (useful in verifying Merkle proofs),
/// then apply the permutation.
fn permute_swapped<const D: usize>(
inputs: [Target; HC::WIDTH],
inputs: Self::AlgebraicPermutation,
swap: BoolTarget,
builder: &mut CircuitBuilder<F, D>,
) -> [Target; HC::WIDTH]
) -> Self::AlgebraicPermutation
where
[(); HC::WIDTH]:,
F: RichField + Extendable<D>;
}
@ -100,48 +91,28 @@ pub trait GenericConfig<const D: usize>:
type F: RichField + Extendable<D, Extension = Self::FE>;
/// Field extension of degree D of the main field.
type FE: FieldExtension<D, BaseField = Self::F>;
/// Hash configuration for this GenericConfig's `Hasher`.
type HCO: HashConfig;
/// Hash configuration for this GenericConfig's `InnerHasher`.
type HCI: HashConfig;
/// Hash function used for building Merkle trees.
type Hasher: Hasher<Self::F, Self::HCO>;
type Hasher: Hasher<Self::F>;
/// Algebraic hash function used for the challenger and hashing public inputs.
type InnerHasher: AlgebraicHasher<Self::F, Self::HCI>;
type InnerHasher: AlgebraicHasher<Self::F>;
}
/// Configuration using Poseidon over the Goldilocks field.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize)]
pub struct PoseidonGoldilocksConfig;
impl GenericConfig<2> for PoseidonGoldilocksConfig {
    type F = GoldilocksField;
    type FE = QuadraticExtension<Self::F>;
    type Hasher = PoseidonHash;
    type InnerHasher = PoseidonHash;
}
/// Configuration using truncated Keccak over the Goldilocks field.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct KeccakGoldilocksConfig;
impl GenericConfig<2> for KeccakGoldilocksConfig {
    type F = GoldilocksField;
    type FE = QuadraticExtension<Self::F>;
    type Hasher = KeccakHash<25>;
    // The inner (algebraic) hasher stays Poseidon, since Keccak is not algebraic.
    type InnerHasher = PoseidonHash;
}

View File

@ -9,7 +9,6 @@ use crate::fri::proof::{CompressedFriProof, FriChallenges, FriProof, FriProofTar
use crate::fri::verifier::{compute_evaluation, fri_combine_initial, PrecomputedReducedOpenings};
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::challenger::{Challenger, RecursiveChallenger};
use crate::iop::target::Target;
@ -24,38 +23,34 @@ use crate::plonk::proof::{
use crate::util::reverse_bits;
fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash,
wires_cap: &MerkleCap<F, C::HCO, C::Hasher>,
plonk_zs_partial_products_cap: &MerkleCap<F, C::HCO, C::Hasher>,
quotient_polys_cap: &MerkleCap<F, C::HCO, C::Hasher>,
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash,
wires_cap: &MerkleCap<F, C::Hasher>,
plonk_zs_partial_products_cap: &MerkleCap<F, C::Hasher>,
quotient_polys_cap: &MerkleCap<F, C::Hasher>,
openings: &OpeningSet<F, D>,
commit_phase_merkle_caps: &[MerkleCap<F, C::HCO, C::Hasher>],
commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
final_poly: &PolynomialCoeffs<F::Extension>,
pow_witness: F,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<ProofChallenges<F, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> anyhow::Result<ProofChallenges<F, D>> {
let config = &common_data.config;
let num_challenges = config.num_challenges;
let mut challenger = Challenger::<F, C::HCO, C::Hasher>::new();
let mut challenger = Challenger::<F, C::Hasher>::new();
// Observe the instance.
challenger.observe_hash::<C::HCO, C::Hasher>(*circuit_digest);
challenger.observe_hash::<C::HCI, C::InnerHasher>(public_inputs_hash);
challenger.observe_hash::<C::Hasher>(*circuit_digest);
challenger.observe_hash::<C::InnerHasher>(public_inputs_hash);
challenger.observe_cap::<C::HCO, C::Hasher>(wires_cap);
challenger.observe_cap::<C::Hasher>(wires_cap);
let plonk_betas = challenger.get_n_challenges(num_challenges);
let plonk_gammas = challenger.get_n_challenges(num_challenges);
challenger.observe_cap::<C::HCO, C::Hasher>(plonk_zs_partial_products_cap);
challenger.observe_cap::<C::Hasher>(plonk_zs_partial_products_cap);
let plonk_alphas = challenger.get_n_challenges(num_challenges);
challenger.observe_cap::<C::HCO, C::Hasher>(quotient_polys_cap);
challenger.observe_cap::<C::Hasher>(quotient_polys_cap);
let plonk_zeta = challenger.get_extension_challenge::<D>();
challenger.observe_openings(&openings.to_fri_openings());
@ -80,13 +75,9 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
{
pub(crate) fn fri_query_indices(
&self,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<Vec<usize>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> anyhow::Result<Vec<usize>> {
Ok(self
.get_challenges(self.get_public_inputs_hash(), circuit_digest, common_data)?
.fri_challenges
@ -96,14 +87,10 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
/// Computes all Fiat-Shamir challenges used in the Plonk proof.
pub fn get_challenges(
&self,
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash,
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<ProofChallenges<F, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> anyhow::Result<ProofChallenges<F, D>> {
let Proof {
wires_cap,
plonk_zs_partial_products_cap,
@ -139,14 +126,10 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
/// Computes all Fiat-Shamir challenges used in the Plonk proof.
pub(crate) fn get_challenges(
&self,
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash,
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<ProofChallenges<F, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> anyhow::Result<ProofChallenges<F, D>> {
let CompressedProof {
wires_cap,
plonk_zs_partial_products_cap,
@ -266,14 +249,12 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
inner_common_data: &CommonCircuitData<F, D>,
) -> ProofChallengesTarget<D>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let config = &inner_common_data.config;
let num_challenges = config.num_challenges;
let mut challenger = RecursiveChallenger::<F, C::HCO, C::Hasher, D>::new(self);
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(self);
// Observe the instance.
challenger.observe_hash(&inner_circuit_digest);
@ -316,9 +297,7 @@ impl<const D: usize> ProofWithPublicInputsTarget<D> {
inner_common_data: &CommonCircuitData<F, D>,
) -> ProofChallengesTarget<D>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let ProofTarget {
wires_cap,

View File

@ -15,7 +15,6 @@ use crate::fri::structure::{
};
use crate::fri::FriParams;
use crate::hash::hash_types::{MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::Target;
@ -30,15 +29,15 @@ use crate::util::serialization::{Buffer, Read};
#[serde(bound = "")]
pub struct Proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Merkle cap of LDEs of wire values.
pub wires_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub wires_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of Z, in the context of Plonk's permutation argument.
pub plonk_zs_partial_products_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub plonk_zs_partial_products_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of the quotient polynomial components.
pub quotient_polys_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: OpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: FriProof<F, C::HCO, C::Hasher, D>,
pub opening_proof: FriProof<F, C::Hasher, D>,
}
#[derive(Clone, Debug, Eq, PartialEq)]
@ -87,13 +86,9 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
{
pub fn compress(
self,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<CompressedProofWithPublicInputs<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> anyhow::Result<CompressedProofWithPublicInputs<F, C, D>> {
let indices = self.fri_query_indices(circuit_digest, common_data)?;
let compressed_proof = self.proof.compress(&indices, &common_data.fri_params);
Ok(CompressedProofWithPublicInputs {
@ -104,10 +99,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
pub fn get_public_inputs_hash(
&self,
) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash
where
[(); C::HCI::WIDTH]:,
{
) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash {
C::InnerHasher::hash_no_pad(&self.public_inputs)
}
@ -137,15 +129,15 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
pub struct CompressedProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
{
/// Merkle cap of LDEs of wire values.
pub wires_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub wires_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of Z, in the context of Plonk's permutation argument.
pub plonk_zs_partial_products_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub plonk_zs_partial_products_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of the quotient polynomial components.
pub quotient_polys_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: OpeningSet<F, D>,
/// A compressed batch FRI argument for all openings.
pub opening_proof: CompressedFriProof<F, C::HCO, C::Hasher, D>,
pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
@ -157,10 +149,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
challenges: &ProofChallenges<F, D>,
fri_inferred_elements: FriInferredElements<F, D>,
params: &FriParams,
) -> Proof<F, C, D>
where
[(); C::HCO::WIDTH]:,
{
) -> Proof<F, C, D> {
let CompressedProof {
wires_cap,
plonk_zs_partial_products_cap,
@ -195,13 +184,9 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
{
pub fn decompress(
self,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> anyhow::Result<ProofWithPublicInputs<F, C, D>> {
let challenges =
self.get_challenges(self.get_public_inputs_hash(), circuit_digest, common_data)?;
let fri_inferred_elements = self.get_inferred_elements(&challenges, common_data);
@ -218,11 +203,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
self,
verifier_data: &VerifierOnlyCircuitData<C, D>,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> anyhow::Result<()> {
ensure!(
self.public_inputs.len() == common_data.num_public_inputs,
"Number of public inputs doesn't match circuit data."
@ -248,10 +229,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
pub(crate) fn get_public_inputs_hash(
&self,
) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash
where
[(); C::HCI::WIDTH]:,
{
) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash {
C::InnerHasher::hash_no_pad(&self.public_inputs)
}

View File

@ -11,7 +11,6 @@ use crate::field::types::Field;
use crate::field::zero_poly_coset::ZeroPolyOnCoset;
use crate::fri::oracle::PolynomialBatch;
use crate::hash::hash_types::RichField;
use crate::hash::hashing::HashConfig;
use crate::iop::challenger::Challenger;
use crate::iop::generator::generate_partial_witness;
use crate::iop::witness::{MatrixWitness, PartialWitness, Witness};
@ -33,10 +32,8 @@ pub fn prove<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D:
timing: &mut TimingTree,
) -> Result<ProofWithPublicInputs<F, C, D>>
where
C::Hasher: Hasher<F, C::HCO>,
C::InnerHasher: Hasher<F, C::HCI>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: Hasher<F>,
C::InnerHasher: Hasher<F>,
{
let config = &common_data.config;
let num_challenges = config.num_challenges;
@ -81,13 +78,13 @@ where
)
);
let mut challenger = Challenger::<F, C::HCO, C::Hasher>::new();
let mut challenger = Challenger::<F, C::Hasher>::new();
// Observe the instance.
challenger.observe_hash::<C::HCO, C::Hasher>(prover_data.circuit_digest);
challenger.observe_hash::<C::HCI, C::InnerHasher>(public_inputs_hash);
challenger.observe_hash::<C::Hasher>(prover_data.circuit_digest);
challenger.observe_hash::<C::InnerHasher>(public_inputs_hash);
challenger.observe_cap::<C::HCO, C::Hasher>(&wires_commitment.merkle_tree.cap);
challenger.observe_cap::<C::Hasher>(&wires_commitment.merkle_tree.cap);
let betas = challenger.get_n_challenges(num_challenges);
let gammas = challenger.get_n_challenges(num_challenges);
@ -121,8 +118,7 @@ where
)
);
challenger
.observe_cap::<C::HCO, C::Hasher>(&partial_products_and_zs_commitment.merkle_tree.cap);
challenger.observe_cap::<C::Hasher>(&partial_products_and_zs_commitment.merkle_tree.cap);
let alphas = challenger.get_n_challenges(num_challenges);
@ -170,7 +166,7 @@ where
)
);
challenger.observe_cap::<C::HCO, C::Hasher>(&quotient_polys_commitment.merkle_tree.cap);
challenger.observe_cap::<C::Hasher>(&quotient_polys_commitment.merkle_tree.cap);
let zeta = challenger.get_extension_challenge::<D>();
// To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
@ -324,7 +320,7 @@ fn compute_quotient_polys<
>(
common_data: &CommonCircuitData<F, D>,
prover_data: &'a ProverOnlyCircuitData<F, C, D>,
public_inputs_hash: &<<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash,
public_inputs_hash: &<<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash,
wires_commitment: &'a PolynomialBatch<F, C, D>,
zs_partial_products_commitment: &'a PolynomialBatch<F, C, D>,
betas: &[F],

View File

@ -4,7 +4,6 @@ use crate::field::extension::Extendable;
use crate::field::types::Field;
use crate::fri::verifier::verify_fri_proof;
use crate::hash::hash_types::RichField;
use crate::hash::hashing::HashConfig;
use crate::plonk::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData};
use crate::plonk::config::{GenericConfig, Hasher};
use crate::plonk::plonk_common::reduce_with_powers;
@ -17,11 +16,7 @@ pub(crate) fn verify<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, c
proof_with_pis: ProofWithPublicInputs<F, C, D>,
verifier_data: &VerifierOnlyCircuitData<C, D>,
common_data: &CommonCircuitData<F, D>,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<()> {
validate_proof_with_pis_shape(&proof_with_pis, common_data)?;
let public_inputs_hash = proof_with_pis.get_public_inputs_hash();
@ -46,14 +41,11 @@ pub(crate) fn verify_with_challenges<
const D: usize,
>(
proof: Proof<F, C, D>,
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash,
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash,
challenges: ProofChallenges<F, D>,
verifier_data: &VerifierOnlyCircuitData<C, D>,
common_data: &CommonCircuitData<F, D>,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
{
) -> Result<()> {
let local_constants = &proof.openings.constants;
let local_wires = &proof.openings.wires;
let vars = EvaluationVars {

View File

@ -8,7 +8,6 @@ use crate::fri::proof::{
};
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_proofs::MerkleProofTarget;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::{BoolTarget, Target};
@ -30,9 +29,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
inner_verifier_data1: &VerifierCircuitTarget,
inner_common_data: &CommonCircuitData<F, D>,
) where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let selected_proof =
self.select_proof_with_pis(condition, proof_with_pis0, proof_with_pis1);
@ -61,9 +58,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
inner_common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<()>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let (dummy_proof_with_pis_target, dummy_verifier_data_target) =
self.dummy_proof_and_vk::<C>(inner_common_data)?;

View File

@ -4,7 +4,6 @@ use anyhow::{ensure, Result};
use crate::field::extension::Extendable;
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::{BoolTarget, Target};
use crate::plonk::circuit_builder::CircuitBuilder;
@ -18,7 +17,7 @@ use crate::util::serialization::{Buffer, IoResult, Read, Write};
impl<C: GenericConfig<D>, const D: usize> VerifierOnlyCircuitData<C, D> {
fn from_slice(slice: &[C::F], common_data: &CommonCircuitData<C::F, D>) -> Result<Self>
where
C::Hasher: AlgebraicHasher<C::F, C::HCO>,
C::Hasher: AlgebraicHasher<C::F>,
{
// The structure of the public inputs is `[..., circuit_digest, constants_sigmas_cap]`.
let cap_len = common_data.config.fri_config.num_cap_elements();
@ -107,9 +106,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
common_data: &CommonCircuitData<F, D>,
) -> Result<()>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let verifier_data = self
.verifier_data_public_input
@ -161,9 +158,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
common_data: &CommonCircuitData<F, D>,
) -> Result<()>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let (dummy_proof_with_pis_target, dummy_verifier_data_target) =
self.dummy_proof_and_vk::<C>(common_data)?;
@ -190,9 +185,7 @@ pub fn check_cyclic_proof_verifier_data<
common_data: &CommonCircuitData<F, D>,
) -> Result<()>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let pis = VerifierOnlyCircuitData::<C, D>::from_slice(&proof.public_inputs, common_data)?;
ensure!(verifier_data.constants_sigmas_cap == pis.constants_sigmas_cap);
@ -209,14 +202,12 @@ mod tests {
use crate::field::types::{Field, PrimeField64};
use crate::gates::noop::NoopGate;
use crate::hash::hash_types::{HashOutTarget, RichField};
use crate::hash::hashing::{hash_n_to_hash_no_pad, HashConfig};
use crate::hash::hashing::hash_n_to_hash_no_pad;
use crate::hash::poseidon::{PoseidonHash, PoseidonPermutation};
use crate::iop::witness::{PartialWitness, WitnessWrite};
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::circuit_data::{CircuitConfig, CommonCircuitData};
use crate::plonk::config::{
AlgebraicHasher, GenericConfig, PoseidonGoldilocksConfig, PoseidonHashConfig,
};
use crate::plonk::config::{AlgebraicHasher, GenericConfig, PoseidonGoldilocksConfig};
use crate::recursion::cyclic_recursion::check_cyclic_proof_verifier_data;
use crate::recursion::dummy_circuit::cyclic_base_proof;
@ -227,9 +218,7 @@ mod tests {
const D: usize,
>() -> CommonCircuitData<F, D>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let config = CircuitConfig::standard_recursion_config();
let builder = CircuitBuilder::<F, D>::new(config);
@ -274,9 +263,8 @@ mod tests {
let initial_hash_target = builder.add_virtual_hash();
builder.register_public_inputs(&initial_hash_target.elements);
let current_hash_in = builder.add_virtual_hash();
let current_hash_out = builder.hash_n_to_hash_no_pad::<PoseidonHashConfig, PoseidonHash>(
current_hash_in.elements.to_vec(),
);
let current_hash_out =
builder.hash_n_to_hash_no_pad::<PoseidonHash>(current_hash_in.elements.to_vec());
builder.register_public_inputs(&current_hash_out.elements);
let counter = builder.add_virtual_public_input();
@ -377,8 +365,7 @@ mod tests {
fn iterate_poseidon<F: RichField>(initial_state: [F; 4], n: usize) -> [F; 4] {
let mut current = initial_state;
for _ in 0..n {
current = hash_n_to_hash_no_pad::<F, PoseidonHashConfig, PoseidonPermutation>(&current)
.elements;
current = hash_n_to_hash_no_pad::<F, PoseidonPermutation<F>>(&current).elements;
}
current
}

View File

@ -10,7 +10,6 @@ use crate::fri::proof::{FriProof, FriProofTarget};
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::gates::noop::NoopGate;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::generator::{GeneratedValues, SimpleGenerator};
use crate::iop::target::Target;
@ -39,9 +38,7 @@ pub fn cyclic_base_proof<F, C, const D: usize>(
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
C::Hasher: AlgebraicHasher<C::F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<C::F>,
{
let pis_len = common_data.num_public_inputs;
let cap_elements = common_data.config.fri_config.num_cap_elements();
@ -76,8 +73,6 @@ pub(crate) fn dummy_proof<
nonzero_public_inputs: HashMap<usize, F>,
) -> anyhow::Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let mut pw = PartialWitness::new();
for i in 0..circuit.common.num_public_inputs {
@ -94,11 +89,7 @@ pub(crate) fn dummy_circuit<
const D: usize,
>(
common_data: &CommonCircuitData<F, D>,
) -> CircuitData<F, C, D>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> CircuitData<F, C, D> {
let config = common_data.config.clone();
assert!(
!common_data.config.zero_knowledge,
@ -132,9 +123,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<(ProofWithPublicInputsTarget<D>, VerifierCircuitTarget)>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let dummy_circuit = dummy_circuit::<F, C, D>(common_data);
let dummy_proof_with_pis = dummy_proof::<F, C, D>(&dummy_circuit, HashMap::new())?;
@ -212,10 +201,9 @@ where
let verifier_data = VerifierOnlyCircuitData {
constants_sigmas_cap: MerkleCap(vec![]),
circuit_digest:
<<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash::from_bytes(
&vec![0; <<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::HASH_SIZE],
),
circuit_digest: <<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash::from_bytes(
&vec![0; <<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::HASH_SIZE],
),
};
Self {
@ -231,7 +219,7 @@ impl<F, C, const D: usize> DummyProofGenerator<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F> + 'static,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
{
pub fn deserialize_with_circuit_data(
src: &mut Buffer,
@ -254,7 +242,7 @@ impl<F, C, const D: usize> SimpleGenerator<F> for DummyProofGenerator<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F> + 'static,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
{
fn id(&self) -> String {
"DummyProofGenerator".to_string()

View File

@ -1,6 +1,5 @@
use crate::field::extension::Extendable;
use crate::hash::hash_types::{HashOutTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::circuit_data::{CommonCircuitData, VerifierCircuitTarget};
use crate::plonk::config::{AlgebraicHasher, GenericConfig};
@ -21,16 +20,14 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
inner_verifier_data: &VerifierCircuitTarget,
inner_common_data: &CommonCircuitData<F, D>,
) where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
assert_eq!(
proof_with_pis.public_inputs.len(),
inner_common_data.num_public_inputs
);
let public_inputs_hash = self
.hash_n_to_hash_no_pad::<C::HCI, C::InnerHasher>(proof_with_pis.public_inputs.clone());
let public_inputs_hash =
self.hash_n_to_hash_no_pad::<C::InnerHasher>(proof_with_pis.public_inputs.clone());
let challenges = proof_with_pis.get_challenges::<F, C>(
self,
public_inputs_hash,
@ -56,8 +53,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
inner_verifier_data: &VerifierCircuitTarget,
inner_common_data: &CommonCircuitData<F, D>,
) where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let one = self.one_extension();
@ -329,11 +325,7 @@ mod tests {
fn dummy_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
config: &CircuitConfig,
num_dummy_gates: u64,
) -> Result<Proof<F, C, D>>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<Proof<F, C, D>> {
let mut builder = CircuitBuilder::<F, D>::new(config.clone());
for _ in 0..num_dummy_gates {
builder.add_gate(NoopGate, vec![]);
@ -362,11 +354,7 @@ mod tests {
print_timing: bool,
) -> Result<Proof<F, C, D>>
where
InnerC::Hasher: AlgebraicHasher<F, InnerC::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
[(); InnerC::HCO::WIDTH]:,
[(); InnerC::HCI::WIDTH]:,
InnerC::Hasher: AlgebraicHasher<F>,
{
let mut builder = CircuitBuilder::<F, D>::new(config.clone());
let mut pw = PartialWitness::new();
@ -418,11 +406,7 @@ mod tests {
proof: &ProofWithPublicInputs<F, C, D>,
vd: &VerifierOnlyCircuitData<C, D>,
cd: &CommonCircuitData<F, D>,
) -> Result<()>
where
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
) -> Result<()> {
let proof_bytes = proof.to_bytes();
info!("Proof length: {} bytes", proof_bytes.len());
let proof_from_bytes = ProofWithPublicInputs::from_bytes(proof_bytes, cd)?;

View File

@ -133,7 +133,7 @@ pub mod default {
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F> + 'static,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
{
impl_generator_serializer! {
DefaultGeneratorSerializer,

View File

@ -32,7 +32,6 @@ use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::gates::gate::GateRef;
use crate::gates::selectors::SelectorsInfo;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::HashConfig;
use crate::hash::merkle_proofs::{MerkleProof, MerkleProofTarget};
use crate::hash::merkle_tree::{MerkleCap, MerkleTree};
use crate::iop::ext_target::ExtensionTarget;
@ -235,11 +234,10 @@ pub trait Read {
/// Reads a hash value from `self`.
#[inline]
fn read_hash<F, HC, H>(&mut self) -> IoResult<H::Hash>
fn read_hash<F, H>(&mut self) -> IoResult<H::Hash>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
let mut buf = vec![0; H::HASH_SIZE];
self.read_exact(&mut buf)?;
@ -259,29 +257,27 @@ pub trait Read {
/// Reads a vector of Hash from `self`.
#[inline]
fn read_hash_vec<F, HC, H>(&mut self, length: usize) -> IoResult<Vec<H::Hash>>
fn read_hash_vec<F, H>(&mut self, length: usize) -> IoResult<Vec<H::Hash>>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
(0..length)
.map(|_| self.read_hash::<F, HC, H>())
.map(|_| self.read_hash::<F, H>())
.collect::<Result<Vec<_>, _>>()
}
/// Reads a value of type [`MerkleCap`] from `self` with the given `cap_height`.
#[inline]
fn read_merkle_cap<F, HC, H>(&mut self, cap_height: usize) -> IoResult<MerkleCap<F, HC, H>>
fn read_merkle_cap<F, H>(&mut self, cap_height: usize) -> IoResult<MerkleCap<F, H>>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
let cap_length = 1 << cap_height;
Ok(MerkleCap(
(0..cap_length)
.map(|_| self.read_hash::<F, HC, H>())
.map(|_| self.read_hash::<F, H>())
.collect::<Result<Vec<_>, _>>()?,
))
}
@ -299,11 +295,10 @@ pub trait Read {
/// Reads a value of type [`MerkleTree`] from `self`.
#[inline]
fn read_merkle_tree<F, HC, H>(&mut self) -> IoResult<MerkleTree<F, HC, H>>
fn read_merkle_tree<F, H>(&mut self) -> IoResult<MerkleTree<F, H>>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
let leaves_len = self.read_usize()?;
let mut leaves = Vec::with_capacity(leaves_len);
@ -313,9 +308,9 @@ pub trait Read {
}
let digests_len = self.read_usize()?;
let digests = self.read_hash_vec::<F, HC, H>(digests_len)?;
let digests = self.read_hash_vec::<F, H>(digests_len)?;
let cap_height = self.read_usize()?;
let cap = self.read_merkle_cap::<F, HC, H>(cap_height)?;
let cap = self.read_merkle_cap::<F, H>(cap_height)?;
Ok(MerkleTree {
leaves,
digests,
@ -379,16 +374,15 @@ pub trait Read {
/// Reads a value of type [`MerkleProof`] from `self`.
#[inline]
fn read_merkle_proof<F, HC, H>(&mut self) -> IoResult<MerkleProof<F, HC, H>>
fn read_merkle_proof<F, H>(&mut self) -> IoResult<MerkleProof<F, H>>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
let length = self.read_u8()?;
Ok(MerkleProof {
siblings: (0..length)
.map(|_| self.read_hash::<F, HC, H>())
.map(|_| self.read_hash::<F, H>())
.collect::<Result<_, _>>()?,
})
}
@ -409,7 +403,7 @@ pub trait Read {
fn read_fri_initial_proof<F, C, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> IoResult<FriInitialTreeProof<F, C::HCO, C::Hasher>>
) -> IoResult<FriInitialTreeProof<F, C::Hasher>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -461,7 +455,7 @@ pub trait Read {
&mut self,
arity: usize,
compressed: bool,
) -> IoResult<FriQueryStep<F, C::HCO, C::Hasher, D>>
) -> IoResult<FriQueryStep<F, C::Hasher, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -491,7 +485,7 @@ pub trait Read {
fn read_fri_query_rounds<F, C, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> IoResult<Vec<FriQueryRound<F, C::HCO, C::Hasher, D>>>
) -> IoResult<Vec<FriQueryRound<F, C::Hasher, D>>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -540,7 +534,7 @@ pub trait Read {
fn read_fri_proof<F, C, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> IoResult<FriProof<F, C::HCO, C::Hasher, D>>
) -> IoResult<FriProof<F, C::Hasher, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -829,8 +823,7 @@ pub trait Read {
false => None,
};
let circuit_digest =
self.read_hash::<F, <C as GenericConfig<D>>::HCO, <C as GenericConfig<D>>::Hasher>()?;
let circuit_digest = self.read_hash::<F, <C as GenericConfig<D>>::Hasher>()?;
Ok(ProverOnlyCircuitData {
generators,
@ -871,8 +864,7 @@ pub trait Read {
) -> IoResult<VerifierOnlyCircuitData<C, D>> {
let height = self.read_usize()?;
let constants_sigmas_cap = self.read_merkle_cap(height)?;
let circuit_digest =
self.read_hash::<F, <C as GenericConfig<D>>::HCO, <C as GenericConfig<D>>::Hasher>()?;
let circuit_digest = self.read_hash::<F, <C as GenericConfig<D>>::Hasher>()?;
Ok(VerifierOnlyCircuitData {
constants_sigmas_cap,
circuit_digest,
@ -984,7 +976,7 @@ pub trait Read {
fn read_compressed_fri_query_rounds<F, C, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> IoResult<CompressedFriQueryRounds<F, C::HCO, C::Hasher, D>>
) -> IoResult<CompressedFriQueryRounds<F, C::Hasher, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -1032,7 +1024,7 @@ pub trait Read {
fn read_compressed_fri_proof<F, C, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> IoResult<CompressedFriProof<F, C::HCO, C::Hasher, D>>
) -> IoResult<CompressedFriProof<F, C::Hasher, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -1256,11 +1248,10 @@ pub trait Write {
/// Writes a hash `h` to `self`.
#[inline]
fn write_hash<F, HC, H>(&mut self, h: H::Hash) -> IoResult<()>
fn write_hash<F, H>(&mut self, h: H::Hash) -> IoResult<()>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
self.write_all(&h.to_bytes())
}
@ -1277,15 +1268,14 @@ pub trait Write {
/// Writes a vector of Hash `v` to `self.`
#[inline]
fn write_hash_vec<F, HC, H>(&mut self, v: &[H::Hash]) -> IoResult<()>
fn write_hash_vec<F, H>(&mut self, v: &[H::Hash]) -> IoResult<()>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
self.write_usize(v.len())?;
for &elem in v.iter() {
self.write_hash::<F, HC, H>(elem)?;
self.write_hash::<F, H>(elem)?;
}
Ok(())
@ -1293,14 +1283,13 @@ pub trait Write {
/// Writes `cap`, a value of type [`MerkleCap`], to `self`.
#[inline]
fn write_merkle_cap<F, HC, H>(&mut self, cap: &MerkleCap<F, HC, H>) -> IoResult<()>
fn write_merkle_cap<F, H>(&mut self, cap: &MerkleCap<F, H>) -> IoResult<()>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
for &a in &cap.0 {
self.write_hash::<F, HC, H>(a)?;
self.write_hash::<F, H>(a)?;
}
Ok(())
}
@ -1317,18 +1306,17 @@ pub trait Write {
/// Writes `tree`, a value of type [`MerkleTree`], to `self`.
#[inline]
fn write_merkle_tree<F, HC, H>(&mut self, tree: &MerkleTree<F, HC, H>) -> IoResult<()>
fn write_merkle_tree<F, H>(&mut self, tree: &MerkleTree<F, H>) -> IoResult<()>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
self.write_usize(tree.leaves.len())?;
for i in 0..tree.leaves.len() {
self.write_usize(tree.leaves[i].len())?;
self.write_field_vec(&tree.leaves[i])?;
}
self.write_hash_vec::<F, HC, H>(&tree.digests)?;
self.write_hash_vec::<F, H>(&tree.digests)?;
self.write_usize(tree.cap.height())?;
self.write_merkle_cap(&tree.cap)?;
@ -1367,11 +1355,10 @@ pub trait Write {
/// Writes a value `p` of type [`MerkleProof`] to `self.`
#[inline]
fn write_merkle_proof<F, HC, H>(&mut self, p: &MerkleProof<F, HC, H>) -> IoResult<()>
fn write_merkle_proof<F, H>(&mut self, p: &MerkleProof<F, H>) -> IoResult<()>
where
F: RichField,
HC: HashConfig,
H: Hasher<F, HC>,
H: Hasher<F>,
{
let length = p.siblings.len();
self.write_u8(
@ -1380,7 +1367,7 @@ pub trait Write {
.expect("Merkle proof length must fit in u8."),
)?;
for &h in &p.siblings {
self.write_hash::<F, HC, H>(h)?;
self.write_hash::<F, H>(h)?;
}
Ok(())
}
@ -1404,7 +1391,7 @@ pub trait Write {
#[inline]
fn write_fri_initial_proof<F, C, const D: usize>(
&mut self,
fitp: &FriInitialTreeProof<F, C::HCO, C::Hasher>,
fitp: &FriInitialTreeProof<F, C::Hasher>,
) -> IoResult<()>
where
F: RichField + Extendable<D>,
@ -1435,7 +1422,7 @@ pub trait Write {
#[inline]
fn write_fri_query_step<F, C, const D: usize>(
&mut self,
fqs: &FriQueryStep<F, C::HCO, C::Hasher, D>,
fqs: &FriQueryStep<F, C::Hasher, D>,
) -> IoResult<()>
where
F: RichField + Extendable<D>,
@ -1459,7 +1446,7 @@ pub trait Write {
#[inline]
fn write_fri_query_rounds<F, C, const D: usize>(
&mut self,
fqrs: &[FriQueryRound<F, C::HCO, C::Hasher, D>],
fqrs: &[FriQueryRound<F, C::Hasher, D>],
) -> IoResult<()>
where
F: RichField + Extendable<D>,
@ -1495,7 +1482,7 @@ pub trait Write {
#[inline]
fn write_fri_proof<F, C, const D: usize>(
&mut self,
fp: &FriProof<F, C::HCO, C::Hasher, D>,
fp: &FriProof<F, C::Hasher, D>,
) -> IoResult<()>
where
F: RichField + Extendable<D>,
@ -1771,9 +1758,7 @@ pub trait Write {
None => self.write_bool(false)?,
}
self.write_hash::<F, <C as GenericConfig<D>>::HCO, <C as GenericConfig<D>>::Hasher>(
*circuit_digest,
)?;
self.write_hash::<F, <C as GenericConfig<D>>::Hasher>(*circuit_digest)?;
Ok(())
}
@ -1807,9 +1792,7 @@ pub trait Write {
self.write_usize(constants_sigmas_cap.height())?;
self.write_merkle_cap(constants_sigmas_cap)?;
self.write_hash::<F, <C as GenericConfig<D>>::HCO, <C as GenericConfig<D>>::Hasher>(
*circuit_digest,
)?;
self.write_hash::<F, <C as GenericConfig<D>>::Hasher>(*circuit_digest)?;
Ok(())
}
@ -1903,7 +1886,7 @@ pub trait Write {
#[inline]
fn write_compressed_fri_query_rounds<F, C, const D: usize>(
&mut self,
cfqrs: &CompressedFriQueryRounds<F, C::HCO, C::Hasher, D>,
cfqrs: &CompressedFriQueryRounds<F, C::Hasher, D>,
) -> IoResult<()>
where
F: RichField + Extendable<D>,
@ -1931,7 +1914,7 @@ pub trait Write {
#[inline]
fn write_compressed_fri_proof<F, C, const D: usize>(
&mut self,
fp: &CompressedFriProof<F, C::HCO, C::Hasher, D>,
fp: &CompressedFriProof<F, C::Hasher, D>,
) -> IoResult<()>
where
F: RichField + Extendable<D>,

View File

@ -127,7 +127,6 @@ mod tests {
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
@ -235,13 +234,9 @@ mod tests {
print_gate_counts: bool,
) -> Result<()>
where
InnerC::Hasher: AlgebraicHasher<F, InnerC::HCO>,
InnerC::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
[(); InnerC::HCO::WIDTH]:,
[(); InnerC::HCI::WIDTH]:,
{
let circuit_config = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(circuit_config);

View File

@ -5,7 +5,6 @@ use plonky2::field::polynomial::PolynomialCoeffs;
use plonky2::fri::proof::{FriProof, FriProofTarget};
use plonky2::gadgets::polynomial::PolynomialCoeffsExtTarget;
use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
use plonky2::hash::hashing::HashConfig;
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::target::Target;
@ -21,11 +20,11 @@ use crate::stark::Stark;
fn get_challenges<F, C, S, const D: usize>(
stark: &S,
trace_cap: &MerkleCap<F, C::HCO, C::Hasher>,
permutation_zs_cap: Option<&MerkleCap<F, C::HCO, C::Hasher>>,
quotient_polys_cap: &MerkleCap<F, C::HCO, C::Hasher>,
trace_cap: &MerkleCap<F, C::Hasher>,
permutation_zs_cap: Option<&MerkleCap<F, C::Hasher>>,
quotient_polys_cap: &MerkleCap<F, C::Hasher>,
openings: &StarkOpeningSet<F, D>,
commit_phase_merkle_caps: &[MerkleCap<F, C::HCO, C::Hasher>],
commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
final_poly: &PolynomialCoeffs<F::Extension>,
pow_witness: F,
config: &StarkConfig,
@ -35,12 +34,10 @@ where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let num_challenges = config.num_challenges;
let mut challenger = Challenger::<F, C::HCO, C::Hasher>::new();
let mut challenger = Challenger::<F, C::Hasher>::new();
challenger.observe_cap(trace_cap);
@ -79,8 +76,6 @@ impl<F, C, const D: usize> StarkProofWithPublicInputs<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
// TODO: Should be used later in compression?
#![allow(dead_code)]
@ -150,13 +145,11 @@ pub(crate) fn get_challenges_target<
config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let num_challenges = config.num_challenges;
let mut challenger = RecursiveChallenger::<F, C::HCO, C::Hasher, D>::new(builder);
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
challenger.observe_cap(trace_cap);
@ -204,9 +197,7 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
where
C::Hasher: AlgebraicHasher<F, C::HCO>,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
C::Hasher: AlgebraicHasher<F>,
{
let StarkProofTarget {
trace_cap,

View File

@ -10,7 +10,6 @@ use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
@ -150,38 +149,29 @@ fn poly_product_elementwise<F: Field>(
product
}
fn get_permutation_challenge<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
challenger: &mut Challenger<F, HC, H>,
) -> PermutationChallenge<F>
where
[(); HC::WIDTH]:,
{
fn get_permutation_challenge<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
) -> PermutationChallenge<F> {
let beta = challenger.get_challenge();
let gamma = challenger.get_challenge();
PermutationChallenge { beta, gamma }
}
fn get_permutation_challenge_set<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
challenger: &mut Challenger<F, HC, H>,
fn get_permutation_challenge_set<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
num_challenges: usize,
) -> PermutationChallengeSet<F>
where
[(); HC::WIDTH]:,
{
) -> PermutationChallengeSet<F> {
let challenges = (0..num_challenges)
.map(|_| get_permutation_challenge(challenger))
.collect();
PermutationChallengeSet { challenges }
}
pub(crate) fn get_n_permutation_challenge_sets<F: RichField, HC: HashConfig, H: Hasher<F, HC>>(
challenger: &mut Challenger<F, HC, H>,
pub(crate) fn get_n_permutation_challenge_sets<F: RichField, H: Hasher<F>>(
challenger: &mut Challenger<F, H>,
num_challenges: usize,
num_sets: usize,
) -> Vec<PermutationChallengeSet<F>>
where
[(); HC::WIDTH]:,
{
) -> Vec<PermutationChallengeSet<F>> {
(0..num_sets)
.map(|_| get_permutation_challenge_set(challenger, num_challenges))
.collect()
@ -189,16 +179,12 @@ where
fn get_permutation_challenge_target<
F: RichField + Extendable<D>,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, HC, H, D>,
) -> PermutationChallenge<Target>
where
[(); HC::WIDTH]:,
{
challenger: &mut RecursiveChallenger<F, H, D>,
) -> PermutationChallenge<Target> {
let beta = challenger.get_challenge(builder);
let gamma = challenger.get_challenge(builder);
PermutationChallenge { beta, gamma }
@ -206,17 +192,13 @@ where
fn get_permutation_challenge_set_target<
F: RichField + Extendable<D>,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, HC, H, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
) -> PermutationChallengeSet<Target>
where
[(); HC::WIDTH]:,
{
) -> PermutationChallengeSet<Target> {
let challenges = (0..num_challenges)
.map(|_| get_permutation_challenge_target(builder, challenger))
.collect();
@ -225,18 +207,14 @@ where
pub(crate) fn get_n_permutation_challenge_sets_target<
F: RichField + Extendable<D>,
HC: HashConfig,
H: AlgebraicHasher<F, HC>,
H: AlgebraicHasher<F>,
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
challenger: &mut RecursiveChallenger<F, HC, H, D>,
challenger: &mut RecursiveChallenger<F, H, D>,
num_challenges: usize,
num_sets: usize,
) -> Vec<PermutationChallengeSet<Target>>
where
[(); HC::WIDTH]:,
{
) -> Vec<PermutationChallengeSet<Target>> {
(0..num_sets)
.map(|_| get_permutation_challenge_set_target(builder, challenger, num_challenges))
.collect()

View File

@ -23,15 +23,15 @@ use crate::permutation::PermutationChallengeSet;
#[derive(Debug, Clone)]
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Merkle cap of LDEs of permutation Z values.
pub permutation_zs_cap: Option<MerkleCap<F, C::HCO, C::Hasher>>,
pub permutation_zs_cap: Option<MerkleCap<F, C::Hasher>>,
/// Merkle cap of LDEs of trace values.
pub quotient_polys_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub quotient_polys_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: FriProof<F, C::HCO, C::Hasher, D>,
pub opening_proof: FriProof<F, C::Hasher, D>,
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> StarkProof<F, C, D> {
@ -88,11 +88,11 @@ pub struct CompressedStarkProof<
const D: usize,
> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::HCO, C::Hasher>,
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: CompressedFriProof<F, C::HCO, C::Hasher, D>,
pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}
pub struct CompressedStarkProofWithPublicInputs<

View File

@ -11,7 +11,6 @@ use plonky2::field::types::Field;
use plonky2::field::zero_poly_coset::ZeroPolyOnCoset;
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericConfig;
use plonky2::timed;
@ -43,8 +42,6 @@ where
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
let degree = trace_poly_values[0].len();
let degree_bits = log2_strict(degree);

View File

@ -7,7 +7,6 @@ use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::fri::witness_util::set_fri_proof_target;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::witness::Witness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
@ -37,11 +36,9 @@ pub fn verify_stark_proof_circuit<
proof_with_pis: StarkProofWithPublicInputsTarget<D>,
inner_config: &StarkConfig,
) where
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS);
let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config);
@ -75,10 +72,9 @@ fn verify_stark_proof_with_challenges_circuit<
inner_config: &StarkConfig,
degree_bits: usize,
) where
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::HCO::WIDTH]:,
{
check_permutation_options(&stark, &proof_with_pis, &challenges).unwrap();
let one = builder.one_extension();
@ -269,7 +265,7 @@ pub fn set_stark_proof_with_pis_target<F, C: GenericConfig<D, F = F>, W, const D
stark_proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
W: Witness<F>,
{
let StarkProofWithPublicInputs {
@ -295,7 +291,7 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
proof: &StarkProof<F, C, D>,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F, C::HCO>,
C::Hasher: AlgebraicHasher<F>,
W: Witness<F>,
{
witness.set_cap_target(&proof_target.trace_cap, &proof.trace_cap);

View File

@ -6,7 +6,6 @@ use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::field::types::{Field, Sample};
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
@ -90,8 +89,6 @@ pub fn test_stark_circuit_constraints<
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
// Compute native constraint evaluation on random values.
let vars = StarkEvaluationVars {

View File

@ -7,7 +7,6 @@ use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::types::Field;
use plonky2::fri::verifier::verify_fri_proof;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::HashConfig;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::plonk_common::reduce_with_powers;
@ -32,8 +31,6 @@ pub fn verify_stark_proof<
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::HCO::WIDTH]:,
[(); C::HCI::WIDTH]:,
{
ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS);
let degree_bits = proof_with_pis.proof.recover_degree_bits(config);
@ -56,7 +53,6 @@ pub(crate) fn verify_stark_proof_with_challenges<
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::HCO::WIDTH]:,
{
validate_proof_shape(&stark, &proof_with_pis, config)?;
check_permutation_options(&stark, &proof_with_pis, &challenges)?;