From 1d013b95ddfb02519c75cc8d5e3f64684a79b269 Mon Sep 17 00:00:00 2001
From: wborgeaud
Date: Fri, 11 Feb 2022 16:22:57 +0100
Subject: [PATCH 1/4] Fix `hash_or_noop` in Merkle proof.

---
 plonky2/src/hash/merkle_proofs.rs |  2 +-
 plonky2/src/plonk/config.rs       | 11 +++++++++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/plonky2/src/hash/merkle_proofs.rs b/plonky2/src/hash/merkle_proofs.rs
index f90f0657..7ef81570 100644
--- a/plonky2/src/hash/merkle_proofs.rs
+++ b/plonky2/src/hash/merkle_proofs.rs
@@ -32,7 +32,7 @@ pub(crate) fn verify_merkle_proof<F: RichField, H: Hasher<F>>(
     proof: &MerkleProof<F, H>,
 ) -> Result<()> {
     let mut index = leaf_index;
-    let mut current_digest = H::hash_no_pad(&leaf_data);
+    let mut current_digest = H::hash_or_noop(&leaf_data);
     for &sibling_digest in proof.siblings.iter() {
         let bit = index & 1;
         index >>= 1;
diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs
index fdca7037..76891240 100644
--- a/plonky2/src/plonk/config.rs
+++ b/plonky2/src/plonk/config.rs
@@ -46,6 +46,17 @@ pub trait Hasher<F: RichField>: Sized + Clone + Debug + Eq + PartialEq {
         Self::hash_no_pad(&padded_input)
     }
 
+    /// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a
+    /// no-op.
+    fn hash_or_noop(inputs: &[F]) -> Self::Hash {
+        if inputs.len() <= 4 {
+            let inputs_bytes = HashOut::from_partial(inputs).to_bytes();
+            Self::Hash::from_bytes(&inputs_bytes)
+        } else {
+            Self::hash_no_pad(inputs)
+        }
+    }
+
     fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash;
 }

From f7256a6efc361d206879b65c5240ba7fe25d7a3c Mon Sep 17 00:00:00 2001
From: wborgeaud
Date: Fri, 11 Feb 2022 16:41:44 +0100
Subject: [PATCH 2/4] Other fixes

---
 plonky2/src/hash/merkle_tree.rs      | 4 ++--
 plonky2/src/hash/path_compression.rs | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs
index e9460c14..f9890aa5 100644
--- a/plonky2/src/hash/merkle_tree.rs
+++ b/plonky2/src/hash/merkle_tree.rs
@@ -63,7 +63,7 @@ fn fill_subtree<F: RichField, H: Hasher<F>>(
 ) -> H::Hash {
     assert_eq!(leaves.len(), digests_buf.len() / 2 + 1);
     if digests_buf.is_empty() {
-        H::hash_no_pad(&leaves[0])
+        H::hash_or_noop(&leaves[0])
     } else {
         // Layout is: left recursive output || left child digest
         // || right child digest || right recursive output.
@@ -99,7 +99,7 @@ fn fill_digests_buf<F: RichField, H: Hasher<F>>(
             .par_iter_mut()
             .zip(leaves)
             .for_each(|(cap_buf, leaf)| {
-                cap_buf.write(H::hash_no_pad(leaf));
+                cap_buf.write(H::hash_or_noop(leaf));
             });
         return;
     }
diff --git a/plonky2/src/hash/path_compression.rs b/plonky2/src/hash/path_compression.rs
index 56c355fd..fe7850f4 100644
--- a/plonky2/src/hash/path_compression.rs
+++ b/plonky2/src/hash/path_compression.rs
@@ -66,7 +66,7 @@ pub(crate) fn decompress_merkle_proofs<F: RichField, H: Hasher<F>>(
 
     for (&i, v) in leaves_indices.iter().zip(leaves_data) {
         // Observe the leaves.
-        seen.insert(i + num_leaves, H::hash_no_pad(v));
+        seen.insert(i + num_leaves, H::hash_or_noop(v));
     }
 
     // Iterators over the siblings.
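The two patches above make the Merkle verifier and the tree builder agree on how leaves are digested: a short leaf (at most 4 field elements) is stored as a padded no-op digest rather than being run through the sponge, so a verifier that calls `hash_no_pad` on such a leaf would derive a different digest and reject a valid proof. The standalone Rust sketch below illustrates the mismatch with a toy digest and a placeholder hash; the names `Digest`, `hash_no_pad`, and `hash_or_noop` only mirror the plonky2 ones, and the bodies are illustrative stand-ins, not the library's implementation.

// Toy digest: four 64-bit limbs, standing in for plonky2's 256-bit HashOut.
type Digest = [u64; 4];

// Placeholder for a real sponge hash; only its structure matters here.
fn hash_no_pad(inputs: &[u64]) -> Digest {
    let mut d = [0u64; 4];
    for (i, x) in inputs.iter().enumerate() {
        d[i % 4] = d[i % 4]
            .wrapping_mul(31)
            .wrapping_add(x.wrapping_add(0x9e3779b97f4a7c15));
    }
    d
}

// Short inputs already fit in a digest, so they are stored as-is (zero-padded).
fn hash_or_noop(inputs: &[u64]) -> Digest {
    if inputs.len() <= 4 {
        let mut d = [0u64; 4];
        d[..inputs.len()].copy_from_slice(inputs);
        d
    } else {
        hash_no_pad(inputs)
    }
}

fn main() {
    let short_leaf = [7u64, 8];
    // The tree commits to `hash_or_noop(&short_leaf)`; a verifier that re-hashes
    // with `hash_no_pad` computes a different digest and rejects a valid proof.
    assert_ne!(hash_or_noop(&short_leaf), hash_no_pad(&short_leaf));
    println!("short leaves must be checked with hash_or_noop");
}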
From 736b65b0a7d595b0e1417bd08607edcda859e548 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Sat, 12 Feb 2022 15:18:20 +0100 Subject: [PATCH 3/4] PR feedback --- plonky2/src/hash/hashing.rs | 10 ---------- plonky2/src/plonk/config.rs | 9 +++++++-- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/plonky2/src/hash/hashing.rs b/plonky2/src/hash/hashing.rs index 468bd1b8..9d043ea3 100644 --- a/plonky2/src/hash/hashing.rs +++ b/plonky2/src/hash/hashing.rs @@ -12,16 +12,6 @@ pub(crate) const SPONGE_RATE: usize = 8; pub(crate) const SPONGE_CAPACITY: usize = 4; pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY; -/// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a -/// no-op. -pub fn hash_or_noop>(inputs: &[F]) -> HashOut { - if inputs.len() <= 4 { - HashOut::from_partial(inputs) - } else { - hash_n_to_hash_no_pad::(inputs) - } -} - impl, const D: usize> CircuitBuilder { pub fn hash_or_noop>(&mut self, inputs: Vec) -> HashOutTarget { let zero = self.zero(); diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs index 76891240..40179c38 100644 --- a/plonky2/src/plonk/config.rs +++ b/plonky2/src/plonk/config.rs @@ -3,6 +3,7 @@ use std::fmt::Debug; use plonky2_field::extension_field::quadratic::QuadraticExtension; use plonky2_field::extension_field::{Extendable, FieldExtension}; use plonky2_field::goldilocks_field::GoldilocksField; +use plonky2_util::ceil_div_usize; use serde::{de::DeserializeOwned, Serialize}; use crate::hash::hash_types::HashOut; @@ -49,8 +50,12 @@ pub trait Hasher: Sized + Clone + Debug + Eq + PartialEq { /// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a /// no-op. fn hash_or_noop(inputs: &[F]) -> Self::Hash { - if inputs.len() <= 4 { - let inputs_bytes = HashOut::from_partial(inputs).to_bytes(); + if inputs.len() * ceil_div_usize(F::BITS, 8) <= Self::HASH_SIZE { + let mut inputs_bytes = inputs + .iter() + .flat_map(|x| x.to_canonical_u64().to_le_bytes()) + .collect::>(); + inputs_bytes.resize(Self::HASH_SIZE, 0); Self::Hash::from_bytes(&inputs_bytes) } else { Self::hash_no_pad(inputs) From 7af2d05828240123e70f108dc0baf67a5338788c Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Sun, 13 Feb 2022 15:04:40 +0100 Subject: [PATCH 4/4] Save allocation and add const generic bound --- plonky2/benches/merkle.rs | 7 +++++- plonky2/src/fri/oracle.rs | 17 ++++++++++---- plonky2/src/fri/proof.rs | 5 ++++- plonky2/src/fri/prover.rs | 10 +++++++-- plonky2/src/fri/verifier.rs | 21 ++++++++++------- plonky2/src/gates/gate_testing.rs | 7 ++++-- plonky2/src/hash/merkle_proofs.rs | 5 ++++- plonky2/src/hash/merkle_tree.rs | 25 +++++++++++++-------- plonky2/src/hash/path_compression.rs | 5 ++++- plonky2/src/plonk/circuit_builder.rs | 15 ++++++++++--- plonky2/src/plonk/circuit_data.rs | 30 ++++++++++++++++++++----- plonky2/src/plonk/config.rs | 18 ++++++++------- plonky2/src/plonk/proof.rs | 15 ++++++++++--- plonky2/src/plonk/prover.rs | 5 ++++- plonky2/src/plonk/recursive_verifier.rs | 15 ++++++++++--- plonky2/src/plonk/verifier.rs | 10 +++++++-- starky/src/prover.rs | 3 ++- starky/src/verifier.rs | 4 +++- 18 files changed, 160 insertions(+), 57 deletions(-) diff --git a/plonky2/benches/merkle.rs b/plonky2/benches/merkle.rs index 7445682b..8bc43730 100644 --- a/plonky2/benches/merkle.rs +++ b/plonky2/benches/merkle.rs @@ -1,3 +1,5 @@ +#![feature(generic_const_exprs)] + use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; use 
plonky2::field::goldilocks_field::GoldilocksField; use plonky2::hash::hash_types::RichField; @@ -9,7 +11,10 @@ use tynm::type_name; const ELEMS_PER_LEAF: usize = 135; -pub(crate) fn bench_merkle_tree>(c: &mut Criterion) { +pub(crate) fn bench_merkle_tree>(c: &mut Criterion) +where + [(); H::HASH_SIZE]:, +{ let mut group = c.benchmark_group(&format!( "merkle-tree<{}, {}>", type_name::(), diff --git a/plonky2/src/fri/oracle.rs b/plonky2/src/fri/oracle.rs index 0922962a..bd1e9ac5 100644 --- a/plonky2/src/fri/oracle.rs +++ b/plonky2/src/fri/oracle.rs @@ -12,7 +12,7 @@ use crate::fri::FriParams; use crate::hash::hash_types::RichField; use crate::hash::merkle_tree::MerkleTree; use crate::iop::challenger::Challenger; -use crate::plonk::config::GenericConfig; +use crate::plonk::config::{GenericConfig, Hasher}; use crate::timed; use crate::util::reducing::ReducingFactor; use crate::util::reverse_bits; @@ -43,7 +43,10 @@ impl, C: GenericConfig, const D: usize> cap_height: usize, timing: &mut TimingTree, fft_root_table: Option<&FftRootTable>, - ) -> Self { + ) -> Self + where + [(); C::Hasher::HASH_SIZE]:, + { let coeffs = timed!( timing, "IFFT", @@ -68,7 +71,10 @@ impl, C: GenericConfig, const D: usize> cap_height: usize, timing: &mut TimingTree, fft_root_table: Option<&FftRootTable>, - ) -> Self { + ) -> Self + where + [(); C::Hasher::HASH_SIZE]:, + { let degree = polynomials[0].len(); let lde_values = timed!( timing, @@ -133,7 +139,10 @@ impl, C: GenericConfig, const D: usize> challenger: &mut Challenger, fri_params: &FriParams, timing: &mut TimingTree, - ) -> FriProof { + ) -> FriProof + where + [(); C::Hasher::HASH_SIZE]:, + { assert!(D > 1, "Not implemented for D=1."); let alpha = challenger.get_extension_challenge::(); let mut alpha = ReducingFactor::new(alpha); diff --git a/plonky2/src/fri/proof.rs b/plonky2/src/fri/proof.rs index 44f74cba..9c6961a4 100644 --- a/plonky2/src/fri/proof.rs +++ b/plonky2/src/fri/proof.rs @@ -245,7 +245,10 @@ impl, H: Hasher, const D: usize> CompressedFriPr challenges: &ProofChallenges, fri_inferred_elements: FriInferredElements, params: &FriParams, - ) -> FriProof { + ) -> FriProof + where + [(); H::HASH_SIZE]:, + { let CompressedFriProof { commit_phase_merkle_caps, query_round_proofs, diff --git a/plonky2/src/fri/prover.rs b/plonky2/src/fri/prover.rs index 5cd5fdf1..5a20ab9d 100644 --- a/plonky2/src/fri/prover.rs +++ b/plonky2/src/fri/prover.rs @@ -24,7 +24,10 @@ pub fn fri_proof, C: GenericConfig, const challenger: &mut Challenger, fri_params: &FriParams, timing: &mut TimingTree, -) -> FriProof { +) -> FriProof +where + [(); C::Hasher::HASH_SIZE]:, +{ let n = lde_polynomial_values.len(); assert_eq!(lde_polynomial_coeffs.len(), n); @@ -68,7 +71,10 @@ fn fri_committed_trees, C: GenericConfig, ) -> ( Vec>, PolynomialCoeffs, -) { +) +where + [(); C::Hasher::HASH_SIZE]:, +{ let mut trees = Vec::new(); let mut shift = F::MULTIPLICATIVE_GROUP_GENERATOR; diff --git a/plonky2/src/fri/verifier.rs b/plonky2/src/fri/verifier.rs index 49cfa053..2607ab0d 100644 --- a/plonky2/src/fri/verifier.rs +++ b/plonky2/src/fri/verifier.rs @@ -56,18 +56,17 @@ pub(crate) fn fri_verify_proof_of_work, const D: us Ok(()) } -pub fn verify_fri_proof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, ->( +pub fn verify_fri_proof, C: GenericConfig, const D: usize>( instance: &FriInstanceInfo, openings: &FriOpenings, challenges: &FriChallenges, initial_merkle_caps: &[MerkleCap], proof: &FriProof, params: &FriParams, -) -> Result<()> { +) -> Result<()> +where + [(); 
C::Hasher::HASH_SIZE]:, +{ ensure!( params.final_poly_len() == proof.final_poly.len(), "Final polynomial has wrong degree." @@ -112,7 +111,10 @@ fn fri_verify_initial_proof>( x_index: usize, proof: &FriInitialTreeProof, initial_merkle_caps: &[MerkleCap], -) -> Result<()> { +) -> Result<()> +where + [(); H::HASH_SIZE]:, +{ for ((evals, merkle_proof), cap) in proof.evals_proofs.iter().zip(initial_merkle_caps) { verify_merkle_proof::(evals.clone(), x_index, cap, merkle_proof)?; } @@ -177,7 +179,10 @@ fn fri_verifier_query_round< n: usize, round_proof: &FriQueryRound, params: &FriParams, -) -> Result<()> { +) -> Result<()> +where + [(); C::Hasher::HASH_SIZE]:, +{ fri_verify_initial_proof::( x_index, &round_proof.initial_trees_proof, diff --git a/plonky2/src/gates/gate_testing.rs b/plonky2/src/gates/gate_testing.rs index ea1ef9a4..51768ba8 100644 --- a/plonky2/src/gates/gate_testing.rs +++ b/plonky2/src/gates/gate_testing.rs @@ -10,7 +10,7 @@ use crate::hash::hash_types::RichField; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; -use crate::plonk::config::GenericConfig; +use crate::plonk::config::{GenericConfig, Hasher}; use crate::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBaseBatch}; use crate::plonk::verifier::verify; use crate::util::transpose; @@ -92,7 +92,10 @@ pub fn test_eval_fns< const D: usize, >( gate: G, -) -> Result<()> { +) -> Result<()> +where + [(); C::Hasher::HASH_SIZE]:, +{ // Test that `eval_unfiltered` and `eval_unfiltered_base` are coherent. let wires_base = F::rand_vec(gate.num_wires()); let constants_base = F::rand_vec(gate.num_constants()); diff --git a/plonky2/src/hash/merkle_proofs.rs b/plonky2/src/hash/merkle_proofs.rs index 7ef81570..c3ebf406 100644 --- a/plonky2/src/hash/merkle_proofs.rs +++ b/plonky2/src/hash/merkle_proofs.rs @@ -30,7 +30,10 @@ pub(crate) fn verify_merkle_proof>( leaf_index: usize, merkle_cap: &MerkleCap, proof: &MerkleProof, -) -> Result<()> { +) -> Result<()> +where + [(); H::HASH_SIZE]:, +{ let mut index = leaf_index; let mut current_digest = H::hash_or_noop(&leaf_data); for &sibling_digest in proof.siblings.iter() { diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs index f9890aa5..5fbc441c 100644 --- a/plonky2/src/hash/merkle_tree.rs +++ b/plonky2/src/hash/merkle_tree.rs @@ -60,7 +60,10 @@ fn capacity_up_to_mut(v: &mut Vec, len: usize) -> &mut [MaybeUninit] { fn fill_subtree>( digests_buf: &mut [MaybeUninit], leaves: &[Vec], -) -> H::Hash { +) -> H::Hash +where + [(); H::HASH_SIZE]:, +{ assert_eq!(leaves.len(), digests_buf.len() / 2 + 1); if digests_buf.is_empty() { H::hash_or_noop(&leaves[0]) @@ -89,7 +92,9 @@ fn fill_digests_buf>( cap_buf: &mut [MaybeUninit], leaves: &[Vec], cap_height: usize, -) { +) where + [(); H::HASH_SIZE]:, +{ // Special case of a tree that's all cap. The usual case will panic because we'll try to split // an empty slice into chunks of `0`. (We would not need this if there was a way to split into // `blah` chunks as opposed to chunks _of_ `blah`.) 
@@ -121,7 +126,10 @@ fn fill_digests_buf>( } impl> MerkleTree { - pub fn new(leaves: Vec>, cap_height: usize) -> Self { + pub fn new(leaves: Vec>, cap_height: usize) -> Self + where + [(); H::HASH_SIZE]:, + { let log2_leaves_len = log2_strict(leaves.len()); assert!( cap_height <= log2_leaves_len, @@ -208,14 +216,13 @@ mod tests { (0..n).map(|_| F::rand_vec(k)).collect() } - fn verify_all_leaves< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + fn verify_all_leaves, C: GenericConfig, const D: usize>( leaves: Vec>, cap_height: usize, - ) -> Result<()> { + ) -> Result<()> + where + [(); C::Hasher::HASH_SIZE]:, + { let tree = MerkleTree::::new(leaves.clone(), cap_height); for (i, leaf) in leaves.into_iter().enumerate() { let proof = tree.prove(i); diff --git a/plonky2/src/hash/path_compression.rs b/plonky2/src/hash/path_compression.rs index fe7850f4..6dae3d94 100644 --- a/plonky2/src/hash/path_compression.rs +++ b/plonky2/src/hash/path_compression.rs @@ -57,7 +57,10 @@ pub(crate) fn decompress_merkle_proofs>( compressed_proofs: &[MerkleProof], height: usize, cap_height: usize, -) -> Vec> { +) -> Vec> +where + [(); H::HASH_SIZE]:, +{ let num_leaves = 1 << height; let compressed_proofs = compressed_proofs.to_vec(); let mut decompressed_proofs = Vec::with_capacity(compressed_proofs.len()); diff --git a/plonky2/src/plonk/circuit_builder.rs b/plonky2/src/plonk/circuit_builder.rs index cf89bf1a..7811c0db 100644 --- a/plonky2/src/plonk/circuit_builder.rs +++ b/plonky2/src/plonk/circuit_builder.rs @@ -610,7 +610,10 @@ impl, const D: usize> CircuitBuilder { } /// Builds a "full circuit", with both prover and verifier data. - pub fn build>(mut self) -> CircuitData { + pub fn build>(mut self) -> CircuitData + where + [(); C::Hasher::HASH_SIZE]:, + { let mut timing = TimingTree::new("preprocess", Level::Trace); let start = Instant::now(); let rate_bits = self.config.fri_config.rate_bits; @@ -776,7 +779,10 @@ impl, const D: usize> CircuitBuilder { } /// Builds a "prover circuit", with data needed to generate proofs but not verify them. - pub fn build_prover>(self) -> ProverCircuitData { + pub fn build_prover>(self) -> ProverCircuitData + where + [(); C::Hasher::HASH_SIZE]:, + { // TODO: Can skip parts of this. let CircuitData { prover_only, @@ -790,7 +796,10 @@ impl, const D: usize> CircuitBuilder { } /// Builds a "verifier circuit", with data needed to verify proofs but not generate them. - pub fn build_verifier>(self) -> VerifierCircuitData { + pub fn build_verifier>(self) -> VerifierCircuitData + where + [(); C::Hasher::HASH_SIZE]:, + { // TODO: Can skip parts of this. 
let CircuitData { verifier_only, diff --git a/plonky2/src/plonk/circuit_data.rs b/plonky2/src/plonk/circuit_data.rs index 7e667b8d..3d4ee2df 100644 --- a/plonky2/src/plonk/circuit_data.rs +++ b/plonky2/src/plonk/circuit_data.rs @@ -104,7 +104,10 @@ pub struct CircuitData, C: GenericConfig, impl, C: GenericConfig, const D: usize> CircuitData { - pub fn prove(&self, inputs: PartialWitness) -> Result> { + pub fn prove(&self, inputs: PartialWitness) -> Result> + where + [(); C::Hasher::HASH_SIZE]:, + { prove( &self.prover_only, &self.common, @@ -113,14 +116,20 @@ impl, C: GenericConfig, const D: usize> ) } - pub fn verify(&self, proof_with_pis: ProofWithPublicInputs) -> Result<()> { + pub fn verify(&self, proof_with_pis: ProofWithPublicInputs) -> Result<()> + where + [(); C::Hasher::HASH_SIZE]:, + { verify(proof_with_pis, &self.verifier_only, &self.common) } pub fn verify_compressed( &self, compressed_proof_with_pis: CompressedProofWithPublicInputs, - ) -> Result<()> { + ) -> Result<()> + where + [(); C::Hasher::HASH_SIZE]:, + { compressed_proof_with_pis.verify(&self.verifier_only, &self.common) } } @@ -144,7 +153,10 @@ pub struct ProverCircuitData< impl, C: GenericConfig, const D: usize> ProverCircuitData { - pub fn prove(&self, inputs: PartialWitness) -> Result> { + pub fn prove(&self, inputs: PartialWitness) -> Result> + where + [(); C::Hasher::HASH_SIZE]:, + { prove( &self.prover_only, &self.common, @@ -168,14 +180,20 @@ pub struct VerifierCircuitData< impl, C: GenericConfig, const D: usize> VerifierCircuitData { - pub fn verify(&self, proof_with_pis: ProofWithPublicInputs) -> Result<()> { + pub fn verify(&self, proof_with_pis: ProofWithPublicInputs) -> Result<()> + where + [(); C::Hasher::HASH_SIZE]:, + { verify(proof_with_pis, &self.verifier_only, &self.common) } pub fn verify_compressed( &self, compressed_proof_with_pis: CompressedProofWithPublicInputs, - ) -> Result<()> { + ) -> Result<()> + where + [(); C::Hasher::HASH_SIZE]:, + { compressed_proof_with_pis.verify(&self.verifier_only, &self.common) } } diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs index 40179c38..cb6d9a9b 100644 --- a/plonky2/src/plonk/config.rs +++ b/plonky2/src/plonk/config.rs @@ -3,7 +3,6 @@ use std::fmt::Debug; use plonky2_field::extension_field::quadratic::QuadraticExtension; use plonky2_field::extension_field::{Extendable, FieldExtension}; use plonky2_field::goldilocks_field::GoldilocksField; -use plonky2_util::ceil_div_usize; use serde::{de::DeserializeOwned, Serialize}; use crate::hash::hash_types::HashOut; @@ -49,13 +48,16 @@ pub trait Hasher: Sized + Clone + Debug + Eq + PartialEq { /// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a /// no-op. 
- fn hash_or_noop(inputs: &[F]) -> Self::Hash { - if inputs.len() * ceil_div_usize(F::BITS, 8) <= Self::HASH_SIZE { - let mut inputs_bytes = inputs - .iter() - .flat_map(|x| x.to_canonical_u64().to_le_bytes()) - .collect::>(); - inputs_bytes.resize(Self::HASH_SIZE, 0); + fn hash_or_noop(inputs: &[F]) -> Self::Hash + where + [(); Self::HASH_SIZE]:, + { + if inputs.len() <= 4 { + let mut inputs_bytes = [0u8; Self::HASH_SIZE]; + for i in 0..inputs.len() { + inputs_bytes[i * 8..(i + 1) * 8] + .copy_from_slice(&inputs[i].to_canonical_u64().to_le_bytes()); + } Self::Hash::from_bytes(&inputs_bytes) } else { Self::hash_no_pad(inputs) diff --git a/plonky2/src/plonk/proof.rs b/plonky2/src/plonk/proof.rs index 3de608d4..145ef694 100644 --- a/plonky2/src/plonk/proof.rs +++ b/plonky2/src/plonk/proof.rs @@ -138,7 +138,10 @@ impl, C: GenericConfig, const D: usize> challenges: &ProofChallenges, fri_inferred_elements: FriInferredElements, params: &FriParams, - ) -> Proof { + ) -> Proof + where + [(); C::Hasher::HASH_SIZE]:, + { let CompressedProof { wires_cap, plonk_zs_partial_products_cap, @@ -174,7 +177,10 @@ impl, C: GenericConfig, const D: usize> pub fn decompress( self, common_data: &CommonCircuitData, - ) -> anyhow::Result> { + ) -> anyhow::Result> + where + [(); C::Hasher::HASH_SIZE]:, + { let challenges = self.get_challenges(self.get_public_inputs_hash(), common_data)?; let fri_inferred_elements = self.get_inferred_elements(&challenges, common_data); let decompressed_proof = @@ -190,7 +196,10 @@ impl, C: GenericConfig, const D: usize> self, verifier_data: &VerifierOnlyCircuitData, common_data: &CommonCircuitData, - ) -> anyhow::Result<()> { + ) -> anyhow::Result<()> + where + [(); C::Hasher::HASH_SIZE]:, + { ensure!( self.public_inputs.len() == common_data.num_public_inputs, "Number of public inputs doesn't match circuit data." 
diff --git a/plonky2/src/plonk/prover.rs b/plonky2/src/plonk/prover.rs index d49014f0..1d99b60a 100644 --- a/plonky2/src/plonk/prover.rs +++ b/plonky2/src/plonk/prover.rs @@ -31,7 +31,10 @@ pub(crate) fn prove, C: GenericConfig, co common_data: &CommonCircuitData, inputs: PartialWitness, timing: &mut TimingTree, -) -> Result> { +) -> Result> +where + [(); C::Hasher::HASH_SIZE]:, +{ let config = &common_data.config; let num_challenges = config.num_challenges; let quotient_degree = common_data.quotient_degree(); diff --git a/plonky2/src/plonk/recursive_verifier.rs b/plonky2/src/plonk/recursive_verifier.rs index c91cbba2..6210bb29 100644 --- a/plonky2/src/plonk/recursive_verifier.rs +++ b/plonky2/src/plonk/recursive_verifier.rs @@ -187,7 +187,9 @@ mod tests { use crate::gates::noop::NoopGate; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_data::{CircuitConfig, VerifierOnlyCircuitData}; - use crate::plonk::config::{GenericConfig, KeccakGoldilocksConfig, PoseidonGoldilocksConfig}; + use crate::plonk::config::{ + GenericConfig, Hasher, KeccakGoldilocksConfig, PoseidonGoldilocksConfig, + }; use crate::plonk::proof::{CompressedProofWithPublicInputs, ProofWithPublicInputs}; use crate::plonk::prover::prove; use crate::util::timing::TimingTree; @@ -322,7 +324,10 @@ mod tests { ProofWithPublicInputs, VerifierOnlyCircuitData, CommonCircuitData, - )> { + )> + where + [(); C::Hasher::HASH_SIZE]:, + { let mut builder = CircuitBuilder::::new(config.clone()); for _ in 0..num_dummy_gates { builder.add_gate(NoopGate, vec![]); @@ -356,6 +361,7 @@ mod tests { )> where InnerC::Hasher: AlgebraicHasher, + [(); C::Hasher::HASH_SIZE]:, { let mut builder = CircuitBuilder::::new(config.clone()); let mut pw = PartialWitness::new(); @@ -407,7 +413,10 @@ mod tests { >( proof: &ProofWithPublicInputs, cd: &CommonCircuitData, - ) -> Result<()> { + ) -> Result<()> + where + [(); C::Hasher::HASH_SIZE]:, + { let proof_bytes = proof.to_bytes()?; info!("Proof length: {} bytes", proof_bytes.len()); let proof_from_bytes = ProofWithPublicInputs::from_bytes(proof_bytes, cd)?; diff --git a/plonky2/src/plonk/verifier.rs b/plonky2/src/plonk/verifier.rs index 5d69dcb1..ee0e976f 100644 --- a/plonky2/src/plonk/verifier.rs +++ b/plonky2/src/plonk/verifier.rs @@ -15,7 +15,10 @@ pub(crate) fn verify, C: GenericConfig, c proof_with_pis: ProofWithPublicInputs, verifier_data: &VerifierOnlyCircuitData, common_data: &CommonCircuitData, -) -> Result<()> { +) -> Result<()> +where + [(); C::Hasher::HASH_SIZE]:, +{ ensure!( proof_with_pis.public_inputs.len() == common_data.num_public_inputs, "Number of public inputs doesn't match circuit data." 
@@ -42,7 +45,10 @@ pub(crate) fn verify_with_challenges< challenges: ProofChallenges, verifier_data: &VerifierOnlyCircuitData, common_data: &CommonCircuitData, -) -> Result<()> { +) -> Result<()> +where + [(); C::Hasher::HASH_SIZE]:, +{ let local_constants = &proof.openings.constants; let local_wires = &proof.openings.wires; let vars = EvaluationVars { diff --git a/starky/src/prover.rs b/starky/src/prover.rs index de97ecce..e88aa619 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -7,7 +7,7 @@ use plonky2::field::zero_poly_coset::ZeroPolyOnCoset; use plonky2::fri::oracle::PolynomialBatch; use plonky2::hash::hash_types::RichField; use plonky2::iop::challenger::Challenger; -use plonky2::plonk::config::GenericConfig; +use plonky2::plonk::config::{GenericConfig, Hasher}; use plonky2::timed; use plonky2::util::timing::TimingTree; use plonky2::util::transpose; @@ -33,6 +33,7 @@ where S: Stark, [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, + [(); C::Hasher::HASH_SIZE]:, { let degree = trace.len(); let degree_bits = log2_strict(degree); diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 91a51bed..8bf1faab 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -3,7 +3,7 @@ use plonky2::field::extension_field::{Extendable, FieldExtension}; use plonky2::field::field_types::Field; use plonky2::fri::verifier::verify_fri_proof; use plonky2::hash::hash_types::RichField; -use plonky2::plonk::config::GenericConfig; +use plonky2::plonk::config::{GenericConfig, Hasher}; use plonky2::plonk::plonk_common::reduce_with_powers; use plonky2_util::log2_strict; @@ -26,6 +26,7 @@ pub fn verify< where [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, + [(); C::Hasher::HASH_SIZE]:, { let degree_bits = log2_strict(recover_degree(&proof_with_pis.proof, config)); let challenges = proof_with_pis.get_challenges(config, degree_bits)?; @@ -47,6 +48,7 @@ pub(crate) fn verify_with_challenges< where [(); S::COLUMNS]:, [(); S::PUBLIC_INPUTS]:, + [(); C::Hasher::HASH_SIZE]:, { let StarkProofWithPublicInputs { proof,
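Patch 4 threads a `where [(); C::Hasher::HASH_SIZE]:` bound through provers, verifiers, benches, and tests, and enables `#![feature(generic_const_exprs)]` where needed, because `hash_or_noop` now builds a `[u8; Self::HASH_SIZE]` buffer. The sketch below shows that bound pattern in isolation on a hypothetical trait (not the plonky2 API) and requires a nightly compiler.

#![feature(generic_const_exprs)]

// Hypothetical trait standing in for plonky2's `Hasher`; only the bound
// pattern is the point here, not the hashing itself.
trait Hasher {
    const HASH_SIZE: usize;

    // Building a `[u8; Self::HASH_SIZE]` buffer is what forces the
    // `[(); Self::HASH_SIZE]:` bound on this method.
    fn hash_or_noop(inputs: &[u64]) -> Vec<u8>
    where
        [(); Self::HASH_SIZE]:,
    {
        let mut bytes = [0u8; Self::HASH_SIZE];
        for (i, x) in inputs.iter().enumerate().take(Self::HASH_SIZE / 8) {
            bytes[i * 8..(i + 1) * 8].copy_from_slice(&x.to_le_bytes());
        }
        bytes.to_vec()
    }
}

struct Toy;

impl Hasher for Toy {
    const HASH_SIZE: usize = 32;
}

// Every generic caller has to restate the bound, which is why patch 4 adds
// `where [(); C::Hasher::HASH_SIZE]:` to so many signatures.
fn leaf_digest<H: Hasher>(leaf: &[u64]) -> Vec<u8>
where
    [(); H::HASH_SIZE]:,
{
    H::hash_or_noop(leaf)
}

fn main() {
    let digest = leaf_digest::<Toy>(&[1, 2]);
    assert_eq!(digest.len(), 32);
    assert_eq!(&digest[..8], &1u64.to_le_bytes());
}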