diff --git a/src/fri/proof.rs b/src/fri/proof.rs index 264e2e9e..03239629 100644 --- a/src/fri/proof.rs +++ b/src/fri/proof.rs @@ -169,6 +169,7 @@ impl, const D: usize> FriProof { index >>= reduction_arity_bits[i]; steps_indices[i].push(index); let mut evals = query_step.evals; + // Remove the element that can be inferred. evals.remove(index_within_coset); steps_evals[i].push(evals); steps_proofs[i].push(query_step.merkle_proof); @@ -252,7 +253,7 @@ impl, const D: usize> CompressedFriProof { let mut fri_query_inferred_elements = if let Some(v) = fri_query_inferred_elements { v.iter().copied() } else { - panic!() + panic!("Proof challenges must be computed with `CompressedProofWithPublicInputs::get_challenges()`.") }; let cap_height = common_data.config.cap_height; let reduction_arity_bits = &common_data.fri_params.reduction_arity_bits; @@ -281,6 +282,7 @@ impl, const D: usize> CompressedFriProof { }) .collect::>(); + // Holds the `evals` vectors that have already been reconstructed at each reduction depth. let mut evals_by_depth = vec![ HashMap::>::new(); common_data.fri_params.reduction_arity_bits.len() @@ -303,8 +305,10 @@ impl, const D: usize> CompressedFriProof { } = query_round_proofs.steps[i][&index].clone(); steps_indices[i].push(index); if let Some(v) = evals_by_depth[i].get(&index) { + // If this index has already been seen, get `evals` from the `HashMap`. evals = v.to_vec(); } else { + // Otherwise insert the next inferred element. 
evals.insert( index_within_coset, fri_query_inferred_elements.next().unwrap(), diff --git a/src/fri/verifier.rs b/src/fri/verifier.rs index dce20c38..add03a9d 100644 --- a/src/fri/verifier.rs +++ b/src/fri/verifier.rs @@ -157,10 +157,6 @@ pub(crate) fn fri_combine_initial, const D: usize>( let config = &common_data.config; assert!(D > 1, "Not implemented for D=1."); let degree_log = common_data.degree_bits; - // debug_assert_eq!( - // degree_log, - // common_data.config.cap_height + proof.evals_proofs[0].1.siblings.len() - config.rate_bits - // ); let subgroup_x = F::Extension::from_basefield(subgroup_x); let mut alpha = ReducingFactor::new(alpha); let mut sum = F::Extension::ZERO; diff --git a/src/plonk/get_challenges.rs b/src/plonk/get_challenges.rs index c579aa46..0f6888b3 100644 --- a/src/plonk/get_challenges.rs +++ b/src/plonk/get_challenges.rs @@ -152,12 +152,15 @@ impl, const D: usize> CompressedProofWithPublicInpu .map(|_| challenger.get_challenge().to_canonical_u64() as usize % lde_size) .collect::>(); + let mut fri_query_inferred_elements = Vec::new(); + // Holds the indices that have already been seen at each reduction depth. let mut seen_indices_by_depth = vec![HashSet::new(); common_data.fri_params.reduction_arity_bits.len()]; let precomputed_reduced_evals = PrecomputedReducedEvals::from_os_and_alpha(&self.proof.openings, fri_alpha); - let mut fri_query_inferred_elements = Vec::new(); let log_n = common_data.degree_bits + common_data.config.rate_bits; + // Simulate the proof verification and collect the inferred elements. + // The content of the loop is basically the same as the `fri_verifier_query_round` function. 
for &(mut x_index) in &fri_query_indices { let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR * F::primitive_root_of_unity(log_n).exp_u64(reverse_bits(x_index, log_n) as u64); @@ -180,6 +183,7 @@ impl, const D: usize> CompressedProofWithPublicInpu .enumerate() { if !seen_indices_by_depth[i].insert(x_index >> arity_bits) { + // If this index has already been seen, we can skip the rest of the reductions. break; } fri_query_inferred_elements.push(old_eval); diff --git a/src/plonk/proof.rs b/src/plonk/proof.rs index ecc4e3a4..5a0abfd7 100644 --- a/src/plonk/proof.rs +++ b/src/plonk/proof.rs @@ -168,7 +168,6 @@ impl, const D: usize> CompressedProofWithPublicInpu common_data: &CommonCircuitData, ) -> anyhow::Result<()> { let challenges = self.get_challenges(common_data)?; - dbg!(&challenges.fri_query_inferred_elements); let compressed_proof = self.proof.decompress(&challenges, common_data); verify_with_challenges( ProofWithPublicInputs { @@ -329,18 +328,6 @@ mod tests { // Verify that `decompress ∘ compress = identity`. 
let compressed_proof = proof.clone().compress(&data.common)?; let decompressed_compressed_proof = compressed_proof.clone().decompress(&data.common)?; - for i in 0..proof.proof.opening_proof.query_round_proofs.len() { - let qrp = proof.proof.opening_proof.query_round_proofs[i].clone(); - let dqrp = decompressed_compressed_proof - .proof - .opening_proof - .query_round_proofs[i] - .clone(); - for j in 0..qrp.steps.len() { - dbg!(&qrp.steps[j].evals); - dbg!(&dqrp.steps[j].evals); - } - } assert_eq!(proof, decompressed_compressed_proof); verify(proof, &data.verifier_only, &data.common)?; diff --git a/src/util/serialization.rs b/src/util/serialization.rs index e9164687..172b4d67 100644 --- a/src/util/serialization.rs +++ b/src/util/serialization.rs @@ -242,7 +242,6 @@ impl Buffer { &mut self, fqs: &FriQueryStep, ) -> Result<()> { - dbg!(self.0.position()); self.write_field_ext_vec::(&fqs.evals)?; self.write_merkle_proof(&fqs.merkle_proof) } @@ -251,7 +250,6 @@ impl Buffer { arity: usize, compressed: bool, ) -> Result> { - dbg!(self.0.position()); let evals = self.read_field_ext_vec::(arity - if compressed { 1 } else { 0 })?; let merkle_proof = self.read_merkle_proof()?; Ok(FriQueryStep {