wborgeaud 2021-10-11 17:24:06 +02:00
parent acadd64371
commit ddac8026f2
5 changed files with 10 additions and 21 deletions

View File

@@ -169,6 +169,7 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
index >>= reduction_arity_bits[i];
steps_indices[i].push(index);
let mut evals = query_step.evals;
// Remove the element that can be inferred.
evals.remove(index_within_coset);
steps_evals[i].push(evals);
steps_proofs[i].push(query_step.merkle_proof);
@@ -252,7 +253,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CompressedFriProof<F, D> {
let mut fri_query_inferred_elements = if let Some(v) = fri_query_inferred_elements {
v.iter().copied()
} else {
panic!()
panic!("Proof challenges must be computed with `CompressedProofWithPublicInputs::get_challenges()`.")
};
let cap_height = common_data.config.cap_height;
let reduction_arity_bits = &common_data.fri_params.reduction_arity_bits;
@@ -281,6 +282,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CompressedFriProof<F, D> {
})
.collect::<Vec<_>>();
// Holds the `evals` vectors that have already been reconstructed at each reduction depth.
let mut evals_by_depth = vec![
HashMap::<usize, Vec<_>>::new();
common_data.fri_params.reduction_arity_bits.len()
@@ -303,8 +305,10 @@ impl<F: RichField + Extendable<D>, const D: usize> CompressedFriProof<F, D> {
} = query_round_proofs.steps[i][&index].clone();
steps_indices[i].push(index);
if let Some(v) = evals_by_depth[i].get(&index) {
// If this index has already been seen, get `evals` from the `HashMap`.
evals = v.to_vec();
} else {
// Otherwise insert the next inferred element.
evals.insert(
index_within_coset,
fri_query_inferred_elements.next().unwrap(),

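The hunk above is where compression drops, per query step, the one evaluation the verifier can re-derive, and where decompression re-inserts it at `index_within_coset`. A minimal sketch of that remove/insert round trip on a plain `Vec` (the free functions and the standalone `inferred` value are illustrative, not the crate's API; in the real proof the removed value is never stored and comes back via `fri_query_inferred_elements` from `get_challenges()`):

fn compress_evals<T>(mut evals: Vec<T>, index_within_coset: usize) -> (Vec<T>, T) {
    // Drop the element the verifier can infer; it is returned here only to show the round trip.
    let inferred = evals.remove(index_within_coset);
    (evals, inferred)
}

fn decompress_evals<T>(mut evals: Vec<T>, index_within_coset: usize, inferred: T) -> Vec<T> {
    // Re-insert the inferred element at its original position within the coset.
    evals.insert(index_within_coset, inferred);
    evals
}

fn main() {
    let evals = vec![10u64, 11, 12, 13];
    let (short, inferred) = compress_evals(evals.clone(), 2);
    assert_eq!(short, vec![10, 11, 13]);
    assert_eq!(decompress_evals(short, 2, inferred), evals);
}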
View File

@@ -157,10 +157,6 @@ pub(crate) fn fri_combine_initial<F: RichField + Extendable<D>, const D: usize>(
let config = &common_data.config;
assert!(D > 1, "Not implemented for D=1.");
let degree_log = common_data.degree_bits;
// debug_assert_eq!(
// degree_log,
// common_data.config.cap_height + proof.evals_proofs[0].1.siblings.len() - config.rate_bits
// );
let subgroup_x = F::Extension::from_basefield(subgroup_x);
let mut alpha = ReducingFactor::new(alpha);
let mut sum = F::Extension::ZERO;

View File

@@ -152,12 +152,15 @@ impl<F: RichField + Extendable<D>, const D: usize> CompressedProofWithPublicInpu
.map(|_| challenger.get_challenge().to_canonical_u64() as usize % lde_size)
.collect::<Vec<_>>();
let mut fri_query_inferred_elements = Vec::new();
// Holds the indices that have already been seen at each reduction depth.
let mut seen_indices_by_depth =
vec![HashSet::new(); common_data.fri_params.reduction_arity_bits.len()];
let precomputed_reduced_evals =
PrecomputedReducedEvals::from_os_and_alpha(&self.proof.openings, fri_alpha);
let mut fri_query_inferred_elements = Vec::new();
let log_n = common_data.degree_bits + common_data.config.rate_bits;
// Simulate the proof verification and collect the inferred elements.
// The content of the loop is basically the same as the `fri_verifier_query_round` function.
for &(mut x_index) in &fri_query_indices {
let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR
* F::primitive_root_of_unity(log_n).exp_u64(reverse_bits(x_index, log_n) as u64);
@@ -180,6 +183,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CompressedProofWithPublicInpu
.enumerate()
{
if !seen_indices_by_depth[i].insert(x_index >> arity_bits) {
// If this index has already been seen, we can skip the rest of the reductions.
break;
}
fri_query_inferred_elements.push(old_eval);

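The comments added here describe the dedup pass: each query index is reduced depth by depth, and once a reduced index has already been seen at some depth, the remaining inferred elements for that query were already collected by an earlier query. A minimal sketch of the pattern, leaning on `HashSet::insert` returning `false` for duplicates (the function name and the collected pairs are illustrative; the real loop pushes `old_eval` into `fri_query_inferred_elements`):

use std::collections::HashSet;

fn collect_reduced_indices(query_indices: &[usize], reduction_arity_bits: &[usize]) -> Vec<(usize, usize)> {
    // One set of already-seen reduced indices per reduction depth.
    let mut seen_indices_by_depth = vec![HashSet::new(); reduction_arity_bits.len()];
    let mut collected = Vec::new();
    for &start in query_indices {
        let mut x_index = start;
        for (depth, &arity_bits) in reduction_arity_bits.iter().enumerate() {
            if !seen_indices_by_depth[depth].insert(x_index >> arity_bits) {
                // This coset was already handled by an earlier query: skip the rest of the reductions.
                break;
            }
            collected.push((depth, x_index >> arity_bits));
            x_index >>= arity_bits;
        }
    }
    collected
}

fn main() {
    // Both queries land in coset 0b10 at the first reduction, so the second one stops immediately.
    assert_eq!(collect_reduced_indices(&[0b1011, 0b1001], &[2, 2]), vec![(0, 0b10), (1, 0)]);
}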
View File

@@ -168,7 +168,6 @@ impl<F: RichField + Extendable<D>, const D: usize> CompressedProofWithPublicInpu
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<()> {
let challenges = self.get_challenges(common_data)?;
dbg!(&challenges.fri_query_inferred_elements);
let compressed_proof = self.proof.decompress(&challenges, common_data);
verify_with_challenges(
ProofWithPublicInputs {
@@ -329,18 +328,6 @@ mod tests {
// Verify that `decompress ∘ compress = identity`.
let compressed_proof = proof.clone().compress(&data.common)?;
let decompressed_compressed_proof = compressed_proof.clone().decompress(&data.common)?;
for i in 0..proof.proof.opening_proof.query_round_proofs.len() {
let qrp = proof.proof.opening_proof.query_round_proofs[i].clone();
let dqrp = decompressed_compressed_proof
.proof
.opening_proof
.query_round_proofs[i]
.clone();
for j in 0..qrp.steps.len() {
dbg!(&qrp.steps[j].evals);
dbg!(&dqrp.steps[j].evals);
}
}
assert_eq!(proof, decompressed_compressed_proof);
verify(proof, &data.verifier_only, &data.common)?;

View File

@@ -242,7 +242,6 @@ impl Buffer {
&mut self,
fqs: &FriQueryStep<F, D>,
) -> Result<()> {
dbg!(self.0.position());
self.write_field_ext_vec::<F, D>(&fqs.evals)?;
self.write_merkle_proof(&fqs.merkle_proof)
}
@@ -251,7 +250,6 @@ impl Buffer {
arity: usize,
compressed: bool,
) -> Result<FriQueryStep<F, D>> {
dbg!(self.0.position());
let evals = self.read_field_ext_vec::<F, D>(arity - if compressed { 1 } else { 0 })?;
let merkle_proof = self.read_merkle_proof()?;
Ok(FriQueryStep {
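The last two hunks only drop `dbg!` calls, but they sit next to the serialization rule that makes the compression above consistent: a compressed `FriQueryStep` stores one fewer evaluation per coset, which is what `arity - if compressed { 1 } else { 0 }` encodes. A minimal sketch of that asymmetry with `u64` stand-ins for extension-field elements (the reader/writer below is hypothetical, not the crate's `Buffer`):

fn write_evals(out: &mut Vec<u64>, evals: &[u64]) {
    // The writer serializes whatever `evals` holds; for a compressed proof the
    // inferrable element was already removed before writing.
    out.extend_from_slice(evals);
}

fn read_evals(input: &[u64], arity: usize, compressed: bool) -> Vec<u64> {
    // A compressed query step stores `arity - 1` evaluations per coset.
    let n = arity - if compressed { 1 } else { 0 };
    input[..n].to_vec()
}

fn main() {
    let compressed_evals = vec![10u64, 11, 13]; // arity 4, the element at index 2 was removed
    let mut buf = Vec::new();
    write_evals(&mut buf, &compressed_evals);
    assert_eq!(read_evals(&buf, 4, true), compressed_evals);
}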