use std::collections::HashMap;

use itertools::izip;
use serde::{Deserialize, Serialize};

use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::{flatten, unflatten, Extendable};
use crate::field::field_types::{Field, RichField};
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::hash::hash_types::MerkleCapTarget;
use crate::hash::merkle_proofs::{MerkleProof, MerkleProofTarget};
use crate::hash::merkle_tree::MerkleCap;
use crate::hash::path_compression::{compress_merkle_proofs, decompress_merkle_proofs};
use crate::iop::target::Target;
use crate::plonk::circuit_data::CommonCircuitData;
use crate::plonk::plonk_common::PolynomialsIndexBlinding;
use crate::plonk::proof::{FriInferredElements, ProofChallenges};
use crate::polynomial::polynomial::PolynomialCoeffs;

/// Evaluations and Merkle proof produced by the prover in a FRI query step.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriQueryStep<F: Extendable<D>, const D: usize> {
    pub evals: Vec<F::Extension>,
    pub merkle_proof: MerkleProof<F>,
}

#[derive(Clone)]
pub struct FriQueryStepTarget<const D: usize> {
    pub evals: Vec<ExtensionTarget<D>>,
    pub merkle_proof: MerkleProofTarget,
}

/// Evaluations and Merkle proofs of the original set of polynomials,
/// before they are combined into a composition polynomial.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriInitialTreeProof<F: Field> {
    pub evals_proofs: Vec<(Vec<F>, MerkleProof<F>)>,
}

impl<F: Field> FriInitialTreeProof<F> {
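    /// The evaluations from the Merkle tree selected by `polynomials`, with any trailing salt
    /// elements (added for blinding when zero-knowledge is enabled) removed.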
    pub(crate) fn unsalted_evals(
        &self,
        polynomials: PolynomialsIndexBlinding,
        zero_knowledge: bool,
    ) -> &[F] {
        let evals = &self.evals_proofs[polynomials.index].0;
        &evals[..evals.len() - polynomials.salt_size(zero_knowledge)]
    }
}

#[derive(Clone)]
pub struct FriInitialTreeProofTarget {
    pub evals_proofs: Vec<(Vec<Target>, MerkleProofTarget)>,
}

impl FriInitialTreeProofTarget {
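    /// Same as `FriInitialTreeProof::unsalted_evals`, but over in-circuit targets.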
    pub(crate) fn unsalted_evals(
        &self,
        polynomials: PolynomialsIndexBlinding,
        zero_knowledge: bool,
    ) -> &[Target] {
        let evals = &self.evals_proofs[polynomials.index].0;
        &evals[..evals.len() - polynomials.salt_size(zero_knowledge)]
    }
}

/// Proof for a FRI query round.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriQueryRound<F: Extendable<D>, const D: usize> {
    pub initial_trees_proof: FriInitialTreeProof<F>,
    pub steps: Vec<FriQueryStep<F, D>>,
}

#[derive(Clone)]
pub struct FriQueryRoundTarget<const D: usize> {
    pub initial_trees_proof: FriInitialTreeProofTarget,
    pub steps: Vec<FriQueryStepTarget<D>>,
}

/// Compressed proof of the FRI query rounds.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct CompressedFriQueryRounds<F: Extendable<D>, const D: usize> {
    /// Query indices.
    pub indices: Vec<usize>,
    /// Map from initial indices `i` to the `FriInitialTreeProof` for the `i`th leaf.
    pub initial_trees_proofs: HashMap<usize, FriInitialTreeProof<F>>,
    /// For each FRI query step, a map from indices `i` to the `FriQueryStep` for the `i`th leaf.
    pub steps: Vec<HashMap<usize, FriQueryStep<F, D>>>,
}

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriProof<F: Extendable<D>, const D: usize> {
    /// A Merkle cap for each reduced polynomial in the commit phase.
    pub commit_phase_merkle_caps: Vec<MerkleCap<F>>,
    /// Query round proofs.
    pub query_round_proofs: Vec<FriQueryRound<F, D>>,
    /// The final polynomial in coefficient form.
    pub final_poly: PolynomialCoeffs<F::Extension>,
    /// Witness showing that the prover did PoW.
    pub pow_witness: F,
}

pub struct FriProofTarget<const D: usize> {
    pub commit_phase_merkle_caps: Vec<MerkleCapTarget>,
    pub query_round_proofs: Vec<FriQueryRoundTarget<D>>,
    pub final_poly: PolynomialCoeffsExtTarget<D>,
    pub pow_witness: Target,
}

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct CompressedFriProof<F: Extendable<D>, const D: usize> {
    /// A Merkle cap for each reduced polynomial in the commit phase.
    pub commit_phase_merkle_caps: Vec<MerkleCap<F>>,
    /// Compressed query rounds proof.
    pub query_round_proofs: CompressedFriQueryRounds<F, D>,
    /// The final polynomial in coefficient form.
    pub final_poly: PolynomialCoeffs<F::Extension>,
    /// Witness showing that the prover did PoW.
    pub pow_witness: F,
}

impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
    /// Compress all the Merkle paths in the FRI proof and remove duplicate indices.
    pub fn compress(
        self,
        indices: &[usize],
        common_data: &CommonCircuitData<F, D>,
    ) -> CompressedFriProof<F, D> {
        let FriProof {
            commit_phase_merkle_caps,
            query_round_proofs,
            final_poly,
            pow_witness,
            ..
        } = self;
        let cap_height = common_data.config.cap_height;
        let reduction_arity_bits = &common_data.fri_params.reduction_arity_bits;
        let num_reductions = reduction_arity_bits.len();
        let num_initial_trees = query_round_proofs[0].initial_trees_proof.evals_proofs.len();

        // "Transpose" the query round proofs, so that information for each Merkle tree is collected together.
        let mut initial_trees_indices = vec![vec![]; num_initial_trees];
        let mut initial_trees_leaves = vec![vec![]; num_initial_trees];
        let mut initial_trees_proofs = vec![vec![]; num_initial_trees];
        let mut steps_indices = vec![vec![]; num_reductions];
        let mut steps_evals = vec![vec![]; num_reductions];
        let mut steps_proofs = vec![vec![]; num_reductions];
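
        // Walk each query round once, peeling the query index apart layer by layer so that leaves
        // and Merkle proofs end up grouped per tree rather than per query round.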
        for (mut index, qrp) in indices.iter().cloned().zip(&query_round_proofs) {
            let FriQueryRound {
                initial_trees_proof,
                steps,
            } = qrp.clone();
            for (i, (leaves_data, proof)) in
                initial_trees_proof.evals_proofs.into_iter().enumerate()
            {
                initial_trees_indices[i].push(index);
                initial_trees_leaves[i].push(leaves_data);
                initial_trees_proofs[i].push(proof);
            }
            for (i, query_step) in steps.into_iter().enumerate() {
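                // The low `reduction_arity_bits[i]` bits give the position of this evaluation
                // within its coset; the remaining bits form the leaf index in the next, reduced
                // Merkle tree. E.g. with arity `2^3`, index `0b101_110` splits into position
                // `0b110` within the coset and coset index `0b101`.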
                let index_within_coset = index & ((1 << reduction_arity_bits[i]) - 1);
                index >>= reduction_arity_bits[i];
                steps_indices[i].push(index);
                let mut evals = query_step.evals;
                // Remove the element that can be inferred.
                evals.remove(index_within_coset);
                steps_evals[i].push(evals);
                steps_proofs[i].push(query_step.merkle_proof);
            }
        }

        // Compress all Merkle proofs.
        let initial_trees_proofs = initial_trees_indices
            .iter()
            .zip(initial_trees_proofs)
            .map(|(is, ps)| compress_merkle_proofs(cap_height, is, &ps))
            .collect::<Vec<_>>();
        let steps_proofs = steps_indices
            .iter()
            .zip(steps_proofs)
            .map(|(is, ps)| compress_merkle_proofs(cap_height, is, &ps))
            .collect::<Vec<_>>();

        let mut compressed_query_proofs = CompressedFriQueryRounds {
            indices: indices.to_vec(),
            initial_trees_proofs: HashMap::new(),
            steps: vec![HashMap::new(); num_reductions],
        };

        // Replace the query round proofs with the compressed versions.
        for (i, mut index) in indices.iter().copied().enumerate() {
            let initial_proof = FriInitialTreeProof {
                evals_proofs: (0..num_initial_trees)
                    .map(|j| {
                        (
                            initial_trees_leaves[j][i].clone(),
                            initial_trees_proofs[j][i].clone(),
                        )
                    })
                    .collect(),
            };
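            // Duplicate query indices collapse into a single map entry here, and likewise for the
            // per-step maps below; this is where the deduplication promised by `compress` happens.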
            compressed_query_proofs
                .initial_trees_proofs
                .entry(index)
                .or_insert(initial_proof);
            for j in 0..num_reductions {
                index >>= reduction_arity_bits[j];
                let query_step = FriQueryStep {
                    evals: steps_evals[j][i].clone(),
                    merkle_proof: steps_proofs[j][i].clone(),
                };
                compressed_query_proofs.steps[j]
                    .entry(index)
                    .or_insert(query_step);
            }
        }

        CompressedFriProof {
            commit_phase_merkle_caps,
            query_round_proofs: compressed_query_proofs,
            final_poly,
            pow_witness,
        }
    }
}

impl<F: RichField + Extendable<D>, const D: usize> CompressedFriProof<F, D> {
    /// Decompress all the Merkle paths in the FRI proof and reinsert duplicate indices.
    pub(crate) fn decompress(
        self,
        challenges: &ProofChallenges<F, D>,
        fri_inferred_elements: FriInferredElements<F, D>,
        common_data: &CommonCircuitData<F, D>,
    ) -> FriProof<F, D> {
        let CompressedFriProof {
            commit_phase_merkle_caps,
            query_round_proofs,
            final_poly,
            pow_witness,
            ..
        } = self;
        let ProofChallenges {
            fri_query_indices: indices,
            ..
        } = challenges;
        let mut fri_inferred_elements = fri_inferred_elements.0.into_iter();
        let cap_height = common_data.config.cap_height;
        let reduction_arity_bits = &common_data.fri_params.reduction_arity_bits;
        let num_reductions = reduction_arity_bits.len();
        let num_initial_trees = query_round_proofs
            .initial_trees_proofs
            .values()
            .next()
            .unwrap()
            .evals_proofs
            .len();

        // "Transpose" the query round proofs, so that information for each Merkle tree is collected together.
        let mut initial_trees_indices = vec![vec![]; num_initial_trees];
        let mut initial_trees_leaves = vec![vec![]; num_initial_trees];
        let mut initial_trees_proofs = vec![vec![]; num_initial_trees];
        let mut steps_indices = vec![vec![]; num_reductions];
        let mut steps_evals = vec![vec![]; num_reductions];
        let mut steps_proofs = vec![vec![]; num_reductions];
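        // The initial Merkle trees have `2^(degree_bits + rate_bits)` leaves; each reduction step
        // then shrinks the tree height by its arity bits, so precompute the height at every step.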
        let height = common_data.degree_bits + common_data.config.rate_bits;
        let heights = reduction_arity_bits
            .iter()
            .scan(height, |acc, &bits| {
                *acc -= bits;
                Some(*acc)
            })
            .collect::<Vec<_>>();

        // Holds the `evals` vectors that have already been reconstructed at each reduction depth.
        let mut evals_by_depth = vec![
            HashMap::<usize, Vec<_>>::new();
            common_data.fri_params.reduction_arity_bits.len()
        ];
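
        // Rebuild the per-tree data for every query index, re-inserting the evaluations that the
        // compressed proof omitted (supplied here via `fri_inferred_elements`).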
        for &(mut index) in indices {
            let initial_trees_proof = query_round_proofs.initial_trees_proofs[&index].clone();
            for (i, (leaves_data, proof)) in
                initial_trees_proof.evals_proofs.into_iter().enumerate()
            {
                initial_trees_indices[i].push(index);
                initial_trees_leaves[i].push(leaves_data);
                initial_trees_proofs[i].push(proof);
            }
            for i in 0..num_reductions {
                let index_within_coset = index & ((1 << reduction_arity_bits[i]) - 1);
                index >>= reduction_arity_bits[i];
                let FriQueryStep {
                    mut evals,
                    merkle_proof,
                } = query_round_proofs.steps[i][&index].clone();
                steps_indices[i].push(index);
                if let Some(v) = evals_by_depth[i].get(&index) {
                    // If this index has already been seen, get `evals` from the `HashMap`.
                    evals = v.to_vec();
                } else {
                    // Otherwise insert the next inferred element.
                    evals.insert(index_within_coset, fri_inferred_elements.next().unwrap());
                    evals_by_depth[i].insert(index, evals.clone());
                }
                steps_evals[i].push(flatten(&evals));
                steps_proofs[i].push(merkle_proof);
            }
        }

        // Decompress all Merkle proofs.
        let initial_trees_proofs = izip!(
            &initial_trees_leaves,
            &initial_trees_indices,
            initial_trees_proofs
        )
        .map(|(ls, is, ps)| decompress_merkle_proofs(ls, is, &ps, height, cap_height))
        .collect::<Vec<_>>();
        let steps_proofs = izip!(&steps_evals, &steps_indices, steps_proofs, heights)
            .map(|(ls, is, ps, h)| decompress_merkle_proofs(ls, is, &ps, h, cap_height))
            .collect::<Vec<_>>();

        let mut decompressed_query_proofs = Vec::with_capacity(num_reductions);
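
        // Reassemble one `FriQueryRound` per query index from the transposed, decompressed data,
        // restoring the duplicates that compression removed.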
        for i in 0..indices.len() {
            let initial_trees_proof = FriInitialTreeProof {
                evals_proofs: (0..num_initial_trees)
                    .map(|j| {
                        (
                            initial_trees_leaves[j][i].clone(),
                            initial_trees_proofs[j][i].clone(),
                        )
                    })
                    .collect(),
            };
            let steps = (0..num_reductions)
                .map(|j| FriQueryStep {
                    evals: unflatten(&steps_evals[j][i]),
                    merkle_proof: steps_proofs[j][i].clone(),
                })
                .collect();
            decompressed_query_proofs.push(FriQueryRound {
                initial_trees_proof,
                steps,
            })
        }

        FriProof {
            commit_phase_merkle_caps,
            query_round_proofs: decompressed_query_proofs,
            final_poly,
            pow_witness,
        }
    }
}