Merge pull request #170 from mir-protocol/merkle_cap

Replace Merkle roots with Merkle caps
wborgeaud 2021-08-11 08:40:12 +02:00 committed by GitHub
commit debc0e9cb3
18 changed files with 329 additions and 153 deletions
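
For orientation, here is a minimal std-only sketch of the idea behind the change (not the plonky2 API; `mix` and `merkle_cap` are made-up stand-ins for the real hash and tree code). A cap of height `h` is the layer of `2^h` digests sitting `h` levels below the root; committing to the cap instead of the root makes every Merkle path `h` hashes shorter, at the cost of a `2^h`-element commitment.

```rust
// Toy illustration only: real leaf data is hashed first, and the real
// two-to-one hash is the GMiMC-based `compress`, not this mixer.
fn mix(a: u64, b: u64) -> u64 {
    a.rotate_left(17) ^ b.wrapping_mul(0x9E37_79B9_7F4A_7C15)
}

/// Hash the leaves upward until only 2^cap_height nodes remain; that layer is the cap.
fn merkle_cap(leaves: &[u64], cap_height: usize) -> Vec<u64> {
    let mut layer = leaves.to_vec();
    while layer.len() > 1 << cap_height {
        layer = layer.chunks(2).map(|p| mix(p[0], p[1])).collect();
    }
    layer
}

fn main() {
    let leaves: Vec<u64> = (0..16).collect(); // 2^4 leaves
    let cap = merkle_cap(&leaves, 1); // cap of height 1 => 2 digests
    assert_eq!(cap.len(), 2);
    // A proof for leaf i now carries 4 - 1 = 3 siblings, and the final digest
    // is checked against cap[i >> 3] instead of a single root.
    println!("cap = {:?}", cap);
}
```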

View File

@ -27,10 +27,12 @@ fn bench_prove<F: Field + Extendable<D>, const D: usize>() -> Result<()> {
rate_bits: 3,
num_challenges: 3,
zero_knowledge: false,
cap_height: 1,
fri_config: FriConfig {
proof_of_work_bits: 20,
reduction_arity_bits: vec![2, 2, 2, 2, 2, 2],
num_query_rounds: 35,
cap_height: 1,
},
};

View File

@ -3,8 +3,7 @@ use rayon::prelude::*;
use crate::field::extension_field::Extendable;
use crate::field::field_types::Field;
use crate::fri::proof::FriProof;
use crate::fri::{prover::fri_proof, verifier::verify_fri_proof};
use crate::hash::hash_types::HashOut;
use crate::fri::prover::fri_proof;
use crate::hash::merkle_tree::MerkleTree;
use crate::iop::challenger::Challenger;
use crate::plonk::circuit_data::CommonCircuitData;
@ -34,6 +33,7 @@ impl<F: Field> PolynomialBatchCommitment<F> {
values: Vec<PolynomialValues<F>>,
rate_bits: usize,
blinding: bool,
cap_height: usize,
timing: &mut TimingTree,
) -> Self {
let coeffs = timed!(
@ -42,7 +42,7 @@ impl<F: Field> PolynomialBatchCommitment<F> {
values.par_iter().map(|v| v.ifft()).collect::<Vec<_>>()
);
Self::from_coeffs(coeffs, rate_bits, blinding, timing)
Self::from_coeffs(coeffs, rate_bits, blinding, cap_height, timing)
}
/// Creates a list polynomial commitment for the polynomials `polynomials`.
@ -50,6 +50,7 @@ impl<F: Field> PolynomialBatchCommitment<F> {
polynomials: Vec<PolynomialCoeffs<F>>,
rate_bits: usize,
blinding: bool,
cap_height: usize,
timing: &mut TimingTree,
) -> Self {
let degree = polynomials[0].len();
@ -61,7 +62,11 @@ impl<F: Field> PolynomialBatchCommitment<F> {
let mut leaves = timed!(timing, "transpose LDEs", transpose(&lde_values));
reverse_index_bits_in_place(&mut leaves);
let merkle_tree = timed!(timing, "build Merkle tree", MerkleTree::new(leaves, false));
let merkle_tree = timed!(
timing,
"build Merkle tree",
MerkleTree::new(leaves, cap_height, false)
);
Self {
polynomials,
@ -240,7 +245,9 @@ mod tests {
use anyhow::Result;
use super::*;
use crate::fri::verifier::verify_fri_proof;
use crate::fri::FriConfig;
use crate::hash::hash_types::HashOut;
use crate::plonk::circuit_data::CircuitConfig;
fn gen_random_test_case<F: Field + Extendable<D>, const D: usize>(
@ -274,6 +281,7 @@ mod tests {
proof_of_work_bits: 2,
reduction_arity_bits: vec![2, 3, 1, 2],
num_query_rounds: 3,
cap_height: 1,
};
// We only care about `fri_config`, `num_constants`, and `num_routed_wires` here.
let common_data = CommonCircuitData {
@ -298,6 +306,7 @@ mod tests {
gen_random_test_case(ks[i], degree_bits),
common_data.config.rate_bits,
PlonkPolynomials::polynomials(i).blinding,
common_data.config.cap_height,
&mut TimingTree::default(),
)
})
@ -312,17 +321,17 @@ mod tests {
&mut TimingTree::default(),
);
let merkle_roots = &[
lpcs[0].merkle_tree.root,
lpcs[1].merkle_tree.root,
lpcs[2].merkle_tree.root,
lpcs[3].merkle_tree.root,
let merkle_caps = &[
lpcs[0].merkle_tree.cap.clone(),
lpcs[1].merkle_tree.cap.clone(),
lpcs[2].merkle_tree.cap.clone(),
lpcs[3].merkle_tree.cap.clone(),
];
verify_fri_proof(
&os,
zeta,
merkle_roots,
merkle_caps,
&proof,
&mut Challenger::new(),
&common_data,

View File

@ -20,6 +20,8 @@ pub struct FriConfig {
/// Number of query rounds to perform.
pub num_query_rounds: usize,
pub cap_height: usize,
}
fn fri_delta(rate_log: usize, conjecture: bool) -> f64 {

View File

@ -4,8 +4,9 @@ use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::field_types::Field;
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::hash::hash_types::{HashOut, HashOutTarget};
use crate::hash::hash_types::MerkleCapTarget;
use crate::hash::merkle_proofs::{MerkleProof, MerkleProofTarget};
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::Target;
use crate::plonk::plonk_common::PolynomialsIndexBlinding;
use crate::polynomial::polynomial::PolynomialCoeffs;
@ -76,8 +77,8 @@ pub struct FriQueryRoundTarget<const D: usize> {
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "")]
pub struct FriProof<F: Extendable<D>, const D: usize> {
/// A Merkle root for each reduced polynomial in the commit phase.
pub commit_phase_merkle_roots: Vec<HashOut<F>>,
/// A Merkle cap for each reduced polynomial in the commit phase.
pub commit_phase_merkle_caps: Vec<MerkleCap<F>>,
/// Query round proofs.
pub query_round_proofs: Vec<FriQueryRound<F, D>>,
/// The final polynomial in coefficient form.
@ -87,7 +88,7 @@ pub struct FriProof<F: Extendable<D>, const D: usize> {
}
pub struct FriProofTarget<const D: usize> {
pub commit_phase_merkle_roots: Vec<HashOutTarget>,
pub commit_phase_merkle_caps: Vec<MerkleCapTarget>,
pub query_round_proofs: Vec<FriQueryRoundTarget<D>>,
pub final_poly: PolynomialCoeffsExtTarget<D>,
pub pow_witness: Target,

View File

@ -53,7 +53,7 @@ pub fn fri_proof<F: Field + Extendable<D>, const D: usize>(
fri_prover_query_rounds(initial_merkle_trees, &trees, challenger, n, config);
FriProof {
commit_phase_merkle_roots: trees.iter().map(|t| t.root).collect(),
commit_phase_merkle_caps: trees.iter().map(|t| t.cap.clone()).collect(),
query_round_proofs,
final_poly: final_coeffs,
pow_witness,
@ -80,10 +80,11 @@ fn fri_committed_trees<F: Field + Extendable<D>, const D: usize>(
.par_chunks(arity)
.map(|chunk: &[F::Extension]| flatten(chunk))
.collect(),
config.cap_height,
false,
);
challenger.observe_hash(&tree.root);
challenger.observe_cap(&tree.cap);
trees.push(tree);
let beta = challenger.get_extension_challenge();
@ -153,9 +154,7 @@ fn fri_prover_query_round<F: Field + Extendable<D>, const D: usize>(
.collect::<Vec<_>>();
for (i, tree) in trees.iter().enumerate() {
let arity_bits = config.reduction_arity_bits[i];
let arity = 1 << arity_bits;
let mut evals = unflatten(tree.get(x_index >> arity_bits));
evals.remove(x_index & (arity - 1));
let evals = unflatten(tree.get(x_index >> arity_bits));
let merkle_proof = tree.prove(x_index >> arity_bits);
query_steps.push(FriQueryStep {

View File

@ -3,7 +3,7 @@ use crate::field::extension_field::Extendable;
use crate::field::field_types::Field;
use crate::fri::proof::{FriInitialTreeProofTarget, FriProofTarget, FriQueryRoundTarget};
use crate::fri::FriConfig;
use crate::hash::hash_types::HashOutTarget;
use crate::hash::hash_types::MerkleCapTarget;
use crate::iop::challenger::RecursiveChallenger;
use crate::iop::target::Target;
use crate::plonk::circuit_builder::CircuitBuilder;
@ -83,7 +83,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
os: &OpeningSetTarget<D>,
// Point at which the PLONK polynomials are opened.
zeta: ExtensionTarget<D>,
initial_merkle_roots: &[HashOutTarget],
initial_merkle_caps: &[MerkleCapTarget],
proof: &FriProofTarget<D>,
challenger: &mut RecursiveChallenger,
common_data: &CommonCircuitData<F, D>,
@ -108,10 +108,10 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self,
"recover the random betas used in the FRI reductions.",
proof
.commit_phase_merkle_roots
.commit_phase_merkle_caps
.iter()
.map(|root| {
challenger.observe_hash(root);
.map(|cap| {
challenger.observe_cap(cap);
challenger.get_extension_challenge(self)
})
.collect::<Vec<_>>()
@ -160,7 +160,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
zeta,
alpha,
precomputed_reduced_evals,
initial_merkle_roots,
initial_merkle_caps,
proof,
challenger,
n,
@ -176,18 +176,25 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
&mut self,
x_index_bits: &[Target],
proof: &FriInitialTreeProofTarget,
initial_merkle_roots: &[HashOutTarget],
initial_merkle_caps: &[MerkleCapTarget],
cap_index: Target,
) {
for (i, ((evals, merkle_proof), &root)) in proof
for (i, ((evals, merkle_proof), cap)) in proof
.evals_proofs
.iter()
.zip(initial_merkle_roots)
.zip(initial_merkle_caps)
.enumerate()
{
with_context!(
self,
&format!("verify {}'th initial Merkle proof", i),
self.verify_merkle_proof(evals.clone(), x_index_bits, root, merkle_proof)
self.verify_merkle_proof_with_cap_index(
evals.clone(),
x_index_bits,
cap_index,
cap,
merkle_proof
)
);
}
}
@ -203,8 +210,12 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
) -> ExtensionTarget<D> {
assert!(D > 1, "Not implemented for D=1.");
let config = self.config.clone();
let degree_log = proof.evals_proofs[0].1.siblings.len() - config.rate_bits;
let one = self.one_extension();
let degree_log = common_data.degree_bits;
debug_assert_eq!(
degree_log,
common_data.config.cap_height + proof.evals_proofs[0].1.siblings.len()
- config.rate_bits
);
let subgroup_x = self.convert_to_ext(subgroup_x);
let vanish_zeta = self.sub_extension(subgroup_x, zeta);
let mut alpha = ReducingFactorTarget::new(alpha);
@ -266,7 +277,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
zeta: ExtensionTarget<D>,
alpha: ExtensionTarget<D>,
precomputed_reduced_evals: PrecomputedReducedEvalsTarget<D>,
initial_merkle_roots: &[HashOutTarget],
initial_merkle_caps: &[MerkleCapTarget],
proof: &FriProofTarget<D>,
challenger: &mut RecursiveChallenger,
n: usize,
@ -279,6 +290,10 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
// TODO: Do we need to range check `x_index` to a target smaller than `p`?
let x_index = challenger.get_challenge(self);
let mut x_index_bits = self.low_bits(x_index, n_log, 64);
let cap_index = self.le_sum(
x_index_bits[x_index_bits.len() - common_data.config.fri_config.cap_height..]
.into_iter(),
);
let mut domain_size = n;
with_context!(
self,
@ -286,7 +301,8 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self.fri_verify_initial_proof(
&x_index_bits,
&round_proof.initial_trees_proof,
initial_merkle_roots,
initial_merkle_caps,
cap_index
)
);
let mut old_x_index_bits = Vec::new();
@ -331,19 +347,20 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
)
)
};
let mut evals = round_proof.steps[i].evals.clone();
let evals = round_proof.steps[i].evals.clone();
// Check that the computed evaluation P(y) matches the one the prover now includes in the evaluation vector.
let high_x_index_bits = x_index_bits.split_off(arity_bits);
old_x_index_bits = x_index_bits;
let low_x_index = self.le_sum(old_x_index_bits.iter());
evals = self.insert(low_x_index, e_x, evals);
self.random_access(low_x_index, e_x, evals.clone());
with_context!(
self,
"verify FRI round Merkle proof.",
self.verify_merkle_proof(
self.verify_merkle_proof_with_cap_index(
flatten_target(&evals),
&high_x_index_bits,
proof.commit_phase_merkle_roots[i],
cap_index,
&proof.commit_phase_merkle_caps[i],
&round_proof.steps[i].merkle_proof,
)
);
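
A plain-integer analogue of the `cap_index` computation above may help (a sketch only; `cap_index` is an illustrative helper, not circuit code): since `x_index_bits` is little-endian, its last `cap_height` bits are the most significant ones, and `le_sum` over them recovers exactly which cap entry the Merkle path must terminate at.

```rust
// Keep only the top `cap_height` bits of an `n_log`-bit query index; this is
// what `le_sum` over the high index bits computes inside the circuit.
fn cap_index(x_index: usize, n_log: usize, cap_height: usize) -> usize {
    x_index >> (n_log - cap_height)
}

fn main() {
    // A domain of 2^10 points with a cap of height 2: each Merkle path has
    // 10 - 2 = 8 siblings and ends at one of the 4 cap entries.
    assert_eq!(cap_index(0b11_0100_1101, 10, 2), 0b11);
    assert_eq!(cap_index(0b00_0100_1101, 10, 2), 0b00);
}
```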

View File

@ -5,9 +5,9 @@ use crate::field::field_types::Field;
use crate::field::interpolation::{barycentric_weights, interpolate, interpolate2};
use crate::fri::proof::{FriInitialTreeProof, FriProof, FriQueryRound};
use crate::fri::FriConfig;
use crate::hash::hash_types::HashOut;
use crate::hash::hashing::hash_n_to_1;
use crate::hash::merkle_proofs::verify_merkle_proof;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::challenger::Challenger;
use crate::plonk::circuit_data::CommonCircuitData;
use crate::plonk::plonk_common::PlonkPolynomials;
@ -73,7 +73,7 @@ pub fn verify_fri_proof<F: Field + Extendable<D>, const D: usize>(
os: &OpeningSet<F, D>,
// Point at which the PLONK polynomials are opened.
zeta: F::Extension,
initial_merkle_roots: &[HashOut<F>],
initial_merkle_caps: &[MerkleCap<F>],
proof: &FriProof<F, D>,
challenger: &mut Challenger<F>,
common_data: &CommonCircuitData<F, D>,
@ -95,10 +95,10 @@ pub fn verify_fri_proof<F: Field + Extendable<D>, const D: usize>(
// Recover the random betas used in the FRI reductions.
let betas = proof
.commit_phase_merkle_roots
.commit_phase_merkle_caps
.iter()
.map(|root| {
challenger.observe_hash(root);
.map(|cap| {
challenger.observe_cap(cap);
challenger.get_extension_challenge()
})
.collect::<Vec<_>>();
@ -123,7 +123,7 @@ pub fn verify_fri_proof<F: Field + Extendable<D>, const D: usize>(
zeta,
alpha,
precomputed_reduced_evals,
initial_merkle_roots,
initial_merkle_caps,
&proof,
challenger,
n,
@ -139,10 +139,10 @@ pub fn verify_fri_proof<F: Field + Extendable<D>, const D: usize>(
fn fri_verify_initial_proof<F: Field>(
x_index: usize,
proof: &FriInitialTreeProof<F>,
initial_merkle_roots: &[HashOut<F>],
initial_merkle_caps: &[MerkleCap<F>],
) -> Result<()> {
for ((evals, merkle_proof), &root) in proof.evals_proofs.iter().zip(initial_merkle_roots) {
verify_merkle_proof(evals.clone(), x_index, root, merkle_proof, false)?;
for ((evals, merkle_proof), cap) in proof.evals_proofs.iter().zip(initial_merkle_caps) {
verify_merkle_proof(evals.clone(), x_index, cap, merkle_proof, false)?;
}
Ok(())
@ -150,7 +150,7 @@ fn fri_verify_initial_proof<F: Field>(
/// Holds the reduced (by `alpha`) evaluations at `zeta` for the polynomial opened just at
/// zeta, for `Z` at zeta and for `Z` at `g*zeta`.
#[derive(Copy, Clone)]
#[derive(Copy, Clone, Debug)]
struct PrecomputedReducedEvals<F: Extendable<D>, const D: usize> {
pub single: F::Extension,
pub zs: F::Extension,
@ -189,7 +189,11 @@ fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
) -> F::Extension {
let config = &common_data.config;
assert!(D > 1, "Not implemented for D=1.");
let degree_log = proof.evals_proofs[0].1.siblings.len() - config.rate_bits;
let degree_log = common_data.degree_bits;
debug_assert_eq!(
degree_log,
common_data.config.cap_height + proof.evals_proofs[0].1.siblings.len() - config.rate_bits
);
let subgroup_x = F::Extension::from_basefield(subgroup_x);
let mut alpha = ReducingFactor::new(alpha);
let mut sum = F::Extension::ZERO;
@ -244,7 +248,7 @@ fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
zeta: F::Extension,
alpha: F::Extension,
precomputed_reduced_evals: PrecomputedReducedEvals<F, D>,
initial_merkle_roots: &[HashOut<F>],
initial_merkle_caps: &[MerkleCap<F>],
proof: &FriProof<F, D>,
challenger: &mut Challenger<F>,
n: usize,
@ -259,7 +263,7 @@ fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
fri_verify_initial_proof(
x_index,
&round_proof.initial_trees_proof,
initial_merkle_roots,
initial_merkle_caps,
)?;
let mut old_x_index = 0;
// `subgroup_x` is `subgroup[x_index]`, i.e., the actual field element in the domain.
@ -291,17 +295,17 @@ fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
betas[i - 1],
)
};
let mut evals = round_proof.steps[i].evals.clone();
let evals = &round_proof.steps[i].evals;
// Check that the computed evaluation P(y) matches the one the prover now includes in the evaluation vector.
evals.insert(x_index & (arity - 1), e_x);
ensure!(evals[x_index & (arity - 1)] == e_x);
verify_merkle_proof(
flatten(&evals),
flatten(evals),
x_index >> arity_bits,
proof.commit_phase_merkle_roots[i],
&proof.commit_phase_merkle_caps[i],
&round_proof.steps[i].merkle_proof,
false,
)?;
evaluations.push(evals);
evaluations.push(evals.to_vec());
if i > 0 {
// Update the point x to x^arity.
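
The new `debug_assert_eq!` above captures the length bookkeeping that replaces the old `siblings.len() - rate_bits` derivation of `degree_log`: the committed LDE has `degree_bits + rate_bits` levels of hashing, and a cap of height `h` trims the top `h` of them from every path. A small sketch under that reading (`expected_siblings` is illustrative, not part of the codebase):

```rust
// Number of Merkle siblings per query, given the sizes involved; this just
// rearranges the assertion `degree_log == cap_height + siblings.len() - rate_bits`.
fn expected_siblings(degree_bits: usize, rate_bits: usize, cap_height: usize) -> usize {
    degree_bits + rate_bits - cap_height
}

fn main() {
    // E.g. a degree-2^13 batch with rate_bits = 3: a root commitment
    // (cap_height = 0) needs 16 siblings per path, a height-1 cap needs 15.
    assert_eq!(expected_siblings(13, 3, 0), 16);
    assert_eq!(expected_siblings(13, 3, 1), 15);
}
```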

View File

@ -61,3 +61,6 @@ impl HashOutTarget {
}
}
}
#[derive(Clone, Debug)]
pub struct MerkleCapTarget(pub Vec<HashOutTarget>);

View File

@ -1,11 +1,15 @@
use std::convert::TryInto;
use anyhow::{ensure, Result};
use serde::{Deserialize, Serialize};
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::field_types::Field;
use crate::gates::gmimc::GMiMCGate;
use crate::hash::hash_types::{HashOut, HashOutTarget};
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget};
use crate::hash::hashing::{compress, hash_or_noop, GMIMC_ROUNDS};
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::Target;
use crate::iop::wire::Wire;
use crate::plonk::circuit_builder::CircuitBuilder;
@ -24,46 +28,46 @@ pub struct MerkleProofTarget {
}
/// Verifies that the given leaf data is present at the given index in the Merkle tree with the
/// given root.
/// given cap.
pub(crate) fn verify_merkle_proof<F: Field>(
leaf_data: Vec<F>,
leaf_index: usize,
merkle_root: HashOut<F>,
merkle_cap: &MerkleCap<F>,
proof: &MerkleProof<F>,
reverse_bits: bool,
) -> Result<()> {
ensure!(
leaf_index >> proof.siblings.len() == 0,
"Merkle leaf index is too large."
);
let index = if reverse_bits {
let mut index = if reverse_bits {
crate::util::reverse_bits(leaf_index, proof.siblings.len())
} else {
leaf_index
};
let mut current_digest = hash_or_noop(leaf_data);
for (i, &sibling_digest) in proof.siblings.iter().enumerate() {
let bit = (index >> i & 1) == 1;
current_digest = if bit {
for &sibling_digest in proof.siblings.iter() {
let bit = index & 1;
index >>= 1;
current_digest = if bit == 1 {
compress(sibling_digest, current_digest)
} else {
compress(current_digest, sibling_digest)
}
}
ensure!(current_digest == merkle_root, "Invalid Merkle proof.");
ensure!(
current_digest == merkle_cap.0[index],
"Invalid Merkle proof."
);
Ok(())
}
impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
/// Verifies that the given leaf data is present at the given index in the Merkle tree with the
/// given root. The index is given by it's little-endian bits.
/// given cap. The index is given by its little-endian bits.
/// Note: Works only for D=4.
pub(crate) fn verify_merkle_proof(
&mut self,
leaf_data: Vec<Target>,
leaf_index_bits: &[Target],
merkle_root: HashOutTarget,
merkle_cap: &MerkleCapTarget,
proof: &MerkleProofTarget,
) {
let zero = self.zero();
@ -108,7 +112,83 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
)
}
self.named_assert_hashes_equal(state, merkle_root, "check Merkle root".into())
let index = self.le_sum(leaf_index_bits[proof.siblings.len()..].to_vec().into_iter());
let state_ext = state.elements[..].try_into().expect("requires D = 4");
let state_ext = ExtensionTarget(state_ext);
let cap_ext = merkle_cap
.0
.iter()
.map(|h| {
let tmp = h.elements[..].try_into().expect("requires D = 4");
ExtensionTarget(tmp)
})
.collect();
self.random_access(index, state_ext, cap_ext);
}
/// Same as `verify_merkle_proof`, but with the final "cap index" as an extra parameter.
/// Note: Works only for D=4.
pub(crate) fn verify_merkle_proof_with_cap_index(
&mut self,
leaf_data: Vec<Target>,
leaf_index_bits: &[Target],
cap_index: Target,
merkle_cap: &MerkleCapTarget,
proof: &MerkleProofTarget,
) {
let zero = self.zero();
let mut state: HashOutTarget = self.hash_or_noop(leaf_data);
for (&bit, &sibling) in leaf_index_bits.iter().zip(&proof.siblings) {
let gate_type = GMiMCGate::<F, D, GMIMC_ROUNDS>::new_automatic_constants();
let gate = self.add_gate(gate_type, vec![]);
let swap_wire = GMiMCGate::<F, D, GMIMC_ROUNDS>::WIRE_SWAP;
let swap_wire = Target::Wire(Wire {
gate,
input: swap_wire,
});
self.generate_copy(bit, swap_wire);
let input_wires = (0..12)
.map(|i| {
Target::Wire(Wire {
gate,
input: GMiMCGate::<F, D, GMIMC_ROUNDS>::wire_input(i),
})
})
.collect::<Vec<_>>();
for i in 0..4 {
self.route(state.elements[i], input_wires[i]);
self.route(sibling.elements[i], input_wires[4 + i]);
self.route(zero, input_wires[8 + i]);
}
state = HashOutTarget::from_vec(
(0..4)
.map(|i| {
Target::Wire(Wire {
gate,
input: GMiMCGate::<F, D, GMIMC_ROUNDS>::wire_output(i),
})
})
.collect(),
)
}
let state_ext = state.elements[..].try_into().expect("requires D = 4");
let state_ext = ExtensionTarget(state_ext);
let cap_ext = merkle_cap
.0
.iter()
.map(|h| {
let tmp = h.elements[..].try_into().expect("requires D = 4");
ExtensionTarget(tmp)
})
.collect();
self.random_access(cap_index, state_ext, cap_ext);
}
pub(crate) fn assert_hashes_equal(&mut self, x: HashOutTarget, y: HashOutTarget) {
@ -159,8 +239,9 @@ mod tests {
let log_n = 8;
let n = 1 << log_n;
let cap_height = 1;
let leaves = random_data::<F>(n, 7);
let tree = MerkleTree::new(leaves, false);
let tree = MerkleTree::new(leaves, cap_height, false);
let i: usize = thread_rng().gen_range(0..n);
let proof = tree.prove(i);
@ -171,8 +252,8 @@ mod tests {
pw.set_hash_target(proof_t.siblings[i], proof.siblings[i]);
}
let root_t = builder.add_virtual_hash();
pw.set_hash_target(root_t, tree.root);
let cap_t = builder.add_virtual_cap(cap_height);
pw.set_cap_target(&cap_t, &tree.cap);
let i_c = builder.constant(F::from_canonical_usize(i));
let i_bits = builder.split_le(i_c, log_n);
@ -182,7 +263,7 @@ mod tests {
pw.set_target(data[j], tree.leaves[i][j]);
}
builder.verify_merkle_proof(data, &i_bits, root_t, &proof_t);
builder.verify_merkle_proof(data, &i_bits, &cap_t, &proof_t);
let data = builder.build();
let proof = data.prove(pw)?;

View File

@ -1,4 +1,5 @@
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use crate::field::field_types::Field;
use crate::hash::hash_types::HashOut;
@ -6,6 +7,12 @@ use crate::hash::hashing::{compress, hash_or_noop};
use crate::hash::merkle_proofs::MerkleProof;
use crate::util::{log2_strict, reverse_bits, reverse_index_bits_in_place};
/// The Merkle cap of height `h` of a Merkle tree is the `h`-th layer (from the root) of the tree.
/// It can be used in place of the root to verify Merkle paths, which are `h` elements shorter.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "")]
pub struct MerkleCap<F: Field>(pub Vec<HashOut<F>>);
#[derive(Clone, Debug)]
pub struct MerkleTree<F: Field> {
/// The data in the leaves of the Merkle tree.
@ -14,8 +21,8 @@ pub struct MerkleTree<F: Field> {
/// The layers of hashes in the tree. The first layer is the one at the bottom.
pub layers: Vec<Vec<HashOut<F>>>,
/// The Merkle root.
pub root: HashOut<F>,
/// The Merkle cap.
pub cap: MerkleCap<F>,
/// If true, the indices are in bit-reversed form, so that the leaf at index `i`
/// contains the leaf originally at index `reverse_bits(i)`.
@ -23,7 +30,7 @@ pub struct MerkleTree<F: Field> {
}
impl<F: Field> MerkleTree<F> {
pub fn new(mut leaves: Vec<Vec<F>>, reverse_bits: bool) -> Self {
pub fn new(mut leaves: Vec<Vec<F>>, cap_height: usize, reverse_bits: bool) -> Self {
if reverse_bits {
reverse_index_bits_in_place(&mut leaves);
}
@ -32,7 +39,7 @@ impl<F: Field> MerkleTree<F> {
.map(|l| hash_or_noop(l.clone()))
.collect::<Vec<_>>()];
while let Some(l) = layers.last() {
if l.len() == 1 {
if l.len() == 1 << cap_height {
break;
}
let next_layer = l
@ -41,11 +48,11 @@ impl<F: Field> MerkleTree<F> {
.collect::<Vec<_>>();
layers.push(next_layer);
}
let root = layers.pop().unwrap()[0];
let cap = layers.pop().unwrap();
Self {
leaves,
layers,
root,
cap: MerkleCap(cap),
reverse_bits,
}
}
@ -97,10 +104,10 @@ mod tests {
n: usize,
reverse_bits: bool,
) -> Result<()> {
let tree = MerkleTree::new(leaves.clone(), reverse_bits);
let tree = MerkleTree::new(leaves.clone(), 1, reverse_bits);
for i in 0..n {
let proof = tree.prove(i);
verify_merkle_proof(leaves[i].clone(), i, tree.root, &proof, reverse_bits)?;
verify_merkle_proof(leaves[i].clone(), i, &tree.cap, &proof, reverse_bits)?;
}
Ok(())
}
@ -113,8 +120,7 @@ mod tests {
let n = 1 << log_n;
let leaves = random_data::<F>(n, 7);
verify_all_leaves(leaves.clone(), n, false)?;
verify_all_leaves(leaves, n, true)?;
verify_all_leaves(leaves, n, false)?;
Ok(())
}
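
Tying the tree and proof sides together, here is a toy std-only roundtrip of the pattern introduced above (hedged: `mix` stands in for `compress`/`hash_or_noop`, and real leaf data is hashed before the first level). Layers are hashed until `2^cap_height` digests remain, the proof takes one sibling per remaining level, and verification compares the final digest against the cap entry addressed by the leftover index bits.

```rust
// Illustrative only; not the MerkleTree/MerkleProof API from this crate.
fn mix(a: u64, b: u64) -> u64 {
    a.rotate_left(23) ^ b.wrapping_mul(0xD6E8_FEB8_6659_FD93)
}

fn prove_and_verify(leaves: &[u64], leaf_index: usize, cap_height: usize) -> bool {
    // Build the layers, stopping once the cap layer is reached.
    let mut layers = vec![leaves.to_vec()];
    while layers.last().unwrap().len() > 1 << cap_height {
        let next: Vec<u64> = layers
            .last()
            .unwrap()
            .chunks(2)
            .map(|p| mix(p[0], p[1]))
            .collect();
        layers.push(next);
    }
    let cap = layers.pop().unwrap();

    // The proof: one sibling per layer below the cap.
    let siblings: Vec<u64> = layers
        .iter()
        .enumerate()
        .map(|(i, layer)| layer[(leaf_index >> i) ^ 1])
        .collect();

    // Verification: fold the siblings in, then compare against the cap entry
    // selected by whatever is left of the index.
    let mut index = leaf_index;
    let mut digest = leaves[leaf_index];
    for &sibling in &siblings {
        let bit = index & 1;
        index >>= 1;
        digest = if bit == 1 {
            mix(sibling, digest)
        } else {
            mix(digest, sibling)
        };
    }
    cap[index] == digest
}

fn main() {
    let leaves: Vec<u64> = (100..116).collect(); // 16 leaves
    assert!((0..16).all(|i| prove_and_verify(&leaves, i, 1)));
    println!("all paths verified against the height-1 cap");
}
```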

View File

@ -3,8 +3,9 @@ use std::convert::TryInto;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::{Extendable, FieldExtension};
use crate::field::field_types::Field;
use crate::hash::hash_types::{HashOut, HashOutTarget};
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget};
use crate::hash::hashing::{permute, SPONGE_RATE, SPONGE_WIDTH};
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::Target;
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::proof::{OpeningSet, OpeningSetTarget};
@ -93,6 +94,12 @@ impl<F: Field> Challenger<F> {
self.observe_elements(&hash.elements)
}
pub fn observe_cap(&mut self, cap: &MerkleCap<F>) {
for hash in &cap.0 {
self.observe_elements(&hash.elements)
}
}
pub fn get_challenge(&mut self) -> F {
self.absorb_buffered_inputs();
@ -239,6 +246,12 @@ impl RecursiveChallenger {
self.observe_elements(&hash.elements)
}
pub fn observe_cap(&mut self, cap: &MerkleCapTarget) {
for hash in &cap.0 {
self.observe_hash(hash)
}
}
pub fn observe_extension_element<const D: usize>(&mut self, element: ExtensionTarget<D>) {
self.observe_elements(&element.0);
}

View File

@ -6,8 +6,9 @@ use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::{Extendable, FieldExtension};
use crate::field::field_types::Field;
use crate::gates::gate::GateInstance;
use crate::hash::hash_types::HashOut;
use crate::hash::hash_types::HashOutTarget;
use crate::hash::hash_types::{HashOut, MerkleCapTarget};
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::Target;
use crate::iop::wire::Wire;
use crate::plonk::copy_constraint::CopyConstraint;
@ -147,6 +148,12 @@ impl<F: Field> PartialWitness<F> {
.for_each(|(&t, x)| self.set_target(t, x));
}
pub fn set_cap_target(&mut self, ct: &MerkleCapTarget, value: &MerkleCap<F>) {
for (ht, h) in ct.0.iter().zip(&value.0) {
self.set_hash_target(*ht, *h);
}
}
pub fn set_extension_target<const D: usize>(
&mut self,
et: ExtensionTarget<D>,

View File

@ -13,7 +13,7 @@ use crate::gates::gate::{Gate, GateInstance, GateRef, PrefixedGate};
use crate::gates::gate_tree::Tree;
use crate::gates::noop::NoopGate;
use crate::gates::public_input::PublicInputGate;
use crate::hash::hash_types::HashOutTarget;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget};
use crate::hash::hashing::hash_n_to_hash;
use crate::iop::generator::{CopyGenerator, RandomValueGenerator, WitnessGenerator};
use crate::iop::target::Target;
@ -111,6 +111,10 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
HashOutTarget::from_vec(self.add_virtual_targets(4))
}
pub fn add_virtual_cap(&mut self, cap_height: usize) -> MerkleCapTarget {
MerkleCapTarget(self.add_virtual_hashes(1 << cap_height))
}
pub fn add_virtual_hashes(&mut self, n: usize) -> Vec<HashOutTarget> {
(0..n).map(|_i| self.add_virtual_hash()).collect()
}
@ -561,12 +565,13 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
constants_sigmas_vecs,
self.config.rate_bits,
self.config.zero_knowledge & PlonkPolynomials::CONSTANTS_SIGMAS.blinding,
self.config.cap_height,
&mut timing,
);
let constants_sigmas_root = constants_sigmas_commitment.merkle_tree.root;
let constants_sigmas_cap = constants_sigmas_commitment.merkle_tree.cap.clone();
let verifier_only = VerifierOnlyCircuitData {
constants_sigmas_root,
constants_sigmas_cap: constants_sigmas_cap.clone(),
};
let prover_only = ProverOnlyCircuitData {
@ -597,7 +602,11 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
// TODO: This should also include an encoding of gate constraints.
let circuit_digest_parts = [
constants_sigmas_root.elements.to_vec(),
constants_sigmas_cap
.0
.into_iter()
.flat_map(|h| h.elements)
.collect::<Vec<_>>(),
vec![/* Add other circuit data here */],
];
let circuit_digest = hash_n_to_hash(circuit_digest_parts.concat(), false);

View File

@ -7,7 +7,8 @@ use crate::field::field_types::Field;
use crate::fri::commitment::PolynomialBatchCommitment;
use crate::fri::FriConfig;
use crate::gates::gate::{GateInstance, PrefixedGate};
use crate::hash::hash_types::{HashOut, HashOutTarget};
use crate::hash::hash_types::{HashOut, MerkleCapTarget};
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::generator::WitnessGenerator;
use crate::iop::target::Target;
use crate::iop::witness::PartialWitness;
@ -27,6 +28,7 @@ pub struct CircuitConfig {
/// `degree / |F|`.
pub num_challenges: usize,
pub zero_knowledge: bool,
pub cap_height: usize,
// TODO: Find a better place for this.
pub fri_config: FriConfig,
@ -41,10 +43,12 @@ impl Default for CircuitConfig {
rate_bits: 3,
num_challenges: 3,
zero_knowledge: true,
cap_height: 1,
fri_config: FriConfig {
proof_of_work_bits: 1,
reduction_arity_bits: vec![1, 1, 1, 1],
num_query_rounds: 1,
cap_height: 1,
},
}
}
@ -63,10 +67,12 @@ impl CircuitConfig {
rate_bits: 3,
num_challenges: 3,
zero_knowledge: true,
cap_height: 1,
fri_config: FriConfig {
proof_of_work_bits: 1,
reduction_arity_bits: vec![1, 1, 1, 1],
num_query_rounds: 1,
cap_height: 1,
},
}
}
@ -143,7 +149,7 @@ pub(crate) struct ProverOnlyCircuitData<F: Extendable<D>, const D: usize> {
/// Circuit data required by the verifier, but not the prover.
pub(crate) struct VerifierOnlyCircuitData<F: Field> {
/// A commitment to each constant polynomial and each permutation polynomial.
pub(crate) constants_sigmas_root: HashOut<F>,
pub(crate) constants_sigmas_cap: MerkleCap<F>,
}
/// Circuit data required by both the prover and the verifier.
@ -233,5 +239,5 @@ impl<F: Extendable<D>, const D: usize> CommonCircuitData<F, D> {
/// dynamic, at least not without setting a maximum wire count and paying for the worst case.
pub struct VerifierCircuitTarget {
/// A commitment to each constant polynomial and each permutation polynomial.
pub(crate) constants_sigmas_root: HashOutTarget,
pub(crate) constants_sigmas_cap: MerkleCapTarget,
}

View File

@ -5,19 +5,20 @@ use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::fri::commitment::PolynomialBatchCommitment;
use crate::fri::proof::{FriProof, FriProofTarget};
use crate::hash::hash_types::{HashOut, HashOutTarget};
use crate::hash::hash_types::MerkleCapTarget;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::Target;
use crate::plonk::circuit_data::CommonCircuitData;
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "")]
pub struct Proof<F: Extendable<D>, const D: usize> {
/// Merkle root of LDEs of wire values.
pub wires_root: HashOut<F>,
/// Merkle root of LDEs of Z, in the context of Plonk's permutation argument.
pub plonk_zs_partial_products_root: HashOut<F>,
/// Merkle root of LDEs of the quotient polynomial components.
pub quotient_polys_root: HashOut<F>,
/// Merkle cap of LDEs of wire values.
pub wires_cap: MerkleCap<F>,
/// Merkle cap of LDEs of Z, in the context of Plonk's permutation argument.
pub plonk_zs_partial_products_cap: MerkleCap<F>,
/// Merkle cap of LDEs of the quotient polynomial components.
pub quotient_polys_cap: MerkleCap<F>,
/// Purported values of each polynomial at the challenge point.
pub openings: OpeningSet<F, D>,
/// A batch FRI argument for all openings.
@ -32,9 +33,9 @@ pub struct ProofWithPublicInputs<F: Extendable<D>, const D: usize> {
}
pub struct ProofTarget<const D: usize> {
pub wires_root: HashOutTarget,
pub plonk_zs_partial_products_root: HashOutTarget,
pub quotient_polys_root: HashOutTarget,
pub wires_cap: MerkleCapTarget,
pub plonk_zs_partial_products_cap: MerkleCapTarget,
pub quotient_polys_cap: MerkleCapTarget,
pub openings: OpeningSetTarget<D>,
pub opening_proof: FriProofTarget<D>,
}

View File

@ -85,6 +85,7 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
wires_values,
config.rate_bits,
config.zero_knowledge & PlonkPolynomials::WIRES.blinding,
config.cap_height,
&mut timing,
)
);
@ -95,7 +96,7 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
challenger.observe_hash(&common_data.circuit_digest);
challenger.observe_hash(&public_inputs_hash);
challenger.observe_hash(&wires_commitment.merkle_tree.root);
challenger.observe_cap(&wires_commitment.merkle_tree.cap);
let betas = challenger.get_n_challenges(num_challenges);
let gammas = challenger.get_n_challenges(num_challenges);
@ -129,11 +130,12 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
zs_partial_products,
config.rate_bits,
config.zero_knowledge & PlonkPolynomials::ZS_PARTIAL_PRODUCTS.blinding,
config.cap_height,
&mut timing,
)
);
challenger.observe_hash(&zs_partial_products_commitment.merkle_tree.root);
challenger.observe_cap(&zs_partial_products_commitment.merkle_tree.cap);
let alphas = challenger.get_n_challenges(num_challenges);
@ -177,11 +179,12 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
all_quotient_poly_chunks,
config.rate_bits,
config.zero_knowledge & PlonkPolynomials::QUOTIENT.blinding,
config.cap_height,
&mut timing
)
);
challenger.observe_hash(&quotient_polys_commitment.merkle_tree.root);
challenger.observe_cap(&quotient_polys_commitment.merkle_tree.cap);
let zeta = challenger.get_extension_challenge();
@ -205,9 +208,9 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
timing.print();
let proof = Proof {
wires_root: wires_commitment.merkle_tree.root,
plonk_zs_partial_products_root: zs_partial_products_commitment.merkle_tree.root,
quotient_polys_root: quotient_polys_commitment.merkle_tree.root,
wires_cap: wires_commitment.merkle_tree.cap,
plonk_zs_partial_products_cap: zs_partial_products_commitment.merkle_tree.cap,
quotient_polys_cap: quotient_polys_commitment.merkle_tree.cap,
openings,
opening_proof,
};

View File

@ -44,14 +44,14 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
challenger.observe_hash(&digest);
challenger.observe_hash(&public_inputs_hash);
challenger.observe_hash(&proof.wires_root);
challenger.observe_cap(&proof.wires_cap);
let betas = challenger.get_n_challenges(self, num_challenges);
let gammas = challenger.get_n_challenges(self, num_challenges);
challenger.observe_hash(&proof.plonk_zs_partial_products_root);
challenger.observe_cap(&proof.plonk_zs_partial_products_cap);
let alphas = challenger.get_n_challenges(self, num_challenges);
challenger.observe_hash(&proof.quotient_polys_root);
challenger.observe_cap(&proof.quotient_polys_cap);
let zeta = challenger.get_extension_challenge(self);
(betas, gammas, alphas, zeta)
@ -107,11 +107,11 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}
});
let merkle_roots = &[
inner_verifier_data.constants_sigmas_root,
proof.wires_root,
proof.plonk_zs_partial_products_root,
proof.quotient_polys_root,
let merkle_caps = &[
inner_verifier_data.constants_sigmas_cap.clone(),
proof.wires_cap,
proof.plonk_zs_partial_products_cap,
proof.quotient_polys_cap,
];
with_context!(
@ -120,7 +120,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self.verify_fri_proof(
&proof.openings,
zeta,
merkle_roots,
merkle_caps,
&proof.opening_proof,
&mut challenger,
inner_common_data,
@ -132,6 +132,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
#[cfg(test)]
mod tests {
use anyhow::Result;
use log::info;
use super::*;
use crate::field::crandall_field::CrandallField;
@ -144,6 +145,7 @@ mod tests {
use crate::iop::witness::PartialWitness;
use crate::plonk::proof::{OpeningSetTarget, Proof, ProofTarget, ProofWithPublicInputs};
use crate::plonk::verifier::verify;
use crate::util::log2_strict;
// Construct a `FriQueryRoundTarget` with the same dimensions as the ones in `proof`.
fn get_fri_query_round<F: Extendable<D>, const D: usize>(
@ -188,9 +190,11 @@ mod tests {
public_inputs,
} = proof_with_pis;
let wires_root = builder.add_virtual_hash();
let plonk_zs_root = builder.add_virtual_hash();
let quotient_polys_root = builder.add_virtual_hash();
let wires_cap = builder.add_virtual_cap(log2_strict(proof.wires_cap.0.len()));
let plonk_zs_cap =
builder.add_virtual_cap(log2_strict(proof.plonk_zs_partial_products_cap.0.len()));
let quotient_polys_cap =
builder.add_virtual_cap(log2_strict(proof.quotient_polys_cap.0.len()));
let openings = OpeningSetTarget {
constants: builder.add_virtual_extension_targets(proof.openings.constants.len()),
@ -207,11 +211,14 @@ mod tests {
let query_round_proofs = (0..proof.opening_proof.query_round_proofs.len())
.map(|_| get_fri_query_round(proof, builder))
.collect();
let commit_phase_merkle_roots = (0..proof.opening_proof.commit_phase_merkle_roots.len())
.map(|_| builder.add_virtual_hash())
let commit_phase_merkle_caps = proof
.opening_proof
.commit_phase_merkle_caps
.iter()
.map(|r| builder.add_virtual_cap(log2_strict(r.0.len())))
.collect();
let opening_proof = FriProofTarget {
commit_phase_merkle_roots,
commit_phase_merkle_caps,
query_round_proofs,
final_poly: PolynomialCoeffsExtTarget(
builder.add_virtual_extension_targets(proof.opening_proof.final_poly.len()),
@ -220,9 +227,9 @@ mod tests {
};
let proof = ProofTarget {
wires_root,
plonk_zs_partial_products_root: plonk_zs_root,
quotient_polys_root,
wires_cap,
plonk_zs_partial_products_cap: plonk_zs_cap,
quotient_polys_cap,
openings,
opening_proof,
};
@ -254,12 +261,12 @@ mod tests {
pw.set_target(pi_t, pi);
}
pw.set_hash_target(pt.wires_root, proof.wires_root);
pw.set_hash_target(
pt.plonk_zs_partial_products_root,
proof.plonk_zs_partial_products_root,
pw.set_cap_target(&pt.wires_cap, &proof.wires_cap);
pw.set_cap_target(
&pt.plonk_zs_partial_products_cap,
&proof.plonk_zs_partial_products_cap,
);
pw.set_hash_target(pt.quotient_polys_root, proof.quotient_polys_root);
pw.set_cap_target(&pt.quotient_polys_cap, &proof.quotient_polys_cap);
for (&t, &x) in pt.openings.wires.iter().zip(&proof.openings.wires) {
pw.set_extension_target(t, x);
@ -312,12 +319,12 @@ mod tests {
pw.set_extension_target(t, x);
}
for (&t, &x) in fpt
.commit_phase_merkle_roots
for (t, x) in fpt
.commit_phase_merkle_caps
.iter()
.zip(&fri_proof.commit_phase_merkle_roots)
.zip(&fri_proof.commit_phase_merkle_caps)
{
pw.set_hash_target(t, x);
pw.set_cap_target(t, x);
}
for (qt, q) in fpt
@ -368,10 +375,12 @@ mod tests {
rate_bits: 3,
num_challenges: 3,
zero_knowledge: false,
cap_height: 2,
fri_config: FriConfig {
proof_of_work_bits: 1,
reduction_arity_bits: vec![2, 2, 2, 2, 2, 2],
num_query_rounds: 40,
cap_height: 1,
},
};
let (proof_with_pis, vd, cd) = {
@ -396,9 +405,9 @@ mod tests {
set_proof_target(&proof_with_pis, &pt, &mut pw);
let inner_data = VerifierCircuitTarget {
constants_sigmas_root: builder.add_virtual_hash(),
constants_sigmas_cap: builder.add_virtual_cap(config.cap_height),
};
pw.set_hash_target(inner_data.constants_sigmas_root, vd.constants_sigmas_root);
pw.set_cap_target(&inner_data.constants_sigmas_cap, &vd.constants_sigmas_cap);
builder.add_recursive_verifier(pt, &config, &inner_data, &cd);
@ -417,15 +426,17 @@ mod tests {
const D: usize = 4;
let config = CircuitConfig {
num_wires: 126,
num_routed_wires: 33,
num_routed_wires: 37,
security_bits: 128,
rate_bits: 3,
num_challenges: 3,
zero_knowledge: false,
cap_height: 3,
fri_config: FriConfig {
proof_of_work_bits: 1,
reduction_arity_bits: vec![2, 2, 2, 2, 2, 2],
num_query_rounds: 40,
cap_height: 3,
},
};
let (proof_with_pis, vd, cd) = {
@ -451,9 +462,9 @@ mod tests {
set_proof_target(&proof_with_pis, &pt, &mut pw);
let inner_data = VerifierCircuitTarget {
constants_sigmas_root: builder.add_virtual_hash(),
constants_sigmas_cap: builder.add_virtual_cap(config.cap_height),
};
pw.set_hash_target(inner_data.constants_sigmas_root, vd.constants_sigmas_root);
pw.set_cap_target(&inner_data.constants_sigmas_cap, &vd.constants_sigmas_cap);
builder.add_recursive_verifier(pt, &config, &inner_data, &cd);
@ -469,15 +480,17 @@ mod tests {
set_proof_target(&proof_with_pis, &pt, &mut pw);
let inner_data = VerifierCircuitTarget {
constants_sigmas_root: builder.add_virtual_hash(),
constants_sigmas_cap: builder.add_virtual_cap(config.cap_height),
};
pw.set_hash_target(inner_data.constants_sigmas_root, vd.constants_sigmas_root);
pw.set_cap_target(&inner_data.constants_sigmas_cap, &vd.constants_sigmas_cap);
builder.add_recursive_verifier(pt, &config, &inner_data, &cd);
builder.print_gate_counts(0);
let data = builder.build();
let recursive_proof = data.prove(pw)?;
let proof_bytes = serde_cbor::to_vec(&recursive_proof).unwrap();
info!("Proof length: {} bytes", proof_bytes.len());
verify(recursive_proof, &data.verifier_only, &data.common)
}
}

View File

@ -31,14 +31,14 @@ pub(crate) fn verify<F: Extendable<D>, const D: usize>(
challenger.observe_hash(&common_data.circuit_digest);
challenger.observe_hash(&public_inputs_hash);
challenger.observe_hash(&proof.wires_root);
challenger.observe_cap(&proof.wires_cap);
let betas = challenger.get_n_challenges(num_challenges);
let gammas = challenger.get_n_challenges(num_challenges);
challenger.observe_hash(&proof.plonk_zs_partial_products_root);
challenger.observe_cap(&proof.plonk_zs_partial_products_cap);
let alphas = challenger.get_n_challenges(num_challenges);
challenger.observe_hash(&proof.quotient_polys_root);
challenger.observe_cap(&proof.quotient_polys_cap);
let zeta = challenger.get_extension_challenge();
let local_constants = &proof.openings.constants;
@ -83,17 +83,17 @@ pub(crate) fn verify<F: Extendable<D>, const D: usize>(
ensure!(vanishing_polys_zeta[i] == z_h_zeta * reduce_with_powers(chunk, zeta_pow_deg));
}
let merkle_roots = &[
verifier_data.constants_sigmas_root,
proof.wires_root,
proof.plonk_zs_partial_products_root,
proof.quotient_polys_root,
let merkle_caps = &[
verifier_data.constants_sigmas_cap.clone(),
proof.wires_cap,
proof.plonk_zs_partial_products_cap,
proof.quotient_polys_cap,
];
verify_fri_proof(
&proof.openings,
zeta,
merkle_roots,
merkle_caps,
&proof.opening_proof,
&mut challenger,
common_data,