Mirror of https://github.com/logos-storage/plonky2.git (synced 2026-01-05 23:33:07 +00:00)
Derive challenges from other proof fields (#262)
* Derive challenges from other proof fields
* Delete failing test. Seems really hard to get the challenges right with the new model.
* Move PoW check
* Other feedback
parent 23b1161d27
commit f382289896
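The core of the change: instead of threading a `Challenger` through the verifier and the FRI code, all Fiat-Shamir challenges (`plonk_betas`, `plonk_gammas`, `plonk_alphas`, `plonk_zeta`, `fri_alpha`, `fri_betas`, `fri_pow_response`, `fri_query_indices`) are re-derived from the proof's own fields by a new `ProofWithPublicInputs::get_challenges` method and carried in a `ProofChallenges` struct. The sketch below is a self-contained toy of that pattern, not the plonky2 API: `ToyChallenger`, `ToyProof`, and `ToyChallenges` are simplified stand-ins that only illustrate how absorbing proof fields into a transcript makes every challenge a deterministic function of the proof.

// Toy illustration of the pattern this commit introduces: the verifier re-derives
// every Fiat-Shamir challenge from the proof's own fields, so no `Challenger` has to
// be threaded through the verification code. All types below are simplified stand-ins
// (a 64-bit hash transcript), not the plonky2 API.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// A toy transcript: absorbs observed proof fields, squeezes deterministic challenges.
struct ToyChallenger {
    state: u64,
}

impl ToyChallenger {
    fn new() -> Self {
        Self { state: 0 }
    }

    // Absorb a proof field into the transcript state.
    fn observe(&mut self, value: u64) {
        let mut h = DefaultHasher::new();
        (self.state, value).hash(&mut h);
        self.state = h.finish();
    }

    // Squeeze a challenge, then fold a constant back in so later challenges depend on it.
    fn get_challenge(&mut self) -> u64 {
        let c = self.state;
        self.observe(0xC0FFEE);
        c
    }
}

// Stand-ins for the commitments a proof carries.
struct ToyProof {
    wires_cap: u64,
    zs_partial_products_cap: u64,
    quotient_polys_cap: u64,
}

// Challenges derived purely from the proof, in the spirit of `ProofChallenges`.
struct ToyChallenges {
    beta: u64,
    gamma: u64,
    alpha: u64,
    zeta: u64,
}

// Mirrors the shape of `get_challenges`: observe a commitment, squeeze the challenges
// that depend on it, repeat.
fn get_challenges(proof: &ToyProof) -> ToyChallenges {
    let mut challenger = ToyChallenger::new();
    challenger.observe(proof.wires_cap);
    let beta = challenger.get_challenge();
    let gamma = challenger.get_challenge();
    challenger.observe(proof.zs_partial_products_cap);
    let alpha = challenger.get_challenge();
    challenger.observe(proof.quotient_polys_cap);
    let zeta = challenger.get_challenge();
    ToyChallenges { beta, gamma, alpha, zeta }
}

fn main() {
    let proof = ToyProof {
        wires_cap: 1,
        zs_partial_products_cap: 2,
        quotient_polys_cap: 3,
    };
    // Deriving twice from the same proof gives identical challenges; this determinism is
    // what lets other code (e.g. proof compression) recompute them instead of storing them.
    let a = get_challenges(&proof);
    let b = get_challenges(&proof);
    assert_eq!((a.beta, a.gamma, a.alpha, a.zeta), (b.beta, b.gamma, b.alpha, b.zeta));
    println!("zeta = {:#x}", a.zeta);
}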
@@ -250,129 +250,3 @@ impl<F: RichField> PolynomialBatchCommitment<F> {
         quotient.padded(quotient.degree_plus_one().next_power_of_two())
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use anyhow::Result;
-
-    use super::*;
-    use crate::fri::verifier::verify_fri_proof;
-    use crate::fri::FriConfig;
-    use crate::hash::hash_types::HashOut;
-    use crate::plonk::circuit_data::CircuitConfig;
-
-    fn gen_random_test_case<F: Field + Extendable<D>, const D: usize>(
-        k: usize,
-        degree_log: usize,
-    ) -> Vec<PolynomialValues<F>> {
-        let degree = 1 << degree_log;
-
-        (0..k)
-            .map(|_| PolynomialValues::new(F::rand_vec(degree)))
-            .collect()
-    }
-
-    fn gen_random_point<F: Field + Extendable<D>, const D: usize>(
-        degree_log: usize,
-    ) -> F::Extension {
-        let degree = 1 << degree_log;
-
-        let mut point = F::Extension::rand();
-        while point.exp_u64(degree as u64).is_one() {
-            point = F::Extension::rand();
-        }
-
-        point
-    }
-
-    fn check_batch_polynomial_commitment<F: RichField + Extendable<D>, const D: usize>(
-    ) -> Result<()> {
-        let ks = [10, 2, 10, 8];
-        let degree_bits = 11;
-        let fri_config = FriConfig {
-            proof_of_work_bits: 2,
-            reduction_arity_bits: vec![2, 3, 1, 2],
-            num_query_rounds: 3,
-        };
-
-        // We only care about `fri_config, num_constants`, and `num_routed_wires` here.
-        let common_data = CommonCircuitData {
-            config: CircuitConfig {
-                fri_config,
-                num_routed_wires: 6,
-                ..CircuitConfig::large_config()
-            },
-            degree_bits,
-            gates: vec![],
-            quotient_degree_factor: 0,
-            num_gate_constraints: 0,
-            num_constants: 4,
-            k_is: vec![F::ONE; 6],
-            num_partial_products: (0, 0),
-            circuit_digest: HashOut::from_partial(vec![]),
-        };
-
-        let commitments = (0..4)
-            .map(|i| {
-                PolynomialBatchCommitment::<F>::from_values(
-                    gen_random_test_case(ks[i], degree_bits),
-                    common_data.config.rate_bits,
-                    common_data.config.zero_knowledge && PlonkPolynomials::polynomials(i).blinding,
-                    common_data.config.cap_height,
-                    &mut TimingTree::default(),
-                    None,
-                )
-            })
-            .collect::<Vec<_>>();
-
-        let zeta = gen_random_point::<F, D>(degree_bits);
-        let (proof, os) = PolynomialBatchCommitment::open_plonk::<D>(
-            &[
-                &commitments[0],
-                &commitments[1],
-                &commitments[2],
-                &commitments[3],
-            ],
-            zeta,
-            &mut Challenger::new(),
-            &common_data,
-            &mut TimingTree::default(),
-        );
-
-        let merkle_caps = &[
-            commitments[0].merkle_tree.cap.clone(),
-            commitments[1].merkle_tree.cap.clone(),
-            commitments[2].merkle_tree.cap.clone(),
-            commitments[3].merkle_tree.cap.clone(),
-        ];
-
-        verify_fri_proof(
-            &os,
-            zeta,
-            merkle_caps,
-            &proof,
-            &mut Challenger::new(),
-            &common_data,
-        )
-    }
-
-    mod quadratic {
-        use super::*;
-        use crate::field::crandall_field::CrandallField;
-
-        #[test]
-        fn test_batch_polynomial_commitment() -> Result<()> {
-            check_batch_polynomial_commitment::<CrandallField, 2>()
-        }
-    }
-
-    mod quartic {
-        use super::*;
-        use crate::field::crandall_field::CrandallField;
-
-        #[test]
-        fn test_batch_polynomial_commitment() -> Result<()> {
-            check_batch_polynomial_commitment::<CrandallField, 4>()
-        }
-    }
-}
@@ -67,7 +67,6 @@ impl FriInitialTreeProofTarget {
 #[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
 #[serde(bound = "")]
 pub struct FriQueryRound<F: Extendable<D>, const D: usize> {
-    pub index: usize,
     pub initial_trees_proof: FriInitialTreeProof<F>,
     pub steps: Vec<FriQueryStep<F, D>>,
 }
@@ -102,7 +101,7 @@ pub struct FriProofTarget<const D: usize> {
 
 impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
     /// Compress all the Merkle paths in the FRI proof.
-    pub fn compress(self, common_data: &CommonCircuitData<F, D>) -> Self {
+    pub fn compress(self, indices: &[usize], common_data: &CommonCircuitData<F, D>) -> Self {
         if self.is_compressed {
             panic!("Proof is already compressed.");
         }
@@ -126,9 +125,8 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
         let mut steps_evals = vec![vec![]; num_reductions];
         let mut steps_proofs = vec![vec![]; num_reductions];
 
-        for qrp in &query_round_proofs {
+        for (mut index, qrp) in indices.iter().cloned().zip(&query_round_proofs) {
             let FriQueryRound {
-                mut index,
                 initial_trees_proof,
                 steps,
             } = qrp.clone();
@@ -189,7 +187,7 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
     }
 
     /// Decompress all the Merkle paths in the FRI proof.
-    pub fn decompress(self, common_data: &CommonCircuitData<F, D>) -> Self {
+    pub fn decompress(self, indices: &[usize], common_data: &CommonCircuitData<F, D>) -> Self {
         if !self.is_compressed {
             panic!("Proof is not compressed.");
         }
@@ -221,9 +219,8 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
            })
            .collect::<Vec<_>>();
 
-        for qrp in &query_round_proofs {
+        for (mut index, qrp) in indices.iter().cloned().zip(&query_round_proofs) {
             let FriQueryRound {
-                mut index,
                 initial_trees_proof,
                 steps,
             } = qrp.clone();
@@ -153,8 +153,7 @@ fn fri_prover_query_round<F: RichField + Extendable<D>, const D: usize>(
 ) -> FriQueryRound<F, D> {
     let mut query_steps = Vec::new();
     let x = challenger.get_challenge();
-    let initial_index = x.to_canonical_u64() as usize % n;
-    let mut x_index = initial_index;
+    let mut x_index = x.to_canonical_u64() as usize % n;
     let initial_proof = initial_merkle_trees
         .iter()
         .map(|t| (t.get(x_index).to_vec(), t.prove(x_index)))
@@ -172,7 +171,6 @@ fn fri_prover_query_round<F: RichField + Extendable<D>, const D: usize>(
         x_index >>= arity_bits;
     }
     FriQueryRound {
-        index: initial_index,
         initial_trees_proof: FriInitialTreeProof {
             evals_proofs: initial_proof,
         },
@@ -5,13 +5,11 @@ use crate::field::field_types::{Field, RichField};
 use crate::field::interpolation::{barycentric_weights, interpolate, interpolate2};
 use crate::fri::proof::{FriInitialTreeProof, FriProof, FriQueryRound};
 use crate::fri::FriConfig;
-use crate::hash::hashing::hash_n_to_1;
 use crate::hash::merkle_proofs::verify_merkle_proof;
 use crate::hash::merkle_tree::MerkleCap;
-use crate::iop::challenger::Challenger;
 use crate::plonk::circuit_data::CommonCircuitData;
 use crate::plonk::plonk_common::PlonkPolynomials;
-use crate::plonk::proof::OpeningSet;
+use crate::plonk::proof::{OpeningSet, ProofChallenges};
 use crate::util::reducing::ReducingFactor;
 use crate::util::{log2_strict, reverse_bits, reverse_index_bits_in_place};
 
@@ -44,23 +42,12 @@ fn compute_evaluation<F: Field + Extendable<D>, const D: usize>(
     interpolate(&points, beta, &barycentric_weights)
 }
 
-fn fri_verify_proof_of_work<F: RichField + Extendable<D>, const D: usize>(
-    proof: &FriProof<F, D>,
-    challenger: &mut Challenger<F>,
+pub(crate) fn fri_verify_proof_of_work<F: RichField + Extendable<D>, const D: usize>(
+    fri_pow_response: F,
     config: &FriConfig,
 ) -> Result<()> {
-    let hash = hash_n_to_1(
-        challenger
-            .get_hash()
-            .elements
-            .iter()
-            .copied()
-            .chain(Some(proof.pow_witness))
-            .collect(),
-        false,
-    );
     ensure!(
-        hash.to_canonical_u64().leading_zeros()
+        fri_pow_response.to_canonical_u64().leading_zeros()
            >= config.proof_of_work_bits + (64 - F::order().bits()) as u32,
         "Invalid proof of work witness."
     );
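With this hunk, `fri_verify_proof_of_work` no longer recomputes the PoW hash from a `Challenger`; it just checks the `fri_pow_response` that `get_challenges` already derived from the proof. The acceptance condition itself is unchanged: the response must have at least `proof_of_work_bits` leading zeros on top of the `64 - F::order().bits()` bits that are always zero for a canonical field element. Below is a minimal stand-alone illustration of that condition on a plain `u64`; the names and the 64-bit field order used in `main` are assumptions for the toy, not the plonky2 API.

// Toy version of the proof-of-work condition checked above: the response is accepted
// when its canonical representation has enough leading zero bits. `field_order_bits`
// stands in for `F::order().bits()`.
fn toy_pow_check(pow_response: u64, proof_of_work_bits: u32, field_order_bits: u32) -> bool {
    pow_response.leading_zeros() >= proof_of_work_bits + (64 - field_order_bits)
}

fn main() {
    // With a 64-bit field order the whole budget must come from the response itself:
    // 19 leading zeros satisfy a 16-bit requirement...
    assert!(toy_pow_check(0x0000_1234_5678_9abc, 16, 64));
    // ...while 8 leading zeros do not.
    assert!(!toy_pow_check(0x00ff_ffff_ffff_ffff, 16, 64));
    println!("toy PoW checks passed");
}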
@@ -68,14 +55,12 @@ fn fri_verify_proof_of_work<F: RichField + Extendable<D>, const D: usize>(
     Ok(())
 }
 
-pub fn verify_fri_proof<F: RichField + Extendable<D>, const D: usize>(
+pub(crate) fn verify_fri_proof<F: RichField + Extendable<D>, const D: usize>(
     // Openings of the PLONK polynomials.
     os: &OpeningSet<F, D>,
-    // Point at which the PLONK polynomials are opened.
-    zeta: F::Extension,
+    challenges: &ProofChallenges<F, D>,
     initial_merkle_caps: &[MerkleCap<F>],
     proof: &FriProof<F, D>,
-    challenger: &mut Challenger<F>,
     common_data: &CommonCircuitData<F, D>,
 ) -> Result<()> {
     let config = &common_data.config;
@@ -85,27 +70,11 @@ pub fn verify_fri_proof<F: RichField + Extendable<D>, const D: usize>(
         "Final polynomial has wrong degree."
     );
 
-    challenger.observe_opening_set(os);
-
-    // Scaling factor to combine polynomials.
-    let alpha = challenger.get_extension_challenge();
-
     // Size of the LDE domain.
     let n = proof.final_poly.len() << (total_arities + config.rate_bits);
 
-    // Recover the random betas used in the FRI reductions.
-    let betas = proof
-        .commit_phase_merkle_caps
-        .iter()
-        .map(|cap| {
-            challenger.observe_cap(cap);
-            challenger.get_extension_challenge()
-        })
-        .collect::<Vec<_>>();
-    challenger.observe_extension_elements(&proof.final_poly.coeffs);
-
     // Check PoW.
-    fri_verify_proof_of_work(proof, challenger, &config.fri_config)?;
+    fri_verify_proof_of_work(challenges.fri_pow_response, &config.fri_config)?;
 
     // Check that parameters are coherent.
     ensure!(
@@ -117,17 +86,20 @@ pub fn verify_fri_proof<F: RichField + Extendable<D>, const D: usize>(
         "Number of reductions should be non-zero."
     );
 
-    let precomputed_reduced_evals = PrecomputedReducedEvals::from_os_and_alpha(os, alpha);
-    for round_proof in &proof.query_round_proofs {
+    let precomputed_reduced_evals =
+        PrecomputedReducedEvals::from_os_and_alpha(os, challenges.fri_alpha);
+    for (&x_index, round_proof) in challenges
+        .fri_query_indices
+        .iter()
+        .zip(&proof.query_round_proofs)
+    {
         fri_verifier_query_round(
-            zeta,
-            alpha,
+            challenges,
             precomputed_reduced_evals,
             initial_merkle_caps,
             &proof,
-            challenger,
+            x_index,
             n,
-            &betas,
             round_proof,
             common_data,
         )?;
@@ -245,21 +217,16 @@ fn fri_combine_initial<F: RichField + Extendable<D>, const D: usize>(
 }
 
 fn fri_verifier_query_round<F: RichField + Extendable<D>, const D: usize>(
-    zeta: F::Extension,
-    alpha: F::Extension,
+    challenges: &ProofChallenges<F, D>,
     precomputed_reduced_evals: PrecomputedReducedEvals<F, D>,
     initial_merkle_caps: &[MerkleCap<F>],
     proof: &FriProof<F, D>,
-    challenger: &mut Challenger<F>,
+    mut x_index: usize,
     n: usize,
-    betas: &[F::Extension],
     round_proof: &FriQueryRound<F, D>,
     common_data: &CommonCircuitData<F, D>,
 ) -> Result<()> {
     let config = &common_data.config.fri_config;
-    let x = challenger.get_challenge();
-    let mut x_index = x.to_canonical_u64() as usize % n;
-    ensure!(x_index == round_proof.index, "Wrong index.");
     fri_verify_initial_proof(
         x_index,
         &round_proof.initial_trees_proof,
@@ -274,8 +241,8 @@ fn fri_verifier_query_round<F: RichField + Extendable<D>, const D: usize>(
     // committed "parent" value in the next iteration.
     let mut old_eval = fri_combine_initial(
         &round_proof.initial_trees_proof,
-        alpha,
-        zeta,
+        challenges.fri_alpha,
+        challenges.plonk_zeta,
         subgroup_x,
         precomputed_reduced_evals,
         common_data,
@@ -298,7 +265,7 @@ fn fri_verifier_query_round<F: RichField + Extendable<D>, const D: usize>(
            x_index_within_coset,
            arity_bits,
            evals,
-            betas[i],
+            challenges.fri_betas[i],
         );
 
         verify_merkle_proof(
@@ -2,7 +2,6 @@
 
 use crate::field::extension_field::Extendable;
 use crate::field::field_types::RichField;
-use crate::gates::poseidon::PoseidonGate;
 use crate::hash::hash_types::{HashOut, HashOutTarget};
 use crate::iop::target::Target;
 use crate::plonk::circuit_builder::CircuitBuilder;
src/plonk/get_challenges.rs (new file, 86 lines)
@@ -0,0 +1,86 @@
+use crate::field::extension_field::Extendable;
+use crate::field::field_types::RichField;
+use crate::hash::hashing::hash_n_to_1;
+use crate::iop::challenger::Challenger;
+use crate::plonk::circuit_data::CommonCircuitData;
+use crate::plonk::proof::{ProofChallenges, ProofWithPublicInputs};
+
+impl<F: RichField + Extendable<D>, const D: usize> ProofWithPublicInputs<F, D> {
+    pub(crate) fn fri_query_indices(
+        &self,
+        common_data: &CommonCircuitData<F, D>,
+    ) -> anyhow::Result<Vec<usize>> {
+        Ok(self.get_challenges(common_data)?.fri_query_indices)
+    }
+
+    pub(crate) fn get_challenges(
+        &self,
+        common_data: &CommonCircuitData<F, D>,
+    ) -> anyhow::Result<ProofChallenges<F, D>> {
+        let config = &common_data.config;
+        let num_challenges = config.num_challenges;
+        let num_fri_queries = config.fri_config.num_query_rounds;
+        let lde_size = common_data.lde_size();
+
+        let mut challenger = Challenger::new();
+
+        // Observe the instance.
+        challenger.observe_hash(&common_data.circuit_digest);
+        challenger.observe_hash(&self.get_public_inputs_hash());
+
+        challenger.observe_cap(&self.proof.wires_cap);
+        let plonk_betas = challenger.get_n_challenges(num_challenges);
+        let plonk_gammas = challenger.get_n_challenges(num_challenges);
+
+        challenger.observe_cap(&self.proof.plonk_zs_partial_products_cap);
+        let plonk_alphas = challenger.get_n_challenges(num_challenges);
+
+        challenger.observe_cap(&self.proof.quotient_polys_cap);
+        let plonk_zeta = challenger.get_extension_challenge();
+
+        challenger.observe_opening_set(&self.proof.openings);
+
+        // Scaling factor to combine polynomials.
+        let fri_alpha = challenger.get_extension_challenge();
+
+        // Recover the random betas used in the FRI reductions.
+        let fri_betas = self
+            .proof
+            .opening_proof
+            .commit_phase_merkle_caps
+            .iter()
+            .map(|cap| {
+                challenger.observe_cap(cap);
+                challenger.get_extension_challenge()
+            })
+            .collect();
+
+        challenger.observe_extension_elements(&self.proof.opening_proof.final_poly.coeffs);
+
+        let fri_pow_response = hash_n_to_1(
+            challenger
+                .get_hash()
+                .elements
+                .iter()
+                .copied()
+                .chain(Some(self.proof.opening_proof.pow_witness))
+                .collect(),
+            false,
+        );
+
+        let fri_query_indices = (0..num_fri_queries)
+            .map(|_| challenger.get_challenge().to_canonical_u64() as usize % lde_size)
+            .collect();
+
+        Ok(ProofChallenges {
+            plonk_betas,
+            plonk_gammas,
+            plonk_alphas,
+            plonk_zeta,
+            fri_alpha,
+            fri_betas,
+            fri_pow_response,
+            fri_query_indices,
+        })
+    }
+}
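Everything in this new file is a pure function of the proof and `CommonCircuitData`, so two calls to `get_challenges` on the same proof yield the same `fri_query_indices`. That is what lets this commit delete the stored `index` from `FriQueryRound` and have `compress`/`decompress` re-derive the indices instead. Below is a self-contained toy of that round trip; the seeded index derivation and the integer stand-ins for the Merkle data are illustrative only, not the plonky2 API.

// Toy round trip: because the query indices are a deterministic function of the proof,
// a "compressed" proof can drop them and a "decompressed" proof can re-derive them.

#[derive(Clone, Debug, PartialEq)]
struct ToyQueryRound {
    index: usize,     // recoverable, so it need not be stored
    merkle_data: u64, // stand-in for the real Merkle evaluations/paths
}

#[derive(Clone, Debug, PartialEq)]
struct ToyFriProof {
    seed: u64, // stands in for the proof fields the challenger observes
    query_rounds: Vec<ToyQueryRound>,
}

// Deterministic index derivation, playing the role of `fri_query_indices`.
fn derive_indices(seed: u64, num_queries: usize, lde_size: usize) -> Vec<usize> {
    (0..num_queries as u64)
        .map(|i| (seed.wrapping_mul(6364136223846793005).wrapping_add(i) as usize) % lde_size)
        .collect()
}

// "Compression" forgets the stored indices (the real code also compresses Merkle paths).
fn compress(mut proof: ToyFriProof) -> ToyFriProof {
    for qr in &mut proof.query_rounds {
        qr.index = 0;
    }
    proof
}

// Decompression restores them from the re-derived indices.
fn decompress(mut proof: ToyFriProof, lde_size: usize) -> ToyFriProof {
    let indices = derive_indices(proof.seed, proof.query_rounds.len(), lde_size);
    for (qr, index) in proof.query_rounds.iter_mut().zip(indices) {
        qr.index = index;
    }
    proof
}

fn main() {
    let lde_size = 1 << 12;
    let seed = 42;
    let query_rounds: Vec<ToyQueryRound> = derive_indices(seed, 3, lde_size)
        .into_iter()
        .map(|index| ToyQueryRound { index, merkle_data: 7 })
        .collect();
    let proof = ToyFriProof { seed, query_rounds };

    // `decompress ∘ compress` is the identity, as asserted by the test in this commit.
    let round_trip = decompress(compress(proof.clone()), lde_size);
    assert_eq!(proof, round_trip);
    println!("round trip ok");
}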
@@ -1,6 +1,7 @@
 pub mod circuit_builder;
 pub mod circuit_data;
 pub(crate) mod copy_constraint;
+mod get_challenges;
 pub(crate) mod permutation_argument;
 pub(crate) mod plonk_common;
 pub mod proof;
@@ -6,7 +6,8 @@ use crate::field::extension_field::Extendable;
 use crate::field::field_types::RichField;
 use crate::fri::commitment::PolynomialBatchCommitment;
 use crate::fri::proof::{FriProof, FriProofTarget};
-use crate::hash::hash_types::MerkleCapTarget;
+use crate::hash::hash_types::{HashOut, MerkleCapTarget};
+use crate::hash::hashing::hash_n_to_hash;
 use crate::hash::merkle_tree::MerkleCap;
 use crate::iop::target::Target;
 use crate::plonk::circuit_data::CommonCircuitData;
@@ -41,30 +42,25 @@ impl<F: RichField + Extendable<D>, const D: usize> Proof<F, D> {
     }
 
     /// Compress the opening proof.
-    pub fn compress(mut self, common_data: &CommonCircuitData<F, D>) -> Self {
-        self.opening_proof = self.opening_proof.compress(common_data);
+    pub fn compress(mut self, indices: &[usize], common_data: &CommonCircuitData<F, D>) -> Self {
+        self.opening_proof = self.opening_proof.compress(&indices, common_data);
         self
     }
 
     /// Decompress the opening proof.
-    pub fn decompress(mut self, common_data: &CommonCircuitData<F, D>) -> Self {
-        self.opening_proof = self.opening_proof.decompress(common_data);
+    pub fn decompress(mut self, indices: &[usize], common_data: &CommonCircuitData<F, D>) -> Self {
+        self.opening_proof = self.opening_proof.decompress(&indices, common_data);
         self
     }
 }
 
 #[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
 #[serde(bound = "")]
-pub struct ProofWithPublicInputs<F: Extendable<D>, const D: usize> {
+pub struct ProofWithPublicInputs<F: RichField + Extendable<D>, const D: usize> {
     pub proof: Proof<F, D>,
     pub public_inputs: Vec<F>,
 }
 
-pub struct ProofWithPublicInputsTarget<const D: usize> {
-    pub proof: ProofTarget<D>,
-    pub public_inputs: Vec<Target>,
-}
-
 impl<F: RichField + Extendable<D>, const D: usize> ProofWithPublicInputs<F, D> {
     /// Returns `true` iff the opening proof is compressed.
     pub fn is_compressed(&self) -> bool {
@@ -72,16 +68,51 @@ impl<F: RichField + Extendable<D>, const D: usize> ProofWithPublicInputs<F, D> {
     }
 
     /// Compress the opening proof.
-    pub fn compress(mut self, common_data: &CommonCircuitData<F, D>) -> Self {
-        self.proof = self.proof.compress(common_data);
-        self
+    pub fn compress(mut self, common_data: &CommonCircuitData<F, D>) -> anyhow::Result<Self> {
+        let indices = self.fri_query_indices(common_data)?;
+        self.proof = self.proof.compress(&indices, common_data);
+        Ok(self)
     }
 
     /// Decompress the opening proof.
-    pub fn decompress(mut self, common_data: &CommonCircuitData<F, D>) -> Self {
-        self.proof = self.proof.decompress(common_data);
-        self
+    pub fn decompress(mut self, common_data: &CommonCircuitData<F, D>) -> anyhow::Result<Self> {
+        let indices = self.fri_query_indices(common_data)?;
+        self.proof = self.proof.decompress(&indices, common_data);
+        Ok(self)
     }
+
+    pub(crate) fn get_public_inputs_hash(&self) -> HashOut<F> {
+        hash_n_to_hash(self.public_inputs.clone(), true)
+    }
+}
+
+pub(crate) struct ProofChallenges<F: RichField + Extendable<D>, const D: usize> {
+    // Random values used in Plonk's permutation argument.
+    pub plonk_betas: Vec<F>,
+
+    // Random values used in Plonk's permutation argument.
+    pub plonk_gammas: Vec<F>,
+
+    // Random values used to combine PLONK constraints.
+    pub plonk_alphas: Vec<F>,
+
+    // Point at which the PLONK polynomials are opened.
+    pub plonk_zeta: F::Extension,
+
+    // Scaling factor to combine polynomials.
+    pub fri_alpha: F::Extension,
+
+    // Betas used in the FRI commit phase reductions.
+    pub fri_betas: Vec<F::Extension>,
+
+    pub fri_pow_response: F,
+
+    pub fri_query_indices: Vec<usize>,
+}
+
+pub struct ProofWithPublicInputsTarget<const D: usize> {
+    pub proof: ProofTarget<D>,
+    pub public_inputs: Vec<Target>,
 }
 
 #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
@@ -175,8 +206,8 @@ mod tests {
         let proof = data.prove(pw)?;
 
         // Verify that `decompress ∘ compress = identity`.
-        let compressed_proof = proof.clone().compress(&data.common);
-        let decompressed_compressed_proof = compressed_proof.clone().decompress(&data.common);
+        let compressed_proof = proof.clone().compress(&data.common)?;
+        let decompressed_compressed_proof = compressed_proof.clone().decompress(&data.common)?;
         assert_eq!(proof, decompressed_compressed_proof);
 
         verify(proof, &data.verifier_only, &data.common)?;
@@ -234,7 +234,7 @@ mod tests {
     }
 
     // Set the targets in a `ProofTarget` to their corresponding values in a `Proof`.
-    fn set_proof_target<F: Extendable<D>, const D: usize>(
+    fn set_proof_target<F: RichField + Extendable<D>, const D: usize>(
         proof: &ProofWithPublicInputs<F, D>,
         pt: &ProofWithPublicInputsTarget<D>,
         pw: &mut PartialWitness<F>,
@@ -480,7 +480,7 @@ mod tests {
         let data = builder.build();
         let recursive_proof = data.prove(pw)?;
         let now = std::time::Instant::now();
-        let compressed_recursive_proof = recursive_proof.clone().compress(&data.common);
+        let compressed_recursive_proof = recursive_proof.clone().compress(&data.common)?;
         info!("{:.4} to compress proof", now.elapsed().as_secs_f64());
         let proof_bytes = serde_cbor::to_vec(&recursive_proof).unwrap();
         info!("Proof length: {} bytes", proof_bytes.len());
@@ -3,8 +3,6 @@ use anyhow::{ensure, Result};
 use crate::field::extension_field::Extendable;
 use crate::field::field_types::{Field, RichField};
 use crate::fri::verifier::verify_fri_proof;
-use crate::hash::hashing::hash_n_to_hash;
-use crate::iop::challenger::Challenger;
 use crate::plonk::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData};
 use crate::plonk::plonk_common::reduce_with_powers;
 use crate::plonk::proof::ProofWithPublicInputs;
@@ -18,32 +16,19 @@ pub(crate) fn verify<F: RichField + Extendable<D>, const D: usize>(
 ) -> Result<()> {
     // Decompress the proof if needed.
     if proof_with_pis.is_compressed() {
-        proof_with_pis = proof_with_pis.decompress(common_data);
+        proof_with_pis = proof_with_pis.decompress(common_data)?;
     }
 
+    let public_inputs_hash = &proof_with_pis.get_public_inputs_hash();
+
+    let challenges = proof_with_pis.get_challenges(common_data)?;
+
     let ProofWithPublicInputs {
         proof,
         public_inputs,
     } = proof_with_pis;
 
     let config = &common_data.config;
-    let num_challenges = config.num_challenges;
-
-    let public_inputs_hash = &hash_n_to_hash(public_inputs, true);
-
-    let mut challenger = Challenger::new();
-
-    // Observe the instance.
-    challenger.observe_hash(&common_data.circuit_digest);
-    challenger.observe_hash(&public_inputs_hash);
-
-    challenger.observe_cap(&proof.wires_cap);
-    let betas = challenger.get_n_challenges(num_challenges);
-    let gammas = challenger.get_n_challenges(num_challenges);
-
-    challenger.observe_cap(&proof.plonk_zs_partial_products_cap);
-    let alphas = challenger.get_n_challenges(num_challenges);
-
-    challenger.observe_cap(&proof.quotient_polys_cap);
-    let zeta = challenger.get_extension_challenge();
 
     let local_constants = &proof.openings.constants;
     let local_wires = &proof.openings.wires;
@@ -60,20 +45,22 @@ pub(crate) fn verify<F: RichField + Extendable<D>, const D: usize>(
     // Evaluate the vanishing polynomial at our challenge point, zeta.
     let vanishing_polys_zeta = eval_vanishing_poly(
         common_data,
-        zeta,
+        challenges.plonk_zeta,
         vars,
         local_zs,
         next_zs,
         partial_products,
         s_sigmas,
-        &betas,
-        &gammas,
-        &alphas,
+        &challenges.plonk_betas,
+        &challenges.plonk_gammas,
+        &challenges.plonk_alphas,
     );
 
     // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
     let quotient_polys_zeta = &proof.openings.quotient_polys;
-    let zeta_pow_deg = zeta.exp_power_of_2(common_data.degree_bits);
+    let zeta_pow_deg = challenges
+        .plonk_zeta
+        .exp_power_of_2(common_data.degree_bits);
     let z_h_zeta = zeta_pow_deg - F::Extension::ONE;
     // `quotient_polys_zeta` holds `num_challenges * quotient_degree_factor` evaluations.
     // Each chunk of `quotient_degree_factor` holds the evaluations of `t_0(zeta),...,t_{quotient_degree_factor-1}(zeta)`
@@ -96,10 +83,9 @@ pub(crate) fn verify<F: RichField + Extendable<D>, const D: usize>(
 
     verify_fri_proof(
         &proof.openings,
-        zeta,
+        &challenges,
         merkle_caps,
         &proof.opening_proof,
-        &mut challenger,
         common_data,
     )?;
 