Merge pull request #279 from mir-protocol/duplicate_indices

Remove duplicate query indices in FRI proofs
wborgeaud 2021-10-04 10:40:18 +02:00 committed by GitHub
commit d9634e075e
13 changed files with 870 additions and 97 deletions
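
The change in a sentence: FRI query indices are sampled independently, so the same index can be drawn more than once, and the uncompressed proof stores a full query-round proof for each draw. The new `CompressedFriQueryRounds` keeps the index list (needed to replay the Fiat-Shamir transcript) but keys the per-round data by index in `HashMap`s, so a duplicated index is stored only once. A minimal sketch of that idea, using a hypothetical `dedup_by_index` helper rather than the actual plonky2 types:

use std::collections::HashMap;

// Hypothetical helper illustrating the deduplication idea: keep the index list
// as-is, but key the heavy per-round payloads by index so that a repeated
// index costs nothing extra.
fn dedup_by_index<T>(indices: &[usize], payloads: Vec<T>) -> HashMap<usize, T> {
    let mut map = HashMap::new();
    for (&index, payload) in indices.iter().zip(payloads) {
        // Duplicate indices carry identical payloads, so keeping the first is enough.
        map.entry(index).or_insert(payload);
    }
    map
}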

View File

@@ -17,3 +17,9 @@ pub struct FriConfig {
/// Number of query rounds to perform.
pub num_query_rounds: usize,
}
impl FriConfig {
pub(crate) fn total_arities(&self) -> usize {
self.reduction_arity_bits.iter().sum()
}
}

View File

@@ -1,3 +1,5 @@
use std::collections::HashMap;
use itertools::izip;
use serde::{Deserialize, Serialize};
@@ -77,6 +79,18 @@ pub struct FriQueryRoundTarget<const D: usize> {
pub steps: Vec<FriQueryStepTarget<D>>,
}
/// Compressed proof of the FRI query rounds.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct CompressedFriQueryRounds<F: Extendable<D>, const D: usize> {
/// Query indices.
pub indices: Vec<usize>,
/// Map from query indices `i` to the `FriInitialTreeProof` for the `i`th leaf.
pub initial_trees_proofs: HashMap<usize, FriInitialTreeProof<F>>,
/// For each FRI query step, a map from indices `i` to the `FriQueryStep` for the `i`th leaf.
pub steps: Vec<HashMap<usize, FriQueryStep<F, D>>>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct FriProof<F: Extendable<D>, const D: usize> {
@@ -88,8 +102,6 @@ pub struct FriProof<F: Extendable<D>, const D: usize> {
pub final_poly: PolynomialCoeffs<F::Extension>,
/// Witness showing that the prover did PoW.
pub pow_witness: F,
/// Flag set to true if path compression has been applied to the proof's Merkle proofs.
pub is_compressed: bool,
}
pub struct FriProofTarget<const D: usize> {
@@ -99,15 +111,29 @@ pub struct FriProofTarget<const D: usize> {
pub pow_witness: Target,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct CompressedFriProof<F: Extendable<D>, const D: usize> {
/// A Merkle cap for each reduced polynomial in the commit phase.
pub commit_phase_merkle_caps: Vec<MerkleCap<F>>,
/// Compressed query rounds proof.
pub query_round_proofs: CompressedFriQueryRounds<F, D>,
/// The final polynomial in coefficient form.
pub final_poly: PolynomialCoeffs<F::Extension>,
/// Witness showing that the prover did PoW.
pub pow_witness: F,
}
impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
/// Compress all the Merkle paths in the FRI proof.
pub fn compress(self, indices: &[usize], common_data: &CommonCircuitData<F, D>) -> Self {
if self.is_compressed {
panic!("Proof is already compressed.");
}
/// Compress all the Merkle paths in the FRI proof and remove duplicate indices.
pub fn compress(
self,
indices: &[usize],
common_data: &CommonCircuitData<F, D>,
) -> CompressedFriProof<F, D> {
let FriProof {
commit_phase_merkle_caps,
mut query_round_proofs,
query_round_proofs,
final_poly,
pow_witness,
..
@@ -157,9 +183,15 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
.map(|(is, ps)| compress_merkle_proofs(cap_height, is, &ps))
.collect::<Vec<_>>();
let mut compressed_query_proofs = CompressedFriQueryRounds {
indices: indices.to_vec(),
initial_trees_proofs: HashMap::new(),
steps: vec![HashMap::new(); num_reductions],
};
// Replace the query round proofs with the compressed versions.
for (i, qrp) in query_round_proofs.iter_mut().enumerate() {
qrp.initial_trees_proof = FriInitialTreeProof {
for (i, mut index) in indices.iter().copied().enumerate() {
let initial_proof = FriInitialTreeProof {
evals_proofs: (0..num_initial_trees)
.map(|j| {
(
@@ -169,31 +201,41 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
})
.collect(),
};
qrp.steps = (0..num_reductions)
.map(|j| FriQueryStep {
compressed_query_proofs
.initial_trees_proofs
.entry(index)
.or_insert(initial_proof);
for j in 0..num_reductions {
index >>= reduction_arity_bits[j];
let query_step = FriQueryStep {
evals: steps_evals[j][i].clone(),
merkle_proof: steps_proofs[j][i].clone(),
})
.collect();
};
compressed_query_proofs.steps[j]
.entry(index)
.or_insert(query_step);
}
}
FriProof {
CompressedFriProof {
commit_phase_merkle_caps,
query_round_proofs,
query_round_proofs: compressed_query_proofs,
final_poly,
pow_witness,
is_compressed: true,
}
}
}
/// Decompress all the Merkle paths in the FRI proof.
pub fn decompress(self, indices: &[usize], common_data: &CommonCircuitData<F, D>) -> Self {
if !self.is_compressed {
panic!("Proof is not compressed.");
}
let FriProof {
impl<F: RichField + Extendable<D>, const D: usize> CompressedFriProof<F, D> {
/// Decompress all the Merkle paths in the FRI proof and reinsert duplicate indices.
pub fn decompress(
self,
indices: &[usize],
common_data: &CommonCircuitData<F, D>,
) -> FriProof<F, D> {
let CompressedFriProof {
commit_phase_merkle_caps,
mut query_round_proofs,
query_round_proofs,
final_poly,
pow_witness,
..
@@ -201,7 +243,13 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
let cap_height = common_data.config.cap_height;
let reduction_arity_bits = &common_data.config.fri_config.reduction_arity_bits;
let num_reductions = reduction_arity_bits.len();
let num_initial_trees = query_round_proofs[0].initial_trees_proof.evals_proofs.len();
let num_initial_trees = query_round_proofs
.initial_trees_proofs
.values()
.next()
.unwrap()
.evals_proofs
.len();
// "Transpose" the query round proofs, so that information for each Merkle tree is collected together.
let mut initial_trees_indices = vec![vec![]; num_initial_trees];
@@ -219,11 +267,8 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
})
.collect::<Vec<_>>();
for (mut index, qrp) in indices.iter().cloned().zip(&query_round_proofs) {
let FriQueryRound {
initial_trees_proof,
steps,
} = qrp.clone();
for mut index in indices.iter().copied() {
let initial_trees_proof = query_round_proofs.initial_trees_proofs[&index].clone();
for (i, (leaves_data, proof)) in
initial_trees_proof.evals_proofs.into_iter().enumerate()
{
@@ -231,8 +276,9 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
initial_trees_leaves[i].push(leaves_data);
initial_trees_proofs[i].push(proof);
}
for (i, query_step) in steps.into_iter().enumerate() {
for i in 0..num_reductions {
index >>= reduction_arity_bits[i];
let query_step = query_round_proofs.steps[i][&index].clone();
steps_indices[i].push(index);
steps_evals[i].push(flatten(&query_step.evals));
steps_proofs[i].push(query_step.merkle_proof);
@@ -245,15 +291,15 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
&initial_trees_indices,
initial_trees_proofs
)
.map(|(ls, is, ps)| decompress_merkle_proofs(&ls, is, &ps, height, cap_height))
.map(|(ls, is, ps)| decompress_merkle_proofs(ls, is, &ps, height, cap_height))
.collect::<Vec<_>>();
let steps_proofs = izip!(&steps_evals, &steps_indices, steps_proofs, heights)
.map(|(ls, is, ps, h)| decompress_merkle_proofs(ls, is, &ps, h, cap_height))
.collect::<Vec<_>>();
// Replace the query round proofs with the decompressed versions.
for (i, qrp) in query_round_proofs.iter_mut().enumerate() {
qrp.initial_trees_proof = FriInitialTreeProof {
let mut decompressed_query_proofs = Vec::with_capacity(num_reductions);
for i in 0..indices.len() {
let initial_trees_proof = FriInitialTreeProof {
evals_proofs: (0..num_initial_trees)
.map(|j| {
(
@@ -263,20 +309,23 @@ impl<F: RichField + Extendable<D>, const D: usize> FriProof<F, D> {
})
.collect(),
};
qrp.steps = (0..num_reductions)
let steps = (0..num_reductions)
.map(|j| FriQueryStep {
evals: unflatten(&steps_evals[j][i]),
merkle_proof: steps_proofs[j][i].clone(),
})
.collect();
decompressed_query_proofs.push(FriQueryRound {
initial_trees_proof,
steps,
})
}
FriProof {
commit_phase_merkle_caps,
query_round_proofs,
query_round_proofs: decompressed_query_proofs,
final_poly,
pow_witness,
is_compressed: false,
}
}
}
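
One detail of `compress`/`decompress` above worth spelling out: the map for FRI step `j` is keyed by the folded index, i.e. the query index shifted right by the arities of steps `0..=j`. A small sketch of that folding rule, with hypothetical arity values:

// Each FRI reduction step coalesces 2^arity_bits evaluations into one leaf of
// the next layer, so the query index for that layer drops the low arity bits.
fn folded_indices(mut index: usize, reduction_arity_bits: &[usize]) -> Vec<usize> {
    reduction_arity_bits
        .iter()
        .map(|&bits| {
            index >>= bits;
            index
        })
        .collect()
}

// With hypothetical arities [3, 2, 1], a query at index 182 (0b1011_0110) maps
// to layer indices [22, 5, 2]; distinct queries that share high bits collide in
// later layers, which is exactly what the per-step maps deduplicate.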

View File

@@ -63,7 +63,6 @@ pub fn fri_proof<F: RichField + Extendable<D>, const D: usize>(
query_round_proofs,
final_poly: final_coeffs,
pow_witness,
is_compressed: false,
}
}

View File

@@ -81,15 +81,14 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
common_data: &CommonCircuitData<F, D>,
) {
let config = &common_data.config;
let total_arities = config.fri_config.reduction_arity_bits.iter().sum::<usize>();
debug_assert_eq!(
common_data.degree_bits,
log2_strict(proof.final_poly.len()) + total_arities,
common_data.final_poly_len(),
proof.final_poly.len(),
"Final polynomial has wrong degree."
);
// Size of the LDE domain.
let n = proof.final_poly.len() << (total_arities + config.rate_bits);
let n = common_data.lde_size();
challenger.observe_opening_set(os);

View File

@@ -64,14 +64,13 @@ pub(crate) fn verify_fri_proof<F: RichField + Extendable<D>, const D: usize>(
common_data: &CommonCircuitData<F, D>,
) -> Result<()> {
let config = &common_data.config;
let total_arities = config.fri_config.reduction_arity_bits.iter().sum::<usize>();
ensure!(
common_data.degree_bits == log2_strict(proof.final_poly.len()) + total_arities,
common_data.final_poly_len() == proof.final_poly.len(),
"Final polynomial has wrong degree."
);
// Size of the LDE domain.
let n = proof.final_poly.len() << (total_arities + config.rate_bits);
let n = common_data.lde_size();
// Check PoW.
fri_verify_proof_of_work(challenges.fri_pow_response, &config.fri_config)?;

View File

@@ -579,12 +579,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
info!("Degree after blinding & padding: {}", degree);
let degree_bits = log2_strict(degree);
assert!(
self.config
.fri_config
.reduction_arity_bits
.iter()
.sum::<usize>()
<= degree_bits,
self.config.fri_config.total_arities() <= degree_bits,
"FRI total reduction arity is too large."
);

View File

@@ -252,6 +252,10 @@ impl<F: RichField + Extendable<D>, const D: usize> CommonCircuitData<F, D> {
pub fn partial_products_range(&self) -> RangeFrom<usize> {
self.config.num_challenges..
}
pub fn final_poly_len(&self) -> usize {
1 << (self.degree_bits - self.config.fri_config.total_arities())
}
}
/// The `Target` version of `VerifierCircuitData`, for use inside recursive circuits. Note that this
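
The new `final_poly_len` helper, together with `lde_size()`, replaces the inline arithmetic the FRI verifiers used to do with `total_arities`. A sketch of the bookkeeping, with hypothetical standalone values instead of a real `CommonCircuitData`:

fn main() {
    // Hypothetical parameters (not taken from any particular circuit).
    let degree_bits = 12usize;              // log2 of the padded circuit degree
    let rate_bits = 3usize;                 // FRI blowup exponent
    let reduction_arity_bits = [3usize, 2, 1];

    // final_poly_len as computed by CommonCircuitData::final_poly_len above.
    let total_arities: usize = reduction_arity_bits.iter().sum();
    let final_poly_len = 1usize << (degree_bits - total_arities);

    // The LDE domain size depends only on degree_bits and rate_bits, so the old
    // inline formula `final_poly.len() << (total_arities + rate_bits)` equals
    // `lde_size()` no matter how the arities are split.
    assert_eq!(
        final_poly_len << (total_arities + rate_bits),
        1usize << (degree_bits + rate_bits)
    );
}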

View File

@@ -3,7 +3,9 @@ use crate::field::field_types::RichField;
use crate::hash::hashing::hash_n_to_1;
use crate::iop::challenger::Challenger;
use crate::plonk::circuit_data::CommonCircuitData;
use crate::plonk::proof::{ProofChallenges, ProofWithPublicInputs};
use crate::plonk::proof::{
CompressedProofWithPublicInputs, ProofChallenges, ProofWithPublicInputs,
};
impl<F: RichField + Extendable<D>, const D: usize> ProofWithPublicInputs<F, D> {
pub(crate) fn fri_query_indices(
@@ -84,3 +86,83 @@ impl<F: RichField + Extendable<D>, const D: usize> ProofWithPublicInputs<F, D> {
})
}
}
impl<F: RichField + Extendable<D>, const D: usize> CompressedProofWithPublicInputs<F, D> {
pub(crate) fn fri_query_indices(
&self,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<Vec<usize>> {
Ok(self.get_challenges(common_data)?.fri_query_indices)
}
pub(crate) fn get_challenges(
&self,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<ProofChallenges<F, D>> {
let config = &common_data.config;
let num_challenges = config.num_challenges;
let num_fri_queries = config.fri_config.num_query_rounds;
let lde_size = common_data.lde_size();
let mut challenger = Challenger::new();
// Observe the instance.
challenger.observe_hash(&common_data.circuit_digest);
challenger.observe_hash(&self.get_public_inputs_hash());
challenger.observe_cap(&self.proof.wires_cap);
let plonk_betas = challenger.get_n_challenges(num_challenges);
let plonk_gammas = challenger.get_n_challenges(num_challenges);
challenger.observe_cap(&self.proof.plonk_zs_partial_products_cap);
let plonk_alphas = challenger.get_n_challenges(num_challenges);
challenger.observe_cap(&self.proof.quotient_polys_cap);
let plonk_zeta = challenger.get_extension_challenge();
challenger.observe_opening_set(&self.proof.openings);
// Scaling factor to combine polynomials.
let fri_alpha = challenger.get_extension_challenge();
// Recover the random betas used in the FRI reductions.
let fri_betas = self
.proof
.opening_proof
.commit_phase_merkle_caps
.iter()
.map(|cap| {
challenger.observe_cap(cap);
challenger.get_extension_challenge()
})
.collect();
challenger.observe_extension_elements(&self.proof.opening_proof.final_poly.coeffs);
let fri_pow_response = hash_n_to_1(
challenger
.get_hash()
.elements
.iter()
.copied()
.chain(Some(self.proof.opening_proof.pow_witness))
.collect(),
false,
);
let fri_query_indices = (0..num_fri_queries)
.map(|_| challenger.get_challenge().to_canonical_u64() as usize % lde_size)
.collect();
Ok(ProofChallenges {
plonk_betas,
plonk_gammas,
plonk_alphas,
plonk_zeta,
fri_alpha,
fri_betas,
fri_pow_response,
fri_query_indices,
})
}
}
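
Since each of the `num_fri_queries` indices above is an independent draw of a challenge reduced mod `lde_size`, repeats become likely once the LDE domain is small relative to the number of queries. A rough estimate with illustrative (not configured) parameters:

fn main() {
    // By linearity of expectation, each pair of queries collides with
    // probability 1/n, so the expected number of duplicate pairs among q
    // uniform draws from a domain of size n is q*(q-1)/(2*n).
    let q = 84.0_f64;            // hypothetical num_query_rounds
    let n = (1u64 << 12) as f64; // hypothetical lde_size for a small circuit
    let expected_duplicate_pairs = q * (q - 1.0) / (2.0 * n);
    // ~0.85 here: roughly one duplicated query round per proof on average,
    // which the compressed encoding gets to drop.
    println!("expected duplicate pairs: {expected_duplicate_pairs:.2}");
}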

View File

@@ -5,12 +5,13 @@ use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::field_types::RichField;
use crate::fri::commitment::PolynomialBatchCommitment;
use crate::fri::proof::{FriProof, FriProofTarget};
use crate::fri::proof::{CompressedFriProof, FriProof, FriProofTarget};
use crate::hash::hash_types::{HashOut, MerkleCapTarget};
use crate::hash::hashing::hash_n_to_hash;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::Target;
use crate::plonk::circuit_data::CommonCircuitData;
use crate::util::serialization::Buffer;
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
@@ -36,21 +37,27 @@ pub struct ProofTarget<const D: usize> {
}
impl<F: RichField + Extendable<D>, const D: usize> Proof<F, D> {
/// Returns `true` iff the opening proof is compressed.
pub fn is_compressed(&self) -> bool {
self.opening_proof.is_compressed
}
/// Compress the proof.
pub fn compress(
self,
indices: &[usize],
common_data: &CommonCircuitData<F, D>,
) -> CompressedProof<F, D> {
let Proof {
wires_cap,
plonk_zs_partial_products_cap,
quotient_polys_cap,
openings,
opening_proof,
} = self;
/// Compress the opening proof.
pub fn compress(mut self, indices: &[usize], common_data: &CommonCircuitData<F, D>) -> Self {
self.opening_proof = self.opening_proof.compress(&indices, common_data);
self
}
/// Decompress the opening proof.
pub fn decompress(mut self, indices: &[usize], common_data: &CommonCircuitData<F, D>) -> Self {
self.opening_proof = self.opening_proof.decompress(&indices, common_data);
self
CompressedProof {
wires_cap,
plonk_zs_partial_products_cap,
quotient_polys_cap,
openings,
opening_proof: opening_proof.compress(indices, common_data),
}
}
}
@@ -62,28 +69,116 @@ pub struct ProofWithPublicInputs<F: RichField + Extendable<D>, const D: usize> {
}
impl<F: RichField + Extendable<D>, const D: usize> ProofWithPublicInputs<F, D> {
/// Returns `true` iff the opening proof is compressed.
pub fn is_compressed(&self) -> bool {
self.proof.is_compressed()
}
/// Compress the opening proof.
pub fn compress(mut self, common_data: &CommonCircuitData<F, D>) -> anyhow::Result<Self> {
pub fn compress(
self,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<CompressedProofWithPublicInputs<F, D>> {
let indices = self.fri_query_indices(common_data)?;
self.proof = self.proof.compress(&indices, common_data);
Ok(self)
}
/// Decompress the opening proof.
pub fn decompress(mut self, common_data: &CommonCircuitData<F, D>) -> anyhow::Result<Self> {
let indices = self.fri_query_indices(common_data)?;
self.proof = self.proof.decompress(&indices, common_data);
Ok(self)
let compressed_proof = self.proof.compress(&indices, common_data);
Ok(CompressedProofWithPublicInputs {
public_inputs: self.public_inputs,
proof: compressed_proof,
})
}
pub(crate) fn get_public_inputs_hash(&self) -> HashOut<F> {
hash_n_to_hash(self.public_inputs.clone(), true)
}
pub fn to_bytes(&self) -> anyhow::Result<Vec<u8>> {
let mut buffer = Buffer::new(Vec::new());
buffer.write_proof_with_public_inputs(self)?;
Ok(buffer.bytes())
}
pub fn from_bytes(
bytes: Vec<u8>,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<Self> {
let mut buffer = Buffer::new(bytes);
let proof = buffer.read_proof_with_public_inputs(common_data)?;
Ok(proof)
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct CompressedProof<F: Extendable<D>, const D: usize> {
/// Merkle cap of LDEs of wire values.
pub wires_cap: MerkleCap<F>,
/// Merkle cap of LDEs of Z, in the context of Plonk's permutation argument.
pub plonk_zs_partial_products_cap: MerkleCap<F>,
/// Merkle cap of LDEs of the quotient polynomial components.
pub quotient_polys_cap: MerkleCap<F>,
/// Purported values of each polynomial at the challenge point.
pub openings: OpeningSet<F, D>,
/// A compressed batch FRI argument for all openings.
pub opening_proof: CompressedFriProof<F, D>,
}
impl<F: RichField + Extendable<D>, const D: usize> CompressedProof<F, D> {
/// Decompress the proof.
pub fn decompress(
self,
indices: &[usize],
common_data: &CommonCircuitData<F, D>,
) -> Proof<F, D> {
let CompressedProof {
wires_cap,
plonk_zs_partial_products_cap,
quotient_polys_cap,
openings,
opening_proof,
} = self;
Proof {
wires_cap,
plonk_zs_partial_products_cap,
quotient_polys_cap,
openings,
opening_proof: opening_proof.decompress(indices, common_data),
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(bound = "")]
pub struct CompressedProofWithPublicInputs<F: RichField + Extendable<D>, const D: usize> {
pub proof: CompressedProof<F, D>,
pub public_inputs: Vec<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> CompressedProofWithPublicInputs<F, D> {
pub fn decompress(
self,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<ProofWithPublicInputs<F, D>> {
let indices = self.fri_query_indices(common_data)?;
let compressed_proof = self.proof.decompress(&indices, common_data);
Ok(ProofWithPublicInputs {
public_inputs: self.public_inputs,
proof: compressed_proof,
})
}
pub(crate) fn get_public_inputs_hash(&self) -> HashOut<F> {
hash_n_to_hash(self.public_inputs.clone(), true)
}
pub fn to_bytes(&self) -> anyhow::Result<Vec<u8>> {
let mut buffer = Buffer::new(Vec::new());
buffer.write_compressed_proof_with_public_inputs(self)?;
Ok(buffer.bytes())
}
pub fn from_bytes(
bytes: Vec<u8>,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<Self> {
let mut buffer = Buffer::new(bytes);
let proof = buffer.read_compressed_proof_with_public_inputs(common_data)?;
Ok(proof)
}
}
pub(crate) struct ProofChallenges<F: RichField + Extendable<D>, const D: usize> {
@@ -188,7 +283,8 @@ mod tests {
type F = CrandallField;
const D: usize = 4;
let config = CircuitConfig::large_config();
let mut config = CircuitConfig::large_config();
config.fri_config.num_query_rounds = 50;
let pw = PartialWitness::new();
let mut builder = CircuitBuilder::<F, D>::new(config);
@@ -207,10 +303,9 @@
// Verify that `decompress ∘ compress = identity`.
let compressed_proof = proof.clone().compress(&data.common)?;
let decompressed_compressed_proof = compressed_proof.clone().decompress(&data.common)?;
let decompressed_compressed_proof = compressed_proof.decompress(&data.common)?;
assert_eq!(proof, decompressed_compressed_proof);
verify(proof, &data.verifier_only, &data.common)?;
verify(compressed_proof, &data.verifier_only, &data.common)
verify(proof, &data.verifier_only, &data.common)
}
}

View File

@@ -135,7 +135,10 @@ mod tests {
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::hash::merkle_proofs::MerkleProofTarget;
use crate::iop::witness::{PartialWitness, Witness};
use crate::plonk::proof::{OpeningSetTarget, Proof, ProofTarget, ProofWithPublicInputs};
use crate::plonk::proof::{
CompressedProofWithPublicInputs, OpeningSetTarget, Proof, ProofTarget,
ProofWithPublicInputs,
};
use crate::plonk::verifier::verify;
use crate::util::log2_strict;
@@ -479,16 +482,25 @@ mod tests {
builder.print_gate_counts(0);
let data = builder.build();
let recursive_proof = data.prove(pw)?;
let proof_bytes = recursive_proof.to_bytes()?;
info!("Proof length: {} bytes", proof_bytes.len());
let proof_from_bytes = ProofWithPublicInputs::from_bytes(proof_bytes, &data.common)?;
assert_eq!(recursive_proof, proof_from_bytes);
let now = std::time::Instant::now();
let compressed_recursive_proof = recursive_proof.clone().compress(&data.common)?;
let decompressed_compressed_proof = compressed_recursive_proof
.clone()
.decompress(&data.common)?;
assert_eq!(recursive_proof, decompressed_compressed_proof);
info!("{:.4} to compress proof", now.elapsed().as_secs_f64());
let proof_bytes = serde_cbor::to_vec(&recursive_proof).unwrap();
info!("Proof length: {} bytes", proof_bytes.len());
let compressed_proof_bytes = serde_cbor::to_vec(&compressed_recursive_proof).unwrap();
let compressed_proof_bytes = compressed_recursive_proof.to_bytes()?;
info!(
"Compressed proof length: {} bytes",
compressed_proof_bytes.len()
);
let compressed_proof_from_bytes =
CompressedProofWithPublicInputs::from_bytes(compressed_proof_bytes, &data.common)?;
assert_eq!(compressed_recursive_proof, compressed_proof_from_bytes);
verify(recursive_proof, &data.verifier_only, &data.common)
}
}

View File

@@ -14,11 +14,6 @@ pub(crate) fn verify<F: RichField + Extendable<D>, const D: usize>(
verifier_data: &VerifierOnlyCircuitData<F>,
common_data: &CommonCircuitData<F, D>,
) -> Result<()> {
// Decompress the proof if needed.
if proof_with_pis.is_compressed() {
proof_with_pis = proof_with_pis.decompress(common_data)?;
}
let public_inputs_hash = &proof_with_pis.get_public_inputs_hash();
let challenges = proof_with_pis.get_challenges(common_data)?;

View File

@@ -6,6 +6,7 @@ pub(crate) mod context_tree;
pub(crate) mod marking;
pub(crate) mod partial_products;
pub mod reducing;
pub mod serialization;
pub(crate) mod timing;
pub(crate) fn bits_u64(n: u64) -> usize {

src/util/serialization.rs (new file, 537 lines)
View File

@@ -0,0 +1,537 @@
use std::collections::HashMap;
use std::convert::TryInto;
use std::io::Cursor;
use std::io::{Read, Result, Write};
use std::iter::FromIterator;
use crate::field::extension_field::{Extendable, FieldExtension};
use crate::field::field_types::{PrimeField, RichField};
use crate::fri::proof::{
CompressedFriProof, CompressedFriQueryRounds, FriInitialTreeProof, FriProof, FriQueryRound,
FriQueryStep,
};
use crate::hash::hash_types::HashOut;
use crate::hash::merkle_proofs::MerkleProof;
use crate::hash::merkle_tree::MerkleCap;
use crate::plonk::circuit_data::CommonCircuitData;
use crate::plonk::proof::{
CompressedProof, CompressedProofWithPublicInputs, OpeningSet, Proof, ProofWithPublicInputs,
};
use crate::polynomial::polynomial::PolynomialCoeffs;
#[derive(Debug)]
pub struct Buffer(Cursor<Vec<u8>>);
impl Buffer {
pub fn new(buffer: Vec<u8>) -> Self {
Self(Cursor::new(buffer))
}
pub fn len(&self) -> usize {
self.0.get_ref().len()
}
pub fn bytes(self) -> Vec<u8> {
self.0.into_inner()
}
fn write_u8(&mut self, x: u8) -> Result<()> {
self.0.write_all(&[x])
}
fn read_u8(&mut self) -> Result<u8> {
let mut buf = [0; std::mem::size_of::<u8>()];
self.0.read_exact(&mut buf)?;
Ok(buf[0])
}
fn write_u32(&mut self, x: u32) -> Result<()> {
self.0.write_all(&x.to_le_bytes())
}
fn read_u32(&mut self) -> Result<u32> {
let mut buf = [0; std::mem::size_of::<u32>()];
self.0.read_exact(&mut buf)?;
Ok(u32::from_le_bytes(buf))
}
fn write_field<F: PrimeField>(&mut self, x: F) -> Result<()> {
self.0.write_all(&x.to_canonical_u64().to_le_bytes())
}
fn read_field<F: PrimeField>(&mut self) -> Result<F> {
let mut buf = [0; std::mem::size_of::<u64>()];
self.0.read_exact(&mut buf)?;
Ok(F::from_canonical_u64(u64::from_le_bytes(
buf.try_into().unwrap(),
)))
}
fn write_field_ext<F: Extendable<D>, const D: usize>(&mut self, x: F::Extension) -> Result<()> {
for &a in &x.to_basefield_array() {
self.write_field(a)?;
}
Ok(())
}
fn read_field_ext<F: Extendable<D>, const D: usize>(&mut self) -> Result<F::Extension> {
let mut arr = [F::ZERO; D];
for a in arr.iter_mut() {
*a = self.read_field()?;
}
Ok(<F::Extension as FieldExtension<D>>::from_basefield_array(
arr,
))
}
fn write_hash<F: PrimeField>(&mut self, h: HashOut<F>) -> Result<()> {
for &a in &h.elements {
self.write_field(a)?;
}
Ok(())
}
fn read_hash<F: PrimeField>(&mut self) -> Result<HashOut<F>> {
let mut elements = [F::ZERO; 4];
for a in elements.iter_mut() {
*a = self.read_field()?;
}
Ok(HashOut { elements })
}
fn write_merkle_cap<F: PrimeField>(&mut self, cap: &MerkleCap<F>) -> Result<()> {
for &a in &cap.0 {
self.write_hash(a)?;
}
Ok(())
}
fn read_merkle_cap<F: PrimeField>(&mut self, cap_height: usize) -> Result<MerkleCap<F>> {
let cap_length = 1 << cap_height;
Ok(MerkleCap(
(0..cap_length)
.map(|_| self.read_hash())
.collect::<Result<Vec<_>>>()?,
))
}
fn write_field_vec<F: PrimeField>(&mut self, v: &[F]) -> Result<()> {
for &a in v {
self.write_field(a)?;
}
Ok(())
}
fn read_field_vec<F: PrimeField>(&mut self, length: usize) -> Result<Vec<F>> {
(0..length)
.map(|_| self.read_field())
.collect::<Result<Vec<_>>>()
}
fn write_field_ext_vec<F: Extendable<D>, const D: usize>(
&mut self,
v: &[F::Extension],
) -> Result<()> {
for &a in v {
self.write_field_ext::<F, D>(a)?;
}
Ok(())
}
fn read_field_ext_vec<F: Extendable<D>, const D: usize>(
&mut self,
length: usize,
) -> Result<Vec<F::Extension>> {
(0..length)
.map(|_| self.read_field_ext::<F, D>())
.collect::<Result<Vec<_>>>()
}
fn write_opening_set<F: Extendable<D>, const D: usize>(
&mut self,
os: &OpeningSet<F, D>,
) -> Result<()> {
self.write_field_ext_vec::<F, D>(&os.constants)?;
self.write_field_ext_vec::<F, D>(&os.plonk_sigmas)?;
self.write_field_ext_vec::<F, D>(&os.wires)?;
self.write_field_ext_vec::<F, D>(&os.plonk_zs)?;
self.write_field_ext_vec::<F, D>(&os.plonk_zs_right)?;
self.write_field_ext_vec::<F, D>(&os.partial_products)?;
self.write_field_ext_vec::<F, D>(&os.quotient_polys)
}
fn read_opening_set<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<OpeningSet<F, D>> {
let config = &common_data.config;
let constants = self.read_field_ext_vec::<F, D>(common_data.num_constants)?;
let plonk_sigmas = self.read_field_ext_vec::<F, D>(config.num_routed_wires)?;
let wires = self.read_field_ext_vec::<F, D>(config.num_wires)?;
let plonk_zs = self.read_field_ext_vec::<F, D>(config.num_challenges)?;
let plonk_zs_right = self.read_field_ext_vec::<F, D>(config.num_challenges)?;
let partial_products = self.read_field_ext_vec::<F, D>(
common_data.num_partial_products.0 * config.num_challenges,
)?;
let quotient_polys = self.read_field_ext_vec::<F, D>(
common_data.quotient_degree_factor * config.num_challenges,
)?;
Ok(OpeningSet {
constants,
plonk_sigmas,
wires,
plonk_zs,
plonk_zs_right,
partial_products,
quotient_polys,
})
}
fn write_merkle_proof<F: PrimeField>(&mut self, p: &MerkleProof<F>) -> Result<()> {
let length = p.siblings.len();
self.write_u8(
length
.try_into()
.expect("Merkle proof length must fit in u8."),
)?;
for &h in &p.siblings {
self.write_hash(h)?;
}
Ok(())
}
fn read_merkle_proof<F: PrimeField>(&mut self) -> Result<MerkleProof<F>> {
let length = self.read_u8()?;
Ok(MerkleProof {
siblings: (0..length)
.map(|_| self.read_hash())
.collect::<Result<Vec<_>>>()?,
})
}
fn write_fri_initial_proof<F: PrimeField>(
&mut self,
fitp: &FriInitialTreeProof<F>,
) -> Result<()> {
for (v, p) in &fitp.evals_proofs {
self.write_field_vec(v)?;
self.write_merkle_proof(p)?;
}
Ok(())
}
fn read_fri_initial_proof<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<FriInitialTreeProof<F>> {
let config = &common_data.config;
let mut evals_proofs = Vec::with_capacity(4);
let constants_sigmas_v =
self.read_field_vec(common_data.num_constants + config.num_routed_wires)?;
let constants_sigmas_p = self.read_merkle_proof()?;
evals_proofs.push((constants_sigmas_v, constants_sigmas_p));
let wires_v = self.read_field_vec(config.num_wires)?;
let wires_p = self.read_merkle_proof()?;
evals_proofs.push((wires_v, wires_p));
let zs_partial_v =
self.read_field_vec(config.num_challenges * (1 + common_data.num_partial_products.0))?;
let zs_partial_p = self.read_merkle_proof()?;
evals_proofs.push((zs_partial_v, zs_partial_p));
let quotient_v =
self.read_field_vec(config.num_challenges * common_data.quotient_degree_factor)?;
let quotient_p = self.read_merkle_proof()?;
evals_proofs.push((quotient_v, quotient_p));
Ok(FriInitialTreeProof { evals_proofs })
}
fn write_fri_query_step<F: Extendable<D>, const D: usize>(
&mut self,
fqs: &FriQueryStep<F, D>,
) -> Result<()> {
self.write_field_ext_vec::<F, D>(&fqs.evals)?;
self.write_merkle_proof(&fqs.merkle_proof)
}
fn read_fri_query_step<F: Extendable<D>, const D: usize>(
&mut self,
arity: usize,
) -> Result<FriQueryStep<F, D>> {
let evals = self.read_field_ext_vec::<F, D>(arity)?;
let merkle_proof = self.read_merkle_proof()?;
Ok(FriQueryStep {
evals,
merkle_proof,
})
}
fn write_fri_query_rounds<F: Extendable<D>, const D: usize>(
&mut self,
fqrs: &[FriQueryRound<F, D>],
) -> Result<()> {
for fqr in fqrs {
self.write_fri_initial_proof(&fqr.initial_trees_proof)?;
for fqs in &fqr.steps {
self.write_fri_query_step(fqs)?;
}
}
Ok(())
}
fn read_fri_query_rounds<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<Vec<FriQueryRound<F, D>>> {
let config = &common_data.config;
let mut fqrs = Vec::with_capacity(config.fri_config.num_query_rounds);
for _ in 0..config.fri_config.num_query_rounds {
let initial_trees_proof = self.read_fri_initial_proof(common_data)?;
let steps = config
.fri_config
.reduction_arity_bits
.iter()
.map(|&ar| self.read_fri_query_step(1 << ar))
.collect::<Result<_>>()?;
fqrs.push(FriQueryRound {
initial_trees_proof,
steps,
})
}
Ok(fqrs)
}
fn write_fri_proof<F: Extendable<D>, const D: usize>(
&mut self,
fp: &FriProof<F, D>,
) -> Result<()> {
for cap in &fp.commit_phase_merkle_caps {
self.write_merkle_cap(cap)?;
}
self.write_fri_query_rounds(&fp.query_round_proofs)?;
self.write_field_ext_vec::<F, D>(&fp.final_poly.coeffs)?;
self.write_field(fp.pow_witness)
}
fn read_fri_proof<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<FriProof<F, D>> {
let config = &common_data.config;
let commit_phase_merkle_caps = (0..config.fri_config.reduction_arity_bits.len())
.map(|_| self.read_merkle_cap(config.cap_height))
.collect::<Result<Vec<_>>>()?;
let query_round_proofs = self.read_fri_query_rounds(common_data)?;
let final_poly =
PolynomialCoeffs::new(self.read_field_ext_vec::<F, D>(common_data.final_poly_len())?);
let pow_witness = self.read_field()?;
Ok(FriProof {
commit_phase_merkle_caps,
query_round_proofs,
final_poly,
pow_witness,
})
}
pub fn write_proof<F: Extendable<D>, const D: usize>(
&mut self,
proof: &Proof<F, D>,
) -> Result<()> {
self.write_merkle_cap(&proof.wires_cap)?;
self.write_merkle_cap(&proof.plonk_zs_partial_products_cap)?;
self.write_merkle_cap(&proof.quotient_polys_cap)?;
self.write_opening_set(&proof.openings)?;
self.write_fri_proof(&proof.opening_proof)
}
pub fn read_proof<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<Proof<F, D>> {
let config = &common_data.config;
let wires_cap = self.read_merkle_cap(config.cap_height)?;
let plonk_zs_partial_products_cap = self.read_merkle_cap(config.cap_height)?;
let quotient_polys_cap = self.read_merkle_cap(config.cap_height)?;
let openings = self.read_opening_set(common_data)?;
let opening_proof = self.read_fri_proof(common_data)?;
Ok(Proof {
wires_cap,
plonk_zs_partial_products_cap,
quotient_polys_cap,
openings,
opening_proof,
})
}
pub fn write_proof_with_public_inputs<F: RichField + Extendable<D>, const D: usize>(
&mut self,
proof_with_pis: &ProofWithPublicInputs<F, D>,
) -> Result<()> {
let ProofWithPublicInputs {
proof,
public_inputs,
} = proof_with_pis;
self.write_proof(proof)?;
self.write_field_vec(public_inputs)
}
pub fn read_proof_with_public_inputs<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<ProofWithPublicInputs<F, D>> {
let proof = self.read_proof(common_data)?;
let public_inputs = self.read_field_vec(
(self.len() - self.0.position() as usize) / std::mem::size_of::<u64>(),
)?;
Ok(ProofWithPublicInputs {
proof,
public_inputs,
})
}
fn write_compressed_fri_query_rounds<F: Extendable<D>, const D: usize>(
&mut self,
cfqrs: &CompressedFriQueryRounds<F, D>,
) -> Result<()> {
for &i in &cfqrs.indices {
self.write_u32(i as u32)?;
}
let mut initial_trees_proofs = cfqrs.initial_trees_proofs.iter().collect::<Vec<_>>();
initial_trees_proofs.sort_by_key(|&x| x.0);
for (_, itp) in initial_trees_proofs {
self.write_fri_initial_proof(itp)?;
}
for h in &cfqrs.steps {
let mut fri_query_steps = h.iter().collect::<Vec<_>>();
fri_query_steps.sort_by_key(|&x| x.0);
for (_, fqs) in fri_query_steps {
self.write_fri_query_step(fqs)?;
}
}
Ok(())
}
fn read_compressed_fri_query_rounds<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<CompressedFriQueryRounds<F, D>> {
let config = &common_data.config;
let original_indices = (0..config.fri_config.num_query_rounds)
.map(|_| self.read_u32().map(|i| i as usize))
.collect::<Result<Vec<_>>>()?;
let mut indices = original_indices.clone();
indices.sort_unstable();
indices.dedup();
let mut pairs = Vec::new();
for &i in &indices {
pairs.push((i, self.read_fri_initial_proof(common_data)?));
}
let initial_trees_proofs = HashMap::from_iter(pairs);
let mut steps = Vec::with_capacity(config.fri_config.reduction_arity_bits.len());
for &a in &config.fri_config.reduction_arity_bits {
indices.iter_mut().for_each(|x| {
*x >>= a;
});
indices.dedup();
let query_steps = (0..indices.len())
.map(|_| self.read_fri_query_step(1 << a))
.collect::<Result<Vec<_>>>()?;
steps.push(
indices
.iter()
.copied()
.zip(query_steps)
.collect::<HashMap<_, _>>(),
);
}
Ok(CompressedFriQueryRounds {
indices: original_indices,
initial_trees_proofs,
steps,
})
}
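// Note on the ordering contract between write_compressed_fri_query_rounds and
// read_compressed_fri_query_rounds above: the writer emits each map's entries
// in sorted-key order, and the reader reconstructs exactly those keys from the
// index list alone. A hypothetical helper (not part of this file) describing
// the key set of `steps[step]`:
fn step_keys(original_indices: &[usize], reduction_arity_bits: &[usize], step: usize) -> Vec<usize> {
    // Keys are the original query indices shifted by the cumulative arity
    // through `step`, sorted and deduplicated, so neither side needs to store them.
    let shift: usize = reduction_arity_bits[..=step].iter().sum();
    let mut keys: Vec<usize> = original_indices.iter().map(|&i| i >> shift).collect();
    keys.sort_unstable();
    keys.dedup();
    keys
}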
fn write_compressed_fri_proof<F: Extendable<D>, const D: usize>(
&mut self,
fp: &CompressedFriProof<F, D>,
) -> Result<()> {
for cap in &fp.commit_phase_merkle_caps {
self.write_merkle_cap(cap)?;
}
self.write_compressed_fri_query_rounds(&fp.query_round_proofs)?;
self.write_field_ext_vec::<F, D>(&fp.final_poly.coeffs)?;
self.write_field(fp.pow_witness)
}
fn read_compressed_fri_proof<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<CompressedFriProof<F, D>> {
let config = &common_data.config;
let commit_phase_merkle_caps = (0..config.fri_config.reduction_arity_bits.len())
.map(|_| self.read_merkle_cap(config.cap_height))
.collect::<Result<Vec<_>>>()?;
let query_round_proofs = self.read_compressed_fri_query_rounds(common_data)?;
let final_poly =
PolynomialCoeffs::new(self.read_field_ext_vec::<F, D>(common_data.final_poly_len())?);
let pow_witness = self.read_field()?;
Ok(CompressedFriProof {
commit_phase_merkle_caps,
query_round_proofs,
final_poly,
pow_witness,
})
}
pub fn write_compressed_proof<F: Extendable<D>, const D: usize>(
&mut self,
proof: &CompressedProof<F, D>,
) -> Result<()> {
self.write_merkle_cap(&proof.wires_cap)?;
self.write_merkle_cap(&proof.plonk_zs_partial_products_cap)?;
self.write_merkle_cap(&proof.quotient_polys_cap)?;
self.write_opening_set(&proof.openings)?;
self.write_compressed_fri_proof(&proof.opening_proof)
}
pub fn read_compressed_proof<F: RichField + Extendable<D>, const D: usize>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<CompressedProof<F, D>> {
let config = &common_data.config;
let wires_cap = self.read_merkle_cap(config.cap_height)?;
let plonk_zs_partial_products_cap = self.read_merkle_cap(config.cap_height)?;
let quotient_polys_cap = self.read_merkle_cap(config.cap_height)?;
let openings = self.read_opening_set(common_data)?;
let opening_proof = self.read_compressed_fri_proof(common_data)?;
Ok(CompressedProof {
wires_cap,
plonk_zs_partial_products_cap,
quotient_polys_cap,
openings,
opening_proof,
})
}
pub fn write_compressed_proof_with_public_inputs<
F: RichField + Extendable<D>,
const D: usize,
>(
&mut self,
proof_with_pis: &CompressedProofWithPublicInputs<F, D>,
) -> Result<()> {
let CompressedProofWithPublicInputs {
proof,
public_inputs,
} = proof_with_pis;
self.write_compressed_proof(proof)?;
self.write_field_vec(public_inputs)
}
pub fn read_compressed_proof_with_public_inputs<
F: RichField + Extendable<D>,
const D: usize,
>(
&mut self,
common_data: &CommonCircuitData<F, D>,
) -> Result<CompressedProofWithPublicInputs<F, D>> {
let proof = self.read_compressed_proof(common_data)?;
let public_inputs = self.read_field_vec(
(self.len() - self.0.position() as usize) / std::mem::size_of::<u64>(),
)?;
Ok(CompressedProofWithPublicInputs {
proof,
public_inputs,
})
}
}
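
A usage sketch of the serialization API above, mirroring what `CompressedProofWithPublicInputs::{to_bytes, from_bytes}` do; `compressed_proof` and a matching `common_data` are assumed to be in scope:

// Write, then read back, a compressed proof through Buffer.
let mut writer = Buffer::new(Vec::new());
writer
    .write_compressed_proof_with_public_inputs(&compressed_proof)
    .expect("serialization failed");
let bytes = writer.bytes();

let mut reader = Buffer::new(bytes);
let round_trip = reader
    .read_compressed_proof_with_public_inputs(&common_data)
    .expect("deserialization failed");
assert_eq!(compressed_proof, round_trip);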