Almost working recursive verifier

wborgeaud 2021-07-12 14:25:28 +02:00
parent cbb0cbffb1
commit ad24f5d4d1
17 changed files with 643 additions and 263 deletions


@@ -1,6 +1,7 @@
use itertools::izip;
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CommonCircuitData;
use crate::field::extension_field::target::{flatten_target, ExtensionTarget};
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
@@ -73,8 +74,9 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
initial_merkle_roots: &[HashTarget],
proof: &FriProofTarget<D>,
challenger: &mut RecursiveChallenger,
config: &FriConfig,
common_data: &CommonCircuitData<F, D>,
) {
let config = &common_data.config.fri_config;
let total_arities = config.reduction_arity_bits.iter().sum::<usize>();
debug_assert_eq!(
purported_degree_log,


@@ -178,7 +178,7 @@ fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
let single_openings = os
.constants
.iter()
.chain(&os.plonk_s_sigmas)
.chain(&os.plonk_sigmas)
.chain(&os.quotient_polys)
.chain(&os.partial_products);
let single_diffs = single_evals


@@ -252,6 +252,16 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self.arithmetic_extension(F::ONE, F::ONE, a_ext, b, c)
}
/// Like `mul_sub`, but for `ExtensionTarget`s.
pub fn mul_sub_extension(
&mut self,
a: ExtensionTarget<D>,
b: ExtensionTarget<D>,
c: ExtensionTarget<D>,
) -> ExtensionTarget<D> {
self.arithmetic_extension(F::ONE, F::NEG_ONE, a, b, c)
}
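For orientation: the usages here and in `eval_l_1_recursively` below suggest that `arithmetic_extension(c0, c1, a, b, c)` evaluates `c0 * a * b + c1 * c`, so the helper above yields `a * b - c`. A plain-integer sketch of that assumed convention (not the circuit API):
fn arithmetic(c0: i64, c1: i64, a: i64, b: i64, c: i64) -> i64 {
    // Assumed convention: c0 * a * b + c1 * c.
    c0 * a * b + c1 * c
}
fn main() {
    // `mul_sub_extension` corresponds to c0 = 1, c1 = -1, i.e. a * b - c.
    assert_eq!(arithmetic(1, -1, 7, 5, 3), 7 * 5 - 3);
}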
/// Like `mul_sub_extension`, but where the first multiplicand is a scalar `Target` rather than an `ExtensionTarget`.
pub fn scalar_mul_sub_extension(
&mut self,


@@ -49,7 +49,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
let mut already_inserted = self.zero();
let mut new_list = Vec::new();
for i in 0..v.len() {
for i in 0..=v.len() {
let one = self.one();
let cur_index = self.constant(F::from_canonical_usize(i));
@@ -63,7 +63,9 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
already_inserted = self.add(already_inserted, insert_here);
let not_already_inserted = self.sub(one, already_inserted);
new_item = self.scalar_mul_add_extension(not_already_inserted, v[i], new_item);
if i < v.len() {
new_item = self.scalar_mul_add_extension(not_already_inserted, v[i], new_item);
}
new_list.push(new_item);
}
@@ -106,6 +108,7 @@ mod tests {
let elem = builder.constant_extension(FF::rand());
let inserted = real_insert(i, elem, &v);
let purported_inserted = builder.insert(it, elem, v.clone());
assert_eq!(inserted.len(), purported_inserted.len());
for (x, y) in inserted.into_iter().zip(purported_inserted) {
builder.route_extension(x, y);
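For orientation, a standalone sketch of the reference behaviour the test compares against (the tests' `real_insert` plays this role); it also shows why the gadget's loop now runs over `0..=v.len()`: the insertion index may be one past the end.
fn real_insert<T: Clone>(index: usize, elem: T, v: &[T]) -> Vec<T> {
    let mut res = v.to_vec();
    res.insert(index, elem); // valid for any index in 0..=v.len()
    res
}
fn main() {
    assert_eq!(real_insert(2, 9, &[1, 2]), vec![1, 2, 9]); // insertion at the end
}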


@@ -8,6 +8,7 @@ use crate::field::extension_field::{Extendable, FieldExtension};
use crate::field::field::Field;
use crate::gates::gate_tree::Tree;
use crate::generator::WitnessGenerator;
use crate::target::Target;
use crate::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase};
/// A custom gate.
@@ -76,9 +77,11 @@ pub trait Gate<F: Extendable<D>, const D: usize>: 'static + Send + Sync {
fn eval_filtered_recursively(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: EvaluationTargets<D>,
mut vars: EvaluationTargets<D>,
prefix: &[bool],
) -> Vec<ExtensionTarget<D>> {
// TODO: Multiply the constraints by `filter`; it is computed below but not yet applied.
let filter = compute_filter_recursively(builder, prefix, vars.local_constants);
vars.remove_prefix(prefix);
self.eval_unfiltered_recursively(builder, vars)
}
@@ -167,3 +170,24 @@ fn compute_filter<K: Field>(prefix: &[bool], constants: &[K]) -> K {
})
.product()
}
fn compute_filter_recursively<F: Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
prefix: &[bool],
constants: &[ExtensionTarget<D>],
) -> ExtensionTarget<D> {
let one = builder.one_extension();
let v = prefix
.iter()
.enumerate()
.map(|(i, &b)| {
if b {
constants[i]
} else {
builder.sub_extension(one, constants[i])
}
})
.collect::<Vec<_>>();
builder.mul_many_extension(&v)
}
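For orientation, a plain-integer sketch mirroring `compute_filter` above (not the circuit API): with 0/1 prefix constants, the product is 1 exactly when the constants encode this gate's prefix and 0 otherwise.
fn filter_plain(prefix: &[bool], constants: &[i64]) -> i64 {
    prefix
        .iter()
        .enumerate()
        .map(|(i, &b)| if b { constants[i] } else { 1 - constants[i] })
        .product()
}
fn main() {
    assert_eq!(filter_plain(&[true, false], &[1, 0]), 1); // prefix matches
    assert_eq!(filter_plain(&[true, false], &[1, 1]), 0); // prefix does not match
}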


@@ -55,6 +55,12 @@ pub(crate) fn generate_partial_witness<F: Field>(
pending_generator_indices = next_pending_generator_indices;
}
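// Debugging aid: report the first generator that never ran before the completeness assert below fires.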
for i in 0..generators.len() {
if !expired_generator_indices.contains(&i) {
dbg!(i);
break;
}
}
assert_eq!(
expired_generator_indices.len(),
generators.len(),


@@ -21,6 +21,7 @@ pub mod recursive_verifier;
pub mod rescue;
pub mod target;
pub mod util;
pub mod vanishing_poly;
pub mod vars;
pub mod verifier;
pub mod wire;


@@ -5,7 +5,7 @@ use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::{Extendable, FieldExtension};
use crate::field::field::Field;
use crate::hash::{permute, SPONGE_RATE, SPONGE_WIDTH};
use crate::proof::{Hash, HashTarget, OpeningSet};
use crate::proof::{Hash, HashTarget, OpeningSet, OpeningSetTarget};
use crate::target::Target;
/// Observes prover messages, and generates challenges by hashing the transcript.
@@ -68,7 +68,7 @@ impl<F: Field> Challenger<F> {
{
let OpeningSet {
constants,
plonk_s_sigmas,
plonk_sigmas,
wires,
plonk_zs,
plonk_zs_right,
@@ -77,7 +77,7 @@ impl<F: Field> Challenger<F> {
} = os;
for v in &[
constants,
plonk_s_sigmas,
plonk_sigmas,
wires,
plonk_zs,
plonk_zs_right,
@@ -211,6 +211,29 @@ impl RecursiveChallenger {
}
}
pub fn observe_opening_set<const D: usize>(&mut self, os: &OpeningSetTarget<D>) {
let OpeningSetTarget {
constants,
plonk_sigmas,
wires,
plonk_zs,
plonk_zs_right,
partial_products,
quotient_polys,
} = os;
for v in &[
constants,
plonk_sigmas,
wires,
plonk_zs,
plonk_zs_right,
partial_products,
quotient_polys,
] {
self.observe_extension_elements(v);
}
}
pub fn observe_hash(&mut self, hash: &HashTarget) {
self.observe_elements(&hash.elements)
}


@@ -58,237 +58,6 @@ impl PlonkPolynomials {
}
}
/// Evaluate the vanishing polynomial at `x`. In this context, the vanishing polynomial is a random
/// linear combination of gate constraints, plus some other terms relating to the permutation
/// argument. All such terms should vanish on `H`.
pub(crate) fn eval_vanishing_poly<F: Extendable<D>, const D: usize>(
common_data: &CommonCircuitData<F, D>,
x: F::Extension,
vars: EvaluationVars<F, D>,
local_zs: &[F::Extension],
next_zs: &[F::Extension],
partial_products: &[F::Extension],
s_sigmas: &[F::Extension],
betas: &[F],
gammas: &[F],
alphas: &[F],
) -> Vec<F::Extension> {
let max_degree = common_data.quotient_degree_factor;
let (num_prods, final_num_prod) = common_data.num_partial_products;
let constraint_terms =
evaluate_gate_constraints(&common_data.gates, common_data.num_gate_constraints, vars);
// The L_1(x) (Z(x) - 1) vanishing terms.
let mut vanishing_z_1_terms = Vec::new();
// The terms checking the partial products.
let mut vanishing_partial_products_terms = Vec::new();
// The Z(x) f'(x) - g'(x) Z(g x) terms.
let mut vanishing_v_shift_terms = Vec::new();
for i in 0..common_data.config.num_challenges {
let z_x = local_zs[i];
let z_gz = next_zs[i];
vanishing_z_1_terms.push(eval_l_1(common_data.degree(), x) * (z_x - F::Extension::ONE));
let numerator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let k_i = common_data.k_is[j];
let s_id = x * k_i.into();
wire_value + s_id * betas[i].into() + gammas[i].into()
})
.collect::<Vec<_>>();
let denominator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let s_sigma = s_sigmas[j];
wire_value + s_sigma * betas[i].into() + gammas[i].into()
})
.collect::<Vec<_>>();
let quotient_values = (0..common_data.config.num_routed_wires)
.map(|j| numerator_values[j] / denominator_values[j])
.collect::<Vec<_>>();
// The partial products considered for this iteration of `i`.
let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
// Check the quotient partial products.
let mut partial_product_check =
check_partial_products(&quotient_values, current_partial_products, max_degree);
// The first checks are of the form `q - n/d`, which is a rational function, not a polynomial.
// We multiply them by `d` to get checks of the form `q*d - n`, which are low-degree polynomials.
denominator_values
.chunks(max_degree)
.zip(partial_product_check.iter_mut())
.for_each(|(d, q)| {
*q *= d.iter().copied().product();
});
vanishing_partial_products_terms.extend(partial_product_check);
// The quotient final product is the product of the last `final_num_prod` elements.
let quotient: F::Extension = current_partial_products[num_prods - final_num_prod..]
.iter()
.copied()
.product();
vanishing_v_shift_terms.push(quotient * z_x - z_gz);
}
let vanishing_terms = [
vanishing_z_1_terms,
vanishing_partial_products_terms,
vanishing_v_shift_terms,
constraint_terms,
]
.concat();
let alphas = &alphas.iter().map(|&a| a.into()).collect::<Vec<_>>();
reduce_with_powers_multi(&vanishing_terms, alphas)
}
/// Like `eval_vanishing_poly`, but specialized for base field points.
pub(crate) fn eval_vanishing_poly_base<F: Extendable<D>, const D: usize>(
common_data: &CommonCircuitData<F, D>,
index: usize,
x: F,
vars: EvaluationVarsBase<F>,
local_zs: &[F],
next_zs: &[F],
partial_products: &[F],
s_sigmas: &[F],
betas: &[F],
gammas: &[F],
alphas: &[F],
z_h_on_coset: &ZeroPolyOnCoset<F>,
) -> Vec<F> {
let max_degree = common_data.quotient_degree_factor;
let (num_prods, final_num_prod) = common_data.num_partial_products;
let constraint_terms =
evaluate_gate_constraints_base(&common_data.gates, common_data.num_gate_constraints, vars);
// The L_1(x) (Z(x) - 1) vanishing terms.
let mut vanishing_z_1_terms = Vec::new();
// The terms checking the partial products.
let mut vanishing_partial_products_terms = Vec::new();
// The Z(x) f'(x) - g'(x) Z(g x) terms.
let mut vanishing_v_shift_terms = Vec::new();
for i in 0..common_data.config.num_challenges {
let z_x = local_zs[i];
let z_gz = next_zs[i];
vanishing_z_1_terms.push(z_h_on_coset.eval_l1(index, x) * (z_x - F::ONE));
let numerator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let k_i = common_data.k_is[j];
let s_id = k_i * x;
wire_value + betas[i] * s_id + gammas[i]
})
.collect::<Vec<_>>();
let denominator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let s_sigma = s_sigmas[j];
wire_value + betas[i] * s_sigma + gammas[i]
})
.collect::<Vec<_>>();
let quotient_values = (0..common_data.config.num_routed_wires)
.map(|j| numerator_values[j] / denominator_values[j])
.collect::<Vec<_>>();
// The partial products considered for this iteration of `i`.
let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
// Check the quotient partial products.
let mut partial_product_check =
check_partial_products(&quotient_values, current_partial_products, max_degree);
// The first checks are of the form `q - n/d`, which is a rational function, not a polynomial.
// We multiply them by `d` to get checks of the form `q*d - n`, which are low-degree polynomials.
denominator_values
.chunks(max_degree)
.zip(partial_product_check.iter_mut())
.for_each(|(d, q)| {
*q *= d.iter().copied().product();
});
vanishing_partial_products_terms.extend(partial_product_check);
// The quotient final product is the product of the last `final_num_prod` elements.
let quotient: F = current_partial_products[num_prods - final_num_prod..]
.iter()
.copied()
.product();
vanishing_v_shift_terms.push(quotient * z_x - z_gz);
}
let vanishing_terms = [
vanishing_z_1_terms,
vanishing_partial_products_terms,
vanishing_v_shift_terms,
constraint_terms,
]
.concat();
reduce_with_powers_multi(&vanishing_terms, alphas)
}
/// Evaluates all gate constraints.
///
/// `num_gate_constraints` is the largest number of constraints imposed by any gate. It is not
/// strictly necessary, but it helps performance by ensuring that we allocate a vector with exactly
/// the capacity that we need.
pub fn evaluate_gate_constraints<F: Extendable<D>, const D: usize>(
gates: &[PrefixedGate<F, D>],
num_gate_constraints: usize,
vars: EvaluationVars<F, D>,
) -> Vec<F::Extension> {
let mut constraints = vec![F::Extension::ZERO; num_gate_constraints];
for gate in gates {
let gate_constraints = gate.gate.0.eval_filtered(vars, &gate.prefix);
for (i, c) in gate_constraints.into_iter().enumerate() {
debug_assert!(
i < num_gate_constraints,
"num_constraints() gave too low of a number"
);
constraints[i] += c;
}
}
constraints
}
pub fn evaluate_gate_constraints_base<F: Extendable<D>, const D: usize>(
gates: &[PrefixedGate<F, D>],
num_gate_constraints: usize,
vars: EvaluationVarsBase<F>,
) -> Vec<F> {
let mut constraints = vec![F::ZERO; num_gate_constraints];
for gate in gates {
let gate_constraints = gate.gate.0.eval_filtered_base(vars, &gate.prefix);
for (i, c) in gate_constraints.into_iter().enumerate() {
debug_assert!(
i < num_gate_constraints,
"num_constraints() gave too low of a number"
);
constraints[i] += c;
}
}
constraints
}
pub fn evaluate_gate_constraints_recursively<F: Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
gates: &[GateRef<F, D>],
num_gate_constraints: usize,
vars: EvaluationTargets<D>,
) -> Vec<ExtensionTarget<D>> {
let mut constraints = vec![builder.zero_extension(); num_gate_constraints];
for gate in gates {
let gate_constraints = gate.0.eval_filtered_recursively(builder, vars);
for (i, c) in gate_constraints.into_iter().enumerate() {
constraints[i] = builder.add_extension(constraints[i], c);
}
}
constraints
}
/// Evaluate the polynomial which vanishes on any multiplicative subgroup of a given order `n`.
pub(crate) fn eval_zero_poly<F: Field>(n: usize, x: F) -> F {
// Z(x) = x^n - 1
@@ -354,6 +123,28 @@ pub(crate) fn eval_l_1<F: Field>(n: usize, x: F) -> F {
eval_zero_poly(n, x) / (F::from_canonical_usize(n) * (x - F::ONE))
}
pub(crate) fn eval_l_1_recursively<F: Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
n: usize,
x: ExtensionTarget<D>,
x_pow_n: ExtensionTarget<D>,
) -> ExtensionTarget<D> {
// L_1(x) = (x^n - 1) / (n * (x - 1))
// = Z(x) / (n * (x - 1))
let one = builder.one_extension();
let neg_one = builder.neg_one();
let neg_one = builder.convert_to_ext(neg_one);
let eval_zero_poly = builder.sub_extension(x_pow_n, one);
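// Denominator: n * x * 1 + n * (-1) = n * (x - 1).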
let denominator = builder.arithmetic_extension(
F::from_canonical_usize(n),
F::from_canonical_usize(n),
x,
one,
neg_one,
);
builder.div_unsafe_extension(eval_zero_poly, denominator)
}
/// For each alpha in alphas, compute a reduction of the given terms using powers of alpha.
pub(crate) fn reduce_with_powers_multi<F: Field>(terms: &[F], alphas: &[F]) -> Vec<F> {
alphas


@@ -1,19 +1,21 @@
use anyhow::Result;
use rayon::prelude::*;
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CommonCircuitData;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::extension_field::{FieldExtension, Frobenius};
use crate::field::field::Field;
use crate::fri::{prover::fri_proof, verifier::verify_fri_proof, FriConfig};
use crate::merkle_tree::MerkleTree;
use crate::plonk_challenger::Challenger;
use crate::plonk_challenger::{Challenger, RecursiveChallenger};
use crate::plonk_common::PlonkPolynomials;
use crate::polynomial::polynomial::{PolynomialCoeffs, PolynomialValues};
use crate::proof::{FriProof, FriProofTarget, Hash, OpeningSet};
use crate::proof::{FriProof, FriProofTarget, Hash, HashTarget, OpeningSet, OpeningSetTarget};
use crate::timed;
use crate::util::scaling::ReducingFactor;
use crate::util::{log2_strict, reverse_bits, reverse_index_bits_in_place, transpose};
use crate::util::{log2_ceil, log2_strict, reverse_bits, reverse_index_bits_in_place, transpose};
pub const SALT_SIZE: usize = 2;
@@ -245,13 +247,14 @@ impl<F: Field> ListPolynomialCommitment<F> {
}
}
pub struct OpeningProof<F: Field + Extendable<D>, const D: usize> {
#[derive(Clone)]
pub struct OpeningProof<F: Extendable<D>, const D: usize> {
pub fri_proof: FriProof<F, D>,
// TODO: Get the degree from `CommonCircuitData` instead.
quotient_degree: usize,
}
impl<F: Field + Extendable<D>, const D: usize> OpeningProof<F, D> {
impl<F: Extendable<D>, const D: usize> OpeningProof<F, D> {
pub fn verify(
&self,
zeta: F::Extension,
@@ -281,6 +284,33 @@ pub struct OpeningProofTarget<const D: usize> {
pub fri_proof: FriProofTarget<D>,
}
impl<const D: usize> OpeningProofTarget<D> {
pub fn verify<F: Extendable<D>>(
&self,
zeta: ExtensionTarget<D>,
os: &OpeningSetTarget<D>,
merkle_roots: &[HashTarget],
challenger: &mut RecursiveChallenger,
common_data: &CommonCircuitData<F, D>,
builder: &mut CircuitBuilder<F, D>,
) {
challenger.observe_opening_set(os);
let alpha = challenger.get_extension_challenge(builder);
builder.verify_fri_proof(
log2_ceil(common_data.degree()),
&os,
zeta,
alpha,
merkle_roots,
&self.fri_proof,
challenger,
common_data,
);
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;


@@ -61,6 +61,7 @@ impl HashTarget {
}
}
#[derive(Clone)]
pub struct Proof<F: Extendable<D>, const D: usize> {
/// Merkle root of LDEs of wire values.
pub wires_root: Hash<F>,
@@ -83,6 +84,7 @@ pub struct ProofTarget<const D: usize> {
}
/// Evaluations and Merkle proof produced by the prover in a FRI query step.
#[derive(Clone)]
pub struct FriQueryStep<F: Field + Extendable<D>, const D: usize> {
pub evals: Vec<F::Extension>,
pub merkle_proof: MerkleProof<F>,
@@ -96,6 +98,7 @@ pub struct FriQueryStepTarget<const D: usize> {
/// Evaluations and Merkle proofs of the original set of polynomials,
/// before they are combined into a composition polynomial.
#[derive(Clone)]
pub struct FriInitialTreeProof<F: Field> {
pub evals_proofs: Vec<(Vec<F>, MerkleProof<F>)>,
}
@@ -120,6 +123,7 @@ impl FriInitialTreeProofTarget {
}
/// Proof for a FRI query round.
#[derive(Clone)]
pub struct FriQueryRound<F: Field + Extendable<D>, const D: usize> {
pub initial_trees_proof: FriInitialTreeProof<F>,
pub steps: Vec<FriQueryStep<F, D>>,
@@ -131,6 +135,7 @@ pub struct FriQueryRoundTarget<const D: usize> {
pub steps: Vec<FriQueryStepTarget<D>>,
}
#[derive(Clone)]
pub struct FriProof<F: Field + Extendable<D>, const D: usize> {
/// A Merkle root for each reduced polynomial in the commit phase.
pub commit_phase_merkle_roots: Vec<Hash<F>>,
@@ -153,7 +158,7 @@ pub struct FriProofTarget<const D: usize> {
/// The purported values of each polynomial at a single point.
pub struct OpeningSet<F: Field + Extendable<D>, const D: usize> {
pub constants: Vec<F::Extension>,
pub plonk_s_sigmas: Vec<F::Extension>,
pub plonk_sigmas: Vec<F::Extension>,
pub wires: Vec<F::Extension>,
pub plonk_zs: Vec<F::Extension>,
pub plonk_zs_right: Vec<F::Extension>,
@@ -181,7 +186,7 @@ impl<F: Field + Extendable<D>, const D: usize> OpeningSet<F, D> {
let zs_partial_products_eval = eval_commitment(z, zs_partial_products_commitment);
Self {
constants: constants_sigmas_eval[common_data.constants_range()].to_vec(),
plonk_s_sigmas: constants_sigmas_eval[common_data.sigmas_range()].to_vec(),
plonk_sigmas: constants_sigmas_eval[common_data.sigmas_range()].to_vec(),
wires: eval_commitment(z, wires_commitment),
plonk_zs: zs_partial_products_eval[common_data.zs_range()].to_vec(),
plonk_zs_right: eval_commitment(g * z, zs_partial_products_commitment)
@@ -195,6 +200,7 @@ impl<F: Field + Extendable<D>, const D: usize> OpeningSet<F, D> {
}
/// The purported values of each polynomial at a single point.
#[derive(Clone, Debug)]
pub struct OpeningSetTarget<const D: usize> {
pub constants: Vec<ExtensionTarget<D>>,
pub plonk_sigmas: Vec<ExtensionTarget<D>>,


@@ -7,13 +7,14 @@ use crate::circuit_data::{CommonCircuitData, ProverOnlyCircuitData};
use crate::field::extension_field::Extendable;
use crate::generator::generate_partial_witness;
use crate::plonk_challenger::Challenger;
use crate::plonk_common::{eval_vanishing_poly_base, PlonkPolynomials, ZeroPolyOnCoset};
use crate::plonk_common::{PlonkPolynomials, ZeroPolyOnCoset};
use crate::polynomial::commitment::ListPolynomialCommitment;
use crate::polynomial::polynomial::{PolynomialCoeffs, PolynomialValues};
use crate::proof::Proof;
use crate::timed;
use crate::util::partial_products::partial_products;
use crate::util::{log2_ceil, transpose};
use crate::vanishing_poly::eval_vanishing_poly_base;
use crate::vars::EvaluationVarsBase;
use crate::witness::{PartialWitness, Witness};
@@ -50,6 +51,12 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
"to check copy constraints"
);
if degree > 7 {
dbg!(witness.get_wire(8, 16));
dbg!(witness.get_wire(8, 17));
dbg!(witness.get_wire(8, 18));
dbg!(witness.get_wire(8, 19));
}
let wires_values: Vec<PolynomialValues<F>> = timed!(
witness
.wire_values


@@ -1,8 +1,15 @@
use env_logger::builder;
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::{CircuitConfig, VerifierCircuitTarget};
use crate::circuit_data::{CircuitConfig, CommonCircuitData, VerifierCircuitTarget};
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
use crate::gates::gate::{GateRef, PrefixedGate};
use crate::proof::ProofTarget;
use crate::plonk_challenger::RecursiveChallenger;
use crate::proof::{HashTarget, ProofTarget};
use crate::util::scaling::ReducingFactorTarget;
use crate::vanishing_poly::eval_vanishing_poly_recursively;
use crate::vars::EvaluationTargets;
const MIN_WIRES: usize = 120; // TODO: Double check.
const MIN_ROUTED_WIRES: usize = 28; // TODO: Double check.
@@ -11,15 +18,94 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
/// Recursively verifies an inner proof.
pub fn add_recursive_verifier(
&mut self,
inner_proof: ProofTarget<D>,
proof: ProofTarget<D>,
inner_config: &CircuitConfig,
inner_circuit_data: &VerifierCircuitTarget,
inner_gates: &[PrefixedGate<F, D>],
inner_verifier_data: &VerifierCircuitTarget,
inner_common_data: &CommonCircuitData<F, D>,
) {
assert!(self.config.num_wires >= MIN_WIRES);
assert!(self.config.num_routed_wires >= MIN_ROUTED_WIRES);
let one = self.one_extension();
todo!()
let num_challenges = inner_config.num_challenges;
let mut challenger = RecursiveChallenger::new(self);
let digest =
HashTarget::from_vec(self.constants(&inner_common_data.circuit_digest.elements));
challenger.observe_hash(&digest);
challenger.observe_hash(&proof.wires_root);
let betas = challenger.get_n_challenges(self, num_challenges);
let gammas = challenger.get_n_challenges(self, num_challenges);
challenger.observe_hash(&proof.plonk_zs_root);
let alphas = challenger.get_n_challenges(self, num_challenges);
challenger.observe_hash(&proof.quotient_polys_root);
let zeta = challenger.get_extension_challenge(self);
let local_constants = &proof.openings.constants;
let local_wires = &proof.openings.wires;
let vars = EvaluationTargets {
local_constants,
local_wires,
};
let local_zs = &proof.openings.plonk_zs;
let next_zs = &proof.openings.plonk_zs_right;
let s_sigmas = &proof.openings.plonk_sigmas;
let partial_products = &proof.openings.partial_products;
let zeta_pow_deg = self.exp_u64_extension(zeta, inner_common_data.degree() as u64);
// Evaluate the vanishing polynomial at our challenge point, zeta.
let vanishing_polys_zeta = eval_vanishing_poly_recursively(
self,
inner_common_data,
zeta,
zeta_pow_deg,
vars,
local_zs,
next_zs,
partial_products,
s_sigmas,
&betas,
&gammas,
&alphas,
);
// let quotient_polys_zeta = &proof.openings.quotient_polys;
// let zeta_pow_deg = self.exp_u64_extension(zeta, 1 << inner_common_data.degree_bits as u64);
// let z_h_zeta = self.sub_extension(zeta_pow_deg, one);
// for (i, chunk) in quotient_polys_zeta
// .chunks(inner_common_data.quotient_degree_factor)
// .enumerate()
// {
// let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
// let mut rhs = scale.reduce(chunk, self);
// rhs = self.mul_extension(z_h_zeta, rhs);
// dbg!(self.num_gates());
// self.route_extension(vanishing_polys_zeta[i], rhs);
// }
//
// let evaluations = proof.openings.clone();
//
// let merkle_roots = &[
// inner_verifier_data.constants_sigmas_root,
// proof.wires_root,
// proof.plonk_zs_root,
// proof.quotient_polys_root,
// ];
//
// proof.opening_proof.verify(
// zeta,
// &evaluations,
// merkle_roots,
// &mut challenger,
// inner_common_data,
// self,
// );
// dbg!(self.num_gates());
// dbg!(self.generators.len());
}
}
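In formula form, the commented-out quotient check above asserts, for each challenge i, that vanishing_i(zeta) = (zeta^n - 1) * t_i(zeta), where n is the degree and t_i is the i-th quotient polynomial recombined from its degree-split chunks by powers of zeta^n.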
@@ -80,8 +166,7 @@ mod tests {
let openings = OpeningSetTarget {
constants: builder.add_virtual_extension_targets(proof.openings.constants.len()),
plonk_sigmas: builder
.add_virtual_extension_targets(proof.openings.plonk_s_sigmas.len()),
plonk_sigmas: builder.add_virtual_extension_targets(proof.openings.plonk_sigmas.len()),
wires: builder.add_virtual_extension_targets(proof.openings.wires.len()),
plonk_zs: builder.add_virtual_extension_targets(proof.openings.plonk_zs.len()),
plonk_zs_right: builder
@@ -141,7 +226,7 @@ mod tests {
.openings
.plonk_sigmas
.iter()
.zip(&proof.openings.plonk_s_sigmas)
.zip(&proof.openings.plonk_sigmas)
{
pw.set_extension_target(t, x);
}
@@ -240,6 +325,7 @@ mod tests {
data.common,
)
};
verify(proof.clone(), &vd, &cd).unwrap();
let config = CircuitConfig::large_config();
let mut builder = CircuitBuilder::<F, 4>::new(config.clone());
@@ -252,12 +338,11 @@ mod tests {
};
pw.set_hash_target(inner_data.constants_sigmas_root, vd.constants_sigmas_root);
let gates = cd.gates;
builder.add_recursive_verifier(pt, &config, &inner_data, &gates);
builder.add_recursive_verifier(pt, &config, &inner_data, &cd);
dbg!(builder.num_gates());
let data = builder.build();
let recursive_proof = data.prove(PartialWitness::new());
let recursive_proof = data.prove(pw);
verify(recursive_proof, &data.verifier_only, &data.common).unwrap();
}


@@ -1,6 +1,9 @@
use std::iter::Product;
use std::ops::Sub;
use crate::circuit_builder::CircuitBuilder;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::util::ceil_div_usize;
/// Compute partial products of the original vector `v` such that all products consist of `max_degree`
@@ -58,6 +61,32 @@ pub fn check_partial_products<T: Product + Copy + Sub<Output = T>>(
res
}
pub fn check_partial_products_recursively<F: Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
v: &[ExtensionTarget<D>],
partials: &[ExtensionTarget<D>],
max_degree: usize,
) -> Vec<ExtensionTarget<D>> {
let mut res = Vec::new();
let mut remainder = v.to_vec();
let mut partials = partials.to_vec();
while remainder.len() > max_degree {
let products = remainder
.chunks(max_degree)
.map(|chunk| builder.mul_many_extension(chunk))
.collect::<Vec<_>>();
res.extend(
products
.iter()
.zip(&partials)
.map(|(&a, &b)| builder.sub_extension(a, b)),
);
remainder = partials.drain(..products.len()).collect();
}
res
}
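For orientation, a plain-integer sketch of the same bookkeeping (not the circuit API), assuming the committed `partials` come from the matching `partial_products` routine; every check is zero when the partials are correct, and the surviving remainder feeds the final product check in the vanishing polynomial.
fn check_partial_products_plain(v: &[i64], partials: &[i64], max_degree: usize) -> Vec<i64> {
    let mut res = Vec::new();
    let mut remainder = v.to_vec();
    let mut partials = partials.to_vec();
    while remainder.len() > max_degree {
        // Product of each chunk, compared against the corresponding committed partial product.
        let products: Vec<i64> = remainder
            .chunks(max_degree)
            .map(|chunk| chunk.iter().product())
            .collect();
        res.extend(products.iter().zip(&partials).map(|(&a, &b)| a - b));
        remainder = partials.drain(..products.len()).collect();
    }
    res
}
fn main() {
    // v = [2, 3, 4, 5], max_degree = 2, correct partials [6, 20]: every check is zero.
    assert!(check_partial_products_plain(&[2, 3, 4, 5], &[6, 20], 2)
        .iter()
        .all(|&x| x == 0));
}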
#[cfg(test)]
mod tests {
use num::Zero;

src/vanishing_poly.rs (new file, 356 lines)

@@ -0,0 +1,356 @@
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CommonCircuitData;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
use crate::gates::gate::{Gate, GateRef, PrefixedGate};
use crate::plonk_common;
use crate::plonk_common::{eval_l_1_recursively, ZeroPolyOnCoset};
use crate::target::Target;
use crate::util::partial_products::{check_partial_products, check_partial_products_recursively};
use crate::util::scaling::ReducingFactorTarget;
use crate::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase};
/// Evaluate the vanishing polynomial at `x`. In this context, the vanishing polynomial is a random
/// linear combination of gate constraints, plus some other terms relating to the permutation
/// argument. All such terms should vanish on `H`.
pub(crate) fn eval_vanishing_poly<F: Extendable<D>, const D: usize>(
common_data: &CommonCircuitData<F, D>,
x: F::Extension,
vars: EvaluationVars<F, D>,
local_zs: &[F::Extension],
next_zs: &[F::Extension],
partial_products: &[F::Extension],
s_sigmas: &[F::Extension],
betas: &[F],
gammas: &[F],
alphas: &[F],
) -> Vec<F::Extension> {
let max_degree = common_data.quotient_degree_factor;
let (num_prods, final_num_prod) = common_data.num_partial_products;
let constraint_terms =
evaluate_gate_constraints(&common_data.gates, common_data.num_gate_constraints, vars);
// The L_1(x) (Z(x) - 1) vanishing terms.
let mut vanishing_z_1_terms = Vec::new();
// The terms checking the partial products.
let mut vanishing_partial_products_terms = Vec::new();
// The Z(x) f'(x) - g'(x) Z(g x) terms.
let mut vanishing_v_shift_terms = Vec::new();
for i in 0..common_data.config.num_challenges {
let z_x = local_zs[i];
let z_gz = next_zs[i];
vanishing_z_1_terms
.push(plonk_common::eval_l_1(common_data.degree(), x) * (z_x - F::Extension::ONE));
let numerator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let k_i = common_data.k_is[j];
let s_id = x * k_i.into();
wire_value + s_id * betas[i].into() + gammas[i].into()
})
.collect::<Vec<_>>();
let denominator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let s_sigma = s_sigmas[j];
wire_value + s_sigma * betas[i].into() + gammas[i].into()
})
.collect::<Vec<_>>();
let quotient_values = (0..common_data.config.num_routed_wires)
.map(|j| numerator_values[j] / denominator_values[j])
.collect::<Vec<_>>();
// The partial products considered for this iteration of `i`.
let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
// Check the quotient partial products.
let mut partial_product_check =
check_partial_products(&quotient_values, current_partial_products, max_degree);
// The first checks are of the form `q - n/d`, which is a rational function, not a polynomial.
// We multiply them by `d` to get checks of the form `q*d - n`, which are low-degree polynomials.
denominator_values
.chunks(max_degree)
.zip(partial_product_check.iter_mut())
.for_each(|(d, q)| {
*q *= d.iter().copied().product();
});
vanishing_partial_products_terms.extend(partial_product_check);
// The quotient final product is the product of the last `final_num_prod` elements.
let quotient: F::Extension = current_partial_products[num_prods - final_num_prod..]
.iter()
.copied()
.product();
vanishing_v_shift_terms.push(quotient * z_x - z_gz);
}
let vanishing_terms = [
vanishing_z_1_terms,
vanishing_partial_products_terms,
vanishing_v_shift_terms,
constraint_terms,
]
.concat();
dbg!(&vanishing_terms);
let alphas = &alphas.iter().map(|&a| a.into()).collect::<Vec<_>>();
plonk_common::reduce_with_powers_multi(&vanishing_terms, alphas)
}
/// Like `eval_vanishing_poly`, but specialized for base field points.
pub(crate) fn eval_vanishing_poly_base<F: Extendable<D>, const D: usize>(
common_data: &CommonCircuitData<F, D>,
index: usize,
x: F,
vars: EvaluationVarsBase<F>,
local_zs: &[F],
next_zs: &[F],
partial_products: &[F],
s_sigmas: &[F],
betas: &[F],
gammas: &[F],
alphas: &[F],
z_h_on_coset: &ZeroPolyOnCoset<F>,
) -> Vec<F> {
let max_degree = common_data.quotient_degree_factor;
let (num_prods, final_num_prod) = common_data.num_partial_products;
let constraint_terms =
evaluate_gate_constraints_base(&common_data.gates, common_data.num_gate_constraints, vars);
// The L_1(x) (Z(x) - 1) vanishing terms.
let mut vanishing_z_1_terms = Vec::new();
// The terms checking the partial products.
let mut vanishing_partial_products_terms = Vec::new();
// The Z(x) f'(x) - g'(x) Z(g x) terms.
let mut vanishing_v_shift_terms = Vec::new();
for i in 0..common_data.config.num_challenges {
let z_x = local_zs[i];
let z_gz = next_zs[i];
vanishing_z_1_terms.push(z_h_on_coset.eval_l1(index, x) * (z_x - F::ONE));
let numerator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let k_i = common_data.k_is[j];
let s_id = k_i * x;
wire_value + betas[i] * s_id + gammas[i]
})
.collect::<Vec<_>>();
let denominator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let s_sigma = s_sigmas[j];
wire_value + betas[i] * s_sigma + gammas[i]
})
.collect::<Vec<_>>();
let quotient_values = (0..common_data.config.num_routed_wires)
.map(|j| numerator_values[j] / denominator_values[j])
.collect::<Vec<_>>();
// The partial products considered for this iteration of `i`.
let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
// Check the quotient partial products.
let mut partial_product_check =
check_partial_products(&quotient_values, current_partial_products, max_degree);
// The first checks are of the form `q - n/d`, which is a rational function, not a polynomial.
// We multiply them by `d` to get checks of the form `q*d - n`, which are low-degree polynomials.
denominator_values
.chunks(max_degree)
.zip(partial_product_check.iter_mut())
.for_each(|(d, q)| {
*q *= d.iter().copied().product();
});
vanishing_partial_products_terms.extend(partial_product_check);
// The quotient final product is the product of the last `final_num_prod` elements.
let quotient: F = current_partial_products[num_prods - final_num_prod..]
.iter()
.copied()
.product();
vanishing_v_shift_terms.push(quotient * z_x - z_gz);
}
let vanishing_terms = [
vanishing_z_1_terms,
vanishing_partial_products_terms,
vanishing_v_shift_terms,
constraint_terms,
]
.concat();
plonk_common::reduce_with_powers_multi(&vanishing_terms, alphas)
}
/// Evaluates all gate constraints.
///
/// `num_gate_constraints` is the largest number of constraints imposed by any gate. It is not
/// strictly necessary, but it helps performance by ensuring that we allocate a vector with exactly
/// the capacity that we need.
pub fn evaluate_gate_constraints<F: Extendable<D>, const D: usize>(
gates: &[PrefixedGate<F, D>],
num_gate_constraints: usize,
vars: EvaluationVars<F, D>,
) -> Vec<F::Extension> {
let mut constraints = vec![F::Extension::ZERO; num_gate_constraints];
for gate in gates {
let gate_constraints = gate.gate.0.eval_filtered(vars, &gate.prefix);
for (i, c) in gate_constraints.into_iter().enumerate() {
debug_assert!(
i < num_gate_constraints,
"num_constraints() gave too low of a number"
);
constraints[i] += c;
}
}
constraints
}
pub fn evaluate_gate_constraints_base<F: Extendable<D>, const D: usize>(
gates: &[PrefixedGate<F, D>],
num_gate_constraints: usize,
vars: EvaluationVarsBase<F>,
) -> Vec<F> {
let mut constraints = vec![F::ZERO; num_gate_constraints];
for gate in gates {
let gate_constraints = gate.gate.0.eval_filtered_base(vars, &gate.prefix);
for (i, c) in gate_constraints.into_iter().enumerate() {
debug_assert!(
i < num_gate_constraints,
"num_constraints() gave too low of a number"
);
constraints[i] += c;
}
}
constraints
}
pub fn evaluate_gate_constraints_recursively<F: Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
gates: &[PrefixedGate<F, D>],
num_gate_constraints: usize,
vars: EvaluationTargets<D>,
) -> Vec<ExtensionTarget<D>> {
let mut constraints = vec![builder.zero_extension(); num_gate_constraints];
for gate in gates {
let gate_constraints = gate
.gate
.0
.eval_filtered_recursively(builder, vars, &gate.prefix);
for (i, c) in gate_constraints.into_iter().enumerate() {
constraints[i] = builder.add_extension(constraints[i], c);
}
}
constraints
}
/// Evaluate the vanishing polynomial at `x`. In this context, the vanishing polynomial is a random
/// linear combination of gate constraints, plus some other terms relating to the permutation
/// argument. All such terms should vanish on `H`.
pub(crate) fn eval_vanishing_poly_recursively<F: Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
common_data: &CommonCircuitData<F, D>,
x: ExtensionTarget<D>,
x_pow_deg: ExtensionTarget<D>,
vars: EvaluationTargets<D>,
local_zs: &[ExtensionTarget<D>],
next_zs: &[ExtensionTarget<D>],
partial_products: &[ExtensionTarget<D>],
s_sigmas: &[ExtensionTarget<D>],
betas: &[Target],
gammas: &[Target],
alphas: &[Target],
) -> Vec<ExtensionTarget<D>> {
let max_degree = common_data.quotient_degree_factor;
let (num_prods, final_num_prod) = common_data.num_partial_products;
let constraint_terms = evaluate_gate_constraints_recursively(
builder,
&common_data.gates,
common_data.num_gate_constraints,
vars,
);
// The L_1(x) (Z(x) - 1) vanishing terms.
let mut vanishing_z_1_terms = Vec::new();
// The terms checking the partial products.
let mut vanishing_partial_products_terms = Vec::new();
// The Z(x) f'(x) - g'(x) Z(g x) terms.
let mut vanishing_v_shift_terms = Vec::new();
for i in 0..common_data.config.num_challenges {
let z_x = local_zs[i];
let z_gz = next_zs[i];
let l1 = eval_l_1_recursively(builder, common_data.degree(), x, x_pow_deg);
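// l1 * z_x - l1 = L_1(x) * (Z(x) - 1).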
vanishing_z_1_terms.push(builder.arithmetic_extension(F::ONE, F::NEG_ONE, l1, z_x, l1));
let numerator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let k_i = builder.constant(common_data.k_is[j]);
let s_id = builder.scalar_mul_ext(k_i, x);
let gamma_ext = builder.convert_to_ext(gammas[i]);
let tmp = builder.scalar_mul_add_extension(betas[i], s_id, wire_value);
builder.add_extension(tmp, gamma_ext)
})
.collect::<Vec<_>>();
let denominator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let s_sigma = s_sigmas[j];
let gamma_ext = builder.convert_to_ext(gammas[i]);
let tmp = builder.scalar_mul_add_extension(betas[i], s_sigma, wire_value);
builder.add_extension(tmp, gamma_ext)
})
.collect::<Vec<_>>();
let quotient_values = (0..common_data.config.num_routed_wires)
.map(|j| builder.div_unsafe_extension(numerator_values[j], denominator_values[j]))
.collect::<Vec<_>>();
// The partial products considered for this iteration of `i`.
let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
// Check the quotient partial products.
let mut partial_product_check = check_partial_products_recursively(
builder,
&quotient_values,
current_partial_products,
max_degree,
);
// The first checks are of the form `q - n/d`, which is a rational function, not a polynomial.
// We multiply them by `d` to get checks of the form `q*d - n`, which are low-degree polynomials.
denominator_values
.chunks(max_degree)
.zip(partial_product_check.iter_mut())
.for_each(|(d, q)| {
let tmp = builder.mul_many_extension(d);
*q = builder.mul_extension(*q, tmp);
});
vanishing_partial_products_terms.extend(partial_product_check);
// The quotient final product is the product of the last `final_num_prod` elements.
let quotient =
builder.mul_many_extension(&current_partial_products[num_prods - final_num_prod..]);
vanishing_v_shift_terms.push(builder.mul_sub_extension(quotient, z_x, z_gz));
}
let vanishing_terms = [
vanishing_z_1_terms,
vanishing_partial_products_terms,
vanishing_v_shift_terms,
constraint_terms,
]
.concat();
dbg!(&vanishing_terms);
alphas
.iter()
.map(|&alpha| {
let alpha = builder.convert_to_ext(alpha);
let mut alpha = ReducingFactorTarget::new(alpha);
alpha.reduce(&vanishing_terms, builder)
})
.collect()
}
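In summary, per challenge i (and likewise in the native `eval_vanishing_poly` variants above), writing n_j(x) = w_j(x) + beta_i * k_j * x + gamma_i and d_j(x) = w_j(x) + beta_i * sigma_j(x) + gamma_i for the numerator and denominator values computed above, the reduced terms are: L_1(x) * (Z(x) - 1); the partial-product checks for prod_j n_j(x) / d_j(x), multiplied through by the d_j so they remain polynomials; Z(x) * prod_j n_j(x) / d_j(x) - Z(g x); and the filtered gate-constraint terms. All of these should vanish on H and are combined with powers of alpha_i.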


@@ -39,6 +39,12 @@ impl<'a, F: Field> EvaluationVarsBase<'a, F> {
}
}
impl<'a, const D: usize> EvaluationTargets<'a, D> {
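/// Drops the leading constants that encode this gate's prefix, so gate evaluation sees only the gate's own constants.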
pub fn remove_prefix(&mut self, prefix: &[bool]) {
self.local_constants = &self.local_constants[prefix.len()..];
}
}
#[derive(Copy, Clone)]
pub struct EvaluationTargets<'a, const D: usize> {
pub(crate) local_constants: &'a [ExtensionTarget<D>],


@@ -4,8 +4,9 @@ use crate::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData};
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
use crate::plonk_challenger::Challenger;
use crate::plonk_common::{eval_vanishing_poly, eval_zero_poly, reduce_with_powers};
use crate::plonk_common::{eval_zero_poly, reduce_with_powers};
use crate::proof::Proof;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::EvaluationVars;
pub(crate) fn verify<F: Extendable<D>, const D: usize>(
@@ -39,7 +40,7 @@ pub(crate) fn verify<F: Extendable<D>, const D: usize>(
};
let local_zs = &proof.openings.plonk_zs;
let next_zs = &proof.openings.plonk_zs_right;
let s_sigmas = &proof.openings.plonk_s_sigmas;
let s_sigmas = &proof.openings.plonk_sigmas;
let partial_products = &proof.openings.partial_products;
// Evaluate the vanishing polynomial at our challenge point, zeta.