Merge pull request #95 from mir-protocol/more_recursive_verifier

Recursive verifier
This commit is contained in:
wborgeaud 2021-07-19 07:11:57 +02:00 committed by GitHub
commit b937679292
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
30 changed files with 1356 additions and 441 deletions

View File

@ -1,4 +1,5 @@
use std::collections::{HashMap, HashSet};
use std::convert::TryInto;
use std::time::Instant;
use log::info;
@ -7,6 +8,7 @@ use crate::circuit_data::{
CircuitConfig, CircuitData, CommonCircuitData, ProverCircuitData, ProverOnlyCircuitData,
VerifierCircuitData, VerifierOnlyCircuitData,
};
use crate::copy_constraint::CopyConstraint;
use crate::field::cosets::get_unique_coset_shifts;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
@ -20,7 +22,9 @@ use crate::permutation_argument::TargetPartition;
use crate::plonk_common::PlonkPolynomials;
use crate::polynomial::commitment::ListPolynomialCommitment;
use crate::polynomial::polynomial::PolynomialValues;
use crate::proof::HashTarget;
use crate::target::Target;
use crate::util::marking::{Markable, MarkedTargets};
use crate::util::partial_products::num_partial_products;
use crate::util::{log2_ceil, log2_strict, transpose, transpose_poly_values};
use crate::wire::Wire;
@ -40,7 +44,13 @@ pub struct CircuitBuilder<F: Extendable<D>, const D: usize> {
/// The next available index for a `VirtualTarget`.
virtual_target_index: usize,
copy_constraints: Vec<(Target, Target)>,
copy_constraints: Vec<CopyConstraint>,
/// A string used to give context to copy constraints.
context: String,
/// A vector of marked targets. The values assigned to these targets will be displayed by the prover.
marked_targets: Vec<MarkedTargets<D>>,
/// Generators used to generate the witness.
generators: Vec<Box<dyn WitnessGenerator<F>>>,
@ -58,6 +68,8 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
public_input_index: 0,
virtual_target_index: 0,
copy_constraints: Vec::new(),
context: String::new(),
marked_targets: Vec::new(),
generators: Vec::new(),
constants_to_targets: HashMap::new(),
targets_to_constants: HashMap::new(),
@ -92,6 +104,24 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
(0..n).map(|_i| self.add_virtual_target()).collect()
}
/// Allocates a fresh `HashTarget` backed by 4 newly created virtual targets.
pub fn add_virtual_hash(&mut self) -> HashTarget {
    let elements = self.add_virtual_targets(4);
    HashTarget::from_vec(elements)
}
/// Allocates `n` fresh `HashTarget`s, each backed by its own virtual targets.
pub fn add_virtual_hashes(&mut self, n: usize) -> Vec<HashTarget> {
    let mut hashes = Vec::with_capacity(n);
    for _ in 0..n {
        hashes.push(self.add_virtual_hash());
    }
    hashes
}
/// Allocates a fresh `ExtensionTarget` from `D` newly created virtual targets.
pub fn add_virtual_extension_target(&mut self) -> ExtensionTarget<D> {
    let limbs = self.add_virtual_targets(D);
    // `add_virtual_targets` returns exactly `D` targets, so the conversion
    // to a fixed-size array cannot fail.
    ExtensionTarget(limbs.try_into().unwrap())
}
/// Allocates `n` fresh `ExtensionTarget`s.
pub fn add_virtual_extension_targets(&mut self, n: usize) -> Vec<ExtensionTarget<D>> {
    let mut targets = Vec::with_capacity(n);
    for _ in 0..n {
        targets.push(self.add_virtual_extension_target());
    }
    targets
}
/// Adds a gate instance that takes no constant arguments; returns its row index.
pub fn add_gate_no_constants(&mut self, gate_type: GateRef<F, D>) -> usize {
self.add_gate(gate_type, Vec::new())
}
@ -138,12 +168,29 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self.assert_equal(src, dst);
}
/// Same as `route` with a named copy constraint.
/// First adds a generator that copies `src`'s witness value into `dst`, then
/// constrains the two targets equal under the given `name` (the order matters:
/// the generator must exist so the witness can be completed).
pub fn named_route(&mut self, src: Target, dst: Target, name: String) {
self.generate_copy(src, dst);
self.named_assert_equal(src, dst, name);
}
/// Routes an `ExtensionTarget` by routing each of its `D` base-field limbs.
pub fn route_extension(&mut self, src: ExtensionTarget<D>, dst: ExtensionTarget<D>) {
    for i in 0..D {
        let (s, d) = (src.0[i], dst.0[i]);
        self.route(s, d);
    }
}
/// Like `route_extension`, but labels the copy constraint of each limb with
/// `name` plus the limb index.
pub fn named_route_extension(
    &mut self,
    src: ExtensionTarget<D>,
    dst: ExtensionTarget<D>,
    name: String,
) {
    for i in 0..D {
        let limb_name = format!("{}: limb {}", name, i);
        self.named_route(src.0[i], dst.0[i], limb_name);
    }
}
/// Adds a generator which will copy `src` to `dst`.
pub fn generate_copy(&mut self, src: Target, dst: Target) {
self.add_generator(CopyGenerator { src, dst });
@ -160,7 +207,24 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
y.is_routable(&self.config),
"Tried to route a wire that isn't routable"
);
self.copy_constraints.push((x, y));
self.copy_constraints
.push(CopyConstraint::new((x, y), self.context.clone()));
}
/// Same as `assert_equal` for a named copy constraint.
/// The constraint is recorded as "<current context>: <name>".
pub fn named_assert_equal(&mut self, x: Target, y: Target, name: String) {
    // Both endpoints must live in routable wires (or be virtual targets).
    for t in &[x, y] {
        assert!(
            t.is_routable(&self.config),
            "Tried to route a wire that isn't routable"
        );
    }
    let full_name = format!("{}: {}", self.context, name);
    self.copy_constraints
        .push(CopyConstraint::new((x, y), full_name));
}
pub fn assert_zero(&mut self, x: Target) {
@ -174,6 +238,18 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}
}
/// Like `named_assert_equal`, but for `ExtensionTarget`s: constrains each of
/// the `D` limbs equal, labeling each constraint with `name` and the limb index.
pub fn named_assert_equal_extension(
    &mut self,
    x: ExtensionTarget<D>,
    y: ExtensionTarget<D>,
    name: String,
) {
    for i in 0..D {
        // A single named constraint per limb suffices; adding an extra
        // unnamed `assert_equal` on the same pair would duplicate the
        // copy constraint for no benefit.
        self.named_assert_equal(x.0[i], y.0[i], format!("{}: limb {}", name, i));
    }
}
pub fn add_generators(&mut self, generators: Vec<Box<dyn WitnessGenerator<F>>>) {
self.generators.extend(generators);
}
@ -229,6 +305,17 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self.targets_to_constants.get(&target).cloned()
}
/// Sets the context string that prefixes subsequently added named copy constraints.
pub fn set_context(&mut self, new_context: &str) {
    self.context = new_context.to_owned();
}
/// Registers a set of targets whose assigned witness values the prover will
/// display under the given `name`.
pub fn add_marked(&mut self, targets: Markable<D>, name: &str) {
    let marked = MarkedTargets {
        targets,
        name: name.to_string(),
    };
    self.marked_targets.push(marked)
}
/// The number of polynomial values that will be revealed per opening, both for the "regular"
/// polynomials and for the Z polynomials. Because calculating these values involves a recursive
/// dependence (the amount of blinding depends on the degree, which depends on the blinding),
@ -382,7 +469,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
target_partition.add(Target::VirtualTarget { index });
}
for &(a, b) in &self.copy_constraints {
for &CopyConstraint { pair: (a, b), .. } in &self.copy_constraints {
target_partition.merge(a, b);
}
@ -437,6 +524,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
subgroup,
copy_constraints: self.copy_constraints,
gate_instances: self.gate_instances,
marked_targets: self.marked_targets,
};
// The HashSet of gates will have a non-deterministic order. When converting to a Vec, we

View File

@ -2,6 +2,7 @@ use std::ops::{Range, RangeFrom};
use anyhow::Result;
use crate::copy_constraint::CopyConstraint;
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
use crate::fri::FriConfig;
@ -10,7 +11,7 @@ use crate::generator::WitnessGenerator;
use crate::polynomial::commitment::ListPolynomialCommitment;
use crate::proof::{Hash, HashTarget, Proof};
use crate::prover::prove;
use crate::target::Target;
use crate::util::marking::MarkedTargets;
use crate::verifier::verify;
use crate::witness::PartialWitness;
@ -125,9 +126,11 @@ pub(crate) struct ProverOnlyCircuitData<F: Extendable<D>, const D: usize> {
/// Subgroup of order `degree`.
pub subgroup: Vec<F>,
/// The circuit's copy constraints.
pub copy_constraints: Vec<(Target, Target)>,
pub copy_constraints: Vec<CopyConstraint>,
/// The concrete placement of each gate in the circuit.
pub gate_instances: Vec<GateInstance<F, D>>,
/// A vector of marked targets. The values assigned to these targets will be displayed by the prover.
pub marked_targets: Vec<MarkedTargets<D>>,
}
/// Circuit data required by the verifier, but not the prover.
@ -222,9 +225,6 @@ impl<F: Extendable<D>, const D: usize> CommonCircuitData<F, D> {
/// limited form of dynamic inner circuits. We can't practically make things like the wire count
/// dynamic, at least not without setting a maximum wire count and paying for the worst case.
pub struct VerifierCircuitTarget {
/// A commitment to each constant polynomial.
pub(crate) constants_root: HashTarget,
/// A commitment to each permutation polynomial.
pub(crate) sigmas_root: HashTarget,
/// A commitment to each constant polynomial and each permutation polynomial.
pub(crate) constants_sigmas_root: HashTarget,
}

22
src/copy_constraint.rs Normal file
View File

@ -0,0 +1,22 @@
use crate::target::Target;
/// A copy constraint between two targets, tagged with a human-readable name
/// that gives context when a constraint fails.
pub struct CopyConstraint {
    /// The pair of targets constrained to be equal.
    pub pair: (Target, Target),
    /// Label describing where the constraint came from (may be empty).
    pub name: String,
}

impl CopyConstraint {
    /// Builds a named copy constraint from a target pair.
    pub fn new(pair: (Target, Target), name: String) -> Self {
        Self { pair, name }
    }
}

impl From<(Target, Target)> for CopyConstraint {
    /// An unnamed constraint: the label defaults to the empty string.
    fn from(pair: (Target, Target)) -> Self {
        CopyConstraint::new(pair, String::new())
    }
}

View File

@ -32,7 +32,7 @@ impl<const D: usize> ExtensionTarget<D> {
}
let arr = self.to_target_array();
let k = (F::ORDER - 1) / (D as u64);
let z0 = F::W.exp(k * count as u64);
let z0 = F::Extension::W.exp(k * count as u64);
let zs = z0
.powers()
.take(D)

View File

@ -1,6 +1,5 @@
use itertools::izip;
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CommonCircuitData;
use crate::field::extension_field::target::{flatten_target, ExtensionTarget};
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
@ -11,6 +10,7 @@ use crate::proof::{
FriInitialTreeProofTarget, FriProofTarget, FriQueryRoundTarget, HashTarget, OpeningSetTarget,
};
use crate::target::Target;
use crate::util::scaling::ReducingFactorTarget;
use crate::util::{log2_strict, reverse_index_bits_in_place};
impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
@ -73,8 +73,9 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
initial_merkle_roots: &[HashTarget],
proof: &FriProofTarget<D>,
challenger: &mut RecursiveChallenger,
config: &FriConfig,
common_data: &CommonCircuitData<F, D>,
) {
let config = &common_data.config.fri_config;
let total_arities = config.reduction_arity_bits.iter().sum::<usize>();
debug_assert_eq!(
purported_degree_log,
@ -85,7 +86,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
// Size of the LDE domain.
let n = proof.final_poly.len() << total_arities;
// Recover the random betas used in the FRI reductions.
self.set_context("Recover the random betas used in the FRI reductions.");
let betas = proof
.commit_phase_merkle_roots
.iter()
@ -96,7 +97,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
.collect::<Vec<_>>();
challenger.observe_extension_elements(&proof.final_poly.0);
// Check PoW.
self.set_context("Check PoW");
self.fri_verify_proof_of_work(proof, challenger, config);
// Check that parameters are coherent.
@ -116,12 +117,12 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
zeta,
alpha,
initial_merkle_roots,
&proof,
proof,
challenger,
n,
&betas,
round_proof,
config,
common_data,
);
}
}
@ -132,7 +133,13 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
proof: &FriInitialTreeProofTarget,
initial_merkle_roots: &[HashTarget],
) {
for ((evals, merkle_proof), &root) in proof.evals_proofs.iter().zip(initial_merkle_roots) {
for (i, ((evals, merkle_proof), &root)) in proof
.evals_proofs
.iter()
.zip(initial_merkle_roots)
.enumerate()
{
self.set_context(&format!("Verify {}-th initial Merkle proof.", i));
self.verify_merkle_proof(evals.clone(), x_index, root, merkle_proof);
}
}
@ -144,12 +151,13 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
os: &OpeningSetTarget<D>,
zeta: ExtensionTarget<D>,
subgroup_x: Target,
common_data: &CommonCircuitData<F, D>,
) -> ExtensionTarget<D> {
assert!(D > 1, "Not implemented for D=1.");
let config = &self.config.fri_config.clone();
let degree_log = proof.evals_proofs[0].1.siblings.len() - config.rate_bits;
let subgroup_x = self.convert_to_ext(subgroup_x);
let mut alpha_powers = self.powers(alpha);
let mut alpha = ReducingFactorTarget::new(alpha);
let mut sum = self.zero_extension();
// We will add three terms to `sum`:
@ -157,57 +165,49 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
// - one for polynomials opened at `x` and `g x`
// - one for polynomials opened at `x` and `x.frobenius()`
// Polynomials opened at `x`, i.e., the constants, sigmas and quotient polynomials.
// Polynomials opened at `x`, i.e., the constants, sigmas, quotient and partial products polynomials.
let single_evals = [
PlonkPolynomials::CONSTANTS_SIGMAS,
PlonkPolynomials::QUOTIENT,
]
.iter()
.flat_map(|&p| proof.unsalted_evals(p))
.chain(
&proof.unsalted_evals(PlonkPolynomials::ZS_PARTIAL_PRODUCTS)
[common_data.partial_products_range()],
)
.map(|&e| self.convert_to_ext(e))
.collect::<Vec<_>>();
let single_openings = os
.constants
.iter()
.chain(&os.plonk_sigmas)
.chain(&os.quotient_polys);
let mut single_numerator = self.zero_extension();
for (e, &o) in izip!(single_evals, single_openings) {
let a = alpha_powers.next(self);
let diff = self.sub_extension(e, o);
single_numerator = self.mul_add_extension(a, diff, single_numerator);
}
.chain(&os.quotient_polys)
.chain(&os.partial_products)
.copied()
.collect::<Vec<_>>();
let mut single_numerator = alpha.reduce(&single_evals, self);
// TODO: Precompute the rhs as it is the same in all FRI rounds.
let rhs = alpha.reduce(&single_openings, self);
single_numerator = self.sub_extension(single_numerator, rhs);
let single_denominator = self.sub_extension(subgroup_x, zeta);
let quotient = self.div_unsafe_extension(single_numerator, single_denominator);
sum = self.add_extension(sum, quotient);
alpha.reset();
// Polynomials opened at `x` and `g x`, i.e., the Zs polynomials.
let zs_evals = proof
.unsalted_evals(PlonkPolynomials::ZS_PARTIAL_PRODUCTS)
.iter()
.take(common_data.zs_range().end)
.map(|&e| self.convert_to_ext(e))
.collect::<Vec<_>>();
// TODO: Would probably be more efficient using `CircuitBuilder::reduce_with_powers_recursive`
let mut zs_composition_eval = self.zero_extension();
let mut alpha_powers_cloned = alpha_powers.clone();
for &e in &zs_evals {
let a = alpha_powers_cloned.next(self);
zs_composition_eval = self.mul_add_extension(a, e, zs_composition_eval);
}
let zs_composition_eval = alpha.clone().reduce(&zs_evals, self);
let g = self.constant_extension(F::Extension::primitive_root_of_unity(degree_log));
let zeta_right = self.mul_extension(g, zeta);
let mut zs_ev_zeta = self.zero_extension();
let mut alpha_powers_cloned = alpha_powers.clone();
for &t in &os.plonk_zs {
let a = alpha_powers_cloned.next(self);
zs_ev_zeta = self.mul_add_extension(a, t, zs_ev_zeta);
}
let mut zs_ev_zeta_right = self.zero_extension();
for &t in &os.plonk_zs_right {
let a = alpha_powers.next(self);
zs_ev_zeta_right = self.mul_add_extension(a, t, zs_ev_zeta);
}
let zs_ev_zeta = alpha.clone().reduce(&os.plonk_zs, self);
let zs_ev_zeta_right = alpha.reduce(&os.plonk_zs_right, self);
let interpol_val = self.interpolate2(
[(zeta, zs_ev_zeta), (zeta_right, zs_ev_zeta_right)],
subgroup_x,
@ -217,6 +217,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
let vanish_zeta_right = self.sub_extension(subgroup_x, zeta_right);
let zs_denominator = self.mul_extension(vanish_zeta, vanish_zeta_right);
let zs_quotient = self.div_unsafe_extension(zs_numerator, zs_denominator);
sum = alpha.shift(sum, self);
sum = self.add_extension(sum, zs_quotient);
// Polynomials opened at `x` and `x.frobenius()`, i.e., the wires polynomials.
@ -225,26 +226,11 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
.iter()
.map(|&e| self.convert_to_ext(e))
.collect::<Vec<_>>();
let mut wire_composition_eval = self.zero_extension();
let mut alpha_powers_cloned = alpha_powers.clone();
for &e in &wire_evals {
let a = alpha_powers_cloned.next(self);
wire_composition_eval = self.mul_add_extension(a, e, wire_composition_eval);
}
let mut alpha_powers_cloned = alpha_powers.clone();
let wire_eval = os.wires.iter().fold(self.zero_extension(), |acc, &w| {
let a = alpha_powers_cloned.next(self);
self.mul_add_extension(a, w, acc)
});
let mut alpha_powers_frob = alpha_powers.repeated_frobenius(D - 1, self);
let wire_eval_frob = os
.wires
.iter()
.fold(self.zero_extension(), |acc, &w| {
let a = alpha_powers_frob.next(self);
self.mul_add_extension(a, w, acc)
})
.frobenius(self);
let wire_composition_eval = alpha.clone().reduce(&wire_evals, self);
let mut alpha_frob = alpha.repeated_frobenius(D - 1, self);
let wire_eval = alpha.reduce(&os.wires, self);
let wire_eval_frob = alpha_frob.reduce(&os.wires, self);
let wire_eval_frob = wire_eval_frob.frobenius(self);
let zeta_frob = zeta.frobenius(self);
let wire_interpol_val =
self.interpolate2([(zeta, wire_eval), (zeta_frob, wire_eval_frob)], subgroup_x);
@ -252,6 +238,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
let vanish_zeta_frob = self.sub_extension(subgroup_x, zeta_frob);
let wire_denominator = self.mul_extension(vanish_zeta, vanish_zeta_frob);
let wire_quotient = self.div_unsafe_extension(wire_numerator, wire_denominator);
sum = alpha.shift(sum, self);
sum = self.add_extension(sum, wire_quotient);
sum
@ -268,8 +255,9 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
n: usize,
betas: &[ExtensionTarget<D>],
round_proof: &FriQueryRoundTarget<D>,
config: &FriConfig,
common_data: &CommonCircuitData<F, D>,
) {
let config = &common_data.config.fri_config;
let n_log = log2_strict(n);
let mut evaluations: Vec<Vec<ExtensionTarget<D>>> = Vec::new();
// TODO: Do we need to range check `x_index` to a target smaller than `p`?
@ -277,6 +265,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
x_index = self.split_low_high(x_index, n_log, 64).0;
let mut x_index_num_bits = n_log;
let mut domain_size = n;
self.set_context("Check FRI initial proof.");
self.fri_verify_initial_proof(
x_index,
&round_proof.initial_trees_proof,
@ -300,6 +289,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
os,
zeta,
subgroup_x,
common_data,
)
} else {
let last_evals = &evaluations[i - 1];
@ -318,6 +308,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self.split_low_high(x_index, arity_bits, x_index_num_bits);
evals = self.insert(low_x_index, e_x, evals);
evaluations.push(evals);
self.set_context("Verify FRI round Merkle proof.");
self.verify_merkle_proof(
flatten_target(&evaluations[i]),
high_x_index,

View File

@ -163,7 +163,7 @@ fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
// - one for Zs, which are opened at `x` and `g x`
// - one for wire polynomials, which are opened at `x` and `x.frobenius()`
// Polynomials opened at `x`, i.e., the constants, sigmas and quotient polynomials.
// Polynomials opened at `x`, i.e., the constants, sigmas, quotient and partial products polynomials.
let single_evals = [
PlonkPolynomials::CONSTANTS_SIGMAS,
PlonkPolynomials::QUOTIENT,
@ -178,7 +178,7 @@ fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
let single_openings = os
.constants
.iter()
.chain(&os.plonk_s_sigmas)
.chain(&os.plonk_sigmas)
.chain(&os.quotient_polys)
.chain(&os.partial_products);
let single_diffs = single_evals

View File

@ -252,6 +252,16 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self.arithmetic_extension(F::ONE, F::ONE, a_ext, b, c)
}
/// Like `mul_sub`, but for `ExtensionTarget`s.
/// Computes `a * b - c`, expressed as `1 * a * b + (-1) * c` so it fits a
/// single arithmetic-extension operation.
pub fn mul_sub_extension(
&mut self,
a: ExtensionTarget<D>,
b: ExtensionTarget<D>,
c: ExtensionTarget<D>,
) -> ExtensionTarget<D> {
self.arithmetic_extension(F::ONE, F::NEG_ONE, a, b, c)
}
/// Like `mul_sub`, but for `ExtensionTarget`s.
pub fn scalar_mul_sub_extension(
&mut self,
@ -282,6 +292,19 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
b
}
/// Exponentiate `base` to the power of `2^power_log` by repeated squaring.
// TODO: Test
pub fn exp_power_of_2(
    &mut self,
    base: ExtensionTarget<D>,
    power_log: usize,
) -> ExtensionTarget<D> {
    // Square `power_log` times: base^(2^power_log).
    (0..power_log).fold(base, |acc, _| self.square_extension(acc))
}
/// Exponentiate `base` to the power of a known `exponent`.
// TODO: Test
pub fn exp_u64_extension(
@ -437,7 +460,6 @@ mod tests {
use crate::field::crandall_field::CrandallField;
use crate::field::extension_field::quartic::QuarticCrandallField;
use crate::field::field::Field;
use crate::fri::FriConfig;
use crate::verifier::verify;
use crate::witness::PartialWitness;

View File

@ -65,7 +65,7 @@ mod tests {
use crate::field::extension_field::quartic::QuarticCrandallField;
use crate::field::extension_field::FieldExtension;
use crate::field::field::Field;
use crate::field::interpolation::{interpolant, interpolate};
use crate::field::interpolation::interpolant;
use crate::verifier::verify;
use crate::witness::PartialWitness;

View File

@ -170,7 +170,6 @@ impl<F: Field, const B: usize> SimpleGenerator<F> for BaseSplitGenerator<B> {
#[cfg(test)]
mod tests {
use crate::circuit_data::CircuitConfig;
use crate::field::crandall_field::CrandallField;
use crate::gates::base_sum::BaseSumGate;
use crate::gates::gate_testing::test_low_degree;

View File

@ -76,10 +76,15 @@ pub trait Gate<F: Extendable<D>, const D: usize>: 'static + Send + Sync {
fn eval_filtered_recursively(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: EvaluationTargets<D>,
mut vars: EvaluationTargets<D>,
prefix: &[bool],
) -> Vec<ExtensionTarget<D>> {
// TODO: Filter
let filter = compute_filter_recursively(builder, prefix, vars.local_constants);
vars.remove_prefix(prefix);
self.eval_unfiltered_recursively(builder, vars)
.into_iter()
.map(|c| builder.mul_extension(filter, c))
.collect()
}
fn generators(
@ -167,3 +172,24 @@ fn compute_filter<K: Field>(prefix: &[bool], constants: &[K]) -> K {
})
.product()
}
/// In-circuit analogue of `compute_filter`: builds the product over the prefix
/// bits of `constants[i]` (when the bit is 1) or `1 - constants[i]` (when 0).
fn compute_filter_recursively<F: Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    prefix: &[bool],
    constants: &[ExtensionTarget<D>],
) -> ExtensionTarget<D> {
    let one = builder.one_extension();
    let mut factors = Vec::with_capacity(prefix.len());
    for (i, &bit) in prefix.iter().enumerate() {
        let factor = if bit {
            constants[i]
        } else {
            builder.sub_extension(one, constants[i])
        };
        factors.push(factor);
    }
    builder.mul_many_extension(&factors)
}

View File

@ -129,16 +129,13 @@ impl<F: Extendable<D>, const D: usize, const R: usize> Gate<F, D> for GMiMCGate<
let mut constraints = Vec::with_capacity(self.num_constraints());
let swap = vars.local_wires[Self::WIRE_SWAP];
let one_ext = builder.one_extension();
let not_swap = builder.sub_extension(swap, one_ext);
constraints.push(builder.mul_extension(swap, not_swap));
constraints.push(builder.mul_sub_extension(swap, swap, swap));
let old_index_acc = vars.local_wires[Self::WIRE_INDEX_ACCUMULATOR_OLD];
let new_index_acc = vars.local_wires[Self::WIRE_INDEX_ACCUMULATOR_NEW];
// computed_new_index_acc = 2 * old_index_acc + swap
let two = builder.two();
let double_old_index_acc = builder.scalar_mul_ext(two, old_index_acc);
let computed_new_index_acc = builder.add_extension(double_old_index_acc, swap);
let two = builder.two_extension();
let computed_new_index_acc = builder.mul_add_extension(two, old_index_acc, swap);
constraints.push(builder.sub_extension(computed_new_index_acc, new_index_acc));
let mut state = Vec::with_capacity(12);
@ -168,8 +165,10 @@ impl<F: Extendable<D>, const D: usize, const R: usize> Gate<F, D> for GMiMCGate<
let constant = builder.constant_extension(self.constants[r].into());
let cubing_input =
builder.add_many_extension(&[state[active], addition_buffer, constant]);
let square = builder.mul_extension(cubing_input, cubing_input);
let f = builder.mul_extension(square, cubing_input);
let cubing_input_wire = vars.local_wires[Self::wire_cubing_input(r)];
constraints.push(builder.sub_extension(cubing_input, cubing_input_wire));
let square = builder.mul_extension(cubing_input_wire, cubing_input_wire);
let f = builder.mul_extension(square, cubing_input_wire);
addition_buffer = builder.add_extension(addition_buffer, f);
state[active] = builder.sub_extension(state[active], f);
}
@ -316,15 +315,17 @@ mod tests {
use std::convert::TryInto;
use std::sync::Arc;
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CircuitConfig;
use crate::field::crandall_field::CrandallField;
use crate::field::extension_field::quartic::QuarticCrandallField;
use crate::field::field::Field;
use crate::gates::gate_testing::test_low_degree;
use crate::gates::gmimc::{GMiMCGate, W};
use crate::generator::generate_partial_witness;
use crate::gmimc::gmimc_permute_naive;
use crate::permutation_argument::TargetPartition;
use crate::target::Target;
use crate::vars::{EvaluationTargets, EvaluationVars};
use crate::verifier::verify;
use crate::wire::Wire;
use crate::witness::PartialWitness;
@ -399,4 +400,47 @@ mod tests {
let gate = Gate::with_constants(constants);
test_low_degree(gate)
}
#[test]
fn test_evals() {
    // Checks that the recursive (in-circuit) constraint evaluation of the
    // GMiMC gate agrees with the native evaluation on random wire values.
    type F = CrandallField;
    type FF = QuarticCrandallField;
    const R: usize = 101;
    let config = CircuitConfig::large_config();
    let mut builder = CircuitBuilder::<F, 4>::new(config);
    let mut pw = PartialWitness::<F>::new();

    let constants = Arc::new([F::TWO; R]);
    type Gate = GMiMCGate<F, 4, R>;
    let gate = Gate::with_constants(constants);

    let wires = FF::rand_vec(Gate::end());
    let vars = EvaluationVars {
        local_constants: &[],
        local_wires: &wires,
    };
    // Native evaluation. (Fixed: the original had redundant parens `((vars))`.)
    let ev = gate.0.eval_unfiltered(vars);

    let wires_t = builder.add_virtual_extension_targets(Gate::end());
    for i in 0..Gate::end() {
        pw.set_extension_target(wires_t[i], wires[i]);
    }
    let vars_t = EvaluationTargets {
        local_constants: &[],
        local_wires: &wires_t,
    };
    // In-circuit evaluation; constrain each output equal to the native one.
    let ev_t = gate.0.eval_unfiltered_recursively(&mut builder, vars_t);
    assert_eq!(ev.len(), ev_t.len());
    for (e, e_t) in ev.into_iter().zip(ev_t) {
        let e_c = builder.constant_extension(e);
        builder.assert_equal_extension(e_c, e_t);
    }
    let data = builder.build();
    let proof = data.prove(pw);
    verify(proof, &data.verifier_only, &data.common).unwrap();
}
}

View File

@ -212,7 +212,7 @@ pub fn hash_n_to_m<F: Field>(mut inputs: Vec<F>, num_outputs: usize, pad: bool)
// Absorb all input chunks.
for input_chunk in inputs.chunks(SPONGE_RATE) {
for i in 0..input_chunk.len() {
state[i] += input_chunk[i];
state[i] = input_chunk[i];
}
state = permute(state);
}

View File

@ -2,6 +2,7 @@
pub mod circuit_builder;
pub mod circuit_data;
pub mod copy_constraint;
pub mod field;
pub mod fri;
pub mod gadgets;
@ -21,6 +22,7 @@ pub mod recursive_verifier;
pub mod rescue;
pub mod target;
pub mod util;
pub mod vanishing_poly;
pub mod vars;
pub mod verifier;
pub mod wire;

View File

@ -18,6 +18,7 @@ pub struct MerkleProof<F: Field> {
pub siblings: Vec<Hash<F>>,
}
#[derive(Clone)]
pub struct MerkleProofTarget {
/// The Merkle digest of each sibling subtree, staying from the bottommost layer.
pub siblings: Vec<HashTarget>,
@ -125,9 +126,11 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
)
}
self.assert_equal(acc_leaf_index, leaf_index);
// TODO: this is far from optimal.
let leaf_index_rev = self.reverse_limbs::<2>(leaf_index, height);
self.assert_equal(acc_leaf_index, leaf_index_rev);
self.assert_hashes_equal(state, merkle_root)
self.named_assert_hashes_equal(state, merkle_root, "Check Merkle root".into())
}
pub(crate) fn assert_hashes_equal(&mut self, x: HashTarget, y: HashTarget) {
@ -135,4 +138,70 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
self.assert_equal(x.elements[i], y.elements[i]);
}
}
/// Constrains two hashes to be equal element-wise, labeling each of the 4
/// per-element copy constraints with `name` and the element index.
pub(crate) fn named_assert_hashes_equal(&mut self, x: HashTarget, y: HashTarget, name: String) {
    for i in 0..4 {
        let label = format!("{}: {}-th hash element", name, i);
        self.named_assert_equal(x.elements[i], y.elements[i], label);
    }
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use rand::{thread_rng, Rng};
use super::*;
use crate::circuit_data::CircuitConfig;
use crate::field::crandall_field::CrandallField;
use crate::merkle_tree::MerkleTree;
use crate::verifier::verify;
use crate::witness::PartialWitness;
/// Generates `n` random leaves, each a vector of `k` field elements.
fn random_data<F: Field>(n: usize, k: usize) -> Vec<Vec<F>> {
    let mut rows = Vec::with_capacity(n);
    for _ in 0..n {
        rows.push(F::rand_vec(k));
    }
    rows
}
#[test]
fn test_recursive_merkle_proof() -> Result<()> {
    // Builds a circuit that verifies a Merkle proof for a random leaf of a
    // random 2^8-leaf tree, then proves and verifies that circuit.
    type F = CrandallField;
    let config = CircuitConfig::large_config();
    let mut builder = CircuitBuilder::<F, 4>::new(config);
    let mut pw = PartialWitness::new();

    let log_n = 8;
    let n = 1 << log_n;
    let leaves = random_data::<F>(n, 7);
    let tree = MerkleTree::new(leaves, false);
    let i: usize = thread_rng().gen_range(0, n);
    let merkle_proof = tree.prove(i);

    let proof_t = MerkleProofTarget {
        siblings: builder.add_virtual_hashes(merkle_proof.siblings.len()),
    };
    for j in 0..merkle_proof.siblings.len() {
        pw.set_hash_target(proof_t.siblings[j], merkle_proof.siblings[j]);
    }

    let root_t = builder.add_virtual_hash();
    pw.set_hash_target(root_t, tree.root);

    let i_c = builder.constant(F::from_canonical_usize(i));

    // Targets holding the leaf's data; renamed from `data` to avoid shadowing
    // the circuit data below.
    let leaf_data = builder.add_virtual_targets(tree.leaves[i].len());
    for j in 0..leaf_data.len() {
        pw.set_target(leaf_data[j], tree.leaves[i][j]);
    }

    builder.verify_merkle_proof(leaf_data, i_c, root_t, &proof_t);

    let circuit = builder.build();
    let proof = circuit.prove(pw);
    verify(proof, &circuit.verifier_only, &circuit.common)
}
}

View File

@ -114,7 +114,7 @@ mod tests {
let leaves = random_data::<F>(n, 7);
verify_all_leaves(leaves.clone(), n, false)?;
verify_all_leaves(leaves.clone(), n, true)?;
verify_all_leaves(leaves, n, true)?;
Ok(())
}

View File

@ -114,26 +114,11 @@ impl<F: Fn(Target) -> usize> TargetPartition<Target, F> {
pub struct WirePartitions {
partition: Vec<Vec<Wire>>,
// TODO: We don't need `indices` anymore, so we can delete it.
indices: HashMap<Wire, usize>,
}
impl WirePartitions {
/// Find a wire's "neighbor" in the context of Plonk's "extended copy constraints" check. In
/// other words, find the next wire in the given wire's partition. If the given wire is last in
/// its partition, this will loop around. If the given wire has a partition all to itself, it
/// is considered its own neighbor.
fn get_neighbor(&self, wire: Wire) -> Wire {
let partition = &self.partition[self.indices[&wire]];
let n = partition.len();
for i in 0..n {
if partition[i] == wire {
let neighbor_index = (i + 1) % n;
return partition[neighbor_index];
}
}
panic!("Wire not found in the expected partition")
}
pub(crate) fn get_sigma_polys<F: Field>(
&self,
degree_log: usize,
@ -161,11 +146,22 @@ impl WirePartitions {
debug_assert_eq!(self.indices.len() % degree, 0);
let num_routed_wires = self.indices.len() / degree;
// Find a wire's "neighbor" in the context of Plonk's "extended copy constraints" check. In
// other words, find the next wire in the given wire's partition. If the given wire is last in
// its partition, this will loop around. If the given wire has a partition all to itself, it
// is considered its own neighbor.
let mut neighbors = HashMap::new();
for subset in &self.partition {
for n in 0..subset.len() {
neighbors.insert(subset[n], subset[(n + 1) % subset.len()]);
}
}
let mut sigma = Vec::new();
for input in 0..num_routed_wires {
for gate in 0..degree {
let wire = Wire { gate, input };
let neighbor = self.get_neighbor(wire);
let neighbor = neighbors[&wire];
sigma.push(neighbor.input * degree + neighbor.gate);
}
}

View File

@ -5,7 +5,7 @@ use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::{Extendable, FieldExtension};
use crate::field::field::Field;
use crate::hash::{permute, SPONGE_RATE, SPONGE_WIDTH};
use crate::proof::{Hash, HashTarget, OpeningSet};
use crate::proof::{Hash, HashTarget, OpeningSet, OpeningSetTarget};
use crate::target::Target;
/// Observes prover messages, and generates challenges by hashing the transcript.
@ -68,7 +68,7 @@ impl<F: Field> Challenger<F> {
{
let OpeningSet {
constants,
plonk_s_sigmas,
plonk_sigmas,
wires,
plonk_zs,
plonk_zs_right,
@ -77,7 +77,7 @@ impl<F: Field> Challenger<F> {
} = os;
for v in &[
constants,
plonk_s_sigmas,
plonk_sigmas,
wires,
plonk_zs,
plonk_zs_right,
@ -211,6 +211,29 @@ impl RecursiveChallenger {
}
}
/// Absorbs every opened evaluation of the opening set into the transcript, in
/// the fixed order of the struct's fields. The exhaustive destructuring makes
/// this fail to compile if a field is ever added and not observed.
pub fn observe_opening_set<const D: usize>(&mut self, os: &OpeningSetTarget<D>) {
    let OpeningSetTarget {
        constants,
        plonk_sigmas,
        wires,
        plonk_zs,
        plonk_zs_right,
        partial_products,
        quotient_polys,
    } = os;
    self.observe_extension_elements(constants);
    self.observe_extension_elements(plonk_sigmas);
    self.observe_extension_elements(wires);
    self.observe_extension_elements(plonk_zs);
    self.observe_extension_elements(plonk_zs_right);
    self.observe_extension_elements(partial_products);
    self.observe_extension_elements(quotient_polys);
}
/// Absorbs all elements of `hash` into the challenger transcript.
pub fn observe_hash(&mut self, hash: &HashTarget) {
self.observe_elements(&hash.elements)
}
@ -323,7 +346,6 @@ mod tests {
use crate::field::crandall_field::CrandallField;
use crate::field::field::Field;
use crate::generator::generate_partial_witness;
use crate::permutation_argument::TargetPartition;
use crate::plonk_challenger::{Challenger, RecursiveChallenger};
use crate::target::Target;
use crate::witness::PartialWitness;

View File

@ -1,16 +1,12 @@
use std::borrow::Borrow;
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CommonCircuitData;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
use crate::gates::gate::{GateRef, PrefixedGate};
use crate::polynomial::commitment::SALT_SIZE;
use crate::polynomial::polynomial::PolynomialCoeffs;
use crate::target::Target;
use crate::util::partial_products::check_partial_products;
use crate::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase};
/// Holds the Merkle tree index and blinding flag of a set of polynomials used in FRI.
#[derive(Debug, Copy, Clone)]
@ -58,243 +54,6 @@ impl PlonkPolynomials {
}
}
/// Evaluate the vanishing polynomial at `x`. In this context, the vanishing polynomial is a random
/// linear combination of gate constraints, plus some other terms relating to the permutation
/// argument. All such terms should vanish on `H`.
///
/// For each of the `num_challenges` independent runs of the permutation argument (index `i`),
/// three families of terms are produced, then everything (including the gate-constraint terms)
/// is combined with powers of `alphas[i]`:
/// - `L_1(x) * (Z(x) - 1)`: forces each `Z` to start at 1;
/// - partial-product checks tying the claimed `partial_products` to the wire quotients;
/// - `Z(x) f'(x) - g'(x) Z(g x)`: the running-product update of the permutation argument.
pub(crate) fn eval_vanishing_poly<F: Extendable<D>, const D: usize>(
common_data: &CommonCircuitData<F, D>,
x: F::Extension,
vars: EvaluationVars<F, D>,
local_zs: &[F::Extension],
next_zs: &[F::Extension],
partial_products: &[F::Extension],
s_sigmas: &[F::Extension],
betas: &[F],
gammas: &[F],
alphas: &[F],
) -> Vec<F::Extension> {
let partial_products_degree = common_data.quotient_degree_factor;
let (num_prods, final_num_prod) = common_data.num_partial_products;
let constraint_terms =
evaluate_gate_constraints(&common_data.gates, common_data.num_gate_constraints, vars);
// The L_1(x) (Z(x) - 1) vanishing terms.
let mut vanishing_z_1_terms = Vec::new();
// The terms checking the partial products.
let mut vanishing_partial_products_terms = Vec::new();
// The Z(x) f'(x) - g'(x) Z(g x) terms.
let mut vanishing_v_shift_terms = Vec::new();
for i in 0..common_data.config.num_challenges {
let z_x = local_zs[i];
let z_gz = next_zs[i];
vanishing_z_1_terms.push(eval_l_1(common_data.degree(), x) * (z_x - F::Extension::ONE));
// f'(x) factors: wire + beta * k_i * x + gamma, one per routed wire.
let numerator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let k_i = common_data.k_is[j];
let s_id = x * k_i.into();
wire_value + s_id * betas[i].into() + gammas[i].into()
})
.collect::<Vec<_>>();
// g'(x) factors: wire + beta * sigma(x) + gamma, using the claimed sigma openings.
let denominator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let s_sigma = s_sigmas[j];
wire_value + s_sigma * betas[i].into() + gammas[i].into()
})
.collect::<Vec<_>>();
let quotient_values = (0..common_data.config.num_routed_wires)
.map(|j| numerator_values[j] / denominator_values[j])
.collect::<Vec<_>>();
// The partial products considered for this iteration of `i`.
let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
// Check the quotient partial products.
let mut partial_product_check = check_partial_products(
&quotient_values,
current_partial_products,
partial_products_degree,
);
// The first checks are of the form `q - n/d` which is a rational function not a polynomial.
// We multiply them by `d` to get checks of the form `q*d - n` which low-degree polynomials.
denominator_values
.chunks(partial_products_degree)
.zip(partial_product_check.iter_mut())
.for_each(|(d, q)| {
*q *= d.iter().copied().product();
});
vanishing_partial_products_terms.extend(partial_product_check);
// The quotient final product is the product of the last `final_num_prod` elements.
let quotient: F::Extension = current_partial_products[num_prods - final_num_prod..]
.iter()
.copied()
.product();
vanishing_v_shift_terms.push(quotient * z_x - z_gz);
}
let vanishing_terms = [
vanishing_z_1_terms,
vanishing_partial_products_terms,
vanishing_v_shift_terms,
constraint_terms,
]
.concat();
// Lift the base-field alphas into the extension before the random linear combination.
let alphas = &alphas.iter().map(|&a| a.into()).collect::<Vec<_>>();
reduce_with_powers_multi(&vanishing_terms, alphas)
}
/// Like `eval_vanishing_poly`, but specialized for base field points.
///
/// `index` identifies the evaluation point within the LDE coset so that `L_1` can be read from
/// the precomputed `z_h_on_coset` table instead of being recomputed from scratch.
pub(crate) fn eval_vanishing_poly_base<F: Extendable<D>, const D: usize>(
common_data: &CommonCircuitData<F, D>,
index: usize,
x: F,
vars: EvaluationVarsBase<F>,
local_zs: &[F],
next_zs: &[F],
partial_products: &[F],
s_sigmas: &[F],
betas: &[F],
gammas: &[F],
alphas: &[F],
z_h_on_coset: &ZeroPolyOnCoset<F>,
) -> Vec<F> {
let partial_products_degree = common_data.quotient_degree_factor;
let (num_prods, final_num_prod) = common_data.num_partial_products;
let constraint_terms =
evaluate_gate_constraints_base(&common_data.gates, common_data.num_gate_constraints, vars);
// The L_1(x) (Z(x) - 1) vanishing terms.
let mut vanishing_z_1_terms = Vec::new();
// The terms checking the partial products.
let mut vanishing_partial_products_terms = Vec::new();
// The Z(x) f'(x) - g'(x) Z(g x) terms.
let mut vanishing_v_shift_terms = Vec::new();
for i in 0..common_data.config.num_challenges {
let z_x = local_zs[i];
let z_gz = next_zs[i];
vanishing_z_1_terms.push(z_h_on_coset.eval_l1(index, x) * (z_x - F::ONE));
// f'(x) factors: wire + beta * k_i * x + gamma, one per routed wire.
let numerator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let k_i = common_data.k_is[j];
let s_id = k_i * x;
wire_value + betas[i] * s_id + gammas[i]
})
.collect::<Vec<_>>();
// g'(x) factors: wire + beta * sigma(x) + gamma.
let denominator_values = (0..common_data.config.num_routed_wires)
.map(|j| {
let wire_value = vars.local_wires[j];
let s_sigma = s_sigmas[j];
wire_value + betas[i] * s_sigma + gammas[i]
})
.collect::<Vec<_>>();
let quotient_values = (0..common_data.config.num_routed_wires)
.map(|j| numerator_values[j] / denominator_values[j])
.collect::<Vec<_>>();
// The partial products considered for this iteration of `i`.
let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
// Check the quotient partial products.
let mut partial_product_check = check_partial_products(
&quotient_values,
current_partial_products,
partial_products_degree,
);
// The first checks are of the form `q - n/d` which is a rational function not a polynomial.
// We multiply them by `d` to get checks of the form `q*d - n` which low-degree polynomials.
denominator_values
.chunks(partial_products_degree)
.zip(partial_product_check.iter_mut())
.for_each(|(d, q)| {
*q *= d.iter().copied().product();
});
vanishing_partial_products_terms.extend(partial_product_check);
// The quotient final product is the product of the last `final_num_prod` elements.
let quotient: F = current_partial_products[num_prods - final_num_prod..]
.iter()
.copied()
.product();
vanishing_v_shift_terms.push(quotient * z_x - z_gz);
}
let vanishing_terms = [
vanishing_z_1_terms,
vanishing_partial_products_terms,
vanishing_v_shift_terms,
constraint_terms,
]
.concat();
reduce_with_powers_multi(&vanishing_terms, alphas)
}
/// Sums the filtered constraint evaluations of every gate into a single vector, one running
/// total per constraint slot.
///
/// `num_gate_constraints` is the largest number of constraints any single gate imposes. It is
/// not strictly required for correctness, but it lets us allocate the result vector with
/// exactly the capacity we need.
pub fn evaluate_gate_constraints<F: Extendable<D>, const D: usize>(
    gates: &[PrefixedGate<F, D>],
    num_gate_constraints: usize,
    vars: EvaluationVars<F, D>,
) -> Vec<F::Extension> {
    let mut sums = vec![F::Extension::ZERO; num_gate_constraints];
    for prefixed in gates {
        let evals = prefixed.gate.0.eval_filtered(vars, &prefixed.prefix);
        for (slot, term) in evals.into_iter().enumerate() {
            debug_assert!(
                slot < num_gate_constraints,
                "num_constraints() gave too low of a number"
            );
            sums[slot] += term;
        }
    }
    sums
}
/// Base-field analogue of `evaluate_gate_constraints`: sums every gate's filtered constraint
/// evaluations over `F` rather than its degree-`D` extension.
pub fn evaluate_gate_constraints_base<F: Extendable<D>, const D: usize>(
    gates: &[PrefixedGate<F, D>],
    num_gate_constraints: usize,
    vars: EvaluationVarsBase<F>,
) -> Vec<F> {
    let mut sums = vec![F::ZERO; num_gate_constraints];
    for prefixed in gates {
        let evals = prefixed.gate.0.eval_filtered_base(vars, &prefixed.prefix);
        for (slot, term) in evals.into_iter().enumerate() {
            debug_assert!(
                slot < num_gate_constraints,
                "num_constraints() gave too low of a number"
            );
            sums[slot] += term;
        }
    }
    sums
}
/// In-circuit analogue of `evaluate_gate_constraints`: accumulates each gate's filtered
/// constraint evaluations with circuit arithmetic, yielding one extension target per slot.
pub fn evaluate_gate_constraints_recursively<F: Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    gates: &[GateRef<F, D>],
    num_gate_constraints: usize,
    vars: EvaluationTargets<D>,
) -> Vec<ExtensionTarget<D>> {
    // Start every slot at the circuit's zero and fold each gate's terms in with additions.
    let zero = builder.zero_extension();
    let mut sums = vec![zero; num_gate_constraints];
    for gate in gates {
        let evals = gate.0.eval_filtered_recursively(builder, vars);
        for (slot, term) in evals.into_iter().enumerate() {
            sums[slot] = builder.add_extension(sums[slot], term);
        }
    }
    sums
}
/// Evaluate the polynomial which vanishes on any multiplicative subgroup of a given order `n`.
pub(crate) fn eval_zero_poly<F: Field>(n: usize, x: F) -> F {
// Z(x) = x^n - 1
@ -360,6 +119,28 @@ pub(crate) fn eval_l_1<F: Field>(n: usize, x: F) -> F {
eval_zero_poly(n, x) / (F::from_canonical_usize(n) * (x - F::ONE))
}
/// In-circuit evaluation of the Lagrange basis polynomial `L_1` at `x` for a multiplicative
/// subgroup of order `n`. The caller passes `x_pow_n` so an already-computed power of `x`
/// (e.g. `zeta^n` from the quotient check) can be reused instead of re-exponentiating.
pub(crate) fn eval_l_1_recursively<F: Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
n: usize,
x: ExtensionTarget<D>,
x_pow_n: ExtensionTarget<D>,
) -> ExtensionTarget<D> {
// L_1(x) = (x^n - 1) / (n * (x - 1))
// = Z(x) / (n * (x - 1))
let one = builder.one_extension();
let neg_one = builder.neg_one();
let neg_one = builder.convert_to_ext(neg_one);
// Numerator Z(x) = x^n - 1.
let eval_zero_poly = builder.sub_extension(x_pow_n, one);
// Denominator n * (x - 1), built as n*x*1 + n*(-1) — assumes `arithmetic_extension(c0, c1,
// a, b, c)` computes c0*a*b + c1*c; TODO(review) confirm against its definition.
let denominator = builder.arithmetic_extension(
F::from_canonical_usize(n),
F::from_canonical_usize(n),
x,
one,
neg_one,
);
// NOTE(review): the division is unconstrained against a zero denominator (x = 1);
// presumably the evaluation point is sampled outside the subgroup — confirm.
builder.div_unsafe_extension(eval_zero_poly, denominator)
}
/// For each alpha in alphas, compute a reduction of the given terms using powers of alpha.
pub(crate) fn reduce_with_powers_multi<F: Field>(terms: &[F], alphas: &[F]) -> Vec<F> {
alphas

View File

@ -2,19 +2,21 @@ use anyhow::Result;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CommonCircuitData;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::extension_field::{FieldExtension, Frobenius};
use crate::field::field::Field;
use crate::fri::{prover::fri_proof, verifier::verify_fri_proof};
use crate::merkle_tree::MerkleTree;
use crate::plonk_challenger::Challenger;
use crate::plonk_challenger::{Challenger, RecursiveChallenger};
use crate::plonk_common::PlonkPolynomials;
use crate::polynomial::polynomial::{PolynomialCoeffs, PolynomialValues};
use crate::proof::{FriProof, FriProofTarget, Hash, OpeningSet};
use crate::proof::{FriProof, FriProofTarget, Hash, HashTarget, OpeningSet, OpeningSetTarget};
use crate::timed;
use crate::util::scaling::ReducingFactor;
use crate::util::{log2_strict, reverse_bits, reverse_index_bits_in_place, transpose};
use crate::util::{log2_ceil, log2_strict, reverse_bits, reverse_index_bits_in_place, transpose};
pub const SALT_SIZE: usize = 2;
@ -246,15 +248,15 @@ impl<F: Field> ListPolynomialCommitment<F> {
}
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "")]
pub struct OpeningProof<F: Field + Extendable<D>, const D: usize> {
fri_proof: FriProof<F, D>,
pub struct OpeningProof<F: Extendable<D>, const D: usize> {
pub(crate) fri_proof: FriProof<F, D>,
// TODO: Get the degree from `CommonCircuitData` instead.
quotient_degree: usize,
}
impl<F: Field + Extendable<D>, const D: usize> OpeningProof<F, D> {
impl<F: Extendable<D>, const D: usize> OpeningProof<F, D> {
pub fn verify(
&self,
zeta: F::Extension,
@ -281,7 +283,34 @@ impl<F: Field + Extendable<D>, const D: usize> OpeningProof<F, D> {
}
pub struct OpeningProofTarget<const D: usize> {
fri_proof: FriProofTarget<D>,
pub(crate) fri_proof: FriProofTarget<D>,
}
impl<const D: usize> OpeningProofTarget<D> {
/// In-circuit verification of this opening proof: absorbs the claimed openings into the
/// transcript, samples the FRI combination challenge `alpha`, and adds constraints checking
/// the FRI proof against the given commitment `merkle_roots` at the evaluation point `zeta`.
pub fn verify<F: Extendable<D>>(
&self,
zeta: ExtensionTarget<D>,
os: &OpeningSetTarget<D>,
merkle_roots: &[HashTarget],
challenger: &mut RecursiveChallenger,
common_data: &CommonCircuitData<F, D>,
builder: &mut CircuitBuilder<F, D>,
) {
// The openings must enter the transcript before `alpha` is sampled, mirroring the prover.
challenger.observe_opening_set(os);
let alpha = challenger.get_extension_challenge(builder);
builder.verify_fri_proof(
log2_ceil(common_data.degree()),
&os,
zeta,
alpha,
merkle_roots,
&self.fri_proof,
challenger,
common_data,
);
}
}
#[cfg(test)]

View File

@ -64,13 +64,13 @@ impl HashTarget {
}
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "")]
pub struct Proof<F: Field + Extendable<D>, const D: usize> {
pub struct Proof<F: Extendable<D>, const D: usize> {
/// Merkle root of LDEs of wire values.
pub wires_root: Hash<F>,
/// Merkle root of LDEs of Z, in the context of Plonk's permutation argument.
pub plonk_zs_root: Hash<F>,
pub plonk_zs_partial_products_root: Hash<F>,
/// Merkle root of LDEs of the quotient polynomial components.
pub quotient_polys_root: Hash<F>,
/// Purported values of each polynomial at the challenge point.
@ -81,20 +81,21 @@ pub struct Proof<F: Field + Extendable<D>, const D: usize> {
pub struct ProofTarget<const D: usize> {
pub wires_root: HashTarget,
pub plonk_zs_root: HashTarget,
pub plonk_zs_partial_products_root: HashTarget,
pub quotient_polys_root: HashTarget,
pub openings: Vec<OpeningSetTarget<D>>,
pub opening_proof: Vec<OpeningProofTarget<D>>,
pub openings: OpeningSetTarget<D>,
pub opening_proof: OpeningProofTarget<D>,
}
/// Evaluations and Merkle proof produced by the prover in a FRI query step.
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "")]
pub struct FriQueryStep<F: Field + Extendable<D>, const D: usize> {
pub struct FriQueryStep<F: Extendable<D>, const D: usize> {
pub evals: Vec<F::Extension>,
pub merkle_proof: MerkleProof<F>,
}
#[derive(Clone)]
pub struct FriQueryStepTarget<const D: usize> {
pub evals: Vec<ExtensionTarget<D>>,
pub merkle_proof: MerkleProofTarget,
@ -102,7 +103,7 @@ pub struct FriQueryStepTarget<const D: usize> {
/// Evaluations and Merkle proofs of the original set of polynomials,
/// before they are combined into a composition polynomial.
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "")]
pub struct FriInitialTreeProof<F: Field> {
pub evals_proofs: Vec<(Vec<F>, MerkleProof<F>)>,
@ -115,6 +116,7 @@ impl<F: Field> FriInitialTreeProof<F> {
}
}
#[derive(Clone)]
pub struct FriInitialTreeProofTarget {
pub evals_proofs: Vec<(Vec<Target>, MerkleProofTarget)>,
}
@ -127,21 +129,22 @@ impl FriInitialTreeProofTarget {
}
/// Proof for a FRI query round.
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "")]
pub struct FriQueryRound<F: Field + Extendable<D>, const D: usize> {
pub struct FriQueryRound<F: Extendable<D>, const D: usize> {
pub initial_trees_proof: FriInitialTreeProof<F>,
pub steps: Vec<FriQueryStep<F, D>>,
}
#[derive(Clone)]
pub struct FriQueryRoundTarget<const D: usize> {
pub initial_trees_proof: FriInitialTreeProofTarget,
pub steps: Vec<FriQueryStepTarget<D>>,
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(bound = "")]
pub struct FriProof<F: Field + Extendable<D>, const D: usize> {
pub struct FriProof<F: Extendable<D>, const D: usize> {
/// A Merkle root for each reduced polynomial in the commit phase.
pub commit_phase_merkle_roots: Vec<Hash<F>>,
/// Query rounds proofs
@ -161,9 +164,9 @@ pub struct FriProofTarget<const D: usize> {
#[derive(Clone, Debug, Serialize, Deserialize)]
/// The purported values of each polynomial at a single point.
pub struct OpeningSet<F: Field + Extendable<D>, const D: usize> {
pub struct OpeningSet<F: Extendable<D>, const D: usize> {
pub constants: Vec<F::Extension>,
pub plonk_s_sigmas: Vec<F::Extension>,
pub plonk_sigmas: Vec<F::Extension>,
pub wires: Vec<F::Extension>,
pub plonk_zs: Vec<F::Extension>,
pub plonk_zs_right: Vec<F::Extension>,
@ -171,7 +174,7 @@ pub struct OpeningSet<F: Field + Extendable<D>, const D: usize> {
pub quotient_polys: Vec<F::Extension>,
}
impl<F: Field + Extendable<D>, const D: usize> OpeningSet<F, D> {
impl<F: Extendable<D>, const D: usize> OpeningSet<F, D> {
pub fn new(
z: F::Extension,
g: F::Extension,
@ -191,7 +194,7 @@ impl<F: Field + Extendable<D>, const D: usize> OpeningSet<F, D> {
let zs_partial_products_eval = eval_commitment(z, zs_partial_products_commitment);
Self {
constants: constants_sigmas_eval[common_data.constants_range()].to_vec(),
plonk_s_sigmas: constants_sigmas_eval[common_data.sigmas_range()].to_vec(),
plonk_sigmas: constants_sigmas_eval[common_data.sigmas_range()].to_vec(),
wires: eval_commitment(z, wires_commitment),
plonk_zs: zs_partial_products_eval[common_data.zs_range()].to_vec(),
plonk_zs_right: eval_commitment(g * z, zs_partial_products_commitment)
@ -205,11 +208,13 @@ impl<F: Field + Extendable<D>, const D: usize> OpeningSet<F, D> {
}
/// The purported values of each polynomial at a single point.
#[derive(Clone, Debug)]
pub struct OpeningSetTarget<const D: usize> {
/// Openings of the circuit's constant polynomials.
pub constants: Vec<ExtensionTarget<D>>,
/// Openings of the sigma polynomials from Plonk's permutation argument.
pub plonk_sigmas: Vec<ExtensionTarget<D>>,
/// Openings of the wire polynomials.
pub wires: Vec<ExtensionTarget<D>>,
/// Openings of the permutation `Z` polynomials at the challenge point.
pub plonk_zs: Vec<ExtensionTarget<D>>,
/// Openings of the `Z` polynomials at the challenge point shifted by the subgroup generator.
pub plonk_zs_right: Vec<ExtensionTarget<D>>,
/// Openings of the partial-product polynomials.
pub partial_products: Vec<ExtensionTarget<D>>,
/// Openings of the quotient polynomial's chunks.
pub quotient_polys: Vec<ExtensionTarget<D>>,
}

View File

@ -7,13 +7,14 @@ use crate::circuit_data::{CommonCircuitData, ProverOnlyCircuitData};
use crate::field::extension_field::Extendable;
use crate::generator::generate_partial_witness;
use crate::plonk_challenger::Challenger;
use crate::plonk_common::{eval_vanishing_poly_base, PlonkPolynomials, ZeroPolyOnCoset};
use crate::plonk_common::{PlonkPolynomials, ZeroPolyOnCoset};
use crate::polynomial::commitment::ListPolynomialCommitment;
use crate::polynomial::polynomial::{PolynomialCoeffs, PolynomialValues};
use crate::proof::Proof;
use crate::timed;
use crate::util::partial_products::partial_products;
use crate::util::{log2_ceil, transpose};
use crate::vanishing_poly::eval_vanishing_poly_base;
use crate::vars::EvaluationVarsBase;
use crate::witness::{PartialWitness, Witness};
@ -38,18 +39,23 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
"to generate witness"
);
let witness = timed!(
partial_witness.full_witness(degree, num_wires),
"to compute full witness"
);
// Display the marked targets for debugging purposes.
for m in &prover_data.marked_targets {
m.display(&partial_witness);
}
timed!(
witness
partial_witness
.check_copy_constraints(&prover_data.copy_constraints, &prover_data.gate_instances)
.unwrap(), // TODO: Change return value to `Result` and use `?` here.
"to check copy constraints"
);
let witness = timed!(
partial_witness.full_witness(degree, num_wires),
"to compute full witness"
);
let wires_values: Vec<PolynomialValues<F>> = timed!(
witness
.wire_values
@ -130,8 +136,7 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
.flat_map(|mut quotient_poly| {
quotient_poly.trim();
quotient_poly.pad(quotient_degree).expect(
"The quotient polynomial doesn't have the right degree. \
This may be because the `Z`s polynomials are still too high degree.",
"Quotient has failed, the vanishing polynomial is not divisible by `Z_H",
);
// Split t into degree-n chunks.
quotient_poly.chunks(degree)
@ -175,7 +180,7 @@ pub(crate) fn prove<F: Extendable<D>, const D: usize>(
Proof {
wires_root: wires_commitment.merkle_tree.root,
plonk_zs_root: zs_partial_products_commitment.merkle_tree.root,
plonk_zs_partial_products_root: zs_partial_products_commitment.merkle_tree.root,
quotient_polys_root: quotient_polys_commitment.merkle_tree.root,
openings,
opening_proof,

View File

@ -1,22 +1,367 @@
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::{CircuitConfig, VerifierCircuitTarget};
use crate::circuit_data::{CircuitConfig, CommonCircuitData, VerifierCircuitTarget};
use crate::field::extension_field::Extendable;
use crate::gates::gate::GateRef;
use crate::proof::ProofTarget;
use crate::plonk_challenger::RecursiveChallenger;
use crate::proof::{HashTarget, ProofTarget};
use crate::util::scaling::ReducingFactorTarget;
use crate::vanishing_poly::eval_vanishing_poly_recursively;
use crate::vars::EvaluationTargets;
const MIN_WIRES: usize = 120; // TODO: Double check.
const MIN_ROUTED_WIRES: usize = 28; // TODO: Double check.
/// Recursively verifies an inner proof.
pub fn add_recursive_verifier<F: Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
inner_config: CircuitConfig,
inner_circuit: VerifierCircuitTarget,
inner_gates: Vec<GateRef<F, D>>,
inner_proof: ProofTarget<D>,
) {
assert!(builder.config.num_wires >= MIN_WIRES);
assert!(builder.config.num_wires >= MIN_ROUTED_WIRES);
impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
    /// Recursively verifies an inner proof.
    ///
    /// Adds constraints to `self` that check `proof` against the inner circuit described by
    /// `inner_config`, `inner_verifier_data` and `inner_common_data`: the Fiat-Shamir
    /// transcript is replayed in-circuit to derive the challenges, the vanishing polynomial
    /// is evaluated at the challenge point `zeta` and checked against the committed quotient,
    /// and the FRI opening proof is verified against the four commitment Merkle roots.
    ///
    /// Note: a stray `todo!()` left over from the pre-refactor stub has been removed here —
    /// as written it was a syntax error (no trailing semicolon before the next `let`) and
    /// would otherwise have panicked before any verification work.
    pub fn add_recursive_verifier(
        &mut self,
        proof: ProofTarget<D>,
        inner_config: &CircuitConfig,
        inner_verifier_data: &VerifierCircuitTarget,
        inner_common_data: &CommonCircuitData<F, D>,
    ) {
        // The outer circuit must be wide enough for the verifier gadgets.
        assert!(self.config.num_wires >= MIN_WIRES);
        assert!(self.config.num_wires >= MIN_ROUTED_WIRES);
        let one = self.one_extension();
        let num_challenges = inner_config.num_challenges;
        let mut challenger = RecursiveChallenger::new(self);
        self.set_context("Challenger observes proof and generates challenges.");
        // Bind the transcript to the inner circuit by absorbing its digest first.
        let digest =
            HashTarget::from_vec(self.constants(&inner_common_data.circuit_digest.elements));
        challenger.observe_hash(&digest);
        // Replay the prover's transcript: each commitment is absorbed before the challenges
        // that depend on it are sampled.
        challenger.observe_hash(&proof.wires_root);
        let betas = challenger.get_n_challenges(self, num_challenges);
        let gammas = challenger.get_n_challenges(self, num_challenges);
        challenger.observe_hash(&proof.plonk_zs_partial_products_root);
        let alphas = challenger.get_n_challenges(self, num_challenges);
        challenger.observe_hash(&proof.quotient_polys_root);
        let zeta = challenger.get_extension_challenge(self);
        let local_constants = &proof.openings.constants;
        let local_wires = &proof.openings.wires;
        let vars = EvaluationTargets {
            local_constants,
            local_wires,
        };
        let local_zs = &proof.openings.plonk_zs;
        let next_zs = &proof.openings.plonk_zs_right;
        let s_sigmas = &proof.openings.plonk_sigmas;
        let partial_products = &proof.openings.partial_products;
        // zeta^n (n = inner circuit degree); reused both inside the vanishing-poly gadget and
        // for Z_H(zeta) below.
        let zeta_pow_deg = self.exp_power_of_2(zeta, inner_common_data.degree_bits);
        self.set_context("Evaluate the vanishing polynomial at our challenge point, zeta.");
        let vanishing_polys_zeta = eval_vanishing_poly_recursively(
            self,
            inner_common_data,
            zeta,
            zeta_pow_deg,
            vars,
            local_zs,
            next_zs,
            partial_products,
            s_sigmas,
            &betas,
            &gammas,
            &alphas,
        );
        self.set_context("Check vanishing and quotient polynomials.");
        let quotient_polys_zeta = &proof.openings.quotient_polys;
        let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
        // Z_H(zeta) = zeta^n - 1.
        let z_h_zeta = self.sub_extension(zeta_pow_deg, one);
        for (i, chunk) in quotient_polys_zeta
            .chunks(inner_common_data.quotient_degree_factor)
            .enumerate()
        {
            // Recombine the chunks of the i-th quotient polynomial, then enforce
            // vanishing_i(zeta) == Z_H(zeta) * quotient_i(zeta) via a named copy constraint.
            let recombined_quotient = scale.reduce(chunk, self);
            let computed_vanishing_poly = self.mul_extension(z_h_zeta, recombined_quotient);
            self.named_route_extension(
                vanishing_polys_zeta[i],
                computed_vanishing_poly,
                format!("Vanishing polynomial == Z_H * quotient, challenge {}", i),
            );
        }
        // Roots of the four polynomial commitments the FRI openings are checked against.
        let merkle_roots = &[
            inner_verifier_data.constants_sigmas_root,
            proof.wires_root,
            proof.plonk_zs_partial_products_root,
            proof.quotient_polys_root,
        ];
        proof.opening_proof.verify(
            zeta,
            &proof.openings,
            merkle_roots,
            &mut challenger,
            inner_common_data,
            self,
        );
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::field::crandall_field::CrandallField;
use crate::fri::FriConfig;
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::merkle_proofs::MerkleProofTarget;
use crate::polynomial::commitment::OpeningProofTarget;
use crate::proof::{
FriInitialTreeProofTarget, FriProofTarget, FriQueryRoundTarget, FriQueryStepTarget,
OpeningSetTarget, Proof,
};
use crate::verifier::verify;
use crate::witness::PartialWitness;
// Construct a `FriQueryRoundTarget` with the same dimensions as the ones in `proof`.
// Only virtual targets are allocated here; their values are assigned by `set_proof_target`.
fn get_fri_query_round<F: Extendable<D>, const D: usize>(
proof: &Proof<F, D>,
builder: &mut CircuitBuilder<F, D>,
) -> FriQueryRoundTarget<D> {
let mut query_round = FriQueryRoundTarget {
initial_trees_proof: FriInitialTreeProofTarget {
evals_proofs: vec![],
},
steps: vec![],
};
// Round 0 serves as the shape template — presumably all query rounds share the same
// dimensions; confirm against the FRI prover.
for (v, merkle_proof) in &proof.opening_proof.fri_proof.query_round_proofs[0]
.initial_trees_proof
.evals_proofs
{
query_round.initial_trees_proof.evals_proofs.push((
builder.add_virtual_targets(v.len()),
MerkleProofTarget {
siblings: builder.add_virtual_hashes(merkle_proof.siblings.len()),
},
));
}
for step in &proof.opening_proof.fri_proof.query_round_proofs[0].steps {
query_round.steps.push(FriQueryStepTarget {
evals: builder.add_virtual_extension_targets(step.evals.len()),
merkle_proof: MerkleProofTarget {
siblings: builder.add_virtual_hashes(step.merkle_proof.siblings.len()),
},
});
}
query_round
}
// Construct a `ProofTarget` with the same dimensions as `proof`.
// Every component is a virtual target/hash; values come later from `set_proof_target`.
fn proof_to_proof_target<F: Extendable<D>, const D: usize>(
proof: &Proof<F, D>,
builder: &mut CircuitBuilder<F, D>,
) -> ProofTarget<D> {
let wires_root = builder.add_virtual_hash();
let plonk_zs_root = builder.add_virtual_hash();
let quotient_polys_root = builder.add_virtual_hash();
// One virtual extension target per claimed polynomial opening.
let openings = OpeningSetTarget {
constants: builder.add_virtual_extension_targets(proof.openings.constants.len()),
plonk_sigmas: builder.add_virtual_extension_targets(proof.openings.plonk_sigmas.len()),
wires: builder.add_virtual_extension_targets(proof.openings.wires.len()),
plonk_zs: builder.add_virtual_extension_targets(proof.openings.plonk_zs.len()),
plonk_zs_right: builder
.add_virtual_extension_targets(proof.openings.plonk_zs_right.len()),
partial_products: builder
.add_virtual_extension_targets(proof.openings.partial_products.len()),
quotient_polys: builder
.add_virtual_extension_targets(proof.openings.quotient_polys.len()),
};
// One query-round skeleton per round in the real proof.
let query_round_proofs = (0..proof.opening_proof.fri_proof.query_round_proofs.len())
.map(|_| get_fri_query_round(proof, builder))
.collect();
let commit_phase_merkle_roots = (0..proof
.opening_proof
.fri_proof
.commit_phase_merkle_roots
.len())
.map(|_| builder.add_virtual_hash())
.collect();
let opening_proof =
OpeningProofTarget {
fri_proof: FriProofTarget {
commit_phase_merkle_roots,
query_round_proofs,
final_poly: PolynomialCoeffsExtTarget(builder.add_virtual_extension_targets(
proof.opening_proof.fri_proof.final_poly.len(),
)),
pow_witness: builder.add_virtual_target(),
},
};
ProofTarget {
wires_root,
plonk_zs_partial_products_root: plonk_zs_root,
quotient_polys_root,
openings,
opening_proof,
}
}
// Set the targets in a `ProofTarget` to their corresponding values in a `Proof`.
// `pt` must have been built by `proof_to_proof_target` from the same `proof`, so every
// target vector below has exactly the same length as its counterpart.
fn set_proof_target<F: Extendable<D>, const D: usize>(
proof: &Proof<F, D>,
pt: &ProofTarget<D>,
pw: &mut PartialWitness<F>,
) {
// Commitment roots.
pw.set_hash_target(pt.wires_root, proof.wires_root);
pw.set_hash_target(
pt.plonk_zs_partial_products_root,
proof.plonk_zs_partial_products_root,
);
pw.set_hash_target(pt.quotient_polys_root, proof.quotient_polys_root);
// Claimed openings, one vector per polynomial family.
for (&t, &x) in pt.openings.wires.iter().zip(&proof.openings.wires) {
pw.set_extension_target(t, x);
}
for (&t, &x) in pt.openings.constants.iter().zip(&proof.openings.constants) {
pw.set_extension_target(t, x);
}
for (&t, &x) in pt
.openings
.plonk_sigmas
.iter()
.zip(&proof.openings.plonk_sigmas)
{
pw.set_extension_target(t, x);
}
for (&t, &x) in pt.openings.plonk_zs.iter().zip(&proof.openings.plonk_zs) {
pw.set_extension_target(t, x);
}
for (&t, &x) in pt
.openings
.plonk_zs_right
.iter()
.zip(&proof.openings.plonk_zs_right)
{
pw.set_extension_target(t, x);
}
for (&t, &x) in pt
.openings
.partial_products
.iter()
.zip(&proof.openings.partial_products)
{
pw.set_extension_target(t, x);
}
for (&t, &x) in pt
.openings
.quotient_polys
.iter()
.zip(&proof.openings.quotient_polys)
{
pw.set_extension_target(t, x);
}
// FRI opening proof: proof-of-work witness, final polynomial, commit-phase roots, and
// per-query-round Merkle openings.
let fri_proof = &proof.opening_proof.fri_proof;
let fpt = &pt.opening_proof.fri_proof;
pw.set_target(fpt.pow_witness, fri_proof.pow_witness);
for (&t, &x) in fpt.final_poly.0.iter().zip(&fri_proof.final_poly.coeffs) {
pw.set_extension_target(t, x);
}
for (&t, &x) in fpt
.commit_phase_merkle_roots
.iter()
.zip(&fri_proof.commit_phase_merkle_roots)
{
pw.set_hash_target(t, x);
}
for (qt, q) in fpt
.query_round_proofs
.iter()
.zip(&fri_proof.query_round_proofs)
{
for (at, a) in qt
.initial_trees_proof
.evals_proofs
.iter()
.zip(&q.initial_trees_proof.evals_proofs)
{
for (&t, &x) in at.0.iter().zip(&a.0) {
pw.set_target(t, x);
}
for (&t, &x) in at.1.siblings.iter().zip(&a.1.siblings) {
pw.set_hash_target(t, x);
}
}
for (st, s) in qt.steps.iter().zip(&q.steps) {
for (&t, &x) in st.evals.iter().zip(&s.evals) {
pw.set_extension_target(t, x);
}
for (&t, &x) in st
.merkle_proof
.siblings
.iter()
.zip(&s.merkle_proof.siblings)
{
pw.set_hash_target(t, x);
}
}
}
}
// End-to-end test: prove a small inner circuit, verify it natively, then build an outer
// circuit that verifies the inner proof recursively, prove that, and verify the result.
// Ignored by default because proving is expensive.
#[test]
#[ignore]
fn test_recursive_verifier() {
env_logger::init();
type F = CrandallField;
const D: usize = 4;
let config = CircuitConfig {
num_wires: 134,
num_routed_wires: 28,
security_bits: 128,
rate_bits: 3,
num_challenges: 3,
fri_config: FriConfig {
proof_of_work_bits: 1,
rate_bits: 3,
reduction_arity_bits: vec![2, 2, 2, 2, 2, 2, 2],
num_query_rounds: 40,
},
};
// Build and prove the inner circuit.
let (proof, vd, cd) = {
let mut builder = CircuitBuilder::<F, D>::new(config.clone());
let _two = builder.two();
let _two = builder.hash_n_to_hash(vec![_two], true).elements[0];
// NOTE(review): `let _two` inside the loop shadows per iteration, so these are 5000
// independent squaring gates rather than a chain — presumably deliberate padding to
// reach a given circuit size; confirm.
for _ in 0..5000 {
let _two = builder.mul(_two, _two);
}
let data = builder.build();
(
data.prove(PartialWitness::new()),
data.verifier_only,
data.common,
)
};
// Sanity-check the inner proof natively before recursing on it.
verify(proof.clone(), &vd, &cd).unwrap();
// Outer circuit: allocate targets mirroring the inner proof, wire in its witness values,
// and add the recursive verifier constraints.
let mut builder = CircuitBuilder::<F, D>::new(config.clone());
let mut pw = PartialWitness::new();
let pt = proof_to_proof_target(&proof, &mut builder);
set_proof_target(&proof, &pt, &mut pw);
let inner_data = VerifierCircuitTarget {
constants_sigmas_root: builder.add_virtual_hash(),
};
pw.set_hash_target(inner_data.constants_sigmas_root, vd.constants_sigmas_root);
builder.add_recursive_verifier(pt, &config, &inner_data, &cd);
let data = builder.build();
let recursive_proof = data.prove(pw);
verify(recursive_proof, &data.verifier_only, &data.common).unwrap();
}
}

63
src/util/marking.rs Normal file
View File

@ -0,0 +1,63 @@
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::proof::HashTarget;
use crate::target::Target;
use crate::witness::PartialWitness;
/// Enum representing all types of targets, so that they can be marked.
#[derive(Clone)]
pub enum Markable<const D: usize> {
Target(Target),
ExtensionTarget(ExtensionTarget<D>),
HashTarget(HashTarget),
/// A (possibly nested) collection of markables, so whole groups can be marked at once.
Vec(Vec<Markable<D>>),
}
// Conversions so any supported target type (or collection thereof) can be turned into a
// `Markable` uniformly via `.into()`.
impl<const D: usize> From<Target> for Markable<D> {
fn from(t: Target) -> Self {
Self::Target(t)
}
}
impl<const D: usize> From<ExtensionTarget<D>> for Markable<D> {
fn from(et: ExtensionTarget<D>) -> Self {
Self::ExtensionTarget(et)
}
}
impl<const D: usize> From<HashTarget> for Markable<D> {
fn from(ht: HashTarget) -> Self {
Self::HashTarget(ht)
}
}
// Lifts a vector of convertible items into `Markable::Vec` by converting each element.
impl<M: Into<Markable<D>>, const D: usize> From<Vec<M>> for Markable<D> {
fn from(v: Vec<M>) -> Self {
Self::Vec(v.into_iter().map(|m| m.into()).collect())
}
}
impl<const D: usize> Markable<D> {
/// Display a `Markable` by querying a partial witness.
fn print_markable<F: Extendable<D>>(&self, pw: &PartialWitness<F>) {
match self {
Markable::Target(t) => println!("{}", pw.get_target(*t)),
Markable::ExtensionTarget(et) => println!("{}", pw.get_extension_target(*et)),
// Hash values are printed via their `Debug` formatting.
Markable::HashTarget(ht) => println!("{:?}", pw.get_hash_target(*ht)),
// Recurse into nested collections, printing one value per line.
Markable::Vec(v) => v.iter().for_each(|m| m.print_markable(pw)),
}
}
}
/// A named collection of targets.
#[derive(Clone)]
pub struct MarkedTargets<const D: usize> {
/// The targets whose witness values will be displayed.
pub targets: Markable<D>,
/// Label printed alongside the values to identify this group.
pub name: String,
}
impl<const D: usize> MarkedTargets<D> {
/// Display the collection of targets along with its name by querying a partial witness.
pub fn display<F: Extendable<D>>(&self, pw: &PartialWitness<F>) {
// Delimit the output so multiple marked groups are easy to tell apart in the log.
println!("Values for {}:", self.name);
self.targets.print_markable(pw);
println!("End of values for {}", self.name);
}
}

View File

@ -1,3 +1,4 @@
pub mod marking;
pub mod partial_products;
pub mod scaling;
pub(crate) mod timing;

View File

@ -1,6 +1,9 @@
use std::iter::Product;
use std::ops::Sub;
use crate::circuit_builder::CircuitBuilder;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::util::ceil_div_usize;
/// Compute partial products of the original vector `v` such that all products consist of `max_degree`
@ -58,6 +61,32 @@ pub fn check_partial_products<T: Product + Copy + Sub<Output = T>>(
res
}
/// Recursive-circuit analogue of `check_partial_products`.
///
/// Layer by layer, recomputes the product of each chunk of at most `max_degree`
/// values and compares it against the corresponding claimed partial product.
/// Each returned target is the difference `chunk_product - claimed_partial`.
pub fn check_partial_products_recursively<F: Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    v: &[ExtensionTarget<D>],
    partials: &[ExtensionTarget<D>],
    max_degree: usize,
) -> Vec<ExtensionTarget<D>> {
    let mut difference_checks = Vec::new();
    let mut current_layer = v.to_vec();
    let mut claimed = partials.to_vec();
    while current_layer.len() > max_degree {
        // Recompute the product of each chunk of at most `max_degree` values.
        let mut chunk_products = Vec::new();
        for chunk in current_layer.chunks(max_degree) {
            chunk_products.push(builder.mul_many_extension(chunk));
        }
        // Each recomputed product must match the corresponding claimed partial product.
        for (&computed, &expected) in chunk_products.iter().zip(&claimed) {
            difference_checks.push(builder.sub_extension(computed, expected));
        }
        // The claimed partials consumed by this layer become the next layer's inputs.
        current_layer = claimed.drain(..chunk_products.len()).collect();
    }
    difference_checks
}
#[cfg(test)]
mod tests {
use num::Zero;

View File

@ -187,10 +187,7 @@ mod tests {
let mut builder = CircuitBuilder::<F, D>::new(config);
let alpha = FF::rand();
let alpha = FF::ONE;
let vs = (0..n)
.map(|i| FF::from_canonical_usize(i))
.collect::<Vec<_>>();
let vs = (0..n).map(FF::from_canonical_usize).collect::<Vec<_>>();
let manual_reduce = ReducingFactor::new(alpha).reduce(vs.iter());
let manual_reduce = builder.constant_extension(manual_reduce);

354
src/vanishing_poly.rs Normal file
View File

@ -0,0 +1,354 @@
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CommonCircuitData;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
use crate::gates::gate::PrefixedGate;
use crate::plonk_common;
use crate::plonk_common::{eval_l_1_recursively, ZeroPolyOnCoset};
use crate::target::Target;
use crate::util::partial_products::{check_partial_products, check_partial_products_recursively};
use crate::util::scaling::ReducingFactorTarget;
use crate::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase};
/// Evaluate the vanishing polynomial at `x`. In this context, the vanishing polynomial is a random
/// linear combination of gate constraints, plus some other terms relating to the permutation
/// argument. All such terms should vanish on `H`.
///
/// Returns one combined evaluation per entry of `alphas` (the random-combination challenges).
/// `betas`/`gammas` are the permutation-argument challenges; `local_zs`/`next_zs` are the
/// plonk Z polynomials opened at `x` and at the next point, one per challenge round.
pub(crate) fn eval_vanishing_poly<F: Extendable<D>, const D: usize>(
    common_data: &CommonCircuitData<F, D>,
    x: F::Extension,
    vars: EvaluationVars<F, D>,
    local_zs: &[F::Extension],
    next_zs: &[F::Extension],
    partial_products: &[F::Extension],
    s_sigmas: &[F::Extension],
    betas: &[F],
    gammas: &[F],
    alphas: &[F],
) -> Vec<F::Extension> {
    let max_degree = common_data.quotient_degree_factor;
    let (num_prods, final_num_prod) = common_data.num_partial_products;
    let constraint_terms =
        evaluate_gate_constraints(&common_data.gates, common_data.num_gate_constraints, vars);
    // L_1(x) is independent of the challenge index, so compute it once instead of
    // once per challenge round.
    let l_1_x = plonk_common::eval_l_1(common_data.degree(), x);
    // The L_1(x) (Z(x) - 1) vanishing terms.
    let mut vanishing_z_1_terms = Vec::new();
    // The terms checking the partial products.
    let mut vanishing_partial_products_terms = Vec::new();
    // The Z(x) f'(x) - g'(x) Z(g x) terms.
    let mut vanishing_v_shift_terms = Vec::new();
    for i in 0..common_data.config.num_challenges {
        let z_x = local_zs[i];
        let z_gz = next_zs[i];
        vanishing_z_1_terms.push(l_1_x * (z_x - F::Extension::ONE));
        let numerator_values = (0..common_data.config.num_routed_wires)
            .map(|j| {
                let wire_value = vars.local_wires[j];
                let k_i = common_data.k_is[j];
                let s_id = x * k_i.into();
                wire_value + s_id * betas[i].into() + gammas[i].into()
            })
            .collect::<Vec<_>>();
        let denominator_values = (0..common_data.config.num_routed_wires)
            .map(|j| {
                let wire_value = vars.local_wires[j];
                let s_sigma = s_sigmas[j];
                wire_value + s_sigma * betas[i].into() + gammas[i].into()
            })
            .collect::<Vec<_>>();
        let quotient_values = (0..common_data.config.num_routed_wires)
            .map(|j| numerator_values[j] / denominator_values[j])
            .collect::<Vec<_>>();
        // The partial products considered for this iteration of `i`.
        let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
        // Check the quotient partial products.
        let mut partial_product_check =
            check_partial_products(&quotient_values, current_partial_products, max_degree);
        // The first checks are of the form `q - n/d` which is a rational function not a polynomial.
        // We multiply them by `d` to get checks of the form `q*d - n` which are low-degree
        // polynomials.
        denominator_values
            .chunks(max_degree)
            .zip(partial_product_check.iter_mut())
            .for_each(|(d, q)| {
                *q *= d.iter().copied().product();
            });
        vanishing_partial_products_terms.extend(partial_product_check);
        // The quotient final product is the product of the last `final_num_prod` elements.
        let quotient: F::Extension = current_partial_products[num_prods - final_num_prod..]
            .iter()
            .copied()
            .product();
        vanishing_v_shift_terms.push(quotient * z_x - z_gz);
    }
    let vanishing_terms = [
        vanishing_z_1_terms,
        vanishing_partial_products_terms,
        vanishing_v_shift_terms,
        constraint_terms,
    ]
    .concat();
    // Lift the base-field challenges into the extension field before reducing.
    let alphas = &alphas.iter().map(|&a| a.into()).collect::<Vec<_>>();
    plonk_common::reduce_with_powers_multi(&vanishing_terms, alphas)
}
/// Like `eval_vanishing_poly`, but specialized for base field points.
///
/// `index` is presumably `x`'s position within the evaluation coset, used by
/// `z_h_on_coset.eval_l1` to look up a precomputed L_1 value — confirm at call site.
/// Returns one combined evaluation per entry of `alphas`.
pub(crate) fn eval_vanishing_poly_base<F: Extendable<D>, const D: usize>(
    common_data: &CommonCircuitData<F, D>,
    index: usize,
    x: F,
    vars: EvaluationVarsBase<F>,
    local_zs: &[F],
    next_zs: &[F],
    partial_products: &[F],
    s_sigmas: &[F],
    betas: &[F],
    gammas: &[F],
    alphas: &[F],
    z_h_on_coset: &ZeroPolyOnCoset<F>,
) -> Vec<F> {
    let max_degree = common_data.quotient_degree_factor;
    let (num_prods, final_num_prod) = common_data.num_partial_products;
    let constraint_terms =
        evaluate_gate_constraints_base(&common_data.gates, common_data.num_gate_constraints, vars);
    // The L_1(x) (Z(x) - 1) vanishing terms.
    let mut vanishing_z_1_terms = Vec::new();
    // The terms checking the partial products.
    let mut vanishing_partial_products_terms = Vec::new();
    // The Z(x) f'(x) - g'(x) Z(g x) terms.
    let mut vanishing_v_shift_terms = Vec::new();
    for i in 0..common_data.config.num_challenges {
        let z_x = local_zs[i];
        let z_gz = next_zs[i];
        // NOTE(review): `eval_l1(index, x)` does not depend on `i` and could be hoisted
        // out of the loop, assuming it is pure.
        vanishing_z_1_terms.push(z_h_on_coset.eval_l1(index, x) * (z_x - F::ONE));
        let numerator_values = (0..common_data.config.num_routed_wires)
            .map(|j| {
                let wire_value = vars.local_wires[j];
                let k_i = common_data.k_is[j];
                let s_id = k_i * x;
                wire_value + betas[i] * s_id + gammas[i]
            })
            .collect::<Vec<_>>();
        let denominator_values = (0..common_data.config.num_routed_wires)
            .map(|j| {
                let wire_value = vars.local_wires[j];
                let s_sigma = s_sigmas[j];
                wire_value + betas[i] * s_sigma + gammas[i]
            })
            .collect::<Vec<_>>();
        let quotient_values = (0..common_data.config.num_routed_wires)
            .map(|j| numerator_values[j] / denominator_values[j])
            .collect::<Vec<_>>();
        // The partial products considered for this iteration of `i`.
        let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
        // Check the quotient partial products. (The checks are on `quotient_values`,
        // not the numerators.)
        let mut partial_product_check =
            check_partial_products(&quotient_values, current_partial_products, max_degree);
        // The first checks are of the form `q - n/d` which is a rational function not a polynomial.
        // We multiply them by `d` to get checks of the form `q*d - n` which are low-degree
        // polynomials.
        denominator_values
            .chunks(max_degree)
            .zip(partial_product_check.iter_mut())
            .for_each(|(d, q)| {
                *q *= d.iter().copied().product();
            });
        vanishing_partial_products_terms.extend(partial_product_check);
        // The quotient final product is the product of the last `final_num_prod` elements.
        let quotient: F = current_partial_products[num_prods - final_num_prod..]
            .iter()
            .copied()
            .product();
        vanishing_v_shift_terms.push(quotient * z_x - z_gz);
    }
    let vanishing_terms = [
        vanishing_z_1_terms,
        vanishing_partial_products_terms,
        vanishing_v_shift_terms,
        constraint_terms,
    ]
    .concat();
    plonk_common::reduce_with_powers_multi(&vanishing_terms, alphas)
}
/// Evaluates all gate constraints, summing constraint `i` of every gate into output slot `i`.
///
/// `num_gate_constraints` is the largest number of constraints imposed by any gate. It is not
/// strictly necessary, but it lets us allocate the accumulator with exactly the capacity needed.
pub fn evaluate_gate_constraints<F: Extendable<D>, const D: usize>(
    gates: &[PrefixedGate<F, D>],
    num_gate_constraints: usize,
    vars: EvaluationVars<F, D>,
) -> Vec<F::Extension> {
    let mut accumulated = vec![F::Extension::ZERO; num_gate_constraints];
    for gate in gates {
        let filtered = gate.gate.0.eval_filtered(vars, &gate.prefix);
        for (slot, value) in filtered.into_iter().enumerate() {
            debug_assert!(
                slot < num_gate_constraints,
                "num_constraints() gave too low of a number"
            );
            accumulated[slot] += value;
        }
    }
    accumulated
}
/// Like `evaluate_gate_constraints`, but evaluated at a base-field point.
///
/// `num_gate_constraints` is the largest number of constraints imposed by any gate,
/// used to size the accumulator exactly.
pub fn evaluate_gate_constraints_base<F: Extendable<D>, const D: usize>(
    gates: &[PrefixedGate<F, D>],
    num_gate_constraints: usize,
    vars: EvaluationVarsBase<F>,
) -> Vec<F> {
    let mut accumulated = vec![F::ZERO; num_gate_constraints];
    for gate in gates {
        let filtered = gate.gate.0.eval_filtered_base(vars, &gate.prefix);
        for (slot, value) in filtered.into_iter().enumerate() {
            debug_assert!(
                slot < num_gate_constraints,
                "num_constraints() gave too low of a number"
            );
            accumulated[slot] += value;
        }
    }
    accumulated
}
/// Like `evaluate_gate_constraints`, but inside a recursive circuit: emits gates via `builder`
/// and returns one accumulator target per constraint slot, where slot `i` sums constraint `i`
/// of every gate.
///
/// `num_gate_constraints` is the largest number of constraints imposed by any gate.
pub fn evaluate_gate_constraints_recursively<F: Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    gates: &[PrefixedGate<F, D>],
    num_gate_constraints: usize,
    vars: EvaluationTargets<D>,
) -> Vec<ExtensionTarget<D>> {
    let mut constraints = vec![builder.zero_extension(); num_gate_constraints];
    for gate in gates {
        let gate_constraints = gate
            .gate
            .0
            .eval_filtered_recursively(builder, vars, &gate.prefix);
        for (i, c) in gate_constraints.into_iter().enumerate() {
            // Match the bounds check in `evaluate_gate_constraints`/`_base`, so an
            // underestimated `num_constraints()` fails with an explanatory message in
            // debug builds instead of an anonymous index-out-of-bounds panic.
            debug_assert!(
                i < num_gate_constraints,
                "num_constraints() gave too low of a number"
            );
            constraints[i] = builder.add_extension(constraints[i], c);
        }
    }
    constraints
}
/// Evaluate the vanishing polynomial at `x`. In this context, the vanishing polynomial is a random
/// linear combination of gate constraints, plus some other terms relating to the permutation
/// argument. All such terms should vanish on `H`.
///
/// In-circuit analogue of `eval_vanishing_poly`: emits gates via `builder` and returns one
/// combined evaluation target per entry of `alphas`.
/// NOTE(review): `x_pow_deg` appears to be `x` raised to the circuit degree, consumed only by
/// `eval_l_1_recursively` — confirm at the call site.
pub(crate) fn eval_vanishing_poly_recursively<F: Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    common_data: &CommonCircuitData<F, D>,
    x: ExtensionTarget<D>,
    x_pow_deg: ExtensionTarget<D>,
    vars: EvaluationTargets<D>,
    local_zs: &[ExtensionTarget<D>],
    next_zs: &[ExtensionTarget<D>],
    partial_products: &[ExtensionTarget<D>],
    s_sigmas: &[ExtensionTarget<D>],
    betas: &[Target],
    gammas: &[Target],
    alphas: &[Target],
) -> Vec<ExtensionTarget<D>> {
    let max_degree = common_data.quotient_degree_factor;
    let (num_prods, final_num_prod) = common_data.num_partial_products;
    let constraint_terms = evaluate_gate_constraints_recursively(
        builder,
        &common_data.gates,
        common_data.num_gate_constraints,
        vars,
    );
    // The L_1(x) (Z(x) - 1) vanishing terms.
    let mut vanishing_z_1_terms = Vec::new();
    // The terms checking the partial products.
    let mut vanishing_partial_products_terms = Vec::new();
    // The Z(x) f'(x) - g'(x) Z(g x) terms.
    let mut vanishing_v_shift_terms = Vec::new();
    for i in 0..common_data.config.num_challenges {
        let z_x = local_zs[i];
        let z_gz = next_zs[i];
        // NOTE(review): `l1` does not depend on `i`; it could be hoisted out of the loop
        // to save gates, if duplicate ops aren't already deduplicated by the builder.
        let l1 = eval_l_1_recursively(builder, common_data.degree(), x, x_pow_deg);
        // Intended: `L_1(x) * (Z(x) - 1)`, matching `eval_vanishing_poly`; relies on
        // `arithmetic_extension(c0, c1, a, b, c) = c0*a*b + c1*c`.
        vanishing_z_1_terms.push(builder.arithmetic_extension(F::ONE, F::NEG_ONE, l1, z_x, l1));
        let numerator_values = (0..common_data.config.num_routed_wires)
            .map(|j| {
                let wire_value = vars.local_wires[j];
                let k_i = builder.constant(common_data.k_is[j]);
                let s_id = builder.scalar_mul_ext(k_i, x);
                let gamma_ext = builder.convert_to_ext(gammas[i]);
                // `wire_value + beta * s_id + gamma`.
                let tmp = builder.scalar_mul_add_extension(betas[i], s_id, wire_value);
                builder.add_extension(tmp, gamma_ext)
            })
            .collect::<Vec<_>>();
        let denominator_values = (0..common_data.config.num_routed_wires)
            .map(|j| {
                let wire_value = vars.local_wires[j];
                let s_sigma = s_sigmas[j];
                let gamma_ext = builder.convert_to_ext(gammas[i]);
                // `wire_value + beta * s_sigma + gamma`.
                let tmp = builder.scalar_mul_add_extension(betas[i], s_sigma, wire_value);
                builder.add_extension(tmp, gamma_ext)
            })
            .collect::<Vec<_>>();
        let quotient_values = (0..common_data.config.num_routed_wires)
            .map(|j| builder.div_unsafe_extension(numerator_values[j], denominator_values[j]))
            .collect::<Vec<_>>();
        // The partial products considered for this iteration of `i`.
        let current_partial_products = &partial_products[i * num_prods..(i + 1) * num_prods];
        // Check the quotient partial products.
        let mut partial_product_check = check_partial_products_recursively(
            builder,
            &quotient_values,
            current_partial_products,
            max_degree,
        );
        // The first checks are of the form `q - n/d` which is a rational function not a polynomial.
        // We multiply them by `d` to get checks of the form `q*d - n` which are low-degree
        // polynomials.
        denominator_values
            .chunks(max_degree)
            .zip(partial_product_check.iter_mut())
            .for_each(|(d, q)| {
                let tmp = builder.mul_many_extension(d);
                *q = builder.mul_extension(*q, tmp);
            });
        vanishing_partial_products_terms.extend(partial_product_check);
        // The quotient final product is the product of the last `final_num_prod` elements.
        let quotient =
            builder.mul_many_extension(&current_partial_products[num_prods - final_num_prod..]);
        vanishing_v_shift_terms.push(builder.mul_sub_extension(quotient, z_x, z_gz));
    }
    let vanishing_terms = [
        vanishing_z_1_terms,
        vanishing_partial_products_terms,
        vanishing_v_shift_terms,
        constraint_terms,
    ]
    .concat();
    // Reduce all terms with powers of each alpha challenge, one output per challenge.
    alphas
        .iter()
        .map(|&alpha| {
            let alpha = builder.convert_to_ext(alpha);
            let mut alpha = ReducingFactorTarget::new(alpha);
            alpha.reduce(&vanishing_terms, builder)
        })
        .collect()
}

View File

@ -39,6 +39,12 @@ impl<'a, F: Field> EvaluationVarsBase<'a, F> {
}
}
impl<'a, const D: usize> EvaluationTargets<'a, D> {
    /// Advances `local_constants` past the first `prefix.len()` entries.
    ///
    /// Note that only the *length* of `prefix` is used; its contents are ignored.
    /// Panics if there are fewer than `prefix.len()` local constants.
    pub fn remove_prefix(&mut self, prefix: &[bool]) {
        self.local_constants = &self.local_constants[prefix.len()..];
    }
}
#[derive(Copy, Clone)]
pub struct EvaluationTargets<'a, const D: usize> {
pub(crate) local_constants: &'a [ExtensionTarget<D>],

View File

@ -4,8 +4,9 @@ use crate::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData};
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
use crate::plonk_challenger::Challenger;
use crate::plonk_common::{eval_vanishing_poly, reduce_with_powers};
use crate::plonk_common::reduce_with_powers;
use crate::proof::Proof;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::EvaluationVars;
pub(crate) fn verify<F: Extendable<D>, const D: usize>(
@ -25,7 +26,7 @@ pub(crate) fn verify<F: Extendable<D>, const D: usize>(
let betas = challenger.get_n_challenges(num_challenges);
let gammas = challenger.get_n_challenges(num_challenges);
challenger.observe_hash(&proof.plonk_zs_root);
challenger.observe_hash(&proof.plonk_zs_partial_products_root);
let alphas = challenger.get_n_challenges(num_challenges);
challenger.observe_hash(&proof.quotient_polys_root);
@ -39,7 +40,7 @@ pub(crate) fn verify<F: Extendable<D>, const D: usize>(
};
let local_zs = &proof.openings.plonk_zs;
let next_zs = &proof.openings.plonk_zs_right;
let s_sigmas = &proof.openings.plonk_s_sigmas;
let s_sigmas = &proof.openings.plonk_sigmas;
let partial_products = &proof.openings.partial_products;
// Evaluate the vanishing polynomial at our challenge point, zeta.
@ -77,7 +78,7 @@ pub(crate) fn verify<F: Extendable<D>, const D: usize>(
let merkle_roots = &[
verifier_data.constants_sigmas_root,
proof.wires_root,
proof.plonk_zs_root,
proof.plonk_zs_partial_products_root,
proof.quotient_polys_root,
];

View File

@ -3,10 +3,12 @@ use std::convert::TryInto;
use anyhow::{ensure, Result};
use crate::copy_constraint::CopyConstraint;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::{Extendable, FieldExtension};
use crate::field::field::Field;
use crate::gates::gate::GateInstance;
use crate::proof::{Hash, HashTarget};
use crate::target::Target;
use crate::wire::Wire;
@ -19,41 +21,6 @@ impl<F: Field> Witness<F> {
pub fn get_wire(&self, gate: usize, input: usize) -> F {
self.wire_values[input][gate]
}
/// Checks that the copy constraints are satisfied in the witness.
pub fn check_copy_constraints<const D: usize>(
&self,
copy_constraints: &[(Target, Target)],
gate_instances: &[GateInstance<F, D>],
) -> Result<()>
where
F: Extendable<D>,
{
for &(a, b) in copy_constraints {
// TODO: Take care of public inputs once they land, and virtual targets.
if let (
Target::Wire(Wire {
gate: a_gate,
input: a_input,
}),
Target::Wire(Wire {
gate: b_gate,
input: b_input,
}),
) = (a, b)
{
let va = self.get_wire(a_gate, a_input);
let vb = self.get_wire(b_gate, b_input);
ensure!(
va == vb,
"Copy constraint between wire {} of gate #{} (`{}`) and wire {} of gate #{} (`{}`) is not satisfied. \
Got values of {} and {} respectively.",
a_input, a_gate, gate_instances[a_gate].gate_type.0.id(), b_input, b_gate,
gate_instances[b_gate].gate_type.0.id(), va, vb);
}
}
Ok(())
}
}
#[derive(Clone, Debug)]
@ -111,6 +78,12 @@ impl<F: Field> PartialWitness<F> {
)
}
/// Returns the `Hash` value currently assigned to the given hash target.
///
/// The `try_into().unwrap()` panics if `ht.elements` does not contain exactly the
/// fixed number of elements expected by `Hash::elements`; `get_targets` is
/// presumably also panicking if any element is unset — confirm its contract.
pub fn get_hash_target(&self, ht: HashTarget) -> Hash<F> {
    Hash {
        elements: self.get_targets(&ht.elements).try_into().unwrap(),
    }
}
pub fn try_get_target(&self, target: Target) -> Option<F> {
self.target_values.get(&target).cloned()
}
@ -142,6 +115,13 @@ impl<F: Field> PartialWitness<F> {
}
}
/// Assigns each element of `value` to the corresponding target of `ht`.
///
/// NOTE(review): `zip` silently truncates to the shorter side; this assumes
/// `ht.elements.len() == value.elements.len()` — confirm both are the fixed hash width.
pub fn set_hash_target(&mut self, ht: HashTarget, value: Hash<F>) {
    ht.elements
        .iter()
        .zip(value.elements)
        .for_each(|(&t, x)| self.set_target(t, x));
}
pub fn set_extension_target<const D: usize>(
&mut self,
et: ExtensionTarget<D>,
@ -192,6 +172,44 @@ impl<F: Field> PartialWitness<F> {
});
Witness { wire_values }
}
/// Checks that the copy constraints are satisfied in the witness.
///
/// Returns `Err` describing the first violated constraint (its name and a
/// human-readable description of both endpoints); `Ok(())` otherwise.
pub fn check_copy_constraints<const D: usize>(
    &self,
    copy_constraints: &[CopyConstraint],
    gate_instances: &[GateInstance<F, D>],
) -> Result<()>
where
    F: Extendable<D>,
{
    for CopyConstraint { pair: (a, b), name } in copy_constraints {
        // NOTE(review): targets without an assigned value are treated as zero here;
        // assumes unset wires default to zero in the full witness — confirm.
        let va = self.try_get_target(*a).unwrap_or(F::ZERO);
        let vb = self.try_get_target(*b).unwrap_or(F::ZERO);
        // Renders a target as a human-readable location for the error message.
        let desc = |t: &Target| -> String {
            match t {
                Target::Wire(Wire { gate, input }) => format!(
                    "wire {} of gate #{} (`{}`)",
                    input,
                    gate,
                    gate_instances[*gate].gate_type.0.id()
                ),
                Target::PublicInput { index } => format!("{}-th public input", index),
                Target::VirtualTarget { index } => format!("{}-th virtual target", index),
            }
        };
        ensure!(
            va == vb,
            "Copy constraint '{}' between {} and {} is not satisfied. \
            Got values of {} and {} respectively.",
            name,
            desc(a),
            desc(b),
            va,
            vb
        );
    }
    Ok(())
}
}
impl<F: Field> Default for PartialWitness<F> {