Mirror of https://github.com/logos-storage/plonky2.git

Commit: 42d5b80a7a ("BaseSum gate")
Parent: 897ec3b053
@@ -1,6 +1,7 @@
 use crate::polynomial::commitment::SALT_SIZE;

 pub mod prover;
+mod recursive_verifier;
 pub mod verifier;

 /// Somewhat arbitrary. Smaller values will increase delta, but with diminishing returns,
@@ -106,7 +106,7 @@ fn fri_proof_of_work<F: Field>(current_hash: Hash<F>, config: &FriConfig) -> F {
                 false,
             )
             .to_canonical_u64()
-            .leading_zeros()
+            .trailing_zeros()
             >= config.proof_of_work_bits
         })
         .map(F::from_canonical_u64)
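Note, not part of the diff: this hunk switches the grinding condition from counting leading zero bits to counting trailing zero bits of the proof-of-work hash. The verifier-side hunk below makes the matching change, and the recursive verifier added in this commit splits the hash into little-endian bits, for which a trailing-zero condition is presumably the cheaper one to constrain. A minimal sketch of the condition both sides now agree on (the helper name is illustrative, not from this commit):

// Sketch: the proof-of-work condition that the prover grinds `pow_witness`
// to satisfy and that the verifier re-checks.
fn pow_condition(hash_u64: u64, proof_of_work_bits: u32) -> bool {
    hash_u64.trailing_zeros() >= proof_of_work_bits
}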
src/fri/recursive_verifier.rs (new file, 306 lines)
@@ -0,0 +1,306 @@
use anyhow::{ensure, Result};
use itertools::izip;

use crate::circuit_builder::CircuitBuilder;
use crate::field::extension_field::{flatten, Extendable, FieldExtension, OEF};
use crate::field::field::Field;
use crate::field::lagrange::{barycentric_weights, interpolant, interpolate};
use crate::fri::FriConfig;
use crate::hash::hash_n_to_1;
use crate::merkle_proofs::verify_merkle_proof;
use crate::plonk_challenger::{Challenger, RecursiveChallenger};
use crate::plonk_common::reduce_with_iter;
use crate::proof::{
    FriInitialTreeProof, FriProof, FriProofTarget, FriQueryRound, Hash, OpeningSet,
};
use crate::util::{log2_strict, reverse_bits, reverse_index_bits_in_place};

impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
    /// Computes P'(x^arity) from {P(x*g^i)}_(i=0..arity), where g is an `arity`-th root of unity
    /// and P' is the FRI reduced polynomial.
    fn compute_evaluation() {
        todo!();
        // debug_assert_eq!(last_evals.len(), 1 << arity_bits);
        //
        // let g = F::primitive_root_of_unity(arity_bits);
        //
        // // The evaluation vector needs to be reordered first.
        // let mut evals = last_evals.to_vec();
        // reverse_index_bits_in_place(&mut evals);
        // evals.rotate_left(reverse_bits(old_x_index, arity_bits));
        //
        // // The answer is gotten by interpolating {(x*g^i, P(x*g^i))} and evaluating at beta.
        // let points = g
        //     .powers()
        //     .zip(evals)
        //     .map(|(y, e)| ((x * y).into(), e))
        //     .collect::<Vec<_>>();
        // let barycentric_weights = barycentric_weights(&points);
        // interpolate(&points, beta, &barycentric_weights)
    }

    fn fri_verify_proof_of_work(
        &mut self,
        proof: &FriProofTarget<D>,
        challenger: &mut RecursiveChallenger,
        config: &FriConfig,
    ) -> Result<()> {
        let mut inputs = challenger.get_hash(self).elements.to_vec();
        inputs.push(proof.pow_witness);

        let hash = self.hash_n_to_m(inputs, 1, false)[0];
        let purported_hash_bits = self.split_le_virtual(hash, 64);
        // for &b in &purported_hash_bits {
        //     self.generate_copy(b, self.zero());
        // }
        // ensure!(
        //     hash.to_canonical_u64().trailing_zeros() >= config.proof_of_work_bits,
        //     "Invalid proof of work witness."
        // );

        Ok(())
    }

    // pub fn verify_fri_proof<const D: usize>(
    //     purported_degree_log: usize,
    //     // Openings of the PLONK polynomials.
    //     os: &OpeningSet<F, D>,
    //     // Point at which the PLONK polynomials are opened.
    //     zeta: F::Extension,
    //     // Scaling factor to combine polynomials.
    //     alpha: F::Extension,
    //     initial_merkle_roots: &[Hash<F>],
    //     proof: &FriProof<F, D>,
    //     challenger: &mut Challenger<F>,
    //     config: &FriConfig,
    // ) -> Result<()> {
    //     let total_arities = config.reduction_arity_bits.iter().sum::<usize>();
    //     ensure!(
    //         purported_degree_log
    //             == log2_strict(proof.final_poly.len()) + total_arities - config.rate_bits,
    //         "Final polynomial has wrong degree."
    //     );
    //
    //     // Size of the LDE domain.
    //     let n = proof.final_poly.len() << total_arities;
    //
    //     // Recover the random betas used in the FRI reductions.
    //     let betas = proof
    //         .commit_phase_merkle_roots
    //         .iter()
    //         .map(|root| {
    //             challenger.observe_hash(root);
    //             challenger.get_extension_challenge()
    //         })
    //         .collect::<Vec<_>>();
    //     challenger.observe_extension_elements(&proof.final_poly.coeffs);
    //
    //     // Check PoW.
    //     fri_verify_proof_of_work(proof, challenger, config)?;
    //
    //     // Check that parameters are coherent.
    //     ensure!(
    //         config.num_query_rounds == proof.query_round_proofs.len(),
    //         "Number of query rounds does not match config."
    //     );
    //     ensure!(
    //         !config.reduction_arity_bits.is_empty(),
    //         "Number of reductions should be non-zero."
    //     );
    //
    //     for round_proof in &proof.query_round_proofs {
    //         fri_verifier_query_round(
    //             os,
    //             zeta,
    //             alpha,
    //             initial_merkle_roots,
    //             &proof,
    //             challenger,
    //             n,
    //             &betas,
    //             round_proof,
    //             config,
    //         )?;
    //     }
    //
    //     Ok(())
    // }
    //
    // fn fri_verify_initial_proof<F: Field>(
    //     x_index: usize,
    //     proof: &FriInitialTreeProof<F>,
    //     initial_merkle_roots: &[Hash<F>],
    // ) -> Result<()> {
    //     for ((evals, merkle_proof), &root) in proof.evals_proofs.iter().zip(initial_merkle_roots) {
    //         verify_merkle_proof(evals.clone(), x_index, root, merkle_proof, false)?;
    //     }
    //
    //     Ok(())
    // }
    //
    // fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
    //     proof: &FriInitialTreeProof<F>,
    //     alpha: F::Extension,
    //     os: &OpeningSet<F, D>,
    //     zeta: F::Extension,
    //     subgroup_x: F,
    //     config: &FriConfig,
    // ) -> F::Extension {
    //     assert!(D > 1, "Not implemented for D=1.");
    //     let degree_log = proof.evals_proofs[0].1.siblings.len() - config.rate_bits;
    //     let subgroup_x = F::Extension::from_basefield(subgroup_x);
    //     let mut alpha_powers = alpha.powers();
    //     let mut sum = F::Extension::ZERO;
    //
    //     // We will add three terms to `sum`:
    //     // - one for polynomials opened at `x` only
    //     // - one for polynomials opened at `x` and `g x`
    //     // - one for polynomials opened at `x` and its conjugate
    //
    //     let evals = [0, 1, 4]
    //         .iter()
    //         .flat_map(|&i| proof.unsalted_evals(i, config))
    //         .map(|&e| F::Extension::from_basefield(e));
    //     let openings = os
    //         .constants
    //         .iter()
    //         .chain(&os.plonk_sigmas)
    //         .chain(&os.quotient_polys);
    //     let numerator = izip!(evals, openings, &mut alpha_powers)
    //         .map(|(e, &o, a)| a * (e - o))
    //         .sum::<F::Extension>();
    //     let denominator = subgroup_x - zeta;
    //     sum += numerator / denominator;
    //
    //     let ev: F::Extension = proof
    //         .unsalted_evals(3, config)
    //         .iter()
    //         .zip(alpha_powers.clone())
    //         .map(|(&e, a)| a * e.into())
    //         .sum();
    //     let zeta_right = F::Extension::primitive_root_of_unity(degree_log) * zeta;
    //     let zs_interpol = interpolant(&[
    //         (zeta, reduce_with_iter(&os.plonk_zs, alpha_powers.clone())),
    //         (
    //             zeta_right,
    //             reduce_with_iter(&os.plonk_zs_right, &mut alpha_powers),
    //         ),
    //     ]);
    //     let numerator = ev - zs_interpol.eval(subgroup_x);
    //     let denominator = (subgroup_x - zeta) * (subgroup_x - zeta_right);
    //     sum += numerator / denominator;
    //
    //     let ev: F::Extension = proof
    //         .unsalted_evals(2, config)
    //         .iter()
    //         .zip(alpha_powers.clone())
    //         .map(|(&e, a)| a * e.into())
    //         .sum();
    //     let zeta_frob = zeta.frobenius();
    //     let wire_evals_frob = os.wires.iter().map(|e| e.frobenius()).collect::<Vec<_>>();
    //     let wires_interpol = interpolant(&[
    //         (zeta, reduce_with_iter(&os.wires, alpha_powers.clone())),
    //         (zeta_frob, reduce_with_iter(&wire_evals_frob, alpha_powers)),
    //     ]);
    //     let numerator = ev - wires_interpol.eval(subgroup_x);
    //     let denominator = (subgroup_x - zeta) * (subgroup_x - zeta_frob);
    //     sum += numerator / denominator;
    //
    //     sum
    // }
    //
    // fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
    //     os: &OpeningSet<F, D>,
    //     zeta: F::Extension,
    //     alpha: F::Extension,
    //     initial_merkle_roots: &[Hash<F>],
    //     proof: &FriProof<F, D>,
    //     challenger: &mut Challenger<F>,
    //     n: usize,
    //     betas: &[F::Extension],
    //     round_proof: &FriQueryRound<F, D>,
    //     config: &FriConfig,
    // ) -> Result<()> {
    //     let mut evaluations: Vec<Vec<F::Extension>> = Vec::new();
    //     let x = challenger.get_challenge();
    //     let mut domain_size = n;
    //     let mut x_index = x.to_canonical_u64() as usize % n;
    //     fri_verify_initial_proof(
    //         x_index,
    //         &round_proof.initial_trees_proof,
    //         initial_merkle_roots,
    //     )?;
    //     let mut old_x_index = 0;
    //     // `subgroup_x` is `subgroup[x_index]`, i.e., the actual field element in the domain.
    //     let log_n = log2_strict(n);
    //     let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR
    //         * F::primitive_root_of_unity(log_n).exp(reverse_bits(x_index, log_n) as u64);
    //     for (i, &arity_bits) in config.reduction_arity_bits.iter().enumerate() {
    //         let arity = 1 << arity_bits;
    //         let next_domain_size = domain_size >> arity_bits;
    //         let e_x = if i == 0 {
    //             fri_combine_initial(
    //                 &round_proof.initial_trees_proof,
    //                 alpha,
    //                 os,
    //                 zeta,
    //                 subgroup_x,
    //                 config,
    //             )
    //         } else {
    //             let last_evals = &evaluations[i - 1];
    //             // Infer P(y) from {P(x)}_{x^arity=y}.
    //             compute_evaluation(
    //                 subgroup_x,
    //                 old_x_index,
    //                 config.reduction_arity_bits[i - 1],
    //                 last_evals,
    //                 betas[i - 1],
    //             )
    //         };
    //         let mut evals = round_proof.steps[i].evals.clone();
    //         // Insert P(y) into the evaluation vector, since it wasn't included by the prover.
    //         evals.insert(x_index & (arity - 1), e_x);
    //         evaluations.push(evals);
    //         verify_merkle_proof(
    //             flatten(&evaluations[i]),
    //             x_index >> arity_bits,
    //             proof.commit_phase_merkle_roots[i],
    //             &round_proof.steps[i].merkle_proof,
    //             false,
    //         )?;
    //
    //         if i > 0 {
    //             // Update the point x to x^arity.
    //             for _ in 0..config.reduction_arity_bits[i - 1] {
    //                 subgroup_x = subgroup_x.square();
    //             }
    //         }
    //         domain_size = next_domain_size;
    //         old_x_index = x_index;
    //         x_index >>= arity_bits;
    //     }
    //
    //     let last_evals = evaluations.last().unwrap();
    //     let final_arity_bits = *config.reduction_arity_bits.last().unwrap();
    //     let purported_eval = compute_evaluation(
    //         subgroup_x,
    //         old_x_index,
    //         final_arity_bits,
    //         last_evals,
    //         *betas.last().unwrap(),
    //     );
    //     for _ in 0..final_arity_bits {
    //         subgroup_x = subgroup_x.square();
    //     }
    //
    //     // Final check of FRI. After all the reductions, we check that the final polynomial is equal
    //     // to the one sent by the prover.
    //     ensure!(
    //         proof.final_poly.eval(subgroup_x.into()) == purported_eval,
    //         "Final polynomial evaluation is invalid."
    //     );
    //
    //     Ok(())
    // }
}
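Note, not part of the diff: per its doc comment and the commented-out body above, `compute_evaluation` is meant to fold one FRI layer by Lagrange-interpolating the `arity` sibling evaluations and evaluating the interpolant at the challenge beta, i.e. (with a = arity and g an a-th root of unity)

P'(x^{a}) \;=\; \sum_{i=0}^{a-1} P(x g^{i}) \prod_{j \neq i} \frac{\beta - x g^{j}}{x g^{i} - x g^{j}}, \qquad g^{a} = 1.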
@@ -56,8 +56,7 @@ fn fri_verify_proof_of_work<F: Field + Extendable<D>, const D: usize>(
         false,
     );
     ensure!(
-        hash.to_canonical_u64().leading_zeros()
-            >= config.proof_of_work_bits + F::ORDER.leading_zeros(),
+        hash.to_canonical_u64().trailing_zeros() >= config.proof_of_work_bits,
         "Invalid proof of work witness."
     );

src/gates/base_sum.rs (new file, 169 lines)
@@ -0,0 +1,169 @@
use crate::circuit_builder::CircuitBuilder;
use crate::circuit_data::CircuitConfig;
use crate::field::extension_field::target::ExtensionTarget;
use crate::field::extension_field::Extendable;
use crate::field::field::Field;
use crate::gates::gate::{Gate, GateRef};
use crate::generator::{SimpleGenerator, WitnessGenerator};
use crate::plonk_common::{reduce_with_powers, reduce_with_powers_recursive};
use crate::target::Target;
use crate::vars::{EvaluationTargets, EvaluationVars};
use crate::wire::Wire;
use crate::witness::PartialWitness;
use std::ops::Range;

/// A gate which can sum base-B limbs.
#[derive(Debug)]
pub struct BaseSumGate<const B: usize> {
    num_limbs: usize,
}

impl<const B: usize> BaseSumGate<B> {
    pub fn new<F: Extendable<D>, const D: usize>(config: &CircuitConfig) -> GateRef<F, D> {
        GateRef::new(BaseSumGate::<B> {
            num_limbs: config.num_routed_wires - 1,
        })
    }

    pub const WIRE_SUM: usize = 0;
    pub const WIRE_LIMBS_START: usize = 1;

    /// Returns the range of indices of the limb wires.
    pub fn limbs(&self) -> Range<usize> {
        Self::WIRE_LIMBS_START..Self::WIRE_LIMBS_START + self.num_limbs
    }
}

impl<F: Extendable<D>, const D: usize, const B: usize> Gate<F, D> for BaseSumGate<B> {
    fn id(&self) -> String {
        format!("{:?}", self)
    }

    fn eval_unfiltered(&self, vars: EvaluationVars<F, D>) -> Vec<F::Extension> {
        let sum = vars.local_wires[Self::WIRE_SUM];
        let limbs = vars.local_wires[self.limbs()].to_vec();
        let computed_sum = reduce_with_powers(&limbs, F::Extension::from_canonical_usize(B));
        let mut constraints = vec![computed_sum - sum];
        for limb in limbs {
            constraints.push(
                (0..B)
                    .map(|i| limb - F::Extension::from_canonical_usize(i))
                    .product(),
            );
        }
        constraints
    }

    fn eval_unfiltered_recursively(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: EvaluationTargets<D>,
    ) -> Vec<ExtensionTarget<D>> {
        let base = builder.constant(F::from_canonical_usize(B));
        let sum = vars.local_wires[Self::WIRE_SUM];
        let limbs = vars.local_wires[self.limbs()].to_vec();
        let computed_sum =
            reduce_with_powers_recursive(builder, &vars.local_wires[self.limbs()], base);
        let mut constraints = vec![builder.sub_extension(computed_sum, sum)];
        for limb in limbs {
            constraints.push({
                let mut acc = builder.one_extension();
                (0..B).for_each(|i| {
                    let it = builder.constant_extension(F::from_canonical_usize(i).into());
                    let diff = builder.sub_extension(limb, it);
                    acc = builder.mul_extension(acc, diff);
                });
                acc
            });
        }
        constraints
    }

    fn generators(
        &self,
        gate_index: usize,
        _local_constants: &[F],
    ) -> Vec<Box<dyn WitnessGenerator<F>>> {
        let gen = BaseSplitGenerator::<B> {
            gate_index,
            num_limbs: self.num_limbs,
        };
        vec![Box::new(gen)]
    }

    fn num_wires(&self) -> usize {
        self.num_limbs + 1
    }

    fn num_constants(&self) -> usize {
        0
    }

    fn degree(&self) -> usize {
        B
    }

    fn num_constraints(&self) -> usize {
        1 + B
    }
}

#[derive(Debug)]
pub struct BaseSplitGenerator<const B: usize> {
    gate_index: usize,
    num_limbs: usize,
}

impl<F: Field, const B: usize> SimpleGenerator<F> for BaseSplitGenerator<B> {
    fn dependencies(&self) -> Vec<Target> {
        vec![Target::Wire(Wire {
            gate: self.gate_index,
            input: BaseSumGate::<B>::WIRE_SUM,
        })]
    }

    fn run_once(&self, witness: &PartialWitness<F>) -> PartialWitness<F> {
        let mut sum_value = witness
            .get_target(Target::Wire(Wire {
                gate: self.gate_index,
                input: BaseSumGate::<B>::WIRE_SUM,
            }))
            .to_canonical_u64() as usize;
        let limbs = (BaseSumGate::<B>::WIRE_LIMBS_START
            ..BaseSumGate::<B>::WIRE_LIMBS_START + self.num_limbs)
            .map(|i| {
                Target::Wire(Wire {
                    gate: self.gate_index,
                    input: i,
                })
            });

        let mut result = PartialWitness::new();
        for b in limbs {
            let b_value = sum_value % B;
            result.set_target(b, F::from_canonical_usize(b_value));
            sum_value /= B;
        }

        debug_assert_eq!(
            sum_value, 0,
            "Integer too large to fit in given number of bits"
        );

        result
    }
}

#[cfg(test)]
mod tests {
    use crate::circuit_data::CircuitConfig;
    use crate::field::crandall_field::CrandallField;
    use crate::gates::base_sum::BaseSumGate;
    use crate::gates::gate_testing::test_low_degree;

    #[test]
    fn low_degree() {
        let config = CircuitConfig::default();
        test_low_degree(BaseSumGate::<6>::new::<CrandallField, 4>(&config))
    }
}
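Note, not part of the diff: the gate's constraints say that the routed `sum` wire equals the base-B recomposition of the limb wires (via `reduce_with_powers`) and that each limb lies in 0..B (the degree-B product over `limb - i`). A plain-integer sketch of the relation that `BaseSplitGenerator` witnesses and the constraints then enforce; the helper is illustrative, not an API of this crate:

// Splits `sum` into `num_limbs` little-endian base-B limbs, as the generator does,
// and checks the two facts the gate constrains: recomposition and limb range.
fn split_and_check(sum: u64, base: u64, num_limbs: usize) -> bool {
    let mut rest = sum;
    let mut limbs = Vec::with_capacity(num_limbs);
    for _ in 0..num_limbs {
        limbs.push(rest % base);
        rest /= base;
    }
    // Recompose with Horner's rule, mirroring `reduce_with_powers`.
    let recomposed: u64 = limbs.iter().rev().fold(0, |acc, &limb| acc * base + limb);
    rest == 0 && recomposed == sum && limbs.iter().all(|&limb| limb < base)
}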
@@ -5,5 +5,6 @@ pub mod gmimc;
 mod interpolation;
 pub(crate) mod noop;

+mod base_sum;
 #[cfg(test)]
 mod gate_testing;
@@ -255,6 +255,20 @@ impl RecursiveChallenger {
         (0..n).map(|_| self.get_challenge(builder)).collect()
     }

+    pub fn get_hash<F: Extendable<D>, const D: usize>(
+        &mut self,
+        builder: &mut CircuitBuilder<F, D>,
+    ) -> HashTarget {
+        HashTarget {
+            elements: [
+                self.get_challenge(builder),
+                self.get_challenge(builder),
+                self.get_challenge(builder),
+                self.get_challenge(builder),
+            ],
+        }
+    }
+
     /// Absorb any buffered inputs. After calling this, the input buffer will be empty.
     fn absorb_buffered_inputs<F: Extendable<D>, const D: usize>(
         &mut self,
@@ -103,10 +103,15 @@ pub(crate) fn reduce_with_powers<F: Field>(terms: &[F], alpha: F) -> F {

 pub(crate) fn reduce_with_powers_recursive<F: Extendable<D>, const D: usize>(
     builder: &mut CircuitBuilder<F, D>,
-    terms: Vec<Target>,
+    terms: &[ExtensionTarget<D>],
     alpha: Target,
-) -> Target {
-    todo!()
+) -> ExtensionTarget<D> {
+    let mut sum = builder.zero_extension();
+    for &term in terms.iter().rev() {
+        sum = builder.scalar_mul_ext(alpha, sum);
+        sum = builder.add_extension(sum, term);
+    }
+    sum
 }

 pub(crate) fn reduce_with_iter<F: Field, I>(terms: &[F], coeffs: I) -> F
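Note, not part of the diff: the new body of `reduce_with_powers_recursive` is Horner evaluation of sum_i terms[i] * alpha^i expressed with circuit operations. The same reduction over plain values, as a generic sketch (not an API of this crate):

use std::ops::{Add, Mul};

// Horner evaluation of terms[0] + terms[1]*alpha + terms[2]*alpha^2 + ...
fn reduce_with_powers_plain<T: Copy + Add<Output = T> + Mul<Output = T>>(
    terms: &[T],
    alpha: T,
    zero: T,
) -> T {
    let mut sum = zero;
    for &term in terms.iter().rev() {
        sum = sum * alpha + term;
    }
    sum
}

// E.g. reduce_with_powers_plain(&[1u64, 2, 3], 10, 0) == 321.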