Mirror of https://github.com/logos-storage/plonky2.git (synced 2026-01-08 08:43:06 +00:00)

Commit 906a0c00f4: Merge branch 'main' into order_bigint
@@ -39,7 +39,7 @@ mod tests {
        let generator = F::primitive_root_of_unity(SUBGROUP_BITS);
        let subgroup_size = 1 << SUBGROUP_BITS;

        let shifts = get_unique_coset_shifts::<F>(SUBGROUP_BITS, NUM_SHIFTS);
        let shifts = get_unique_coset_shifts::<F>(subgroup_size, NUM_SHIFTS);

        let mut union = HashSet::new();
        for shift in shifts {
@@ -118,14 +118,16 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
            "Number of reductions should be non-zero."
        );

        let precomputed_reduced_evals =
            PrecomputedReducedEvalsTarget::from_os_and_alpha(os, alpha, self);
        for (i, round_proof) in proof.query_round_proofs.iter().enumerate() {
            context!(
                self,
                &format!("verify {}'th FRI query", i),
                self.fri_verifier_query_round(
                    os,
                    zeta,
                    alpha,
                    precomputed_reduced_evals,
                    initial_merkle_roots,
                    proof,
                    challenger,
@@ -162,9 +164,9 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
        &mut self,
        proof: &FriInitialTreeProofTarget,
        alpha: ExtensionTarget<D>,
        os: &OpeningSetTarget<D>,
        zeta: ExtensionTarget<D>,
        subgroup_x: Target,
        precomputed_reduced_evals: PrecomputedReducedEvalsTarget<D>,
        common_data: &CommonCircuitData<F, D>,
    ) -> ExtensionTarget<D> {
        assert!(D > 1, "Not implemented for D=1.");
@@ -192,19 +194,9 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
            )
            .map(|&e| self.convert_to_ext(e))
            .collect::<Vec<_>>();
        let single_openings = os
            .constants
            .iter()
            .chain(&os.plonk_sigmas)
            .chain(&os.wires)
            .chain(&os.quotient_polys)
            .chain(&os.partial_products)
            .copied()
            .collect::<Vec<_>>();
        let mut single_numerator = alpha.reduce(&single_evals, self);
        // TODO: Precompute the rhs as it is the same in all FRI rounds.
        let rhs = alpha.reduce(&single_openings, self);
        single_numerator = self.sub_extension(single_numerator, rhs);
        let single_composition_eval = alpha.reduce(&single_evals, self);
        let single_numerator =
            self.sub_extension(single_composition_eval, precomputed_reduced_evals.single);
        let single_denominator = self.sub_extension(subgroup_x, zeta);
        let quotient = self.div_unsafe_extension(single_numerator, single_denominator);
        sum = self.add_extension(sum, quotient);
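The change in this hunk relies on the linearity of the alpha-reduction: reducing the openings on their own and subtracting gives the same result as reducing the per-opening differences, so the openings side can be reduced once (the `single` field of the precomputed struct) instead of once per query round. A minimal standalone sketch of that identity, using i128 arithmetic and a toy `reduce` in place of the crate's field and target types:

// Illustrative only: plain i128 stand-in for field elements; `reduce` is a toy
// version of the alpha-reduction (sum of alpha^i * values[i]).
fn reduce(alpha: i128, values: &[i128]) -> i128 {
    values.iter().rev().fold(0, |acc, &v| acc * alpha + v)
}

fn main() {
    let alpha = 7;
    let evals = [3, 1, 4, 1, 5];    // evaluations at the query point, new each round
    let openings = [2, 7, 1, 8, 2]; // openings at zeta, identical in every round
    let diffs: Vec<i128> = evals.iter().zip(&openings).map(|(e, o)| e - o).collect();

    // Old approach: reduce the differences inside every query round.
    let per_round = reduce(alpha, &diffs);
    // New approach: reduce the evaluations, then subtract a reduction of the
    // openings that was computed once, before the loop.
    let precomputed = reduce(alpha, &openings);
    assert_eq!(per_round, reduce(alpha, &evals) - precomputed);
}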
@@ -217,14 +209,15 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
            .take(common_data.zs_range().end)
            .map(|&e| self.convert_to_ext(e))
            .collect::<Vec<_>>();
        let zs_composition_eval = alpha.clone().reduce(&zs_evals, self);
        let zs_composition_eval = alpha.reduce(&zs_evals, self);

        let g = self.constant_extension(F::Extension::primitive_root_of_unity(degree_log));
        let zeta_right = self.mul_extension(g, zeta);
        let zs_ev_zeta = alpha.clone().reduce(&os.plonk_zs, self);
        let zs_ev_zeta_right = alpha.reduce(&os.plonk_zs_right, self);
        let interpol_val = self.interpolate2(
            [(zeta, zs_ev_zeta), (zeta_right, zs_ev_zeta_right)],
            [
                (zeta, precomputed_reduced_evals.zs),
                (zeta_right, precomputed_reduced_evals.zs_right),
            ],
            subgroup_x,
        );
        let zs_numerator = self.sub_extension(zs_composition_eval, interpol_val);
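Here `interpolate2` takes the two claimed values of the reduced `Z` polynomial, at `zeta` and at `g * zeta`, and evaluates the polynomial through those two points at `subgroup_x`. Assuming it performs ordinary two-point (degree-1 Lagrange) interpolation, a plain-f64 sketch of that operation, with made-up numbers and no circuit types:

// Illustrative only: two-point interpolation over f64 instead of extension targets.
fn interpolate2(points: [(f64, f64); 2], x: f64) -> f64 {
    let [(x0, y0), (x1, y1)] = points;
    // Unique degree-1 polynomial through (x0, y0) and (x1, y1), evaluated at x.
    y0 + (x - x0) * (y1 - y0) / (x1 - x0)
}

fn main() {
    // The line through (1, 3) and (2, 5) is y = 2x + 1, so its value at 4 is 9.
    assert_eq!(interpolate2([(1.0, 3.0), (2.0, 5.0)], 4.0), 9.0);
}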
@@ -240,9 +233,9 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {

    fn fri_verifier_query_round(
        &mut self,
        os: &OpeningSetTarget<D>,
        zeta: ExtensionTarget<D>,
        alpha: ExtensionTarget<D>,
        precomputed_reduced_evals: PrecomputedReducedEvalsTarget<D>,
        initial_merkle_roots: &[HashTarget],
        proof: &FriProofTarget<D>,
        challenger: &mut RecursiveChallenger,
@@ -253,7 +246,6 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
    ) {
        let config = &common_data.config.fri_config;
        let n_log = log2_strict(n);
        let mut evaluations: Vec<Vec<ExtensionTarget<D>>> = Vec::new();
        // TODO: Do we need to range check `x_index` to a target smaller than `p`?
        let mut x_index = challenger.get_challenge(self);
        x_index = self.split_low_high(x_index, n_log, 64).0;
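`split_low_high` appears to split `x_index` into its low `n_log` bits (the position inside the current domain) and the remaining high bits, mirroring the `x_index & (arity - 1)` and `x_index >> arity_bits` arithmetic in the non-recursive verifier later in this diff. A sketch of the equivalent plain-integer operation (hypothetical helper, for illustration only):

// Illustrative only: integer analogue of splitting a target into its low
// `low_bits` bits and the remaining high bits.
fn split_low_high(x: u64, low_bits: usize) -> (u64, u64) {
    let low = x & ((1 << low_bits) - 1); // x mod 2^low_bits
    let high = x >> low_bits;            // x div 2^low_bits
    (low, high)
}

fn main() {
    assert_eq!(split_low_high(0b1101_0110, 4), (0b0110, 0b1101));
}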
@@ -280,6 +272,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
            self.mul(g, phi)
        });

        let mut evaluations: Vec<Vec<ExtensionTarget<D>>> = Vec::new();
        for (i, &arity_bits) in config.reduction_arity_bits.iter().enumerate() {
            let next_domain_size = domain_size >> arity_bits;
            let e_x = if i == 0 {
@@ -289,9 +282,9 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
                self.fri_combine_initial(
                    &round_proof.initial_trees_proof,
                    alpha,
                    os,
                    zeta,
                    subgroup_x,
                    precomputed_reduced_evals,
                    common_data,
                )
                )
@@ -315,23 +308,21 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
            let (low_x_index, high_x_index) =
                self.split_low_high(x_index, arity_bits, x_index_num_bits);
            evals = self.insert(low_x_index, e_x, evals);
            evaluations.push(evals);
            context!(
                self,
                "verify FRI round Merkle proof.",
                self.verify_merkle_proof(
                    flatten_target(&evaluations[i]),
                    flatten_target(&evals),
                    high_x_index,
                    proof.commit_phase_merkle_roots[i],
                    &round_proof.steps[i].merkle_proof,
                )
            );
            evaluations.push(evals);

            if i > 0 {
                // Update the point x to x^arity.
                for _ in 0..config.reduction_arity_bits[i - 1] {
                    subgroup_x = self.square(subgroup_x);
                }
                subgroup_x = self.exp_power_of_2(subgroup_x, config.reduction_arity_bits[i - 1]);
            }
            domain_size = next_domain_size;
            old_x_index = low_x_index;
@@ -352,9 +343,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
                *betas.last().unwrap(),
            )
        );
        for _ in 0..final_arity_bits {
            subgroup_x = self.square(subgroup_x);
        }
        subgroup_x = self.exp_power_of_2(subgroup_x, final_arity_bits);

        // Final check of FRI. After all the reductions, we check that the final polynomial is equal
        // to the one sent by the prover.
@@ -366,3 +355,39 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
        self.assert_equal_extension(eval, purported_eval);
    }
}

#[derive(Copy, Clone)]
struct PrecomputedReducedEvalsTarget<const D: usize> {
    pub single: ExtensionTarget<D>,
    pub zs: ExtensionTarget<D>,
    pub zs_right: ExtensionTarget<D>,
}

impl<const D: usize> PrecomputedReducedEvalsTarget<D> {
    fn from_os_and_alpha<F: Extendable<D>>(
        os: &OpeningSetTarget<D>,
        alpha: ExtensionTarget<D>,
        builder: &mut CircuitBuilder<F, D>,
    ) -> Self {
        let mut alpha = ReducingFactorTarget::new(alpha);
        let single = alpha.reduce(
            &os.constants
                .iter()
                .chain(&os.plonk_sigmas)
                .chain(&os.wires)
                .chain(&os.quotient_polys)
                .chain(&os.partial_products)
                .copied()
                .collect::<Vec<_>>(),
            builder,
        );
        let zs = alpha.reduce(&os.plonk_zs, builder);
        let zs_right = alpha.reduce(&os.plonk_zs_right, builder);

        Self {
            single,
            zs,
            zs_right,
        }
    }
}
@@ -112,11 +112,12 @@ pub fn verify_fri_proof<F: Field + Extendable<D>, const D: usize>(
        "Number of reductions should be non-zero."
    );

    let precomputed_reduced_evals = PrecomputedReducedEvals::from_os_and_alpha(os, alpha);
    for round_proof in &proof.query_round_proofs {
        fri_verifier_query_round(
            os,
            zeta,
            alpha,
            precomputed_reduced_evals,
            initial_merkle_roots,
            &proof,
            challenger,
@@ -142,12 +143,43 @@ fn fri_verify_initial_proof<F: Field>(
    Ok(())
}

/// Holds the reduced (by `alpha`) evaluations at `zeta` for the polynomials opened just at
/// `zeta`, for `Z` at `zeta` and for `Z` at `g * zeta`.
#[derive(Copy, Clone)]
struct PrecomputedReducedEvals<F: Extendable<D>, const D: usize> {
    pub single: F::Extension,
    pub zs: F::Extension,
    pub zs_right: F::Extension,
}

impl<F: Extendable<D>, const D: usize> PrecomputedReducedEvals<F, D> {
    fn from_os_and_alpha(os: &OpeningSet<F, D>, alpha: F::Extension) -> Self {
        let mut alpha = ReducingFactor::new(alpha);
        let single = alpha.reduce(
            os.constants
                .iter()
                .chain(&os.plonk_sigmas)
                .chain(&os.wires)
                .chain(&os.quotient_polys)
                .chain(&os.partial_products),
        );
        let zs = alpha.reduce(os.plonk_zs.iter());
        let zs_right = alpha.reduce(os.plonk_zs_right.iter());

        Self {
            single,
            zs,
            zs_right,
        }
    }
}

fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
    proof: &FriInitialTreeProof<F>,
    alpha: F::Extension,
    os: &OpeningSet<F, D>,
    zeta: F::Extension,
    subgroup_x: F,
    precomputed_reduced_evals: PrecomputedReducedEvals<F, D>,
    common_data: &CommonCircuitData<F, D>,
) -> F::Extension {
    let config = &common_data.config;
@@ -174,19 +206,8 @@ fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
            [common_data.partial_products_range()],
        )
        .map(|&e| F::Extension::from_basefield(e));
    let single_openings = os
        .constants
        .iter()
        .chain(&os.plonk_sigmas)
        .chain(&os.wires)
        .chain(&os.quotient_polys)
        .chain(&os.partial_products);
    let single_diffs = single_evals
        .into_iter()
        .zip(single_openings)
        .map(|(e, &o)| e - o)
        .collect::<Vec<_>>();
    let single_numerator = alpha.reduce(single_diffs.iter());
    let single_composition_eval = alpha.reduce(single_evals);
    let single_numerator = single_composition_eval - precomputed_reduced_evals.single;
    let single_denominator = subgroup_x - zeta;
    sum += single_numerator / single_denominator;
    alpha.reset();
@@ -197,12 +218,12 @@ fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
        .iter()
        .map(|&e| F::Extension::from_basefield(e))
        .take(common_data.zs_range().end);
    let zs_composition_eval = alpha.clone().reduce(zs_evals);
    let zs_composition_eval = alpha.reduce(zs_evals);
    let zeta_right = F::Extension::primitive_root_of_unity(degree_log) * zeta;
    let zs_interpol = interpolate2(
        [
            (zeta, alpha.clone().reduce(os.plonk_zs.iter())),
            (zeta_right, alpha.reduce(os.plonk_zs_right.iter())),
            (zeta, precomputed_reduced_evals.zs),
            (zeta_right, precomputed_reduced_evals.zs_right),
        ],
        subgroup_x,
    );
@@ -215,9 +236,9 @@ fn fri_combine_initial<F: Field + Extendable<D>, const D: usize>(
}

fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
    os: &OpeningSet<F, D>,
    zeta: F::Extension,
    alpha: F::Extension,
    precomputed_reduced_evals: PrecomputedReducedEvals<F, D>,
    initial_merkle_roots: &[Hash<F>],
    proof: &FriProof<F, D>,
    challenger: &mut Challenger<F>,
@@ -227,7 +248,6 @@ fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
    common_data: &CommonCircuitData<F, D>,
) -> Result<()> {
    let config = &common_data.config.fri_config;
    let mut evaluations: Vec<Vec<F::Extension>> = Vec::new();
    let x = challenger.get_challenge();
    let mut domain_size = n;
    let mut x_index = x.to_canonical_u64() as usize % n;
@@ -241,6 +261,8 @@ fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
    let log_n = log2_strict(n);
    let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR
        * F::primitive_root_of_unity(log_n).exp(reverse_bits(x_index, log_n) as u64);

    let mut evaluations: Vec<Vec<F::Extension>> = Vec::new();
    for (i, &arity_bits) in config.reduction_arity_bits.iter().enumerate() {
        let arity = 1 << arity_bits;
        let next_domain_size = domain_size >> arity_bits;
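The query index is mapped to an element of the coset g·H, where H is the subgroup of size n generated by a primitive 2^log_n-th root of unity and g is the field's multiplicative group generator, with the index bit-reversed before exponentiation. A toy sketch of that mapping over the prime field F_17 (the constants, the modulus, and the helpers here are purely illustrative; the real code uses F::MULTIPLICATIVE_GROUP_GENERATOR and F::primitive_root_of_unity):

// Illustrative only: map a query index to a coset element over F_17.
const P: u64 = 17;

fn pow_mod(mut base: u64, mut exp: u64) -> u64 {
    let mut acc = 1;
    while exp > 0 {
        if exp & 1 == 1 {
            acc = acc * base % P;
        }
        base = base * base % P;
        exp >>= 1;
    }
    acc
}

// Reverse the low `bits` bits of `x`.
fn reverse_bits(x: u64, bits: usize) -> u64 {
    (0..bits).fold(0, |acc, i| acc << 1 | (x >> i & 1))
}

fn main() {
    let g = 3; // generator of F_17^* (order 16), used as the coset shift
    let log_n = 3; // domain size n = 8
    let omega = pow_mod(g, (P - 1) / (1 << log_n)); // primitive 8th root of unity
    let x_index = 3;
    // subgroup_x = g * omega^reverse_bits(x_index, log_n)
    let subgroup_x = g * pow_mod(omega, reverse_bits(x_index, log_n)) % P;
    assert_eq!(subgroup_x, 12); // 3 * 9^6 mod 17
}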
@@ -248,9 +270,9 @@ fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
            fri_combine_initial(
                &round_proof.initial_trees_proof,
                alpha,
                os,
                zeta,
                subgroup_x,
                precomputed_reduced_evals,
                common_data,
            )
        } else {
@@ -267,20 +289,18 @@ fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
        let mut evals = round_proof.steps[i].evals.clone();
        // Insert P(y) into the evaluation vector, since it wasn't included by the prover.
        evals.insert(x_index & (arity - 1), e_x);
        evaluations.push(evals);
        verify_merkle_proof(
            flatten(&evaluations[i]),
            flatten(&evals),
            x_index >> arity_bits,
            proof.commit_phase_merkle_roots[i],
            &round_proof.steps[i].merkle_proof,
            false,
        )?;
        evaluations.push(evals);

        if i > 0 {
            // Update the point x to x^arity.
            for _ in 0..config.reduction_arity_bits[i - 1] {
                subgroup_x = subgroup_x.square();
            }
            subgroup_x = subgroup_x.exp_power_of_2(config.reduction_arity_bits[i - 1]);
        }
        domain_size = next_domain_size;
        old_x_index = x_index & (arity - 1);
@@ -296,9 +316,7 @@ fn fri_verifier_query_round<F: Field + Extendable<D>, const D: usize>(
        last_evals,
        *betas.last().unwrap(),
    );
    for _ in 0..final_arity_bits {
        subgroup_x = subgroup_x.square();
    }
    subgroup_x = subgroup_x.exp_power_of_2(final_arity_bits);

    // Final check of FRI. After all the reductions, we check that the final polynomial is equal
    // to the one sent by the prover.
@@ -153,6 +153,15 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
        product
    }

    /// Exponentiate `base` to the power of `2^power_log`.
    // TODO: Test
    pub fn exp_power_of_2(&mut self, mut base: Target, power_log: usize) -> Target {
        for _ in 0..power_log {
            base = self.square(base);
        }
        base
    }
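Squaring `power_log` times computes base^(2^power_log); the circuit method above does the same with `self.square`, and the FRI hunks earlier use it to replace the explicit squaring loops. A quick plain-u64 check of the identity, with an arbitrary modulus chosen just for the demo:

// Illustrative only: squaring `power_log` times equals raising to 2^power_log.
fn exp_power_of_2(mut base: u64, power_log: usize) -> u64 {
    const P: u64 = 1_000_000_007; // arbitrary prime modulus for the demo
    for _ in 0..power_log {
        base = base * base % P;
    }
    base
}

fn main() {
    // 3^(2^4) = 3^16 = 43046721, well below the modulus.
    assert_eq!(exp_power_of_2(3, 4), 43_046_721);
}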

    // TODO: Optimize this, maybe with a new gate.
    // TODO: Test
    /// Exponentiate `base` to the power of `exponent`, where `exponent < 2^num_bits`.
@@ -292,7 +292,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {

    /// Exponentiate `base` to the power of `2^power_log`.
    // TODO: Test
    pub fn exp_power_of_2(
    pub fn exp_power_of_2_extension(
        &mut self,
        mut base: ExtensionTarget<D>,
        power_log: usize,
@@ -59,7 +59,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
        let s_sigmas = &proof.openings.plonk_sigmas;
        let partial_products = &proof.openings.partial_products;

        let zeta_pow_deg = self.exp_power_of_2(zeta, inner_common_data.degree_bits);
        let zeta_pow_deg = self.exp_power_of_2_extension(zeta, inner_common_data.degree_bits);
        let vanishing_polys_zeta = context!(
            self,
            "evaluate the vanishing polynomial at our challenge point, zeta.",
@@ -89,7 +89,7 @@ impl<F: Extendable<D>, const D: usize> CircuitBuilder<F, D> {
        {
            let recombined_quotient = scale.reduce(chunk, self);
            let computed_vanishing_poly = self.mul_extension(z_h_zeta, recombined_quotient);
            self.named_route_extension(
            self.named_assert_equal_extension(
                vanishing_polys_zeta[i],
                computed_vanishing_poly,
                format!("Vanishing polynomial == Z_H * quotient, challenge {}", i),
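The assertion above checks the identity vanishing(zeta) = Z_H(zeta) * q(zeta), where Z_H(x) = x^n - 1 for the subgroup of size n, and the quotient q appears to be committed in chunks of degree below n that are recombined with powers of zeta^n (the `zeta_pow_deg` computed earlier in this function). A sketch of that recombination identity over a small prime field, with illustrative constants only:

// Illustrative only: a quotient q of degree < 2n split into two chunks of
// degree < n satisfies q(z) = q0(z) + z^n * q1(z).
const P: u64 = 97;

fn eval(coeffs: &[u64], x: u64) -> u64 {
    // Horner evaluation mod P, coefficients given lowest-degree first.
    coeffs.iter().rev().fold(0, |acc, &c| (acc * x + c) % P)
}

fn pow(x: u64, n: u64) -> u64 {
    (0..n).fold(1, |acc, _| acc * x % P)
}

fn main() {
    let n = 4;
    let q = [5, 1, 0, 7, 2, 3, 9, 4]; // degree < 2n
    let (q0, q1) = q.split_at(n);
    let z = 11;

    let zeta_pow_deg = pow(z, n as u64); // analogue of zeta_pow_deg in the diff
    let recombined = (eval(q0, z) + zeta_pow_deg * eval(q1, z)) % P;
    assert_eq!(recombined, eval(&q, z));
    // The verifier then checks vanishing(z) == (z^n - 1) * recombined (mod P).
}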
@@ -16,7 +16,7 @@ use crate::polynomial::polynomial::PolynomialCoeffs;
/// This struct abstracts away these operations by implementing Horner's method and keeping track
/// of the number of multiplications by `a` to compute the scaling factor.
/// See https://github.com/mir-protocol/plonky2/pull/69 for more details and discussions.
#[derive(Debug, Copy, Clone)]
#[derive(Debug, Clone)]
pub struct ReducingFactor<F: Field> {
    base: F,
    count: u64,
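A standalone sketch of the Horner-style reduction with a multiplication counter that the comment describes, using u64 arithmetic rather than the crate's field types (the method names mirror those in the diff, but this is an assumption-laden toy, not the real API):

// Illustrative only: a Horner-style reducer over u64 that counts how many
// times it has multiplied by `base`, mirroring the description above.
struct ReducingFactor {
    base: u64,
    count: u64,
}

impl ReducingFactor {
    fn new(base: u64) -> Self {
        Self { base, count: 0 }
    }

    /// Computes sum_i base^i * values[i] via Horner's method,
    /// recording one multiplication by `base` per value.
    fn reduce(&mut self, values: &[u64]) -> u64 {
        values.iter().rev().fold(0, |acc, &v| {
            self.count += 1;
            acc * self.base + v
        })
    }

    fn reset(&mut self) {
        self.count = 0;
    }
}

fn main() {
    let mut alpha = ReducingFactor::new(10);
    // 3 + 2*10 + 7*100 = 723, counting one multiplication by the base per term.
    assert_eq!(alpha.reduce(&[3, 2, 7]), 723);
    assert_eq!(alpha.count, 3);
    alpha.reset();
    assert_eq!(alpha.count, 0);
}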
@@ -79,7 +79,7 @@ impl<F: Field> ReducingFactor<F> {
    }
}

#[derive(Debug, Copy, Clone)]
#[derive(Debug, Clone)]
pub struct ReducingFactorTarget<const D: usize> {
    base: ExtensionTarget<D>,
    count: u64,
@@ -236,10 +236,13 @@ pub fn evaluate_gate_constraints_recursively<F: Extendable<D>, const D: usize>(
) -> Vec<ExtensionTarget<D>> {
    let mut constraints = vec![builder.zero_extension(); num_gate_constraints];
    for gate in gates {
        let gate_constraints = gate
            .gate
            .0
            .eval_filtered_recursively(builder, vars, &gate.prefix);
        let gate_constraints = context!(
            builder,
            &format!("evaluate {} constraints", gate.gate.0.id()),
            gate.gate
                .0
                .eval_filtered_recursively(builder, vars, &gate.prefix)
        );
        for (i, c) in gate_constraints.into_iter().enumerate() {
            constraints[i] = builder.add_extension(constraints[i], c);
        }