Mirror of https://github.com/logos-storage/plonky2.git (synced 2026-01-08 16:53:07 +00:00)

Merge pull request #486 from mir-protocol/recursive_starks: Recursive STARK verifier

This change is contained in commit 3f7cefbc6b.
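In rough outline, this PR adds a `recursive_verifier` module to `starky` that verifies a STARK proof inside a plonky2 circuit: proof-shaped targets are allocated with `add_virtual_stark_proof_with_pis`, bound to a concrete proof with `set_stark_proof_with_pis_target`, and constrained with `recursively_verify_stark_proof`. The sketch below is condensed from the `recursive_proof` test helper added further down in this diff; it assumes it lives inside the `starky` crate (as the test does, since `recover_degree_bits` is `pub(crate)`) and fixes the generics to the Fibonacci example and `PoseidonGoldilocksConfig` used in the tests.

// A minimal sketch, condensed from the `recursive_proof` test helper in this diff.
// Assumes it sits inside the starky crate next to fibonacci_stark.rs.
use anyhow::Result;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};

use crate::config::StarkConfig;
use crate::fibonacci_stark::FibonacciStark;
use crate::proof::StarkProofWithPublicInputs;
use crate::recursive_verifier::{
    add_virtual_stark_proof_with_pis, recursively_verify_stark_proof,
    set_stark_proof_with_pis_target,
};

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = FibonacciStark<F, D>;

/// Wraps an already-verified STARK proof in a plonky2 circuit and checks the wrapper proof.
fn wrap_stark_proof(stark: S, inner: StarkProofWithPublicInputs<F, C, D>, cfg: &StarkConfig) -> Result<()> {
    let mut builder = CircuitBuilder::<F, D>::new(CircuitConfig::standard_recursion_config());
    let mut pw = PartialWitness::new();

    // Allocate targets shaped like the inner proof and bind them to its concrete values.
    let degree_bits = inner.proof.recover_degree_bits(cfg);
    let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, cfg, degree_bits);
    set_stark_proof_with_pis_target(&mut pw, &pt, &inner);

    // Emit the in-circuit STARK verification constraints.
    recursively_verify_stark_proof::<F, C, S, D>(&mut builder, stark, pt, cfg);

    // Build, prove, and verify the outer plonky2 circuit.
    let data = builder.build::<C>();
    let proof = data.prove(pw)?;
    data.verify(proof)
}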
@@ -10,7 +10,6 @@ use crate::hash::merkle_tree::MerkleCap;
use crate::iop::challenger::{Challenger, RecursiveChallenger};
use crate::iop::target::Target;
use crate::plonk::circuit_builder::CircuitBuilder;
-use crate::plonk::circuit_data::CommonCircuitData;
use crate::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};

impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
@@ -89,9 +88,9 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
commit_phase_merkle_caps: &[MerkleCapTarget],
final_poly: &PolynomialCoeffsExtTarget<D>,
pow_witness: Target,
-inner_common_data: &CommonCircuitData<F, C, D>,
+inner_fri_config: &FriConfig,
) -> FriChallengesTarget<D> {
-let num_fri_queries = inner_common_data.config.fri_config.num_query_rounds;
+let num_fri_queries = inner_fri_config.num_query_rounds;
// Scaling factor to combine polynomials.
let fri_alpha = self.get_extension_challenge(builder);

@@ -30,6 +30,20 @@ impl FriConfig {
pub fn rate(&self) -> f64 {
1.0 / ((1 << self.rate_bits) as f64)
}
+
+pub fn fri_params(&self, degree_bits: usize, hiding: bool) -> FriParams {
+let reduction_arity_bits = self.reduction_strategy.reduction_arity_bits(
+degree_bits,
+self.rate_bits,
+self.num_query_rounds,
+);
+FriParams {
+config: self.clone(),
+hiding,
+degree_bits,
+reduction_arity_bits,
+}
+}
}

/// FRI parameters, including generated parameters which are specific to an instance size, in
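The new `FriConfig::fri_params` helper centralizes the `FriParams` construction that `CircuitBuilder::fri_params` and `StarkConfig::fri_params` below now delegate to. A minimal sketch of a call site (`config` and `degree_bits` are hypothetical names, not part of this diff):

// Illustrative only: derive instance-specific FRI parameters (degree bits, reduction arities)
// from a static FriConfig; `false` disables hiding, as StarkConfig::fri_params does below.
let fri_params = config.fri_config.fri_params(degree_bits, false);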
@@ -398,7 +398,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
"A non-negligible portion of field elements are in the range that permits non-canonical encodings. Need to do more analysis or enforce canonical encodings.");
}

-pub(crate) fn add_virtual_fri_proof(
+pub fn add_virtual_fri_proof(
&mut self,
num_leaves_per_oracle: &[usize],
params: &FriParams,
@@ -166,7 +166,7 @@ pub struct RecursiveChallenger<F: RichField + Extendable<D>, H: AlgebraicHasher<
impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
RecursiveChallenger<F, H, D>
{
-pub(crate) fn new(builder: &mut CircuitBuilder<F, D>) -> Self {
+pub fn new(builder: &mut CircuitBuilder<F, D>) -> Self {
let zero = builder.zero();
RecursiveChallenger {
sponge_state: [zero; SPONGE_WIDTH],
@@ -222,7 +222,7 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
.expect("Output buffer should be non-empty")
}

-pub(crate) fn get_n_challenges(
+pub fn get_n_challenges(
&mut self,
builder: &mut CircuitBuilder<F, D>,
n: usize,
@@ -5,6 +5,7 @@ use num::{BigUint, FromPrimitive, Zero};
use plonky2_field::extension_field::{Extendable, FieldExtension};
use plonky2_field::field_types::{Field, PrimeField};

+use crate::fri::structure::{FriOpenings, FriOpeningsTarget};
use crate::fri::witness_util::set_fri_proof_target;
use crate::gadgets::arithmetic_u32::U32Target;
use crate::gadgets::biguint::BigUintTarget;
@@ -207,66 +208,30 @@ pub trait Witness<F: Field> {
);
self.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap);

-for (&t, &x) in proof_target
-.openings
-.wires
-.iter()
-.zip_eq(&proof.openings.wires)
-{
-self.set_extension_target(t, x);
-}
-for (&t, &x) in proof_target
-.openings
-.constants
-.iter()
-.zip_eq(&proof.openings.constants)
-{
-self.set_extension_target(t, x);
-}
-for (&t, &x) in proof_target
-.openings
-.plonk_sigmas
-.iter()
-.zip_eq(&proof.openings.plonk_sigmas)
-{
-self.set_extension_target(t, x);
-}
-for (&t, &x) in proof_target
-.openings
-.plonk_zs
-.iter()
-.zip_eq(&proof.openings.plonk_zs)
-{
-self.set_extension_target(t, x);
-}
-for (&t, &x) in proof_target
-.openings
-.plonk_zs_right
-.iter()
-.zip_eq(&proof.openings.plonk_zs_right)
-{
-self.set_extension_target(t, x);
-}
-for (&t, &x) in proof_target
-.openings
-.partial_products
-.iter()
-.zip_eq(&proof.openings.partial_products)
-{
-self.set_extension_target(t, x);
-}
-for (&t, &x) in proof_target
-.openings
-.quotient_polys
-.iter()
-.zip_eq(&proof.openings.quotient_polys)
-{
-self.set_extension_target(t, x);
-}
+self.set_fri_openings(
+&proof_target.openings.to_fri_openings(),
+&proof.openings.to_fri_openings(),
+);

set_fri_proof_target(self, &proof_target.opening_proof, &proof.opening_proof);
}

+fn set_fri_openings<const D: usize>(
+&mut self,
+fri_openings_target: &FriOpeningsTarget<D>,
+fri_openings: &FriOpenings<F, D>,
+) where
+F: RichField + Extendable<D>,
+{
+for (batch_target, batch) in fri_openings_target
+.batches
+.iter()
+.zip_eq(&fri_openings.batches)
+{
+self.set_extension_targets(&batch_target.values, &batch.values);
+}
+}
+
fn set_wire(&mut self, wire: Wire, value: F) {
self.set_target(Target::Wire(wire), value)
}
@@ -393,18 +393,9 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}

fn fri_params(&self, degree_bits: usize) -> FriParams {
-let fri_config = &self.config.fri_config;
-let reduction_arity_bits = fri_config.reduction_strategy.reduction_arity_bits(
-degree_bits,
-fri_config.rate_bits,
-fri_config.num_query_rounds,
-);
-FriParams {
-config: fri_config.clone(),
-hiding: self.config.zero_knowledge,
-degree_bits,
-reduction_arity_bits,
-}
+self.config
+.fri_config
+.fri_params(degree_bits, self.config.zero_knowledge)
}

/// The number of (base field) `arithmetic` operations that can be performed in a single gate.
@@ -275,7 +275,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
commit_phase_merkle_caps,
final_poly,
pow_witness,
-inner_common_data,
+&inner_common_data.config.fri_config,
),
}
}
@@ -29,17 +29,6 @@ impl StarkConfig {
}

pub(crate) fn fri_params(&self, degree_bits: usize) -> FriParams {
-let fri_config = &self.fri_config;
-let reduction_arity_bits = fri_config.reduction_strategy.reduction_arity_bits(
-degree_bits,
-fri_config.rate_bits,
-fri_config.num_query_rounds,
-);
-FriParams {
-config: fri_config.clone(),
-hiding: false,
-degree_bits,
-reduction_arity_bits,
-}
+self.fri_config.fri_params(degree_bits, false)
}
}
@@ -80,10 +80,10 @@ impl<P: PackedField> ConstraintConsumer<P> {

pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> {
/// A random value used to combine multiple constraints into one.
-alpha: Target,
+alphas: Vec<Target>,

/// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
-constraint_acc: ExtensionTarget<D>,
+constraint_accs: Vec<ExtensionTarget<D>>,

/// The evaluation of `X - g^(n-1)`.
z_last: ExtensionTarget<D>,
@@ -100,6 +100,27 @@ pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: us
}

impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> {
+pub fn new(
+zero: ExtensionTarget<D>,
+alphas: Vec<Target>,
+z_last: ExtensionTarget<D>,
+lagrange_basis_first: ExtensionTarget<D>,
+lagrange_basis_last: ExtensionTarget<D>,
+) -> Self {
+Self {
+constraint_accs: vec![zero; alphas.len()],
+alphas,
+z_last,
+lagrange_basis_first,
+lagrange_basis_last,
+_phantom: Default::default(),
+}
+}
+
+pub fn accumulators(self) -> Vec<ExtensionTarget<D>> {
+self.constraint_accs
+}
+
/// Add one constraint valid on all rows except the last.
pub fn constraint(
&mut self,
@@ -116,8 +137,9 @@ impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F
builder: &mut CircuitBuilder<F, D>,
constraint: ExtensionTarget<D>,
) {
-self.constraint_acc =
-builder.scalar_mul_add_extension(self.alpha, self.constraint_acc, constraint);
+for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) {
+*acc = builder.scalar_mul_add_extension(alpha, *acc, constraint);
+}
}

/// Add one constraint, but first multiply it by a filter such that it will only apply to the
@@ -128,7 +150,7 @@ impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F
constraint: ExtensionTarget<D>,
) {
let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first);
-self.constraint(builder, filtered_constraint);
+self.constraint_wrapping(builder, filtered_constraint);
}

/// Add one constraint, but first multiply it by a filter such that it will only apply to the
@@ -139,6 +161,6 @@ impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F
constraint: ExtensionTarget<D>,
) {
let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last);
-self.constraint(builder, filtered_constraint);
+self.constraint_wrapping(builder, filtered_constraint);
}
}
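The recursive consumer now mirrors the native `ConstraintConsumer`: it keeps one running accumulator per challenge, and every emitted constraint c updates each accumulator as acc_i <- alpha_i * acc_i + c, so after constraints c_0, ..., c_(m-1) the accumulator for alpha_i holds sum_j alpha_i^(m-1-j) * c_j. A sketch of the same fold over plain field values (illustrative only, not part of this diff):

// Illustrative only: the per-challenge folding, over plain field values rather than circuit targets.
use plonky2::field::field_types::Field;

fn fold_constraints<F: Field>(alpha: F, constraints: &[F]) -> F {
    constraints.iter().fold(F::ZERO, |acc, &c| alpha * acc + c)
}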
@@ -67,9 +67,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStar
yield_constr
.constraint_last_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_RES]);

-// x0 <- x1
+// x0' <- x1
yield_constr.constraint(vars.next_values[0] - vars.local_values[1]);
-// x1 <- x0 + x1
+// x1' <- x0 + x1
yield_constr.constraint(vars.next_values[1] - vars.local_values[0] - vars.local_values[1]);
}

@@ -79,7 +79,25 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStar
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
-todo!()
+// Check public inputs.
+let pis_constraints = [
+builder.sub_extension(vars.local_values[0], vars.public_inputs[Self::PI_INDEX_X0]),
+builder.sub_extension(vars.local_values[1], vars.public_inputs[Self::PI_INDEX_X1]),
+builder.sub_extension(vars.local_values[1], vars.public_inputs[Self::PI_INDEX_RES]),
+];
+yield_constr.constraint_first_row(builder, pis_constraints[0]);
+yield_constr.constraint_first_row(builder, pis_constraints[1]);
+yield_constr.constraint_last_row(builder, pis_constraints[2]);
+
+// x0' <- x1
+let first_col_constraint = builder.sub_extension(vars.next_values[0], vars.local_values[1]);
+yield_constr.constraint(builder, first_col_constraint);
+// x1' <- x0 + x1
+let second_col_constraint = {
+let tmp = builder.sub_extension(vars.next_values[1], vars.local_values[0]);
+builder.sub_extension(tmp, vars.local_values[1])
+};
+yield_constr.constraint(builder, second_col_constraint);
}

fn constraint_degree(&self) -> usize {
@@ -90,15 +108,28 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStar
#[cfg(test)]
mod tests {
use anyhow::Result;
+use plonky2::field::extension_field::Extendable;
use plonky2::field::field_types::Field;
-use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
+use plonky2::hash::hash_types::RichField;
+use plonky2::iop::witness::PartialWitness;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use plonky2::plonk::circuit_data::CircuitConfig;
+use plonky2::plonk::config::{
+AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig,
+};
use plonky2::util::timing::TimingTree;

use crate::config::StarkConfig;
use crate::fibonacci_stark::FibonacciStark;
+use crate::proof::StarkProofWithPublicInputs;
use crate::prover::prove;
+use crate::recursive_verifier::{
+add_virtual_stark_proof_with_pis, recursively_verify_stark_proof,
+set_stark_proof_with_pis_target,
+};
+use crate::stark::Stark;
use crate::stark_testing::test_stark_low_degree;
-use crate::verifier::verify;
+use crate::verifier::verify_stark_proof;

fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
(0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
@@ -124,7 +155,7 @@ mod tests {
&mut TimingTree::default(),
)?;

-verify(stark, proof, &config)
+verify_stark_proof(stark, proof, &config)
}

#[test]
@@ -134,9 +165,73 @@ mod tests {
type F = <C as GenericConfig<D>>::F;
type S = FibonacciStark<F, D>;

-let config = StarkConfig::standard_fast_config();
let num_rows = 1 << 5;
let stark = S::new(num_rows);
test_stark_low_degree(stark)
}
+
+#[test]
+fn test_recursive_stark_verifier() -> Result<()> {
+init_logger();
+const D: usize = 2;
+type C = PoseidonGoldilocksConfig;
+type F = <C as GenericConfig<D>>::F;
+type S = FibonacciStark<F, D>;
+
+let config = StarkConfig::standard_fast_config();
+let num_rows = 1 << 5;
+let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
+let stark = S::new(num_rows);
+let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
+let proof = prove::<F, C, S, D>(
+stark,
+&config,
+trace,
+public_inputs,
+&mut TimingTree::default(),
+)?;
+verify_stark_proof(stark, proof.clone(), &config)?;
+
+recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
+}
+
+fn recursive_proof<
+F: RichField + Extendable<D>,
+C: GenericConfig<D, F = F>,
+S: Stark<F, D> + Copy,
+InnerC: GenericConfig<D, F = F>,
+const D: usize,
+>(
+stark: S,
+inner_proof: StarkProofWithPublicInputs<F, InnerC, D>,
+inner_config: &StarkConfig,
+print_gate_counts: bool,
+) -> Result<()>
+where
+InnerC::Hasher: AlgebraicHasher<F>,
+[(); S::COLUMNS]:,
+[(); S::PUBLIC_INPUTS]:,
+[(); C::Hasher::HASH_SIZE]:,
+{
+let circuit_config = CircuitConfig::standard_recursion_config();
+let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
+let mut pw = PartialWitness::new();
+let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
+let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits);
+set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof);
+
+recursively_verify_stark_proof::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);
+
+if print_gate_counts {
+builder.print_gate_counts(0);
+}
+
+let data = builder.build::<C>();
+let proof = data.prove(pw)?;
+data.verify(proof)
+}
+
+fn init_logger() {
+let _ = env_logger::builder().format_timestamp(None).try_init();
+}
}
@@ -1,16 +1,22 @@
use anyhow::Result;
use plonky2::field::extension_field::Extendable;
use plonky2::field::polynomial::PolynomialCoeffs;
-use plonky2::fri::proof::FriProof;
-use plonky2::hash::hash_types::RichField;
+use plonky2::fri::proof::{FriProof, FriProofTarget};
+use plonky2::gadgets::polynomial::PolynomialCoeffsExtTarget;
+use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
use plonky2::hash::merkle_tree::MerkleCap;
-use plonky2::iop::challenger::Challenger;
-use plonky2::plonk::config::GenericConfig;
+use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
+use plonky2::iop::target::Target;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};

use crate::config::StarkConfig;
-use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
+use crate::proof::{
+StarkOpeningSet, StarkOpeningSetTarget, StarkProof, StarkProofChallenges,
+StarkProofChallengesTarget, StarkProofTarget, StarkProofWithPublicInputs,
+StarkProofWithPublicInputsTarget,
+};

-#[allow(clippy::too_many_arguments)]
fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
trace_cap: &MerkleCap<F, C::Hasher>,
quotient_polys_cap: &MerkleCap<F, C::Hasher>,
@@ -22,8 +28,6 @@ fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, cons
degree_bits: usize,
) -> Result<StarkProofChallenges<F, D>> {
let num_challenges = config.num_challenges;
-let num_fri_queries = config.fri_config.num_query_rounds;
-let lde_size = 1 << (degree_bits + config.fri_config.rate_bits);

let mut challenger = Challenger::<F, C::Hasher>::new();

@@ -94,6 +98,84 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
}
}

+#[allow(clippy::too_many_arguments)]
+pub(crate) fn get_challenges_target<
+F: RichField + Extendable<D>,
+C: GenericConfig<D, F = F>,
+const D: usize,
+>(
+builder: &mut CircuitBuilder<F, D>,
+trace_cap: &MerkleCapTarget,
+quotient_polys_cap: &MerkleCapTarget,
+openings: &StarkOpeningSetTarget<D>,
+commit_phase_merkle_caps: &[MerkleCapTarget],
+final_poly: &PolynomialCoeffsExtTarget<D>,
+pow_witness: Target,
+config: &StarkConfig,
+) -> StarkProofChallengesTarget<D>
+where
+C::Hasher: AlgebraicHasher<F>,
+{
+let num_challenges = config.num_challenges;
+
+let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
+
+challenger.observe_cap(trace_cap);
+let stark_alphas = challenger.get_n_challenges(builder, num_challenges);
+
+challenger.observe_cap(quotient_polys_cap);
+let stark_zeta = challenger.get_extension_challenge(builder);
+
+challenger.observe_openings(&openings.to_fri_openings());
+
+StarkProofChallengesTarget {
+stark_alphas,
+stark_zeta,
+fri_challenges: challenger.fri_challenges::<C>(
+builder,
+commit_phase_merkle_caps,
+final_poly,
+pow_witness,
+&config.fri_config,
+),
+}
+}
+
+impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
+pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
+&self,
+builder: &mut CircuitBuilder<F, D>,
+config: &StarkConfig,
+) -> StarkProofChallengesTarget<D>
+where
+C::Hasher: AlgebraicHasher<F>,
+{
+let StarkProofTarget {
+trace_cap,
+quotient_polys_cap,
+openings,
+opening_proof:
+FriProofTarget {
+commit_phase_merkle_caps,
+final_poly,
+pow_witness,
+..
+},
+} = &self.proof;
+
+get_challenges_target::<F, C, D>(
+builder,
+trace_cap,
+quotient_polys_cap,
+openings,
+commit_phase_merkle_caps,
+final_poly,
+*pow_witness,
+config,
+)
+}
+}
+
// TODO: Deal with the compressed stuff.
// impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
//     CompressedProofWithPublicInputs<F, C, D>
@@ -2,6 +2,7 @@
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(incomplete_features)]
+#![allow(clippy::too_many_arguments)]
#![feature(generic_const_exprs)]

pub mod config;
@@ -9,6 +10,7 @@ pub mod constraint_consumer;
mod get_challenges;
pub mod proof;
pub mod prover;
+pub mod recursive_verifier;
pub mod stark;
pub mod stark_testing;
pub mod vars;
|||||||
@ -1,12 +1,21 @@
|
|||||||
use plonky2::field::extension_field::Extendable;
|
use plonky2::field::extension_field::{Extendable, FieldExtension};
|
||||||
use plonky2::fri::oracle::PolynomialBatch;
|
use plonky2::fri::oracle::PolynomialBatch;
|
||||||
use plonky2::fri::proof::{CompressedFriProof, FriChallenges, FriProof};
|
use plonky2::fri::proof::{
|
||||||
use plonky2::fri::structure::{FriOpeningBatch, FriOpenings};
|
CompressedFriProof, FriChallenges, FriChallengesTarget, FriProof, FriProofTarget,
|
||||||
use plonky2::hash::hash_types::RichField;
|
};
|
||||||
|
use plonky2::fri::structure::{
|
||||||
|
FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget,
|
||||||
|
};
|
||||||
|
use plonky2::hash::hash_types::{MerkleCapTarget, RichField};
|
||||||
use plonky2::hash::merkle_tree::MerkleCap;
|
use plonky2::hash::merkle_tree::MerkleCap;
|
||||||
|
use plonky2::iop::ext_target::ExtensionTarget;
|
||||||
|
use plonky2::iop::target::Target;
|
||||||
use plonky2::plonk::config::GenericConfig;
|
use plonky2::plonk::config::GenericConfig;
|
||||||
use rayon::prelude::*;
|
use rayon::prelude::*;
|
||||||
|
|
||||||
|
use crate::config::StarkConfig;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
|
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
|
||||||
/// Merkle cap of LDEs of trace values.
|
/// Merkle cap of LDEs of trace values.
|
||||||
pub trace_cap: MerkleCap<F, C::Hasher>,
|
pub trace_cap: MerkleCap<F, C::Hasher>,
|
||||||
@ -18,6 +27,36 @@ pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>,
|
|||||||
pub opening_proof: FriProof<F, C::Hasher, D>,
|
pub opening_proof: FriProof<F, C::Hasher, D>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> StarkProof<F, C, D> {
|
||||||
|
pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
|
||||||
|
let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
|
||||||
|
.initial_trees_proof
|
||||||
|
.evals_proofs[0]
|
||||||
|
.1;
|
||||||
|
let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
|
||||||
|
lde_bits - config.fri_config.rate_bits
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct StarkProofTarget<const D: usize> {
|
||||||
|
pub trace_cap: MerkleCapTarget,
|
||||||
|
pub quotient_polys_cap: MerkleCapTarget,
|
||||||
|
pub openings: StarkOpeningSetTarget<D>,
|
||||||
|
pub opening_proof: FriProofTarget<D>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<const D: usize> StarkProofTarget<D> {
|
||||||
|
pub(crate) fn recover_degree_bits(&self, config: &StarkConfig) -> usize {
|
||||||
|
let initial_merkle_proof = &self.opening_proof.query_round_proofs[0]
|
||||||
|
.initial_trees_proof
|
||||||
|
.evals_proofs[0]
|
||||||
|
.1;
|
||||||
|
let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
|
||||||
|
lde_bits - config.fri_config.rate_bits
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
pub struct StarkProofWithPublicInputs<
|
pub struct StarkProofWithPublicInputs<
|
||||||
F: RichField + Extendable<D>,
|
F: RichField + Extendable<D>,
|
||||||
C: GenericConfig<D, F = F>,
|
C: GenericConfig<D, F = F>,
|
||||||
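`recover_degree_bits` reads the trace length off the proof itself: the Merkle authentication path in the first FRI query has `lde_bits - cap_height` siblings, so `lde_bits = cap_height + siblings.len()`, and subtracting `rate_bits` gives back the original `degree_bits`. With purely illustrative numbers: `cap_height = 4` and a 9-element path give `lde_bits = 13`; with `rate_bits = 3` the recovered `degree_bits` is 10, i.e. a 2^10-row trace.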
@@ -28,6 +67,11 @@ pub struct StarkProofWithPublicInputs<
pub public_inputs: Vec<F>,
}

+pub struct StarkProofWithPublicInputsTarget<const D: usize> {
+pub proof: StarkProofTarget<D>,
+pub public_inputs: Vec<Target>,
+}
+
pub struct CompressedStarkProof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@@ -60,7 +104,14 @@ pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: us
pub fri_challenges: FriChallenges<F, D>,
}

+pub(crate) struct StarkProofChallengesTarget<const D: usize> {
+pub stark_alphas: Vec<Target>,
+pub stark_zeta: ExtensionTarget<D>,
+pub fri_challenges: FriChallengesTarget<D>,
+}
+
/// Purported values of each polynomial at the challenge point.
+#[derive(Debug, Clone)]
pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
pub local_values: Vec<F::Extension>,
pub next_values: Vec<F::Extension>,
@@ -72,7 +123,7 @@ pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
pub fn new<C: GenericConfig<D, F = F>>(
zeta: F::Extension,
-g: F::Extension,
+g: F,
trace_commitment: &PolynomialBatch<F, C, D>,
quotient_commitment: &PolynomialBatch<F, C, D>,
) -> Self {
@@ -84,7 +135,7 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
};
Self {
local_values: eval_commitment(zeta, trace_commitment),
-next_values: eval_commitment(zeta * g, trace_commitment),
+next_values: eval_commitment(zeta.scalar_mul(g), trace_commitment),
permutation_zs: vec![/*TODO*/],
permutation_zs_right: vec![/*TODO*/],
quotient_polys: eval_commitment(zeta, quotient_commitment),
@@ -112,3 +163,34 @@ impl<F: RichField + Extendable<D>, const D: usize> StarkOpeningSet<F, D> {
}
}
}
+
+pub struct StarkOpeningSetTarget<const D: usize> {
+pub local_values: Vec<ExtensionTarget<D>>,
+pub next_values: Vec<ExtensionTarget<D>>,
+pub permutation_zs: Vec<ExtensionTarget<D>>,
+pub permutation_zs_right: Vec<ExtensionTarget<D>>,
+pub quotient_polys: Vec<ExtensionTarget<D>>,
+}
+
+impl<const D: usize> StarkOpeningSetTarget<D> {
+pub(crate) fn to_fri_openings(&self) -> FriOpeningsTarget<D> {
+let zeta_batch = FriOpeningBatchTarget {
+values: [
+self.local_values.as_slice(),
+self.quotient_polys.as_slice(),
+self.permutation_zs.as_slice(),
+]
+.concat(),
+};
+let zeta_right_batch = FriOpeningBatchTarget {
+values: [
+self.next_values.as_slice(),
+self.permutation_zs_right.as_slice(),
+]
+.concat(),
+};
+FriOpeningsTarget {
+batches: vec![zeta_batch, zeta_right_batch],
+}
+}
+}
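The openings are packed into two FRI opening batches: everything evaluated at zeta (local values, quotient polynomials, permutation Z polynomials) and everything evaluated at g·zeta (next values and the shifted permutation Z polynomials). Their order mirrors the polynomial lists in the `FriBatchInfo`/`FriBatchInfoTarget` batches that `Stark::fri_instance` and `fri_instance_target` build further down (where the permutation entries are still a TODO), so the FRI verifier can pair openings with polynomials.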
@@ -107,7 +107,7 @@ where
// To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
// `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since
// `(g * zeta)^n = zeta^n`, where `n` is the order of `g`.
-let g = F::Extension::primitive_root_of_unity(degree_bits);
+let g = F::primitive_root_of_unity(degree_bits);
ensure!(
zeta.exp_power_of_2(degree_bits) != F::Extension::ONE,
"Opening point is in the subgroup."
@@ -115,7 +115,7 @@ where
let openings = StarkOpeningSet::new(zeta, g, &trace_commitment, &quotient_commitment);
challenger.observe_openings(&openings.to_fri_openings());

-// TODO: Add permuation checks
+// TODO: Add permutation checks
let initial_merkle_trees = &[&trace_commitment, &quotient_commitment];
let fri_params = config.fri_params(degree_bits);

@@ -123,7 +123,7 @@ where
timing,
"compute openings proof",
PolynomialBatch::prove_openings(
-&stark.fri_instance(zeta, g, rate_bits, config.num_challenges),
+&stark.fri_instance(zeta, g, config.num_challenges),
initial_merkle_trees,
&mut challenger,
&fri_params,
starky/src/recursive_verifier.rs (new file, 259 lines)
@@ -0,0 +1,259 @@
+use itertools::Itertools;
+use plonky2::field::extension_field::Extendable;
+use plonky2::field::field_types::Field;
+use plonky2::fri::witness_util::set_fri_proof_target;
+use plonky2::hash::hash_types::RichField;
+use plonky2::iop::ext_target::ExtensionTarget;
+use plonky2::iop::witness::Witness;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
+use plonky2::util::reducing::ReducingFactorTarget;
+
+use crate::config::StarkConfig;
+use crate::constraint_consumer::RecursiveConstraintConsumer;
+use crate::proof::{
+StarkOpeningSetTarget, StarkProof, StarkProofChallengesTarget, StarkProofTarget,
+StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget,
+};
+use crate::stark::Stark;
+use crate::vars::StarkEvaluationTargets;
+
+pub fn recursively_verify_stark_proof<
+F: RichField + Extendable<D>,
+C: GenericConfig<D, F = F>,
+S: Stark<F, D>,
+const D: usize,
+>(
+builder: &mut CircuitBuilder<F, D>,
+stark: S,
+proof_with_pis: StarkProofWithPublicInputsTarget<D>,
+inner_config: &StarkConfig,
+) where
+C::Hasher: AlgebraicHasher<F>,
+[(); S::COLUMNS]:,
+[(); S::PUBLIC_INPUTS]:,
+{
+assert_eq!(proof_with_pis.public_inputs.len(), S::PUBLIC_INPUTS);
+let degree_bits = proof_with_pis.proof.recover_degree_bits(inner_config);
+let challenges = proof_with_pis.get_challenges::<F, C>(builder, inner_config);
+
+recursively_verify_stark_proof_with_challenges::<F, C, S, D>(
+builder,
+stark,
+proof_with_pis,
+challenges,
+inner_config,
+degree_bits,
+);
+}
+
+/// Recursively verifies an inner proof.
+fn recursively_verify_stark_proof_with_challenges<
+F: RichField + Extendable<D>,
+C: GenericConfig<D, F = F>,
+S: Stark<F, D>,
+const D: usize,
+>(
+builder: &mut CircuitBuilder<F, D>,
+stark: S,
+proof_with_pis: StarkProofWithPublicInputsTarget<D>,
+challenges: StarkProofChallengesTarget<D>,
+inner_config: &StarkConfig,
+degree_bits: usize,
+) where
+C::Hasher: AlgebraicHasher<F>,
+[(); S::COLUMNS]:,
+[(); S::PUBLIC_INPUTS]:,
+{
+let one = builder.one_extension();
+
+let StarkProofWithPublicInputsTarget {
+proof,
+public_inputs,
+} = proof_with_pis;
+let StarkOpeningSetTarget {
+local_values,
+next_values,
+permutation_zs,
+permutation_zs_right,
+quotient_polys,
+} = &proof.openings;
+let vars = StarkEvaluationTargets {
+local_values: &local_values.to_vec().try_into().unwrap(),
+next_values: &next_values.to_vec().try_into().unwrap(),
+public_inputs: &public_inputs
+.into_iter()
+.map(|t| builder.convert_to_ext(t))
+.collect::<Vec<_>>()
+.try_into()
+.unwrap(),
+};
+let zeta_pow_deg = builder.exp_power_of_2_extension(challenges.stark_zeta, degree_bits);
+let z_h_zeta = builder.sub_extension(zeta_pow_deg, one);
+let (l_1, l_last) =
+eval_l_1_and_l_last_recursively(builder, degree_bits, challenges.stark_zeta, z_h_zeta);
+let last =
+builder.constant_extension(F::Extension::primitive_root_of_unity(degree_bits).inverse());
+let z_last = builder.sub_extension(challenges.stark_zeta, last);
+let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
+builder.zero_extension(),
+challenges.stark_alphas,
+z_last,
+l_1,
+l_last,
+);
+stark.eval_ext_recursively(builder, vars, &mut consumer);
+let vanishing_polys_zeta = consumer.accumulators();
+
+// Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
+let quotient_polys_zeta = &proof.openings.quotient_polys;
+let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
+for (i, chunk) in quotient_polys_zeta
+.chunks(stark.quotient_degree_factor())
+.enumerate()
+{
+let recombined_quotient = scale.reduce(chunk, builder);
+let computed_vanishing_poly = builder.mul_extension(z_h_zeta, recombined_quotient);
+builder.connect_extension(vanishing_polys_zeta[i], computed_vanishing_poly);
+}
+
+// TODO: Permutation polynomials.
+let merkle_caps = &[proof.trace_cap, proof.quotient_polys_cap];
+
+let fri_instance = stark.fri_instance_target(
+builder,
+challenges.stark_zeta,
+F::primitive_root_of_unity(degree_bits),
+inner_config.num_challenges,
+);
+builder.verify_fri_proof::<C>(
+&fri_instance,
+&proof.openings.to_fri_openings(),
+&challenges.fri_challenges,
+merkle_caps,
+&proof.opening_proof,
+&inner_config.fri_params(degree_bits),
+);
+}
+
+fn eval_l_1_and_l_last_recursively<F: RichField + Extendable<D>, const D: usize>(
+builder: &mut CircuitBuilder<F, D>,
+log_n: usize,
+x: ExtensionTarget<D>,
+z_x: ExtensionTarget<D>,
+) -> (ExtensionTarget<D>, ExtensionTarget<D>) {
+let n = builder.constant_extension(F::Extension::from_canonical_usize(1 << log_n));
+let g = builder.constant_extension(F::Extension::primitive_root_of_unity(log_n));
+let one = builder.one_extension();
+let l_1_deno = builder.mul_sub_extension(n, x, n);
+let l_last_deno = builder.mul_sub_extension(g, x, one);
+let l_last_deno = builder.mul_extension(n, l_last_deno);
+
+(
+builder.div_extension(z_x, l_1_deno),
+builder.div_extension(z_x, l_last_deno),
+)
+}
+
+pub fn add_virtual_stark_proof_with_pis<
+F: RichField + Extendable<D>,
+S: Stark<F, D>,
+const D: usize,
+>(
+builder: &mut CircuitBuilder<F, D>,
+stark: S,
+config: &StarkConfig,
+degree_bits: usize,
+) -> StarkProofWithPublicInputsTarget<D> {
+let proof = add_virtual_stark_proof::<F, S, D>(builder, stark, config, degree_bits);
+let public_inputs = builder.add_virtual_targets(S::PUBLIC_INPUTS);
+StarkProofWithPublicInputsTarget {
+proof,
+public_inputs,
+}
+}
+
+pub fn add_virtual_stark_proof<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
+builder: &mut CircuitBuilder<F, D>,
+stark: S,
+config: &StarkConfig,
+degree_bits: usize,
+) -> StarkProofTarget<D> {
+let fri_params = config.fri_params(degree_bits);
+let cap_height = fri_params.config.cap_height;
+
+let num_leaves_per_oracle = &[
+S::COLUMNS,
+// TODO: permutation polys
+stark.quotient_degree_factor() * config.num_challenges,
+];
+
+StarkProofTarget {
+trace_cap: builder.add_virtual_cap(cap_height),
+quotient_polys_cap: builder.add_virtual_cap(cap_height),
+openings: add_stark_opening_set::<F, S, D>(builder, stark, config),
+opening_proof: builder.add_virtual_fri_proof(num_leaves_per_oracle, &fri_params),
+}
+}
+
+fn add_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(
+builder: &mut CircuitBuilder<F, D>,
+stark: S,
+config: &StarkConfig,
+) -> StarkOpeningSetTarget<D> {
+let num_challenges = config.num_challenges;
+StarkOpeningSetTarget {
+local_values: builder.add_virtual_extension_targets(S::COLUMNS),
+next_values: builder.add_virtual_extension_targets(S::COLUMNS),
+permutation_zs: vec![/*TODO*/],
+permutation_zs_right: vec![/*TODO*/],
+quotient_polys: builder
+.add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),
+}
+}
+
+pub fn set_stark_proof_with_pis_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
+witness: &mut W,
+stark_proof_with_pis_target: &StarkProofWithPublicInputsTarget<D>,
+stark_proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
+) where
+F: RichField + Extendable<D>,
+C::Hasher: AlgebraicHasher<F>,
+W: Witness<F>,
+{
+let StarkProofWithPublicInputs {
+proof,
+public_inputs,
+} = stark_proof_with_pis;
+let StarkProofWithPublicInputsTarget {
+proof: pt,
+public_inputs: pi_targets,
+} = stark_proof_with_pis_target;
+
+// Set public inputs.
+for (&pi_t, &pi) in pi_targets.iter().zip_eq(public_inputs) {
+witness.set_target(pi_t, pi);
+}
+
+set_stark_proof_target(witness, pt, proof);
+}
+
+pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
+witness: &mut W,
+proof_target: &StarkProofTarget<D>,
+proof: &StarkProof<F, C, D>,
+) where
+F: RichField + Extendable<D>,
+C::Hasher: AlgebraicHasher<F>,
+W: Witness<F>,
+{
+witness.set_cap_target(&proof_target.trace_cap, &proof.trace_cap);
+witness.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap);
+
+witness.set_fri_openings(
+&proof_target.openings.to_fri_openings(),
+&proof.openings.to_fri_openings(),
+);
+
+set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);
+}
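For reference, `eval_l_1_and_l_last_recursively` evaluates the first and last Lagrange basis polynomials out of the vanishing term it is handed: with n = 2^log_n, g the order-n primitive root of unity, and z_x = x^n - 1 (the `z_h_zeta` computed above), it returns L_1(x) = (x^n - 1) / (n·(x - 1)) and L_last(x) = (x^n - 1) / (n·(g·x - 1)), whose denominators are exactly the `l_1_deno = n·x - n` and `l_last_deno = n·(g·x - 1)` built in the function.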
@@ -1,7 +1,11 @@
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
-use plonky2::fri::structure::{FriBatchInfo, FriInstanceInfo, FriOracleInfo, FriPolynomialInfo};
+use plonky2::fri::structure::{
+FriBatchInfo, FriBatchInfoTarget, FriInstanceInfo, FriInstanceInfoTarget, FriOracleInfo,
+FriPolynomialInfo,
+};
use plonky2::hash::hash_types::RichField;
+use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
@@ -75,8 +79,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
fn fri_instance(
&self,
zeta: F::Extension,
-g: F::Extension,
-rate_bits: usize,
+g: F,
num_challenges: usize,
) -> FriInstanceInfo<F, D> {
let no_blinding_oracle = FriOracleInfo { blinding: false };
@@ -88,12 +91,40 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
polynomials: [trace_info.clone(), quotient_info].concat(),
};
let zeta_right_batch = FriBatchInfo::<F, D> {
-point: zeta * g,
+point: zeta.scalar_mul(g),
polynomials: trace_info,
};
FriInstanceInfo {
oracles: vec![no_blinding_oracle; 3],
-batches: vec![zeta_batch],
+batches: vec![zeta_batch, zeta_right_batch],
+}
+}
+
+/// Computes the FRI instance used to prove this Stark.
+// TODO: Permutation polynomials.
+fn fri_instance_target(
+&self,
+builder: &mut CircuitBuilder<F, D>,
+zeta: ExtensionTarget<D>,
+g: F,
+num_challenges: usize,
+) -> FriInstanceInfoTarget<D> {
+let no_blinding_oracle = FriOracleInfo { blinding: false };
+let trace_info = FriPolynomialInfo::from_range(0, 0..Self::COLUMNS);
+let quotient_info =
+FriPolynomialInfo::from_range(1, 0..self.quotient_degree_factor() * num_challenges);
+let zeta_batch = FriBatchInfoTarget {
+point: zeta,
+polynomials: [trace_info.clone(), quotient_info].concat(),
+};
+let zeta_right = builder.mul_const_extension(g, zeta);
+let zeta_right_batch = FriBatchInfoTarget {
+point: zeta_right,
+polynomials: trace_info,
+};
+FriInstanceInfoTarget {
+oracles: vec![no_blinding_oracle; 3],
+batches: vec![zeta_batch, zeta_right_batch],
}
}
}
@@ -5,15 +5,14 @@ use plonky2::fri::verifier::verify_fri_proof;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::plonk::plonk_common::reduce_with_powers;
-use plonky2_util::log2_strict;

use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
-use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
+use crate::proof::{StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vars::StarkEvaluationVars;

-pub fn verify<
+pub fn verify_stark_proof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
@@ -28,12 +27,13 @@ where
[(); S::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
-let degree_bits = log2_strict(recover_degree(&proof_with_pis.proof, config));
+ensure!(proof_with_pis.public_inputs.len() == S::PUBLIC_INPUTS);
+let degree_bits = proof_with_pis.proof.recover_degree_bits(config);
let challenges = proof_with_pis.get_challenges(config, degree_bits)?;
-verify_with_challenges(stark, proof_with_pis, challenges, degree_bits, config)
+verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, degree_bits, config)
}

-pub(crate) fn verify_with_challenges<
+pub(crate) fn verify_stark_proof_with_challenges<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
@@ -54,8 +54,6 @@ where
proof,
public_inputs,
} = proof_with_pis;
-let local_values = &proof.openings.local_values;
-let next_values = &proof.openings.local_values;
let StarkOpeningSet {
local_values,
next_values,
@@ -112,8 +110,7 @@ where
verify_fri_proof::<F, C, D>(
&stark.fri_instance(
challenges.stark_zeta,
-F::primitive_root_of_unity(degree_bits).into(),
-config.fri_config.rate_bits,
+F::primitive_root_of_unity(degree_bits),
config.num_challenges,
),
&proof.openings.to_fri_openings(),
@@ -139,17 +136,6 @@ fn eval_l_1_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
}

/// Recover the length of the trace from a STARK proof and a STARK config.
-fn recover_degree<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
-proof: &StarkProof<F, C, D>,
-config: &StarkConfig,
-) -> usize {
-let initial_merkle_proof = &proof.opening_proof.query_round_proofs[0]
-.initial_trees_proof
-.evals_proofs[0]
-.1;
-let lde_bits = config.fri_config.cap_height + initial_merkle_proof.siblings.len();
-1 << (lde_bits - config.fri_config.rate_bits)
-}

#[cfg(test)]
mod tests {
@@ -117,7 +117,7 @@ mod tests {
use starky::prover::prove;
use starky::stark::Stark;
use starky::stark_testing::test_stark_low_degree;
-use starky::verifier::verify;
+use starky::verifier::verify_stark_proof;

use crate::system_zero::SystemZero;

@@ -136,7 +136,7 @@ mod tests {
let trace = system.generate_trace();
let proof = prove::<F, C, S, D>(system, &config, trace, public_inputs, &mut timing)?;

-verify(system, proof, &config)
+verify_stark_proof(system, proof, &config)
}

#[test]