Mirror of https://github.com/logos-storage/plonky2.git (synced 2026-01-02 13:53:07 +00:00)
fix: remove unstable features from plonky2
Signed-off-by: Brandon H. Gomes <bhgomes@pm.me>
parent 11600b93c0
commit 6fd0da216a

@@ -1,6 +1,5 @@
use itertools::Itertools;
use plonky2_field::types::Field;
use plonky2_field::types::PrimeField;
use plonky2_field::types::{Field, PrimeField};
use rayon::prelude::*;

use crate::curve::curve_summation::affine_multisummation_best;

@@ -188,8 +187,7 @@ pub(crate) fn to_digits<C: Curve>(x: &C::ScalarField, w: usize) -> Vec<usize> {
mod tests {
use num::BigUint;
use plonky2_field::secp256k1_scalar::Secp256K1Scalar;
use plonky2_field::types::Field;
use plonky2_field::types::PrimeField;
use plonky2_field::types::{Field, PrimeField};

use crate::curve::curve_msm::{msm_execute, msm_precompute, to_digits};
use crate::curve::curve_types::Curve;

@@ -1,7 +1,6 @@
use std::ops::Mul;

use plonky2_field::types::Field;
use plonky2_field::types::PrimeField;
use plonky2_field::types::{Field, PrimeField};

use crate::curve::curve_types::{Curve, CurveScalar, ProjectivePoint};

@@ -41,8 +41,7 @@ const SECP256K1_GENERATOR_Y: Secp256K1Base = Secp256K1Base([
mod tests {
use num::BigUint;
use plonky2_field::secp256k1_scalar::Secp256K1Scalar;
use plonky2_field::types::Field;
use plonky2_field::types::PrimeField;
use plonky2_field::types::{Field, PrimeField};

use crate::curve::curve_types::{AffinePoint, Curve, ProjectivePoint};
use crate::curve::secp256k1::Secp256K1;

@@ -346,11 +346,10 @@ impl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>
mod tests {
use anyhow::Result;
use num::{BigUint, FromPrimitive, Integer};
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::{
iop::witness::PartialWitness,
plonk::{circuit_builder::CircuitBuilder, circuit_data::CircuitConfig},
};
use rand::Rng;

use crate::gadgets::biguint::{CircuitBuilderBiguint, WitnessBigUint};

@@ -71,8 +71,7 @@ mod tests {
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2_field::secp256k1_scalar::Secp256K1Scalar;
use plonky2_field::types::Field;
use plonky2_field::types::PrimeField;
use plonky2_field::types::{Field, PrimeField};

use crate::curve::curve_types::{Curve, CurveScalar};
use crate::curve::secp256k1::Secp256K1;

@@ -6,8 +6,8 @@ use plonky2::iop::generator::{GeneratedValues, SimpleGenerator};
use plonky2::iop::target::{BoolTarget, Target};
use plonky2::iop::witness::PartitionWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2_field::types::PrimeField;
use plonky2_field::{extension::Extendable, types::Field};
use plonky2_field::extension::Extendable;
use plonky2_field::types::{Field, PrimeField};
use plonky2_u32::gadgets::arithmetic_u32::{CircuitBuilderU32, U32Target};
use plonky2_u32::gadgets::range_check::range_check_u32_circuit;
use plonky2_u32::witness::GeneratedValuesU32;

@@ -6,12 +6,7 @@ use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::hash::hash_types::RichField;

use crate::arithmetic::add;
use crate::arithmetic::columns;
use crate::arithmetic::compare;
use crate::arithmetic::modular;
use crate::arithmetic::mul;
use crate::arithmetic::sub;
use crate::arithmetic::{add, columns, compare, modular, mul, sub};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

@@ -87,7 +87,8 @@
//! In the case of DIV, we do something similar, except that we "replace"
//! the modulus with "2^256" to force the quotient to be zero.

use num::{bigint::Sign, BigInt, One, Zero};
use num::bigint::Sign;
use num::{BigInt, One, Zero};
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;

@@ -1,5 +1,4 @@
use std::env;
use std::fs;
use std::{env, fs};

use hex::encode;
use plonky2_evm::cpu::kernel::assemble_to_bytes;

@@ -7,15 +7,12 @@ use plonky2_util::ceil_div_usize;

use super::ast::PushTarget;
use crate::cpu::kernel::ast::Item::LocalLabelDeclaration;
use crate::cpu::kernel::ast::StackReplacement;
use crate::cpu::kernel::ast::{File, Item, StackReplacement};
use crate::cpu::kernel::keccak_util::hash_kernel;
use crate::cpu::kernel::opcodes::{get_opcode, get_push_opcode};
use crate::cpu::kernel::optimizer::optimize_asm;
use crate::cpu::kernel::stack::stack_manipulation::expand_stack_manipulation;
use crate::cpu::kernel::utils::u256_to_trimmed_be_bytes;
use crate::cpu::kernel::{
ast::{File, Item},
opcodes::{get_opcode, get_push_opcode},
};
use crate::generation::prover_input::ProverInputFn;
use crate::keccak_sponge::columns::KECCAK_RATE_BYTES;

@@ -374,8 +371,9 @@ mod tests {

use itertools::Itertools;

use crate::cpu::kernel::assembler::*;
use crate::cpu::kernel::ast::*;
use crate::cpu::kernel::parser::parse;
use crate::cpu::kernel::{assembler::*, ast::*};

#[test]
fn two_files() {

@@ -5,11 +5,9 @@ use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::keccak::columns::reg_step;
use crate::keccak::columns::NUM_COLUMNS;
use crate::keccak::columns::{reg_step, NUM_COLUMNS};
use crate::keccak::keccak_stark::NUM_ROUNDS;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_COLUMNS>,

@@ -15,8 +15,7 @@ use crate::keccak_memory::columns::*;
use crate::memory::segments::Segment;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
Column::singles([COL_CONTEXT, COL_SEGMENT, COL_VIRTUAL, COL_READ_TIMESTAMP]).collect()

@@ -21,8 +21,7 @@ use crate::keccak_sponge::columns::*;
use crate::memory::segments::Segment;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {

@@ -27,9 +27,9 @@ use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::PermutationCheckVars;
use crate::permutation::{
compute_permutation_z_polys, get_n_grand_product_challenge_sets, GrandProductChallengeSet,
PermutationCheckVars,
};
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof};
use crate::stark::Stark;

@@ -13,13 +13,12 @@ use plonky2::iop::target::Target;
use plonky2::iop::witness::Witness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, VerifierCircuitData, VerifierCircuitTarget};
use plonky2::plonk::config::Hasher;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget};
use plonky2::util::reducing::ReducingFactorTarget;
use plonky2::with_context;

use crate::all_stark::NUM_TABLES;
use crate::all_stark::{AllStark, Table, NUM_TABLES};
use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;

@@ -41,13 +40,9 @@ use crate::proof::{
TrieRootsTarget,
};
use crate::stark::Stark;
use crate::util::h160_limbs;
use crate::util::{h160_limbs, h256_limbs};
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::vars::StarkEvaluationTargets;
use crate::{
all_stark::{AllStark, Table},
util::h256_limbs,
};

/// Table-wise recursive proofs of an `AllProof`.
pub struct RecursiveAllProof<

@@ -850,8 +845,7 @@ pub(crate) mod tests {
use plonky2::iop::witness::{PartialWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{CircuitConfig, VerifierCircuitData};
use plonky2::plonk::config::Hasher;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use plonky2::plonk::proof::ProofWithPublicInputs;

use crate::all_stark::{AllStark, Table};

@@ -13,8 +13,7 @@ use plonky2_util::ceil_div_usize;
use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

const TRACE_ORACLE_INDEX: usize = 0;
const PERMUTATION_CTL_ORACLE_INDEX: usize = 1;

@@ -1,15 +1,12 @@
use anyhow::{ensure, Result};
use plonky2::field::extension::Extendable;
use plonky2::field::extension::FieldExtension;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues};
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::witness::PartialWitness;
use plonky2::iop::witness::Witness;
use plonky2::iop::witness::{PartialWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::config::Hasher;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::util::transpose;
use plonky2_util::{log2_ceil, log2_strict};

@@ -1,5 +1,4 @@
use crate::extension::Extendable;
use crate::extension::Frobenius;
use crate::extension::{Extendable, Frobenius};
use crate::ops::Square;
use crate::types::Field;

@@ -19,30 +19,31 @@ rand_chacha = ["dep:rand_chacha"]
timing = []

[dependencies]
anyhow = "1.0.40"
derivative = { version = "2.2.0", default-features = false, features = ["use_core"] }
itertools = "0.10.0"
keccak-hash = "0.8.0"
log = "0.4.14"
maybe_rayon = { path = "../maybe_rayon" }
num = { version = "0.4", features = [ "rand" ] }
plonky2_field = { path = "../field" }
plonky2_util = { path = "../util" }
log = "0.4.14"
itertools = "0.10.0"
num = { version = "0.4", features = [ "rand" ] }
rand = { version = "0.8.4", optional = true }
rand_chacha = { version = "0.3.1", optional = true }
maybe_rayon = { path = "../maybe_rayon" }
unroll = "0.1.5"
anyhow = "1.0.40"
serde = { version = "1.0", features = ["derive"] }
serde_cbor = "0.11.1"
keccak-hash = "0.8.0"
static_assertions = "1.1.0"
unroll = "0.1.5"

[dev-dependencies]
rand = "0.8.4"
rand_chacha = "0.3.1"
criterion = "0.3.5"
env_logger = "0.9.0"
tynm = "0.1.6"
structopt = "0.3.26"
num_cpus = "1.13.1"
rand = "0.8.4"
rand_chacha = "0.3.1"
rayon = "1.5.1"
structopt = "0.3.26"
tynm = "0.1.6"

[target.'cfg(not(target_env = "msvc"))'.dev-dependencies]
jemallocator = "0.3.2"

@@ -5,27 +5,26 @@
#![allow(incomplete_features)]
#![feature(generic_const_exprs)]

use core::{num::ParseIntError, ops::RangeInclusive, str::FromStr};
use core::num::ParseIntError;
use core::ops::RangeInclusive;
use core::str::FromStr;

use anyhow::{anyhow, Context as _, Result};
use log::{info, Level, LevelFilter};
use plonky2::{
gates::noop::NoopGate,
hash::hash_types::RichField,
iop::witness::{PartialWitness, Witness},
plonk::{
circuit_builder::CircuitBuilder,
circuit_data::{
CircuitConfig, CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData,
},
config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig},
proof::{CompressedProofWithPublicInputs, ProofWithPublicInputs},
prover::prove,
},
util::timing::TimingTree,
use plonky2::gates::noop::NoopGate;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::witness::{PartialWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::{
CircuitConfig, CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData,
};
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig};
use plonky2::plonk::proof::{CompressedProofWithPublicInputs, ProofWithPublicInputs};
use plonky2::plonk::prover::prove;
use plonky2::util::timing::TimingTree;
use plonky2_field::extension::Extendable;
use rand::{rngs::OsRng, RngCore, SeedableRng};
use rand::rngs::OsRng;
use rand::{RngCore, SeedableRng};
use rand_chacha::ChaCha8Rng;
use structopt::StructOpt;

@@ -14,12 +14,11 @@ use crate::fri::FriParams;
use crate::hash::hash_types::RichField;
use crate::hash::merkle_tree::MerkleTree;
use crate::iop::challenger::Challenger;
use crate::plonk::config::{GenericConfig, Hasher};
use crate::plonk::config::GenericConfig;
use crate::timed;
use crate::util::reducing::ReducingFactor;
use crate::util::reverse_bits;
use crate::util::timing::TimingTree;
use crate::util::transpose;
use crate::util::{reverse_bits, transpose};

/// Four (~64 bit) field elements gives ~128 bit security.
pub const SALT_SIZE: usize = 4;

@@ -45,10 +44,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
cap_height: usize,
timing: &mut TimingTree,
fft_root_table: Option<&FftRootTable<F>>,
) -> Self
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Self {
let coeffs = timed!(
timing,
"IFFT",

@@ -73,10 +69,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
cap_height: usize,
timing: &mut TimingTree,
fft_root_table: Option<&FftRootTable<F>>,
) -> Self
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Self {
let degree = polynomials[0].len();
let lde_values = timed!(
timing,

@@ -169,10 +162,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
challenger: &mut Challenger<F, C::Hasher>,
fri_params: &FriParams,
timing: &mut TimingTree,
) -> FriProof<F, C::Hasher, D>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> FriProof<F, C::Hasher, D> {
assert!(D > 1, "Not implemented for D=1.");
let alpha = challenger.get_extension_challenge::<D>();
let mut alpha = ReducingFactor::new(alpha);

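Note on the signature changes above: each hunk in this group drops the same bound, where [(); C::Hasher::HASH_SIZE]:, which exists only so the nightly generic_const_exprs feature can use the associated const HASH_SIZE as an array length somewhere down the call chain. A minimal, hedged sketch of the before/after shape, using a made-up MyHasher trait rather than any real plonky2 item:

// Stand-in trait for illustration; not the plonky2 `Hasher` trait.
trait MyHasher {
    const HASH_SIZE: usize;
}

// Old shape (nightly only, #![feature(generic_const_exprs)]):
//
//     fn digest_len<H: MyHasher>() -> usize
//     where
//         [(); H::HASH_SIZE]:,
//     {
//         H::HASH_SIZE
//     }

// New shape: the associated const is used as an ordinary value, so the bound
// disappears and the function compiles on stable Rust.
fn digest_len<H: MyHasher>() -> usize {
    H::HASH_SIZE
}

The bound appears to originate in Hasher::hash_or_noop, which is rewritten later in this diff to avoid the const-generic array length.
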
@@ -7,8 +7,7 @@ use serde::{Deserialize, Serialize};

use crate::fri::FriParams;
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::hash::hash_types::MerkleCapTarget;
use crate::hash::hash_types::RichField;
use crate::hash::hash_types::{MerkleCapTarget, RichField};
use crate::hash::merkle_proofs::{MerkleProof, MerkleProofTarget};
use crate::hash::merkle_tree::MerkleCap;
use crate::hash::path_compression::{compress_merkle_proofs, decompress_merkle_proofs};

@@ -245,10 +244,7 @@ impl<F: RichField + Extendable<D>, H: Hasher<F>, const D: usize> CompressedFriPr
challenges: &ProofChallenges<F, D>,
fri_inferred_elements: FriInferredElements<F, D>,
params: &FriParams,
) -> FriProof<F, H, D>
where
[(); H::HASH_SIZE]:,
{
) -> FriProof<F, H, D> {
let CompressedFriProof {
commit_phase_merkle_caps,
query_round_proofs,

@@ -24,10 +24,7 @@ pub fn fri_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const
challenger: &mut Challenger<F, C::Hasher>,
fri_params: &FriParams,
timing: &mut TimingTree,
) -> FriProof<F, C::Hasher, D>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> FriProof<F, C::Hasher, D> {
let n = lde_polynomial_values.len();
assert_eq!(lde_polynomial_coeffs.len(), n);

@@ -63,18 +60,17 @@ where
}
}

type FriCommitedTrees<F, C, const D: usize> = (
Vec<MerkleTree<F, <C as GenericConfig<D>>::Hasher>>,
PolynomialCoeffs<<F as Extendable<D>>::Extension>,
);

fn fri_committed_trees<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
mut coeffs: PolynomialCoeffs<F::Extension>,
mut values: PolynomialValues<F::Extension>,
challenger: &mut Challenger<F, C::Hasher>,
fri_params: &FriParams,
) -> (
Vec<MerkleTree<F, C::Hasher>>,
PolynomialCoeffs<F::Extension>,
)
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> FriCommitedTrees<F, C, D> {
let mut trees = Vec::new();

let mut shift = F::MULTIPLICATIVE_GROUP_GENERATOR;

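The fri_committed_trees hunk above also swaps a long inline tuple return type for the FriCommitedTrees alias (a Proof alias is introduced the same way in the recursion tests further down). A small sketch of the pattern with placeholder types, not the real plonky2 definitions:

// Placeholder types for illustration only.
struct Tree;
struct Coeffs;

// Name the tuple once instead of spelling it out in every signature.
type CommittedTrees = (Vec<Tree>, Coeffs);

fn commit(rounds: usize) -> CommittedTrees {
    // Real code would build one Merkle tree per reduction round.
    ((0..rounds).map(|_| Tree).collect(), Coeffs)
}

Besides shortening the signature, this keeps clippy's type_complexity lint quiet, whose blanket allow is removed from lib.rs later in this diff.
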
@@ -13,8 +13,7 @@ use crate::gates::high_degree_interpolation::HighDegreeInterpolationGate;
use crate::gates::interpolation::InterpolationGate;
use crate::gates::low_degree_interpolation::LowDegreeInterpolationGate;
use crate::gates::random_access::RandomAccessGate;
use crate::hash::hash_types::MerkleCapTarget;
use crate::hash::hash_types::RichField;
use crate::hash::hash_types::{MerkleCapTarget, RichField};
use crate::iop::ext_target::{flatten_target, ExtensionTarget};
use crate::iop::target::{BoolTarget, Target};
use crate::plonk::circuit_builder::CircuitBuilder;

@@ -57,17 +57,18 @@ pub(crate) fn fri_verify_proof_of_work<F: RichField + Extendable<D>, const D: us
Ok(())
}

pub fn verify_fri_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
pub fn verify_fri_proof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
>(
instance: &FriInstanceInfo<F, D>,
openings: &FriOpenings<F, D>,
challenges: &FriChallenges<F, D>,
initial_merkle_caps: &[MerkleCap<F, C::Hasher>],
proof: &FriProof<F, C::Hasher, D>,
params: &FriParams,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
validate_fri_proof_shape::<F, C, D>(proof, instance, params)?;

// Size of the LDE domain.

@@ -109,10 +110,7 @@ fn fri_verify_initial_proof<F: RichField, H: Hasher<F>>(
x_index: usize,
proof: &FriInitialTreeProof<F, H>,
initial_merkle_caps: &[MerkleCap<F, H>],
) -> Result<()>
where
[(); H::HASH_SIZE]:,
{
) -> Result<()> {
for ((evals, merkle_proof), cap) in proof.evals_proofs.iter().zip(initial_merkle_caps) {
verify_merkle_proof_to_cap::<F, H>(evals.clone(), x_index, cap, merkle_proof)?;
}

@@ -177,10 +175,7 @@ fn fri_verifier_query_round<
n: usize,
round_proof: &FriQueryRound<F, C::Hasher, D>,
params: &FriParams,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
fri_verify_initial_proof::<F, C::Hasher>(
x_index,
&round_proof.initial_trees_proof,

@@ -1,7 +1,6 @@
use core::borrow::Borrow;

use plonky2_field::extension::FieldExtension;
use plonky2_field::extension::{Extendable, OEF};
use plonky2_field::extension::{Extendable, FieldExtension, OEF};
use plonky2_field::types::{Field, Field64};
use plonky2_util::bits_u64;

@@ -14,6 +14,10 @@ impl<const D: usize> PolynomialCoeffsExtTarget<D> {
self.0.len()
}

pub fn is_empty(&self) -> bool {
self.len() == 0
}

pub fn eval_scalar<F: RichField + Extendable<D>>(
&self,
builder: &mut CircuitBuilder<F, D>,

@@ -1,7 +1,6 @@
use core::ops::Range;

use plonky2_field::extension::Extendable;
use plonky2_field::extension::FieldExtension;
use plonky2_field::extension::{Extendable, FieldExtension};

use crate::gates::gate::Gate;
use crate::gates::util::StridedConstraintConsumer;

@@ -5,12 +5,11 @@ use plonky2_field::types::Field;
use plonky2_util::log2_ceil;

use crate::gates::gate::Gate;
use crate::hash::hash_types::HashOut;
use crate::hash::hash_types::RichField;
use crate::hash::hash_types::{HashOut, RichField};
use crate::iop::witness::{PartialWitness, Witness};
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::circuit_data::CircuitConfig;
use crate::plonk::config::{GenericConfig, Hasher};
use crate::plonk::config::GenericConfig;
use crate::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBaseBatch};
use crate::plonk::verifier::verify;
use crate::util::transpose;

@@ -92,10 +91,7 @@ pub fn test_eval_fns<
const D: usize,
>(
gate: G,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
// Test that `eval_unfiltered` and `eval_unfiltered_base` are coherent.
let wires_base = F::rand_vec(gate.num_wires());
let constants_base = F::rand_vec(gate.num_constants());

@@ -1,7 +1,6 @@
use core::ops::Range;

use plonky2_field::extension::Extendable;
use plonky2_field::extension::FieldExtension;
use plonky2_field::extension::{Extendable, FieldExtension};

use crate::gates::gate::Gate;
use crate::gates::util::StridedConstraintConsumer;

@@ -23,16 +23,13 @@ use crate::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase};
/// This also has some extra features to make it suitable for efficiently verifying Merkle proofs.
/// It has a flag which can be used to swap the first four inputs with the next four, for ordering
/// sibling digests.
#[derive(Debug)]
pub struct PoseidonGate<F: RichField + Extendable<D>, const D: usize> {
_phantom: PhantomData<F>,
}
#[derive(derivative::Derivative)]
#[derivative(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct PoseidonGate<F: RichField + Extendable<D>, const D: usize>(PhantomData<F>);

impl<F: RichField + Extendable<D>, const D: usize> PoseidonGate<F, D> {
pub fn new() -> Self {
PoseidonGate {
_phantom: PhantomData,
}
Self(PhantomData)
}

/// The wire index for the `i`th input to the permutation.

@@ -2,8 +2,7 @@ use core::marker::PhantomData;
use core::ops::Range;

use plonky2_field::extension::algebra::ExtensionAlgebra;
use plonky2_field::extension::Extendable;
use plonky2_field::extension::FieldExtension;
use plonky2_field::extension::{Extendable, FieldExtension};
use plonky2_field::types::Field;

use crate::gates::gate::Gate;

@@ -18,16 +17,14 @@ use crate::iop::witness::{PartitionWitness, Witness};
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase};

#[derive(Debug)]
pub struct PoseidonMdsGate<F: RichField + Extendable<D> + Poseidon, const D: usize> {
_phantom: PhantomData<F>,
}
/// Poseidon MDS Gate
#[derive(derivative::Derivative)]
#[derivative(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct PoseidonMdsGate<F: RichField + Extendable<D> + Poseidon, const D: usize>(PhantomData<F>);

impl<F: RichField + Extendable<D> + Poseidon, const D: usize> PoseidonMdsGate<F, D> {
pub fn new() -> Self {
PoseidonMdsGate {
_phantom: PhantomData,
}
Self(PhantomData)
}

pub fn wires_input(i: usize) -> Range<usize> {

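Both Poseidon gate hunks above replace a hand-written #[derive(Debug)] struct with a _phantom field by a tuple struct whose impls come from the derivative crate (added to Cargo.toml earlier in this diff). A hedged sketch of the pattern, with a hypothetical ExampleGate standing in for the real gates:

use core::marker::PhantomData;

// `derivative` re-derives the standard traits and, unlike the built-in derives,
// lets the generated bounds be customized (compare `Default(bound = "")` on
// `PartialWitness` further down in this diff).
#[derive(derivative::Derivative)]
#[derivative(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct ExampleGate<F>(PhantomData<F>);

impl<F> ExampleGate<F> {
    pub fn new() -> Self {
        Self(PhantomData)
    }
}
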
@@ -1,7 +1,6 @@
use core::ops::Range;

use plonky2_field::extension::Extendable;
use plonky2_field::extension::FieldExtension;
use plonky2_field::extension::{Extendable, FieldExtension};

use crate::gates::gate::Gate;
use crate::gates::util::StridedConstraintConsumer;

@@ -1,7 +1,6 @@
use core::ops::Range;

use plonky2_field::extension::Extendable;
use plonky2_field::extension::FieldExtension;
use plonky2_field::extension::{Extendable, FieldExtension};

use crate::gates::gate::Gate;
use crate::gates::util::StridedConstraintConsumer;

@@ -28,7 +28,7 @@ impl SelectorsInfo {
/// `|G| + max_{g in G} g.degree() <= max_degree`. These groups are constructed greedily from
/// the list of gates sorted by degree.
/// We build a selector polynomial `S_i` for each group `G_i`, with
/// S_i[j] =
/// S_i\[j\] =
///     if j-th row gate=g_k in G_i
///         k
///     else

@@ -2,8 +2,7 @@

use plonky2_field::extension::Extendable;

use crate::hash::hash_types::RichField;
use crate::hash::hash_types::{HashOut, HashOutTarget};
use crate::hash::hash_types::{HashOut, HashOutTarget, RichField};
use crate::iop::target::Target;
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::config::AlgebraicHasher;

@@ -2,8 +2,7 @@ use anyhow::{ensure, Result};
use plonky2_field::extension::Extendable;
use serde::{Deserialize, Serialize};

use crate::hash::hash_types::RichField;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget};
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::SPONGE_WIDTH;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::{BoolTarget, Target};

@@ -21,6 +20,10 @@ impl<F: RichField, H: Hasher<F>> MerkleProof<F, H> {
pub fn len(&self) -> usize {
self.siblings.len()
}

pub fn is_empty(&self) -> bool {
self.len() == 0
}
}

#[derive(Clone, Debug)]

@@ -36,10 +39,7 @@ pub fn verify_merkle_proof<F: RichField, H: Hasher<F>>(
leaf_index: usize,
merkle_root: H::Hash,
proof: &MerkleProof<F, H>,
) -> Result<()>
where
[(); H::HASH_SIZE]:,
{
) -> Result<()> {
let merkle_cap = MerkleCap(vec![merkle_root]);
verify_merkle_proof_to_cap(leaf_data, leaf_index, &merkle_cap, proof)
}

@@ -51,10 +51,7 @@ pub fn verify_merkle_proof_to_cap<F: RichField, H: Hasher<F>>(
leaf_index: usize,
merkle_cap: &MerkleCap<F, H>,
proof: &MerkleProof<F, H>,
) -> Result<()>
where
[(); H::HASH_SIZE]:,
{
) -> Result<()> {
let mut index = leaf_index;
let mut current_digest = H::hash_or_noop(&leaf_data);
for &sibling_digest in proof.siblings.iter() {

@@ -7,8 +7,7 @@ use serde::{Deserialize, Serialize};

use crate::hash::hash_types::RichField;
use crate::hash::merkle_proofs::MerkleProof;
use crate::plonk::config::GenericHashOut;
use crate::plonk::config::Hasher;
use crate::plonk::config::{GenericHashOut, Hasher};

/// The Merkle cap of height `h` of a Merkle tree is the `h`-th layer (from the root) of the tree.
/// It can be used in place of the root to verify Merkle paths, which are `h` elements shorter.

@@ -21,6 +20,10 @@ impl<F: RichField, H: Hasher<F>> MerkleCap<F, H> {
self.0.len()
}

pub fn is_empty(&self) -> bool {
self.len() == 0
}

pub fn height(&self) -> usize {
log2_strict(self.len())
}

@@ -64,10 +67,7 @@ fn capacity_up_to_mut<T>(v: &mut Vec<T>, len: usize) -> &mut [MaybeUninit<T>] {
fn fill_subtree<F: RichField, H: Hasher<F>>(
digests_buf: &mut [MaybeUninit<H::Hash>],
leaves: &[Vec<F>],
) -> H::Hash
where
[(); H::HASH_SIZE]:,
{
) -> H::Hash {
assert_eq!(leaves.len(), digests_buf.len() / 2 + 1);
if digests_buf.is_empty() {
H::hash_or_noop(&leaves[0])

@@ -98,9 +98,7 @@ fn fill_digests_buf<F: RichField, H: Hasher<F>>(
cap_buf: &mut [MaybeUninit<H::Hash>],
leaves: &[Vec<F>],
cap_height: usize,
) where
[(); H::HASH_SIZE]:,
{
) {
// Special case of a tree that's all cap. The usual case will panic because we'll try to split
// an empty slice into chunks of `0`. (We would not need this if there was a way to split into
// `blah` chunks as opposed to chunks _of_ `blah`.)

@@ -132,10 +130,7 @@ fn fill_digests_buf<F: RichField, H: Hasher<F>>(
}

impl<F: RichField, H: Hasher<F>> MerkleTree<F, H> {
pub fn new(leaves: Vec<Vec<F>>, cap_height: usize) -> Self
where
[(); H::HASH_SIZE]:,
{
pub fn new(leaves: Vec<Vec<F>>, cap_height: usize) -> Self {
let log2_leaves_len = log2_strict(leaves.len());
assert!(
cap_height <= log2_leaves_len,

@@ -222,13 +217,14 @@ mod tests {
(0..n).map(|_| F::rand_vec(k)).collect()
}

fn verify_all_leaves<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
fn verify_all_leaves<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
>(
leaves: Vec<Vec<F>>,
cap_height: usize,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
let tree = MerkleTree::<F, C::Hasher>::new(leaves.clone(), cap_height);
for (i, leaf) in leaves.into_iter().enumerate() {
let proof = tree.prove(i);

@@ -57,10 +57,7 @@ pub(crate) fn decompress_merkle_proofs<F: RichField, H: Hasher<F>>(
compressed_proofs: &[MerkleProof<F, H>],
height: usize,
cap_height: usize,
) -> Vec<MerkleProof<F, H>>
where
[(); H::HASH_SIZE]:,
{
) -> Vec<MerkleProof<F, H>> {
let num_leaves = 1 << height;
let compressed_proofs = compressed_proofs.to_vec();
let mut decompressed_proofs = Vec::with_capacity(compressed_proofs.len());

@@ -1,5 +1,5 @@
//! Implementation of the Poseidon hash function, as described in
//! https://eprint.iacr.org/2019/458.pdf
//! <https://eprint.iacr.org/2019/458.pdf>

use plonky2_field::extension::{Extendable, FieldExtension};
use plonky2_field::types::{Field, PrimeField64};

@@ -272,8 +272,7 @@ impl Poseidon for GoldilocksField {
#[cfg(test)]
mod tests {
use plonky2_field::goldilocks_field::GoldilocksField as F;
use plonky2_field::types::Field;
use plonky2_field::types::PrimeField64;
use plonky2_field::types::{Field, PrimeField64};

use crate::hash::poseidon::test_helpers::{check_consistency, check_test_vectors};

@@ -3,8 +3,7 @@ use core::marker::PhantomData;

use plonky2_field::extension::{Extendable, FieldExtension};

use crate::hash::hash_types::RichField;
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget};
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::{PlonkyPermutation, SPONGE_RATE, SPONGE_WIDTH};
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;

@@ -170,6 +169,7 @@ pub struct RecursiveChallenger<F: RichField + Extendable<D>, H: AlgebraicHasher<
sponge_state: [Target; SPONGE_WIDTH],
input_buffer: Vec<Target>,
output_buffer: Vec<Target>,
__: PhantomData<(F, H)>,
}

impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>

@@ -177,18 +177,20 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
{
pub fn new(builder: &mut CircuitBuilder<F, D>) -> Self {
let zero = builder.zero();
RecursiveChallenger {
Self {
sponge_state: [zero; SPONGE_WIDTH],
input_buffer: Vec::new(),
output_buffer: Vec::new(),
__: PhantomData,
}
}

pub fn from_state(sponge_state: [Target; SPONGE_WIDTH]) -> Self {
RecursiveChallenger {
Self {
sponge_state,
input_buffer: vec![],
output_buffer: vec![],
__: PhantomData,
}
}

@@ -6,9 +6,7 @@ use plonky2_field::types::Field;

use crate::fri::structure::{FriOpenings, FriOpeningsTarget};
use crate::fri::witness_util::set_fri_proof_target;
use crate::hash::hash_types::HashOutTarget;
use crate::hash::hash_types::RichField;
use crate::hash::hash_types::{HashOut, MerkleCapTarget};
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::{BoolTarget, Target};

@@ -250,14 +248,15 @@ impl<F: Field> MatrixWitness<F> {
}
}

#[derive(Clone, Debug)]
#[derive(derivative::Derivative)]
#[derivative(Clone, Debug, Default(bound = ""))]
pub struct PartialWitness<F: Field> {
pub(crate) target_values: HashMap<Target, F>,
}

impl<F: Field> PartialWitness<F> {
pub fn new() -> Self {
PartialWitness {
Self {
target_values: HashMap::new(),
}
}

@@ -1,14 +1,5 @@
#![allow(incomplete_features)]
#![allow(const_evaluatable_unchecked)]
#![allow(clippy::new_without_default)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::type_complexity)]
#![allow(clippy::len_without_is_empty)]
#![allow(clippy::needless_range_loop)]
#![allow(clippy::return_self_not_must_use)]
#![feature(generic_const_exprs)]
#![feature(specialization)]
#![feature(stdsimd)]

pub use plonky2_field as field;

@@ -686,10 +686,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}

/// Builds a "full circuit", with both prover and verifier data.
pub fn build<C: GenericConfig<D, F = F>>(mut self) -> CircuitData<F, C, D>
where
[(); C::Hasher::HASH_SIZE]:,
{
pub fn build<C: GenericConfig<D, F = F>>(mut self) -> CircuitData<F, C, D> {
let mut timing = TimingTree::new("preprocess", Level::Trace);
let start = Instant::now();
let rate_bits = self.config.fri_config.rate_bits;

@@ -887,20 +884,14 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
}

/// Builds a "prover circuit", with data needed to generate proofs but not verify them.
pub fn build_prover<C: GenericConfig<D, F = F>>(self) -> ProverCircuitData<F, C, D>
where
[(); C::Hasher::HASH_SIZE]:,
{
pub fn build_prover<C: GenericConfig<D, F = F>>(self) -> ProverCircuitData<F, C, D> {
// TODO: Can skip parts of this.
let circuit_data = self.build();
circuit_data.prover_data()
}

/// Builds a "verifier circuit", with data needed to verify proofs but not generate them.
pub fn build_verifier<C: GenericConfig<D, F = F>>(self) -> VerifierCircuitData<F, C, D>
where
[(); C::Hasher::HASH_SIZE]:,
{
pub fn build_verifier<C: GenericConfig<D, F = F>>(self) -> VerifierCircuitData<F, C, D> {
// TODO: Can skip parts of this.
let circuit_data = self.build();
circuit_data.verifier_data()

@@ -112,10 +112,7 @@ pub struct CircuitData<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>,
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
CircuitData<F, C, D>
{
pub fn prove(&self, inputs: PartialWitness<F>) -> Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::Hasher::HASH_SIZE]:,
{
pub fn prove(&self, inputs: PartialWitness<F>) -> Result<ProofWithPublicInputs<F, C, D>> {
prove(
&self.prover_only,
&self.common,

@@ -124,20 +121,14 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
)
}

pub fn verify(&self, proof_with_pis: ProofWithPublicInputs<F, C, D>) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
pub fn verify(&self, proof_with_pis: ProofWithPublicInputs<F, C, D>) -> Result<()> {
verify(proof_with_pis, &self.verifier_only, &self.common)
}

pub fn verify_compressed(
&self,
compressed_proof_with_pis: CompressedProofWithPublicInputs<F, C, D>,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
compressed_proof_with_pis.verify(&self.verifier_only, &self.common)
}

@@ -151,10 +142,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
pub fn decompress(
&self,
proof: CompressedProofWithPublicInputs<F, C, D>,
) -> Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<ProofWithPublicInputs<F, C, D>> {
proof.decompress(&self.verifier_only.circuit_digest, &self.common)
}

@@ -202,10 +190,7 @@ pub struct ProverCircuitData<
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
ProverCircuitData<F, C, D>
{
pub fn prove(&self, inputs: PartialWitness<F>) -> Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::Hasher::HASH_SIZE]:,
{
pub fn prove(&self, inputs: PartialWitness<F>) -> Result<ProofWithPublicInputs<F, C, D>> {
prove(
&self.prover_only,
&self.common,

@@ -229,20 +214,14 @@ pub struct VerifierCircuitData<
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
VerifierCircuitData<F, C, D>
{
pub fn verify(&self, proof_with_pis: ProofWithPublicInputs<F, C, D>) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
pub fn verify(&self, proof_with_pis: ProofWithPublicInputs<F, C, D>) -> Result<()> {
verify(proof_with_pis, &self.verifier_only, &self.common)
}

pub fn verify_compressed(
&self,
compressed_proof_with_pis: CompressedProofWithPublicInputs<F, C, D>,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
compressed_proof_with_pis.verify(&self.verifier_only, &self.common)
}
}

@@ -3,10 +3,10 @@ use core::fmt::Debug;
use plonky2_field::extension::quadratic::QuadraticExtension;
use plonky2_field::extension::{Extendable, FieldExtension};
use plonky2_field::goldilocks_field::GoldilocksField;
use serde::{de::DeserializeOwned, Serialize};
use serde::de::DeserializeOwned;
use serde::Serialize;

use crate::hash::hash_types::HashOut;
use crate::hash::hash_types::RichField;
use crate::hash::hash_types::{HashOut, RichField};
use crate::hash::hashing::{PlonkyPermutation, SPONGE_WIDTH};
use crate::hash::keccak::KeccakHash;
use crate::hash::poseidon::PoseidonHash;

@@ -26,6 +26,8 @@ pub trait GenericHashOut<F: RichField>:
pub trait Hasher<F: RichField>: Sized + Clone + Debug + Eq + PartialEq {
/// Size of `Hash` in bytes.
const HASH_SIZE: usize;

/// Hash Output
type Hash: GenericHashOut<F>;

/// Permutation used in the sponge construction.

@@ -48,12 +50,9 @@ pub trait Hasher<F: RichField>: Sized + Clone + Debug + Eq + PartialEq {

/// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a
/// no-op.
fn hash_or_noop(inputs: &[F]) -> Self::Hash
where
[(); Self::HASH_SIZE]:,
{
fn hash_or_noop(inputs: &[F]) -> Self::Hash {
if inputs.len() <= 4 {
let mut inputs_bytes = [0u8; Self::HASH_SIZE];
let mut inputs_bytes = vec![0u8; Self::HASH_SIZE];
for i in 0..inputs.len() {
inputs_bytes[i * 8..(i + 1) * 8]
.copy_from_slice(&inputs[i].to_canonical_u64().to_le_bytes());

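The hash_or_noop hunk above is where the HASH_SIZE bound removed throughout this diff actually bites: [0u8; Self::HASH_SIZE] uses an associated const as an array length, which stable Rust rejects, while vec![0u8; Self::HASH_SIZE] takes a runtime length. A hedged sketch of just that buffer change, with a stand-in trait rather than the real Hasher:

// Stand-in trait for illustration; not the plonky2 `Hasher` trait.
trait Digest {
    const HASH_SIZE: usize;
}

fn zeroed_buffer<H: Digest>() -> Vec<u8> {
    // A stack array `[0u8; H::HASH_SIZE]` would need #![feature(generic_const_exprs)];
    // a Vec of the same length compiles on stable at the cost of one small heap allocation.
    vec![0u8; H::HASH_SIZE]
}

The visible trade-off in the diff is that the short-input fast path (inputs.len() <= 4) now allocates.
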
@@ -7,7 +7,7 @@ use plonky2_field::types::Field;
use crate::iop::target::Target;
use crate::iop::wire::Wire;

/// Disjoint Set Forest data-structure following https://en.wikipedia.org/wiki/Disjoint-set_data_structure.
/// Disjoint Set Forest data-structure following <https://en.wikipedia.org/wiki/Disjoint-set_data_structure>.
pub struct Forest {
/// A map of parent pointers, stored as indices.
pub(crate) parents: Vec<usize>,

@@ -44,7 +44,7 @@ impl Forest {
self.parents.push(index);
}

/// Path compression method, see https://en.wikipedia.org/wiki/Disjoint-set_data_structure#Finding_set_representatives.
/// Path compression method, see <https://en.wikipedia.org/wiki/Disjoint-set_data_structure#Finding_set_representatives>.
pub fn find(&mut self, mut x_index: usize) -> usize {
// Note: We avoid recursion here since the chains can be long, causing stack overflows.

@@ -139,10 +139,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
challenges: &ProofChallenges<F, D>,
fri_inferred_elements: FriInferredElements<F, D>,
params: &FriParams,
) -> Proof<F, C, D>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Proof<F, C, D> {
let CompressedProof {
wires_cap,
plonk_zs_partial_products_cap,

@@ -179,10 +176,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
self,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F>>::Hash,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> anyhow::Result<ProofWithPublicInputs<F, C, D>> {
let challenges =
self.get_challenges(self.get_public_inputs_hash(), circuit_digest, common_data)?;
let fri_inferred_elements = self.get_inferred_elements(&challenges, common_data);

@@ -199,10 +193,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
self,
verifier_data: &VerifierOnlyCircuitData<C, D>,
common_data: &CommonCircuitData<F, D>,
) -> anyhow::Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> anyhow::Result<()> {
ensure!(
self.public_inputs.len() == common_data.num_public_inputs,
"Number of public inputs doesn't match circuit data."

@@ -1,7 +1,6 @@
use core::mem::swap;

use anyhow::ensure;
use anyhow::Result;
use anyhow::{ensure, Result};
use maybe_rayon::*;
use plonky2_field::extension::Extendable;
use plonky2_field::polynomial::{PolynomialCoeffs, PolynomialValues};

@@ -17,8 +16,7 @@ use crate::iop::witness::{MatrixWitness, PartialWitness, Witness};
use crate::plonk::circuit_data::{CommonCircuitData, ProverOnlyCircuitData};
use crate::plonk::config::{GenericConfig, Hasher};
use crate::plonk::plonk_common::PlonkOracle;
use crate::plonk::proof::OpeningSet;
use crate::plonk::proof::{Proof, ProofWithPublicInputs};
use crate::plonk::proof::{OpeningSet, Proof, ProofWithPublicInputs};
use crate::plonk::vanishing_poly::eval_vanishing_poly_base_batch;
use crate::plonk::vars::EvaluationVarsBaseBatch;
use crate::timed;

@@ -31,10 +29,7 @@ pub fn prove<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D:
common_data: &CommonCircuitData<F, D>,
inputs: PartialWitness<F>,
timing: &mut TimingTree,
) -> Result<ProofWithPublicInputs<F, C, D>>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<ProofWithPublicInputs<F, C, D>> {
let config = &common_data.config;
let num_challenges = config.num_challenges;
let quotient_degree = common_data.quotient_degree();

@@ -3,7 +3,7 @@ use plonky2_field::extension::Extendable;

use crate::hash::hash_types::RichField;
use crate::plonk::circuit_data::CommonCircuitData;
use crate::plonk::config::{GenericConfig, Hasher};
use crate::plonk::config::GenericConfig;
use crate::plonk::proof::{OpeningSet, Proof, ProofWithPublicInputs};

pub(crate) fn validate_proof_with_pis_shape<F, C, const D: usize>(

@@ -13,20 +13,16 @@ pub(crate) fn validate_proof_with_pis_shape<F, C, const D: usize>(
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::Hasher::HASH_SIZE]:,
{
let ProofWithPublicInputs {
proof,
public_inputs,
} = proof_with_pis;

validate_proof_shape(proof, common_data)?;

ensure!(
public_inputs.len() == common_data.num_public_inputs,
"Number of public inputs doesn't match circuit data."
);

Ok(())
}

@@ -37,7 +33,6 @@ fn validate_proof_shape<F, C, const D: usize>(
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
[(); C::Hasher::HASH_SIZE]:,
{
let config = &common_data.config;
let Proof {

@@ -49,7 +44,6 @@ where
// validate_fri_proof_shape), so we ignore it here.
opening_proof: _,
} = proof;

let OpeningSet {
constants,
plonk_sigmas,

@@ -59,12 +53,10 @@ where
partial_products,
quotient_polys,
} = openings;

let cap_height = common_data.fri_params.config.cap_height;
ensure!(wires_cap.height() == cap_height);
ensure!(plonk_zs_partial_products_cap.height() == cap_height);
ensure!(quotient_polys_cap.height() == cap_height);

ensure!(constants.len() == common_data.num_constants);
ensure!(plonk_sigmas.len() == config.num_routed_wires);
ensure!(wires.len() == config.num_wires);

@@ -72,6 +64,5 @@ where
ensure!(plonk_zs_next.len() == config.num_challenges);
ensure!(partial_products.len() == config.num_challenges * common_data.num_partial_products);
ensure!(quotient_polys.len() == common_data.num_quotient_polys());

Ok(())
}

@@ -85,6 +85,10 @@ impl<'a, F: Field> EvaluationVarsBaseBatch<'a, F> {
self.batch_size
}

pub fn is_empty(&self) -> bool {
self.len() == 0
}

pub fn view(&self, index: usize) -> EvaluationVarsBase<'a, F> {
// We cannot implement `Index` as `EvaluationVarsBase` is a struct, not a reference.
assert!(index < self.len());

@@ -16,10 +16,7 @@ pub(crate) fn verify<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, c
proof_with_pis: ProofWithPublicInputs<F, C, D>,
verifier_data: &VerifierOnlyCircuitData<C, D>,
common_data: &CommonCircuitData<F, D>,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
validate_proof_with_pis_shape(&proof_with_pis, common_data)?;

let public_inputs_hash = proof_with_pis.get_public_inputs_hash();

@@ -48,10 +45,7 @@ pub(crate) fn verify_with_challenges<
challenges: ProofChallenges<F, D>,
verifier_data: &VerifierOnlyCircuitData<C, D>,
common_data: &CommonCircuitData<F, D>,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
let local_constants = &proof.openings.constants;
let local_wires = &proof.openings.wires;
let vars = EvaluationVars {

@@ -17,7 +17,7 @@ use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::circuit_data::{
CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData,
};
use crate::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use crate::plonk::config::{AlgebraicHasher, GenericConfig};
use crate::plonk::proof::{
OpeningSetTarget, ProofTarget, ProofWithPublicInputs, ProofWithPublicInputsTarget,
};

@@ -33,10 +33,7 @@ pub(crate) fn dummy_proof<
) -> Result<(
ProofWithPublicInputs<F, C, D>,
VerifierOnlyCircuitData<C, D>,
)>
where
[(); C::Hasher::HASH_SIZE]:,
{
)> {
let config = common_data.config.clone();

let mut pw = PartialWitness::new();

@@ -12,7 +12,6 @@ use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::circuit_data::{
CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData,
};
use crate::plonk::config::Hasher;
use crate::plonk::config::{AlgebraicHasher, GenericConfig};
use crate::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget};
use crate::recursion::conditional_recursive_verifier::dummy_proof;

@@ -100,7 +99,6 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
) -> Result<CyclicRecursionTarget<D>>
where
C::Hasher: AlgebraicHasher<F>,
[(); C::Hasher::HASH_SIZE]:,
{
if self.verifier_data_public_input.is_none() {
self.add_verifier_data_public_input();

@@ -179,7 +177,6 @@ pub fn set_cyclic_recursion_data_target<
) -> Result<()>
where
C::Hasher: AlgebraicHasher<F>,
[(); C::Hasher::HASH_SIZE]:,
{
if let Some(proof) = cyclic_recursion_data.proof {
pw.set_bool_target(cyclic_recursion_data_target.base_case, false);

@@ -280,7 +277,6 @@ mod tests {
>() -> CommonCircuitData<F, D>
where
C::Hasher: AlgebraicHasher<F>,
[(); C::Hasher::HASH_SIZE]:,
{
let config = CircuitConfig::standard_recursion_config();
let builder = CircuitBuilder::<F, D>::new(config);

@@ -323,18 +323,17 @@ mod tests {
Ok(())
}

type Proof<F, C, const D: usize> = (
ProofWithPublicInputs<F, C, D>,
VerifierOnlyCircuitData<C, D>,
CommonCircuitData<F, D>,
);

/// Creates a dummy proof which should have roughly `num_dummy_gates` gates.
fn dummy_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
config: &CircuitConfig,
num_dummy_gates: u64,
) -> Result<(
ProofWithPublicInputs<F, C, D>,
VerifierOnlyCircuitData<C, D>,
CommonCircuitData<F, D>,
)>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<Proof<F, C, D>> {
let mut builder = CircuitBuilder::<F, D>::new(config.clone());
for _ in 0..num_dummy_gates {
builder.add_gate(NoopGate, vec![]);

@@ -361,14 +360,9 @@ mod tests {
min_degree_bits: Option<usize>,
print_gate_counts: bool,
print_timing: bool,
) -> Result<(
ProofWithPublicInputs<F, C, D>,
VerifierOnlyCircuitData<C, D>,
CommonCircuitData<F, D>,
)>
) -> Result<Proof<F, C, D>>
where
InnerC::Hasher: AlgebraicHasher<F>,
[(); C::Hasher::HASH_SIZE]:,
{
let mut builder = CircuitBuilder::<F, D>::new(config.clone());
let mut pw = PartialWitness::new();

@@ -423,10 +417,7 @@ mod tests {
proof: &ProofWithPublicInputs<F, C, D>,
vd: &VerifierOnlyCircuitData<C, D>,
cd: &CommonCircuitData<F, D>,
) -> Result<()>
where
[(); C::Hasher::HASH_SIZE]:,
{
) -> Result<()> {
let proof_bytes = proof.to_bytes()?;
info!("Proof length: {} bytes", proof_bytes.len());
let proof_from_bytes = ProofWithPublicInputs::from_bytes(proof_bytes, cd)?;

@ -19,7 +19,7 @@ use crate::plonk::circuit_builder::CircuitBuilder;
|
||||
/// scale the second one by `a^(r-1-k)`, and add them up.
|
||||
/// This struct abstract away these operations by implementing Horner's method and keeping track
|
||||
/// of the number of multiplications by `a` to compute the scaling factor.
|
||||
/// See https://github.com/mir-protocol/plonky2/pull/69 for more details and discussions.
|
||||
/// See <https://github.com/mir-protocol/plonky2/pull/69> for more details and discussions.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ReducingFactor<F: Field> {
|
||||
base: F,
|
||||
|
||||
@ -1,6 +1,5 @@
use std::collections::HashMap;
use std::io::Cursor;
use std::io::{Read, Result, Write};
use std::io::{Cursor, Read, Result, Write};

use plonky2_field::extension::{Extendable, FieldExtension};
use plonky2_field::polynomial::PolynomialCoeffs;
@ -32,6 +31,10 @@ impl Buffer {
self.0.get_ref().len()
}

pub fn is_empty(&self) -> bool {
self.len() == 0
}

pub fn bytes(self) -> Vec<u8> {
self.0.into_inner()
}
@ -60,9 +63,7 @@ impl Buffer {
fn read_field<F: Field64>(&mut self) -> Result<F> {
let mut buf = [0; std::mem::size_of::<u64>()];
self.0.read_exact(&mut buf)?;
Ok(F::from_canonical_u64(u64::from_le_bytes(
buf.try_into().unwrap(),
)))
Ok(F::from_canonical_u64(u64::from_le_bytes(buf)))
}

fn write_field_ext<F: RichField + Extendable<D>, const D: usize>(

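The `read_field` change works because `buf` is already a fixed-size `[u8; 8]`: `read_exact` fills the array in place and `u64::from_le_bytes` takes `[u8; 8]` directly, so the fallible `try_into().unwrap()` slice-to-array round trip was redundant. A self-contained sketch of the same pattern; `MiniBuffer` is an illustrative stand-in, not plonky2's `Buffer`:

```rust
use std::io::{Cursor, Read, Result};

struct MiniBuffer(Cursor<Vec<u8>>);

impl MiniBuffer {
    fn read_u64(&mut self) -> Result<u64> {
        let mut buf = [0u8; std::mem::size_of::<u64>()];
        self.0.read_exact(&mut buf)?;
        // `buf` already has type [u8; 8], so from_le_bytes accepts it as-is.
        Ok(u64::from_le_bytes(buf))
    }
}

fn main() -> Result<()> {
    let mut b = MiniBuffer(Cursor::new(42u64.to_le_bytes().to_vec()));
    assert_eq!(b.read_u64()?, 42);
    Ok(())
}
```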
@ -123,6 +123,11 @@ impl<'a, P: PackedField> PackedStridedView<'a, P> {
pub fn len(&self) -> usize {
self.length
}

#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
}

impl<'a, P: PackedField> Index<usize> for PackedStridedView<'a, P> {

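The added `is_empty` here (and on `Buffer` above) follows the convention enforced by clippy's `len_without_is_empty` lint: a type that exposes a public `len()` should also expose `is_empty()`. A toy sketch of the pairing; `StridedView` is a hypothetical stand-in, not the real `PackedStridedView`:

```rust
struct StridedView<'a, T> {
    data: &'a [T],
    stride: usize,
}

impl<'a, T> StridedView<'a, T> {
    fn len(&self) -> usize {
        // Number of elements visible when stepping through `data` by `stride`.
        if self.stride == 0 {
            0
        } else {
            (self.data.len() + self.stride - 1) / self.stride
        }
    }

    #[inline]
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

fn main() {
    let data = [1, 2, 3, 4, 5];
    let view = StridedView { data: &data, stride: 2 };
    assert_eq!(view.len(), 3); // elements at indices 0, 2, 4
    assert!(!view.is_empty());
}
```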
@ -1,2 +1,3 @@
unstable_features = true
group_imports = "StdExternalCrate"
imports_granularity = "Module"
unstable_features = true

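This rustfmt.toml hunk adds `imports_granularity = "Module"` alongside `group_imports = "StdExternalCrate"`; both are nightly-only rustfmt options gated behind `unstable_features = true`, and they are what drive the import merges seen throughout the rest of this diff. An illustration (not taken from the diff) of their effect:

```rust
// With imports_granularity = "Module", rustfmt rewrites
//
//     use std::io::Cursor;
//     use std::io::Read;
//     use std::io::Write;
//
// into the single merged form below, and group_imports = "StdExternalCrate"
// keeps std imports in their own block ahead of external and crate-local ones.

use std::io::{Cursor, Read, Write};

fn main() {
    let mut buf = Cursor::new(Vec::<u8>::new());
    buf.write_all(b"hi").unwrap();
    buf.set_position(0);
    let mut s = String::new();
    buf.read_to_string(&mut s).unwrap();
    assert_eq!(s, "hi");
}
```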
@ -20,9 +20,9 @@ use plonky2_util::{log2_ceil, log2_strict};

use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::permutation::PermutationCheckVars;
use crate::permutation::{
compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet,
PermutationCheckVars,
};
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::stark::Stark;

@ -12,8 +12,7 @@ use plonky2_util::ceil_div_usize;
use crate::config::StarkConfig;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// Represents a STARK system.
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {

@ -6,8 +6,7 @@ use plonky2::hash::hash_types::RichField;
use plonky2::iop::witness::{PartialWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::config::Hasher;
use plonky2::plonk::config::{GenericConfig, Hasher};
use plonky2::util::transpose;
use plonky2_util::{log2_ceil, log2_strict};


@ -4,8 +4,7 @@ use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::vars::StarkEvaluationTargets;
use starky::vars::StarkEvaluationVars;
use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars};

use crate::alu::addition::{eval_addition, eval_addition_circuit, generate_addition};
use crate::alu::bitops::{eval_bitop, eval_bitop_circuit, generate_bitop};

@ -4,8 +4,7 @@ use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::vars::StarkEvaluationTargets;
use starky::vars::StarkEvaluationVars;
use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars};

use crate::public_input_layout::NUM_PUBLIC_INPUTS;
use crate::registers::core::*;

@ -13,8 +13,7 @@ use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::vars::StarkEvaluationTargets;
use starky::vars::StarkEvaluationVars;
use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars};

use crate::public_input_layout::NUM_PUBLIC_INPUTS;
use crate::registers::lookup::*;

@ -5,8 +5,7 @@ use plonky2::hash::hashing::SPONGE_WIDTH;
use plonky2::hash::poseidon::{Poseidon, HALF_N_FULL_ROUNDS, N_PARTIAL_ROUNDS};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::vars::StarkEvaluationTargets;
use starky::vars::StarkEvaluationVars;
use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars};

use crate::public_input_layout::NUM_PUBLIC_INPUTS;
use crate::registers::permutation::*;

@ -11,8 +11,7 @@ use plonky2::util::transpose;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::permutation::PermutationPair;
use starky::stark::Stark;
use starky::vars::StarkEvaluationTargets;
use starky::vars::StarkEvaluationVars;
use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars};

use crate::alu::{eval_alu, eval_alu_circuit, generate_alu};
use crate::core_registers::{

@ -416,8 +416,7 @@ mod tests {
use anyhow::Result;
use plonky2::gates::gate::Gate;
use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree};
use plonky2::hash::hash_types::HashOut;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hash_types::{HashOut, RichField};
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::plonk::vars::EvaluationVars;
use plonky2_field::extension::Extendable;

@ -521,8 +521,7 @@ mod tests {
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::plonk::vars::EvaluationVars;
use plonky2_field::goldilocks_field::GoldilocksField;
use plonky2_field::types::Field;
use plonky2_field::types::PrimeField64;
use plonky2_field::types::{Field, PrimeField64};
use rand::Rng;

use crate::gates::comparison::ComparisonGate;

@ -339,8 +339,7 @@ mod tests {
use plonky2::plonk::vars::EvaluationVars;
use plonky2_field::extension::quartic::QuarticExtension;
use plonky2_field::goldilocks_field::GoldilocksField;
use plonky2_field::types::Field;
use plonky2_field::types::PrimeField64;
use plonky2_field::types::{Field, PrimeField64};
use rand::Rng;

use crate::gates::subtraction_u32::U32SubtractionGate;

@ -456,8 +456,7 @@ mod tests {
use plonky2::plonk::vars::EvaluationVars;
use plonky2_field::extension::quartic::QuarticExtension;
use plonky2_field::goldilocks_field::GoldilocksField;
use plonky2_field::types::Field;
use plonky2_field::types::PrimeField64;
use plonky2_field::types::{Field, PrimeField64};
use rand::Rng;

use crate::gates::assert_le::AssertLessThanGate;

@ -1,7 +1,8 @@
use std::collections::BTreeMap;
use std::marker::PhantomData;

use plonky2::field::{extension::Extendable, types::Field};
use plonky2::field::extension::Extendable;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::generator::{GeneratedValues, SimpleGenerator};
use plonky2::iop::target::Target;
@ -374,7 +375,8 @@ mod tests {
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use rand::{seq::SliceRandom, thread_rng, Rng};
use rand::seq::SliceRandom;
use rand::{thread_rng, Rng};

use super::*;