diff --git a/.github/workflows/continuous-integration-workflow.yml b/.github/workflows/continuous-integration-workflow.yml index 38231892..48848b73 100644 --- a/.github/workflows/continuous-integration-workflow.yml +++ b/.github/workflows/continuous-integration-workflow.yml @@ -6,6 +6,9 @@ on: pull_request: branches: - "**" + workflow_dispatch: + branches: + - "**" jobs: test: @@ -21,7 +24,7 @@ jobs: uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: nightly + toolchain: nightly-2022-11-23 override: true - name: rust-cache @@ -57,7 +60,7 @@ jobs: uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: nightly + toolchain: nightly-2022-11-23 override: true components: rustfmt, clippy diff --git a/Cargo.toml b/Cargo.toml index a78d0a96..82fb725e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["field", "insertion", "plonky2", "starky", "system_zero", "util", "waksman", "ecdsa", "u32", "evm", "maybe_rayon"] +members = ["ecdsa", "evm", "field", "insertion", "maybe_rayon", "plonky2", "starky", "system_zero", "u32", "util", "waksman"] [profile.release] opt-level = 3 diff --git a/README.md b/README.md index 7b924698..59fc4d09 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,11 @@ Jemalloc is known to cause crashes when a binary compiled for x86 is run on an A As this is a monorepo, see the individual crates within for license information. -## Disclaimer +## Security This code has not yet been audited, and should not be used in any production systems. +While Plonky2 is configurable, its defaults generally target 100 bits of security. The default FRI configuration targets 100 bits of *conjectured* security based on the conjecture in [ethSTARK](https://eprint.iacr.org/2021/582). + +Plonky2's default hash function is Poseidon, configured with 8 full rounds, 22 partial rounds, a width of 12 field elements (each ~64 bits), and an S-box of `x^7`. [BBLP22](https://tosc.iacr.org/index.php/ToSC/article/view/9850) suggests that this configuration may have around 95 bits of security, falling a bit short of our 100 bit target. 
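For a concrete sense of where that 100-bit figure comes from: under the ethSTARK conjecture, each FRI query contributes roughly `rate_bits` bits of security and the grinding step contributes `proof_of_work_bits`, so conjectured security is about `rate_bits * num_query_rounds + proof_of_work_bits`. Below is a minimal sketch of that arithmetic, assuming (these parameter values are assumptions, not taken from this diff) that the default recursion configuration uses a 1/8 rate (`rate_bits = 3`), 28 query rounds, and 16 proof-of-work bits:

```rust
/// Conjectured FRI security in bits, per the ethSTARK conjecture:
/// each query adds ~`rate_bits` bits, grinding adds `proof_of_work_bits`.
/// Illustrative sketch only; not part of the patch.
fn conjectured_fri_security_bits(
    rate_bits: usize,
    num_query_rounds: usize,
    proof_of_work_bits: usize,
) -> usize {
    rate_bits * num_query_rounds + proof_of_work_bits
}

fn main() {
    // Assumed defaults: blowup factor 2^3 (rate 1/8), 28 queries, 16 grinding bits.
    assert_eq!(conjectured_fri_security_bits(3, 28, 16), 100);
}
```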
+ diff --git a/ecdsa/Cargo.toml b/ecdsa/Cargo.toml index 99f355b9..ed84d5de 100644 --- a/ecdsa/Cargo.toml +++ b/ecdsa/Cargo.toml @@ -3,16 +3,17 @@ name = "plonky2_ecdsa" version = "0.1.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +parallel = ["maybe_rayon/parallel", "plonky2/parallel"] [dependencies] -plonky2 = { path = "../plonky2" } -plonky2_util = { path = "../util" } -plonky2_field = { path = "../field" } -plonky2_u32 = { path = "../u32" } -num = "0.4.0" -itertools = "0.10.0" -rayon = "1.5.1" -serde = { version = "1.0", features = ["derive"] } -anyhow = "1.0.40" -rand = "0.8.4" \ No newline at end of file +anyhow = { version = "1.0.40", default-features = false } +itertools = { version = "0.10.0", default-features = false } +maybe_rayon = { path = "../maybe_rayon", default-features = false } +num = { version = "0.4.0", default-features = false } +plonky2 = { path = "../plonky2", default-features = false } +plonky2_u32 = { path = "../u32", default-features = false } +serde = { version = "1.0", default-features = false, features = ["derive"] } + +[dev-dependencies] +rand = { version = "0.8.4", default-features = false, features = ["getrandom"] } diff --git a/ecdsa/src/curve/curve_adds.rs b/ecdsa/src/curve/curve_adds.rs index 05d9d515..319c5614 100644 --- a/ecdsa/src/curve/curve_adds.rs +++ b/ecdsa/src/curve/curve_adds.rs @@ -1,7 +1,7 @@ -use std::ops::Add; +use core::ops::Add; -use plonky2_field::ops::Square; -use plonky2_field::types::Field; +use plonky2::field::ops::Square; +use plonky2::field::types::Field; use crate::curve::curve_types::{AffinePoint, Curve, ProjectivePoint}; diff --git a/ecdsa/src/curve/curve_msm.rs b/ecdsa/src/curve/curve_msm.rs index f681deb2..21bcc404 100644 --- a/ecdsa/src/curve/curve_msm.rs +++ b/ecdsa/src/curve/curve_msm.rs @@ -1,7 +1,8 @@ +use alloc::vec::Vec; + use itertools::Itertools; -use plonky2_field::types::Field; -use plonky2_field::types::PrimeField; -use rayon::prelude::*; +use maybe_rayon::*; +use plonky2::field::types::{Field, PrimeField}; use crate::curve::curve_summation::affine_multisummation_best; use crate::curve::curve_types::{AffinePoint, Curve, ProjectivePoint}; @@ -186,13 +187,12 @@ pub(crate) fn to_digits(x: &C::ScalarField, w: usize) -> Vec { #[cfg(test)] mod tests { - use num::BigUint; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; - use plonky2_field::types::PrimeField; + use alloc::vec; - use crate::curve::curve_msm::{msm_execute, msm_precompute, to_digits}; - use crate::curve::curve_types::Curve; + use num::BigUint; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + + use super::*; use crate::curve::secp256k1::Secp256K1; #[test] diff --git a/ecdsa/src/curve/curve_multiplication.rs b/ecdsa/src/curve/curve_multiplication.rs index 70e49df6..1f9c653d 100644 --- a/ecdsa/src/curve/curve_multiplication.rs +++ b/ecdsa/src/curve/curve_multiplication.rs @@ -1,7 +1,7 @@ -use std::ops::Mul; +use alloc::vec::Vec; +use core::ops::Mul; -use plonky2_field::types::Field; -use plonky2_field::types::PrimeField; +use plonky2::field::types::{Field, PrimeField}; use crate::curve::curve_types::{Curve, CurveScalar, ProjectivePoint}; diff --git a/ecdsa/src/curve/curve_summation.rs b/ecdsa/src/curve/curve_summation.rs index 492d7722..7bb633af 100644 --- a/ecdsa/src/curve/curve_summation.rs +++ b/ecdsa/src/curve/curve_summation.rs @@ -1,7 +1,9 @@ -use std::iter::Sum; +use alloc::vec; +use alloc::vec::Vec; +use 
core::iter::Sum; -use plonky2_field::ops::Square; -use plonky2_field::types::Field; +use plonky2::field::ops::Square; +use plonky2::field::types::Field; use crate::curve::curve_types::{AffinePoint, Curve, ProjectivePoint}; @@ -188,10 +190,7 @@ pub fn affine_multisummation_batch_inversion( #[cfg(test)] mod tests { - use crate::curve::curve_summation::{ - affine_summation_batch_inversion, affine_summation_pairwise, - }; - use crate::curve::curve_types::{Curve, ProjectivePoint}; + use super::*; use crate::curve::secp256k1::Secp256K1; #[test] diff --git a/ecdsa/src/curve/curve_types.rs b/ecdsa/src/curve/curve_types.rs index 96821672..91047393 100644 --- a/ecdsa/src/curve/curve_types.rs +++ b/ecdsa/src/curve/curve_types.rs @@ -1,9 +1,10 @@ -use std::fmt::Debug; -use std::hash::Hash; -use std::ops::Neg; +use alloc::vec::Vec; +use core::fmt::Debug; +use core::hash::{Hash, Hasher}; +use core::ops::Neg; -use plonky2_field::ops::Square; -use plonky2_field::types::{Field, PrimeField}; +use plonky2::field::ops::Square; +use plonky2::field::types::{Field, PrimeField}; use serde::{Deserialize, Serialize}; // To avoid implementation conflicts from associated types, @@ -123,7 +124,7 @@ impl PartialEq for AffinePoint { impl Eq for AffinePoint {} impl Hash for AffinePoint { - fn hash(&self, state: &mut H) { + fn hash(&self, state: &mut H) { if self.zero { self.zero.hash(state); } else { diff --git a/ecdsa/src/curve/ecdsa.rs b/ecdsa/src/curve/ecdsa.rs index 0b2d396d..131d8b4d 100644 --- a/ecdsa/src/curve/ecdsa.rs +++ b/ecdsa/src/curve/ecdsa.rs @@ -1,4 +1,4 @@ -use plonky2_field::types::Field; +use plonky2::field::types::{Field, Sample}; use serde::{Deserialize, Serialize}; use crate::curve::curve_msm::msm_parallel; @@ -63,8 +63,8 @@ pub fn verify_message( #[cfg(test)] mod tests { - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + use plonky2::field::types::Sample; use crate::curve::ecdsa::{sign_message, verify_message, ECDSASecretKey}; use crate::curve::secp256k1::Secp256K1; diff --git a/ecdsa/src/curve/glv.rs b/ecdsa/src/curve/glv.rs index c58032ec..7c3e5de0 100644 --- a/ecdsa/src/curve/glv.rs +++ b/ecdsa/src/curve/glv.rs @@ -1,8 +1,8 @@ use num::rational::Ratio; use num::BigUint; -use plonky2_field::secp256k1_base::Secp256K1Base; -use plonky2_field::secp256k1_scalar::Secp256K1Scalar; -use plonky2_field::types::{Field, PrimeField}; +use plonky2::field::secp256k1_base::Secp256K1Base; +use plonky2::field::secp256k1_scalar::Secp256K1Scalar; +use plonky2::field::types::{Field, PrimeField}; use crate::curve::curve_msm::msm_parallel; use crate::curve::curve_types::{AffinePoint, ProjectivePoint}; @@ -102,8 +102,8 @@ pub fn glv_mul(p: ProjectivePoint, k: Secp256K1Scalar) -> ProjectiveP #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + use plonky2::field::types::{Field, Sample}; use crate::curve::curve_types::{Curve, CurveScalar}; use crate::curve::glv::{decompose_secp256k1_scalar, glv_mul, GLV_S}; diff --git a/ecdsa/src/curve/secp256k1.rs b/ecdsa/src/curve/secp256k1.rs index 8f7bccf3..0b899a71 100644 --- a/ecdsa/src/curve/secp256k1.rs +++ b/ecdsa/src/curve/secp256k1.rs @@ -1,6 +1,6 @@ -use plonky2_field::secp256k1_base::Secp256K1Base; -use plonky2_field::secp256k1_scalar::Secp256K1Scalar; -use plonky2_field::types::Field; +use plonky2::field::secp256k1_base::Secp256K1Base; 
+use plonky2::field::secp256k1_scalar::Secp256K1Scalar; +use plonky2::field::types::Field; use serde::{Deserialize, Serialize}; use crate::curve::curve_types::{AffinePoint, Curve}; @@ -40,9 +40,8 @@ const SECP256K1_GENERATOR_Y: Secp256K1Base = Secp256K1Base([ #[cfg(test)] mod tests { use num::BigUint; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; - use plonky2_field::types::PrimeField; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + use plonky2::field::types::{Field, PrimeField}; use crate::curve::curve_types::{AffinePoint, Curve, ProjectivePoint}; use crate::curve::secp256k1::Secp256K1; diff --git a/ecdsa/src/gadgets/biguint.rs b/ecdsa/src/gadgets/biguint.rs index faae365c..59e48d01 100644 --- a/ecdsa/src/gadgets/biguint.rs +++ b/ecdsa/src/gadgets/biguint.rs @@ -1,13 +1,15 @@ -use std::marker::PhantomData; +use alloc::vec; +use alloc::vec::Vec; +use core::marker::PhantomData; use num::{BigUint, Integer, Zero}; +use plonky2::field::extension::Extendable; +use plonky2::field::types::{PrimeField, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::iop::generator::{GeneratedValues, SimpleGenerator}; use plonky2::iop::target::{BoolTarget, Target}; use plonky2::iop::witness::{PartitionWitness, Witness}; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::extension::Extendable; -use plonky2_field::types::{PrimeField, PrimeField64}; use plonky2_u32::gadgets::arithmetic_u32::{CircuitBuilderU32, U32Target}; use plonky2_u32::gadgets::multiple_comparison::list_le_u32_circuit; use plonky2_u32::witness::{GeneratedValuesU32, WitnessU32}; @@ -346,11 +348,11 @@ impl, const D: usize> SimpleGenerator mod tests { use anyhow::Result; use num::{BigUint, FromPrimitive, Integer}; + use plonky2::iop::witness::PartialWitness; + use plonky2::plonk::circuit_builder::CircuitBuilder; + use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::{ - iop::witness::PartialWitness, - plonk::{circuit_builder::CircuitBuilder, circuit_data::CircuitConfig}, - }; + use rand::rngs::OsRng; use rand::Rng; use crate::gadgets::biguint::{CircuitBuilderBiguint, WitnessBigUint}; @@ -360,7 +362,7 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let x_value = BigUint::from_u128(rng.gen()).unwrap(); let y_value = BigUint::from_u128(rng.gen()).unwrap(); @@ -390,7 +392,7 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let mut x_value = BigUint::from_u128(rng.gen()).unwrap(); let mut y_value = BigUint::from_u128(rng.gen()).unwrap(); @@ -420,7 +422,7 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let x_value = BigUint::from_u128(rng.gen()).unwrap(); let y_value = BigUint::from_u128(rng.gen()).unwrap(); @@ -450,7 +452,7 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let x_value = BigUint::from_u128(rng.gen()).unwrap(); let y_value = BigUint::from_u128(rng.gen()).unwrap(); @@ -476,7 +478,7 @@ mod tests { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = >::F; - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let mut x_value = BigUint::from_u128(rng.gen()).unwrap(); let mut 
y_value = BigUint::from_u128(rng.gen()).unwrap(); diff --git a/ecdsa/src/gadgets/curve.rs b/ecdsa/src/gadgets/curve.rs index 4aa69733..11075322 100644 --- a/ecdsa/src/gadgets/curve.rs +++ b/ecdsa/src/gadgets/curve.rs @@ -1,8 +1,11 @@ +use alloc::vec; +use alloc::vec::Vec; + +use plonky2::field::extension::Extendable; +use plonky2::field::types::Sample; use plonky2::hash::hash_types::RichField; use plonky2::iop::target::BoolTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; use crate::curve::curve_types::{AffinePoint, Curve, CurveScalar}; use crate::gadgets::nonnative::{CircuitBuilderNonNative, NonNativeTarget}; @@ -254,16 +257,16 @@ impl, const D: usize> CircuitBuilderCurve #[cfg(test)] mod tests { - use std::ops::Neg; + use core::ops::Neg; use anyhow::Result; + use plonky2::field::secp256k1_base::Secp256K1Base; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + use plonky2::field::types::{Field, Sample}; use plonky2::iop::witness::PartialWitness; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2_field::secp256k1_base::Secp256K1Base; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; use crate::curve::curve_types::{AffinePoint, Curve, CurveScalar}; use crate::curve::secp256k1::Secp256K1; diff --git a/ecdsa/src/gadgets/curve_fixed_base.rs b/ecdsa/src/gadgets/curve_fixed_base.rs index 0fd8e841..e7656f5c 100644 --- a/ecdsa/src/gadgets/curve_fixed_base.rs +++ b/ecdsa/src/gadgets/curve_fixed_base.rs @@ -1,10 +1,12 @@ +use alloc::vec::Vec; + use num::BigUint; +use plonky2::field::extension::Extendable; +use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::hash::keccak::KeccakHash; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{GenericHashOut, Hasher}; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; use crate::curve::curve_types::{AffinePoint, Curve, CurveScalar}; use crate::gadgets::curve::{AffinePointTarget, CircuitBuilderCurve}; @@ -66,13 +68,12 @@ pub fn fixed_base_curve_mul_circuit, cons #[cfg(test)] mod tests { use anyhow::Result; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + use plonky2::field::types::{PrimeField, Sample}; use plonky2::iop::witness::PartialWitness; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; - use plonky2_field::types::PrimeField; use crate::curve::curve_types::{Curve, CurveScalar}; use crate::curve::secp256k1::Secp256K1; diff --git a/ecdsa/src/gadgets/curve_msm.rs b/ecdsa/src/gadgets/curve_msm.rs index e059638c..7bb4a6cc 100644 --- a/ecdsa/src/gadgets/curve_msm.rs +++ b/ecdsa/src/gadgets/curve_msm.rs @@ -1,10 +1,12 @@ +use alloc::vec; + use num::BigUint; +use plonky2::field::extension::Extendable; +use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::hash::keccak::KeccakHash; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{GenericHashOut, Hasher}; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; use crate::curve::curve_types::{Curve, CurveScalar}; use 
crate::gadgets::curve::{AffinePointTarget, CircuitBuilderCurve}; @@ -79,12 +81,12 @@ pub fn curve_msm_circuit, const D: usize> #[cfg(test)] mod tests { use anyhow::Result; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + use plonky2::field::types::Sample; use plonky2::iop::witness::PartialWitness; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; use crate::curve::curve_types::{Curve, CurveScalar}; use crate::curve::secp256k1::Secp256K1; diff --git a/ecdsa/src/gadgets/curve_windowed_mul.rs b/ecdsa/src/gadgets/curve_windowed_mul.rs index bc4e1caf..39fad17c 100644 --- a/ecdsa/src/gadgets/curve_windowed_mul.rs +++ b/ecdsa/src/gadgets/curve_windowed_mul.rs @@ -1,13 +1,15 @@ -use std::marker::PhantomData; +use alloc::vec; +use alloc::vec::Vec; +use core::marker::PhantomData; use num::BigUint; +use plonky2::field::extension::Extendable; +use plonky2::field::types::{Field, Sample}; use plonky2::hash::hash_types::RichField; use plonky2::hash::keccak::KeccakHash; use plonky2::iop::target::{BoolTarget, Target}; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{GenericHashOut, Hasher}; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; use plonky2_u32::gadgets::arithmetic_u32::{CircuitBuilderU32, U32Target}; use crate::curve::curve_types::{Curve, CurveScalar}; @@ -169,22 +171,18 @@ impl, const D: usize> CircuitBuilderWindowedMul Result<()> { @@ -206,7 +204,7 @@ mod tests { }) .collect(); - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let access_index = rng.gen::() % num_points; let access_index_target = builder.constant(F::from_canonical_usize(access_index)); diff --git a/ecdsa/src/gadgets/ecdsa.rs b/ecdsa/src/gadgets/ecdsa.rs index 3ed6342d..657ec492 100644 --- a/ecdsa/src/gadgets/ecdsa.rs +++ b/ecdsa/src/gadgets/ecdsa.rs @@ -1,9 +1,9 @@ -use std::marker::PhantomData; +use core::marker::PhantomData; +use plonky2::field::extension::Extendable; +use plonky2::field::secp256k1_scalar::Secp256K1Scalar; use plonky2::hash::hash_types::RichField; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::extension::Extendable; -use plonky2_field::secp256k1_scalar::Secp256K1Scalar; use crate::curve::curve_types::Curve; use crate::curve::secp256k1::Secp256K1; @@ -52,20 +52,14 @@ pub fn verify_message_circuit, const D: usize>( #[cfg(test)] mod tests { use anyhow::Result; + use plonky2::field::types::Sample; use plonky2::iop::witness::PartialWitness; - use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; - use super::{ECDSAPublicKeyTarget, ECDSASignatureTarget}; - use crate::curve::curve_types::{Curve, CurveScalar}; + use super::*; + use crate::curve::curve_types::CurveScalar; use crate::curve::ecdsa::{sign_message, ECDSAPublicKey, ECDSASecretKey, ECDSASignature}; - use crate::curve::secp256k1::Secp256K1; - use crate::gadgets::curve::CircuitBuilderCurve; - use crate::gadgets::ecdsa::verify_message_circuit; - use crate::gadgets::nonnative::CircuitBuilderNonNative; fn test_ecdsa_circuit_with_config(config: CircuitConfig) -> Result<()> { const D: usize = 2; diff --git a/ecdsa/src/gadgets/glv.rs 
b/ecdsa/src/gadgets/glv.rs index 539b5de3..063dee35 100644 --- a/ecdsa/src/gadgets/glv.rs +++ b/ecdsa/src/gadgets/glv.rs @@ -1,14 +1,15 @@ -use std::marker::PhantomData; +use alloc::vec::Vec; +use core::marker::PhantomData; +use plonky2::field::extension::Extendable; +use plonky2::field::secp256k1_base::Secp256K1Base; +use plonky2::field::secp256k1_scalar::Secp256K1Scalar; +use plonky2::field::types::{Field, PrimeField}; use plonky2::hash::hash_types::RichField; use plonky2::iop::generator::{GeneratedValues, SimpleGenerator}; use plonky2::iop::target::{BoolTarget, Target}; use plonky2::iop::witness::PartitionWitness; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::extension::Extendable; -use plonky2_field::secp256k1_base::Secp256K1Base; -use plonky2_field::secp256k1_scalar::Secp256K1Scalar; -use plonky2_field::types::{Field, PrimeField}; use crate::curve::glv::{decompose_secp256k1_scalar, GLV_BETA, GLV_S}; use crate::curve::secp256k1::Secp256K1; @@ -132,12 +133,12 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { use anyhow::Result; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + use plonky2::field::types::Sample; use plonky2::iop::witness::PartialWitness; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; use crate::curve::curve_types::{Curve, CurveScalar}; use crate::curve::glv::glv_mul; diff --git a/ecdsa/src/gadgets/nonnative.rs b/ecdsa/src/gadgets/nonnative.rs index 29520bed..27dd1c65 100644 --- a/ecdsa/src/gadgets/nonnative.rs +++ b/ecdsa/src/gadgets/nonnative.rs @@ -1,17 +1,19 @@ -use std::marker::PhantomData; +use alloc::vec; +use alloc::vec::Vec; +use core::marker::PhantomData; use num::{BigUint, Integer, One, Zero}; +use plonky2::field::extension::Extendable; +use plonky2::field::types::{Field, PrimeField}; use plonky2::hash::hash_types::RichField; use plonky2::iop::generator::{GeneratedValues, SimpleGenerator}; use plonky2::iop::target::{BoolTarget, Target}; use plonky2::iop::witness::PartitionWitness; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::types::PrimeField; -use plonky2_field::{extension::Extendable, types::Field}; +use plonky2::util::ceil_div_usize; use plonky2_u32::gadgets::arithmetic_u32::{CircuitBuilderU32, U32Target}; use plonky2_u32::gadgets::range_check::range_check_u32_circuit; use plonky2_u32::witness::GeneratedValuesU32; -use plonky2_util::ceil_div_usize; use crate::gadgets::biguint::{ BigUintTarget, CircuitBuilderBiguint, GeneratedValuesBigUint, WitnessBigUint, @@ -642,12 +644,12 @@ impl, const D: usize, FF: PrimeField> SimpleGenerat #[cfg(test)] mod tests { use anyhow::Result; + use plonky2::field::secp256k1_base::Secp256K1Base; + use plonky2::field::types::{Field, PrimeField, Sample}; use plonky2::iop::witness::PartialWitness; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2_field::secp256k1_base::Secp256K1Base; - use plonky2_field::types::{Field, PrimeField}; use crate::gadgets::nonnative::CircuitBuilderNonNative; diff --git a/ecdsa/src/gadgets/split_nonnative.rs b/ecdsa/src/gadgets/split_nonnative.rs index 5ee3a864..977912e2 100644 --- a/ecdsa/src/gadgets/split_nonnative.rs +++ b/ecdsa/src/gadgets/split_nonnative.rs @@ -1,11 +1,12 @@ 
-use std::marker::PhantomData; +use alloc::vec::Vec; +use core::marker::PhantomData; use itertools::Itertools; +use plonky2::field::extension::Extendable; +use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; use plonky2_u32::gadgets::arithmetic_u32::{CircuitBuilderU32, U32Target}; use crate::gadgets::biguint::BigUintTarget; @@ -96,15 +97,14 @@ impl, const D: usize> CircuitBuilderSplit #[cfg(test)] mod tests { use anyhow::Result; + use plonky2::field::secp256k1_scalar::Secp256K1Scalar; + use plonky2::field::types::Sample; use plonky2::iop::witness::PartialWitness; - use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2_field::secp256k1_scalar::Secp256K1Scalar; - use plonky2_field::types::Field; + use super::*; use crate::gadgets::nonnative::{CircuitBuilderNonNative, NonNativeTarget}; - use crate::gadgets::split_nonnative::CircuitBuilderSplit; #[test] fn test_split_nonnative() -> Result<()> { diff --git a/ecdsa/src/lib.rs b/ecdsa/src/lib.rs index 92bdb857..bf84913a 100644 --- a/ecdsa/src/lib.rs +++ b/ecdsa/src/lib.rs @@ -1,7 +1,7 @@ #![allow(clippy::needless_range_loop)] -// Below lint is currently broken and produces false positives. -// TODO: Remove this override when Clippy is patched. -#![allow(clippy::derive_partial_eq_without_eq)] +#![cfg_attr(not(test), no_std)] + +extern crate alloc; pub mod curve; pub mod gadgets; diff --git a/evm/Cargo.toml b/evm/Cargo.toml index 5b48f524..17d855c1 100644 --- a/evm/Cargo.toml +++ b/evm/Cargo.toml @@ -5,22 +5,22 @@ version = "0.1.0" edition = "2021" [dependencies] -plonky2 = { path = "../plonky2", default-features = false, features = ["rand", "timing"] } -plonky2_util = { path = "../util" } -eth_trie_utils = "0.4.0" anyhow = "1.0.40" env_logger = "0.9.0" +eth_trie_utils = "0.4.0" ethereum-types = "0.14.0" hex = { version = "0.4.3", optional = true } hex-literal = "0.3.4" itertools = "0.10.3" keccak-hash = "0.10.0" log = "0.4.14" -num = "0.4.0" maybe_rayon = { path = "../maybe_rayon" } +num = "0.4.0" once_cell = "1.13.0" pest = "2.1.3" pest_derive = "2.1.0" +plonky2 = { path = "../plonky2", default-features = false, features = ["timing"] } +plonky2_util = { path = "../util" } rand = "0.8.5" rand_chacha = "0.3.1" ripemd = "0.1.3" diff --git a/evm/src/arithmetic/add.rs b/evm/src/arithmetic/add.rs index 1bf798cc..b09307b0 100644 --- a/evm/src/arithmetic/add.rs +++ b/evm/src/arithmetic/add.rs @@ -161,7 +161,7 @@ pub fn eval_ext_circuit, const D: usize>( #[cfg(test)] mod tests { use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; @@ -177,7 +177,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); // if `IS_ADD == 0`, then the constraints should be met even // if all values are garbage. 
@@ -200,7 +200,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); // set `IS_ADD == 1` and ensure all constraints are satisfied. lv[IS_ADD] = F::ONE; diff --git a/evm/src/arithmetic/arithmetic_stark.rs b/evm/src/arithmetic/arithmetic_stark.rs index 08813a3b..5d835e77 100644 --- a/evm/src/arithmetic/arithmetic_stark.rs +++ b/evm/src/arithmetic/arithmetic_stark.rs @@ -6,12 +6,7 @@ use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::packed::PackedField; use plonky2::hash::hash_types::RichField; -use crate::arithmetic::add; -use crate::arithmetic::columns; -use crate::arithmetic::compare; -use crate::arithmetic::modular; -use crate::arithmetic::mul; -use crate::arithmetic::sub; +use crate::arithmetic::{add, columns, compare, modular, mul, sub}; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::stark::Stark; use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; diff --git a/evm/src/arithmetic/columns.rs b/evm/src/arithmetic/columns.rs index 10bf72d9..923fbc73 100644 --- a/evm/src/arithmetic/columns.rs +++ b/evm/src/arithmetic/columns.rs @@ -22,25 +22,20 @@ pub const IS_ADD: usize = 0; pub const IS_MUL: usize = IS_ADD + 1; pub const IS_SUB: usize = IS_MUL + 1; pub const IS_DIV: usize = IS_SUB + 1; -pub const IS_SDIV: usize = IS_DIV + 1; -pub const IS_MOD: usize = IS_SDIV + 1; -pub const IS_SMOD: usize = IS_MOD + 1; -pub const IS_ADDMOD: usize = IS_SMOD + 1; +pub const IS_MOD: usize = IS_DIV + 1; +pub const IS_ADDMOD: usize = IS_MOD + 1; pub const IS_SUBMOD: usize = IS_ADDMOD + 1; pub const IS_MULMOD: usize = IS_SUBMOD + 1; pub const IS_LT: usize = IS_MULMOD + 1; pub const IS_GT: usize = IS_LT + 1; -pub const IS_SLT: usize = IS_GT + 1; -pub const IS_SGT: usize = IS_SLT + 1; -pub const IS_SHL: usize = IS_SGT + 1; +pub const IS_SHL: usize = IS_GT + 1; pub const IS_SHR: usize = IS_SHL + 1; -pub const IS_SAR: usize = IS_SHR + 1; -const START_SHARED_COLS: usize = IS_SAR + 1; +const START_SHARED_COLS: usize = IS_SHR + 1; -pub(crate) const ALL_OPERATIONS: [usize; 17] = [ - IS_ADD, IS_MUL, IS_SUB, IS_DIV, IS_SDIV, IS_MOD, IS_SMOD, IS_ADDMOD, IS_SUBMOD, IS_MULMOD, - IS_LT, IS_GT, IS_SLT, IS_SGT, IS_SHL, IS_SHR, IS_SAR, +pub(crate) const ALL_OPERATIONS: [usize; 12] = [ + IS_ADD, IS_MUL, IS_SUB, IS_DIV, IS_MOD, IS_ADDMOD, IS_SUBMOD, IS_MULMOD, IS_LT, IS_GT, IS_SHL, + IS_SHR, ]; /// Within the Arithmetic Unit, there are shared columns which can be diff --git a/evm/src/arithmetic/compare.rs b/evm/src/arithmetic/compare.rs index 55dc5764..7a360430 100644 --- a/evm/src/arithmetic/compare.rs +++ b/evm/src/arithmetic/compare.rs @@ -35,8 +35,6 @@ pub(crate) fn generate(lv: &mut [F; NUM_ARITH_COLUMNS], op: usize) IS_LT => u256_sub_br(input0, input1), // input1 - input0 == diff + br*2^256 IS_GT => u256_sub_br(input1, input0), - IS_SLT => todo!(), - IS_SGT => todo!(), _ => panic!("op code not a comparison"), }; @@ -162,7 +160,7 @@ pub fn eval_ext_circuit, const D: usize>( #[cfg(test)] mod tests { use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; @@ -176,7 +174,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = 
[F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); // if `IS_LT == 0`, then the constraints should be met even if // all values are garbage. `eval_packed_generic` handles IS_GT @@ -201,7 +199,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); const N_ITERS: usize = 1000; for _ in 0..N_ITERS { diff --git a/evm/src/arithmetic/modular.rs b/evm/src/arithmetic/modular.rs index d0020166..09c3996e 100644 --- a/evm/src/arithmetic/modular.rs +++ b/evm/src/arithmetic/modular.rs @@ -87,7 +87,8 @@ //! In the case of DIV, we do something similar, except that we "replace" //! the modulus with "2^256" to force the quotient to be zero. -use num::{bigint::Sign, BigInt, One, Zero}; +use num::bigint::Sign; +use num::{BigInt, One, Zero}; use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; use plonky2::field::types::Field; @@ -500,7 +501,7 @@ pub(crate) fn eval_ext_circuit, const D: usize>( mod tests { use itertools::izip; use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; @@ -516,7 +517,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); // if `IS_ADDMOD == 0`, then the constraints should be met even // if all values are garbage. @@ -543,7 +544,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); for op_filter in [IS_ADDMOD, IS_DIV, IS_SUBMOD, IS_MOD, IS_MULMOD] { // Reset operation columns, then select one @@ -594,7 +595,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); for op_filter in [IS_ADDMOD, IS_SUBMOD, IS_DIV, IS_MOD, IS_MULMOD] { // Reset operation columns, then select one diff --git a/evm/src/arithmetic/mul.rs b/evm/src/arithmetic/mul.rs index 7dda18e2..d55ab27b 100644 --- a/evm/src/arithmetic/mul.rs +++ b/evm/src/arithmetic/mul.rs @@ -172,7 +172,7 @@ pub fn eval_ext_circuit, const D: usize>( #[cfg(test)] mod tests { use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; @@ -188,7 +188,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); // if `IS_MUL == 0`, then the constraints should be met even // if all values are garbage. 
@@ -211,7 +211,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); // set `IS_MUL == 1` and ensure all constraints are satisfied. lv[IS_MUL] = F::ONE; diff --git a/evm/src/arithmetic/sub.rs b/evm/src/arithmetic/sub.rs index f8377651..d589f323 100644 --- a/evm/src/arithmetic/sub.rs +++ b/evm/src/arithmetic/sub.rs @@ -93,7 +93,7 @@ pub fn eval_ext_circuit, const D: usize>( #[cfg(test)] mod tests { use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; @@ -109,7 +109,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); // if `IS_SUB == 0`, then the constraints should be met even // if all values are garbage. @@ -132,7 +132,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng)); // set `IS_SUB == 1` and ensure all constraints are satisfied. lv[IS_SUB] = F::ONE; diff --git a/evm/src/arithmetic/utils.rs b/evm/src/arithmetic/utils.rs index 74999ab4..ec989c94 100644 --- a/evm/src/arithmetic/utils.rs +++ b/evm/src/arithmetic/utils.rs @@ -1,6 +1,5 @@ use std::ops::{Add, AddAssign, Mul, Neg, Range, Shr, Sub, SubAssign}; -use log::error; use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; @@ -11,21 +10,24 @@ use crate::arithmetic::columns::{NUM_ARITH_COLUMNS, N_LIMBS}; /// Emit an error message regarding unchecked range assumptions. /// Assumes the values in `cols` are `[cols[0], cols[0] + 1, ..., /// cols[0] + cols.len() - 1]`. +/// +/// TODO: Hamish to delete this when he has implemented and integrated +/// range checks. 
pub(crate) fn _range_check_error( - file: &str, - line: u32, - cols: Range, - signedness: &str, + _file: &str, + _line: u32, + _cols: Range, + _signedness: &str, ) { - error!( - "{}:{}: arithmetic unit skipped {}-bit {} range-checks on columns {}--{}: not yet implemented", - line, - file, - RC_BITS, - signedness, - cols.start, - cols.end - 1, - ); + // error!( + // "{}:{}: arithmetic unit skipped {}-bit {} range-checks on columns {}--{}: not yet implemented", + // line, + // file, + // RC_BITS, + // signedness, + // cols.start, + // cols.end - 1, + // ); } #[macro_export] diff --git a/evm/src/bin/assemble.rs b/evm/src/bin/assemble.rs index 1cf3a67c..2afd54d7 100644 --- a/evm/src/bin/assemble.rs +++ b/evm/src/bin/assemble.rs @@ -1,5 +1,4 @@ -use std::env; -use std::fs; +use std::{env, fs}; use hex::encode; use plonky2_evm::cpu::kernel::assemble_to_bytes; diff --git a/evm/src/cpu/columns/general.rs b/evm/src/cpu/columns/general.rs index eea5551c..de753182 100644 --- a/evm/src/cpu/columns/general.rs +++ b/evm/src/cpu/columns/general.rs @@ -10,6 +10,7 @@ pub(crate) union CpuGeneralColumnsView { arithmetic: CpuArithmeticView, logic: CpuLogicView, jumps: CpuJumpsView, + shift: CpuShiftView, } impl CpuGeneralColumnsView { @@ -52,6 +53,16 @@ impl CpuGeneralColumnsView { pub(crate) fn jumps_mut(&mut self) -> &mut CpuJumpsView { unsafe { &mut self.jumps } } + + // SAFETY: Each view is a valid interpretation of the underlying array. + pub(crate) fn shift(&self) -> &CpuShiftView { + unsafe { &self.shift } + } + + // SAFETY: Each view is a valid interpretation of the underlying array. + pub(crate) fn shift_mut(&mut self) -> &mut CpuShiftView { + unsafe { &mut self.shift } + } } impl PartialEq for CpuGeneralColumnsView { @@ -145,5 +156,12 @@ pub(crate) struct CpuJumpsView { pub(crate) should_trap: T, } +#[derive(Copy, Clone)] +pub(crate) struct CpuShiftView { + // For a shift amount of displacement: [T], this is the inverse of + // sum(displacement[1..]) or zero if the sum is zero. + pub(crate) high_limb_sum_inv: T, +} + // `u8` is guaranteed to have a `size_of` of 1. 
pub const NUM_SHARED_COLUMNS: usize = size_of::>(); diff --git a/evm/src/cpu/cpu_stark.rs b/evm/src/cpu/cpu_stark.rs index 24c7064e..7e6bd085 100644 --- a/evm/src/cpu/cpu_stark.rs +++ b/evm/src/cpu/cpu_stark.rs @@ -11,8 +11,8 @@ use plonky2::hash::hash_types::RichField; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::cpu::columns::{CpuColumnsView, COL_MAP, NUM_CPU_COLUMNS}; use crate::cpu::{ - bootstrap_kernel, control_flow, decode, dup_swap, jumps, membus, modfp254, simple_logic, stack, - stack_bounds, syscalls, + bootstrap_kernel, control_flow, decode, dup_swap, jumps, membus, modfp254, shift, simple_logic, + stack, stack_bounds, syscalls, }; use crate::cross_table_lookup::Column; use crate::memory::segments::Segment; @@ -150,6 +150,7 @@ impl, const D: usize> Stark for CpuStark, const D: usize> Stark for CpuStark Kernel { include_str!("asm/core/create_addresses.asm"), include_str!("asm/core/intrinsic_gas.asm"), include_str!("asm/core/invalid.asm"), + include_str!("asm/core/jumpdest_analysis.asm"), include_str!("asm/core/nonce.asm"), include_str!("asm/core/process_txn.asm"), include_str!("asm/core/syscall.asm"), @@ -77,6 +78,7 @@ pub(crate) fn combined_kernel() -> Kernel { include_str!("asm/sha2/store_pad.asm"), include_str!("asm/sha2/temp_words.asm"), include_str!("asm/sha2/write_length.asm"), + include_str!("asm/shift.asm"), include_str!("asm/transactions/router.asm"), include_str!("asm/transactions/type_0.asm"), include_str!("asm/transactions/type_1.asm"), diff --git a/evm/src/cpu/kernel/asm/account_code.asm b/evm/src/cpu/kernel/asm/account_code.asm index 14ea4037..78d877ec 100644 --- a/evm/src/cpu/kernel/asm/account_code.asm +++ b/evm/src/cpu/kernel/asm/account_code.asm @@ -21,7 +21,7 @@ global extcodehash: %endmacro %macro extcodesize - %stack (address) -> (address, %%after) + %stack (address) -> (address, 0, @SEGMENT_KERNEL_ACCOUNT_CODE, %%after) %jump(load_code) %%after: %endmacro @@ -44,7 +44,8 @@ global extcodesize: // Post stack: (empty) global extcodecopy: // stack: address, dest_offset, offset, size, retdest - %stack (address, dest_offset, offset, size, retdest) -> (address, extcodecopy_contd, size, offset, dest_offset, retdest) + %stack (address, dest_offset, offset, size, retdest) + -> (address, 0, @SEGMENT_KERNEL_ACCOUNT_CODE, extcodecopy_contd, size, offset, dest_offset, retdest) %jump(load_code) extcodecopy_contd: @@ -55,19 +56,22 @@ extcodecopy_contd: // Loop copying the `code[offset]` to `memory[dest_offset]` until `i==size`. // Each iteration increments `offset, dest_offset, i`. +// TODO: Consider implementing this with memcpy. 
extcodecopy_loop: // stack: i, size, code_length, offset, dest_offset, retdest DUP2 DUP2 EQ // stack: i == size, i, size, code_length, offset, dest_offset, retdest %jumpi(extcodecopy_end) - %stack (i, size, code_length, offset, dest_offset, retdest) -> (offset, code_length, offset, code_length, dest_offset, i, size, retdest) + %stack (i, size, code_length, offset, dest_offset, retdest) + -> (offset, code_length, offset, code_length, dest_offset, i, size, retdest) LT // stack: offset < code_length, offset, code_length, dest_offset, i, size, retdest DUP2 // stack: offset, offset < code_length, offset, code_length, dest_offset, i, size, retdest %mload_current(@SEGMENT_KERNEL_ACCOUNT_CODE) // stack: opcode, offset < code_length, offset, code_length, dest_offset, i, size, retdest - %stack (opcode, offset_lt_code_length, offset, code_length, dest_offset, i, size, retdest) -> (offset_lt_code_length, 0, opcode, offset, code_length, dest_offset, i, size, retdest) + %stack (opcode, offset_lt_code_length, offset, code_length, dest_offset, i, size, retdest) + -> (offset_lt_code_length, 0, opcode, offset, code_length, dest_offset, i, size, retdest) // If `offset >= code_length`, use `opcode=0`. Necessary since `SEGMENT_KERNEL_ACCOUNT_CODE` might be clobbered from previous calls. %select_bool // stack: opcode, offset, code_length, dest_offset, i, size, retdest @@ -93,41 +97,42 @@ extcodecopy_end: JUMP -// Loads the code at `address` in the `SEGMENT_KERNEL_ACCOUNT_CODE` at the current context and starting at offset 0. +// Loads the code at `address` into memory, at the given context and segment, starting at offset 0. // Checks that the hash of the loaded code corresponds to the `codehash` in the state trie. -// Pre stack: address, retdest -// Post stack: extcodesize(address) -load_code: - %stack (address, retdest) -> (extcodehash, address, load_code_ctd, retdest) +// Pre stack: address, ctx, segment, retdest +// Post stack: code_len +global load_code: + %stack (address, ctx, segment, retdest) -> (extcodehash, address, load_code_ctd, ctx, segment, retdest) JUMP load_code_ctd: - // stack: codehash, retdest + // stack: codehash, ctx, segment, retdest PROVER_INPUT(account_code::length) - // stack: code_length, codehash, retdest + // stack: code_length, codehash, ctx, segment, retdest PUSH 0 // Loop non-deterministically querying `code[i]` and storing it in `SEGMENT_KERNEL_ACCOUNT_CODE` at offset `i`, until `i==code_length`. load_code_loop: - // stack: i, code_length, codehash, retdest + // stack: i, code_length, codehash, ctx, segment, retdest DUP2 DUP2 EQ - // stack: i == code_length, i, code_length, codehash, retdest + // stack: i == code_length, i, code_length, codehash, ctx, segment, retdest %jumpi(load_code_check) PROVER_INPUT(account_code::get) - // stack: opcode, i, code_length, codehash, retdest + // stack: opcode, i, code_length, codehash, ctx, segment, retdest DUP2 - // stack: i, opcode, i, code_length, codehash, retdest - %mstore_current(@SEGMENT_KERNEL_ACCOUNT_CODE) - // stack: i, code_length, codehash, retdest + // stack: i, opcode, i, code_length, codehash, ctx, segment, retdest + DUP7 // segment + DUP7 // context + MSTORE_GENERAL + // stack: i, code_length, codehash, ctx, segment, retdest %increment - // stack: i+1, code_length, codehash, retdest + // stack: i+1, code_length, codehash, ctx, segment, retdest %jump(load_code_loop) // Check that the hash of the loaded code equals `codehash`. 
load_code_check: - // stack: i, code_length, codehash, retdest - POP - // stack: code_length, codehash, retdest - %stack (code_length, codehash, retdest) -> (0, @SEGMENT_KERNEL_ACCOUNT_CODE, 0, code_length, codehash, retdest, code_length) + // stack: i, code_length, codehash, ctx, segment, retdest + %stack (i, code_length, codehash, ctx, segment, retdest) + -> (ctx, segment, 0, code_length, codehash, retdest, code_length) KECCAK_GENERAL // stack: shouldbecodehash, codehash, retdest, code_length %assert_eq diff --git a/evm/src/cpu/kernel/asm/core/bootloader.asm b/evm/src/cpu/kernel/asm/core/bootloader.asm index 7ebdf022..d1062bdc 100644 --- a/evm/src/cpu/kernel/asm/core/bootloader.asm +++ b/evm/src/cpu/kernel/asm/core/bootloader.asm @@ -1,10 +1,14 @@ // Loads some prover-provided contract code into the code segment of memory, // then hashes the code and returns the hash. - global bootload_contract: - // stack: retdest + // stack: address, retdest +// %stack (address, retdest) -> (address, after_load_code, retdest) +// %jump(load_code) + PANIC // TODO - // TODO +global bootload_code: + // stack: code_len, retdest + PANIC // TODO // stack: code_hash, retdest SWAP1 diff --git a/evm/src/cpu/kernel/asm/core/jumpdest_analysis.asm b/evm/src/cpu/kernel/asm/core/jumpdest_analysis.asm new file mode 100644 index 00000000..a9d8adf2 --- /dev/null +++ b/evm/src/cpu/kernel/asm/core/jumpdest_analysis.asm @@ -0,0 +1,64 @@ +// Populates @SEGMENT_JUMPDEST_BITS for the given context's code. +// Pre stack: ctx, code_len, retdest +// Post stack: (empty) +global jumpdest_analysis: + // stack: ctx, code_len, retdest + PUSH 0 // i = 0 + +loop: + // stack: i, ctx, code_len, retdest + // Ideally we would break if i >= code_len, but checking i > code_len is + // cheaper. It doesn't hurt to over-read by 1, since we'll read 0 which is + // a no-op. + DUP3 DUP2 GT // i > code_len + %jumpi(return) + + // stack: i, ctx, code_len, retdest + %stack (i, ctx) -> (ctx, @SEGMENT_CODE, i, i, ctx) + MLOAD_GENERAL + // stack: opcode, i, ctx, code_len, retdest + + DUP1 %eq_const(0x5b) + // stack: opcode == JUMPDEST, opcode, i, ctx, code_len, retdest + %jumpi(encountered_jumpdest) + + // stack: opcode, i, ctx, code_len, retdest + %code_bytes_to_skip + // stack: bytes_to_skip, i, ctx, code_len, retdest + ADD + %jump(continue) + +encountered_jumpdest: + // stack: opcode, i, ctx, code_len, retdest + POP + // stack: i, ctx, code_len, retdest + %stack (i, ctx) -> (ctx, @SEGMENT_JUMPDEST_BITS, i, 1, i, ctx) + MSTORE_GENERAL + +continue: + // stack: i, ctx, code_len, retdest + %increment + %jump(loop) + +return: + // stack: i, ctx, code_len, retdest + %pop3 + JUMP + +// Determines how many bytes to skip, if any, based on the opcode we read. +// If we read a PUSH opcode, we skip over n bytes, otherwise we skip 0. +// +// Note that the range of PUSH opcodes is [0x60, 0x80). I.e. PUSH1 is 0x60 +// and PUSH32 is 0x7f. +%macro code_bytes_to_skip + // stack: opcode + %sub_const(0x60) + // stack: opcode - 0x60 + DUP1 %lt_const(0x20) + // stack: is_push_opcode, opcode - 0x60 + SWAP1 + %increment // n = opcode - 0x60 + 1 + // stack: n, is_push_opcode + MUL + // stack: bytes_to_skip +%endmacro diff --git a/evm/src/cpu/kernel/asm/main.asm b/evm/src/cpu/kernel/asm/main.asm index e8c8e3e4..41cb8079 100644 --- a/evm/src/cpu/kernel/asm/main.asm +++ b/evm/src/cpu/kernel/asm/main.asm @@ -1,5 +1,7 @@ global main: - // First, load all MPT data from the prover. 
+ // First, initialise the shift table + %shift_table_init + // Second, load all MPT data from the prover. PUSH txn_loop %jump(load_all_mpts) diff --git a/evm/src/cpu/kernel/asm/shift.asm b/evm/src/cpu/kernel/asm/shift.asm new file mode 100644 index 00000000..ce481ea2 --- /dev/null +++ b/evm/src/cpu/kernel/asm/shift.asm @@ -0,0 +1,25 @@ +/// Initialise the lookup table of binary powers for doing left/right shifts +/// +/// Specifically, set SHIFT_TABLE_SEGMENT[i] = 2^i for i = 0..255. +%macro shift_table_init + push 1 // 2^0 + push 0 // initial offset is zero + push @SEGMENT_SHIFT_TABLE // segment + dup2 // kernel context is 0 + %rep 255 + // stack: context, segment, ost_i, 2^i + dup4 + dup1 + add + // stack: 2^(i+1), context, segment, ost_i, 2^i + dup4 + %increment + // stack: ost_(i+1), 2^(i+1), context, segment, ost_i, 2^i + dup4 + dup4 + // stack: context, segment, ost_(i+1), 2^(i+1), context, segment, ost_i, 2^i + %endrep + %rep 256 + mstore_general + %endrep +%endmacro diff --git a/evm/src/cpu/kernel/assembler.rs b/evm/src/cpu/kernel/assembler.rs index 5f9584ba..eddc3272 100644 --- a/evm/src/cpu/kernel/assembler.rs +++ b/evm/src/cpu/kernel/assembler.rs @@ -7,15 +7,12 @@ use plonky2_util::ceil_div_usize; use super::ast::PushTarget; use crate::cpu::kernel::ast::Item::LocalLabelDeclaration; -use crate::cpu::kernel::ast::StackReplacement; +use crate::cpu::kernel::ast::{File, Item, StackReplacement}; use crate::cpu::kernel::keccak_util::hash_kernel; +use crate::cpu::kernel::opcodes::{get_opcode, get_push_opcode}; use crate::cpu::kernel::optimizer::optimize_asm; use crate::cpu::kernel::stack::stack_manipulation::expand_stack_manipulation; use crate::cpu::kernel::utils::u256_to_trimmed_be_bytes; -use crate::cpu::kernel::{ - ast::{File, Item}, - opcodes::{get_opcode, get_push_opcode}, -}; use crate::generation::prover_input::ProverInputFn; use crate::keccak_sponge::columns::KECCAK_RATE_BYTES; @@ -387,8 +384,9 @@ mod tests { use itertools::Itertools; + use crate::cpu::kernel::assembler::*; + use crate::cpu::kernel::ast::*; use crate::cpu::kernel::parser::parse; - use crate::cpu::kernel::{assembler::*, ast::*}; #[test] fn two_files() { diff --git a/evm/src/cpu/kernel/interpreter.rs b/evm/src/cpu/kernel/interpreter.rs index 6e3f9e5d..d9d70232 100644 --- a/evm/src/cpu/kernel/interpreter.rs +++ b/evm/src/cpu/kernel/interpreter.rs @@ -202,6 +202,25 @@ impl<'a> Interpreter<'a> { rlp.into_iter().map(U256::from).collect(); } + pub(crate) fn set_code(&mut self, context: usize, code: Vec) { + assert_ne!(context, 0, "Can't modify kernel code."); + while self.memory.context_memory.len() <= context { + self.memory + .context_memory + .push(MemoryContextState::default()); + } + self.memory.context_memory[context].segments[Segment::Code as usize].content = + code.into_iter().map(U256::from).collect(); + } + + pub(crate) fn get_jumpdest_bits(&self, context: usize) -> Vec { + self.memory.context_memory[context].segments[Segment::JumpdestBits as usize] + .content + .iter() + .map(|x| x.bit(0)) + .collect() + } + fn incr(&mut self, n: usize) { self.offset += n; } @@ -266,10 +285,10 @@ impl<'a> Interpreter<'a> { 0x31 => todo!(), // "BALANCE", 0x32 => todo!(), // "ORIGIN", 0x33 => todo!(), // "CALLER", - 0x34 => todo!(), // "CALLVALUE", - 0x35 => todo!(), // "CALLDATALOAD", - 0x36 => todo!(), // "CALLDATASIZE", - 0x37 => todo!(), // "CALLDATACOPY", + 0x34 => self.run_callvalue(), // "CALLVALUE", + 0x35 => self.run_calldataload(), // "CALLDATALOAD", + 0x36 => self.run_calldatasize(), // "CALLDATASIZE", + 0x37 => 
self.run_calldatacopy(), // "CALLDATACOPY", 0x38 => todo!(), // "CODESIZE", 0x39 => todo!(), // "CODECOPY", 0x3a => todo!(), // "GASPRICE", @@ -537,6 +556,51 @@ impl<'a> Interpreter<'a> { self.push(U256::from_big_endian(hash.as_bytes())); } + fn run_callvalue(&mut self) { + self.push( + self.memory.context_memory[self.context].segments[Segment::ContextMetadata as usize] + .get(ContextMetadata::CallValue as usize), + ) + } + + fn run_calldataload(&mut self) { + let offset = self.pop().as_usize(); + let value = U256::from_big_endian( + &(0..32) + .map(|i| { + self.memory + .mload_general(self.context, Segment::Calldata, offset + i) + .byte(0) + }) + .collect::>(), + ); + self.push(value); + } + + fn run_calldatasize(&mut self) { + self.push( + self.memory.context_memory[self.context].segments[Segment::ContextMetadata as usize] + .get(ContextMetadata::CalldataSize as usize), + ) + } + + fn run_calldatacopy(&mut self) { + let dest_offset = self.pop().as_usize(); + let offset = self.pop().as_usize(); + let size = self.pop().as_usize(); + for i in 0..size { + let calldata_byte = + self.memory + .mload_general(self.context, Segment::Calldata, offset + i); + self.memory.mstore_general( + self.context, + Segment::MainMemory, + dest_offset + i, + calldata_byte, + ); + } + } + fn run_prover_input(&mut self) -> anyhow::Result<()> { let prover_input_fn = self .prover_inputs_map diff --git a/evm/src/cpu/kernel/tests/core/jumpdest_analysis.rs b/evm/src/cpu/kernel/tests/core/jumpdest_analysis.rs new file mode 100644 index 00000000..022a18d7 --- /dev/null +++ b/evm/src/cpu/kernel/tests/core/jumpdest_analysis.rs @@ -0,0 +1,42 @@ +use anyhow::Result; + +use crate::cpu::kernel::aggregator::KERNEL; +use crate::cpu::kernel::interpreter::Interpreter; +use crate::cpu::kernel::opcodes::{get_opcode, get_push_opcode}; + +#[test] +fn test_jumpdest_analysis() -> Result<()> { + let jumpdest_analysis = KERNEL.global_labels["jumpdest_analysis"]; + const CONTEXT: usize = 3; // arbitrary + + let add = get_opcode("ADD"); + let push2 = get_push_opcode(2); + let jumpdest = get_opcode("JUMPDEST"); + + #[rustfmt::skip] + let code: Vec = vec![ + add, + jumpdest, + push2, + jumpdest, // part of PUSH2 + jumpdest, // part of PUSH2 + jumpdest, + add, + jumpdest, + ]; + + let expected_jumpdest_bits = vec![false, true, false, false, false, true, false, true]; + + // Contract creation transaction. 
+ let initial_stack = vec![0xDEADBEEFu32.into(), code.len().into(), CONTEXT.into()]; + let mut interpreter = Interpreter::new_with_kernel(jumpdest_analysis, initial_stack); + interpreter.set_code(CONTEXT, code); + interpreter.run()?; + assert_eq!(interpreter.stack(), vec![]); + assert_eq!( + interpreter.get_jumpdest_bits(CONTEXT), + expected_jumpdest_bits + ); + + Ok(()) +} diff --git a/evm/src/cpu/kernel/tests/core/mod.rs b/evm/src/cpu/kernel/tests/core/mod.rs index dc9b2f39..502c57f1 100644 --- a/evm/src/cpu/kernel/tests/core/mod.rs +++ b/evm/src/cpu/kernel/tests/core/mod.rs @@ -1,2 +1,3 @@ mod create_addresses; mod intrinsic_gas; +mod jumpdest_analysis; diff --git a/evm/src/cpu/mod.rs b/evm/src/cpu/mod.rs index f679e3ae..bc338e2a 100644 --- a/evm/src/cpu/mod.rs +++ b/evm/src/cpu/mod.rs @@ -8,6 +8,7 @@ mod jumps; pub mod kernel; pub(crate) mod membus; mod modfp254; +mod shift; pub(crate) mod simple_logic; mod stack; mod stack_bounds; diff --git a/evm/src/cpu/shift.rs b/evm/src/cpu/shift.rs new file mode 100644 index 00000000..d383b6b2 --- /dev/null +++ b/evm/src/cpu/shift.rs @@ -0,0 +1,108 @@ +use plonky2::field::extension::Extendable; +use plonky2::field::packed::PackedField; +use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; +use plonky2::iop::ext_target::ExtensionTarget; + +use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::cpu::columns::CpuColumnsView; +use crate::cpu::membus::NUM_GP_CHANNELS; +use crate::memory::segments::Segment; + +pub(crate) fn eval_packed( + lv: &CpuColumnsView

<P>, + yield_constr: &mut ConstraintConsumer<P>
, +) { + let is_shift = lv.op.shl + lv.op.shr; + let displacement = lv.mem_channels[1]; // holds the shift displacement d + let two_exp = lv.mem_channels[2]; // holds 2^d + + // Not needed here; val is the input and we're verifying that output is + // val * 2^d (mod 2^256) + //let val = lv.mem_channels[0]; + //let output = lv.mem_channels[NUM_GP_CHANNELS - 1]; + + let shift_table_segment = P::Scalar::from_canonical_u64(Segment::ShiftTable as u64); + + // Only lookup the shifting factor when displacement is < 2^32. + // two_exp.used is true (1) if the high limbs of the displacement are + // zero and false (0) otherwise. + let high_limbs_are_zero = two_exp.used; + yield_constr.constraint(is_shift * (two_exp.is_read - P::ONES)); + + let high_limbs_sum: P = displacement.value[1..].iter().copied().sum(); + let high_limbs_sum_inv = lv.general.shift().high_limb_sum_inv; + // Verify that high_limbs_are_zero = 0 implies high_limbs_sum != 0 and + // high_limbs_are_zero = 1 implies high_limbs_sum = 0. + let t = high_limbs_sum * high_limbs_sum_inv - (P::ONES - high_limbs_are_zero); + yield_constr.constraint(is_shift * t); + yield_constr.constraint(is_shift * high_limbs_sum * high_limbs_are_zero); + + // When the shift displacement is < 2^32, constrain the two_exp + // mem_channel to be the entry corresponding to `displacement` in + // the shift table lookup (will be zero if displacement >= 256). + yield_constr.constraint(is_shift * two_exp.addr_context); // read from kernel memory + yield_constr.constraint(is_shift * (two_exp.addr_segment - shift_table_segment)); + yield_constr.constraint(is_shift * (two_exp.addr_virtual - displacement.value[0])); + + // Other channels must be unused + for chan in &lv.mem_channels[3..NUM_GP_CHANNELS - 1] { + yield_constr.constraint(is_shift * chan.used); // channel is not used + } + + // Cross-table lookup must connect the memory channels here to MUL + // (in the case of left shift) or DIV (in the case of right shift) + // in the arithmetic table. Specifically, the mapping is + // + // 0 -> 0 (value to be shifted is the same) + // 2 -> 1 (two_exp becomes the multiplicand (resp. 
divisor)) + // last -> last (output is the same) +} + +pub(crate) fn eval_ext_circuit, const D: usize>( + builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder, + lv: &CpuColumnsView>, + yield_constr: &mut RecursiveConstraintConsumer, +) { + let is_shift = builder.add_extension(lv.op.shl, lv.op.shr); + let displacement = lv.mem_channels[1]; + let two_exp = lv.mem_channels[2]; + + let shift_table_segment = F::from_canonical_u64(Segment::ShiftTable as u64); + + let high_limbs_are_zero = two_exp.used; + let one = builder.one_extension(); + let t = builder.sub_extension(two_exp.is_read, one); + let t = builder.mul_extension(is_shift, t); + yield_constr.constraint(builder, t); + + let high_limbs_sum = builder.add_many_extension(&displacement.value[1..]); + let high_limbs_sum_inv = lv.general.shift().high_limb_sum_inv; + let t = builder.one_extension(); + let t = builder.sub_extension(t, high_limbs_are_zero); + let t = builder.mul_sub_extension(high_limbs_sum, high_limbs_sum_inv, t); + let t = builder.mul_extension(is_shift, t); + yield_constr.constraint(builder, t); + + let t = builder.mul_many_extension([is_shift, high_limbs_sum, high_limbs_are_zero]); + yield_constr.constraint(builder, t); + + let t = builder.mul_extension(is_shift, two_exp.addr_context); + yield_constr.constraint(builder, t); + let t = builder.arithmetic_extension( + F::ONE, + -shift_table_segment, + is_shift, + two_exp.addr_segment, + is_shift, + ); + yield_constr.constraint(builder, t); + let t = builder.sub_extension(two_exp.addr_virtual, displacement.value[0]); + let t = builder.mul_extension(is_shift, t); + yield_constr.constraint(builder, t); + + for chan in &lv.mem_channels[3..NUM_GP_CHANNELS - 1] { + let t = builder.mul_extension(is_shift, chan.used); + yield_constr.constraint(builder, t); + } +} diff --git a/evm/src/generation/prover_input.rs b/evm/src/generation/prover_input.rs index ad1cfce0..4515bd95 100644 --- a/evm/src/generation/prover_input.rs +++ b/evm/src/generation/prover_input.rs @@ -70,7 +70,7 @@ impl GenerationState { match input_fn.0[1].as_str() { "length" => { // Return length of code. - // stack: codehash + // stack: codehash, ... let codehash = stack.last().expect("Empty stack"); self.inputs.contract_code[&H256::from_uint(codehash)] .len() @@ -78,7 +78,7 @@ impl GenerationState { } "get" => { // Return `code[i]`. - // stack: i, code_length, codehash + // stack: i, code_length, codehash, ... 
let stacklen = stack.len(); let i = stack[stacklen - 1].as_usize(); let codehash = stack[stacklen - 3]; diff --git a/evm/src/keccak/round_flags.rs b/evm/src/keccak/round_flags.rs index 920ca4c8..d8c46680 100644 --- a/evm/src/keccak/round_flags.rs +++ b/evm/src/keccak/round_flags.rs @@ -5,11 +5,9 @@ use plonky2::hash::hash_types::RichField; use plonky2::plonk::circuit_builder::CircuitBuilder; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use crate::keccak::columns::reg_step; -use crate::keccak::columns::NUM_COLUMNS; +use crate::keccak::columns::{reg_step, NUM_COLUMNS}; use crate::keccak::keccak_stark::NUM_ROUNDS; -use crate::vars::StarkEvaluationTargets; -use crate::vars::StarkEvaluationVars; +use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; pub(crate) fn eval_round_flags>( vars: StarkEvaluationVars, diff --git a/evm/src/keccak_memory/keccak_memory_stark.rs b/evm/src/keccak_memory/keccak_memory_stark.rs index 1bbea168..3719fc8e 100644 --- a/evm/src/keccak_memory/keccak_memory_stark.rs +++ b/evm/src/keccak_memory/keccak_memory_stark.rs @@ -15,8 +15,7 @@ use crate::keccak_memory::columns::*; use crate::memory::segments::Segment; use crate::stark::Stark; use crate::util::trace_rows_to_poly_values; -use crate::vars::StarkEvaluationTargets; -use crate::vars::StarkEvaluationVars; +use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; pub(crate) fn ctl_looked_data() -> Vec> { Column::singles([COL_CONTEXT, COL_SEGMENT, COL_VIRTUAL, COL_READ_TIMESTAMP]).collect() diff --git a/evm/src/keccak_sponge/keccak_sponge_stark.rs b/evm/src/keccak_sponge/keccak_sponge_stark.rs index 219c0c21..f2af8895 100644 --- a/evm/src/keccak_sponge/keccak_sponge_stark.rs +++ b/evm/src/keccak_sponge/keccak_sponge_stark.rs @@ -21,8 +21,7 @@ use crate::keccak_sponge::columns::*; use crate::memory::segments::Segment; use crate::stark::Stark; use crate::util::trace_rows_to_poly_values; -use crate::vars::StarkEvaluationTargets; -use crate::vars::StarkEvaluationVars; +use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; #[allow(unused)] // TODO: Should be used soon. pub(crate) fn ctl_looked_data() -> Vec> { diff --git a/evm/src/memory/segments.rs b/evm/src/memory/segments.rs index f8d536e9..1587d890 100644 --- a/evm/src/memory/segments.rs +++ b/evm/src/memory/segments.rs @@ -35,10 +35,14 @@ pub(crate) enum Segment { TrieEncodedChild = 14, /// A buffer used to store the lengths of the encodings of a branch node's children. TrieEncodedChildLen = 15, + /// A table of values 2^i for i=0..255 for use with shift + /// instructions; initialised by `kernel/asm/shift.asm::init_shift_table()`. 
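For context on the `ShiftTable` segment documented just above: each of its 256 entries is the 256-bit value 2^i, which the SHL/SHR constraints read as the multiplicand (resp. divisor) for the cross-table lookup into MUL/DIV. A small sketch of the expected contents, modelling a 256-bit word as four little-endian u64 limbs purely for illustration; the actual in-memory layout is whatever `kernel/asm/shift.asm::init_shift_table()` writes into the segment:

```rust
// Illustrative only: entry i of SEGMENT_SHIFT_TABLE is 2^i for i = 0..=255.
// Entry i has a single bit set: bit (i % 64) of limb (i / 64).
fn shift_table_entry(i: usize) -> [u64; 4] {
    assert!(i < 256);
    let mut limbs = [0u64; 4];
    limbs[i / 64] = 1u64 << (i % 64);
    limbs
}

fn main() {
    assert_eq!(shift_table_entry(0), [1, 0, 0, 0]); // 2^0
    assert_eq!(shift_table_entry(65), [0, 2, 0, 0]); // 2^65
    assert_eq!(shift_table_entry(255), [0, 0, 0, 1 << 63]); // 2^255
    // Displacements of 256 or more have no table entry; per the shift.rs comment
    // above, the looked-up factor is then zero.
}
```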
+ ShiftTable = 16, + JumpdestBits = 17, } impl Segment { - pub(crate) const COUNT: usize = 16; + pub(crate) const COUNT: usize = 18; pub(crate) fn all() -> [Self; Self::COUNT] { [ @@ -58,6 +62,8 @@ impl Segment { Self::TrieData, Self::TrieEncodedChild, Self::TrieEncodedChildLen, + Self::ShiftTable, + Self::JumpdestBits, ] } @@ -80,6 +86,8 @@ impl Segment { Segment::TrieData => "SEGMENT_TRIE_DATA", Segment::TrieEncodedChild => "SEGMENT_TRIE_ENCODED_CHILD", Segment::TrieEncodedChildLen => "SEGMENT_TRIE_ENCODED_CHILD_LEN", + Segment::ShiftTable => "SEGMENT_SHIFT_TABLE", + Segment::JumpdestBits => "SEGMENT_JUMPDEST_BITS", } } @@ -102,6 +110,8 @@ impl Segment { Segment::TrieData => 256, Segment::TrieEncodedChild => 256, Segment::TrieEncodedChildLen => 6, + Segment::ShiftTable => 256, + Segment::JumpdestBits => 1, } } } diff --git a/evm/src/prover.rs b/evm/src/prover.rs index 20e8c628..4627784d 100644 --- a/evm/src/prover.rs +++ b/evm/src/prover.rs @@ -27,9 +27,9 @@ use crate::keccak::keccak_stark::KeccakStark; use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark; use crate::logic::LogicStark; use crate::memory::memory_stark::MemoryStark; -use crate::permutation::PermutationCheckVars; use crate::permutation::{ compute_permutation_z_polys, get_n_grand_product_challenge_sets, GrandProductChallengeSet, + PermutationCheckVars, }; use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof}; use crate::stark::Stark; diff --git a/evm/src/recursive_verifier.rs b/evm/src/recursive_verifier.rs index 445497f8..bf307699 100644 --- a/evm/src/recursive_verifier.rs +++ b/evm/src/recursive_verifier.rs @@ -13,13 +13,12 @@ use plonky2::iop::target::Target; use plonky2::iop::witness::Witness; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::{CircuitConfig, VerifierCircuitData, VerifierCircuitTarget}; -use plonky2::plonk::config::Hasher; -use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; +use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; use plonky2::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget}; use plonky2::util::reducing::ReducingFactorTarget; use plonky2::with_context; -use crate::all_stark::NUM_TABLES; +use crate::all_stark::{AllStark, Table, NUM_TABLES}; use crate::config::StarkConfig; use crate::constraint_consumer::RecursiveConstraintConsumer; use crate::cpu::cpu_stark::CpuStark; @@ -41,13 +40,9 @@ use crate::proof::{ TrieRootsTarget, }; use crate::stark::Stark; -use crate::util::h160_limbs; +use crate::util::{h160_limbs, h256_limbs}; use crate::vanishing_poly::eval_vanishing_poly_circuit; use crate::vars::StarkEvaluationTargets; -use crate::{ - all_stark::{AllStark, Table}, - util::h256_limbs, -}; /// Table-wise recursive proofs of an `AllProof`. 
pub struct RecursiveAllProof< @@ -236,7 +231,7 @@ impl, C: GenericConfig, const D: usize> .enumerate() { builder.verify_proof::( - recursive_proof, + &recursive_proof, &verifier_data_target, &verifier_data[i].common, ); @@ -850,8 +845,7 @@ pub(crate) mod tests { use plonky2::iop::witness::{PartialWitness, Witness}; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::{CircuitConfig, VerifierCircuitData}; - use plonky2::plonk::config::Hasher; - use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; + use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; use plonky2::plonk::proof::ProofWithPublicInputs; use crate::all_stark::{AllStark, Table}; diff --git a/evm/src/stark.rs b/evm/src/stark.rs index 49c5b70b..72cee0ad 100644 --- a/evm/src/stark.rs +++ b/evm/src/stark.rs @@ -13,8 +13,7 @@ use plonky2_util::ceil_div_usize; use crate::config::StarkConfig; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::permutation::PermutationPair; -use crate::vars::StarkEvaluationTargets; -use crate::vars::StarkEvaluationVars; +use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; const TRACE_ORACLE_INDEX: usize = 0; const PERMUTATION_CTL_ORACLE_INDEX: usize = 1; diff --git a/evm/src/stark_testing.rs b/evm/src/stark_testing.rs index 81b0f68f..da628403 100644 --- a/evm/src/stark_testing.rs +++ b/evm/src/stark_testing.rs @@ -1,15 +1,12 @@ use anyhow::{ensure, Result}; -use plonky2::field::extension::Extendable; -use plonky2::field::extension::FieldExtension; +use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues}; -use plonky2::field::types::Field; +use plonky2::field::types::{Field, Sample}; use plonky2::hash::hash_types::RichField; -use plonky2::iop::witness::PartialWitness; -use plonky2::iop::witness::Witness; +use plonky2::iop::witness::{PartialWitness, Witness}; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; -use plonky2::plonk::config::GenericConfig; -use plonky2::plonk::config::Hasher; +use plonky2::plonk::config::{GenericConfig, Hasher}; use plonky2::util::transpose; use plonky2_util::{log2_ceil, log2_strict}; @@ -93,8 +90,8 @@ where { // Compute native constraint evaluation on random values. 
let vars = StarkEvaluationVars { - local_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(), - next_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(), + local_values: &F::Extension::rand_array::<{ S::COLUMNS }>(), + next_values: &F::Extension::rand_array::<{ S::COLUMNS }>(), }; let alphas = F::rand_vec(1); let z_last = F::Extension::rand(); diff --git a/evm/src/verifier.rs b/evm/src/verifier.rs index 0bfbc3d4..ce15399a 100644 --- a/evm/src/verifier.rs +++ b/evm/src/verifier.rs @@ -275,7 +275,7 @@ fn eval_l_0_and_l_last(log_n: usize, x: F) -> (F, F) { mod tests { use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::field::polynomial::PolynomialValues; - use plonky2::field::types::Field; + use plonky2::field::types::Sample; use crate::verifier::eval_l_0_and_l_last; diff --git a/field/Cargo.toml b/field/Cargo.toml index 1a72bd6c..1242dfe3 100644 --- a/field/Cargo.toml +++ b/field/Cargo.toml @@ -4,16 +4,12 @@ description = "Finite field arithmetic" version = "0.1.0" edition = "2021" -[features] -default = ["rand"] -rand = ["dep:rand"] - [dependencies] -plonky2_util = { path = "../util" } -anyhow = "1.0.40" -itertools = "0.10.0" -num = { version = "0.4", features = [ "rand" ] } -rand = { optional = true, version = "0.8.4" } -serde = { version = "1.0", features = ["derive"] } -unroll = "0.1.5" -static_assertions = "1.1.0" +anyhow = { version = "1.0.40", default-features = false } +itertools = { version = "0.10.0", default-features = false, features = ["use_alloc"] } +num = { version = "0.4", default-features = false, features = ["alloc", "rand"] } +plonky2_util = { path = "../util", default-features = false } +rand = { version = "0.8.5", default-features = false, features = ["getrandom"] } +serde = { version = "1.0", default-features = false, features = ["alloc", "derive"] } +static_assertions = { version = "1.1.0", default-features = false } +unroll = { version = "0.1.5", default-features = false } diff --git a/field/src/arch/x86_64/avx2_goldilocks_field.rs b/field/src/arch/x86_64/avx2_goldilocks_field.rs index adef1fac..dafd1503 100644 --- a/field/src/arch/x86_64/avx2_goldilocks_field.rs +++ b/field/src/arch/x86_64/avx2_goldilocks_field.rs @@ -1,20 +1,22 @@ use core::arch::x86_64::*; -use std::fmt; -use std::fmt::{Debug, Formatter}; -use std::iter::{Product, Sum}; -use std::mem::transmute; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use core::fmt; +use core::fmt::{Debug, Formatter}; +use core::iter::{Product, Sum}; +use core::mem::transmute; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use crate::goldilocks_field::GoldilocksField; use crate::ops::Square; use crate::packed::PackedField; use crate::types::{Field, Field64}; -// Ideally `Avx2GoldilocksField` would wrap `__m256i`. Unfortunately, `__m256i` has an alignment of -// 32B, which would preclude us from casting `[GoldilocksField; 4]` (alignment 8B) to -// `Avx2GoldilocksField`. We need to ensure that `Avx2GoldilocksField` has the same alignment as -// `GoldilocksField`. Thus we wrap `[GoldilocksField; 4]` and use the `new` and `get` methods to -// convert to and from `__m256i`. +/// AVX2 Goldilocks Field +/// +/// Ideally `Avx2GoldilocksField` would wrap `__m256i`. Unfortunately, `__m256i` has an alignment of +/// 32B, which would preclude us from casting `[GoldilocksField; 4]` (alignment 8B) to +/// `Avx2GoldilocksField`. We need to ensure that `Avx2GoldilocksField` has the same alignment as +/// `GoldilocksField`. 
Thus we wrap `[GoldilocksField; 4]` and use the `new` and `get` methods to +/// convert to and from `__m256i`. #[derive(Copy, Clone)] #[repr(transparent)] pub struct Avx2GoldilocksField(pub [GoldilocksField; 4]); diff --git a/field/src/arch/x86_64/avx512_goldilocks_field.rs b/field/src/arch/x86_64/avx512_goldilocks_field.rs index f67e5000..2939e70e 100644 --- a/field/src/arch/x86_64/avx512_goldilocks_field.rs +++ b/field/src/arch/x86_64/avx512_goldilocks_field.rs @@ -1,20 +1,22 @@ use core::arch::x86_64::*; -use std::fmt; -use std::fmt::{Debug, Formatter}; -use std::iter::{Product, Sum}; -use std::mem::transmute; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use core::fmt; +use core::fmt::{Debug, Formatter}; +use core::iter::{Product, Sum}; +use core::mem::transmute; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use crate::goldilocks_field::GoldilocksField; use crate::ops::Square; use crate::packed::PackedField; use crate::types::{Field, Field64}; -// Ideally `Avx512GoldilocksField` would wrap `__m512i`. Unfortunately, `__m512i` has an alignment -// of 64B, which would preclude us from casting `[GoldilocksField; 8]` (alignment 8B) to -// `Avx512GoldilocksField`. We need to ensure that `Avx512GoldilocksField` has the same alignment as -// `GoldilocksField`. Thus we wrap `[GoldilocksField; 8]` and use the `new` and `get` methods to -// convert to and from `__m512i`. +/// AVX512 Goldilocks Field +/// +/// Ideally `Avx512GoldilocksField` would wrap `__m512i`. Unfortunately, `__m512i` has an alignment +/// of 64B, which would preclude us from casting `[GoldilocksField; 8]` (alignment 8B) to +/// `Avx512GoldilocksField`. We need to ensure that `Avx512GoldilocksField` has the same alignment as +/// `GoldilocksField`. Thus we wrap `[GoldilocksField; 8]` and use the `new` and `get` methods to +/// convert to and from `__m512i`. 
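The rewritten doc comments above turn on an alignment argument. The following standalone check (x86_64 only, and purely illustrative, not part of the diff) confirms the numbers the comments quote: an 8-byte-aligned field-element array cannot be reinterpreted in place as a 32-byte-aligned `__m256i` (or 64-byte-aligned `__m512i`), which is why the packed types wrap plain arrays.

```rust
// Illustrative check of the alignment facts stated in the doc comments above.
// GoldilocksField is a #[repr(transparent)] u64 wrapper, so its arrays are 8-byte aligned.
#[cfg(target_arch = "x86_64")]
fn main() {
    use core::arch::x86_64::__m256i;
    use core::mem::align_of;

    assert_eq!(align_of::<u64>(), 8);
    assert_eq!(align_of::<[u64; 4]>(), 8);
    assert_eq!(align_of::<__m256i>(), 32); // __m512i is analogously 64-byte aligned
}

#[cfg(not(target_arch = "x86_64"))]
fn main() {}
```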
#[derive(Copy, Clone)] #[repr(transparent)] pub struct Avx512GoldilocksField(pub [GoldilocksField; 8]); diff --git a/field/src/cosets.rs b/field/src/cosets.rs index 46b43d90..5a59a963 100644 --- a/field/src/cosets.rs +++ b/field/src/cosets.rs @@ -1,3 +1,5 @@ +use alloc::vec::Vec; + use num::bigint::BigUint; use crate::types::Field; diff --git a/field/src/extension/algebra.rs b/field/src/extension/algebra.rs index 5840ae81..8ca939b2 100644 --- a/field/src/extension/algebra.rs +++ b/field/src/extension/algebra.rs @@ -1,6 +1,7 @@ -use std::fmt::{Debug, Display, Formatter}; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use alloc::vec::Vec; +use core::fmt::{self, Debug, Display, Formatter}; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use crate::extension::OEF; @@ -42,7 +43,7 @@ impl, const D: usize> From for ExtensionAlgebra { } impl, const D: usize> Display for ExtensionAlgebra { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "({})", self.0[0])?; for i in 1..D { write!(f, " + ({})*b^{i}", self.0[i])?; @@ -52,7 +53,7 @@ impl, const D: usize> Display for ExtensionAlgebra { } impl, const D: usize> Debug for ExtensionAlgebra { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Display::fmt(self, f) } } @@ -190,12 +191,14 @@ impl, const D: usize> PolynomialCoeffsAlgebra { #[cfg(test)] mod tests { + use alloc::vec::Vec; + use itertools::Itertools; use crate::extension::algebra::ExtensionAlgebra; use crate::extension::{Extendable, FieldExtension}; use crate::goldilocks_field::GoldilocksField; - use crate::types::Field; + use crate::types::{Field, Sample}; /// Tests that the multiplication on the extension algebra lifts that of the field extension. 
fn test_extension_algebra, const D: usize>() { diff --git a/field/src/extension/mod.rs b/field/src/extension/mod.rs index ed596764..bbbaca25 100644 --- a/field/src/extension/mod.rs +++ b/field/src/extension/mod.rs @@ -1,4 +1,4 @@ -use std::convert::TryInto; +use alloc::vec::Vec; use crate::types::Field; diff --git a/field/src/extension/quadratic.rs b/field/src/extension/quadratic.rs index 278abba9..c0c9758b 100644 --- a/field/src/extension/quadratic.rs +++ b/field/src/extension/quadratic.rs @@ -1,13 +1,13 @@ -use std::fmt::{Debug, Display, Formatter}; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use core::fmt::{self, Debug, Display, Formatter}; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use num::bigint::BigUint; use serde::{Deserialize, Serialize}; use crate::extension::{Extendable, FieldExtension, Frobenius, OEF}; use crate::ops::Square; -use crate::types::Field; +use crate::types::{Field, Sample}; #[derive(Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] #[serde(bound = "")] @@ -48,6 +48,16 @@ impl> From for QuadraticExtension { } } +impl> Sample for QuadraticExtension { + #[inline] + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized, + { + Self([F::sample(rng), F::sample(rng)]) + } +} + impl> Field for QuadraticExtension { const ZERO: Self = Self([F::ZERO; 2]); const ONE: Self = Self([F::ONE, F::ZERO]); @@ -99,21 +109,16 @@ impl> Field for QuadraticExtension { fn from_noncanonical_u128(n: u128) -> Self { F::from_noncanonical_u128(n).into() } - - #[cfg(feature = "rand")] - fn rand_from_rng(rng: &mut R) -> Self { - Self([F::rand_from_rng(rng), F::rand_from_rng(rng)]) - } } impl> Display for QuadraticExtension { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "{} + {}*a", self.0[0], self.0[1]) } } impl> Debug for QuadraticExtension { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Display::fmt(self, f) } } diff --git a/field/src/extension/quartic.rs b/field/src/extension/quartic.rs index 6df39903..e7aba63b 100644 --- a/field/src/extension/quartic.rs +++ b/field/src/extension/quartic.rs @@ -1,6 +1,6 @@ -use std::fmt::{Debug, Display, Formatter}; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use core::fmt::{self, Debug, Display, Formatter}; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use num::bigint::BigUint; use num::traits::Pow; @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; use crate::extension::{Extendable, FieldExtension, Frobenius, OEF}; use crate::ops::Square; -use crate::types::Field; +use crate::types::{Field, Sample}; #[derive(Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] #[serde(bound = "")] @@ -49,6 +49,21 @@ impl> From for QuarticExtension { } } +impl> Sample for QuarticExtension { + #[inline] + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized, + { + Self::from_basefield_array([ + F::sample(rng), + F::sample(rng), + F::sample(rng), + F::sample(rng), + ]) + } +} + impl> Field for QuarticExtension { const ZERO: Self = Self([F::ZERO; 4]); const ONE: Self = Self([F::ONE, F::ZERO, F::ZERO, F::ZERO]); @@ -104,20 +119,10 @@ impl> Field for QuarticExtension { fn 
from_noncanonical_u128(n: u128) -> Self { F::from_noncanonical_u128(n).into() } - - #[cfg(feature = "rand")] - fn rand_from_rng(rng: &mut R) -> Self { - Self::from_basefield_array([ - F::rand_from_rng(rng), - F::rand_from_rng(rng), - F::rand_from_rng(rng), - F::rand_from_rng(rng), - ]) - } } impl> Display for QuarticExtension { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!( f, "{} + {}*a + {}*a^2 + {}*a^3", @@ -127,7 +132,7 @@ impl> Display for QuarticExtension { } impl> Debug for QuarticExtension { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Display::fmt(self, f) } } diff --git a/field/src/extension/quintic.rs b/field/src/extension/quintic.rs index 6680ebc7..d4b605eb 100644 --- a/field/src/extension/quintic.rs +++ b/field/src/extension/quintic.rs @@ -1,6 +1,6 @@ -use std::fmt::{Debug, Display, Formatter}; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use core::fmt::{self, Debug, Display, Formatter}; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use num::bigint::BigUint; use num::traits::Pow; @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; use crate::extension::{Extendable, FieldExtension, Frobenius, OEF}; use crate::ops::Square; -use crate::types::Field; +use crate::types::{Field, Sample}; #[derive(Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] #[serde(bound = "")] @@ -49,6 +49,22 @@ impl> From for QuinticExtension { } } +impl> Sample for QuinticExtension { + #[inline] + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized, + { + Self::from_basefield_array([ + F::sample(rng), + F::sample(rng), + F::sample(rng), + F::sample(rng), + F::sample(rng), + ]) + } +} + impl> Field for QuinticExtension { const ZERO: Self = Self([F::ZERO; 5]); const ONE: Self = Self([F::ONE, F::ZERO, F::ZERO, F::ZERO, F::ZERO]); @@ -110,21 +126,10 @@ impl> Field for QuinticExtension { fn from_noncanonical_u128(n: u128) -> Self { F::from_noncanonical_u128(n).into() } - - #[cfg(feature = "rand")] - fn rand_from_rng(rng: &mut R) -> Self { - Self::from_basefield_array([ - F::rand_from_rng(rng), - F::rand_from_rng(rng), - F::rand_from_rng(rng), - F::rand_from_rng(rng), - F::rand_from_rng(rng), - ]) - } } impl> Display for QuinticExtension { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!( f, "{} + {}*a + {}*a^2 + {}*a^3 + {}*a^4", @@ -134,7 +139,7 @@ impl> Display for QuinticExtension { } impl> Debug for QuinticExtension { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Display::fmt(self, f) } } diff --git a/field/src/fft.rs b/field/src/fft.rs index 6ede8af6..0a0b5dd8 100644 --- a/field/src/fft.rs +++ b/field/src/fft.rs @@ -1,5 +1,5 @@ -use std::cmp::{max, min}; -use std::option::Option; +use alloc::vec::Vec; +use core::cmp::{max, min}; use plonky2_util::{log2_strict, reverse_index_bits_in_place}; use unroll::unroll_for_loops; @@ -207,6 +207,8 @@ pub(crate) fn fft_classic(values: &mut [F], r: usize, root_table: &Fft #[cfg(test)] mod tests { + use alloc::vec::Vec; + use plonky2_util::{log2_ceil, log2_strict}; use crate::fft::{fft, fft_with_options, ifft}; @@ -224,7 +226,7 @@ mod tests { // "random", the last degree_padded-degree of them are zero. 
let coeffs = (0..degree) .map(|i| F::from_canonical_usize(i * 1337 % 100)) - .chain(std::iter::repeat(F::ZERO).take(degree_padded - degree)) + .chain(core::iter::repeat(F::ZERO).take(degree_padded - degree)) .collect::>(); assert_eq!(coeffs.len(), degree_padded); let coefficients = PolynomialCoeffs { coeffs }; diff --git a/field/src/field_testing.rs b/field/src/field_testing.rs index d85ac5e9..4c53c234 100644 --- a/field/src/field_testing.rs +++ b/field/src/field_testing.rs @@ -1,15 +1,17 @@ -use crate::extension::Extendable; -use crate::extension::Frobenius; +use crate::extension::{Extendable, Frobenius}; use crate::ops::Square; -use crate::types::Field; +use crate::types::{Field, Sample}; #[macro_export] macro_rules! test_field_arithmetic { ($field:ty) => { mod field_arithmetic { + use alloc::vec::Vec; + use num::bigint::BigUint; + use rand::rngs::OsRng; use rand::Rng; - use $crate::types::Field; + use $crate::types::{Field, Sample}; #[test] fn batch_inversion() { @@ -72,7 +74,7 @@ macro_rules! test_field_arithmetic { fn exponentiation_large() { type F = $field; - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let base = F::rand(); let pow = BigUint::from(rng.gen::()); diff --git a/field/src/goldilocks_extensions.rs b/field/src/goldilocks_extensions.rs index 2175494f..8b53f8b5 100644 --- a/field/src/goldilocks_extensions.rs +++ b/field/src/goldilocks_extensions.rs @@ -1,4 +1,4 @@ -use std::ops::Mul; +use core::ops::Mul; use static_assertions::const_assert; diff --git a/field/src/goldilocks_field.rs b/field/src/goldilocks_field.rs index e036ab9b..9f0b0519 100644 --- a/field/src/goldilocks_field.rs +++ b/field/src/goldilocks_field.rs @@ -1,15 +1,14 @@ -use std::fmt; -use std::fmt::{Debug, Display, Formatter}; -use std::hash::{Hash, Hasher}; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use core::fmt::{self, Debug, Display, Formatter}; +use core::hash::{Hash, Hasher}; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use num::{BigUint, Integer}; use plonky2_util::{assume, branch_hint}; use serde::{Deserialize, Serialize}; use crate::inversion::try_inverse_u64; -use crate::types::{Field, Field64, PrimeField, PrimeField64}; +use crate::types::{Field, Field64, PrimeField, PrimeField64, Sample}; const EPSILON: u64 = (1 << 32) - 1; @@ -57,6 +56,17 @@ impl Debug for GoldilocksField { } } +impl Sample for GoldilocksField { + #[inline] + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized, + { + use rand::Rng; + Self::from_canonical_u64(rng.gen_range(0..Self::ORDER)) + } +} + impl Field for GoldilocksField { const ZERO: Self = Self(0); const ONE: Self = Self(1); @@ -104,11 +114,6 @@ impl Field for GoldilocksField { reduce128(n) } - #[cfg(feature = "rand")] - fn rand_from_rng(rng: &mut R) -> Self { - Self::from_canonical_u64(rng.gen_range(0..Self::ORDER)) - } - #[inline] fn multiply_accumulate(&self, x: Self, y: Self) -> Self { // u64 + u64 * u64 cannot overflow. @@ -300,10 +305,9 @@ impl DivAssign for GoldilocksField { #[inline(always)] #[cfg(target_arch = "x86_64")] unsafe fn add_no_canonicalize_trashing_input(x: u64, y: u64) -> u64 { - use std::arch::asm; let res_wrapped: u64; let adjustment: u64; - asm!( + core::arch::asm!( "add {0}, {1}", // Trick. The carry flag is set iff the addition overflowed. // sbb x, y does x := x - y - CF. 
In our case, x and y are both {1:e}, so it simply does diff --git a/field/src/interpolation.rs b/field/src/interpolation.rs index 8f64e9d7..df708457 100644 --- a/field/src/interpolation.rs +++ b/field/src/interpolation.rs @@ -1,3 +1,5 @@ +use alloc::vec::Vec; + use plonky2_util::log2_ceil; use crate::fft::ifft; @@ -79,7 +81,7 @@ mod tests { use crate::extension::quartic::QuarticExtension; use crate::goldilocks_field::GoldilocksField; use crate::polynomial::PolynomialCoeffs; - use crate::types::Field; + use crate::types::{Field, Sample}; #[test] fn interpolant_random() { diff --git a/field/src/inversion.rs b/field/src/inversion.rs index 740e6562..45d17ab5 100644 --- a/field/src/inversion.rs +++ b/field/src/inversion.rs @@ -6,8 +6,8 @@ use crate::types::PrimeField64; #[inline(always)] fn safe_iteration(f: &mut u64, g: &mut u64, c: &mut i128, d: &mut i128, k: &mut u32) { if f < g { - std::mem::swap(f, g); - std::mem::swap(c, d); + core::mem::swap(f, g); + core::mem::swap(c, d); } if *f & 3 == *g & 3 { // f - g = 0 (mod 4) @@ -36,8 +36,8 @@ fn safe_iteration(f: &mut u64, g: &mut u64, c: &mut i128, d: &mut i128, k: &mut #[inline(always)] unsafe fn unsafe_iteration(f: &mut u64, g: &mut u64, c: &mut i128, d: &mut i128, k: &mut u32) { if *f < *g { - std::mem::swap(f, g); - std::mem::swap(c, d); + core::mem::swap(f, g); + core::mem::swap(c, d); } if *f & 3 == *g & 3 { // f - g = 0 (mod 4) diff --git a/field/src/lib.rs b/field/src/lib.rs index ec5fc80e..86b6aebe 100644 --- a/field/src/lib.rs +++ b/field/src/lib.rs @@ -6,10 +6,16 @@ #![allow(clippy::needless_range_loop)] #![allow(clippy::return_self_not_must_use)] #![feature(generic_const_exprs)] -#![feature(specialization)] #![feature(stdsimd)] +#![feature(specialization)] +#![cfg_attr(not(test), no_std)] + +extern crate alloc; + +mod inversion; pub(crate) mod arch; + pub mod batch_util; pub mod cosets; pub mod extension; @@ -17,7 +23,6 @@ pub mod fft; pub mod goldilocks_extensions; pub mod goldilocks_field; pub mod interpolation; -mod inversion; pub mod ops; pub mod packable; pub mod packed; @@ -29,5 +34,6 @@ pub mod zero_poly_coset; #[cfg(test)] mod field_testing; + #[cfg(test)] mod prime_field_testing; diff --git a/field/src/ops.rs b/field/src/ops.rs index bf8ff8a9..ce05ea78 100644 --- a/field/src/ops.rs +++ b/field/src/ops.rs @@ -1,4 +1,4 @@ -use std::ops::Mul; +use core::ops::Mul; pub trait Square { fn square(&self) -> Self; diff --git a/field/src/packed.rs b/field/src/packed.rs index 9708f9e3..531071fd 100644 --- a/field/src/packed.rs +++ b/field/src/packed.rs @@ -1,7 +1,7 @@ -use std::fmt::Debug; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Div, Mul, MulAssign, Neg, Sub, SubAssign}; -use std::slice; +use core::fmt::Debug; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Div, Mul, MulAssign, Neg, Sub, SubAssign}; +use core::slice; use crate::ops::Square; use crate::types::Field; @@ -82,7 +82,7 @@ where ); let buf_ptr = buf.as_ptr().cast::(); let n = buf.len() / Self::WIDTH; - unsafe { std::slice::from_raw_parts(buf_ptr, n) } + unsafe { slice::from_raw_parts(buf_ptr, n) } } fn pack_slice_mut(buf: &mut [Self::Scalar]) -> &mut [Self] { assert!( @@ -93,7 +93,7 @@ where ); let buf_ptr = buf.as_mut_ptr().cast::(); let n = buf.len() / Self::WIDTH; - unsafe { std::slice::from_raw_parts_mut(buf_ptr, n) } + unsafe { slice::from_raw_parts_mut(buf_ptr, n) } } fn doubles(&self) -> Self { diff --git a/field/src/polynomial/division.rs b/field/src/polynomial/division.rs index 561b9661..7d85d549 100644 --- 
a/field/src/polynomial/division.rs +++ b/field/src/polynomial/division.rs @@ -1,3 +1,6 @@ +use alloc::vec; +use alloc::vec::Vec; + use plonky2_util::log2_ceil; use crate::polynomial::PolynomialCoeffs; @@ -68,7 +71,7 @@ impl PolynomialCoeffs { } /// Let `self=p(X)`, this returns `(p(X)-p(z))/(X-z)`. - /// See https://en.wikipedia.org/wiki/Horner%27s_method + /// See pub fn divide_by_linear(&self, z: F) -> PolynomialCoeffs { let mut bs = self .coeffs @@ -131,17 +134,18 @@ impl PolynomialCoeffs { #[cfg(test)] mod tests { - use rand::{thread_rng, Rng}; + use rand::rngs::OsRng; + use rand::Rng; use crate::extension::quartic::QuarticExtension; use crate::goldilocks_field::GoldilocksField; use crate::polynomial::PolynomialCoeffs; - use crate::types::Field; + use crate::types::{Field, Sample}; #[test] fn test_division_by_linear() { type F = QuarticExtension; - let n = thread_rng().gen_range(1..1000); + let n = OsRng.gen_range(1..1000); let poly = PolynomialCoeffs::new(F::rand_vec(n)); let z = F::rand(); let ev = poly.eval(z); diff --git a/field/src/polynomial/mod.rs b/field/src/polynomial/mod.rs index 09ed69c7..f61ad419 100644 --- a/field/src/polynomial/mod.rs +++ b/field/src/polynomial/mod.rs @@ -1,8 +1,10 @@ pub(crate) mod division; -use std::cmp::max; -use std::iter::Sum; -use std::ops::{Add, AddAssign, Mul, MulAssign, Sub, SubAssign}; +use alloc::vec; +use alloc::vec::Vec; +use core::cmp::max; +use core::iter::Sum; +use core::ops::{Add, AddAssign, Mul, MulAssign, Sub, SubAssign}; use anyhow::{ensure, Result}; use itertools::Itertools; @@ -440,10 +442,12 @@ impl Mul for &PolynomialCoeffs { mod tests { use std::time::Instant; - use rand::{thread_rng, Rng}; + use rand::rngs::OsRng; + use rand::Rng; use super::*; use crate::goldilocks_field::GoldilocksField; + use crate::types::Sample; #[test] fn test_trimmed() { @@ -516,7 +520,7 @@ mod tests { #[test] fn test_polynomial_multiplication() { type F = GoldilocksField; - let mut rng = thread_rng(); + let mut rng = OsRng; let (a_deg, b_deg) = (rng.gen_range(1..10_000), rng.gen_range(1..10_000)); let a = PolynomialCoeffs::new(F::rand_vec(a_deg)); let b = PolynomialCoeffs::new(F::rand_vec(b_deg)); @@ -532,7 +536,7 @@ mod tests { #[test] fn test_inv_mod_xn() { type F = GoldilocksField; - let mut rng = thread_rng(); + let mut rng = OsRng; let a_deg = rng.gen_range(0..1_000); let n = rng.gen_range(1..1_000); let mut a = PolynomialCoeffs::new(F::rand_vec(a_deg + 1)); @@ -557,7 +561,7 @@ mod tests { #[test] fn test_polynomial_long_division() { type F = GoldilocksField; - let mut rng = thread_rng(); + let mut rng = OsRng; let (a_deg, b_deg) = (rng.gen_range(1..10_000), rng.gen_range(1..10_000)); let a = PolynomialCoeffs::new(F::rand_vec(a_deg)); let b = PolynomialCoeffs::new(F::rand_vec(b_deg)); @@ -571,7 +575,7 @@ mod tests { #[test] fn test_polynomial_division() { type F = GoldilocksField; - let mut rng = thread_rng(); + let mut rng = OsRng; let (a_deg, b_deg) = (rng.gen_range(1..10_000), rng.gen_range(1..10_000)); let a = PolynomialCoeffs::new(F::rand_vec(a_deg)); let b = PolynomialCoeffs::new(F::rand_vec(b_deg)); @@ -585,7 +589,7 @@ mod tests { #[test] fn test_polynomial_division_by_constant() { type F = GoldilocksField; - let mut rng = thread_rng(); + let mut rng = OsRng; let a_deg = rng.gen_range(1..10_000); let a = PolynomialCoeffs::new(F::rand_vec(a_deg)); let b = PolynomialCoeffs::from(vec![F::rand()]); @@ -601,7 +605,7 @@ mod tests { #[test] fn test_division_linear() { type F = GoldilocksField; - let mut rng = thread_rng(); + let mut rng = OsRng; let 
l = 14; let n = 1 << l; let g = F::primitive_root_of_unity(l); diff --git a/field/src/prime_field_testing.rs b/field/src/prime_field_testing.rs index 13431265..42dc9462 100644 --- a/field/src/prime_field_testing.rs +++ b/field/src/prime_field_testing.rs @@ -1,3 +1,5 @@ +use alloc::vec::Vec; + use crate::types::PrimeField64; /// Generates a series of non-negative integers less than `modulus` which cover a range of @@ -68,7 +70,7 @@ where macro_rules! test_prime_field_arithmetic { ($field:ty) => { mod prime_field_arithmetic { - use std::ops::{Add, Mul, Neg, Sub}; + use core::ops::{Add, Mul, Neg, Sub}; use $crate::ops::Square; use $crate::types::{Field, Field64}; diff --git a/field/src/secp256k1_base.rs b/field/src/secp256k1_base.rs index 504d63d7..eaa964f8 100644 --- a/field/src/secp256k1_base.rs +++ b/field/src/secp256k1_base.rs @@ -1,15 +1,15 @@ -use std::fmt; -use std::fmt::{Debug, Display, Formatter}; -use std::hash::{Hash, Hasher}; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use alloc::vec::Vec; +use core::fmt::{self, Debug, Display, Formatter}; +use core::hash::{Hash, Hasher}; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use itertools::Itertools; use num::bigint::BigUint; use num::{Integer, One}; use serde::{Deserialize, Serialize}; -use crate::types::{Field, PrimeField}; +use crate::types::{Field, PrimeField, Sample}; /// The base field of the secp256k1 elliptic curve. /// @@ -65,6 +65,17 @@ impl Debug for Secp256K1Base { } } +impl Sample for Secp256K1Base { + #[inline] + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized, + { + use num::bigint::RandBigInt; + Self::from_noncanonical_biguint(rng.gen_biguint_below(&Self::order())) + } +} + impl Field for Secp256K1Base { const ZERO: Self = Self([0; 4]); const ONE: Self = Self([1, 0, 0, 0]); @@ -131,12 +142,6 @@ impl Field for Secp256K1Base { fn from_noncanonical_u96(n: (u64, u32)) -> Self { Self([n.0, n.1 as u64, 0, 0]) } - - #[cfg(feature = "rand")] - fn rand_from_rng(rng: &mut R) -> Self { - use num::bigint::RandBigInt; - Self::from_noncanonical_biguint(rng.gen_biguint_below(&Self::order())) - } } impl PrimeField for Secp256K1Base { diff --git a/field/src/secp256k1_scalar.rs b/field/src/secp256k1_scalar.rs index e70b154d..1f1de697 100644 --- a/field/src/secp256k1_scalar.rs +++ b/field/src/secp256k1_scalar.rs @@ -1,16 +1,15 @@ -use std::convert::TryInto; -use std::fmt; -use std::fmt::{Debug, Display, Formatter}; -use std::hash::{Hash, Hasher}; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use alloc::vec::Vec; +use core::fmt::{self, Debug, Display, Formatter}; +use core::hash::{Hash, Hasher}; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use itertools::Itertools; use num::bigint::BigUint; use num::{Integer, One}; use serde::{Deserialize, Serialize}; -use crate::types::{Field, PrimeField}; +use crate::types::{Field, PrimeField, Sample}; /// The base field of the secp256k1 elliptic curve. 
/// @@ -68,6 +67,17 @@ impl Debug for Secp256K1Scalar { } } +impl Sample for Secp256K1Scalar { + #[inline] + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized, + { + use num::bigint::RandBigInt; + Self::from_noncanonical_biguint(rng.gen_biguint_below(&Self::order())) + } +} + impl Field for Secp256K1Scalar { const ZERO: Self = Self([0; 4]); const ONE: Self = Self([1, 0, 0, 0]); @@ -140,12 +150,6 @@ impl Field for Secp256K1Scalar { fn from_noncanonical_u96(n: (u64, u32)) -> Self { Self([n.0, n.1 as u64, 0, 0]) } - - #[cfg(feature = "rand")] - fn rand_from_rng(rng: &mut R) -> Self { - use num::bigint::RandBigInt; - Self::from_noncanonical_biguint(rng.gen_biguint_below(&Self::order())) - } } impl PrimeField for Secp256K1Scalar { diff --git a/field/src/types.rs b/field/src/types.rs index 545f90c5..0ae31847 100644 --- a/field/src/types.rs +++ b/field/src/types.rs @@ -1,17 +1,49 @@ -use std::fmt::{Debug, Display}; -use std::hash::Hash; -use std::iter::{Product, Sum}; -use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; +use alloc::vec; +use alloc::vec::Vec; +use core::fmt::{Debug, Display}; +use core::hash::Hash; +use core::iter::{Product, Sum}; +use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; use num::bigint::BigUint; use num::{Integer, One, ToPrimitive, Zero}; use plonky2_util::bits_u64; +use rand::rngs::OsRng; use serde::de::DeserializeOwned; use serde::Serialize; use crate::extension::Frobenius; use crate::ops::Square; +/// Sampling +pub trait Sample: Sized { + /// Samples a single value using `rng`. + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized; + + /// Samples a single value using the [`OsRng`]. + #[inline] + fn rand() -> Self { + Self::sample(&mut OsRng) + } + + /// Samples a [`Vec`] of values of length `n` using [`OsRng`]. + #[inline] + fn rand_vec(n: usize) -> Vec { + (0..n).map(|_| Self::rand()).collect() + } + + /// Samples an array of values of length `N` using [`OsRng`]. + #[inline] + fn rand_array() -> [Self; N] { + Self::rand_vec(N) + .try_into() + .ok() + .expect("This conversion can never fail.") + } +} + /// A finite field. pub trait Field: 'static @@ -33,6 +65,7 @@ pub trait Field: + Debug + Default + Display + + Sample + Send + Sync + Serialize @@ -317,9 +350,6 @@ pub trait Field: Self::from_noncanonical_u128(n) } - #[cfg(feature = "rand")] - fn rand_from_rng(rng: &mut R) -> Self; - fn exp_power_of_2(&self, power_log: usize) -> Self { let mut res = *self; for _ in 0..power_log { @@ -397,21 +427,6 @@ pub trait Field: } } - #[cfg(feature = "rand")] - fn rand() -> Self { - Self::rand_from_rng(&mut rand::thread_rng()) - } - - #[cfg(feature = "rand")] - fn rand_arr() -> [Self; N] { - Self::rand_vec(N).try_into().unwrap() - } - - #[cfg(feature = "rand")] - fn rand_vec(n: usize) -> Vec { - (0..n).map(|_| Self::rand()).collect() - } - /// Representative `g` of the coset used in FRI, so that LDEs in FRI are done over `gH`. 
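The new `Sample` trait added to `field/src/types.rs` above replaces the old `rand`-feature-gated methods on `Field`. A usage sketch against the API exactly as added in this diff (the `plonky2::field::types::Sample` path and method names are taken from the hunks above; the sketch itself is not part of the diff):

```rust
// Usage sketch for the new Sample trait.
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Sample;

fn main() {
    type F = GoldilocksField;
    let x = F::rand(); //              one element, sampled via OsRng
    let v = F::rand_vec(8); //         Vec<F> of length 8
    let a = F::rand_array::<4>(); //   [F; 4], as used by stark_testing and the benches
    println!("{x:?} {v:?} {a:?}");
}
```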
fn coset_shift() -> Self { Self::MULTIPLICATIVE_GROUP_GENERATOR diff --git a/field/src/zero_poly_coset.rs b/field/src/zero_poly_coset.rs index 8d63bc69..53b66a75 100644 --- a/field/src/zero_poly_coset.rs +++ b/field/src/zero_poly_coset.rs @@ -1,3 +1,5 @@ +use alloc::vec::Vec; + use crate::packed::PackedField; use crate::types::Field; diff --git a/insertion/Cargo.toml b/insertion/Cargo.toml index 481c2d5d..c007a975 100644 --- a/insertion/Cargo.toml +++ b/insertion/Cargo.toml @@ -5,5 +5,9 @@ version = "0.1.0" edition = "2021" [dependencies] +anyhow = { version = "1.0.40", default-features = false } +plonky2 = { path = "../plonky2", default-features = false } + +[dev-dependencies] plonky2 = { path = "../plonky2" } -anyhow = "1.0.40" + diff --git a/insertion/src/insert_gadget.rs b/insertion/src/insert_gadget.rs index ff0ec397..1574d8fb 100644 --- a/insertion/src/insert_gadget.rs +++ b/insertion/src/insert_gadget.rs @@ -1,3 +1,6 @@ +use alloc::vec; +use alloc::vec::Vec; + use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; @@ -50,7 +53,7 @@ impl, const D: usize> CircuitBuilderInsert #[cfg(test)] mod tests { use anyhow::Result; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use plonky2::iop::witness::PartialWitness; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; diff --git a/insertion/src/insertion_gate.rs b/insertion/src/insertion_gate.rs index 2757dd23..8019649a 100644 --- a/insertion/src/insertion_gate.rs +++ b/insertion/src/insertion_gate.rs @@ -1,5 +1,9 @@ -use std::marker::PhantomData; -use std::ops::Range; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use core::ops::Range; use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::types::Field; @@ -317,18 +321,14 @@ impl, const D: usize> SimpleGenerator for Insert #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; - use plonky2::gates::gate::Gate; + use plonky2::field::types::Sample; use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; use plonky2::hash::hash_types::HashOut; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::plonk::vars::EvaluationVars; - use crate::insertion_gate::InsertionGate; + use super::*; #[test] fn wire_indices() { diff --git a/insertion/src/lib.rs b/insertion/src/lib.rs index 15c91253..e71919dd 100644 --- a/insertion/src/lib.rs +++ b/insertion/src/lib.rs @@ -4,6 +4,9 @@ #![allow(clippy::len_without_is_empty)] #![allow(clippy::needless_range_loop)] #![allow(clippy::return_self_not_must_use)] +#![no_std] + +extern crate alloc; pub mod insert_gadget; pub mod insertion_gate; diff --git a/maybe_rayon/Cargo.toml b/maybe_rayon/Cargo.toml index f8cc95fb..b3c1e78a 100644 --- a/maybe_rayon/Cargo.toml +++ b/maybe_rayon/Cargo.toml @@ -3,7 +3,6 @@ name = "maybe_rayon" version = "0.1.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [features] parallel = ["rayon"] diff --git a/maybe_rayon/src/lib.rs b/maybe_rayon/src/lib.rs index d24ba2e5..c4bfb2e9 100644 --- a/maybe_rayon/src/lib.rs +++ b/maybe_rayon/src/lib.rs @@ -1,13 +1,16 @@ #[cfg(not(feature = "parallel"))] -use std::{ +use core::{ 
iter::{FlatMap, IntoIterator, Iterator}, slice::{Chunks, ChunksExact, ChunksExactMut, ChunksMut}, }; #[cfg(feature = "parallel")] -pub use rayon::prelude::{ - IndexedParallelIterator, ParallelDrainFull, ParallelDrainRange, ParallelExtend, - ParallelIterator, +pub use rayon::{ + self, + prelude::{ + IndexedParallelIterator, ParallelDrainFull, ParallelDrainRange, ParallelExtend, + ParallelIterator, + }, }; #[cfg(feature = "parallel")] use rayon::{ diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml index b63ef5a5..ab131463 100644 --- a/plonky2/Cargo.toml +++ b/plonky2/Cargo.toml @@ -11,45 +11,46 @@ edition = "2021" default-run = "generate_constants" [features] -default = ["parallel", "rand", "rand_chacha", "timing", "gate_testing"] -parallel = ["maybe_rayon/parallel"] -rand = ["dep:rand", "plonky2_field/rand"] -gate_testing = ["rand"] -rand_chacha = ["dep:rand_chacha"] -timing = [] +default = ["gate_testing", "parallel", "rand_chacha", "std", "timing"] +gate_testing = [] +parallel = ["hashbrown/rayon", "maybe_rayon/parallel"] +std = ["anyhow/std", "rand/std"] +timing = ["std"] [dependencies] -plonky2_field = { path = "../field" } -plonky2_util = { path = "../util" } -log = "0.4.14" -itertools = "0.10.0" -num = { version = "0.4", features = [ "rand" ] } -rand = { version = "0.8.4", optional = true } -rand_chacha = { version = "0.3.1", optional = true } -maybe_rayon = { path = "../maybe_rayon" } -unroll = "0.1.5" -anyhow = "1.0.40" -serde = { version = "1.0", features = ["derive"] } -serde_cbor = "0.11.1" -keccak-hash = "0.8.0" -static_assertions = "1.1.0" +ahash = { version = "0.7.6", default-features = false, features = ["compile-time-rng"] } # NOTE: Be sure to keep this version the same as the dependency in `hashbrown`. +anyhow = { version = "1.0.40", default-features = false } +hashbrown = { version = "0.12.3", default-features = false, features = ["ahash", "serde"] } # NOTE: When upgrading, see `ahash` dependency. +itertools = { version = "0.10.0", default-features = false } +keccak-hash = { version = "0.8.0", default-features = false } +log = { version = "0.4.14", default-features = false } +maybe_rayon = { path = "../maybe_rayon", default-features = false } +num = { version = "0.4", default-features = false, features = ["rand"] } +plonky2_field = { path = "../field", default-features = false } +plonky2_util = { path = "../util", default-features = false } +rand = { version = "0.8.4", default-features = false } +rand_chacha = { version = "0.3.1", optional = true, default-features = false } +serde = { version = "1.0", default-features = false, features = ["derive"] } +static_assertions = { version = "1.1.0", default-features = false } +unroll = { version = "0.1.5", default-features = false } [dev-dependencies] -rand = "0.8.4" -rand_chacha = "0.3.1" -criterion = "0.4.0" -env_logger = "0.9.0" -tynm = "0.1.6" -structopt = "0.3.26" -num_cpus = "1.13.1" -rayon = "1.5.1" +criterion = { version = "0.4.0", default-features = false } +env_logger = { version = "0.9.0", default-features = false } +num_cpus = { version = "1.14.0", default-features = false } +plonky2 = { path = "." 
} +rand = { version = "0.8.4", default-features = false, features = ["getrandom"] } +rand_chacha = { version = "0.3.1", default-features = false } +serde_cbor = { version = "0.11.2" } +structopt = { version = "0.3.26", default-features = false } +tynm = { version = "0.1.6", default-features = false } [target.'cfg(not(target_env = "msvc"))'.dev-dependencies] jemallocator = "0.3.2" [[bin]] name = "generate_constants" -required-features = ["rand", "rand_chacha"] +required-features = ["rand_chacha"] [[bench]] name = "field_arithmetic" diff --git a/plonky2/benches/hashing.rs b/plonky2/benches/hashing.rs index 673e0572..485a6361 100644 --- a/plonky2/benches/hashing.rs +++ b/plonky2/benches/hashing.rs @@ -1,10 +1,8 @@ -#![allow(incomplete_features)] -#![feature(generic_const_exprs)] - mod allocator; use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; use plonky2::field::goldilocks_field::GoldilocksField; +use plonky2::field::types::Sample; use plonky2::hash::hash_types::{BytesHash, RichField}; use plonky2::hash::hashing::SPONGE_WIDTH; use plonky2::hash::keccak::KeccakHash; @@ -27,7 +25,7 @@ pub(crate) fn bench_poseidon(c: &mut Criterion) { &format!("poseidon<{}, {SPONGE_WIDTH}>", type_name::()), |b| { b.iter_batched( - || F::rand_arr::(), + || F::rand_array::(), |state| F::poseidon(state), BatchSize::SmallInput, ) diff --git a/plonky2/benches/merkle.rs b/plonky2/benches/merkle.rs index 88302ae9..f9bae127 100644 --- a/plonky2/benches/merkle.rs +++ b/plonky2/benches/merkle.rs @@ -1,5 +1,3 @@ -#![feature(generic_const_exprs)] - mod allocator; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; @@ -13,10 +11,7 @@ use tynm::type_name; const ELEMS_PER_LEAF: usize = 135; -pub(crate) fn bench_merkle_tree>(c: &mut Criterion) -where - [(); H::HASH_SIZE]:, -{ +pub(crate) fn bench_merkle_tree>(c: &mut Criterion) { let mut group = c.benchmark_group(&format!( "merkle-tree<{}, {}>", type_name::(), diff --git a/plonky2/benches/reverse_index_bits.rs b/plonky2/benches/reverse_index_bits.rs index 8916fb5d..5c838a18 100644 --- a/plonky2/benches/reverse_index_bits.rs +++ b/plonky2/benches/reverse_index_bits.rs @@ -2,7 +2,7 @@ mod allocator; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; use plonky2::field::goldilocks_field::GoldilocksField; -use plonky2::field::types::Field; +use plonky2::field::types::Sample; use plonky2_util::{reverse_index_bits, reverse_index_bits_in_place}; type F = GoldilocksField; diff --git a/plonky2/benches/transpose.rs b/plonky2/benches/transpose.rs index 64d103ad..c2aecd5f 100644 --- a/plonky2/benches/transpose.rs +++ b/plonky2/benches/transpose.rs @@ -2,7 +2,7 @@ mod allocator; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; use plonky2::field::goldilocks_field::GoldilocksField; -use plonky2::field::types::Field; +use plonky2::field::types::Sample; use plonky2::util::transpose; fn criterion_benchmark(c: &mut Criterion) { diff --git a/plonky2/examples/bench_recursion.rs b/plonky2/examples/bench_recursion.rs index c9d24be7..059ca963 100644 --- a/plonky2/examples/bench_recursion.rs +++ b/plonky2/examples/bench_recursion.rs @@ -2,30 +2,28 @@ // custom CLI argument parsing (even with harness disabled). We could also have // put it in `src/bin/`, but then we wouldn't have access to // `[dev-dependencies]`. 
-#![allow(incomplete_features)] -#![feature(generic_const_exprs)] -use std::{num::ParseIntError, ops::RangeInclusive, str::FromStr}; +use core::num::ParseIntError; +use core::ops::RangeInclusive; +use core::str::FromStr; use anyhow::{anyhow, Context as _, Result}; use log::{info, Level, LevelFilter}; -use plonky2::{ - gates::noop::NoopGate, - hash::hash_types::RichField, - iop::witness::{PartialWitness, Witness}, - plonk::{ - circuit_builder::CircuitBuilder, - circuit_data::{ - CircuitConfig, CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, - }, - config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig}, - proof::{CompressedProofWithPublicInputs, ProofWithPublicInputs}, - prover::prove, - }, - util::timing::TimingTree, +use maybe_rayon::rayon; +use plonky2::gates::noop::NoopGate; +use plonky2::hash::hash_types::RichField; +use plonky2::iop::witness::{PartialWitness, Witness}; +use plonky2::plonk::circuit_builder::CircuitBuilder; +use plonky2::plonk::circuit_data::{ + CircuitConfig, CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, }; +use plonky2::plonk::config::{AlgebraicHasher, GenericConfig, PoseidonGoldilocksConfig}; +use plonky2::plonk::proof::{CompressedProofWithPublicInputs, ProofWithPublicInputs}; +use plonky2::plonk::prover::prove; +use plonky2::util::timing::TimingTree; use plonky2_field::extension::Extendable; -use rand::{rngs::OsRng, RngCore, SeedableRng}; +use rand::rngs::OsRng; +use rand::{RngCore, SeedableRng}; use rand_chacha::ChaCha8Rng; use structopt::StructOpt; @@ -66,10 +64,7 @@ struct Options { fn dummy_proof, C: GenericConfig, const D: usize>( config: &CircuitConfig, log2_size: usize, -) -> Result> -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> Result> { // 'size' is in degree, but we want number of noop gates. A non-zero amount of padding will be added and size will be rounded to the next power of two. To hit our target size, we go just under the previous power of two and hope padding is less than half the proof. 
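The comment above explains how `dummy_proof` hits a target degree: request just over half the target size in noop gates and let padding round the degree up. A worked sketch of that arithmetic follows; only the `0 =>` arm of the real `match` is visible in this hunk, so the remaining arms below are an assumption used to illustrate the stated strategy, not a quotation of the source.

```rust
// Hypothetical reconstruction of the sizing rule described in the comment above.
fn num_dummy_gates(log2_size: u32) -> usize {
    match log2_size {
        0 => panic!("size must be at least 1"),
        1 => 0,
        2 => 1,
        // Just over half the target size, so padding rounds the degree up to
        // 2^log2_size rather than down to 2^(log2_size - 1).
        n => (1 << (n - 1)) + 1,
    }
}

fn main() {
    // Target degree 2^12 = 4096: request 2^11 + 1 = 2049 noop gates, assuming the
    // builder's own gates plus padding stay under half the proof.
    assert_eq!(num_dummy_gates(12), 2049);
}
```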
let num_dummy_gates = match log2_size { 0 => return Err(anyhow!("size must be at least 1")), @@ -107,21 +102,17 @@ fn recursive_proof< ) -> Result> where InnerC::Hasher: AlgebraicHasher, - [(); C::Hasher::HASH_SIZE]:, { let (inner_proof, inner_vd, inner_cd) = inner; let mut builder = CircuitBuilder::::new(config.clone()); - let mut pw = PartialWitness::new(); let pt = builder.add_virtual_proof_with_pis::(inner_cd); - pw.set_proof_with_pis_target(&pt, inner_proof); let inner_data = VerifierCircuitTarget { constants_sigmas_cap: builder.add_virtual_cap(inner_cd.config.fri_config.cap_height), circuit_digest: builder.add_virtual_hash(), }; - pw.set_verifier_data_target(&inner_data, inner_vd); - builder.verify_proof::(pt, &inner_data, inner_cd); + builder.verify_proof::(&pt, &inner_data, inner_cd); builder.print_gate_counts(0); if let Some(min_degree_bits) = min_degree_bits { @@ -137,6 +128,10 @@ where let data = builder.build::(); + let mut pw = PartialWitness::new(); + pw.set_proof_with_pis_target(&pt, inner_proof); + pw.set_verifier_data_target(&inner_data, inner_vd); + let mut timing = TimingTree::new("prove", Level::Debug); let proof = prove(&data.prover_only, &data.common, pw, &mut timing)?; timing.print(); @@ -151,11 +146,8 @@ fn test_serialization, C: GenericConfig, proof: &ProofWithPublicInputs, vd: &VerifierOnlyCircuitData, cd: &CommonCircuitData, -) -> Result<()> -where - [(); C::Hasher::HASH_SIZE]:, -{ - let proof_bytes = proof.to_bytes()?; +) -> Result<()> { + let proof_bytes = proof.to_bytes(); info!("Proof length: {} bytes", proof_bytes.len()); let proof_from_bytes = ProofWithPublicInputs::from_bytes(proof_bytes, cd)?; assert_eq!(proof, &proof_from_bytes); @@ -168,7 +160,7 @@ where info!("{:.4}s to compress proof", now.elapsed().as_secs_f64()); assert_eq!(proof, &decompressed_compressed_proof); - let compressed_proof_bytes = compressed_proof.to_bytes()?; + let compressed_proof_bytes = compressed_proof.to_bytes(); info!( "Compressed proof length: {} bytes", compressed_proof_bytes.len() diff --git a/plonky2/examples/square_root.rs b/plonky2/examples/square_root.rs index 7d4d2fee..512c842c 100644 --- a/plonky2/examples/square_root.rs +++ b/plonky2/examples/square_root.rs @@ -1,7 +1,7 @@ -use std::marker::PhantomData; +use core::marker::PhantomData; use anyhow::Result; -use plonky2::field::types::{Field, PrimeField}; +use plonky2::field::types::{PrimeField, Sample}; use plonky2::hash::hash_types::RichField; use plonky2::iop::generator::{GeneratedValues, SimpleGenerator}; use plonky2::iop::target::Target; diff --git a/plonky2/src/bin/generate_constants.rs b/plonky2/src/bin/generate_constants.rs index c5f8fae2..608df815 100644 --- a/plonky2/src/bin/generate_constants.rs +++ b/plonky2/src/bin/generate_constants.rs @@ -2,8 +2,8 @@ #![allow(clippy::needless_range_loop)] -use plonky2_field::goldilocks_field::GoldilocksField; -use plonky2_field::types::Field64; +use plonky2::field::goldilocks_field::GoldilocksField; +use plonky2::field::types::Field64; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; diff --git a/plonky2/src/fri/challenges.rs b/plonky2/src/fri/challenges.rs index 011a8897..7a184504 100644 --- a/plonky2/src/fri/challenges.rs +++ b/plonky2/src/fri/challenges.rs @@ -1,6 +1,5 @@ -use plonky2_field::extension::Extendable; -use plonky2_field::polynomial::PolynomialCoeffs; - +use crate::field::extension::Extendable; +use crate::field::polynomial::PolynomialCoeffs; use crate::fri::proof::{FriChallenges, FriChallengesTarget}; use crate::fri::structure::{FriOpenings, 
FriOpeningsTarget}; use crate::fri::FriConfig; @@ -49,16 +48,8 @@ impl> Challenger { self.observe_extension_elements(&final_poly.coeffs); - let fri_pow_response = C::InnerHasher::hash_no_pad( - &self - .get_hash() - .elements - .iter() - .copied() - .chain(Some(pow_witness)) - .collect::>(), - ) - .elements[0]; + self.observe_element(pow_witness); + let fri_pow_response = self.get_challenge(); let fri_query_indices = (0..num_fri_queries) .map(|_| self.get_challenge().to_canonical_u64() as usize % lde_size) @@ -105,16 +96,8 @@ impl, H: AlgebraicHasher, const D: usize> self.observe_extension_elements(&final_poly.0); - let pow_inputs = self - .get_hash(builder) - .elements - .iter() - .copied() - .chain(Some(pow_witness)) - .collect(); - let fri_pow_response = builder - .hash_n_to_hash_no_pad::(pow_inputs) - .elements[0]; + self.observe_element(pow_witness); + let fri_pow_response = self.get_challenge(builder); let fri_query_indices = (0..num_fri_queries) .map(|_| self.get_challenge(builder)) diff --git a/plonky2/src/fri/mod.rs b/plonky2/src/fri/mod.rs index 90f1c940..ca800b98 100644 --- a/plonky2/src/fri/mod.rs +++ b/plonky2/src/fri/mod.rs @@ -1,3 +1,5 @@ +use alloc::vec::Vec; + use crate::fri::reduction_strategies::FriReductionStrategy; mod challenges; diff --git a/plonky2/src/fri/oracle.rs b/plonky2/src/fri/oracle.rs index 75f8847a..cc114d98 100644 --- a/plonky2/src/fri/oracle.rs +++ b/plonky2/src/fri/oracle.rs @@ -1,12 +1,14 @@ +use alloc::format; +use alloc::vec::Vec; + use itertools::Itertools; use maybe_rayon::*; -use plonky2_field::extension::Extendable; -use plonky2_field::fft::FftRootTable; -use plonky2_field::packed::PackedField; -use plonky2_field::polynomial::{PolynomialCoeffs, PolynomialValues}; -use plonky2_field::types::Field; -use plonky2_util::{log2_strict, reverse_index_bits_in_place}; +use crate::field::extension::Extendable; +use crate::field::fft::FftRootTable; +use crate::field::packed::PackedField; +use crate::field::polynomial::{PolynomialCoeffs, PolynomialValues}; +use crate::field::types::Field; use crate::fri::proof::FriProof; use crate::fri::prover::fri_proof; use crate::fri::structure::{FriBatchInfo, FriInstanceInfo}; @@ -14,12 +16,11 @@ use crate::fri::FriParams; use crate::hash::hash_types::RichField; use crate::hash::merkle_tree::MerkleTree; use crate::iop::challenger::Challenger; -use crate::plonk::config::{GenericConfig, Hasher}; +use crate::plonk::config::GenericConfig; use crate::timed; use crate::util::reducing::ReducingFactor; -use crate::util::reverse_bits; use crate::util::timing::TimingTree; -use crate::util::transpose; +use crate::util::{log2_strict, reverse_bits, reverse_index_bits_in_place, transpose}; /// Four (~64 bit) field elements gives ~128 bit security. 
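The `challenges.rs` hunk above replaces the hand-rolled `hash_no_pad(state ‖ pow_witness)` with the challenger's own duplex, so the native and in-circuit transcripts derive `fri_pow_response` identically: observe the witness, then squeeze one challenge. A minimal sketch of that observe-then-squeeze pattern (a toy `Sponge`, not the crate's `Challenger`):

```rust
/// Toy duplex sponge: absorbing buffers inputs; squeezing overwrites part of the state
/// with the buffered inputs, permutes, and returns a fresh output. Observing anything
/// (here, the PoW witness) therefore invalidates earlier outputs, which is exactly what
/// the PoW response should commit to.
struct Sponge {
    state: [u64; 12],
    input_buffer: Vec<u64>,
}

impl Sponge {
    fn observe(&mut self, x: u64) {
        self.input_buffer.push(x);
    }

    fn squeeze(&mut self) -> u64 {
        for (i, x) in self.input_buffer.drain(..).enumerate() {
            self.state[i] = x; // overwrite the rate portion with pending inputs
        }
        toy_permutation(&mut self.state);
        self.state[0]
    }
}

fn toy_permutation(state: &mut [u64; 12]) {
    // Stand-in for the Poseidon permutation used by the real challenger.
    for x in state.iter_mut() {
        *x = x.wrapping_mul(0x9E37_79B9_7F4A_7C15).rotate_left(7) ^ 0xA5A5_A5A5;
    }
}
```

With this pattern, both sides of the protocol reduce to `observe_element(pow_witness)` followed by `get_challenge()`, as the hunk shows.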
pub const SALT_SIZE: usize = 4; @@ -45,10 +46,7 @@ impl, C: GenericConfig, const D: usize> cap_height: usize, timing: &mut TimingTree, fft_root_table: Option<&FftRootTable>, - ) -> Self - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> Self { let coeffs = timed!( timing, "IFFT", @@ -73,10 +71,7 @@ impl, C: GenericConfig, const D: usize> cap_height: usize, timing: &mut TimingTree, fft_root_table: Option<&FftRootTable>, - ) -> Self - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> Self { let degree = polynomials[0].len(); let lde_values = timed!( timing, @@ -169,10 +164,7 @@ impl, C: GenericConfig, const D: usize> challenger: &mut Challenger, fri_params: &FriParams, timing: &mut TimingTree, - ) -> FriProof - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> FriProof { assert!(D > 1, "Not implemented for D=1."); let alpha = challenger.get_extension_challenge::(); let mut alpha = ReducingFactor::new(alpha); diff --git a/plonky2/src/fri/proof.rs b/plonky2/src/fri/proof.rs index f7945b28..f841b274 100644 --- a/plonky2/src/fri/proof.rs +++ b/plonky2/src/fri/proof.rs @@ -1,14 +1,15 @@ -use std::collections::HashMap; +use alloc::vec; +use alloc::vec::Vec; +use hashbrown::HashMap; use itertools::izip; -use plonky2_field::extension::{flatten, unflatten, Extendable}; -use plonky2_field::polynomial::PolynomialCoeffs; use serde::{Deserialize, Serialize}; +use crate::field::extension::{flatten, unflatten, Extendable}; +use crate::field::polynomial::PolynomialCoeffs; use crate::fri::FriParams; use crate::gadgets::polynomial::PolynomialCoeffsExtTarget; -use crate::hash::hash_types::MerkleCapTarget; -use crate::hash::hash_types::RichField; +use crate::hash::hash_types::{MerkleCapTarget, RichField}; use crate::hash::merkle_proofs::{MerkleProof, MerkleProofTarget}; use crate::hash::merkle_tree::MerkleCap; use crate::hash::path_compression::{compress_merkle_proofs, decompress_merkle_proofs}; @@ -111,7 +112,7 @@ pub struct FriProof, H: Hasher, const D: usize> pub pow_witness: F, } -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct FriProofTarget { pub commit_phase_merkle_caps: Vec, pub query_round_proofs: Vec>, @@ -245,10 +246,7 @@ impl, H: Hasher, const D: usize> CompressedFriPr challenges: &ProofChallenges, fri_inferred_elements: FriInferredElements, params: &FriParams, - ) -> FriProof - where - [(); H::HASH_SIZE]:, - { + ) -> FriProof { let CompressedFriProof { commit_phase_merkle_caps, query_round_proofs, diff --git a/plonky2/src/fri/prover.rs b/plonky2/src/fri/prover.rs index 71efe98a..e65a9731 100644 --- a/plonky2/src/fri/prover.rs +++ b/plonky2/src/fri/prover.rs @@ -1,17 +1,19 @@ -use itertools::Itertools; -use maybe_rayon::*; -use plonky2_field::extension::{flatten, unflatten, Extendable}; -use plonky2_field::polynomial::{PolynomialCoeffs, PolynomialValues}; -use plonky2_util::reverse_index_bits_in_place; +use alloc::vec::Vec; +use maybe_rayon::*; + +use crate::field::extension::{flatten, unflatten, Extendable}; +use crate::field::polynomial::{PolynomialCoeffs, PolynomialValues}; use crate::fri::proof::{FriInitialTreeProof, FriProof, FriQueryRound, FriQueryStep}; use crate::fri::{FriConfig, FriParams}; -use crate::hash::hash_types::{HashOut, RichField}; +use crate::hash::hash_types::RichField; +use crate::hash::hashing::{PlonkyPermutation, SPONGE_RATE}; use crate::hash::merkle_tree::MerkleTree; use crate::iop::challenger::Challenger; use crate::plonk::config::{GenericConfig, Hasher}; use crate::plonk::plonk_common::reduce_with_powers; use crate::timed; +use crate::util::reverse_index_bits_in_place; use 
crate::util::timing::TimingTree; /// Builds a FRI proof. @@ -24,10 +26,7 @@ pub fn fri_proof, C: GenericConfig, const challenger: &mut Challenger, fri_params: &FriParams, timing: &mut TimingTree, -) -> FriProof -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> FriProof { let n = lde_polynomial_values.len(); assert_eq!(lde_polynomial_coeffs.len(), n); @@ -44,11 +43,10 @@ where ); // PoW phase - let current_hash = challenger.get_hash(); let pow_witness = timed!( timing, "find proof-of-work witness", - fri_proof_of_work::(current_hash, &fri_params.config) + fri_proof_of_work::(challenger, &fri_params.config) ); // Query phase @@ -63,18 +61,17 @@ where } } +type FriCommitedTrees = ( + Vec>::Hasher>>, + PolynomialCoeffs<>::Extension>, +); + fn fri_committed_trees, C: GenericConfig, const D: usize>( mut coeffs: PolynomialCoeffs, mut values: PolynomialValues, challenger: &mut Challenger, fri_params: &FriParams, -) -> ( - Vec>, - PolynomialCoeffs, -) -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> FriCommitedTrees { let mut trees = Vec::new(); let mut shift = F::MULTIPLICATIVE_GROUP_GENERATOR; @@ -114,28 +111,55 @@ where (trees, coeffs) } +/// Performs the proof-of-work (a.k.a. grinding) step of the FRI protocol. Returns the PoW witness. fn fri_proof_of_work, C: GenericConfig, const D: usize>( - current_hash: HashOut, + challenger: &mut Challenger, config: &FriConfig, ) -> F { - (0..=F::NEG_ONE.to_canonical_u64()) + let min_leading_zeros = config.proof_of_work_bits + (64 - F::order().bits()) as u32; + + // The easiest implementation would be to repeatedly clone our Challenger. With each clone, we'd + // observe an incrementing PoW witness, then get the PoW response. If it contained sufficient + // leading zeros, we'd end the search, and store this clone as our new challenger. + // + // However, performance is critical here. We want to avoid cloning Challenger, particularly + // since it stores vectors, which means allocations. We'd like a more compact state to clone. + // + // We know that a duplex will be performed right after we send the PoW witness, so we can ignore + // any output_buffer, which will be invalidated. We also know input_buffer.len() < SPONGE_WIDTH, + // an invariant of Challenger. + // + // We separate the duplex operation into two steps, one which can be performed now, and the + // other which depends on the PoW witness candidate. The first step is to overwrite our sponge + // state with any inputs (excluding the PoW witness candidate). The second step is to overwrite + // one more element of our sponge state with the candidate, then apply the permutation, + // obtaining our duplex's post-state which contains the PoW response.
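The `min_leading_zeros` formula introduced above accounts for the zeros a canonical field element gets for free: an element of a field whose order has `b` bits always has at least `64 - b` leading zero bits, so only zeros beyond those should count toward the grind. A small worked example of that reading (stated as an interpretation, not taken from the diff):

```rust
/// `proof_of_work_bits` extra leading zeros are required beyond the `64 - b` that any
/// canonical element of a `b`-bit field already has.
fn min_leading_zeros(proof_of_work_bits: u32, field_order_bits: u32) -> u32 {
    proof_of_work_bits + (64 - field_order_bits)
}

fn main() {
    // Goldilocks: p = 2^64 - 2^32 + 1 has 64 bits, so no zeros come for free.
    assert_eq!(min_leading_zeros(16, 64), 16);
    // A hypothetical 62-bit field would get 2 leading zeros for free, so 18 are required in total.
    assert_eq!(min_leading_zeros(16, 62), 18);
}
```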
+ let mut duplex_intermediate_state = challenger.sponge_state; + let witness_input_pos = challenger.input_buffer.len(); + for (i, input) in challenger.input_buffer.iter().enumerate() { + duplex_intermediate_state[i] = *input; + } + + let pow_witness = (0..=F::NEG_ONE.to_canonical_u64()) .into_par_iter() - .find_any(|&i| { - C::InnerHasher::hash_no_pad( - ¤t_hash - .elements - .iter() - .copied() - .chain(Some(F::from_canonical_u64(i))) - .collect_vec(), - ) - .elements[0] - .to_canonical_u64() - .leading_zeros() - >= config.proof_of_work_bits + (64 - F::order().bits()) as u32 + .find_any(|&candidate| { + let mut duplex_state = duplex_intermediate_state; + duplex_state[witness_input_pos] = F::from_canonical_u64(candidate); + duplex_state = + <>::Hasher as Hasher>::Permutation::permute(duplex_state); + let pow_response = duplex_state[SPONGE_RATE - 1]; + let leading_zeros = pow_response.to_canonical_u64().leading_zeros(); + leading_zeros >= min_leading_zeros }) .map(F::from_canonical_u64) - .expect("Proof of work failed. This is highly unlikely!") + .expect("Proof of work failed. This is highly unlikely!"); + + // Recompute pow_response using our normal Challenger code, and make sure it matches. + challenger.observe_element(pow_witness); + let pow_response = challenger.get_challenge(); + let leading_zeros = pow_response.to_canonical_u64().leading_zeros(); + assert!(leading_zeros >= min_leading_zeros); + pow_witness } fn fri_prover_query_rounds< diff --git a/plonky2/src/fri/recursive_verifier.rs b/plonky2/src/fri/recursive_verifier.rs index d14420c1..822dd559 100644 --- a/plonky2/src/fri/recursive_verifier.rs +++ b/plonky2/src/fri/recursive_verifier.rs @@ -1,7 +1,9 @@ -use itertools::Itertools; -use plonky2_field::extension::Extendable; -use plonky2_util::{log2_strict, reverse_index_bits_in_place}; +use alloc::vec::Vec; +use alloc::{format, vec}; +use itertools::Itertools; + +use crate::field::extension::Extendable; use crate::fri::proof::{ FriChallengesTarget, FriInitialTreeProofTarget, FriProofTarget, FriQueryRoundTarget, FriQueryStepTarget, @@ -13,13 +15,13 @@ use crate::gates::high_degree_interpolation::HighDegreeInterpolationGate; use crate::gates::interpolation::InterpolationGate; use crate::gates::low_degree_interpolation::LowDegreeInterpolationGate; use crate::gates::random_access::RandomAccessGate; -use crate::hash::hash_types::MerkleCapTarget; -use crate::hash::hash_types::RichField; +use crate::hash::hash_types::{MerkleCapTarget, RichField}; use crate::iop::ext_target::{flatten_target, ExtensionTarget}; use crate::iop::target::{BoolTarget, Target}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::config::{AlgebraicHasher, GenericConfig}; use crate::util::reducing::ReducingFactorTarget; +use crate::util::{log2_strict, reverse_index_bits_in_place}; use crate::with_context; impl, const D: usize> CircuitBuilder { diff --git a/plonky2/src/fri/reduction_strategies.rs b/plonky2/src/fri/reduction_strategies.rs index 4252564e..409a0224 100644 --- a/plonky2/src/fri/reduction_strategies.rs +++ b/plonky2/src/fri/reduction_strategies.rs @@ -1,3 +1,6 @@ +use alloc::vec; +use alloc::vec::Vec; +#[cfg(feature = "timing")] use std::time::Instant; use log::debug; @@ -32,7 +35,6 @@ impl FriReductionStrategy { ) -> Vec { match self { FriReductionStrategy::Fixed(reduction_arity_bits) => reduction_arity_bits.to_vec(), - &FriReductionStrategy::ConstantArityBits(arity_bits, final_poly_bits) => { let mut result = Vec::new(); while degree_bits > final_poly_bits @@ -45,7 +47,6 @@ impl 
FriReductionStrategy { result.shrink_to_fit(); result } - FriReductionStrategy::MinSize(opt_max_arity_bits) => { min_size_arity_bits(degree_bits, rate_bits, num_queries, *opt_max_arity_bits) } @@ -63,11 +64,13 @@ fn min_size_arity_bits( // in an optimal sequence, we would need a really massive polynomial. let max_arity_bits = opt_max_arity_bits.unwrap_or(4); + #[cfg(feature = "timing")] let start = Instant::now(); let (mut arity_bits, fri_proof_size) = min_size_arity_bits_helper(degree_bits, rate_bits, num_queries, max_arity_bits, vec![]); arity_bits.shrink_to_fit(); + #[cfg(feature = "timing")] debug!( "min_size_arity_bits took {:.3}s", start.elapsed().as_secs_f32() diff --git a/plonky2/src/fri/structure.rs b/plonky2/src/fri/structure.rs index 0d64ae20..7d7436d5 100644 --- a/plonky2/src/fri/structure.rs +++ b/plonky2/src/fri/structure.rs @@ -1,7 +1,8 @@ //! Information about the structure of a FRI instance, in terms of the oracles and polynomials //! involved, and the points they are opened at. -use std::ops::Range; +use alloc::vec::Vec; +use core::ops::Range; use crate::field::extension::Extendable; use crate::hash::hash_types::RichField; diff --git a/plonky2/src/fri/validate_shape.rs b/plonky2/src/fri/validate_shape.rs index 0ef85c4c..526da8f7 100644 --- a/plonky2/src/fri/validate_shape.rs +++ b/plonky2/src/fri/validate_shape.rs @@ -1,6 +1,6 @@ use anyhow::ensure; -use plonky2_field::extension::Extendable; +use crate::field::extension::Extendable; use crate::fri::proof::{FriProof, FriQueryRound, FriQueryStep}; use crate::fri::structure::FriInstanceInfo; use crate::fri::FriParams; diff --git a/plonky2/src/fri/verifier.rs b/plonky2/src/fri/verifier.rs index 02816000..6644b971 100644 --- a/plonky2/src/fri/verifier.rs +++ b/plonky2/src/fri/verifier.rs @@ -1,9 +1,10 @@ -use anyhow::{ensure, Result}; -use plonky2_field::extension::{flatten, Extendable, FieldExtension}; -use plonky2_field::interpolation::{barycentric_weights, interpolate}; -use plonky2_field::types::Field; -use plonky2_util::{log2_strict, reverse_index_bits_in_place}; +use alloc::vec::Vec; +use anyhow::{ensure, Result}; + +use crate::field::extension::{flatten, Extendable, FieldExtension}; +use crate::field::interpolation::{barycentric_weights, interpolate}; +use crate::field::types::Field; use crate::fri::proof::{FriChallenges, FriInitialTreeProof, FriProof, FriQueryRound}; use crate::fri::structure::{FriBatchInfo, FriInstanceInfo, FriOpenings}; use crate::fri::validate_shape::validate_fri_proof_shape; @@ -13,7 +14,7 @@ use crate::hash::merkle_proofs::verify_merkle_proof_to_cap; use crate::hash::merkle_tree::MerkleCap; use crate::plonk::config::{GenericConfig, Hasher}; use crate::util::reducing::ReducingFactor; -use crate::util::reverse_bits; +use crate::util::{log2_strict, reverse_bits, reverse_index_bits_in_place}; /// Computes P'(x^arity) from {P(x*g^i)}_(i=0..arity), where g is a `arity`-th root of unity /// and P' is the FRI reduced polynomial. @@ -57,17 +58,18 @@ pub(crate) fn fri_verify_proof_of_work, const D: us Ok(()) } -pub fn verify_fri_proof, C: GenericConfig, const D: usize>( +pub fn verify_fri_proof< + F: RichField + Extendable, + C: GenericConfig, + const D: usize, +>( instance: &FriInstanceInfo, openings: &FriOpenings, challenges: &FriChallenges, initial_merkle_caps: &[MerkleCap], proof: &FriProof, params: &FriParams, -) -> Result<()> -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> Result<()> { validate_fri_proof_shape::(proof, instance, params)?; // Size of the LDE domain. 
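The doc comment on the FRI fold (in the `verifier.rs` hunk above) compresses the key identity. Spelled out, with $a$ the arity, $\beta$ the round's folding challenge, and $g$ an $a$-th root of unity (this is the standard reading of the fold, not text from the diff):

$$
P(X) = \sum_{j=0}^{a-1} X^j\,P_j(X^a), \qquad P'(Y) = \sum_{j=0}^{a-1} \beta^j\,P_j(Y).
$$

Since $(x g^i)^a = x^a$, the degree-$<a$ interpolant $Q$ of $\{(x g^i,\,P(x g^i))\}_{i=0}^{a-1}$ is $Q(X) = \sum_j P_j(x^a)\,X^j$, so the verifier obtains $P'(x^a) = Q(\beta)$ by barycentric interpolation of the queried coset values, which is what the `barycentric_weights`/`interpolate` imports above are for.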
@@ -109,10 +111,7 @@ fn fri_verify_initial_proof>( x_index: usize, proof: &FriInitialTreeProof, initial_merkle_caps: &[MerkleCap], -) -> Result<()> -where - [(); H::HASH_SIZE]:, -{ +) -> Result<()> { for ((evals, merkle_proof), cap) in proof.evals_proofs.iter().zip(initial_merkle_caps) { verify_merkle_proof_to_cap::(evals.clone(), x_index, cap, merkle_proof)?; } @@ -177,10 +176,7 @@ fn fri_verifier_query_round< n: usize, round_proof: &FriQueryRound, params: &FriParams, -) -> Result<()> -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> Result<()> { fri_verify_initial_proof::( x_index, &round_proof.initial_trees_proof, diff --git a/plonky2/src/fri/witness_util.rs b/plonky2/src/fri/witness_util.rs index ddf3c401..670319de 100644 --- a/plonky2/src/fri/witness_util.rs +++ b/plonky2/src/fri/witness_util.rs @@ -1,6 +1,6 @@ use itertools::Itertools; -use plonky2_field::extension::Extendable; +use crate::field::extension::Extendable; use crate::fri::proof::{FriProof, FriProofTarget}; use crate::hash::hash_types::RichField; use crate::iop::witness::Witness; diff --git a/plonky2/src/gadgets/arithmetic.rs b/plonky2/src/gadgets/arithmetic.rs index 33facd74..3e42fa11 100644 --- a/plonky2/src/gadgets/arithmetic.rs +++ b/plonky2/src/gadgets/arithmetic.rs @@ -1,8 +1,9 @@ -use std::borrow::Borrow; - -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field64; +use alloc::vec; +use alloc::vec::Vec; +use core::borrow::Borrow; +use crate::field::extension::Extendable; +use crate::field::types::Field64; use crate::gates::arithmetic_base::ArithmeticGate; use crate::gates::exponentiation::ExponentiationGate; use crate::hash::hash_types::RichField; diff --git a/plonky2/src/gadgets/arithmetic_extension.rs b/plonky2/src/gadgets/arithmetic_extension.rs index 23c401b8..e37d4deb 100644 --- a/plonky2/src/gadgets/arithmetic_extension.rs +++ b/plonky2/src/gadgets/arithmetic_extension.rs @@ -1,10 +1,9 @@ -use std::borrow::Borrow; - -use plonky2_field::extension::FieldExtension; -use plonky2_field::extension::{Extendable, OEF}; -use plonky2_field::types::{Field, Field64}; -use plonky2_util::bits_u64; +use alloc::vec; +use alloc::vec::Vec; +use core::borrow::Borrow; +use crate::field::extension::{Extendable, FieldExtension, OEF}; +use crate::field::types::{Field, Field64}; use crate::gates::arithmetic_extension::ArithmeticExtensionGate; use crate::gates::multiplication_extension::MulExtensionGate; use crate::hash::hash_types::RichField; @@ -13,6 +12,7 @@ use crate::iop::generator::{GeneratedValues, SimpleGenerator}; use crate::iop::target::Target; use crate::iop::witness::{PartitionWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; +use crate::util::bits_u64; impl, const D: usize> CircuitBuilder { pub fn arithmetic_extension( @@ -569,9 +569,9 @@ pub(crate) struct ExtensionArithmeticOperation, const #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::extension::algebra::ExtensionAlgebra; - use plonky2_field::types::Field; + use crate::field::extension::algebra::ExtensionAlgebra; + use crate::field::types::Sample; use crate::iop::ext_target::ExtensionAlgebraTarget; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; @@ -665,8 +665,8 @@ mod tests { builder.connect_extension(zt.0[i], comp_zt.0[i]); } - let x = ExtensionAlgebra::(FF::rand_arr()); - let y = ExtensionAlgebra::(FF::rand_arr()); + let x = ExtensionAlgebra::(FF::rand_array()); + let y = ExtensionAlgebra::(FF::rand_array()); let z = x * y; for i in 0..D { 
pw.set_extension_target(xt.0[i], x.0[i]); diff --git a/plonky2/src/gadgets/hash.rs b/plonky2/src/gadgets/hash.rs index ec235340..2b8bfac5 100644 --- a/plonky2/src/gadgets/hash.rs +++ b/plonky2/src/gadgets/hash.rs @@ -1,5 +1,4 @@ -use plonky2_field::extension::Extendable; - +use crate::field::extension::Extendable; use crate::hash::hash_types::RichField; use crate::hash::hashing::SPONGE_WIDTH; use crate::iop::target::{BoolTarget, Target}; diff --git a/plonky2/src/gadgets/polynomial.rs b/plonky2/src/gadgets/polynomial.rs index 60bfcca4..80beb62f 100644 --- a/plonky2/src/gadgets/polynomial.rs +++ b/plonky2/src/gadgets/polynomial.rs @@ -1,12 +1,13 @@ -use plonky2_field::extension::Extendable; +use alloc::vec::Vec; +use crate::field::extension::Extendable; use crate::hash::hash_types::RichField; use crate::iop::ext_target::{ExtensionAlgebraTarget, ExtensionTarget}; use crate::iop::target::Target; use crate::plonk::circuit_builder::CircuitBuilder; use crate::util::reducing::ReducingFactorTarget; -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct PolynomialCoeffsExtTarget(pub Vec>); impl PolynomialCoeffsExtTarget { @@ -14,6 +15,10 @@ impl PolynomialCoeffsExtTarget { self.0.len() } + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub fn eval_scalar>( &self, builder: &mut CircuitBuilder, diff --git a/plonky2/src/gadgets/random_access.rs b/plonky2/src/gadgets/random_access.rs index 0d32c360..d3a3ff1b 100644 --- a/plonky2/src/gadgets/random_access.rs +++ b/plonky2/src/gadgets/random_access.rs @@ -1,11 +1,12 @@ -use plonky2_field::extension::Extendable; -use plonky2_util::log2_strict; +use alloc::vec::Vec; +use crate::field::extension::Extendable; use crate::gates::random_access::RandomAccessGate; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::Target; use crate::plonk::circuit_builder::CircuitBuilder; +use crate::util::log2_strict; impl, const D: usize> CircuitBuilder { /// Checks that a `Target` matches a vector at a non-deterministic index. 
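The gadget documented above ("matches a vector at a non-deterministic index") is easiest to see in use. A short sketch, assuming the `random_access` method on `CircuitBuilder` with the shape implied by this file (the exact signature may differ):

```rust
use plonky2::field::types::Field;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};

fn main() -> anyhow::Result<()> {
    const D: usize = 2;
    type C = PoseidonGoldilocksConfig;
    type F = <C as GenericConfig<D>>::Field;

    let mut builder = CircuitBuilder::<F, D>::new(CircuitConfig::standard_recursion_config());
    // A power-of-two-length vector of wires and an index wire (constant here; in practice
    // the index is usually a private witness).
    let v: Vec<_> = (0..4)
        .map(|i| builder.constant(F::from_canonical_usize(10 * i)))
        .collect();
    let index = builder.constant(F::TWO);
    let selected = builder.random_access(index, v);
    let expected = builder.constant(F::from_canonical_usize(20));
    builder.connect(selected, expected);

    let data = builder.build::<C>();
    let proof = data.prove(PartialWitness::new())?;
    data.verify(proof)
}
```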
@@ -55,9 +56,9 @@ impl, const D: usize> CircuitBuilder { #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::types::Field; use super::*; + use crate::field::types::{Field, Sample}; use crate::iop::witness::PartialWitness; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; diff --git a/plonky2/src/gadgets/range_check.rs b/plonky2/src/gadgets/range_check.rs index 267b1040..22977bc1 100644 --- a/plonky2/src/gadgets/range_check.rs +++ b/plonky2/src/gadgets/range_check.rs @@ -1,5 +1,7 @@ -use plonky2_field::extension::Extendable; +use alloc::vec; +use alloc::vec::Vec; +use crate::field::extension::Extendable; use crate::hash::hash_types::RichField; use crate::iop::generator::{GeneratedValues, SimpleGenerator}; use crate::iop::target::{BoolTarget, Target}; diff --git a/plonky2/src/gadgets/select.rs b/plonky2/src/gadgets/select.rs index d234a003..c2531488 100644 --- a/plonky2/src/gadgets/select.rs +++ b/plonky2/src/gadgets/select.rs @@ -1,5 +1,4 @@ -use plonky2_field::extension::Extendable; - +use crate::field::extension::Extendable; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::{BoolTarget, Target}; @@ -40,8 +39,8 @@ impl, const D: usize> CircuitBuilder { #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::types::Field; + use crate::field::types::Sample; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; diff --git a/plonky2/src/gadgets/split_base.rs b/plonky2/src/gadgets/split_base.rs index c539d784..c462d5bf 100644 --- a/plonky2/src/gadgets/split_base.rs +++ b/plonky2/src/gadgets/split_base.rs @@ -1,15 +1,18 @@ -use std::borrow::Borrow; +use alloc::vec; +use alloc::vec::Vec; +use core::borrow::Borrow; use itertools::Itertools; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; +use crate::field::extension::Extendable; +use crate::field::types::Field; use crate::gates::base_sum::BaseSumGate; use crate::hash::hash_types::RichField; use crate::iop::generator::{GeneratedValues, SimpleGenerator}; use crate::iop::target::{BoolTarget, Target}; use crate::iop::witness::{PartitionWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; +use crate::util::log_floor; impl, const D: usize> CircuitBuilder { /// Split the given element into a list of targets, where each one represents a @@ -33,6 +36,11 @@ impl, const D: usize> CircuitBuilder { pub(crate) fn le_sum(&mut self, bits: impl Iterator>) -> Target { let bits = bits.map(|b| *b.borrow()).collect_vec(); let num_bits = bits.len(); + assert!( + num_bits <= log_floor(F::ORDER, 2), + "{} bits may overflow the field", + num_bits + ); if num_bits == 0 { return self.zero(); } @@ -99,11 +107,11 @@ impl SimpleGenerator for BaseSumGenerator { #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::types::Field; - use rand::{thread_rng, Rng}; + use rand::rngs::OsRng; + use rand::Rng; + use super::*; use crate::iop::witness::PartialWitness; - use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; use crate::plonk::verifier::verify; @@ -145,7 +153,7 @@ mod tests { let pw = PartialWitness::new(); let mut builder = CircuitBuilder::::new(config); - let n = thread_rng().gen_range(0..(1 << 30)); + let n = OsRng.gen_range(0..(1 << 30)); let x = 
builder.constant(F::from_canonical_usize(n)); let zero = builder._false(); diff --git a/plonky2/src/gadgets/split_join.rs b/plonky2/src/gadgets/split_join.rs index cd2bbc93..1a1575c0 100644 --- a/plonky2/src/gadgets/split_join.rs +++ b/plonky2/src/gadgets/split_join.rs @@ -1,12 +1,14 @@ -use plonky2_field::extension::Extendable; -use plonky2_util::ceil_div_usize; +use alloc::vec; +use alloc::vec::Vec; +use crate::field::extension::Extendable; use crate::gates::base_sum::BaseSumGate; use crate::hash::hash_types::RichField; use crate::iop::generator::{GeneratedValues, SimpleGenerator}; use crate::iop::target::{BoolTarget, Target}; use crate::iop::witness::{PartitionWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; +use crate::util::ceil_div_usize; impl, const D: usize> CircuitBuilder { /// Split the given integer into a list of wires, where each one represents a diff --git a/plonky2/src/gates/arithmetic_base.rs b/plonky2/src/gates/arithmetic_base.rs index 03560faf..34d6e244 100644 --- a/plonky2/src/gates/arithmetic_base.rs +++ b/plonky2/src/gates/arithmetic_base.rs @@ -1,6 +1,10 @@ -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; +use alloc::boxed::Box; +use alloc::format; +use alloc::string::String; +use alloc::vec::Vec; +use crate::field::extension::Extendable; +use crate::field::packed::PackedField; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; @@ -209,8 +213,8 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::arithmetic_base::ArithmeticGate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::plonk::circuit_data::CircuitConfig; diff --git a/plonky2/src/gates/arithmetic_extension.rs b/plonky2/src/gates/arithmetic_extension.rs index 26b7074b..ee7f96be 100644 --- a/plonky2/src/gates/arithmetic_extension.rs +++ b/plonky2/src/gates/arithmetic_extension.rs @@ -1,8 +1,10 @@ -use std::ops::Range; - -use plonky2_field::extension::Extendable; -use plonky2_field::extension::FieldExtension; +use alloc::boxed::Box; +use alloc::format; +use alloc::string::String; +use alloc::vec::Vec; +use core::ops::Range; +use crate::field::extension::{Extendable, FieldExtension}; use crate::gates::gate::Gate; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; @@ -204,8 +206,8 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::arithmetic_extension::ArithmeticExtensionGate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::plonk::circuit_data::CircuitConfig; diff --git a/plonky2/src/gates/base_sum.rs b/plonky2/src/gates/base_sum.rs index 1252c8e3..27eb2c69 100644 --- a/plonky2/src/gates/base_sum.rs +++ b/plonky2/src/gates/base_sum.rs @@ -1,10 +1,12 @@ -use std::ops::Range; - -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; -use plonky2_field::types::{Field, Field64}; -use plonky2_util::log_floor; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::ops::Range; +use crate::field::extension::Extendable; +use crate::field::packed::PackedField; +use 
crate::field::types::{Field, Field64}; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; @@ -20,6 +22,7 @@ use crate::plonk::vars::{ EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, EvaluationVarsBasePacked, }; +use crate::util::log_floor; /// A gate which can decompose a number into base B little-endian limbs. #[derive(Copy, Clone, Debug)] @@ -197,8 +200,8 @@ impl SimpleGenerator for BaseSplitGenerator #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::base_sum::BaseSumGate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; diff --git a/plonky2/src/gates/constant.rs b/plonky2/src/gates/constant.rs index 513caad9..bf365b04 100644 --- a/plonky2/src/gates/constant.rs +++ b/plonky2/src/gates/constant.rs @@ -1,6 +1,10 @@ -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use crate::field::extension::Extendable; +use crate::field::packed::PackedField; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; @@ -113,8 +117,8 @@ impl, const D: usize> PackedEvaluableBase for #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::constant::ConstantGate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::plonk::circuit_data::CircuitConfig; diff --git a/plonky2/src/gates/exponentiation.rs b/plonky2/src/gates/exponentiation.rs index ca1ba395..138ec3c9 100644 --- a/plonky2/src/gates/exponentiation.rs +++ b/plonky2/src/gates/exponentiation.rs @@ -1,10 +1,13 @@ -use std::marker::PhantomData; - -use plonky2_field::extension::Extendable; -use plonky2_field::ops::Square; -use plonky2_field::packed::PackedField; -use plonky2_field::types::Field; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use crate::field::extension::Extendable; +use crate::field::ops::Square; +use crate::field::packed::PackedField; +use crate::field::types::Field; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; @@ -282,21 +285,17 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; - use plonky2_util::log2_ceil; + use rand::rngs::OsRng; use rand::Rng; - use crate::gates::exponentiation::ExponentiationGate; - use crate::gates::gate::Gate; + use super::*; + use crate::field::goldilocks_field::GoldilocksField; + use crate::field::types::Sample; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::hash::hash_types::HashOut; - use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use crate::plonk::vars::EvaluationVars; + use crate::util::log2_ceil; const MAX_POWER_BITS: usize = 17; @@ -379,7 +378,7 @@ mod tests { v.iter().map(|&x| x.into()).collect::>() } - let mut 
rng = rand::thread_rng(); + let mut rng = OsRng; let base = F::TWO; let power = rng.gen::() % (1 << MAX_POWER_BITS); diff --git a/plonky2/src/gates/gate.rs b/plonky2/src/gates/gate.rs index 781e0cbd..34950b76 100644 --- a/plonky2/src/gates/gate.rs +++ b/plonky2/src/gates/gate.rs @@ -1,13 +1,17 @@ -use std::collections::HashMap; -use std::fmt::{Debug, Error, Formatter}; -use std::hash::{Hash, Hasher}; -use std::ops::Range; -use std::sync::Arc; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::sync::Arc; +use alloc::vec; +use alloc::vec::Vec; +use core::fmt::{Debug, Error, Formatter}; +use core::hash::{Hash, Hasher}; +use core::ops::Range; -use plonky2_field::batch_util::batch_multiply_inplace; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::types::Field; +use hashbrown::HashMap; +use crate::field::batch_util::batch_multiply_inplace; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::types::Field; use crate::gates::selectors::UNUSED_SELECTOR; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; diff --git a/plonky2/src/gates/gate_testing.rs b/plonky2/src/gates/gate_testing.rs index e9c4c237..c6cae2bb 100644 --- a/plonky2/src/gates/gate_testing.rs +++ b/plonky2/src/gates/gate_testing.rs @@ -1,19 +1,20 @@ -use anyhow::{ensure, Result}; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::polynomial::{PolynomialCoeffs, PolynomialValues}; -use plonky2_field::types::Field; -use plonky2_util::log2_ceil; +use alloc::vec; +use alloc::vec::Vec; +use anyhow::{ensure, Result}; + +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::polynomial::{PolynomialCoeffs, PolynomialValues}; +use crate::field::types::{Field, Sample}; use crate::gates::gate::Gate; -use crate::hash::hash_types::HashOut; -use crate::hash::hash_types::RichField; +use crate::hash::hash_types::{HashOut, RichField}; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; -use crate::plonk::config::{GenericConfig, Hasher}; +use crate::plonk::config::GenericConfig; use crate::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBaseBatch}; use crate::plonk::verifier::verify; -use crate::util::transpose; +use crate::util::{log2_ceil, transpose}; const WITNESS_SIZE: usize = 1 << 5; const WITNESS_DEGREE: usize = WITNESS_SIZE - 1; @@ -92,10 +93,7 @@ pub fn test_eval_fns< const D: usize, >( gate: G, -) -> Result<()> -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> Result<()> { // Test that `eval_unfiltered` and `eval_unfiltered_base` are coherent. 
let wires_base = F::rand_vec(gate.num_wires()); let constants_base = F::rand_vec(gate.num_constants()); diff --git a/plonky2/src/gates/high_degree_interpolation.rs b/plonky2/src/gates/high_degree_interpolation.rs index 0bc4ab65..65573898 100644 --- a/plonky2/src/gates/high_degree_interpolation.rs +++ b/plonky2/src/gates/high_degree_interpolation.rs @@ -1,11 +1,14 @@ -use std::marker::PhantomData; -use std::ops::Range; - -use plonky2_field::extension::algebra::PolynomialCoeffsAlgebra; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::interpolation::interpolant; -use plonky2_field::polynomial::PolynomialCoeffs; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use core::ops::Range; +use crate::field::extension::algebra::PolynomialCoeffsAlgebra; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::interpolation::interpolant; +use crate::field::polynomial::PolynomialCoeffs; use crate::gadgets::polynomial::PolynomialCoeffsExtAlgebraTarget; use crate::gates::gate::Gate; use crate::gates::interpolation::InterpolationGate; @@ -270,20 +273,14 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::polynomial::PolynomialCoeffs; - use plonky2_field::types::Field; - use crate::gates::gate::Gate; + use super::*; + use crate::field::goldilocks_field::GoldilocksField; + use crate::field::types::{Field, Sample}; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; - use crate::gates::high_degree_interpolation::HighDegreeInterpolationGate; - use crate::gates::interpolation::InterpolationGate; use crate::hash::hash_types::HashOut; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use crate::plonk::vars::EvaluationVars; #[test] fn wire_indices() { diff --git a/plonky2/src/gates/interpolation.rs b/plonky2/src/gates/interpolation.rs index d417fa6b..07179006 100644 --- a/plonky2/src/gates/interpolation.rs +++ b/plonky2/src/gates/interpolation.rs @@ -1,7 +1,7 @@ -use std::ops::Range; - -use plonky2_field::extension::Extendable; +use alloc::vec; +use core::ops::Range; +use crate::field::extension::Extendable; use crate::gates::gate::Gate; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; @@ -105,10 +105,10 @@ impl, const D: usize> CircuitBuilder { #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::extension::FieldExtension; - use plonky2_field::interpolation::interpolant; - use plonky2_field::types::Field; + use crate::field::extension::FieldExtension; + use crate::field::interpolation::interpolant; + use crate::field::types::{Field, Sample}; use crate::gates::high_degree_interpolation::HighDegreeInterpolationGate; use crate::gates::low_degree_interpolation::LowDegreeInterpolationGate; use crate::iop::witness::PartialWitness; diff --git a/plonky2/src/gates/low_degree_interpolation.rs b/plonky2/src/gates/low_degree_interpolation.rs index 8fd2ed47..f4f3286c 100644 --- a/plonky2/src/gates/low_degree_interpolation.rs +++ b/plonky2/src/gates/low_degree_interpolation.rs @@ -1,12 +1,15 @@ -use std::marker::PhantomData; -use std::ops::Range; - -use plonky2_field::extension::algebra::PolynomialCoeffsAlgebra; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::interpolation::interpolant; -use 
plonky2_field::polynomial::PolynomialCoeffs; -use plonky2_field::types::Field; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use core::ops::Range; +use crate::field::extension::algebra::PolynomialCoeffsAlgebra; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::interpolation::interpolant; +use crate::field::polynomial::PolynomialCoeffs; +use crate::field::types::Field; use crate::gadgets::polynomial::PolynomialCoeffsExtAlgebraTarget; use crate::gates::gate::Gate; use crate::gates::interpolation::InterpolationGate; @@ -383,11 +386,11 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::extension::quadratic::QuadraticExtension; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::polynomial::PolynomialCoeffs; - use plonky2_field::types::Field; + use crate::field::extension::quadratic::QuadraticExtension; + use crate::field::goldilocks_field::GoldilocksField; + use crate::field::polynomial::PolynomialCoeffs; + use crate::field::types::{Field, Sample}; use crate::gates::gate::Gate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::gates::interpolation::InterpolationGate; diff --git a/plonky2/src/gates/multiplication_extension.rs b/plonky2/src/gates/multiplication_extension.rs index 8e6b44d7..02243450 100644 --- a/plonky2/src/gates/multiplication_extension.rs +++ b/plonky2/src/gates/multiplication_extension.rs @@ -1,8 +1,10 @@ -use std::ops::Range; - -use plonky2_field::extension::Extendable; -use plonky2_field::extension::FieldExtension; +use alloc::boxed::Box; +use alloc::format; +use alloc::string::String; +use alloc::vec::Vec; +use core::ops::Range; +use crate::field::extension::{Extendable, FieldExtension}; use crate::gates::gate::Gate; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; @@ -181,11 +183,10 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; + use super::*; + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; - use crate::gates::multiplication_extension::MulExtensionGate; - use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; #[test] diff --git a/plonky2/src/gates/noop.rs b/plonky2/src/gates/noop.rs index fb5326bd..f6f9853a 100644 --- a/plonky2/src/gates/noop.rs +++ b/plonky2/src/gates/noop.rs @@ -1,5 +1,8 @@ -use plonky2_field::extension::Extendable; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use crate::field::extension::Extendable; use crate::gates::gate::Gate; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; @@ -54,8 +57,7 @@ impl, const D: usize> Gate for NoopGate { #[cfg(test)] mod tests { - use plonky2_field::goldilocks_field::GoldilocksField; - + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::gates::noop::NoopGate; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; diff --git a/plonky2/src/gates/packed_util.rs b/plonky2/src/gates/packed_util.rs index 0889f9ae..361eb3a2 100644 --- a/plonky2/src/gates/packed_util.rs +++ b/plonky2/src/gates/packed_util.rs @@ -1,7 +1,9 @@ -use plonky2_field::extension::Extendable; -use 
plonky2_field::packable::Packable; -use plonky2_field::packed::PackedField; +use alloc::vec; +use alloc::vec::Vec; +use crate::field::extension::Extendable; +use crate::field::packable::Packable; +use crate::field::packed::PackedField; use crate::gates::gate::Gate; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; diff --git a/plonky2/src/gates/poseidon.rs b/plonky2/src/gates/poseidon.rs index 26ec2594..c02eec84 100644 --- a/plonky2/src/gates/poseidon.rs +++ b/plonky2/src/gates/poseidon.rs @@ -1,8 +1,11 @@ -use std::marker::PhantomData; - -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use crate::field::extension::Extendable; +use crate::field::types::Field; use crate::gates::gate::Gate; use crate::gates::poseidon_mds::PoseidonMdsGate; use crate::gates::util::StridedConstraintConsumer; @@ -23,16 +26,12 @@ use crate::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase}; /// This also has some extra features to make it suitable for efficiently verifying Merkle proofs. /// It has a flag which can be used to swap the first four inputs with the next four, for ordering /// sibling digests. -#[derive(Debug)] -pub struct PoseidonGate, const D: usize> { - _phantom: PhantomData, -} +#[derive(Debug, Default)] +pub struct PoseidonGate, const D: usize>(PhantomData); impl, const D: usize> PoseidonGate { pub fn new() -> Self { - PoseidonGate { - _phantom: PhantomData, - } + Self(PhantomData) } /// The wire index for the `i`th input to the permutation. @@ -506,9 +505,9 @@ impl + Poseidon, const D: usize> SimpleGenerator #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; + use crate::field::goldilocks_field::GoldilocksField; + use crate::field::types::Field; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::gates::poseidon::PoseidonGate; use crate::hash::hashing::SPONGE_WIDTH; diff --git a/plonky2/src/gates/poseidon_mds.rs b/plonky2/src/gates/poseidon_mds.rs index 289246e1..94c1486c 100644 --- a/plonky2/src/gates/poseidon_mds.rs +++ b/plonky2/src/gates/poseidon_mds.rs @@ -1,11 +1,13 @@ -use std::marker::PhantomData; -use std::ops::Range; - -use plonky2_field::extension::algebra::ExtensionAlgebra; -use plonky2_field::extension::Extendable; -use plonky2_field::extension::FieldExtension; -use plonky2_field::types::Field; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use core::ops::Range; +use crate::field::extension::algebra::ExtensionAlgebra; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::types::Field; use crate::gates::gate::Gate; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; @@ -18,16 +20,13 @@ use crate::iop::witness::{PartitionWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase}; -#[derive(Debug)] -pub struct PoseidonMdsGate + Poseidon, const D: usize> { - _phantom: PhantomData, -} +/// Poseidon MDS Gate +#[derive(Debug, Default)] +pub struct PoseidonMdsGate + Poseidon, const D: usize>(PhantomData); impl + Poseidon, const D: usize> PoseidonMdsGate { pub fn new() -> Self { - PoseidonMdsGate { - _phantom: PhantomData, - } + 
Self(PhantomData) } pub fn wires_input(i: usize) -> Range { diff --git a/plonky2/src/gates/public_input.rs b/plonky2/src/gates/public_input.rs index 60c9a4c3..10c42f00 100644 --- a/plonky2/src/gates/public_input.rs +++ b/plonky2/src/gates/public_input.rs @@ -1,8 +1,10 @@ -use std::ops::Range; - -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use core::ops::Range; +use crate::field::extension::Extendable; +use crate::field::packed::PackedField; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; @@ -99,8 +101,7 @@ impl, const D: usize> PackedEvaluableBase for #[cfg(test)] mod tests { - use plonky2_field::goldilocks_field::GoldilocksField; - + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::gates::public_input::PublicInputGate; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; diff --git a/plonky2/src/gates/random_access.rs b/plonky2/src/gates/random_access.rs index 3ea2f55e..52972cd3 100644 --- a/plonky2/src/gates/random_access.rs +++ b/plonky2/src/gates/random_access.rs @@ -1,10 +1,14 @@ -use std::marker::PhantomData; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; use itertools::Itertools; -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; -use plonky2_field::types::Field; +use crate::field::extension::Extendable; +use crate::field::packed::PackedField; +use crate::field::types::Field; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; @@ -376,19 +380,16 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; - use rand::{thread_rng, Rng}; + use rand::rngs::OsRng; + use rand::Rng; - use crate::gates::gate::Gate; + use super::*; + use crate::field::goldilocks_field::GoldilocksField; + use crate::field::types::Sample; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; - use crate::gates::random_access::RandomAccessGate; use crate::hash::hash_types::HashOut; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use crate::plonk::vars::EvaluationVars; #[test] fn low_degree() { @@ -449,7 +450,7 @@ mod tests { .map(|_| F::rand_vec(vec_size)) .collect::>(); let access_indices = (0..num_copies) - .map(|_| thread_rng().gen_range(0..vec_size)) + .map(|_| OsRng.gen_range(0..vec_size)) .collect::>(); let gate = RandomAccessGate:: { bits, diff --git a/plonky2/src/gates/reducing.rs b/plonky2/src/gates/reducing.rs index 02f8ac2d..64a1a986 100644 --- a/plonky2/src/gates/reducing.rs +++ b/plonky2/src/gates/reducing.rs @@ -1,8 +1,10 @@ -use std::ops::Range; - -use plonky2_field::extension::Extendable; -use plonky2_field::extension::FieldExtension; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::ops::Range; +use crate::field::extension::{Extendable, FieldExtension}; use crate::gates::gate::Gate; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; @@ -210,8 +212,8 @@ impl, const D: usize> SimpleGenerator for Reduci #[cfg(test)] mod tests { 
use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::gates::reducing::ReducingGate; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; diff --git a/plonky2/src/gates/reducing_extension.rs b/plonky2/src/gates/reducing_extension.rs index 8b04ec99..27483e1f 100644 --- a/plonky2/src/gates/reducing_extension.rs +++ b/plonky2/src/gates/reducing_extension.rs @@ -1,8 +1,10 @@ -use std::ops::Range; - -use plonky2_field::extension::Extendable; -use plonky2_field::extension::FieldExtension; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::ops::Range; +use crate::field::extension::{Extendable, FieldExtension}; use crate::gates::gate::Gate; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; @@ -204,8 +206,8 @@ impl, const D: usize> SimpleGenerator for Reduci #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::goldilocks_field::GoldilocksField; + use crate::field::goldilocks_field::GoldilocksField; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; use crate::gates::reducing_extension::ReducingExtensionGate; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; diff --git a/plonky2/src/gates/selectors.rs b/plonky2/src/gates/selectors.rs index 6cea86a7..7c5c6f34 100644 --- a/plonky2/src/gates/selectors.rs +++ b/plonky2/src/gates/selectors.rs @@ -1,8 +1,9 @@ -use std::ops::Range; - -use plonky2_field::extension::Extendable; -use plonky2_field::polynomial::PolynomialValues; +use alloc::vec; +use alloc::vec::Vec; +use core::ops::Range; +use crate::field::extension::Extendable; +use crate::field::polynomial::PolynomialValues; use crate::gates::gate::{GateInstance, GateRef}; use crate::hash::hash_types::RichField; @@ -28,7 +29,7 @@ impl SelectorsInfo { /// `|G| + max_{g in G} g.degree() <= max_degree`. These groups are constructed greedily from /// the list of gates sorted by degree. /// We build a selector polynomial `S_i` for each group `G_i`, with -/// S_i[j] = +/// S_i\[j\] = /// if j-th row gate=g_k in G_i /// k /// else diff --git a/plonky2/src/gates/util.rs b/plonky2/src/gates/util.rs index fb5ab6b1..88d77471 100644 --- a/plonky2/src/gates/util.rs +++ b/plonky2/src/gates/util.rs @@ -1,6 +1,6 @@ -use std::marker::PhantomData; +use core::marker::PhantomData; -use plonky2_field::packed::PackedField; +use crate::field::packed::PackedField; /// Writes constraints yielded by a gate to a buffer, with a given stride. /// Permits us to abstract the underlying memory layout. 
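As a concrete reading of the selector description in the `selectors.rs` hunk above: suppose group `G_1 = {g_0 = ArithmeticGate, g_1 = PoseidonGate}` and rows 0..4 of the circuit use an Arithmetic, Poseidon, PublicInput and Arithmetic gate respectively. Then `S_1 = [0, 1, *, 0]`: each row whose gate belongs to the group records that gate's index within `G_1`, and `*` is the sentinel used for rows whose gate falls outside the group (the diff context cuts the constant off; presumably the `UNUSED_SELECTOR` imported in `gates/gate.rs` above).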
In particular, we can make a matrix of diff --git a/plonky2/src/hash/arch/aarch64/poseidon_goldilocks_neon.rs b/plonky2/src/hash/arch/aarch64/poseidon_goldilocks_neon.rs index 352456e7..10d81f28 100644 --- a/plonky2/src/hash/arch/aarch64/poseidon_goldilocks_neon.rs +++ b/plonky2/src/hash/arch/aarch64/poseidon_goldilocks_neon.rs @@ -1,15 +1,15 @@ #![allow(clippy::assertions_on_constants)] -use std::arch::aarch64::*; -use std::arch::asm; -use std::mem::transmute; +use core::arch::aarch64::*; +use core::arch::asm; +use core::mem::transmute; -use plonky2_field::goldilocks_field::GoldilocksField; -use plonky2_util::branch_hint; use static_assertions::const_assert; use unroll::unroll_for_loops; +use crate::field::goldilocks_field::GoldilocksField; use crate::hash::poseidon::Poseidon; +use crate::util::branch_hint; // ========================================== CONSTANTS =========================================== diff --git a/plonky2/src/hash/arch/x86_64/poseidon_goldilocks_avx2_bmi2.rs b/plonky2/src/hash/arch/x86_64/poseidon_goldilocks_avx2_bmi2.rs index b40b4277..c7a65f90 100644 --- a/plonky2/src/hash/arch/x86_64/poseidon_goldilocks_avx2_bmi2.rs +++ b/plonky2/src/hash/arch/x86_64/poseidon_goldilocks_avx2_bmi2.rs @@ -1,15 +1,15 @@ -use std::arch::asm; -use std::arch::x86_64::*; -use std::mem::size_of; +use core::arch::asm; +use core::arch::x86_64::*; +use core::mem::size_of; -use plonky2_field::types::Field; -use plonky2_field::goldilocks_field::GoldilocksField; -use plonky2_util::branch_hint; use static_assertions::const_assert; +use crate::field::goldilocks_field::GoldilocksField; +use crate::field::types::Field; use crate::hash::poseidon::{ Poseidon, ALL_ROUND_CONSTANTS, HALF_N_FULL_ROUNDS, N_PARTIAL_ROUNDS, N_ROUNDS, }; +use crate::util::branch_hint; // WARNING: This code contains tricks that work for the current MDS matrix and round constants, but // are not guaranteed to work if those are changed. diff --git a/plonky2/src/hash/hash_types.rs b/plonky2/src/hash/hash_types.rs index f416732a..c725c45c 100644 --- a/plonky2/src/hash/hash_types.rs +++ b/plonky2/src/hash/hash_types.rs @@ -1,7 +1,10 @@ -use plonky2_field::goldilocks_field::GoldilocksField; -use plonky2_field::types::{Field, PrimeField64}; +use alloc::vec::Vec; + +use anyhow::ensure; use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use crate::field::goldilocks_field::GoldilocksField; +use crate::field::types::{Field, PrimeField64, Sample}; use crate::hash::poseidon::Poseidon; use crate::iop::target::Target; use crate::plonk::config::GenericHashOut; @@ -23,6 +26,7 @@ impl HashOut { elements: [F::ZERO; 4], }; + // TODO: Switch to a TryFrom impl. 
pub fn from_vec(elements: Vec) -> Self { debug_assert!(elements.len() == 4); Self { @@ -35,24 +39,43 @@ impl HashOut { elements[0..elements_in.len()].copy_from_slice(elements_in); Self { elements } } +} - #[cfg(feature = "parallel")] - pub fn rand_from_rng(rng: &mut R) -> Self { +impl From<[F; 4]> for HashOut { + fn from(elements: [F; 4]) -> Self { + Self { elements } + } +} + +impl TryFrom<&[F]> for HashOut { + type Error = anyhow::Error; + + fn try_from(elements: &[F]) -> Result { + ensure!(elements.len() == 4); + Ok(Self { + elements: elements.try_into().unwrap(), + }) + } +} + +impl Sample for HashOut +where + F: Field, +{ + #[inline] + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized, + { Self { elements: [ - F::rand_from_rng(rng), - F::rand_from_rng(rng), - F::rand_from_rng(rng), - F::rand_from_rng(rng), + F::sample(rng), + F::sample(rng), + F::sample(rng), + F::sample(rng), ], } } - - pub fn rand() -> Self { - Self { - elements: [F::rand(), F::rand(), F::rand(), F::rand()], - } - } } impl GenericHashOut for HashOut { @@ -93,6 +116,7 @@ pub struct HashOutTarget { } impl HashOutTarget { + // TODO: Switch to a TryFrom impl. pub fn from_vec(elements: Vec) -> Self { debug_assert!(elements.len() == 4); Self { @@ -107,6 +131,23 @@ impl HashOutTarget { } } +impl From<[Target; 4]> for HashOutTarget { + fn from(elements: [Target; 4]) -> Self { + Self { elements } + } +} + +impl TryFrom<&[Target]> for HashOutTarget { + type Error = anyhow::Error; + + fn try_from(elements: &[Target]) -> Result { + ensure!(elements.len() == 4); + Ok(Self { + elements: elements.try_into().unwrap(), + }) + } +} + #[derive(Clone, Debug)] pub struct MerkleCapTarget(pub Vec); @@ -114,18 +155,16 @@ pub struct MerkleCapTarget(pub Vec); #[derive(Eq, PartialEq, Copy, Clone, Debug)] pub struct BytesHash(pub [u8; N]); -impl BytesHash { - #[cfg(feature = "rand")] - pub fn rand_from_rng(rng: &mut R) -> Self { +impl Sample for BytesHash { + #[inline] + fn sample(rng: &mut R) -> Self + where + R: rand::RngCore + ?Sized, + { let mut buf = [0; N]; rng.fill_bytes(&mut buf); Self(buf) } - - #[cfg(feature = "rand")] - pub fn rand() -> Self { - Self::rand_from_rng(&mut rand::thread_rng()) - } } impl GenericHashOut for BytesHash { diff --git a/plonky2/src/hash/hashing.rs b/plonky2/src/hash/hashing.rs index c903201b..3e93447a 100644 --- a/plonky2/src/hash/hashing.rs +++ b/plonky2/src/hash/hashing.rs @@ -1,9 +1,9 @@ //! Concrete instantiation of a hash function. 
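The conversions added to `HashOut` above are small but change the error story: `From<[F; 4]>` is infallible, while `TryFrom<&[F]>` turns the length check that `from_vec` only `debug_assert`s into a real error. A usage sketch (assuming the re-exported `plonky2::field` paths used elsewhere in this diff):

```rust
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::HashOut;

fn main() -> anyhow::Result<()> {
    let h1: HashOut<F> = [F::ZERO, F::ONE, F::TWO, F::NEG_ONE].into();

    let elems = vec![F::ZERO, F::ONE, F::TWO, F::NEG_ONE];
    let h2 = HashOut::<F>::try_from(elems.as_slice())?;
    assert_eq!(h1, h2);

    // A wrong-length slice now surfaces as an Err instead of only tripping a debug_assert.
    assert!(HashOut::<F>::try_from(&elems[..3]).is_err());
    Ok(())
}
```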
-use plonky2_field::extension::Extendable; +use alloc::vec::Vec; -use crate::hash::hash_types::RichField; -use crate::hash::hash_types::{HashOut, HashOutTarget}; +use crate::field::extension::Extendable; +use crate::hash::hash_types::{HashOut, HashOutTarget, RichField}; use crate::iop::target::Target; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::config::AlgebraicHasher; diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs index 9a061d82..0efa154c 100644 --- a/plonky2/src/hash/keccak.rs +++ b/plonky2/src/hash/keccak.rs @@ -1,5 +1,7 @@ -use std::iter; -use std::mem::size_of; +use alloc::vec; +use alloc::vec::Vec; +use core::iter; +use core::mem::size_of; use itertools::Itertools; use keccak_hash::keccak; @@ -7,7 +9,7 @@ use keccak_hash::keccak; use crate::hash::hash_types::{BytesHash, RichField}; use crate::hash::hashing::{PlonkyPermutation, SPONGE_WIDTH}; use crate::plonk::config::Hasher; -use crate::util::serialization::Buffer; +use crate::util::serialization::Write; /// Keccak-256 pseudo-permutation (not necessarily one-to-one) used in the challenger. /// A state `input: [F; 12]` is sent to the field representation of `H(input) || H(H(input)) || H(H(H(input)))` @@ -51,16 +53,17 @@ impl PlonkyPermutation for KeccakPermutation { /// Keccak-256 hash function. #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct KeccakHash; + impl Hasher for KeccakHash { const HASH_SIZE: usize = N; type Hash = BytesHash; type Permutation = KeccakPermutation; fn hash_no_pad(input: &[F]) -> Self::Hash { - let mut buffer = Buffer::new(Vec::new()); + let mut buffer = Vec::new(); buffer.write_field_vec(input).unwrap(); let mut arr = [0; N]; - let hash_bytes = keccak(buffer.bytes()).0; + let hash_bytes = keccak(buffer).0; arr.copy_from_slice(&hash_bytes[..N]); BytesHash(arr) } diff --git a/plonky2/src/hash/merkle_proofs.rs b/plonky2/src/hash/merkle_proofs.rs index f54793d9..69d7299f 100644 --- a/plonky2/src/hash/merkle_proofs.rs +++ b/plonky2/src/hash/merkle_proofs.rs @@ -1,9 +1,11 @@ +use alloc::vec; +use alloc::vec::Vec; + use anyhow::{ensure, Result}; -use plonky2_field::extension::Extendable; use serde::{Deserialize, Serialize}; -use crate::hash::hash_types::RichField; -use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget}; +use crate::field::extension::Extendable; +use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField}; use crate::hash::hashing::SPONGE_WIDTH; use crate::hash::merkle_tree::MerkleCap; use crate::iop::target::{BoolTarget, Target}; @@ -21,6 +23,10 @@ impl> MerkleProof { pub fn len(&self) -> usize { self.siblings.len() } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } } #[derive(Clone, Debug)] @@ -36,10 +42,7 @@ pub fn verify_merkle_proof>( leaf_index: usize, merkle_root: H::Hash, proof: &MerkleProof, -) -> Result<()> -where - [(); H::HASH_SIZE]:, -{ +) -> Result<()> { let merkle_cap = MerkleCap(vec![merkle_root]); verify_merkle_proof_to_cap(leaf_data, leaf_index, &merkle_cap, proof) } @@ -51,10 +54,7 @@ pub fn verify_merkle_proof_to_cap>( leaf_index: usize, merkle_cap: &MerkleCap, proof: &MerkleProof, -) -> Result<()> -where - [(); H::HASH_SIZE]:, -{ +) -> Result<()> { let mut index = leaf_index; let mut current_digest = H::hash_or_noop(&leaf_data); for &sibling_digest in proof.siblings.iter() { @@ -150,10 +150,11 @@ impl, const D: usize> CircuitBuilder { #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::types::Field; - use rand::{thread_rng, Rng}; + use rand::rngs::OsRng; + use rand::Rng; use super::*; 
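`KeccakHash::hash_no_pad` now serializes its input straight into a `Vec<u8>` via the `Write` trait rather than wrapping a `Buffer`. The same idiom is available to callers; a sketch, assuming `Write` is exported at `plonky2::util::serialization::Write` as the internal paths suggest:

```rust
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Field;
use plonky2::util::serialization::Write;

/// Serialize field elements into little-endian bytes, as the Keccak hasher now does
/// internally (8 bytes per Goldilocks element, no intermediate `Buffer`).
fn to_le_bytes(input: &[F]) -> Vec<u8> {
    let mut bytes = Vec::new();
    bytes
        .write_field_vec(input)
        .expect("writing to a Vec cannot fail");
    bytes
}

fn main() {
    let elems = [F::ONE, F::TWO, F::from_canonical_u64(3)];
    assert_eq!(to_le_bytes(&elems).len(), elems.len() * 8);
}
```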
+ use crate::field::types::Field; use crate::hash::merkle_tree::MerkleTree; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; @@ -179,7 +180,7 @@ mod tests { let cap_height = 1; let leaves = random_data::(n, 7); let tree = MerkleTree::>::Hasher>::new(leaves, cap_height); - let i: usize = thread_rng().gen_range(0..n); + let i: usize = OsRng.gen_range(0..n); let proof = tree.prove(i); let proof_t = MerkleProofTarget { diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs index 703a353e..92f1dca0 100644 --- a/plonky2/src/hash/merkle_tree.rs +++ b/plonky2/src/hash/merkle_tree.rs @@ -1,14 +1,14 @@ -use std::mem::MaybeUninit; -use std::slice; +use alloc::vec::Vec; +use core::mem::MaybeUninit; +use core::slice; use maybe_rayon::*; -use plonky2_util::log2_strict; use serde::{Deserialize, Serialize}; use crate::hash::hash_types::RichField; use crate::hash::merkle_proofs::MerkleProof; -use crate::plonk::config::GenericHashOut; -use crate::plonk::config::Hasher; +use crate::plonk::config::{GenericHashOut, Hasher}; +use crate::util::log2_strict; /// The Merkle cap of height `h` of a Merkle tree is the `h`-th layer (from the root) of the tree. /// It can be used in place of the root to verify Merkle paths, which are `h` elements shorter. @@ -21,6 +21,10 @@ impl> MerkleCap { self.0.len() } + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub fn height(&self) -> usize { log2_strict(self.len()) } @@ -64,10 +68,7 @@ fn capacity_up_to_mut(v: &mut Vec, len: usize) -> &mut [MaybeUninit] { fn fill_subtree>( digests_buf: &mut [MaybeUninit], leaves: &[Vec], -) -> H::Hash -where - [(); H::HASH_SIZE]:, -{ +) -> H::Hash { assert_eq!(leaves.len(), digests_buf.len() / 2 + 1); if digests_buf.is_empty() { H::hash_or_noop(&leaves[0]) @@ -98,9 +99,7 @@ fn fill_digests_buf>( cap_buf: &mut [MaybeUninit], leaves: &[Vec], cap_height: usize, -) where - [(); H::HASH_SIZE]:, -{ +) { // Special case of a tree that's all cap. The usual case will panic because we'll try to split // an empty slice into chunks of `0`. (We would not need this if there was a way to split into // `blah` chunks as opposed to chunks _of_ `blah`.) 
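With the `[(); H::HASH_SIZE]:` bounds gone, building a Merkle tree and checking a path against its cap works without unstable const-generic expressions. A round-trip sketch (assuming `MerkleTree`'s `cap` field is public, as its use elsewhere in the crate suggests):

```rust
use anyhow::Result;
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Sample;
use plonky2::hash::merkle_proofs::verify_merkle_proof_to_cap;
use plonky2::hash::merkle_tree::MerkleTree;
use plonky2::hash::poseidon::PoseidonHash;

fn main() -> Result<()> {
    // 8 leaves of 7 random field elements each, with a cap of height 1 (2 cap entries).
    let leaves: Vec<Vec<F>> = (0..8).map(|_| F::rand_vec(7)).collect();
    let tree = MerkleTree::<F, PoseidonHash>::new(leaves.clone(), 1);

    // Prove membership of leaf 3 and verify it against the cap instead of a root.
    let proof = tree.prove(3);
    verify_merkle_proof_to_cap(leaves[3].clone(), 3, &tree.cap, &proof)
}
```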
@@ -132,10 +131,7 @@ fn fill_digests_buf>( } impl> MerkleTree { - pub fn new(leaves: Vec>, cap_height: usize) -> Self - where - [(); H::HASH_SIZE]:, - { + pub fn new(leaves: Vec>, cap_height: usize) -> Self { let log2_leaves_len = log2_strict(leaves.len()); assert!( cap_height <= log2_leaves_len, @@ -212,9 +208,9 @@ impl> MerkleTree { #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::extension::Extendable; use super::*; + use crate::field::extension::Extendable; use crate::hash::merkle_proofs::verify_merkle_proof_to_cap; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; @@ -222,13 +218,14 @@ mod tests { (0..n).map(|_| F::rand_vec(k)).collect() } - fn verify_all_leaves, C: GenericConfig, const D: usize>( + fn verify_all_leaves< + F: RichField + Extendable, + C: GenericConfig, + const D: usize, + >( leaves: Vec>, cap_height: usize, - ) -> Result<()> - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> Result<()> { let tree = MerkleTree::::new(leaves.clone(), cap_height); for (i, leaf) in leaves.into_iter().enumerate() { let proof = tree.prove(i); diff --git a/plonky2/src/hash/path_compression.rs b/plonky2/src/hash/path_compression.rs index 2685f723..d4f7d5eb 100644 --- a/plonky2/src/hash/path_compression.rs +++ b/plonky2/src/hash/path_compression.rs @@ -1,5 +1,7 @@ -use std::collections::HashMap; +use alloc::vec; +use alloc::vec::Vec; +use hashbrown::HashMap; use num::Integer; use crate::hash::hash_types::RichField; @@ -57,10 +59,7 @@ pub(crate) fn decompress_merkle_proofs>( compressed_proofs: &[MerkleProof], height: usize, cap_height: usize, -) -> Vec> -where - [(); H::HASH_SIZE]:, -{ +) -> Vec> { let num_leaves = 1 << height; let compressed_proofs = compressed_proofs.to_vec(); let mut decompressed_proofs = Vec::with_capacity(compressed_proofs.len()); @@ -115,10 +114,11 @@ where #[cfg(test)] mod tests { - use plonky2_field::types::Field; - use rand::{thread_rng, Rng}; + use rand::rngs::OsRng; + use rand::Rng; use super::*; + use crate::field::types::Sample; use crate::hash::merkle_tree::MerkleTree; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; @@ -132,7 +132,7 @@ mod tests { let vs = (0..1 << h).map(|_| vec![F::rand()]).collect::>(); let mt = MerkleTree::>::Hasher>::new(vs.clone(), cap_height); - let mut rng = thread_rng(); + let mut rng = OsRng; let k = rng.gen_range(1..=1 << h); let indices = (0..k).map(|_| rng.gen_range(0..1 << h)).collect::>(); let proofs = indices.iter().map(|&i| mt.prove(i)).collect::>(); diff --git a/plonky2/src/hash/poseidon.rs b/plonky2/src/hash/poseidon.rs index 54c2379f..e7436018 100644 --- a/plonky2/src/hash/poseidon.rs +++ b/plonky2/src/hash/poseidon.rs @@ -1,10 +1,13 @@ //! Implementation of the Poseidon hash function, as described in -//! https://eprint.iacr.org/2019/458.pdf +//! 
+ +use alloc::vec; +use alloc::vec::Vec; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::types::{Field, PrimeField64}; use unroll::unroll_for_loops; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::types::{Field, PrimeField64}; use crate::gates::gate::Gate; use crate::gates::poseidon::PoseidonGate; use crate::gates::poseidon_mds::PoseidonMdsGate; @@ -684,8 +687,7 @@ impl AlgebraicHasher for PoseidonHash { #[cfg(test)] pub(crate) mod test_helpers { - use plonky2_field::types::Field; - + use crate::field::types::Field; use crate::hash::hashing::SPONGE_WIDTH; use crate::hash::poseidon::Poseidon; diff --git a/plonky2/src/hash/poseidon_goldilocks.rs b/plonky2/src/hash/poseidon_goldilocks.rs index 658b397e..b6e9bc74 100644 --- a/plonky2/src/hash/poseidon_goldilocks.rs +++ b/plonky2/src/hash/poseidon_goldilocks.rs @@ -4,8 +4,7 @@ //! `poseidon_constants.sage` script in the `mir-protocol/hash-constants` //! repository. -use plonky2_field::goldilocks_field::GoldilocksField; - +use crate::field::goldilocks_field::GoldilocksField; use crate::hash::poseidon::{Poseidon, N_PARTIAL_ROUNDS}; #[rustfmt::skip] @@ -271,10 +270,8 @@ impl Poseidon for GoldilocksField { #[cfg(test)] mod tests { - use plonky2_field::goldilocks_field::GoldilocksField as F; - use plonky2_field::types::Field; - use plonky2_field::types::PrimeField64; - + use crate::field::goldilocks_field::GoldilocksField as F; + use crate::field::types::{Field, PrimeField64}; use crate::hash::poseidon::test_helpers::{check_consistency, check_test_vectors}; #[test] diff --git a/plonky2/src/iop/challenger.rs b/plonky2/src/iop/challenger.rs index c601ae0f..b5661a91 100644 --- a/plonky2/src/iop/challenger.rs +++ b/plonky2/src/iop/challenger.rs @@ -1,10 +1,9 @@ -use std::convert::TryInto; -use std::marker::PhantomData; +use alloc::vec; +use alloc::vec::Vec; +use core::marker::PhantomData; -use plonky2_field::extension::{Extendable, FieldExtension}; - -use crate::hash::hash_types::RichField; -use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget}; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField}; use crate::hash::hashing::{PlonkyPermutation, SPONGE_RATE, SPONGE_WIDTH}; use crate::hash::merkle_tree::MerkleCap; use crate::iop::ext_target::ExtensionTarget; @@ -15,8 +14,8 @@ use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher}; /// Observes prover messages, and generates challenges by hashing the transcript, a la Fiat-Shamir. 
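For reference, the `Hasher` trait driving the Poseidon code above exposes both a full sponge hash and the length-dependent `hash_or_noop`, which packs inputs of at most four elements into the digest without hashing. A short sketch over the Goldilocks field:

```rust
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Field;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::plonk::config::Hasher;

fn main() {
    // Full Poseidon sponge over an arbitrary-length input.
    let long_input: Vec<F> = (0..20).map(F::from_canonical_u64).collect();
    let digest = PoseidonHash::hash_no_pad(&long_input);

    // `hash_or_noop` copies inputs of <= 4 elements directly into the digest; note
    // that its `[(); Self::HASH_SIZE]:` bound is removed later in this diff.
    let short_input = [F::ONE, F::TWO];
    let packed = PoseidonHash::hash_or_noop(&short_input);

    println!("{digest:?} {packed:?}");
}
```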
#[derive(Clone)] pub struct Challenger> { - sponge_state: [F; SPONGE_WIDTH], - input_buffer: Vec, + pub(crate) sponge_state: [F; SPONGE_WIDTH], + pub(crate) input_buffer: Vec, output_buffer: Vec, _phantom: PhantomData, } @@ -170,6 +169,7 @@ pub struct RecursiveChallenger, H: AlgebraicHasher< sponge_state: [Target; SPONGE_WIDTH], input_buffer: Vec, output_buffer: Vec, + __: PhantomData<(F, H)>, } impl, H: AlgebraicHasher, const D: usize> @@ -177,18 +177,20 @@ impl, H: AlgebraicHasher, const D: usize> { pub fn new(builder: &mut CircuitBuilder) -> Self { let zero = builder.zero(); - RecursiveChallenger { + Self { sponge_state: [zero; SPONGE_WIDTH], input_buffer: Vec::new(), output_buffer: Vec::new(), + __: PhantomData, } } pub fn from_state(sponge_state: [Target; SPONGE_WIDTH]) -> Self { - RecursiveChallenger { + Self { sponge_state, input_buffer: vec![], output_buffer: vec![], + __: PhantomData, } } @@ -298,8 +300,7 @@ impl, H: AlgebraicHasher, const D: usize> #[cfg(test)] mod tests { - use plonky2_field::types::Field; - + use crate::field::types::Sample; use crate::iop::challenger::{Challenger, RecursiveChallenger}; use crate::iop::generator::generate_partial_witness; use crate::iop::target::Target; diff --git a/plonky2/src/iop/ext_target.rs b/plonky2/src/iop/ext_target.rs index 8a63cf44..c9929b21 100644 --- a/plonky2/src/iop/ext_target.rs +++ b/plonky2/src/iop/ext_target.rs @@ -1,9 +1,9 @@ -use std::ops::Range; - -use plonky2_field::extension::algebra::ExtensionAlgebra; -use plonky2_field::extension::{Extendable, FieldExtension, OEF}; -use plonky2_field::types::Field; +use alloc::vec::Vec; +use core::ops::Range; +use crate::field::extension::algebra::ExtensionAlgebra; +use crate::field::extension::{Extendable, FieldExtension, OEF}; +use crate::field::types::Field; use crate::hash::hash_types::RichField; use crate::iop::target::Target; use crate::plonk::circuit_builder::CircuitBuilder; diff --git a/plonky2/src/iop/generator.rs b/plonky2/src/iop/generator.rs index 9948198e..86a4d923 100644 --- a/plonky2/src/iop/generator.rs +++ b/plonky2/src/iop/generator.rs @@ -1,9 +1,10 @@ -use std::fmt::Debug; -use std::marker::PhantomData; - -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::types::Field; +use alloc::vec; +use alloc::vec::Vec; +use core::fmt::Debug; +use core::marker::PhantomData; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::types::Field; use crate::hash::hash_types::{HashOut, HashOutTarget, RichField}; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::{BoolTarget, Target}; @@ -272,7 +273,6 @@ impl SimpleGenerator for RandomValueGenerator { fn run_once(&self, _witness: &PartitionWitness, out_buffer: &mut GeneratedValues) { let random_value = F::rand(); - out_buffer.set_target(self.target, random_value); } } diff --git a/plonky2/src/iop/mod.rs b/plonky2/src/iop/mod.rs index de315a09..47642edc 100644 --- a/plonky2/src/iop/mod.rs +++ b/plonky2/src/iop/mod.rs @@ -1,4 +1,5 @@ //! Logic common to multiple IOPs. 
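The `Challenger` described above is the Fiat-Shamir transcript: as long as prover and verifier observe the same messages in the same order, they derive identical challenges. A sketch of that symmetry; the observe/challenge method names are unchanged by this diff and are assumed from the existing API:

```rust
use plonky2::field::goldilocks_field::GoldilocksField as F;
use plonky2::field::types::Field;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::iop::challenger::Challenger;

fn main() {
    let mut prover_transcript = Challenger::<F, PoseidonHash>::new();
    let mut verifier_transcript = Challenger::<F, PoseidonHash>::new();

    // Both sides absorb the same prover message...
    let msg = [F::ONE, F::TWO, F::from_canonical_u64(3)];
    prover_transcript.observe_elements(&msg);
    verifier_transcript.observe_elements(&msg);

    // ...so squeezing yields the same challenge on both sides.
    assert_eq!(
        prover_transcript.get_challenge(),
        verifier_transcript.get_challenge()
    );
}
```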
+ pub mod challenger; pub mod ext_target; pub mod generator; diff --git a/plonky2/src/iop/target.rs b/plonky2/src/iop/target.rs index bda1a47c..15be6943 100644 --- a/plonky2/src/iop/target.rs +++ b/plonky2/src/iop/target.rs @@ -1,4 +1,5 @@ -use std::ops::Range; +use alloc::vec::Vec; +use core::ops::Range; use crate::iop::ext_target::ExtensionTarget; use crate::iop::wire::Wire; diff --git a/plonky2/src/iop/wire.rs b/plonky2/src/iop/wire.rs index dd46afe2..e6161bf3 100644 --- a/plonky2/src/iop/wire.rs +++ b/plonky2/src/iop/wire.rs @@ -1,4 +1,5 @@ -use std::ops::Range; +use alloc::vec::Vec; +use core::ops::Range; use crate::plonk::circuit_data::CircuitConfig; diff --git a/plonky2/src/iop/witness.rs b/plonky2/src/iop/witness.rs index 9a3cb662..fe6b9473 100644 --- a/plonky2/src/iop/witness.rs +++ b/plonky2/src/iop/witness.rs @@ -1,14 +1,14 @@ -use std::collections::HashMap; +use alloc::vec; +use alloc::vec::Vec; +use hashbrown::HashMap; use itertools::Itertools; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::types::Field; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::types::Field; use crate::fri::structure::{FriOpenings, FriOpeningsTarget}; use crate::fri::witness_util::set_fri_proof_target; -use crate::hash::hash_types::HashOutTarget; -use crate::hash::hash_types::RichField; -use crate::hash::hash_types::{HashOut, MerkleCapTarget}; +use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField}; use crate::hash::merkle_tree::MerkleCap; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::{BoolTarget, Target}; @@ -250,14 +250,14 @@ impl MatrixWitness { } } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Default)] pub struct PartialWitness { pub(crate) target_values: HashMap, } impl PartialWitness { pub fn new() -> Self { - PartialWitness { + Self { target_values: HashMap::new(), } } diff --git a/plonky2/src/lib.rs b/plonky2/src/lib.rs index 8a517a11..c357c27a 100644 --- a/plonky2/src/lib.rs +++ b/plonky2/src/lib.rs @@ -1,15 +1,10 @@ -#![allow(incomplete_features)] -#![allow(const_evaluatable_unchecked)] -#![allow(clippy::new_without_default)] #![allow(clippy::too_many_arguments)] -#![allow(clippy::type_complexity)] -#![allow(clippy::len_without_is_empty)] #![allow(clippy::needless_range_loop)] -#![allow(clippy::return_self_not_must_use)] -#![feature(generic_const_exprs)] -#![feature(specialization)] -#![feature(stdsimd)] +#![cfg_attr(not(feature = "std"), no_std)] +extern crate alloc; + +#[doc(inline)] pub use plonky2_field as field; pub mod fri; diff --git a/plonky2/src/plonk/circuit_builder.rs b/plonky2/src/plonk/circuit_builder.rs index dfd23426..6bad9296 100644 --- a/plonky2/src/plonk/circuit_builder.rs +++ b/plonky2/src/plonk/circuit_builder.rs @@ -1,16 +1,20 @@ -use std::cmp::max; -use std::collections::{BTreeMap, HashMap, HashSet}; +use alloc::boxed::Box; +use alloc::collections::BTreeMap; +use alloc::vec; +use alloc::vec::Vec; +use core::cmp::max; +#[cfg(feature = "std")] use std::time::Instant; +use hashbrown::{HashMap, HashSet}; use itertools::Itertools; use log::{debug, info, Level}; -use plonky2_field::cosets::get_unique_coset_shifts; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::fft::fft_root_table; -use plonky2_field::polynomial::PolynomialValues; -use plonky2_field::types::Field; -use plonky2_util::{log2_ceil, log2_strict}; +use crate::field::cosets::get_unique_coset_shifts; +use crate::field::extension::{Extendable, FieldExtension}; +use 
crate::field::fft::fft_root_table; +use crate::field::polynomial::PolynomialValues; +use crate::field::types::Field; use crate::fri::oracle::PolynomialBatch; use crate::fri::{FriConfig, FriParams}; use crate::gadgets::arithmetic::BaseArithmeticOperation; @@ -36,7 +40,7 @@ use crate::plonk::circuit_data::{ CircuitConfig, CircuitData, CommonCircuitData, ProverCircuitData, ProverOnlyCircuitData, VerifierCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, }; -use crate::plonk::config::{GenericConfig, Hasher}; +use crate::plonk::config::{GenericConfig, GenericHashOut, Hasher}; use crate::plonk::copy_constraint::CopyConstraint; use crate::plonk::permutation_argument::Forest; use crate::plonk::plonk_common::PlonkOracle; @@ -44,11 +48,16 @@ use crate::timed; use crate::util::context_tree::ContextTree; use crate::util::partial_products::num_partial_products; use crate::util::timing::TimingTree; -use crate::util::{transpose, transpose_poly_values}; +use crate::util::{log2_ceil, log2_strict, transpose, transpose_poly_values}; pub struct CircuitBuilder, const D: usize> { pub config: CircuitConfig, + /// A domain separator, which is included in the initial Fiat-Shamir seed. This is generally not + /// needed, but can be used to ensure that proofs for one application are not valid for another. + /// Defaults to the empty vector. + domain_separator: Option>, + /// The types of gates used in this circuit. gates: HashSet>, @@ -98,6 +107,7 @@ impl, const D: usize> CircuitBuilder { pub fn new(config: CircuitConfig) -> Self { let builder = CircuitBuilder { config, + domain_separator: None, gates: HashSet::new(), gate_instances: Vec::new(), public_inputs: Vec::new(), @@ -141,6 +151,11 @@ impl, const D: usize> CircuitBuilder { ); } + pub fn set_domain_separator(&mut self, separator: Vec) { + assert!(self.domain_separator.is_none()); + self.domain_separator = Some(separator); + } + pub fn num_gates(&self) -> usize { self.gate_instances.len() } @@ -229,9 +244,15 @@ impl, const D: usize> CircuitBuilder { self.register_public_input(t); t } + /// Add a virtual verifier data, register it as a public input and set it to `self.verifier_data_public_input`. /// WARNING: Do not register any public input after calling this! TODO: relax this - pub(crate) fn add_verifier_data_public_input(&mut self) { + pub fn add_verifier_data_public_inputs(&mut self) { + assert!( + self.verifier_data_public_input.is_none(), + "add_verifier_data_public_inputs only needs to be called once" + ); + let verifier_data = VerifierCircuitTarget { constants_sigmas_cap: self.add_virtual_cap(self.config.fri_config.cap_height), circuit_digest: self.add_virtual_hash(), @@ -685,12 +706,22 @@ impl, const D: usize> CircuitBuilder { } } + /// In PLONK's permutation argument, there's a slight chance of division by zero. We can + /// mitigate this by randomizing some unused witness elements, so if proving fails with + /// division by zero, the next attempt will have an (almost) independent chance of success. + /// See https://github.com/mir-protocol/plonky2/issues/456 + fn randomize_unused_pi_wires(&mut self, pi_gate: usize) { + for wire in PublicInputGate::wires_public_inputs_hash().end..self.config.num_wires { + self.add_simple_generator(RandomValueGenerator { + target: Target::wire(pi_gate, wire), + }); + } + } + /// Builds a "full circuit", with both prover and verifier data. 
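The new `set_domain_separator` hook feeds application-specific field elements into the circuit digest (and hence the Fiat-Shamir seed), so the same logical circuit built with different separators produces mutually unusable proofs. A minimal end-to-end sketch, assuming the usual `PoseidonGoldilocksConfig` setup:

```rust
use anyhow::Result;
use plonky2::field::types::Field;
use plonky2::iop::witness::{PartialWitness, Witness};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};

fn main() -> Result<()> {
    const D: usize = 2;
    type C = PoseidonGoldilocksConfig;
    type F = <C as GenericConfig<D>>::F;

    let mut builder = CircuitBuilder::<F, D>::new(CircuitConfig::standard_recursion_config());

    // Domain-separate this application's proofs; the separator is a vector of field
    // elements and defaults to empty if never set.
    builder.set_domain_separator(vec![F::from_canonical_u64(0xD0)]);

    // A trivial statement: expose x * x as a public input.
    let x = builder.add_virtual_target();
    let x_squared = builder.mul(x, x);
    builder.register_public_input(x_squared);

    let mut pw = PartialWitness::new();
    pw.set_target(x, F::from_canonical_u64(3));

    let data = builder.build::<C>();
    let proof = data.prove(pw)?;
    data.verify(proof)
}
```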
- pub fn build>(mut self) -> CircuitData - where - [(); C::Hasher::HASH_SIZE]:, - { + pub fn build>(mut self) -> CircuitData { let mut timing = TimingTree::new("preprocess", Level::Trace); + #[cfg(feature = "std")] let start = Instant::now(); let rate_bits = self.config.fri_config.rate_bits; let cap_height = self.config.fri_config.cap_height; @@ -708,6 +739,7 @@ impl, const D: usize> CircuitBuilder { { self.connect(hash_part, Target::wire(pi_gate, wire)) } + self.randomize_unused_pi_wires(pi_gate); // Make sure we have enough constant generators. If not, add a `ConstantGate`. while self.constants_to_targets.len() > self.constant_generators.len() { @@ -834,9 +866,12 @@ impl, const D: usize> CircuitBuilder { num_partial_products(self.config.num_routed_wires, quotient_degree_factor); let constants_sigmas_cap = constants_sigmas_commitment.merkle_tree.cap.clone(); + let domain_separator = self.domain_separator.unwrap_or_default(); + let domain_separator_digest = C::Hasher::hash_pad(&domain_separator); // TODO: This should also include an encoding of gate constraints. let circuit_digest_parts = [ constants_sigmas_cap.flatten(), + domain_separator_digest.to_vec(), vec![ F::from_canonical_usize(degree_bits), /* Add other circuit data here */ @@ -857,7 +892,7 @@ impl, const D: usize> CircuitBuilder { num_partial_products, }; if let Some(goal_data) = self.goal_common_data { - assert_eq!(goal_data, common); + assert_eq!(goal_data, common, "The expected circuit data passed to cyclic recursion method did not match the actual circuit"); } let prover_only = ProverOnlyCircuitData { @@ -878,6 +913,7 @@ impl, const D: usize> CircuitBuilder { }; timing.print(); + #[cfg(feature = "std")] debug!("Building circuit took {}s", start.elapsed().as_secs_f32()); CircuitData { prover_only, @@ -887,20 +923,14 @@ impl, const D: usize> CircuitBuilder { } /// Builds a "prover circuit", with data needed to generate proofs but not verify them. - pub fn build_prover>(self) -> ProverCircuitData - where - [(); C::Hasher::HASH_SIZE]:, - { + pub fn build_prover>(self) -> ProverCircuitData { // TODO: Can skip parts of this. let circuit_data = self.build(); circuit_data.prover_data() } /// Builds a "verifier circuit", with data needed to verify proofs but not generate them. - pub fn build_verifier>(self) -> VerifierCircuitData - where - [(); C::Hasher::HASH_SIZE]:, - { + pub fn build_verifier>(self) -> VerifierCircuitData { // TODO: Can skip parts of this. 
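Since `randomize_unused_pi_wires` draws fresh randomness on every witness generation, the division-by-zero corner case in the permutation argument can be handled by simply retrying the prover. A hypothetical wrapper, not part of this diff, and assuming the failure surfaces as an `Err` rather than a panic:

```rust
use anyhow::Result;
use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_data::CircuitData;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::proof::ProofWithPublicInputs;

/// Retry proving a few times; each attempt re-randomizes the unused public-input
/// wires, so the (already tiny) chance of a division-by-zero failure is roughly
/// independent across attempts.
fn prove_with_retry<F, C, const D: usize>(
    data: &CircuitData<F, C, D>,
    inputs: &PartialWitness<F>,
    attempts: usize,
) -> Result<ProofWithPublicInputs<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    assert!(attempts > 0);
    let mut last_err = None;
    for _ in 0..attempts {
        match data.prove(inputs.clone()) {
            Ok(proof) => return Ok(proof),
            Err(e) => last_err = Some(e),
        }
    }
    Err(last_err.unwrap())
}
```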
let circuit_data = self.build(); circuit_data.verifier_data() diff --git a/plonky2/src/plonk/circuit_data.rs b/plonky2/src/plonk/circuit_data.rs index b5e411f1..df455a3a 100644 --- a/plonky2/src/plonk/circuit_data.rs +++ b/plonky2/src/plonk/circuit_data.rs @@ -1,10 +1,13 @@ -use std::collections::BTreeMap; -use std::ops::{Range, RangeFrom}; +use alloc::boxed::Box; +use alloc::collections::BTreeMap; +use alloc::vec; +use alloc::vec::Vec; +use core::ops::{Range, RangeFrom}; use anyhow::Result; -use plonky2_field::extension::Extendable; -use plonky2_field::fft::FftRootTable; +use crate::field::extension::Extendable; +use crate::field::fft::FftRootTable; use crate::field::types::Field; use crate::fri::oracle::PolynomialBatch; use crate::fri::reduction_strategies::FriReductionStrategy; @@ -112,10 +115,7 @@ pub struct CircuitData, C: GenericConfig, impl, C: GenericConfig, const D: usize> CircuitData { - pub fn prove(&self, inputs: PartialWitness) -> Result> - where - [(); C::Hasher::HASH_SIZE]:, - { + pub fn prove(&self, inputs: PartialWitness) -> Result> { prove( &self.prover_only, &self.common, @@ -124,20 +124,14 @@ impl, C: GenericConfig, const D: usize> ) } - pub fn verify(&self, proof_with_pis: ProofWithPublicInputs) -> Result<()> - where - [(); C::Hasher::HASH_SIZE]:, - { + pub fn verify(&self, proof_with_pis: ProofWithPublicInputs) -> Result<()> { verify(proof_with_pis, &self.verifier_only, &self.common) } pub fn verify_compressed( &self, compressed_proof_with_pis: CompressedProofWithPublicInputs, - ) -> Result<()> - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> Result<()> { compressed_proof_with_pis.verify(&self.verifier_only, &self.common) } @@ -151,10 +145,7 @@ impl, C: GenericConfig, const D: usize> pub fn decompress( &self, proof: CompressedProofWithPublicInputs, - ) -> Result> - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> Result> { proof.decompress(&self.verifier_only.circuit_digest, &self.common) } @@ -202,10 +193,7 @@ pub struct ProverCircuitData< impl, C: GenericConfig, const D: usize> ProverCircuitData { - pub fn prove(&self, inputs: PartialWitness) -> Result> - where - [(); C::Hasher::HASH_SIZE]:, - { + pub fn prove(&self, inputs: PartialWitness) -> Result> { prove( &self.prover_only, &self.common, @@ -229,20 +217,14 @@ pub struct VerifierCircuitData< impl, C: GenericConfig, const D: usize> VerifierCircuitData { - pub fn verify(&self, proof_with_pis: ProofWithPublicInputs) -> Result<()> - where - [(); C::Hasher::HASH_SIZE]:, - { + pub fn verify(&self, proof_with_pis: ProofWithPublicInputs) -> Result<()> { verify(proof_with_pis, &self.verifier_only, &self.common) } pub fn verify_compressed( &self, compressed_proof_with_pis: CompressedProofWithPublicInputs, - ) -> Result<()> - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> Result<()> { compressed_proof_with_pis.verify(&self.verifier_only, &self.common) } } diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs index f55bad1f..36d29cff 100644 --- a/plonky2/src/plonk/config.rs +++ b/plonky2/src/plonk/config.rs @@ -1,12 +1,14 @@ -use std::fmt::Debug; +use alloc::vec; +use alloc::vec::Vec; +use core::fmt::Debug; -use plonky2_field::extension::quadratic::QuadraticExtension; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::goldilocks_field::GoldilocksField; -use serde::{de::DeserializeOwned, Serialize}; +use serde::de::DeserializeOwned; +use serde::Serialize; -use crate::hash::hash_types::HashOut; -use crate::hash::hash_types::RichField; +use 
crate::field::extension::quadratic::QuadraticExtension; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::goldilocks_field::GoldilocksField; +use crate::hash::hash_types::{HashOut, RichField}; use crate::hash::hashing::{PlonkyPermutation, SPONGE_WIDTH}; use crate::hash::keccak::KeccakHash; use crate::hash::poseidon::PoseidonHash; @@ -26,6 +28,8 @@ pub trait GenericHashOut: pub trait Hasher: Sized + Clone + Debug + Eq + PartialEq { /// Size of `Hash` in bytes. const HASH_SIZE: usize; + + /// Hash Output type Hash: GenericHashOut; /// Permutation used in the sponge construction. @@ -48,12 +52,9 @@ pub trait Hasher: Sized + Clone + Debug + Eq + PartialEq { /// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a /// no-op. - fn hash_or_noop(inputs: &[F]) -> Self::Hash - where - [(); Self::HASH_SIZE]:, - { + fn hash_or_noop(inputs: &[F]) -> Self::Hash { if inputs.len() <= 4 { - let mut inputs_bytes = [0u8; Self::HASH_SIZE]; + let mut inputs_bytes = vec![0u8; Self::HASH_SIZE]; for i in 0..inputs.len() { inputs_bytes[i * 8..(i + 1) * 8] .copy_from_slice(&inputs[i].to_canonical_u64().to_le_bytes()); diff --git a/plonky2/src/plonk/copy_constraint.rs b/plonky2/src/plonk/copy_constraint.rs index a838ed37..50e85fbf 100644 --- a/plonky2/src/plonk/copy_constraint.rs +++ b/plonky2/src/plonk/copy_constraint.rs @@ -1,3 +1,5 @@ +use alloc::string::String; + use crate::iop::target::Target; /// A named copy constraint. diff --git a/plonky2/src/plonk/get_challenges.rs b/plonky2/src/plonk/get_challenges.rs index 116529e7..240d9047 100644 --- a/plonky2/src/plonk/get_challenges.rs +++ b/plonky2/src/plonk/get_challenges.rs @@ -1,8 +1,10 @@ -use std::collections::HashSet; +use alloc::vec; +use alloc::vec::Vec; -use plonky2_field::extension::Extendable; -use plonky2_field::polynomial::PolynomialCoeffs; +use hashbrown::HashSet; +use crate::field::extension::Extendable; +use crate::field::polynomial::PolynomialCoeffs; use crate::fri::proof::{CompressedFriProof, FriChallenges, FriProof, FriProofTarget}; use crate::fri::verifier::{compute_evaluation, fri_combine_initial, PrecomputedReducedOpenings}; use crate::gadgets::polynomial::PolynomialCoeffsExtTarget; diff --git a/plonky2/src/plonk/permutation_argument.rs b/plonky2/src/plonk/permutation_argument.rs index 3658a12d..7052cabe 100644 --- a/plonky2/src/plonk/permutation_argument.rs +++ b/plonky2/src/plonk/permutation_argument.rs @@ -1,13 +1,14 @@ -use std::collections::HashMap; +use alloc::vec::Vec; +use hashbrown::HashMap; use maybe_rayon::*; -use plonky2_field::polynomial::PolynomialValues; -use plonky2_field::types::Field; +use crate::field::polynomial::PolynomialValues; +use crate::field::types::Field; use crate::iop::target::Target; use crate::iop::wire::Wire; -/// Disjoint Set Forest data-structure following https://en.wikipedia.org/wiki/Disjoint-set_data_structure. +/// Disjoint Set Forest data-structure following . pub struct Forest { /// A map of parent pointers, stored as indices. pub(crate) parents: Vec, @@ -44,7 +45,7 @@ impl Forest { self.parents.push(index); } - /// Path compression method, see https://en.wikipedia.org/wiki/Disjoint-set_data_structure#Finding_set_representatives. + /// Path compression method, see . pub fn find(&mut self, mut x_index: usize) -> usize { // Note: We avoid recursion here since the chains can be long, causing stack overflows. 
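The `Forest::find` comment above notes that path compression is done iteratively because parent chains can be long enough to overflow the stack. For readers unfamiliar with the trick, a generic, self-contained illustration (not the crate's exact code):

```rust
/// Iterative find-with-path-compression on a parent-pointer forest: first locate the
/// representative, then repoint every node on the walked path directly at it.
fn find(parents: &mut [usize], mut x: usize) -> usize {
    let mut root = x;
    while parents[root] != root {
        root = parents[root];
    }
    while parents[x] != root {
        let next = parents[x];
        parents[x] = root;
        x = next;
    }
    root
}

fn main() {
    // A chain 3 -> 2 -> 1 -> 0 that `find` flattens in one call.
    let mut parents = vec![0, 0, 1, 2];
    assert_eq!(find(&mut parents, 3), 0);
    assert_eq!(parents, vec![0, 0, 0, 0]);
}
```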
diff --git a/plonky2/src/plonk/plonk_common.rs b/plonky2/src/plonk/plonk_common.rs index 24a94bb3..53c75af1 100644 --- a/plonky2/src/plonk/plonk_common.rs +++ b/plonky2/src/plonk/plonk_common.rs @@ -1,7 +1,9 @@ -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; -use plonky2_field::types::Field; +use alloc::vec; +use alloc::vec::Vec; +use crate::field::extension::Extendable; +use crate::field::packed::PackedField; +use crate::field::types::Field; use crate::fri::oracle::SALT_SIZE; use crate::gates::arithmetic_base::ArithmeticGate; use crate::hash::hash_types::RichField; diff --git a/plonky2/src/plonk/proof.rs b/plonky2/src/plonk/proof.rs index 1a7a26db..fb9e6cde 100644 --- a/plonky2/src/plonk/proof.rs +++ b/plonky2/src/plonk/proof.rs @@ -1,8 +1,11 @@ +use alloc::vec; +use alloc::vec::Vec; + use anyhow::ensure; use maybe_rayon::*; -use plonky2_field::extension::Extendable; use serde::{Deserialize, Serialize}; +use crate::field::extension::Extendable; use crate::fri::oracle::PolynomialBatch; use crate::fri::proof::{ CompressedFriProof, FriChallenges, FriChallengesTarget, FriProof, FriProofTarget, @@ -18,7 +21,9 @@ use crate::iop::target::Target; use crate::plonk::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData}; use crate::plonk::config::{GenericConfig, Hasher}; use crate::plonk::verifier::verify_with_challenges; -use crate::util::serialization::Buffer; +use crate::util::serialization::Write; +#[cfg(feature = "std")] +use crate::util::serialization::{Buffer, Read}; #[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] #[serde(bound = "")] @@ -35,7 +40,7 @@ pub struct Proof, C: GenericConfig, const pub opening_proof: FriProof, } -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct ProofTarget { pub wires_cap: MerkleCapTarget, pub plonk_zs_partial_products_cap: MerkleCapTarget, @@ -98,18 +103,23 @@ impl, C: GenericConfig, const D: usize> C::InnerHasher::hash_no_pad(&self.public_inputs) } - pub fn to_bytes(&self) -> anyhow::Result> { - let mut buffer = Buffer::new(Vec::new()); - buffer.write_proof_with_public_inputs(self)?; - Ok(buffer.bytes()) + pub fn to_bytes(&self) -> Vec { + let mut buffer = Vec::new(); + buffer + .write_proof_with_public_inputs(self) + .expect("Writing to a byte-vector cannot fail."); + buffer } + #[cfg(feature = "std")] pub fn from_bytes( bytes: Vec, common_data: &CommonCircuitData, ) -> anyhow::Result { let mut buffer = Buffer::new(bytes); - let proof = buffer.read_proof_with_public_inputs(common_data)?; + let proof = buffer + .read_proof_with_public_inputs(common_data) + .map_err(anyhow::Error::msg)?; Ok(proof) } } @@ -139,10 +149,7 @@ impl, C: GenericConfig, const D: usize> challenges: &ProofChallenges, fri_inferred_elements: FriInferredElements, params: &FriParams, - ) -> Proof - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> Proof { let CompressedProof { wires_cap, plonk_zs_partial_products_cap, @@ -179,10 +186,7 @@ impl, C: GenericConfig, const D: usize> self, circuit_digest: &<>::Hasher as Hasher>::Hash, common_data: &CommonCircuitData, - ) -> anyhow::Result> - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> anyhow::Result> { let challenges = self.get_challenges(self.get_public_inputs_hash(), circuit_digest, common_data)?; let fri_inferred_elements = self.get_inferred_elements(&challenges, common_data); @@ -199,10 +203,7 @@ impl, C: GenericConfig, const D: usize> self, verifier_data: &VerifierOnlyCircuitData, common_data: &CommonCircuitData, - ) -> anyhow::Result<()> - where - [(); C::Hasher::HASH_SIZE]:, - { + 
) -> anyhow::Result<()> { ensure!( self.public_inputs.len() == common_data.num_public_inputs, "Number of public inputs doesn't match circuit data." @@ -232,18 +233,23 @@ impl, C: GenericConfig, const D: usize> C::InnerHasher::hash_no_pad(&self.public_inputs) } - pub fn to_bytes(&self) -> anyhow::Result> { - let mut buffer = Buffer::new(Vec::new()); - buffer.write_compressed_proof_with_public_inputs(self)?; - Ok(buffer.bytes()) + pub fn to_bytes(&self) -> Vec { + let mut buffer = Vec::new(); + buffer + .write_compressed_proof_with_public_inputs(self) + .expect("Writing to a byte-vector cannot fail."); + buffer } + #[cfg(feature = "std")] pub fn from_bytes( bytes: Vec, common_data: &CommonCircuitData, ) -> anyhow::Result { let mut buffer = Buffer::new(bytes); - let proof = buffer.read_compressed_proof_with_public_inputs(common_data)?; + let proof = buffer + .read_compressed_proof_with_public_inputs(common_data) + .map_err(anyhow::Error::msg)?; Ok(proof) } } @@ -277,7 +283,7 @@ pub(crate) struct FriInferredElements, const D: usi pub Vec, ); -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct ProofWithPublicInputsTarget { pub proof: ProofTarget, pub public_inputs: Vec, @@ -385,8 +391,8 @@ impl OpeningSetTarget { #[cfg(test)] mod tests { use anyhow::Result; - use plonky2_field::types::Field; + use crate::field::types::Sample; use crate::fri::reduction_strategies::FriReductionStrategy; use crate::gates::noop::NoopGate; use crate::iop::witness::PartialWitness; diff --git a/plonky2/src/plonk/prover.rs b/plonky2/src/plonk/prover.rs index 621f20ef..d45fa573 100644 --- a/plonky2/src/plonk/prover.rs +++ b/plonky2/src/plonk/prover.rs @@ -1,14 +1,14 @@ -use std::mem::swap; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::mem::swap; -use anyhow::ensure; -use anyhow::Result; +use anyhow::{ensure, Result}; use maybe_rayon::*; -use plonky2_field::extension::Extendable; -use plonky2_field::polynomial::{PolynomialCoeffs, PolynomialValues}; -use plonky2_field::zero_poly_coset::ZeroPolyOnCoset; -use plonky2_util::{ceil_div_usize, log2_ceil}; +use crate::field::extension::Extendable; +use crate::field::polynomial::{PolynomialCoeffs, PolynomialValues}; use crate::field::types::Field; +use crate::field::zero_poly_coset::ZeroPolyOnCoset; use crate::fri::oracle::PolynomialBatch; use crate::hash::hash_types::RichField; use crate::iop::challenger::Challenger; @@ -17,24 +17,20 @@ use crate::iop::witness::{MatrixWitness, PartialWitness, Witness}; use crate::plonk::circuit_data::{CommonCircuitData, ProverOnlyCircuitData}; use crate::plonk::config::{GenericConfig, Hasher}; use crate::plonk::plonk_common::PlonkOracle; -use crate::plonk::proof::OpeningSet; -use crate::plonk::proof::{Proof, ProofWithPublicInputs}; +use crate::plonk::proof::{OpeningSet, Proof, ProofWithPublicInputs}; use crate::plonk::vanishing_poly::eval_vanishing_poly_base_batch; use crate::plonk::vars::EvaluationVarsBaseBatch; use crate::timed; use crate::util::partial_products::{partial_products_and_z_gx, quotient_chunk_products}; use crate::util::timing::TimingTree; -use crate::util::transpose; +use crate::util::{ceil_div_usize, log2_ceil, transpose}; pub fn prove, C: GenericConfig, const D: usize>( prover_data: &ProverOnlyCircuitData, common_data: &CommonCircuitData, inputs: PartialWitness, timing: &mut TimingTree, -) -> Result> -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> Result> { let config = &common_data.config; let num_challenges = config.num_challenges; let quotient_degree = common_data.quotient_degree(); @@ -351,7 +347,7 @@ fn 
compute_quotient_polys< let num_batches = ceil_div_usize(points.len(), BATCH_SIZE); let quotient_values: Vec> = points_batches .enumerate() - .map(|(batch_i, xs_batch)| { + .flat_map(|(batch_i, xs_batch)| { // Each batch must be the same size, except the last one, which may be smaller. debug_assert!( xs_batch.len() == BATCH_SIZE @@ -448,7 +444,6 @@ fn compute_quotient_polys< } quotient_values_batch }) - .flatten() .collect(); transpose("ient_values) diff --git a/plonky2/src/plonk/validate_shape.rs b/plonky2/src/plonk/validate_shape.rs index 1e6708cc..0b4bf4a8 100644 --- a/plonky2/src/plonk/validate_shape.rs +++ b/plonky2/src/plonk/validate_shape.rs @@ -1,9 +1,9 @@ use anyhow::ensure; -use plonky2_field::extension::Extendable; +use crate::field::extension::Extendable; use crate::hash::hash_types::RichField; use crate::plonk::circuit_data::CommonCircuitData; -use crate::plonk::config::{GenericConfig, Hasher}; +use crate::plonk::config::GenericConfig; use crate::plonk::proof::{OpeningSet, Proof, ProofWithPublicInputs}; pub(crate) fn validate_proof_with_pis_shape( @@ -13,20 +13,16 @@ pub(crate) fn validate_proof_with_pis_shape( where F: RichField + Extendable, C: GenericConfig, - [(); C::Hasher::HASH_SIZE]:, { let ProofWithPublicInputs { proof, public_inputs, } = proof_with_pis; - validate_proof_shape(proof, common_data)?; - ensure!( public_inputs.len() == common_data.num_public_inputs, "Number of public inputs doesn't match circuit data." ); - Ok(()) } @@ -37,7 +33,6 @@ fn validate_proof_shape( where F: RichField + Extendable, C: GenericConfig, - [(); C::Hasher::HASH_SIZE]:, { let config = &common_data.config; let Proof { @@ -49,7 +44,6 @@ where // validate_fri_proof_shape), so we ignore it here. opening_proof: _, } = proof; - let OpeningSet { constants, plonk_sigmas, @@ -59,12 +53,10 @@ where partial_products, quotient_polys, } = openings; - let cap_height = common_data.fri_params.config.cap_height; ensure!(wires_cap.height() == cap_height); ensure!(plonk_zs_partial_products_cap.height() == cap_height); ensure!(quotient_polys_cap.height() == cap_height); - ensure!(constants.len() == common_data.num_constants); ensure!(plonk_sigmas.len() == config.num_routed_wires); ensure!(wires.len() == config.num_wires); @@ -72,6 +64,5 @@ where ensure!(plonk_zs_next.len() == config.num_challenges); ensure!(partial_products.len() == config.num_challenges * common_data.num_partial_products); ensure!(quotient_polys.len() == common_data.num_quotient_polys()); - Ok(()) } diff --git a/plonky2/src/plonk/vanishing_poly.rs b/plonky2/src/plonk/vanishing_poly.rs index 28d43f4f..30473650 100644 --- a/plonky2/src/plonk/vanishing_poly.rs +++ b/plonky2/src/plonk/vanishing_poly.rs @@ -1,8 +1,10 @@ -use plonky2_field::batch_util::batch_add_inplace; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::types::Field; -use plonky2_field::zero_poly_coset::ZeroPolyOnCoset; +use alloc::vec::Vec; +use alloc::{format, vec}; +use crate::field::batch_util::batch_add_inplace; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::types::Field; +use crate::field::zero_poly_coset::ZeroPolyOnCoset; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::Target; diff --git a/plonky2/src/plonk/vars.rs b/plonky2/src/plonk/vars.rs index a2474e79..758018f5 100644 --- a/plonky2/src/plonk/vars.rs +++ b/plonky2/src/plonk/vars.rs @@ -1,10 +1,9 @@ -use std::ops::Range; - -use plonky2_field::extension::algebra::ExtensionAlgebra; -use 
plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::packed::PackedField; -use plonky2_field::types::Field; +use core::ops::Range; +use crate::field::extension::algebra::ExtensionAlgebra; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::packed::PackedField; +use crate::field::types::Field; use crate::hash::hash_types::{HashOut, HashOutTarget, RichField}; use crate::iop::ext_target::{ExtensionAlgebraTarget, ExtensionTarget}; use crate::util::strided_view::PackedStridedView; @@ -85,6 +84,10 @@ impl<'a, F: Field> EvaluationVarsBaseBatch<'a, F> { self.batch_size } + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub fn view(&self, index: usize) -> EvaluationVarsBase<'a, F> { // We cannot implement `Index` as `EvaluationVarsBase` is a struct, not a reference. assert!(index < self.len()); diff --git a/plonky2/src/plonk/verifier.rs b/plonky2/src/plonk/verifier.rs index 52681558..893720c6 100644 --- a/plonky2/src/plonk/verifier.rs +++ b/plonky2/src/plonk/verifier.rs @@ -1,7 +1,7 @@ use anyhow::{ensure, Result}; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; +use crate::field::extension::Extendable; +use crate::field::types::Field; use crate::fri::verifier::verify_fri_proof; use crate::hash::hash_types::RichField; use crate::plonk::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData}; @@ -16,10 +16,7 @@ pub(crate) fn verify, C: GenericConfig, c proof_with_pis: ProofWithPublicInputs, verifier_data: &VerifierOnlyCircuitData, common_data: &CommonCircuitData, -) -> Result<()> -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> Result<()> { validate_proof_with_pis_shape(&proof_with_pis, common_data)?; let public_inputs_hash = proof_with_pis.get_public_inputs_hash(); @@ -48,10 +45,7 @@ pub(crate) fn verify_with_challenges< challenges: ProofChallenges, verifier_data: &VerifierOnlyCircuitData, common_data: &CommonCircuitData, -) -> Result<()> -where - [(); C::Hasher::HASH_SIZE]:, -{ +) -> Result<()> { let local_constants = &proof.openings.constants; let local_wires = &proof.openings.wires; let vars = EvaluationVars { diff --git a/plonky2/src/recursion/conditional_recursive_verifier.rs b/plonky2/src/recursion/conditional_recursive_verifier.rs index 6bafc623..be7ed028 100644 --- a/plonky2/src/recursion/conditional_recursive_verifier.rs +++ b/plonky2/src/recursion/conditional_recursive_verifier.rs @@ -1,74 +1,22 @@ -use anyhow::{ensure, Result}; -use itertools::Itertools; -use plonky2_field::extension::Extendable; -use plonky2_util::ceil_div_usize; +use alloc::vec::Vec; +use itertools::Itertools; + +use crate::field::extension::Extendable; use crate::fri::proof::{ FriInitialTreeProofTarget, FriProofTarget, FriQueryRoundTarget, FriQueryStepTarget, }; use crate::gadgets::polynomial::PolynomialCoeffsExtTarget; -use crate::gates::noop::NoopGate; use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField}; use crate::hash::merkle_proofs::MerkleProofTarget; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::{BoolTarget, Target}; -use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; -use crate::plonk::circuit_data::{ - CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, -}; -use crate::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; -use crate::plonk::proof::{ - OpeningSetTarget, ProofTarget, ProofWithPublicInputs, ProofWithPublicInputsTarget, -}; +use crate::plonk::circuit_data::{CommonCircuitData, 
VerifierCircuitTarget}; +use crate::plonk::config::{AlgebraicHasher, GenericConfig}; +use crate::plonk::proof::{OpeningSetTarget, ProofTarget, ProofWithPublicInputsTarget}; use crate::with_context; -/// Generate a proof having a given `CommonCircuitData`. -pub(crate) fn dummy_proof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, ->( - common_data: &CommonCircuitData, -) -> Result<( - ProofWithPublicInputs, - VerifierOnlyCircuitData, -)> -where - [(); C::Hasher::HASH_SIZE]:, -{ - let config = common_data.config.clone(); - - let mut pw = PartialWitness::new(); - let mut builder = CircuitBuilder::::new(config); - - ensure!( - !common_data.config.zero_knowledge, - "Degree calculation can be off if zero-knowledge is on." - ); - let degree = common_data.degree(); - // Number of `NoopGate`s to add to get a circuit of size `degree` in the end. - // Need to account for public input hashing, a `PublicInputGate` and a `ConstantGate`. - let num_noop_gate = degree - ceil_div_usize(common_data.num_public_inputs, 8) - 2; - - for _ in 0..num_noop_gate { - builder.add_gate(NoopGate, vec![]); - } - for gate in &common_data.gates { - builder.add_gate_to_gate_set(gate.clone()); - } - for _ in 0..common_data.num_public_inputs { - let t = builder.add_virtual_public_input(); - pw.set_target(t, F::ZERO); - } - - let data = builder.build::(); - assert_eq!(&data.common, common_data); - let proof = data.prove(pw)?; - - Ok((proof, data.verifier_only)) -} - impl, const D: usize> CircuitBuilder { /// Verify `proof0` if `condition` else verify `proof1`. /// `proof0` and `proof1` are assumed to use the same `CommonCircuitData`. @@ -143,7 +91,7 @@ impl, const D: usize> CircuitBuilder { ), }; - self.verify_proof::(selected_proof, &selected_verifier_data, inner_common_data); + self.verify_proof::(&selected_proof, &selected_verifier_data, inner_common_data); } /// Conditionally verify a proof with a new generated dummy proof. @@ -189,7 +137,7 @@ impl, const D: usize> CircuitBuilder { h1: HashOutTarget, ) -> HashOutTarget { HashOutTarget { - elements: std::array::from_fn(|i| self.select(b, h0.elements[i], h1.elements[i])), + elements: core::array::from_fn(|i| self.select(b, h0.elements[i], h1.elements[i])), } } @@ -369,13 +317,15 @@ impl, const D: usize> CircuitBuilder { #[cfg(test)] mod tests { use anyhow::Result; + use hashbrown::HashMap; use super::*; - use crate::field::types::Field; + use crate::field::types::Sample; use crate::gates::noop::NoopGate; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use crate::recursion::dummy_circuit::{dummy_circuit, dummy_proof}; #[test] fn test_conditional_recursive_verifier() -> Result<()> { @@ -400,7 +350,8 @@ mod tests { data.verify(proof.clone())?; // Generate dummy proof with the same `CommonCircuitData`. - let (dummy_proof, dummy_data) = dummy_proof(&data.common)?; + let dummy_data = dummy_circuit(&data.common); + let dummy_proof = dummy_proof(&dummy_data, HashMap::new())?; // Conditionally verify the two proofs. 
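Dummy-proof generation is now split into `dummy_circuit` (build a padding circuit matching some `CommonCircuitData`) and `dummy_proof` (prove it, taking a map from public-input index to value; an empty map leaves every public input at zero). A sketch of that flow, mirroring the test above and assuming the new `dummy_circuit` module is publicly exported with the signatures used there:

```rust
use anyhow::Result;
use hashbrown::HashMap;
use plonky2::field::types::Field;
use plonky2::plonk::circuit_data::CircuitData;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::plonk::proof::ProofWithPublicInputs;
use plonky2::recursion::dummy_circuit::{dummy_circuit, dummy_proof};

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;

/// Build a dummy circuit with the same shape as `data` and prove it, pinning public
/// input 0 to one and leaving the rest at zero.
fn make_dummy(
    data: &CircuitData<F, C, D>,
) -> Result<(CircuitData<F, C, D>, ProofWithPublicInputs<F, C, D>)> {
    let circuit: CircuitData<F, C, D> = dummy_circuit(&data.common);
    let mut nonzero_pis = HashMap::new();
    nonzero_pis.insert(0usize, F::ONE);
    let proof = dummy_proof(&circuit, nonzero_pis)?;
    Ok((circuit, proof))
}
```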
let mut builder = CircuitBuilder::::new(config); @@ -418,7 +369,7 @@ mod tests { constants_sigmas_cap: builder.add_virtual_cap(data.common.config.fri_config.cap_height), circuit_digest: builder.add_virtual_hash(), }; - pw.set_verifier_data_target(&dummy_inner_data, &dummy_data); + pw.set_verifier_data_target(&dummy_inner_data, &dummy_data.verifier_only); let b = builder.constant_bool(F::rand().0 % 2 == 0); builder.conditionally_verify_proof::( b, diff --git a/plonky2/src/recursion/cyclic_recursion.rs b/plonky2/src/recursion/cyclic_recursion.rs index f2ad7eb9..497d655b 100644 --- a/plonky2/src/recursion/cyclic_recursion.rs +++ b/plonky2/src/recursion/cyclic_recursion.rs @@ -1,8 +1,12 @@ #![allow(clippy::int_plus_one)] // Makes more sense for some inequalities below. -use anyhow::{ensure, Result}; -use itertools::Itertools; -use plonky2_field::extension::Extendable; +use alloc::vec; + +use anyhow::{ensure, Result}; +use hashbrown::HashMap; +use itertools::Itertools; + +use crate::field::extension::Extendable; use crate::gates::noop::NoopGate; use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField}; use crate::hash::merkle_tree::MerkleCap; @@ -10,12 +14,11 @@ use crate::iop::target::{BoolTarget, Target}; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::{ - CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, + CircuitData, CommonCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, }; -use crate::plonk::config::Hasher; use crate::plonk::config::{AlgebraicHasher, GenericConfig}; use crate::plonk::proof::{ProofWithPublicInputs, ProofWithPublicInputsTarget}; -use crate::recursion::conditional_recursive_verifier::dummy_proof; +use crate::recursion::dummy_circuit::{dummy_circuit, dummy_proof}; pub struct CyclicRecursionData< 'a, @@ -28,12 +31,17 @@ pub struct CyclicRecursionData< common_data: &'a CommonCircuitData, } -pub struct CyclicRecursionTarget { - pub proof: ProofWithPublicInputsTarget, - pub verifier_data: VerifierCircuitTarget, - pub dummy_proof: ProofWithPublicInputsTarget, - pub dummy_verifier_data: VerifierCircuitTarget, - pub base_case: BoolTarget, +pub struct CyclicRecursionTarget +where + F: RichField + Extendable, + C: GenericConfig, +{ + pub(crate) proof: ProofWithPublicInputsTarget, + pub(crate) verifier_data: VerifierCircuitTarget, + pub(crate) dummy_proof: ProofWithPublicInputsTarget, + pub(crate) dummy_verifier_data: VerifierCircuitTarget, + pub(crate) condition: BoolTarget, + pub(crate) dummy_circuit: CircuitData, } impl, const D: usize> VerifierOnlyCircuitData { @@ -48,7 +56,7 @@ impl, const D: usize> VerifierOnlyCircuitData { let constants_sigmas_cap = MerkleCap( (0..cap_len) .map(|i| HashOut { - elements: std::array::from_fn(|j| slice[len - 4 * (cap_len - i) + j]), + elements: core::array::from_fn(|j| slice[len - 4 * (cap_len - i) + j]), }) .collect(), ); @@ -73,12 +81,12 @@ impl VerifierCircuitTarget { let constants_sigmas_cap = MerkleCapTarget( (0..cap_len) .map(|i| HashOutTarget { - elements: std::array::from_fn(|j| slice[len - 4 * (cap_len - i) + j]), + elements: core::array::from_fn(|j| slice[len - 4 * (cap_len - i) + j]), }) .collect(), ); let circuit_digest = HashOutTarget { - elements: std::array::from_fn(|i| slice[len - 4 - 4 * cap_len + i]), + elements: core::array::from_fn(|i| slice[len - 4 - 4 * cap_len + i]), }; Ok(Self { @@ -89,24 +97,32 @@ impl VerifierCircuitTarget { } impl, const D: usize> CircuitBuilder { - /// Cyclic 
recursion gadget. + /// If `condition` is true, recursively verify a proof for the same circuit as the one we're + /// currently building. + /// + /// For a typical IVC use case, `condition` will be false for the very first proof in a chain, + /// i.e. the base case. + /// + /// Note that this does not enforce that the inner circuit uses the correct verification key. + /// This is not possible to check in this recursive circuit, since we do not know the + /// verification key until after we build it. Verifiers must separately call + /// `check_cyclic_proof_verifier_data`, in addition to verifying a recursive proof, to check + /// that the verification key matches. + /// /// WARNING: Do not register any public input after calling this! TODO: relax this pub fn cyclic_recursion>( &mut self, - // Flag set to true for the base case of the cycle where we verify a dummy proof to bootstrap the cycle. Set to false otherwise. - base_case: BoolTarget, - previous_virtual_public_inputs: &[Target], - common_data: &mut CommonCircuitData, - ) -> Result> + condition: BoolTarget, + proof_with_pis: &ProofWithPublicInputsTarget, + common_data: &CommonCircuitData, + ) -> Result> where C::Hasher: AlgebraicHasher, - [(); C::Hasher::HASH_SIZE]:, { - if self.verifier_data_public_input.is_none() { - self.add_verifier_data_public_input(); - } - let verifier_data = self.verifier_data_public_input.clone().unwrap(); - common_data.num_public_inputs = self.num_public_inputs(); + let verifier_data = self + .verifier_data_public_input + .clone() + .expect("Must call add_verifier_data_public_inputs before cyclic recursion"); self.goal_common_data = Some(common_data.clone()); let dummy_verifier_data = VerifierCircuitTarget { @@ -114,10 +130,12 @@ impl, const D: usize> CircuitBuilder { circuit_digest: self.add_virtual_hash(), }; - let proof = self.add_virtual_proof_with_pis::(common_data); let dummy_proof = self.add_virtual_proof_with_pis::(common_data); - let pis = VerifierCircuitTarget::from_slice::(&proof.public_inputs, common_data)?; + let pis = VerifierCircuitTarget::from_slice::( + &proof_with_pis.public_inputs, + common_data, + )?; // Connect previous verifier data to current one. This guarantees that every proof in the cycle uses the same verifier data. self.connect_hashes(pis.circuit_digest, verifier_data.circuit_digest); for (h0, h1) in pis @@ -129,20 +147,13 @@ impl, const D: usize> CircuitBuilder { self.connect_hashes(*h0, *h1); } - for (x, y) in previous_virtual_public_inputs - .iter() - .zip(&proof.public_inputs) - { - self.connect(*x, *y); - } - - // Verify the dummy proof if `base_case` is set to true, otherwise verify the "real" proof. + // Verify the real proof if `condition` is set to true, otherwise verify the dummy proof. self.conditionally_verify_proof::( - base_case, + condition, + proof_with_pis, + &verifier_data, &dummy_proof, &dummy_verifier_data, - &proof, - &verifier_data, common_data, ); @@ -156,33 +167,35 @@ impl, const D: usize> CircuitBuilder { } Ok(CyclicRecursionTarget { - proof, - verifier_data: verifier_data.clone(), + proof: proof_with_pis.clone(), + verifier_data, dummy_proof, dummy_verifier_data, - base_case, + condition, + dummy_circuit: dummy_circuit(common_data), }) } } /// Set the targets in a `CyclicRecursionTarget` to their corresponding values in a `CyclicRecursionData`. +/// The `public_inputs` parameter let the caller specify certain public inputs (identified by their +/// indices) which should be given specific values. The rest will default to zero. 
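The reworked gadget replaces the old `base_case` flag with `condition` (true means "verify the inner proof", false means base case, where a dummy proof is verified instead), and it expects the verifier-data public inputs to be registered up front. The call order below is distilled from the test further down; the `CommonCircuitData<F, D>` type parameters are my reading of this snapshot rather than something stated explicitly in the diff:

```rust
use anyhow::Result;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CommonCircuitData;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};

const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;

/// Wire one step of an IVC-style cycle. `common_data` must describe the recursive
/// circuit itself (see `common_data_for_recursion` in the test below), and all
/// application-level public inputs must be registered before calling this.
fn wire_cycle_step(
    builder: &mut CircuitBuilder<F, D>,
    common_data: &mut CommonCircuitData<F, D>,
) -> Result<()> {
    // The verification key is appended to the public inputs; nothing else may be
    // registered after this call.
    builder.add_verifier_data_public_inputs();
    common_data.num_public_inputs = builder.num_public_inputs();

    // False for the base case, true when an inner proof of this same circuit exists.
    let condition = builder.add_virtual_bool_target_safe();
    let inner_proof = builder.add_virtual_proof_with_pis::<C>(common_data);

    // Conditionally verifies `inner_proof` and ties its verifier-data public inputs
    // to this circuit's own. Keep the returned target around for
    // `set_cyclic_recursion_data_target` at proving time.
    let _cyclic_target = builder.cyclic_recursion::<C>(condition, &inner_proof, common_data)?;
    Ok(())
}
```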
pub fn set_cyclic_recursion_data_target< F: RichField + Extendable, C: GenericConfig, const D: usize, >( pw: &mut PartialWitness, - cyclic_recursion_data_target: &CyclicRecursionTarget, + cyclic_recursion_data_target: &CyclicRecursionTarget, cyclic_recursion_data: &CyclicRecursionData, // Public inputs to set in the base case to seed some initial data. - public_inputs: &[F], + mut public_inputs: HashMap, ) -> Result<()> where C::Hasher: AlgebraicHasher, - [(); C::Hasher::HASH_SIZE]:, { if let Some(proof) = cyclic_recursion_data.proof { - pw.set_bool_target(cyclic_recursion_data_target.base_case, false); + pw.set_bool_target(cyclic_recursion_data_target.condition, true); pw.set_proof_with_pis_target(&cyclic_recursion_data_target.proof, proof); pw.set_verifier_data_target( &cyclic_recursion_data_target.verifier_data, @@ -194,36 +207,41 @@ where cyclic_recursion_data.verifier_data, ); } else { - let (dummy_proof, dummy_data) = dummy_proof::(cyclic_recursion_data.common_data)?; - pw.set_bool_target(cyclic_recursion_data_target.base_case, true); - let mut proof = dummy_proof.clone(); - proof.public_inputs[0..public_inputs.len()].copy_from_slice(public_inputs); - let pis_len = proof.public_inputs.len(); - // The circuit checks that the verifier data is the same throughout the cycle, so - // we set the verifier data to the "real" verifier data even though it's unused in the base case. - let num_cap = cyclic_recursion_data + pw.set_bool_target(cyclic_recursion_data_target.condition, false); + + let pis_len = cyclic_recursion_data_target + .dummy_circuit + .common + .num_public_inputs; + let cap_elements = cyclic_recursion_data .common_data .config .fri_config .num_cap_elements(); - let s = pis_len - 4 - 4 * num_cap; - proof.public_inputs[s..s + 4] - .copy_from_slice(&cyclic_recursion_data.verifier_data.circuit_digest.elements); - for i in 0..num_cap { - proof.public_inputs[s + 4 * (1 + i)..s + 4 * (2 + i)].copy_from_slice( - &cyclic_recursion_data.verifier_data.constants_sigmas_cap.0[i].elements, - ); + let start_vk_pis = pis_len - 4 - 4 * cap_elements; + + // The circuit checks that the verifier data is the same throughout the cycle, so + // we set the verifier data to the "real" verifier data even though it's unused in the base case. + let verifier_data = &cyclic_recursion_data.verifier_data; + public_inputs.extend((start_vk_pis..).zip(verifier_data.circuit_digest.elements)); + + for i in 0..cap_elements { + let start = start_vk_pis + 4 + 4 * i; + public_inputs.extend((start..).zip(verifier_data.constants_sigmas_cap.0[i].elements)); } + let proof = dummy_proof(&cyclic_recursion_data_target.dummy_circuit, public_inputs)?; pw.set_proof_with_pis_target(&cyclic_recursion_data_target.proof, &proof); pw.set_verifier_data_target( &cyclic_recursion_data_target.verifier_data, cyclic_recursion_data.verifier_data, ); - pw.set_proof_with_pis_target(&cyclic_recursion_data_target.dummy_proof, &dummy_proof); + + let dummy_p = dummy_proof(&cyclic_recursion_data_target.dummy_circuit, HashMap::new())?; + pw.set_proof_with_pis_target(&cyclic_recursion_data_target.dummy_proof, &dummy_p); pw.set_verifier_data_target( &cyclic_recursion_data_target.dummy_verifier_data, - &dummy_data, + &cyclic_recursion_data_target.dummy_circuit.verifier_only, ); } @@ -231,8 +249,7 @@ where } /// Additional checks to be performed on a cyclic recursive proof in addition to verifying the proof. -/// Checks that the `base_case` flag is boolean and that the purported verifier data in the public inputs -/// match the real verifier data. 
+/// Checks that the purported verifier data in the public inputs match the real verifier data. pub fn check_cyclic_proof_verifier_data< F: RichField + Extendable, C: GenericConfig, @@ -254,20 +271,19 @@ where #[cfg(test)] mod tests { - use anyhow::Result; - use plonky2_field::extension::Extendable; - use plonky2_field::types::PrimeField64; + use hashbrown::HashMap; - use crate::field::types::Field; + use crate::field::extension::Extendable; + use crate::field::types::{Field, PrimeField64}; use crate::gates::noop::NoopGate; - use crate::hash::hash_types::RichField; + use crate::hash::hash_types::{HashOutTarget, RichField}; use crate::hash::hashing::hash_n_to_hash_no_pad; use crate::hash::poseidon::{PoseidonHash, PoseidonPermutation}; use crate::iop::witness::PartialWitness; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::{CircuitConfig, CommonCircuitData, VerifierCircuitTarget}; - use crate::plonk::config::{AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig}; + use crate::plonk::config::{AlgebraicHasher, GenericConfig, PoseidonGoldilocksConfig}; use crate::recursion::cyclic_recursion::{ check_cyclic_proof_verifier_data, set_cyclic_recursion_data_target, CyclicRecursionData, }; @@ -280,7 +296,6 @@ mod tests { >() -> CommonCircuitData where C::Hasher: AlgebraicHasher, - [(); C::Hasher::HASH_SIZE]:, { let config = CircuitConfig::standard_recursion_config(); let builder = CircuitBuilder::::new(config); @@ -292,7 +307,7 @@ mod tests { constants_sigmas_cap: builder.add_virtual_cap(data.common.config.fri_config.cap_height), circuit_digest: builder.add_virtual_hash(), }; - builder.verify_proof::(proof, &verifier_data, &data.common); + builder.verify_proof::(&proof, &verifier_data, &data.common); let data = builder.build::(); let config = CircuitConfig::standard_recursion_config(); @@ -302,13 +317,19 @@ mod tests { constants_sigmas_cap: builder.add_virtual_cap(data.common.config.fri_config.cap_height), circuit_digest: builder.add_virtual_hash(), }; - builder.verify_proof::(proof, &verifier_data, &data.common); + builder.verify_proof::(&proof, &verifier_data, &data.common); while builder.num_gates() < 1 << 12 { builder.add_gate(NoopGate, vec![]); } builder.build::().common } + /// Uses cyclic recursion to build a hash chain. + /// The circuit has the following public input structure: + /// - Initial hash (4) + /// - Output for the tip of the hash chain (4) + /// - Chain length, i.e. the number of times the hash has been applied (1) + /// - VK for cyclic recursion (?) #[test] fn test_cyclic_recursion() -> Result<()> { const D: usize = 2; @@ -316,57 +337,62 @@ mod tests { type F = >::F; let config = CircuitConfig::standard_recursion_config(); - let mut pw = PartialWitness::new(); let mut builder = CircuitBuilder::::new(config); + let one = builder.one(); // Circuit that computes a repeated hash. let initial_hash = builder.add_virtual_hash(); builder.register_public_inputs(&initial_hash.elements); - // Hash from the previous proof. - let old_hash = builder.add_virtual_hash(); - // The input hash is either the previous hash or the initial hash depending on whether - // the last proof was a base case. - let input_hash = builder.add_virtual_hash(); - let h = builder.hash_n_to_hash_no_pad::(input_hash.elements.to_vec()); - builder.register_public_inputs(&h.elements); - // Previous counter. 
- let old_counter = builder.add_virtual_target(); - let one = builder.one(); - let new_counter = builder.add_virtual_public_input(); - let old_pis = [ - initial_hash.elements.as_slice(), - old_hash.elements.as_slice(), - [old_counter].as_slice(), - ] - .concat(); + let current_hash_in = builder.add_virtual_hash(); + let current_hash_out = + builder.hash_n_to_hash_no_pad::(current_hash_in.elements.to_vec()); + builder.register_public_inputs(¤t_hash_out.elements); + let counter = builder.add_virtual_public_input(); let mut common_data = common_data_for_recursion::(); + builder.add_verifier_data_public_inputs(); + common_data.num_public_inputs = builder.num_public_inputs(); + + let condition = builder.add_virtual_bool_target_safe(); + + // Unpack inner proof's public inputs. + let inner_proof_with_pis = builder.add_virtual_proof_with_pis::(&common_data); + let inner_pis = &inner_proof_with_pis.public_inputs; + let inner_initial_hash = HashOutTarget::try_from(&inner_pis[0..4]).unwrap(); + let inner_latest_hash = HashOutTarget::try_from(&inner_pis[4..8]).unwrap(); + let inner_counter = inner_pis[8]; + + // Connect our initial hash to that of our inner proof. (If there is no inner proof, the + // initial hash will be unconstrained, which is intentional.) + builder.connect_hashes(initial_hash, inner_initial_hash); + + // The input hash is the previous hash output if we have an inner proof, or the initial hash + // if this is the base case. + let actual_hash_in = builder.select_hash(condition, inner_latest_hash, initial_hash); + builder.connect_hashes(current_hash_in, actual_hash_in); + + // Our chain length will be inner_counter + 1 if we have an inner proof, or 1 if not. + let new_counter = builder.mul_add(condition.target, inner_counter, one); + builder.connect(counter, new_counter); - let base_case = builder.add_virtual_bool_target_safe(); - // Add cyclic recursion gadget. let cyclic_data_target = - builder.cyclic_recursion::(base_case, &old_pis, &mut common_data)?; - let input_hash_bis = - builder.select_hash(cyclic_data_target.base_case, initial_hash, old_hash); - builder.connect_hashes(input_hash, input_hash_bis); - let not_base_case = builder.sub(one, cyclic_data_target.base_case.target); - // New counter is the previous counter +1 if the previous proof wasn't a base case. - let new_counter_bis = builder.add(old_counter, not_base_case); - builder.connect(new_counter, new_counter_bis); + builder.cyclic_recursion::(condition, &inner_proof_with_pis, &common_data)?; let cyclic_circuit_data = builder.build::(); + let mut pw = PartialWitness::new(); let cyclic_recursion_data = CyclicRecursionData { proof: &None, // Base case: We don't have a proof to put here yet. 
             verifier_data: &cyclic_circuit_data.verifier_only,
             common_data: &cyclic_circuit_data.common,
         };
         let initial_hash = [F::ZERO, F::ONE, F::TWO, F::from_canonical_usize(3)];
+        let initial_hash_pis = initial_hash.into_iter().enumerate().collect();
         set_cyclic_recursion_data_target(
             &mut pw,
             &cyclic_data_target,
             &cyclic_recursion_data,
-            &initial_hash,
+            initial_hash_pis,
         )?;
         let proof = cyclic_circuit_data.prove(pw)?;
         check_cyclic_proof_verifier_data(
@@ -387,7 +413,7 @@
             &mut pw,
             &cyclic_data_target,
             &cyclic_recursion_data,
-            &[],
+            HashMap::new(),
         )?;
         let proof = cyclic_circuit_data.prove(pw)?;
         check_cyclic_proof_verifier_data(
@@ -408,7 +434,7 @@
             &mut pw,
             &cyclic_data_target,
             &cyclic_recursion_data,
-            &[],
+            HashMap::new(),
         )?;
         let proof = cyclic_circuit_data.prove(pw)?;
         check_cyclic_proof_verifier_data(
@@ -421,17 +447,20 @@
         let initial_hash = &proof.public_inputs[..4];
         let hash = &proof.public_inputs[4..8];
         let counter = proof.public_inputs[8];
-        let mut h: [F; 4] = initial_hash.try_into().unwrap();
-        assert_eq!(
-            hash,
-            std::iter::repeat_with(|| {
-                h = hash_n_to_hash_no_pad::(&h).elements;
-                h
-            })
-            .nth(counter.to_canonical_u64() as usize)
-            .unwrap()
+        let expected_hash: [F; 4] = iterate_poseidon(
+            initial_hash.try_into().unwrap(),
+            counter.to_canonical_u64() as usize,
         );
+        assert_eq!(hash, expected_hash);
         cyclic_circuit_data.verify(proof)
     }
+
+    fn iterate_poseidon(initial_state: [F; 4], n: usize) -> [F; 4] {
+        let mut current = initial_state;
+        for _ in 0..n {
+            current = hash_n_to_hash_no_pad::(&current).elements;
+        }
+        current
+    }
 }
diff --git a/plonky2/src/recursion/dummy_circuit.rs b/plonky2/src/recursion/dummy_circuit.rs
new file mode 100644
index 00000000..4012b5e6
--- /dev/null
+++ b/plonky2/src/recursion/dummy_circuit.rs
@@ -0,0 +1,67 @@
+use alloc::vec;
+
+use hashbrown::HashMap;
+use plonky2_field::extension::Extendable;
+use plonky2_util::ceil_div_usize;
+
+use crate::gates::noop::NoopGate;
+use crate::hash::hash_types::RichField;
+use crate::iop::witness::{PartialWitness, Witness};
+use crate::plonk::circuit_builder::CircuitBuilder;
+use crate::plonk::circuit_data::{CircuitData, CommonCircuitData};
+use crate::plonk::config::GenericConfig;
+use crate::plonk::proof::ProofWithPublicInputs;
+
+/// Generate a proof for a dummy circuit. The `nonzero_public_inputs` parameter lets the caller
+/// specify certain public inputs (identified by their indices) which should be given specific
+/// values. The rest will default to zero.
+pub(crate) fn dummy_proof(
+    circuit: &CircuitData,
+    nonzero_public_inputs: HashMap,
+) -> anyhow::Result>
+where
+    F: RichField + Extendable,
+    C: GenericConfig,
+{
+    let mut pw = PartialWitness::new();
+    for i in 0..circuit.common.num_public_inputs {
+        let pi = nonzero_public_inputs.get(&i).copied().unwrap_or_default();
+        pw.set_target(circuit.prover_only.public_inputs[i], pi);
+    }
+    circuit.prove(pw)
+}
+
+/// Generate a circuit matching a given `CommonCircuitData`.
+pub(crate) fn dummy_circuit<
+    F: RichField + Extendable,
+    C: GenericConfig,
+    const D: usize,
+>(
+    common_data: &CommonCircuitData,
+) -> CircuitData {
+    let config = common_data.config.clone();
+    assert!(
+        !common_data.config.zero_knowledge,
+        "Degree calculation can be off if zero-knowledge is on."
+    );
+
+    // Number of `NoopGate`s to add to get a circuit of size `degree` in the end.
+    // Need to account for public input hashing, a `PublicInputGate` and a `ConstantGate`.
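// [Editor's note: illustrative arithmetic, not part of the original diff.] Public inputs are
// hashed with a rate-8 Poseidon sponge, hence the `ceil_div_usize(num_public_inputs, 8)` term
// below. For example (hypothetical numbers): with `degree() = 4096` and 77 public inputs,
// ceil(77 / 8) = 10 hashing gates are needed, so 4096 - 10 - 2 = 4084 noop gates are added,
// leaving one slot for the `PublicInputGate` and one for the `ConstantGate`.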
+ let degree = common_data.degree(); + let num_noop_gate = degree - ceil_div_usize(common_data.num_public_inputs, 8) - 2; + + let mut builder = CircuitBuilder::::new(config); + for _ in 0..num_noop_gate { + builder.add_gate(NoopGate, vec![]); + } + for gate in &common_data.gates { + builder.add_gate_to_gate_set(gate.clone()); + } + for _ in 0..common_data.num_public_inputs { + builder.add_virtual_public_input(); + } + + let circuit = builder.build::(); + assert_eq!(&circuit.common, common_data); + circuit +} diff --git a/plonky2/src/recursion/mod.rs b/plonky2/src/recursion/mod.rs index 33e8212e..3aba4ffd 100644 --- a/plonky2/src/recursion/mod.rs +++ b/plonky2/src/recursion/mod.rs @@ -1,3 +1,4 @@ pub mod conditional_recursive_verifier; pub mod cyclic_recursion; +pub(crate) mod dummy_circuit; pub mod recursive_verifier; diff --git a/plonky2/src/recursion/recursive_verifier.rs b/plonky2/src/recursion/recursive_verifier.rs index 8dbab974..d53095a4 100644 --- a/plonky2/src/recursion/recursive_verifier.rs +++ b/plonky2/src/recursion/recursive_verifier.rs @@ -1,5 +1,4 @@ -use plonky2_field::extension::Extendable; - +use crate::field::extension::Extendable; use crate::hash::hash_types::{HashOutTarget, RichField}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::{CommonCircuitData, VerifierCircuitTarget}; @@ -17,7 +16,7 @@ impl, const D: usize> CircuitBuilder { /// Recursively verifies an inner proof. pub fn verify_proof>( &mut self, - proof_with_pis: ProofWithPublicInputsTarget, + proof_with_pis: &ProofWithPublicInputsTarget, inner_verifier_data: &VerifierCircuitTarget, inner_common_data: &CommonCircuitData, ) where @@ -37,7 +36,7 @@ impl, const D: usize> CircuitBuilder { ); self.verify_proof_with_challenges::( - proof_with_pis.proof, + &proof_with_pis.proof, public_inputs_hash, challenges, inner_verifier_data, @@ -48,7 +47,7 @@ impl, const D: usize> CircuitBuilder { /// Recursively verifies an inner proof. fn verify_proof_with_challenges>( &mut self, - proof: ProofTarget, + proof: &ProofTarget, public_inputs_hash: HashOutTarget, challenges: ProofChallengesTarget, inner_verifier_data: &VerifierCircuitTarget, @@ -107,9 +106,9 @@ impl, const D: usize> CircuitBuilder { let merkle_caps = &[ inner_verifier_data.constants_sigmas_cap.clone(), - proof.wires_cap, - proof.plonk_zs_partial_products_cap, - proof.quotient_polys_cap, + proof.wires_cap.clone(), + proof.plonk_zs_partial_products_cap.clone(), + proof.quotient_polys_cap.clone(), ]; let fri_instance = inner_common_data.get_fri_instance_target(self, challenges.plonk_zeta); @@ -194,9 +193,7 @@ mod tests { use crate::gates::noop::NoopGate; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_data::{CircuitConfig, VerifierOnlyCircuitData}; - use crate::plonk::config::{ - GenericConfig, Hasher, KeccakGoldilocksConfig, PoseidonGoldilocksConfig, - }; + use crate::plonk::config::{GenericConfig, KeccakGoldilocksConfig, PoseidonGoldilocksConfig}; use crate::plonk::proof::{CompressedProofWithPublicInputs, ProofWithPublicInputs}; use crate::plonk::prover::prove; use crate::util::timing::TimingTree; @@ -323,18 +320,17 @@ mod tests { Ok(()) } + type Proof = ( + ProofWithPublicInputs, + VerifierOnlyCircuitData, + CommonCircuitData, + ); + /// Creates a dummy proof which should have roughly `num_dummy_gates` gates. 
fn dummy_proof, C: GenericConfig, const D: usize>( config: &CircuitConfig, num_dummy_gates: u64, - ) -> Result<( - ProofWithPublicInputs, - VerifierOnlyCircuitData, - CommonCircuitData, - )> - where - [(); C::Hasher::HASH_SIZE]:, - { + ) -> Result> { let mut builder = CircuitBuilder::::new(config.clone()); for _ in 0..num_dummy_gates { builder.add_gate(NoopGate, vec![]); @@ -361,14 +357,9 @@ mod tests { min_degree_bits: Option, print_gate_counts: bool, print_timing: bool, - ) -> Result<( - ProofWithPublicInputs, - VerifierOnlyCircuitData, - CommonCircuitData, - )> + ) -> Result> where InnerC::Hasher: AlgebraicHasher, - [(); C::Hasher::HASH_SIZE]:, { let mut builder = CircuitBuilder::::new(config.clone()); let mut pw = PartialWitness::new(); @@ -385,7 +376,7 @@ mod tests { ); pw.set_hash_target(inner_data.circuit_digest, inner_vd.circuit_digest); - builder.verify_proof::(pt, &inner_data, &inner_cd); + builder.verify_proof::(&pt, &inner_data, &inner_cd); if print_gate_counts { builder.print_gate_counts(0); @@ -423,11 +414,8 @@ mod tests { proof: &ProofWithPublicInputs, vd: &VerifierOnlyCircuitData, cd: &CommonCircuitData, - ) -> Result<()> - where - [(); C::Hasher::HASH_SIZE]:, - { - let proof_bytes = proof.to_bytes()?; + ) -> Result<()> { + let proof_bytes = proof.to_bytes(); info!("Proof length: {} bytes", proof_bytes.len()); let proof_from_bytes = ProofWithPublicInputs::from_bytes(proof_bytes, cd)?; assert_eq!(proof, &proof_from_bytes); @@ -440,7 +428,7 @@ mod tests { info!("{:.4}s to compress proof", now.elapsed().as_secs_f64()); assert_eq!(proof, &decompressed_compressed_proof); - let compressed_proof_bytes = compressed_proof.to_bytes()?; + let compressed_proof_bytes = compressed_proof.to_bytes(); info!( "Compressed proof length: {} bytes", compressed_proof_bytes.len() diff --git a/plonky2/src/util/context_tree.rs b/plonky2/src/util/context_tree.rs index 7ec5214c..565e2d35 100644 --- a/plonky2/src/util/context_tree.rs +++ b/plonky2/src/util/context_tree.rs @@ -1,3 +1,7 @@ +use alloc::string::{String, ToString}; +use alloc::vec; +use alloc::vec::Vec; + use log::{log, Level}; /// The hierarchy of contexts, and the gate count contributed by each one. Useful for debugging. 
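// [Editor's note: illustrative sketch, not part of the original diff.] Two call-site changes
// recur in the tests above: `verify_proof` now borrows the proof target, and uncompressed proof
// serialization is infallible (`to_bytes` returns the bytes directly instead of a `Result`).
// A round trip under the new signatures (assuming `proof: ProofWithPublicInputs<F, C, D>` and
// `data: CircuitData<F, C, D>` from an already-built circuit):
//
//     let bytes = proof.to_bytes(); // no `?` any more
//     let restored = ProofWithPublicInputs::<F, C, D>::from_bytes(bytes, &data.common)?;
//     assert_eq!(proof, restored);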
diff --git a/plonky2/src/util/mod.rs b/plonky2/src/util/mod.rs index 9f15b8a1..19f3cb74 100644 --- a/plonky2/src/util/mod.rs +++ b/plonky2/src/util/mod.rs @@ -1,8 +1,15 @@ -use plonky2_field::polynomial::PolynomialValues; -use plonky2_field::types::Field; +use alloc::vec; +use alloc::vec::Vec; + +#[doc(inline)] +pub use plonky2_util::*; + +use crate::field::polynomial::PolynomialValues; +use crate::field::types::Field; pub(crate) mod context_tree; pub(crate) mod partial_products; + pub mod reducing; pub mod serialization; pub mod strided_view; @@ -57,9 +64,7 @@ pub(crate) fn reverse_bits(n: usize, num_bits: usize) -> usize { #[cfg(test)] mod tests { - use plonky2_util::{reverse_index_bits, reverse_index_bits_in_place}; - - use crate::util::reverse_bits; + use super::*; #[test] fn test_reverse_bits() { diff --git a/plonky2/src/util/partial_products.rs b/plonky2/src/util/partial_products.rs index 39e59a28..ff2261b8 100644 --- a/plonky2/src/util/partial_products.rs +++ b/plonky2/src/util/partial_products.rs @@ -1,13 +1,14 @@ -use std::iter; +use alloc::vec::Vec; +use core::iter; use itertools::Itertools; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; -use plonky2_util::ceil_div_usize; +use crate::field::extension::Extendable; +use crate::field::types::Field; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; use crate::plonk::circuit_builder::CircuitBuilder; +use crate::util::ceil_div_usize; pub(crate) fn quotient_chunk_products( quotient_values: &[F], @@ -107,9 +108,8 @@ pub(crate) fn check_partial_products_circuit, const #[cfg(test)] mod tests { - use plonky2_field::goldilocks_field::GoldilocksField; - use super::*; + use crate::field::goldilocks_field::GoldilocksField; #[test] fn test_partial_products() { diff --git a/plonky2/src/util/reducing.rs b/plonky2/src/util/reducing.rs index fae5f03b..0bed0b84 100644 --- a/plonky2/src/util/reducing.rs +++ b/plonky2/src/util/reducing.rs @@ -1,10 +1,11 @@ -use std::borrow::Borrow; - -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::packed::PackedField; -use plonky2_field::polynomial::PolynomialCoeffs; -use plonky2_field::types::Field; +use alloc::vec; +use alloc::vec::Vec; +use core::borrow::Borrow; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::packed::PackedField; +use crate::field::polynomial::PolynomialCoeffs; +use crate::field::types::Field; use crate::gates::arithmetic_extension::ArithmeticExtensionGate; use crate::gates::reducing::ReducingGate; use crate::gates::reducing_extension::ReducingExtensionGate; @@ -19,7 +20,7 @@ use crate::plonk::circuit_builder::CircuitBuilder; /// scale the second one by `a^(r-1-k)`, and add them up. /// This struct abstract away these operations by implementing Horner's method and keeping track /// of the number of multiplications by `a` to compute the scaling factor. -/// See https://github.com/mir-protocol/plonky2/pull/69 for more details and discussions. +/// See for more details and discussions. 
#[derive(Debug, Clone)] pub struct ReducingFactor { base: F, @@ -276,6 +277,7 @@ mod tests { use anyhow::Result; use super::*; + use crate::field::types::Sample; use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; diff --git a/plonky2/src/util/serialization.rs b/plonky2/src/util/serialization.rs index 076e42f4..c3ad0887 100644 --- a/plonky2/src/util/serialization.rs +++ b/plonky2/src/util/serialization.rs @@ -1,11 +1,14 @@ -use std::collections::HashMap; -use std::io::Cursor; -use std::io::{Read, Result, Write}; +use alloc::vec; +use alloc::vec::Vec; +use core::convert::Infallible; +use core::fmt::{Debug, Display, Formatter}; +use core::mem::size_of; -use plonky2_field::extension::{Extendable, FieldExtension}; -use plonky2_field::polynomial::PolynomialCoeffs; -use plonky2_field::types::{Field64, PrimeField64}; +use hashbrown::HashMap; +use crate::field::extension::{Extendable, FieldExtension}; +use crate::field::polynomial::PolynomialCoeffs; +use crate::field::types::{Field64, PrimeField64}; use crate::fri::proof::{ CompressedFriProof, CompressedFriQueryRounds, FriInitialTreeProof, FriProof, FriQueryRound, FriQueryStep, @@ -20,63 +23,79 @@ use crate::plonk::proof::{ CompressedProof, CompressedProofWithPublicInputs, OpeningSet, Proof, ProofWithPublicInputs, }; +/// A no_std compatible variant of `std::io::Error` #[derive(Debug)] -pub struct Buffer(Cursor>); +pub struct IoError; -impl Buffer { - pub fn new(buffer: Vec) -> Self { - Self(Cursor::new(buffer)) +impl Display for IoError { + fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { + Debug::fmt(self, f) } +} - pub fn len(&self) -> usize { - self.0.get_ref().len() - } +/// A no_std compatible variant of `std::io::Result` +pub type IoResult = Result; - pub fn bytes(self) -> Vec { - self.0.into_inner() - } +/// A `Read` which is able to report how many bytes are remaining. +pub trait Remaining: Read { + /// Returns the number of bytes remaining in the buffer. + fn remaining(&self) -> usize; - fn write_u8(&mut self, x: u8) -> Result<()> { - self.0.write_all(&[x]) + /// Returns whether zero bytes are remaining. + fn is_empty(&self) -> bool { + self.remaining() == 0 } - fn read_u8(&mut self) -> Result { - let mut buf = [0; std::mem::size_of::()]; - self.0.read_exact(&mut buf)?; +} + +/// Similar to `std::io::Read`, but works with no_std. +pub trait Read { + /// Reads exactly the length of `bytes` from `self` and writes it to `bytes`. + fn read_exact(&mut self, bytes: &mut [u8]) -> IoResult<()>; + + /// Reads a `u8` value from `self`. + #[inline] + fn read_u8(&mut self) -> IoResult { + let mut buf = [0; size_of::()]; + self.read_exact(&mut buf)?; Ok(buf[0]) } - fn write_u32(&mut self, x: u32) -> Result<()> { - self.0.write_all(&x.to_le_bytes()) - } - fn read_u32(&mut self) -> Result { - let mut buf = [0; std::mem::size_of::()]; - self.0.read_exact(&mut buf)?; + /// Reads a `u32` value from `self`. 
+ #[inline] + fn read_u32(&mut self) -> IoResult { + let mut buf = [0; size_of::()]; + self.read_exact(&mut buf)?; Ok(u32::from_le_bytes(buf)) } - fn write_field(&mut self, x: F) -> Result<()> { - self.0.write_all(&x.to_canonical_u64().to_le_bytes()) - } - fn read_field(&mut self) -> Result { - let mut buf = [0; std::mem::size_of::()]; - self.0.read_exact(&mut buf)?; - Ok(F::from_canonical_u64(u64::from_le_bytes( - buf.try_into().unwrap(), - ))) + /// Reads a element from the field `F` with size less than `2^64` from `self.` + #[inline] + fn read_field(&mut self) -> IoResult + where + F: Field64, + { + let mut buf = [0; size_of::()]; + self.read_exact(&mut buf)?; + Ok(F::from_canonical_u64(u64::from_le_bytes(buf))) } - fn write_field_ext, const D: usize>( - &mut self, - x: F::Extension, - ) -> Result<()> { - for &a in &x.to_basefield_array() { - self.write_field(a)?; - } - Ok(()) + /// Reads a vector of elements from the field `F` from `self`. + #[inline] + fn read_field_vec(&mut self, length: usize) -> IoResult> + where + F: Field64, + { + (0..length) + .map(|_| self.read_field()) + .collect::, _>>() } - fn read_field_ext, const D: usize>( - &mut self, - ) -> Result { + + /// Reads an element from the field extension of `F` from `self.` + #[inline] + fn read_field_ext(&mut self) -> IoResult + where + F: Field64 + Extendable, + { let mut arr = [F::ZERO; D]; for a in arr.iter_mut() { *a = self.read_field()?; @@ -86,87 +105,55 @@ impl Buffer { )) } - fn write_hash>(&mut self, h: H::Hash) -> Result<()> { - self.0.write_all(&h.to_bytes()) + /// Reads a vector of elements from the field extension of `F` from `self`. + #[inline] + fn read_field_ext_vec( + &mut self, + length: usize, + ) -> IoResult> + where + F: RichField + Extendable, + { + (0..length).map(|_| self.read_field_ext::()).collect() } - fn read_hash>(&mut self) -> Result { + /// Reads a hash value from `self`. + #[inline] + fn read_hash(&mut self) -> IoResult + where + F: RichField, + H: Hasher, + { let mut buf = vec![0; H::HASH_SIZE]; - self.0.read_exact(&mut buf)?; + self.read_exact(&mut buf)?; Ok(H::Hash::from_bytes(&buf)) } - fn write_merkle_cap>( - &mut self, - cap: &MerkleCap, - ) -> Result<()> { - for &a in &cap.0 { - self.write_hash::(a)?; - } - Ok(()) - } - fn read_merkle_cap>( - &mut self, - cap_height: usize, - ) -> Result> { + /// Reads a value of type [`MerkleCap`] from `self` with the given `cap_height`. 
+ #[inline] + fn read_merkle_cap(&mut self, cap_height: usize) -> IoResult> + where + F: RichField, + H: Hasher, + { let cap_length = 1 << cap_height; Ok(MerkleCap( (0..cap_length) .map(|_| self.read_hash::()) - .collect::>>()?, + .collect::, _>>()?, )) } - pub fn write_field_vec(&mut self, v: &[F]) -> Result<()> { - for &a in v { - self.write_field(a)?; - } - Ok(()) - } - pub fn read_field_vec(&mut self, length: usize) -> Result> { - (0..length) - .map(|_| self.read_field()) - .collect::>>() - } - - fn write_field_ext_vec, const D: usize>( - &mut self, - v: &[F::Extension], - ) -> Result<()> { - for &a in v { - self.write_field_ext::(a)?; - } - Ok(()) - } - fn read_field_ext_vec, const D: usize>( - &mut self, - length: usize, - ) -> Result> { - (0..length) - .map(|_| self.read_field_ext::()) - .collect::>>() - } - - fn write_opening_set, const D: usize>( - &mut self, - os: &OpeningSet, - ) -> Result<()> { - self.write_field_ext_vec::(&os.constants)?; - self.write_field_ext_vec::(&os.plonk_sigmas)?; - self.write_field_ext_vec::(&os.wires)?; - self.write_field_ext_vec::(&os.plonk_zs)?; - self.write_field_ext_vec::(&os.plonk_zs_next)?; - self.write_field_ext_vec::(&os.partial_products)?; - self.write_field_ext_vec::(&os.quotient_polys) - } - fn read_opening_set< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + /// Reads a value of type [`OpeningSet`] from `self` with the given `common_data`. + #[inline] + fn read_opening_set( &mut self, common_data: &CommonCircuitData, - ) -> Result> { + ) -> IoResult> + where + F: RichField + Extendable, + C: GenericConfig, + { let config = &common_data.config; let constants = self.read_field_ext_vec::(common_data.num_constants)?; let plonk_sigmas = self.read_field_ext_vec::(config.num_routed_wires)?; @@ -189,52 +176,31 @@ impl Buffer { }) } - fn write_merkle_proof>( - &mut self, - p: &MerkleProof, - ) -> Result<()> { - let length = p.siblings.len(); - self.write_u8( - length - .try_into() - .expect("Merkle proof length must fit in u8."), - )?; - for &h in &p.siblings { - self.write_hash::(h)?; - } - Ok(()) - } - fn read_merkle_proof>(&mut self) -> Result> { + /// Reads a value of type [`MerkleProof`] from `self`. + #[inline] + fn read_merkle_proof(&mut self) -> IoResult> + where + F: RichField, + H: Hasher, + { let length = self.read_u8()?; Ok(MerkleProof { siblings: (0..length) .map(|_| self.read_hash::()) - .collect::>>()?, + .collect::>()?, }) } - fn write_fri_initial_proof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( - &mut self, - fitp: &FriInitialTreeProof, - ) -> Result<()> { - for (v, p) in &fitp.evals_proofs { - self.write_field_vec(v)?; - self.write_merkle_proof(p)?; - } - Ok(()) - } - fn read_fri_initial_proof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + /// Reads a value of type [`FriInitialTreeProof`] from `self` with the given `common_data`. 
+ #[inline] + fn read_fri_initial_proof( &mut self, common_data: &CommonCircuitData, - ) -> Result> { + ) -> IoResult> + where + F: RichField + Extendable, + C: GenericConfig, + { let config = &common_data.config; let salt = salt_size(common_data.fri_params.hiding); let mut evals_proofs = Vec::with_capacity(4); @@ -262,26 +228,18 @@ impl Buffer { Ok(FriInitialTreeProof { evals_proofs }) } - fn write_fri_query_step< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( - &mut self, - fqs: &FriQueryStep, - ) -> Result<()> { - self.write_field_ext_vec::(&fqs.evals)?; - self.write_merkle_proof(&fqs.merkle_proof) - } - fn read_fri_query_step< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + /// Reads a value of type [`FriQueryStep`] from `self` with the given `arity` and `compressed` + /// flag. + #[inline] + fn read_fri_query_step( &mut self, arity: usize, compressed: bool, - ) -> Result> { + ) -> IoResult> + where + F: RichField + Extendable, + C: GenericConfig, + { let evals = self.read_field_ext_vec::(arity - usize::from(compressed))?; let merkle_proof = self.read_merkle_proof()?; Ok(FriQueryStep { @@ -290,30 +248,16 @@ impl Buffer { }) } - fn write_fri_query_rounds< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( - &mut self, - fqrs: &[FriQueryRound], - ) -> Result<()> { - for fqr in fqrs { - self.write_fri_initial_proof::(&fqr.initial_trees_proof)?; - for fqs in &fqr.steps { - self.write_fri_query_step::(fqs)?; - } - } - Ok(()) - } - fn read_fri_query_rounds< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + /// Reads a vector of [`FriQueryRound`]s from `self` with `common_data`. + #[inline] + fn read_fri_query_rounds( &mut self, common_data: &CommonCircuitData, - ) -> Result>> { + ) -> IoResult>> + where + F: RichField + Extendable, + C: GenericConfig, + { let config = &common_data.config; let mut fqrs = Vec::with_capacity(config.fri_config.num_query_rounds); for _ in 0..config.fri_config.num_query_rounds { @@ -323,7 +267,7 @@ impl Buffer { .reduction_arity_bits .iter() .map(|&ar| self.read_fri_query_step::(1 << ar, false)) - .collect::>()?; + .collect::>()?; fqrs.push(FriQueryRound { initial_trees_proof, steps, @@ -332,25 +276,20 @@ impl Buffer { Ok(fqrs) } - fn write_fri_proof, C: GenericConfig, const D: usize>( - &mut self, - fp: &FriProof, - ) -> Result<()> { - for cap in &fp.commit_phase_merkle_caps { - self.write_merkle_cap(cap)?; - } - self.write_fri_query_rounds::(&fp.query_round_proofs)?; - self.write_field_ext_vec::(&fp.final_poly.coeffs)?; - self.write_field(fp.pow_witness) - } - fn read_fri_proof, C: GenericConfig, const D: usize>( + /// Reads a value of type [`FriProof`] from `self` with `common_data`. 
+ #[inline] + fn read_fri_proof( &mut self, common_data: &CommonCircuitData, - ) -> Result> { + ) -> IoResult> + where + F: RichField + Extendable, + C: GenericConfig, + { let config = &common_data.config; let commit_phase_merkle_caps = (0..common_data.fri_params.reduction_arity_bits.len()) .map(|_| self.read_merkle_cap(config.fri_config.cap_height)) - .collect::>>()?; + .collect::, _>>()?; let query_round_proofs = self.read_fri_query_rounds::(common_data)?; let final_poly = PolynomialCoeffs::new( self.read_field_ext_vec::(common_data.fri_params.final_poly_len())?, @@ -364,27 +303,22 @@ impl Buffer { }) } - pub fn write_proof, C: GenericConfig, const D: usize>( - &mut self, - proof: &Proof, - ) -> Result<()> { - self.write_merkle_cap(&proof.wires_cap)?; - self.write_merkle_cap(&proof.plonk_zs_partial_products_cap)?; - self.write_merkle_cap(&proof.quotient_polys_cap)?; - self.write_opening_set(&proof.openings)?; - self.write_fri_proof::(&proof.opening_proof) - } - pub fn read_proof, C: GenericConfig, const D: usize>( + /// Reads a value of type [`Proof`] from `self` with `common_data`. + #[inline] + fn read_proof( &mut self, common_data: &CommonCircuitData, - ) -> Result> { + ) -> IoResult> + where + F: RichField + Extendable, + C: GenericConfig, + { let config = &common_data.config; let wires_cap = self.read_merkle_cap(config.fri_config.cap_height)?; let plonk_zs_partial_products_cap = self.read_merkle_cap(config.fri_config.cap_height)?; let quotient_polys_cap = self.read_merkle_cap(config.fri_config.cap_height)?; let openings = self.read_opening_set::(common_data)?; let opening_proof = self.read_fri_proof::(common_data)?; - Ok(Proof { wires_cap, plonk_zs_partial_products_cap, @@ -394,78 +328,39 @@ impl Buffer { }) } - pub fn write_proof_with_public_inputs< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( - &mut self, - proof_with_pis: &ProofWithPublicInputs, - ) -> Result<()> { - let ProofWithPublicInputs { - proof, - public_inputs, - } = proof_with_pis; - self.write_proof(proof)?; - self.write_field_vec(public_inputs) - } - pub fn read_proof_with_public_inputs< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + /// Reads a value of type [`ProofWithPublicInputs`] from `self` with `common_data`. 
+ #[inline] + fn read_proof_with_public_inputs( &mut self, common_data: &CommonCircuitData, - ) -> Result> { + ) -> IoResult> + where + Self: Remaining, + F: RichField + Extendable, + C: GenericConfig, + { let proof = self.read_proof(common_data)?; - let public_inputs = self.read_field_vec( - (self.len() - self.0.position() as usize) / std::mem::size_of::(), - )?; - + let public_inputs = self.read_field_vec(self.remaining() / size_of::())?; Ok(ProofWithPublicInputs { proof, public_inputs, }) } - fn write_compressed_fri_query_rounds< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( - &mut self, - cfqrs: &CompressedFriQueryRounds, - ) -> Result<()> { - for &i in &cfqrs.indices { - self.write_u32(i as u32)?; - } - - let mut initial_trees_proofs = cfqrs.initial_trees_proofs.iter().collect::>(); - initial_trees_proofs.sort_by_key(|&x| x.0); - for (_, itp) in initial_trees_proofs { - self.write_fri_initial_proof::(itp)?; - } - for h in &cfqrs.steps { - let mut fri_query_steps = h.iter().collect::>(); - fri_query_steps.sort_by_key(|&x| x.0); - for (_, fqs) in fri_query_steps { - self.write_fri_query_step::(fqs)?; - } - } - Ok(()) - } - fn read_compressed_fri_query_rounds< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + /// Reads a value of type [`CompressedFriQueryRounds`] from `self` with `common_data`. + #[inline] + fn read_compressed_fri_query_rounds( &mut self, common_data: &CommonCircuitData, - ) -> Result> { + ) -> IoResult> + where + F: RichField + Extendable, + C: GenericConfig, + { let config = &common_data.config; let original_indices = (0..config.fri_config.num_query_rounds) .map(|_| self.read_u32().map(|i| i as usize)) - .collect::>>()?; + .collect::, _>>()?; let mut indices = original_indices.clone(); indices.sort_unstable(); indices.dedup(); @@ -483,7 +378,7 @@ impl Buffer { indices.dedup(); let query_steps = (0..indices.len()) .map(|_| self.read_fri_query_step::(1 << a, true)) - .collect::>>()?; + .collect::, _>>()?; steps.push( indices .iter() @@ -500,33 +395,20 @@ impl Buffer { }) } - fn write_compressed_fri_proof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( - &mut self, - fp: &CompressedFriProof, - ) -> Result<()> { - for cap in &fp.commit_phase_merkle_caps { - self.write_merkle_cap(cap)?; - } - self.write_compressed_fri_query_rounds::(&fp.query_round_proofs)?; - self.write_field_ext_vec::(&fp.final_poly.coeffs)?; - self.write_field(fp.pow_witness) - } - fn read_compressed_fri_proof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + /// Reads a value of type [`CompressedFriProof`] from `self` with `common_data`. 
+ #[inline] + fn read_compressed_fri_proof( &mut self, common_data: &CommonCircuitData, - ) -> Result> { + ) -> IoResult> + where + F: RichField + Extendable, + C: GenericConfig, + { let config = &common_data.config; let commit_phase_merkle_caps = (0..common_data.fri_params.reduction_arity_bits.len()) .map(|_| self.read_merkle_cap(config.fri_config.cap_height)) - .collect::>>()?; + .collect::, _>>()?; let query_round_proofs = self.read_compressed_fri_query_rounds::(common_data)?; let final_poly = PolynomialCoeffs::new( self.read_field_ext_vec::(common_data.fri_params.final_poly_len())?, @@ -540,35 +422,22 @@ impl Buffer { }) } - pub fn write_compressed_proof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( - &mut self, - proof: &CompressedProof, - ) -> Result<()> { - self.write_merkle_cap(&proof.wires_cap)?; - self.write_merkle_cap(&proof.plonk_zs_partial_products_cap)?; - self.write_merkle_cap(&proof.quotient_polys_cap)?; - self.write_opening_set(&proof.openings)?; - self.write_compressed_fri_proof::(&proof.opening_proof) - } - pub fn read_compressed_proof< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( + /// Reads a value of type [`CompressedProof`] from `self` with `common_data`. + #[inline] + fn read_compressed_proof( &mut self, common_data: &CommonCircuitData, - ) -> Result> { + ) -> IoResult> + where + F: RichField + Extendable, + C: GenericConfig, + { let config = &common_data.config; let wires_cap = self.read_merkle_cap(config.fri_config.cap_height)?; let plonk_zs_partial_products_cap = self.read_merkle_cap(config.fri_config.cap_height)?; let quotient_polys_cap = self.read_merkle_cap(config.fri_config.cap_height)?; let openings = self.read_opening_set::(common_data)?; let opening_proof = self.read_compressed_fri_proof::(common_data)?; - Ok(CompressedProof { wires_cap, plonk_zs_partial_products_cap, @@ -578,14 +447,321 @@ impl Buffer { }) } - pub fn write_compressed_proof_with_public_inputs< + /// Reads a value of type [`CompressedProofWithPublicInputs`] from `self` with `common_data`. + #[inline] + fn read_compressed_proof_with_public_inputs( + &mut self, + common_data: &CommonCircuitData, + ) -> IoResult> + where + Self: Remaining, F: RichField + Extendable, C: GenericConfig, - const D: usize, - >( + { + let proof = self.read_compressed_proof(common_data)?; + let public_inputs = self.read_field_vec(self.remaining() / size_of::())?; + Ok(CompressedProofWithPublicInputs { + proof, + public_inputs, + }) + } +} + +/// Writing +pub trait Write { + /// Error Type + type Error; + + /// Writes all `bytes` to `self`. + fn write_all(&mut self, bytes: &[u8]) -> IoResult<()>; + + /// Writes a byte `x` to `self`. + #[inline] + fn write_u8(&mut self, x: u8) -> IoResult<()> { + self.write_all(&[x]) + } + + /// Writes a word `x` to `self.` + #[inline] + fn write_u32(&mut self, x: u32) -> IoResult<()> { + self.write_all(&x.to_le_bytes()) + } + + /// Writes an element `x` from the field `F` to `self`. + #[inline] + fn write_field(&mut self, x: F) -> IoResult<()> + where + F: PrimeField64, + { + self.write_all(&x.to_canonical_u64().to_le_bytes()) + } + + /// Writes a vector `v` of elements from the field `F` to `self`. + #[inline] + fn write_field_vec(&mut self, v: &[F]) -> IoResult<()> + where + F: PrimeField64, + { + for &a in v { + self.write_field(a)?; + } + Ok(()) + } + + /// Writes an element `x` from the field extension of `F` to `self`. 
+ #[inline] + fn write_field_ext(&mut self, x: F::Extension) -> IoResult<()> + where + F: RichField + Extendable, + { + for &a in &x.to_basefield_array() { + self.write_field(a)?; + } + Ok(()) + } + + /// Writes a vector `v` of elements from the field extension of `F` to `self`. + #[inline] + fn write_field_ext_vec(&mut self, v: &[F::Extension]) -> IoResult<()> + where + F: RichField + Extendable, + { + for &a in v { + self.write_field_ext::(a)?; + } + Ok(()) + } + + /// Writes a hash `h` to `self`. + #[inline] + fn write_hash(&mut self, h: H::Hash) -> IoResult<()> + where + F: RichField, + H: Hasher, + { + self.write_all(&h.to_bytes()) + } + + /// Writes `cap`, a value of type [`MerkleCap`], to `self`. + #[inline] + fn write_merkle_cap(&mut self, cap: &MerkleCap) -> IoResult<()> + where + F: RichField, + H: Hasher, + { + for &a in &cap.0 { + self.write_hash::(a)?; + } + Ok(()) + } + + /// Writes a value `os` of type [`OpeningSet`] to `self.` + #[inline] + fn write_opening_set(&mut self, os: &OpeningSet) -> IoResult<()> + where + F: RichField + Extendable, + { + self.write_field_ext_vec::(&os.constants)?; + self.write_field_ext_vec::(&os.plonk_sigmas)?; + self.write_field_ext_vec::(&os.wires)?; + self.write_field_ext_vec::(&os.plonk_zs)?; + self.write_field_ext_vec::(&os.plonk_zs_next)?; + self.write_field_ext_vec::(&os.partial_products)?; + self.write_field_ext_vec::(&os.quotient_polys) + } + + /// Writes a value `p` of type [`MerkleProof`] to `self.` + #[inline] + fn write_merkle_proof(&mut self, p: &MerkleProof) -> IoResult<()> + where + F: RichField, + H: Hasher, + { + let length = p.siblings.len(); + self.write_u8( + length + .try_into() + .expect("Merkle proof length must fit in u8."), + )?; + for &h in &p.siblings { + self.write_hash::(h)?; + } + Ok(()) + } + + /// Writes a value `fitp` of type [`FriInitialTreeProof`] to `self.` + #[inline] + fn write_fri_initial_proof( + &mut self, + fitp: &FriInitialTreeProof, + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + for (v, p) in &fitp.evals_proofs { + self.write_field_vec(v)?; + self.write_merkle_proof(p)?; + } + Ok(()) + } + + /// Writes a value `fqs` of type [`FriQueryStep`] to `self.` + #[inline] + fn write_fri_query_step( + &mut self, + fqs: &FriQueryStep, + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + self.write_field_ext_vec::(&fqs.evals)?; + self.write_merkle_proof(&fqs.merkle_proof) + } + + /// Writes a value `fqrs` of type [`FriQueryRound`] to `self.` + #[inline] + fn write_fri_query_rounds( + &mut self, + fqrs: &[FriQueryRound], + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + for fqr in fqrs { + self.write_fri_initial_proof::(&fqr.initial_trees_proof)?; + for fqs in &fqr.steps { + self.write_fri_query_step::(fqs)?; + } + } + Ok(()) + } + + /// Writes a value `fq` of type [`FriProof`] to `self.` + #[inline] + fn write_fri_proof( + &mut self, + fp: &FriProof, + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + for cap in &fp.commit_phase_merkle_caps { + self.write_merkle_cap(cap)?; + } + self.write_fri_query_rounds::(&fp.query_round_proofs)?; + self.write_field_ext_vec::(&fp.final_poly.coeffs)?; + self.write_field(fp.pow_witness) + } + + /// Writes a value `proof` of type [`Proof`] to `self.` + #[inline] + fn write_proof(&mut self, proof: &Proof) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + self.write_merkle_cap(&proof.wires_cap)?; + 
self.write_merkle_cap(&proof.plonk_zs_partial_products_cap)?; + self.write_merkle_cap(&proof.quotient_polys_cap)?; + self.write_opening_set(&proof.openings)?; + self.write_fri_proof::(&proof.opening_proof) + } + + /// Writes a value `proof_with_pis` of type [`ProofWithPublicInputs`] to `self.` + #[inline] + fn write_proof_with_public_inputs( + &mut self, + proof_with_pis: &ProofWithPublicInputs, + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + let ProofWithPublicInputs { + proof, + public_inputs, + } = proof_with_pis; + self.write_proof(proof)?; + self.write_field_vec(public_inputs) + } + + /// Writes a value `cfqrs` of type [`CompressedFriQueryRounds`] to `self.` + #[inline] + fn write_compressed_fri_query_rounds( + &mut self, + cfqrs: &CompressedFriQueryRounds, + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + for &i in &cfqrs.indices { + self.write_u32(i as u32)?; + } + let mut initial_trees_proofs = cfqrs.initial_trees_proofs.iter().collect::>(); + initial_trees_proofs.sort_by_key(|&x| x.0); + for (_, itp) in initial_trees_proofs { + self.write_fri_initial_proof::(itp)?; + } + for h in &cfqrs.steps { + let mut fri_query_steps = h.iter().collect::>(); + fri_query_steps.sort_by_key(|&x| x.0); + for (_, fqs) in fri_query_steps { + self.write_fri_query_step::(fqs)?; + } + } + Ok(()) + } + + /// Writes a value `fq` of type [`CompressedFriProof`] to `self.` + #[inline] + fn write_compressed_fri_proof( + &mut self, + fp: &CompressedFriProof, + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + for cap in &fp.commit_phase_merkle_caps { + self.write_merkle_cap(cap)?; + } + self.write_compressed_fri_query_rounds::(&fp.query_round_proofs)?; + self.write_field_ext_vec::(&fp.final_poly.coeffs)?; + self.write_field(fp.pow_witness) + } + + /// Writes a value `proof` of type [`CompressedProof`] to `self.` + #[inline] + fn write_compressed_proof( + &mut self, + proof: &CompressedProof, + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { + self.write_merkle_cap(&proof.wires_cap)?; + self.write_merkle_cap(&proof.plonk_zs_partial_products_cap)?; + self.write_merkle_cap(&proof.quotient_polys_cap)?; + self.write_opening_set(&proof.openings)?; + self.write_compressed_fri_proof::(&proof.opening_proof) + } + + /// Writes a value `proof_with_pis` of type [`CompressedProofWithPublicInputs`] to `self.` + #[inline] + fn write_compressed_proof_with_public_inputs( &mut self, proof_with_pis: &CompressedProofWithPublicInputs, - ) -> Result<()> { + ) -> IoResult<()> + where + F: RichField + Extendable, + C: GenericConfig, + { let CompressedProofWithPublicInputs { proof, public_inputs, @@ -593,22 +769,57 @@ impl Buffer { self.write_compressed_proof(proof)?; self.write_field_vec(public_inputs) } - pub fn read_compressed_proof_with_public_inputs< - F: RichField + Extendable, - C: GenericConfig, - const D: usize, - >( - &mut self, - common_data: &CommonCircuitData, - ) -> Result> { - let proof = self.read_compressed_proof(common_data)?; - let public_inputs = self.read_field_vec( - (self.len() - self.0.position() as usize) / std::mem::size_of::(), - )?; +} - Ok(CompressedProofWithPublicInputs { - proof, - public_inputs, - }) +impl Write for Vec { + type Error = Infallible; + + #[inline] + fn write_all(&mut self, bytes: &[u8]) -> IoResult<()> { + self.extend_from_slice(bytes); + Ok(()) + } +} + +/// Buffer +#[cfg(feature = "std")] +#[derive(Debug)] +pub struct Buffer { + bytes: Vec, + pos: usize, +} + 
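// [Editor's note: illustrative sketch, not part of the original diff.] With the `Read`/`Write`
// traits above, any `Vec<u8>` is a writer and `Buffer` (kept behind the `std` feature) is the
// matching reader, so a proof can be round-tripped without `std::io::Cursor`. Assuming
// `proof: ProofWithPublicInputs<F, C, D>` and `common_data: &CommonCircuitData<F, D>`:
//
//     use plonky2::util::serialization::{Buffer, Read, Write};
//
//     let mut bytes: Vec<u8> = Vec::new();
//     bytes
//         .write_proof_with_public_inputs(&proof)
//         .expect("writing to a Vec cannot fail");
//     let mut buffer = Buffer::new(bytes);
//     let restored: ProofWithPublicInputs<F, C, D> = buffer
//         .read_proof_with_public_inputs(common_data)
//         .expect("buffer contains a valid proof");
//     assert_eq!(proof, restored);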
+#[cfg(feature = "std")] +impl Buffer { + /// Builds a new [`Buffer`] over `buffer`. + #[inline] + pub fn new(bytes: Vec) -> Self { + Self { bytes, pos: 0 } + } + + /// Returns the inner buffer. + #[inline] + pub fn bytes(self) -> Vec { + self.bytes + } +} + +impl Remaining for Buffer { + fn remaining(&self) -> usize { + self.bytes.len() - self.pos + } +} + +impl Read for Buffer { + #[inline] + fn read_exact(&mut self, bytes: &mut [u8]) -> IoResult<()> { + let n = bytes.len(); + if self.remaining() < n { + Err(IoError) + } else { + bytes.copy_from_slice(&self.bytes[self.pos..][..n]); + self.pos += n; + Ok(()) + } } } diff --git a/plonky2/src/util/strided_view.rs b/plonky2/src/util/strided_view.rs index 0c1094fd..c165da2c 100644 --- a/plonky2/src/util/strided_view.rs +++ b/plonky2/src/util/strided_view.rs @@ -1,8 +1,8 @@ -use std::marker::PhantomData; -use std::mem::size_of; -use std::ops::{Index, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; +use core::marker::PhantomData; +use core::mem::size_of; +use core::ops::{Index, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; -use plonky2_field::packed::PackedField; +use crate::field::packed::PackedField; /// Imagine a slice, but with a stride (a la a NumPy array). /// @@ -123,6 +123,11 @@ impl<'a, P: PackedField> PackedStridedView<'a, P> { pub fn len(&self) -> usize { self.length } + + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } } impl<'a, P: PackedField> Index for PackedStridedView<'a, P> { diff --git a/rustfmt.toml b/rustfmt.toml index 65106950..4d029471 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1,2 +1,3 @@ -unstable_features = true group_imports = "StdExternalCrate" +imports_granularity = "Module" +unstable_features = true diff --git a/starky/Cargo.toml b/starky/Cargo.toml index 43bea53e..cdeb189a 100644 --- a/starky/Cargo.toml +++ b/starky/Cargo.toml @@ -5,14 +5,17 @@ version = "0.1.0" edition = "2021" [features] -default = ["parallel"] +default = ["parallel", "std", "timing"] parallel = ["plonky2/parallel", "maybe_rayon/parallel"] +std = ["anyhow/std", "plonky2/std"] +timing = ["plonky2/timing"] [dependencies] -plonky2 = { path = "../plonky2", default-features = false, features = ["rand", "timing"] } -plonky2_util = { path = "../util" } -maybe_rayon = { path = "../maybe_rayon"} -anyhow = "1.0.40" -env_logger = "0.9.0" -itertools = "0.10.0" -log = "0.4.14" +anyhow = { version = "1.0.40", default-features = false } +itertools = { version = "0.10.0", default-features = false } +log = { version = "0.4.14", default-features = false } +maybe_rayon = { path = "../maybe_rayon", default-features = false } +plonky2 = { path = "../plonky2", default-features = false } + +[dev-dependencies] +env_logger = { version = "0.9.0", default-features = false } diff --git a/starky/src/constraint_consumer.rs b/starky/src/constraint_consumer.rs index 1a061c20..03548935 100644 --- a/starky/src/constraint_consumer.rs +++ b/starky/src/constraint_consumer.rs @@ -1,4 +1,6 @@ -use std::marker::PhantomData; +use alloc::vec; +use alloc::vec::Vec; +use core::marker::PhantomData; use plonky2::field::extension::Extendable; use plonky2::field::packed::PackedField; diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs index b8c8c01e..9397d6d7 100644 --- a/starky/src/fibonacci_stark.rs +++ b/starky/src/fibonacci_stark.rs @@ -1,4 +1,6 @@ -use std::marker::PhantomData; +use alloc::vec; +use alloc::vec::Vec; +use core::marker::PhantomData; use 
plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::packed::PackedField; diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs index e84f5599..2f1a9064 100644 --- a/starky/src/get_challenges.rs +++ b/starky/src/get_challenges.rs @@ -1,3 +1,5 @@ +use alloc::vec::Vec; + use plonky2::field::extension::Extendable; use plonky2::field::polynomial::PolynomialCoeffs; use plonky2::fri::proof::{FriProof, FriProofTarget}; diff --git a/starky/src/lib.rs b/starky/src/lib.rs index 4e4b2eb6..89a57d77 100644 --- a/starky/src/lib.rs +++ b/starky/src/lib.rs @@ -2,10 +2,14 @@ #![allow(clippy::too_many_arguments)] #![allow(clippy::type_complexity)] #![feature(generic_const_exprs)] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +mod get_challenges; pub mod config; pub mod constraint_consumer; -mod get_challenges; pub mod permutation; pub mod proof; pub mod prover; diff --git a/starky/src/permutation.rs b/starky/src/permutation.rs index 7d422171..ee4225f3 100644 --- a/starky/src/permutation.rs +++ b/starky/src/permutation.rs @@ -1,5 +1,8 @@ //! Permutation arguments. +use alloc::vec; +use alloc::vec::Vec; + use itertools::Itertools; use maybe_rayon::*; use plonky2::field::batch_util::batch_multiply_inplace; diff --git a/starky/src/proof.rs b/starky/src/proof.rs index c9900c08..86093857 100644 --- a/starky/src/proof.rs +++ b/starky/src/proof.rs @@ -1,3 +1,6 @@ +use alloc::vec; +use alloc::vec::Vec; + use itertools::Itertools; use maybe_rayon::*; use plonky2::field::extension::{Extendable, FieldExtension}; diff --git a/starky/src/prover.rs b/starky/src/prover.rs index 0d291cf3..dc445f24 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -1,4 +1,5 @@ -use std::iter::once; +use alloc::vec::Vec; +use core::iter::once; use anyhow::{ensure, Result}; use itertools::Itertools; @@ -15,14 +16,13 @@ use plonky2::iop::challenger::Challenger; use plonky2::plonk::config::{GenericConfig, Hasher}; use plonky2::timed; use plonky2::util::timing::TimingTree; -use plonky2::util::transpose; -use plonky2_util::{log2_ceil, log2_strict}; +use plonky2::util::{log2_ceil, log2_strict, transpose}; use crate::config::StarkConfig; use crate::constraint_consumer::ConstraintConsumer; -use crate::permutation::PermutationCheckVars; use crate::permutation::{ compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet, + PermutationCheckVars, }; use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs}; use crate::stark::Stark; diff --git a/starky/src/recursive_verifier.rs b/starky/src/recursive_verifier.rs index 04858d55..d080f80f 100644 --- a/starky/src/recursive_verifier.rs +++ b/starky/src/recursive_verifier.rs @@ -1,4 +1,5 @@ -use std::iter::once; +use alloc::vec::Vec; +use core::iter::once; use anyhow::{ensure, Result}; use itertools::Itertools; diff --git a/starky/src/stark.rs b/starky/src/stark.rs index 8ebca87c..b223d107 100644 --- a/starky/src/stark.rs +++ b/starky/src/stark.rs @@ -1,3 +1,6 @@ +use alloc::vec; +use alloc::vec::Vec; + use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::packed::PackedField; use plonky2::fri::structure::{ @@ -7,13 +10,12 @@ use plonky2::fri::structure::{ use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_util::ceil_div_usize; +use plonky2::util::ceil_div_usize; use crate::config::StarkConfig; use crate::constraint_consumer::{ConstraintConsumer, 
RecursiveConstraintConsumer}; use crate::permutation::PermutationPair; -use crate::vars::StarkEvaluationTargets; -use crate::vars::StarkEvaluationVars; +use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars}; /// Represents a STARK system. pub trait Stark, const D: usize>: Sync { diff --git a/starky/src/stark_testing.rs b/starky/src/stark_testing.rs index b13b90df..7adcacc4 100644 --- a/starky/src/stark_testing.rs +++ b/starky/src/stark_testing.rs @@ -1,15 +1,16 @@ +use alloc::vec; +use alloc::vec::Vec; + use anyhow::{ensure, Result}; use plonky2::field::extension::{Extendable, FieldExtension}; use plonky2::field::polynomial::{PolynomialCoeffs, PolynomialValues}; -use plonky2::field::types::Field; +use plonky2::field::types::{Field, Sample}; use plonky2::hash::hash_types::RichField; use plonky2::iop::witness::{PartialWitness, Witness}; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; -use plonky2::plonk::config::GenericConfig; -use plonky2::plonk::config::Hasher; -use plonky2::util::transpose; -use plonky2_util::{log2_ceil, log2_strict}; +use plonky2::plonk::config::{GenericConfig, Hasher}; +use plonky2::util::{log2_ceil, log2_strict, transpose}; use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use crate::stark::Stark; @@ -30,7 +31,7 @@ where let trace_ldes = random_low_degree_matrix::(S::COLUMNS, rate_bits); let size = trace_ldes.len(); - let public_inputs = F::rand_arr::<{ S::PUBLIC_INPUTS }>(); + let public_inputs = F::rand_array::<{ S::PUBLIC_INPUTS }>(); let lagrange_first = PolynomialValues::selector(WITNESS_SIZE, 0).lde(rate_bits); let lagrange_last = PolynomialValues::selector(WITNESS_SIZE, WITNESS_SIZE - 1).lde(rate_bits); @@ -92,9 +93,9 @@ where { // Compute native constraint evaluation on random values. 
let vars = StarkEvaluationVars { - local_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(), - next_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(), - public_inputs: &F::Extension::rand_arr::<{ S::PUBLIC_INPUTS }>(), + local_values: &F::Extension::rand_array::<{ S::COLUMNS }>(), + next_values: &F::Extension::rand_array::<{ S::COLUMNS }>(), + public_inputs: &F::Extension::rand_array::<{ S::PUBLIC_INPUTS }>(), }; let alphas = F::rand_vec(1); let z_last = F::Extension::rand(); diff --git a/starky/src/util.rs b/starky/src/util.rs index 297978ee..1adee000 100644 --- a/starky/src/util.rs +++ b/starky/src/util.rs @@ -1,3 +1,5 @@ +use alloc::vec::Vec; + use itertools::Itertools; use plonky2::field::polynomial::PolynomialValues; use plonky2::field::types::Field; diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 18ae9a27..443fcb31 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -1,4 +1,5 @@ -use std::iter::once; +use alloc::vec::Vec; +use core::iter::once; use anyhow::{anyhow, ensure, Result}; use itertools::Itertools; @@ -260,7 +261,7 @@ fn check_permutation_options< mod tests { use plonky2::field::goldilocks_field::GoldilocksField; use plonky2::field::polynomial::PolynomialValues; - use plonky2::field::types::Field; + use plonky2::field::types::Sample; use crate::verifier::eval_l_0_and_l_last; diff --git a/system_zero/Cargo.toml b/system_zero/Cargo.toml index 826d69d8..6a36ee25 100644 --- a/system_zero/Cargo.toml +++ b/system_zero/Cargo.toml @@ -5,15 +5,15 @@ version = "0.1.0" edition = "2021" [dependencies] -plonky2 = { path = "../plonky2" } -plonky2_util = { path = "../util" } -starky = { path = "../starky" } anyhow = "1.0.40" env_logger = "0.9.0" itertools = "0.10.0" log = "0.4.14" +plonky2 = { path = "../plonky2" } +plonky2_util = { path = "../util" } rand = "0.8.4" rand_chacha = "0.3.1" +starky = { path = "../starky" } [dev-dependencies] criterion = "0.4.0" diff --git a/system_zero/src/alu/bitops.rs b/system_zero/src/alu/bitops.rs index 1f9875ca..aed63415 100644 --- a/system_zero/src/alu/bitops.rs +++ b/system_zero/src/alu/bitops.rs @@ -237,7 +237,7 @@ pub(crate) fn eval_bitop_circuit, const D: usize>( #[cfg(test)] mod tests { use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; + use plonky2::field::types::Sample; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; use starky::constraint_consumer::ConstraintConsumer; @@ -250,7 +250,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut values = [F::default(); NUM_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut values = [F::default(); NUM_COLUMNS].map(|_| F::sample(&mut rng)); // if `IS_bitop == 0`, then the constraints should be met even // if all values are garbage. 
@@ -275,7 +275,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut values = [F::default(); NUM_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut values = [F::default(); NUM_COLUMNS].map(|_| F::sample(&mut rng)); const BITOPS: [usize; 4] = [IS_AND, IS_IOR, IS_XOR, IS_ANDNOT]; for bitop in BITOPS { @@ -317,7 +317,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut values = [F::default(); NUM_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut values = [F::default(); NUM_COLUMNS].map(|_| F::sample(&mut rng)); const BITOPS: [usize; 4] = [IS_AND, IS_IOR, IS_XOR, IS_ANDNOT]; for bitop in BITOPS { diff --git a/system_zero/src/alu/division.rs b/system_zero/src/alu/division.rs index 65bedd8f..055aafd3 100644 --- a/system_zero/src/alu/division.rs +++ b/system_zero/src/alu/division.rs @@ -160,7 +160,7 @@ pub(crate) fn eval_division_circuit, const D: usize #[cfg(test)] mod tests { use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; + use plonky2::field::types::Sample; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; use starky::constraint_consumer::ConstraintConsumer; @@ -173,7 +173,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut values = [F::default(); NUM_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut values = [F::default(); NUM_COLUMNS].map(|_| F::sample(&mut rng)); // if `IS_DIV == 0`, then the constraints should be met even if all values are garbage. values[IS_DIV] = F::ZERO; @@ -195,7 +195,7 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let mut values = [F::default(); NUM_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut values = [F::default(); NUM_COLUMNS].map(|_| F::sample(&mut rng)); // set `IS_DIV == 1` and ensure all constraints are satisfied. 
values[IS_DIV] = F::ONE; diff --git a/system_zero/src/alu/mod.rs b/system_zero/src/alu/mod.rs index 33f5e902..a02a51e2 100644 --- a/system_zero/src/alu/mod.rs +++ b/system_zero/src/alu/mod.rs @@ -4,8 +4,7 @@ use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::plonk::circuit_builder::CircuitBuilder; use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use starky::vars::StarkEvaluationTargets; -use starky::vars::StarkEvaluationVars; +use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars}; use crate::alu::addition::{eval_addition, eval_addition_circuit, generate_addition}; use crate::alu::bitops::{eval_bitop, eval_bitop_circuit, generate_bitop}; diff --git a/system_zero/src/core_registers.rs b/system_zero/src/core_registers.rs index 3cf7843d..210a9971 100644 --- a/system_zero/src/core_registers.rs +++ b/system_zero/src/core_registers.rs @@ -4,8 +4,7 @@ use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::plonk::circuit_builder::CircuitBuilder; use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use starky::vars::StarkEvaluationTargets; -use starky::vars::StarkEvaluationVars; +use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars}; use crate::public_input_layout::NUM_PUBLIC_INPUTS; use crate::registers::core::*; diff --git a/system_zero/src/lookup.rs b/system_zero/src/lookup.rs index c4036424..d1d16223 100644 --- a/system_zero/src/lookup.rs +++ b/system_zero/src/lookup.rs @@ -13,8 +13,7 @@ use plonky2::field::types::{Field, PrimeField64}; use plonky2::hash::hash_types::RichField; use plonky2::plonk::circuit_builder::CircuitBuilder; use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use starky::vars::StarkEvaluationTargets; -use starky::vars::StarkEvaluationVars; +use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars}; use crate::public_input_layout::NUM_PUBLIC_INPUTS; use crate::registers::lookup::*; diff --git a/system_zero/src/permutation_unit.rs b/system_zero/src/permutation_unit.rs index c94e1677..4ba469b4 100644 --- a/system_zero/src/permutation_unit.rs +++ b/system_zero/src/permutation_unit.rs @@ -5,8 +5,7 @@ use plonky2::hash::hashing::SPONGE_WIDTH; use plonky2::hash::poseidon::{Poseidon, HALF_N_FULL_ROUNDS, N_PARTIAL_ROUNDS}; use plonky2::plonk::circuit_builder::CircuitBuilder; use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; -use starky::vars::StarkEvaluationTargets; -use starky::vars::StarkEvaluationVars; +use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars}; use crate::public_input_layout::NUM_PUBLIC_INPUTS; use crate::registers::permutation::*; @@ -255,7 +254,7 @@ pub(crate) fn eval_permutation_unit_circuit, const #[cfg(test)] mod tests { use plonky2::field::goldilocks_field::GoldilocksField; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use plonky2::hash::poseidon::Poseidon; use rand::SeedableRng; use rand_chacha::ChaCha8Rng; @@ -297,14 +296,14 @@ mod tests { type F = GoldilocksField; let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25); - let state = [F::default(); SPONGE_WIDTH].map(|_| F::rand_from_rng(&mut rng)); + let state = [F::default(); SPONGE_WIDTH].map(|_| F::sample(&mut rng)); // Get true Poseidon hash let target = GoldilocksField::poseidon(state); // Get result from `generate_permutation_unit` // Initialize `values` with randomness to test that the code 
doesn't rely on zero-filling. - let mut values = [F::default(); NUM_COLUMNS].map(|_| F::rand_from_rng(&mut rng)); + let mut values = [F::default(); NUM_COLUMNS].map(|_| F::sample(&mut rng)); for i in 0..SPONGE_WIDTH { values[col_input(i)] = state[i]; } diff --git a/system_zero/src/system_zero.rs b/system_zero/src/system_zero.rs index 19c2df8c..00673c7e 100644 --- a/system_zero/src/system_zero.rs +++ b/system_zero/src/system_zero.rs @@ -11,8 +11,7 @@ use plonky2::util::transpose; use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer}; use starky::permutation::PermutationPair; use starky::stark::Stark; -use starky::vars::StarkEvaluationTargets; -use starky::vars::StarkEvaluationVars; +use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars}; use crate::alu::{eval_alu, eval_alu_circuit, generate_alu}; use crate::core_registers::{ diff --git a/u32/Cargo.toml b/u32/Cargo.toml index f0a1d349..273db263 100644 --- a/u32/Cargo.toml +++ b/u32/Cargo.toml @@ -3,13 +3,12 @@ name = "plonky2_u32" version = "0.1.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -anyhow = "1.0.40" -rand = "0.8.4" -num = { version = "0.4", features = [ "rand" ] } -itertools = "0.10.0" -plonky2 = { path = "../plonky2" } -plonky2_util = { path = "../util" } -plonky2_field = { path = "../field" } +anyhow = { version = "1.0.40", default-features = false } +itertools = { version = "0.10.0", default-features = false } +num = { version = "0.4", default-features = false } +plonky2 = { path = "../plonky2", default-features = false } + +[dev-dependencies] +plonky2 = { path = "../plonky2", default-features = false, features = ["gate_testing"] } +rand = { version = "0.8.4", default-features = false, features = ["getrandom"] } diff --git a/u32/src/gadgets/arithmetic_u32.rs b/u32/src/gadgets/arithmetic_u32.rs index 7475681c..65f5ac07 100644 --- a/u32/src/gadgets/arithmetic_u32.rs +++ b/u32/src/gadgets/arithmetic_u32.rs @@ -1,11 +1,13 @@ -use std::marker::PhantomData; +use alloc::vec; +use alloc::vec::Vec; +use core::marker::PhantomData; +use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; use plonky2::iop::generator::{GeneratedValues, SimpleGenerator}; use plonky2::iop::target::Target; use plonky2::iop::witness::{PartitionWitness, Witness}; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::extension::Extendable; use crate::gates::add_many_u32::U32AddManyGate; use crate::gates::arithmetic_u32::U32ArithmeticGate; @@ -258,12 +260,12 @@ impl, const D: usize> SimpleGenerator mod tests { use anyhow::Result; use plonky2::iop::witness::PartialWitness; - use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use rand::{thread_rng, Rng}; + use rand::rngs::OsRng; + use rand::Rng; - use crate::gadgets::arithmetic_u32::CircuitBuilderU32; + use super::*; #[test] pub fn test_add_many_u32s() -> Result<()> { @@ -278,7 +280,7 @@ mod tests { let pw = PartialWitness::new(); let mut builder = CircuitBuilder::::new(config); - let mut rng = thread_rng(); + let mut rng = OsRng; let mut to_add = Vec::new(); let mut sum = 0u64; for _ in 0..NUM_ADDENDS { diff --git a/u32/src/gadgets/multiple_comparison.rs b/u32/src/gadgets/multiple_comparison.rs index 09ad2eb5..8d82c296 100644 --- a/u32/src/gadgets/multiple_comparison.rs +++ b/u32/src/gadgets/multiple_comparison.rs @@ -1,10 
+1,13 @@ +use alloc::vec; +use alloc::vec::Vec; + +use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; use plonky2::iop::target::{BoolTarget, Target}; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::extension::Extendable; -use plonky2_util::ceil_div_usize; +use plonky2::util::ceil_div_usize; -use super::arithmetic_u32::U32Target; +use crate::gadgets::arithmetic_u32::U32Target; use crate::gates::comparison::ComparisonGate; /// Returns true if a is less than or equal to b, considered as base-`2^num_bits` limbs of a large value. @@ -78,14 +81,14 @@ pub fn list_le_u32_circuit, const D: usize>( mod tests { use anyhow::Result; use num::BigUint; + use plonky2::field::types::Field; use plonky2::iop::witness::PartialWitness; - use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2_field::types::Field; + use rand::rngs::OsRng; use rand::Rng; - use crate::gadgets::multiple_comparison::list_le_circuit; + use super::*; fn test_list_le(size: usize, num_bits: usize) -> Result<()> { const D: usize = 2; @@ -95,7 +98,7 @@ mod tests { let pw = PartialWitness::new(); let mut builder = CircuitBuilder::::new(config); - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let lst1: Vec = (0..size) .map(|_| rng.gen_range(0..(1 << num_bits))) diff --git a/u32/src/gadgets/range_check.rs b/u32/src/gadgets/range_check.rs index fb6a9b1e..9e8cf2ad 100644 --- a/u32/src/gadgets/range_check.rs +++ b/u32/src/gadgets/range_check.rs @@ -1,7 +1,10 @@ +use alloc::vec; +use alloc::vec::Vec; + +use plonky2::field::extension::Extendable; use plonky2::hash::hash_types::RichField; use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2_field::extension::Extendable; use crate::gadgets::arithmetic_u32::U32Target; use crate::gates::range_check_u32::U32RangeCheckGate; diff --git a/u32/src/gates/add_many_u32.rs b/u32/src/gates/add_many_u32.rs index f37075cd..a2bd2dac 100644 --- a/u32/src/gates/add_many_u32.rs +++ b/u32/src/gates/add_many_u32.rs @@ -1,6 +1,12 @@ -use std::marker::PhantomData; +use alloc::boxed::Box; +use alloc::format; +use alloc::string::String; +use alloc::vec::Vec; +use core::marker::PhantomData; use itertools::unfold; +use plonky2::field::extension::Extendable; +use plonky2::field::types::Field; use plonky2::gates::gate::Gate; use plonky2::gates::util::StridedConstraintConsumer; use plonky2::hash::hash_types::RichField; @@ -12,9 +18,7 @@ use plonky2::iop::witness::{PartitionWitness, Witness}; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase}; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; -use plonky2_util::ceil_div_usize; +use plonky2::util::ceil_div_usize; const LOG2_MAX_NUM_ADDENDS: usize = 4; const MAX_NUM_ADDENDS: usize = 16; @@ -340,21 +344,17 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; - use itertools::unfold; - use plonky2::gates::gate::Gate; + use plonky2::field::extension::quartic::QuarticExtension; + use plonky2::field::goldilocks_field::GoldilocksField; + use plonky2::field::types::Sample; use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; use plonky2::hash::hash_types::HashOut; use 
plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::plonk::vars::EvaluationVars; - use plonky2_field::extension::quartic::QuarticExtension; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; + use rand::rngs::OsRng; use rand::Rng; - use crate::gates::add_many_u32::U32AddManyGate; + use super::*; #[test] fn low_degree() { @@ -428,7 +428,7 @@ mod tests { v0.iter().chain(v1.iter()).map(|&x| x.into()).collect() } - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let addends: Vec> = (0..NUM_U32_ADD_MANY_OPS) .map(|_| (0..NUM_ADDENDS).map(|_| rng.gen::() as u64).collect()) .collect(); diff --git a/u32/src/gates/arithmetic_u32.rs b/u32/src/gates/arithmetic_u32.rs index 47889954..3f249d29 100644 --- a/u32/src/gates/arithmetic_u32.rs +++ b/u32/src/gates/arithmetic_u32.rs @@ -1,6 +1,13 @@ -use std::marker::PhantomData; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; use itertools::unfold; +use plonky2::field::extension::Extendable; +use plonky2::field::packed::PackedField; +use plonky2::field::types::Field; use plonky2::gates::gate::Gate; use plonky2::gates::packed_util::PackedEvaluableBase; use plonky2::gates::util::StridedConstraintConsumer; @@ -16,9 +23,6 @@ use plonky2::plonk::vars::{ EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, EvaluationVarsBasePacked, }; -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; -use plonky2_field::types::Field; /// A gate to perform a basic mul-add on 32-bit values (we assume they are range-checked beforehand). #[derive(Copy, Clone, Debug)] @@ -411,21 +415,16 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; - use plonky2::gates::gate::Gate; + use plonky2::field::goldilocks_field::GoldilocksField; + use plonky2::field::types::Sample; use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; use plonky2::hash::hash_types::HashOut; - use plonky2::hash::hash_types::RichField; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::plonk::vars::EvaluationVars; - use plonky2_field::extension::Extendable; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; + use rand::rngs::OsRng; use rand::Rng; - use crate::gates::arithmetic_u32::U32ArithmeticGate; + use super::*; #[test] fn low_degree() { @@ -507,7 +506,7 @@ mod tests { type FF = >::FE; const NUM_U32_ARITHMETIC_OPS: usize = 3; - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let multiplicands_0: Vec<_> = (0..NUM_U32_ARITHMETIC_OPS) .map(|_| rng.gen::() as u64) .collect(); diff --git a/u32/src/gates/comparison.rs b/u32/src/gates/comparison.rs index a2a0cfcf..6cb67106 100644 --- a/u32/src/gates/comparison.rs +++ b/u32/src/gates/comparison.rs @@ -1,5 +1,12 @@ -use std::marker::PhantomData; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use plonky2::field::extension::Extendable; +use plonky2::field::packed::PackedField; +use plonky2::field::types::{Field, Field64}; use plonky2::gates::gate::Gate; use plonky2::gates::packed_util::PackedEvaluableBase; use plonky2::gates::util::StridedConstraintConsumer; @@ -15,10 +22,7 @@ use plonky2::plonk::vars::{ EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, EvaluationVarsBasePacked, }; 
-use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; -use plonky2_field::types::{Field, Field64}; -use plonky2_util::{bits_u64, ceil_div_usize}; +use plonky2::util::{bits_u64, ceil_div_usize}; /// A gate for checking that one value is less than or equal to another. #[derive(Clone, Debug)] @@ -512,20 +516,16 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; - use plonky2::gates::gate::Gate; + use plonky2::field::goldilocks_field::GoldilocksField; + use plonky2::field::types::{PrimeField64, Sample}; use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; use plonky2::hash::hash_types::HashOut; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::plonk::vars::EvaluationVars; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; - use plonky2_field::types::PrimeField64; + use rand::rngs::OsRng; use rand::Rng; - use crate::gates::comparison::ComparisonGate; + use super::*; #[test] fn wire_indices() { @@ -657,7 +657,7 @@ mod tests { v.iter().map(|&x| x.into()).collect() }; - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let max: u64 = 1 << (num_bits - 1); let first_input_u64 = rng.gen_range(0..max); let second_input_u64 = { diff --git a/u32/src/gates/range_check_u32.rs b/u32/src/gates/range_check_u32.rs index 6e8f2cd5..6dd20d48 100644 --- a/u32/src/gates/range_check_u32.rs +++ b/u32/src/gates/range_check_u32.rs @@ -1,5 +1,11 @@ -use std::marker::PhantomData; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use plonky2::field::extension::Extendable; +use plonky2::field::types::Field; use plonky2::gates::gate::Gate; use plonky2::gates::util::StridedConstraintConsumer; use plonky2::hash::hash_types::RichField; @@ -10,9 +16,7 @@ use plonky2::iop::witness::{PartitionWitness, Witness}; use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit}; use plonky2::plonk::vars::{EvaluationTargets, EvaluationVars, EvaluationVarsBase}; -use plonky2_field::extension::Extendable; -use plonky2_field::types::Field; -use plonky2_util::ceil_div_usize; +use plonky2::util::ceil_div_usize; /// A gate which can decompose a number into base B little-endian limbs. 
#[derive(Copy, Clone, Debug)] @@ -201,22 +205,18 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; use itertools::unfold; - use plonky2::gates::gate::Gate; + use plonky2::field::extension::quartic::QuarticExtension; + use plonky2::field::goldilocks_field::GoldilocksField; + use plonky2::field::types::{Field, Sample}; use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; use plonky2::hash::hash_types::HashOut; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::plonk::vars::EvaluationVars; - use plonky2_field::extension::quartic::QuarticExtension; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; - use plonky2_util::ceil_div_usize; + use rand::rngs::OsRng; use rand::Rng; - use crate::gates::range_check_u32::U32RangeCheckGate; + use super::*; #[test] fn low_degree() { @@ -290,7 +290,7 @@ mod tests { #[test] fn test_gate_constraint_good() { - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let input_limbs: Vec<_> = (0..8).map(|_| rng.gen::() as u64).collect(); test_gate_constraint(input_limbs); @@ -299,7 +299,7 @@ mod tests { #[test] #[should_panic] fn test_gate_constraint_bad() { - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let input_limbs: Vec<_> = (0..8).map(|_| rng.gen()).collect(); test_gate_constraint(input_limbs); diff --git a/u32/src/gates/subtraction_u32.rs b/u32/src/gates/subtraction_u32.rs index b08d900b..9a3b1db6 100644 --- a/u32/src/gates/subtraction_u32.rs +++ b/u32/src/gates/subtraction_u32.rs @@ -1,5 +1,12 @@ -use std::marker::PhantomData; +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; +use alloc::{format, vec}; +use core::marker::PhantomData; +use plonky2::field::extension::Extendable; +use plonky2::field::packed::PackedField; +use plonky2::field::types::Field; use plonky2::gates::gate::Gate; use plonky2::gates::packed_util::PackedEvaluableBase; use plonky2::gates::util::StridedConstraintConsumer; @@ -15,9 +22,6 @@ use plonky2::plonk::vars::{ EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, EvaluationVarsBasePacked, }; -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; -use plonky2_field::types::Field; /// A gate to perform a subtraction on 32-bit limbs: given `x`, `y`, and `borrow`, it returns /// the result `x - y - borrow` and, if this underflows, a new `borrow`. Inputs are not range-checked. 
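The import rewrites running through these `u32` gate files (`std::` → `core::`, plus explicit `alloc::` imports for `Box`, `String`, `Vec`, `format!`, and `vec!`) are the mechanical half of the crate's move to `no_std`; the crate-root attributes land in `u32/src/lib.rs` below. A minimal sketch of the crate skeleton this converges on, with placeholder item names that are not taken from the diff:

```rust
// Sketch of the no_std crate layout these hunks converge on; the struct and
// method names here are placeholders, not items from the diff.
#![no_std]

// Pull in the `alloc` crate explicitly, since `std` no longer re-exports it.
extern crate alloc;

use alloc::boxed::Box;
use alloc::string::String;
use alloc::vec::Vec;
use alloc::{format, vec};
use core::marker::PhantomData;

/// Placeholder gate-like struct mirroring the `PhantomData` pattern above.
pub struct ExampleGate<F> {
    pub num_ops: usize,
    _phantom: PhantomData<F>,
}

impl<F> ExampleGate<F> {
    pub fn new(num_ops: usize) -> Self {
        Self {
            num_ops,
            _phantom: PhantomData,
        }
    }

    /// Heap allocation still works in a `no_std` crate, via `alloc`.
    pub fn id(&self) -> String {
        format!("ExampleGate {{ num_ops: {} }}", self.num_ops)
    }

    pub fn wires(&self) -> Vec<usize> {
        vec![0; self.num_ops]
    }

    pub fn boxed(self) -> Box<Self> {
        Box::new(self)
    }
}
```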
@@ -329,21 +333,17 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; - use anyhow::Result; - use plonky2::gates::gate::Gate; + use plonky2::field::extension::quartic::QuarticExtension; + use plonky2::field::goldilocks_field::GoldilocksField; + use plonky2::field::types::{PrimeField64, Sample}; use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; use plonky2::hash::hash_types::HashOut; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::plonk::vars::EvaluationVars; - use plonky2_field::extension::quartic::QuarticExtension; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; - use plonky2_field::types::PrimeField64; + use rand::rngs::OsRng; use rand::Rng; - use crate::gates::subtraction_u32::U32SubtractionGate; + use super::*; #[test] fn low_degree() { @@ -415,7 +415,7 @@ mod tests { v0.iter().chain(v1.iter()).map(|&x| x.into()).collect() } - let mut rng = rand::thread_rng(); + let mut rng = OsRng; let inputs_x = (0..NUM_U32_SUBTRACTION_OPS) .map(|_| rng.gen::() as u64) .collect(); diff --git a/u32/src/lib.rs b/u32/src/lib.rs index 0957c7bc..2d8d07f3 100644 --- a/u32/src/lib.rs +++ b/u32/src/lib.rs @@ -1,4 +1,7 @@ #![allow(clippy::needless_range_loop)] +#![no_std] + +extern crate alloc; pub mod gadgets; pub mod gates; diff --git a/u32/src/witness.rs b/u32/src/witness.rs index ddc3432f..004fedc6 100644 --- a/u32/src/witness.rs +++ b/u32/src/witness.rs @@ -1,6 +1,6 @@ +use plonky2::field::types::{Field, PrimeField64}; use plonky2::iop::generator::GeneratedValues; use plonky2::iop::witness::Witness; -use plonky2_field::types::{Field, PrimeField64}; use crate::gadgets::arithmetic_u32::U32Target; diff --git a/util/Cargo.toml b/util/Cargo.toml index a1ab402a..4e0b4b15 100644 --- a/util/Cargo.toml +++ b/util/Cargo.toml @@ -5,3 +5,4 @@ version = "0.1.0" edition = "2021" [dependencies] +rand = { version = "0.8.5", default-features = false, features = ["getrandom"] } diff --git a/util/src/lib.rs b/util/src/lib.rs index b22a4236..d516a558 100644 --- a/util/src/lib.rs +++ b/util/src/lib.rs @@ -4,16 +4,19 @@ #![allow(clippy::len_without_is_empty)] #![allow(clippy::needless_range_loop)] #![allow(clippy::return_self_not_must_use)] +#![no_std] -use std::arch::asm; -use std::hint::unreachable_unchecked; -use std::mem::size_of; -use std::ptr::{swap, swap_nonoverlapping}; +extern crate alloc; -mod transpose_util; +use alloc::vec::Vec; +use core::hint::unreachable_unchecked; +use core::mem::size_of; +use core::ptr::{swap, swap_nonoverlapping}; use crate::transpose_util::transpose_in_place_square; +mod transpose_util; + pub fn bits_u64(n: u64) -> usize { (64 - n.leading_zeros()) as usize } @@ -267,15 +270,71 @@ pub fn assume(p: bool) { /// This function has no semantics. It is a hint only. #[inline(always)] pub fn branch_hint() { + // NOTE: These are the currently supported assembly architectures. See the + // [nightly reference](https://doc.rust-lang.org/nightly/reference/inline-assembly.html) for + // the most up-to-date list. 
+ #[cfg(any( + target_arch = "aarch64", + target_arch = "arm", + target_arch = "riscv32", + target_arch = "riscv64", + target_arch = "x86", + target_arch = "x86_64", + ))] unsafe { - asm!("", options(nomem, nostack, preserves_flags)); + core::arch::asm!("", options(nomem, nostack, preserves_flags)); } } #[cfg(test)] mod tests { + use alloc::vec; + use alloc::vec::Vec; + + use rand::rngs::OsRng; + use rand::Rng; + use crate::{log2_ceil, log2_strict}; + #[test] + fn test_reverse_index_bits() { + let lengths = [32, 128, 1 << 16]; + let mut rng = OsRng; + for _ in 0..32 { + for length in lengths { + let mut rand_list: Vec = Vec::with_capacity(length); + rand_list.resize_with(length, || rng.gen()); + + let out = super::reverse_index_bits(&rand_list); + let expect = reverse_index_bits_naive(&rand_list); + + for (out, expect) in out.iter().zip(&expect) { + assert_eq!(out, expect); + } + } + } + } + + #[test] + fn test_reverse_index_bits_in_place() { + let lengths = [32, 128, 1 << 16]; + let mut rng = OsRng; + for _ in 0..32 { + for length in lengths { + let mut rand_list: Vec = Vec::with_capacity(length); + rand_list.resize_with(length, || rng.gen()); + + let expect = reverse_index_bits_naive(&rand_list); + + super::reverse_index_bits_in_place(&mut rand_list); + + for (got, expect) in rand_list.iter().zip(&expect) { + assert_eq!(got, expect); + } + } + } + } + #[test] fn test_log2_strict() { assert_eq!(log2_strict(1), 0); @@ -326,4 +385,17 @@ mod tests { assert_eq!(log2_ceil(usize::MAX - 1), usize::BITS as usize); assert_eq!(log2_ceil(usize::MAX), usize::BITS as usize); } + + fn reverse_index_bits_naive(arr: &[T]) -> Vec { + let n = arr.len(); + let n_power = log2_strict(n); + + let mut out = vec![None; n]; + for (i, v) in arr.iter().enumerate() { + let dst = i.reverse_bits() >> (64 - n_power); + out[dst] = Some(*v); + } + + out.into_iter().map(|x| x.unwrap()).collect() + } } diff --git a/util/src/transpose_util.rs b/util/src/transpose_util.rs index 1c8280a8..a79a7adb 100644 --- a/util/src/transpose_util.rs +++ b/util/src/transpose_util.rs @@ -1,4 +1,4 @@ -use std::ptr::swap; +use core::ptr::swap; const LB_BLOCK_SIZE: usize = 3; diff --git a/waksman/Cargo.toml b/waksman/Cargo.toml index 5aa25dc9..7be3f414 100644 --- a/waksman/Cargo.toml +++ b/waksman/Cargo.toml @@ -5,11 +5,11 @@ version = "0.1.0" edition = "2021" [dependencies] +anyhow = "1.0.40" +array_tool = "1.0.3" +bimap = "0.6.1" +itertools = "0.10.0" "plonky2" = { path = "../plonky2" } "plonky2_field" = { path = "../field" } "plonky2_util" = { path = "../util" } -anyhow = "1.0.40" -bimap = "0.6.1" -itertools = "0.10.0" rand = "0.8.4" -array_tool = "1.0.3" diff --git a/waksman/src/gates/assert_le.rs b/waksman/src/gates/assert_le.rs index 27242370..745bb62f 100644 --- a/waksman/src/gates/assert_le.rs +++ b/waksman/src/gates/assert_le.rs @@ -446,7 +446,7 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { - use std::marker::PhantomData; + use core::marker::PhantomData; use anyhow::Result; use plonky2::gates::gate::Gate; @@ -456,8 +456,7 @@ mod tests { use plonky2::plonk::vars::EvaluationVars; use plonky2_field::extension::quartic::QuarticExtension; use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; - use plonky2_field::types::PrimeField64; + use plonky2_field::types::{Field, PrimeField64, Sample}; use rand::Rng; use crate::gates::assert_le::AssertLessThanGate; diff --git a/waksman/src/gates/switch.rs b/waksman/src/gates/switch.rs index 4509bf0a..58fad4c7 100644 --- 
a/waksman/src/gates/switch.rs +++ b/waksman/src/gates/switch.rs @@ -334,7 +334,7 @@ mod tests { use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; use plonky2::plonk::vars::EvaluationVars; use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::Field; + use plonky2_field::types::{Field, Sample}; use crate::gates::switch::SwitchGate; diff --git a/waksman/src/permutation.rs b/waksman/src/permutation.rs index 90dc5086..b9d69f75 100644 --- a/waksman/src/permutation.rs +++ b/waksman/src/permutation.rs @@ -1,7 +1,8 @@ use std::collections::BTreeMap; use std::marker::PhantomData; -use plonky2::field::{extension::Extendable, types::Field}; +use plonky2::field::extension::Extendable; +use plonky2::field::types::Field; use plonky2::hash::hash_types::RichField; use plonky2::iop::generator::{GeneratedValues, SimpleGenerator}; use plonky2::iop::target::Target; @@ -370,11 +371,12 @@ impl SimpleGenerator for PermutationGenerator { #[cfg(test)] mod tests { use anyhow::Result; - use plonky2::field::types::Field; + use plonky2::field::types::{Field, Sample}; use plonky2::iop::witness::PartialWitness; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use rand::{seq::SliceRandom, thread_rng, Rng}; + use rand::seq::SliceRandom; + use rand::{thread_rng, Rng}; use super::*; diff --git a/waksman/src/sorting.rs b/waksman/src/sorting.rs index 010bc8b9..dbfe8a81 100644 --- a/waksman/src/sorting.rs +++ b/waksman/src/sorting.rs @@ -183,7 +183,7 @@ impl, const D: usize> SimpleGenerator #[cfg(test)] mod tests { use anyhow::Result; - use plonky2::field::types::{Field, PrimeField64}; + use plonky2::field::types::{Field, PrimeField64, Sample}; use plonky2::iop::witness::PartialWitness; use plonky2::plonk::circuit_data::CircuitConfig; use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
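One last pattern worth calling out: in the crates that drop `rand`'s default features (`u32`, `util`), test randomness switches from `rand::thread_rng()` to `rand::rngs::OsRng`, since `thread_rng` requires `rand`'s `std` feature while `OsRng` only needs `getrandom`; the `std`-only `waksman` crate keeps `thread_rng`. A small sketch of the `OsRng` usage, assuming only the `rand` 0.8 APIs already appearing in these hunks:

```rust
use rand::rngs::OsRng;
use rand::Rng;

fn main() {
    // `OsRng` is a zero-sized handle to the operating system's RNG. Unlike
    // `thread_rng()`, it needs no thread-local storage and no `std` feature,
    // only `getrandom`, matching the `default-features = false` dev-dependencies above.
    let mut rng = OsRng;

    // The same call sites as before: `gen`, `gen_range`, widened u32 inputs.
    let limb = rng.gen::<u32>() as u64;
    let bounded = rng.gen_range(0..(1u64 << 20));

    println!("limb = {limb}, bounded = {bounded}");
}
```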