From eeeb99f546cf29a4dfc01765dde73f416c1ab1c1 Mon Sep 17 00:00:00 2001 From: M Alghazwi Date: Tue, 11 Feb 2025 11:55:02 +0100 Subject: [PATCH] add support for Poseidon2 BN254 --- plonky2/Cargo.toml | 2 + plonky2/examples/poseidon2_bn254_example.rs | 73 +++++++++ plonky2/src/hash/mod.rs | 1 + plonky2/src/hash/poseidon2_bn254.rs | 157 ++++++++++++++++++++ plonky2/src/plonk/config.rs | 11 ++ 5 files changed, 244 insertions(+) create mode 100644 plonky2/examples/poseidon2_bn254_example.rs create mode 100644 plonky2/src/hash/poseidon2_bn254.rs diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml index 4f67f35a..7d7de547 100644 --- a/plonky2/Cargo.toml +++ b/plonky2/Cargo.toml @@ -35,6 +35,8 @@ unroll = { workspace = true } web-time = { version = "1.0.0", optional = true } strum = "0.26" strum_macros = "0.26" +rust-bn254-hash = {git = "https://github.com/codex-storage/rust-bn254-hash.git"} +ark-serialize = {version = "0.5.0"} # Local dependencies plonky2_field = { version = "1.0.0", path = "../field", default-features = false } diff --git a/plonky2/examples/poseidon2_bn254_example.rs b/plonky2/examples/poseidon2_bn254_example.rs new file mode 100644 index 00000000..9d408dee --- /dev/null +++ b/plonky2/examples/poseidon2_bn254_example.rs @@ -0,0 +1,73 @@ +use std::fs; +use anyhow::Result; +use plonky2::field::types::Field; +use plonky2::iop::witness::{PartialWitness, WitnessWrite}; +use plonky2::plonk::circuit_builder::CircuitBuilder; +use plonky2::plonk::circuit_data::CircuitConfig; +use plonky2::plonk::config::{GenericConfig, Poseidon2BN254Config}; +use plonky2::plonk::prover::ProverOptions; +use plonky2::plonk::verifier::{VerifierOptions, HashStatisticsPrintLevel}; + +/// An example of using Plonky2 to prove a statement of the form +/// "I know the 100th element of the Fibonacci sequence, starting with constants a and b." +/// When a == 0 and b == 1, this is proving knowledge of the 100th (standard) Fibonacci number. 
+fn main() -> Result<()> { + const D: usize = 2; + type C = Poseidon2BN254Config; + type F = >::F; + + let config = CircuitConfig::standard_recursion_config(); + let mut builder = CircuitBuilder::::new(config); + + // The arithmetic circuit. + let initial_a = builder.add_virtual_target(); + let initial_b = builder.add_virtual_target(); + let mut prev_target = initial_a; + let mut cur_target = initial_b; + for _ in 0..99 { + let temp = builder.add(prev_target, cur_target); + prev_target = cur_target; + cur_target = temp; + } + + // Public inputs are the two initial values (provided below) and the result (which is generated). + builder.register_public_input(initial_a); + builder.register_public_input(initial_b); + builder.register_public_input(cur_target); + + // Provide initial values. + let mut pw = PartialWitness::new(); + pw.set_target(initial_a, F::ZERO)?; + pw.set_target(initial_b, F::ONE)?; + + let data = builder.build::(); + + let prover_opts = ProverOptions { + export_witness: Some(String::from("fibonacci_witness.json")), + print_hash_statistics: HashStatisticsPrintLevel::Info, + }; + + let proof = data.prove_with_options(pw, &prover_opts)?; + + // serialize circuit into JSON + // let common_circuit_data_serialized = serde_json::to_string(&data.common ).unwrap(); + // let verifier_only_circuit_data_serialized = serde_json::to_string(&data.verifier_only).unwrap(); + // let proof_serialized = serde_json::to_string(&proof ).unwrap(); + // fs::write("fibonacci_common_k.json" , common_circuit_data_serialized) .expect("Unable to write file"); + // fs::write("fibonacci_vkey_k.json" , verifier_only_circuit_data_serialized).expect("Unable to write file"); + // fs::write("fibonacci_proof_k.json" , proof_serialized) .expect("Unable to write file"); + + // println!("const sigma: {:?}", data.verifier_only.constants_sigmas_cap); + // println!("circ digest: {:?}", data.verifier_only.circuit_digest); + + println!( + "100th Fibonacci number mod |F| (starting with {}, {}) is: 
{}", + proof.public_inputs[0], proof.public_inputs[1], proof.public_inputs[2] + ); + + let verifier_opts = VerifierOptions { + print_hash_statistics: HashStatisticsPrintLevel::Summary, + }; + data.verify_with_options(proof, &verifier_opts) + +} diff --git a/plonky2/src/hash/mod.rs b/plonky2/src/hash/mod.rs index 0e4bb8a5..d20f0250 100644 --- a/plonky2/src/hash/mod.rs +++ b/plonky2/src/hash/mod.rs @@ -11,3 +11,4 @@ pub mod merkle_tree; pub mod path_compression; pub mod poseidon; pub mod poseidon_goldilocks; +pub mod poseidon2_bn254; diff --git a/plonky2/src/hash/poseidon2_bn254.rs b/plonky2/src/hash/poseidon2_bn254.rs new file mode 100644 index 00000000..6d9b66b0 --- /dev/null +++ b/plonky2/src/hash/poseidon2_bn254.rs @@ -0,0 +1,157 @@ +#[cfg(not(feature = "std"))] +use alloc::{vec, vec::Vec}; +use core::mem::size_of; + +use rust_bn254_hash::hash::Hash; +use crate::hash::hash_types::{BytesHash, RichField}; +use crate::hash::hashing::PlonkyPermutation; +use crate::plonk::config::Hasher; +use rust_bn254_hash::sponge::{sponge_u64_pad, sponge_u64_no_pad}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; + +pub const SPONGE_RATE: usize = 8; +pub const SPONGE_CAPACITY: usize = 4; +pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY; + +#[derive(Copy, Clone, Default, Debug, PartialEq)] +pub struct Poseidon2BN254Permutation { + state: [F; SPONGE_WIDTH], +} + +impl Eq for Poseidon2BN254Permutation {} + +impl AsRef<[F]> for Poseidon2BN254Permutation { + fn as_ref(&self) -> &[F] { + &self.state + } +} + +impl PlonkyPermutation for Poseidon2BN254Permutation { + const RATE: usize = SPONGE_RATE; + const WIDTH: usize = SPONGE_WIDTH; + + fn new>(elts: I) -> Self { + let mut perm = Self { + state: [F::default(); SPONGE_WIDTH], + }; + perm.set_from_iter(elts, 0); + perm + } + + fn set_elt(&mut self, elt: F, idx: usize) { + self.state[idx] = elt; + } + + fn set_from_slice(&mut self, elts: &[F], start_idx: usize) { + let begin = start_idx; + let end = start_idx 
+ elts.len(); + self.state[begin..end].copy_from_slice(elts); + } + + fn set_from_iter>(&mut self, elts: I, start_idx: usize) { + for (s, e) in self.state[start_idx..].iter_mut().zip(elts) { + *s = e; + } + } + + fn permute(&mut self) { + // convert state of Goldilocks elems to u64 + let mut state_u64 = vec![0u64; SPONGE_WIDTH ]; + for i in 0..SPONGE_WIDTH { + state_u64[i] + = self.state[i].to_canonical_u64(); + } + + // Create an iterator that repeatedly applies the sponge permutation. + let hash_onion = core::iter::repeat_with(|| { + // Compute the next hash layer. + let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64.clone()); + // Convert the sponge output to u64. + let output = felts_to_u64(hash); + // Update the state for the next iteration. + state_u64 = output.clone(); + output.into_iter() + }).flatten(); + + // Parse field elements from u64 stream, using rejection sampling such that words that don't + // fit in F are ignored. + let new_state: Vec = hash_onion + .filter(|&word| word < F::ORDER) + .map(F::from_canonical_u64) + .take(SPONGE_WIDTH) + .collect(); + // update the state + self.state = new_state.try_into().expect("State length mismatch"); + } + + fn squeeze(&self) -> &[F] { + &self.state[..Self::RATE] + } +} + +const N: usize = 32; +/// Keccak-256 hash function. 
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub struct Poseidon2BN254;
+impl<F: RichField> Hasher<F> for Poseidon2BN254 {
+    const HASH_SIZE: usize = N;
+    type Hash = BytesHash<N>;
+    type Permutation = Poseidon2BN254Permutation<F>;
+
+    /// Hashes the input by spongeing its canonical u64 limbs and
+    /// truncating the serialized sponge output to `N` bytes.
+    fn hash_no_pad(input: &[F]) -> Self::Hash {
+        let mut state_u64 = vec![0u64; input.len()];
+        for i in 0..input.len() {
+            state_u64[i] = input[i].to_canonical_u64();
+        }
+        let mut arr = [0; N];
+        let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64);
+        let hash_bytes = felts_to_bytes(hash);
+        arr.copy_from_slice(&hash_bytes[..N]);
+        BytesHash(arr)
+    }
+
+    /// Compresses two digests: concatenates the bytes, reinterprets them as
+    /// big-endian u64 limbs, and sponges the result.
+    fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
+        let mut input_bytes = vec![0; N * 2];
+        input_bytes[0..N].copy_from_slice(&left.0);
+        input_bytes[N..].copy_from_slice(&right.0);
+        let mut arr = [0; N];
+        let state_u64: Vec<u64> = input_bytes
+            .chunks_exact(8)
+            .map(|chunk| u64::from_be_bytes(chunk.try_into().unwrap()))
+            .collect();
+
+        let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64);
+        let hash_bytes = felts_to_bytes(hash);
+        arr.copy_from_slice(&hash_bytes[..N]);
+        BytesHash(arr)
+    }
+}
+
+/// Serializes an ark type to its uncompressed byte encoding.
+fn felts_to_bytes<E>(f: E) -> Vec<u8>
+where
+    E: CanonicalSerialize,
+{
+    let mut bytes = Vec::new();
+    f.serialize_uncompressed(&mut bytes)
+        .expect("serialization failed");
+    bytes
+}
+
+/// Deserializes an ark type from its uncompressed byte encoding.
+// NOTE(review): currently unused in this module — kept as the inverse of
+// `felts_to_bytes`; confirm a caller exists before removing.
+#[allow(dead_code)]
+fn bytes_to_felts<E>(bytes: &[u8]) -> E
+where
+    E: CanonicalDeserialize,
+{
+    E::deserialize_uncompressed(bytes).expect("deserialization failed")
+}
+
+/// Serializes an ark type and reinterprets the bytes as little-endian u64 limbs.
+fn felts_to_u64<E>(f: E) -> Vec<u64>
+where
+    E: CanonicalSerialize,
+{
+    let mut bytes = Vec::new();
+    f.serialize_uncompressed(&mut bytes)
+        .expect("serialization failed");
+    bytes
+        .chunks_exact(size_of::<u64>())
+        .map(|chunk| u64::from_le_bytes(chunk.try_into().unwrap()))
+        .collect()
+}
diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs
index 217c8897..741daeb7 100644
--- a/plonky2/src/plonk/config.rs
+++ b/plonky2/src/plonk/config.rs
@@ -19,6 +19,7 @@ use crate::field::goldilocks_field::GoldilocksField;
 use crate::hash::hash_types::{HashOut, RichField};
 use crate::hash::hashing::PlonkyPermutation;
 use crate::hash::keccak::KeccakHash;
+use crate::hash::poseidon2_bn254::Poseidon2BN254;
 use crate::hash::poseidon::PoseidonHash;
 use crate::iop::target::{BoolTarget, Target};
 use crate::plonk::circuit_builder::CircuitBuilder;
@@ -124,3 +125,13 @@ impl GenericConfig<2> for KeccakGoldilocksConfig {
     type Hasher = KeccakHash<25>;
     type InnerHasher = PoseidonHash;
 }
+
+/// Configuration using Poseidon2BN254 (outer) and Poseidon (inner) over the Goldilocks field.
+#[derive(Debug, Copy, Clone, Default, Eq, PartialEq)]
+pub struct Poseidon2BN254Config;
+impl GenericConfig<2> for Poseidon2BN254Config {
+    type F = GoldilocksField;
+    type FE = QuadraticExtension<Self::F>;
+    type Hasher = Poseidon2BN254;
+    type InnerHasher = PoseidonHash;
+}