From eeeb99f546cf29a4dfc01765dde73f416c1ab1c1 Mon Sep 17 00:00:00 2001 From: M Alghazwi Date: Tue, 11 Feb 2025 11:55:02 +0100 Subject: [PATCH 1/5] add support for Poseidon2 BN254 --- plonky2/Cargo.toml | 2 + plonky2/examples/poseidon2_bn254_example.rs | 73 +++++++++ plonky2/src/hash/mod.rs | 1 + plonky2/src/hash/poseidon2_bn254.rs | 157 ++++++++++++++++++++ plonky2/src/plonk/config.rs | 11 ++ 5 files changed, 244 insertions(+) create mode 100644 plonky2/examples/poseidon2_bn254_example.rs create mode 100644 plonky2/src/hash/poseidon2_bn254.rs diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml index 4f67f35a..7d7de547 100644 --- a/plonky2/Cargo.toml +++ b/plonky2/Cargo.toml @@ -35,6 +35,8 @@ unroll = { workspace = true } web-time = { version = "1.0.0", optional = true } strum = "0.26" strum_macros = "0.26" +rust-bn254-hash = {git = "https://github.com/codex-storage/rust-bn254-hash.git"} +ark-serialize = {version = "0.5.0"} # Local dependencies plonky2_field = { version = "1.0.0", path = "../field", default-features = false } diff --git a/plonky2/examples/poseidon2_bn254_example.rs b/plonky2/examples/poseidon2_bn254_example.rs new file mode 100644 index 00000000..9d408dee --- /dev/null +++ b/plonky2/examples/poseidon2_bn254_example.rs @@ -0,0 +1,73 @@ +use std::fs; +use anyhow::Result; +use plonky2::field::types::Field; +use plonky2::iop::witness::{PartialWitness, WitnessWrite}; +use plonky2::plonk::circuit_builder::CircuitBuilder; +use plonky2::plonk::circuit_data::CircuitConfig; +use plonky2::plonk::config::{GenericConfig, Poseidon2BN254Config}; +use plonky2::plonk::prover::ProverOptions; +use plonky2::plonk::verifier::{VerifierOptions, HashStatisticsPrintLevel}; + +/// An example of using Plonky2 to prove a statement of the form +/// "I know the 100th element of the Fibonacci sequence, starting with constants a and b." +/// When a == 0 and b == 1, this is proving knowledge of the 100th (standard) Fibonacci number. +fn main() -> Result<()> { + const D: usize = 2; + type C = Poseidon2BN254Config; + type F = >::F; + + let config = CircuitConfig::standard_recursion_config(); + let mut builder = CircuitBuilder::::new(config); + + // The arithmetic circuit. + let initial_a = builder.add_virtual_target(); + let initial_b = builder.add_virtual_target(); + let mut prev_target = initial_a; + let mut cur_target = initial_b; + for _ in 0..99 { + let temp = builder.add(prev_target, cur_target); + prev_target = cur_target; + cur_target = temp; + } + + // Public inputs are the two initial values (provided below) and the result (which is generated). + builder.register_public_input(initial_a); + builder.register_public_input(initial_b); + builder.register_public_input(cur_target); + + // Provide initial values. 
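+    // Setting a = 0 and b = 1 below selects the standard Fibonacci sequence,
+    // so the third public input is the 100th Fibonacci number mod |F|.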
+ let mut pw = PartialWitness::new(); + pw.set_target(initial_a, F::ZERO)?; + pw.set_target(initial_b, F::ONE)?; + + let data = builder.build::(); + + let prover_opts = ProverOptions { + export_witness: Some(String::from("fibonacci_witness.json")), + print_hash_statistics: HashStatisticsPrintLevel::Info, + }; + + let proof = data.prove_with_options(pw, &prover_opts)?; + + // serialize circuit into JSON + // let common_circuit_data_serialized = serde_json::to_string(&data.common ).unwrap(); + // let verifier_only_circuit_data_serialized = serde_json::to_string(&data.verifier_only).unwrap(); + // let proof_serialized = serde_json::to_string(&proof ).unwrap(); + // fs::write("fibonacci_common_k.json" , common_circuit_data_serialized) .expect("Unable to write file"); + // fs::write("fibonacci_vkey_k.json" , verifier_only_circuit_data_serialized).expect("Unable to write file"); + // fs::write("fibonacci_proof_k.json" , proof_serialized) .expect("Unable to write file"); + + // println!("const sigma: {:?}", data.verifier_only.constants_sigmas_cap); + // println!("circ digest: {:?}", data.verifier_only.circuit_digest); + + println!( + "100th Fibonacci number mod |F| (starting with {}, {}) is: {}", + proof.public_inputs[0], proof.public_inputs[1], proof.public_inputs[2] + ); + + let verifier_opts = VerifierOptions { + print_hash_statistics: HashStatisticsPrintLevel::Summary, + }; + data.verify_with_options(proof, &verifier_opts) + +} diff --git a/plonky2/src/hash/mod.rs b/plonky2/src/hash/mod.rs index 0e4bb8a5..d20f0250 100644 --- a/plonky2/src/hash/mod.rs +++ b/plonky2/src/hash/mod.rs @@ -11,3 +11,4 @@ pub mod merkle_tree; pub mod path_compression; pub mod poseidon; pub mod poseidon_goldilocks; +pub mod poseidon2_bn254; diff --git a/plonky2/src/hash/poseidon2_bn254.rs b/plonky2/src/hash/poseidon2_bn254.rs new file mode 100644 index 00000000..6d9b66b0 --- /dev/null +++ b/plonky2/src/hash/poseidon2_bn254.rs @@ -0,0 +1,157 @@ +#[cfg(not(feature = "std"))] +use alloc::{vec, vec::Vec}; +use core::mem::size_of; + +use rust_bn254_hash::hash::Hash; +use crate::hash::hash_types::{BytesHash, RichField}; +use crate::hash::hashing::PlonkyPermutation; +use crate::plonk::config::Hasher; +use rust_bn254_hash::sponge::{sponge_u64_pad, sponge_u64_no_pad}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; + +pub const SPONGE_RATE: usize = 8; +pub const SPONGE_CAPACITY: usize = 4; +pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY; + +#[derive(Copy, Clone, Default, Debug, PartialEq)] +pub struct Poseidon2BN254Permutation { + state: [F; SPONGE_WIDTH], +} + +impl Eq for Poseidon2BN254Permutation {} + +impl AsRef<[F]> for Poseidon2BN254Permutation { + fn as_ref(&self) -> &[F] { + &self.state + } +} + +impl PlonkyPermutation for Poseidon2BN254Permutation { + const RATE: usize = SPONGE_RATE; + const WIDTH: usize = SPONGE_WIDTH; + + fn new>(elts: I) -> Self { + let mut perm = Self { + state: [F::default(); SPONGE_WIDTH], + }; + perm.set_from_iter(elts, 0); + perm + } + + fn set_elt(&mut self, elt: F, idx: usize) { + self.state[idx] = elt; + } + + fn set_from_slice(&mut self, elts: &[F], start_idx: usize) { + let begin = start_idx; + let end = start_idx + elts.len(); + self.state[begin..end].copy_from_slice(elts); + } + + fn set_from_iter>(&mut self, elts: I, start_idx: usize) { + for (s, e) in self.state[start_idx..].iter_mut().zip(elts) { + *s = e; + } + } + + fn permute(&mut self) { + // convert state of Goldilocks elems to u64 + let mut state_u64 = vec![0u64; SPONGE_WIDTH ]; + for i in 
0..SPONGE_WIDTH { + state_u64[i] + = self.state[i].to_canonical_u64(); + } + + // Create an iterator that repeatedly applies the sponge permutation. + let hash_onion = core::iter::repeat_with(|| { + // Compute the next hash layer. + let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64.clone()); + // Convert the sponge output to u64. + let output = felts_to_u64(hash); + // Update the state for the next iteration. + state_u64 = output.clone(); + output.into_iter() + }).flatten(); + + // Parse field elements from u64 stream, using rejection sampling such that words that don't + // fit in F are ignored. + let new_state: Vec = hash_onion + .filter(|&word| word < F::ORDER) + .map(F::from_canonical_u64) + .take(SPONGE_WIDTH) + .collect(); + // update the state + self.state = new_state.try_into().expect("State length mismatch"); + } + + fn squeeze(&self) -> &[F] { + &self.state[..Self::RATE] + } +} + +const N: usize = 32; +/// Keccak-256 hash function. +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub struct Poseidon2BN254; +impl Hasher for Poseidon2BN254 { + const HASH_SIZE: usize = N; + type Hash = BytesHash; + type Permutation = Poseidon2BN254Permutation; + + fn hash_no_pad(input: &[F]) -> Self::Hash { + let mut state_u64 = vec![0u64; input.len() ]; + for i in 0..input.len() { + state_u64[i] + = input[i].to_canonical_u64(); + } + let mut arr = [0; N]; + let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64); + let hash_bytes = felts_to_bytes(hash); + arr.copy_from_slice(&hash_bytes[..N]); + BytesHash(arr) + } + + fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash { + let mut input_bytes = vec![0; N * 2]; + input_bytes[0..N].copy_from_slice(&left.0); + input_bytes[N..].copy_from_slice(&right.0); + let mut arr = [0; N]; + let state_u64: Vec = input_bytes + .chunks_exact(8) + .map(|chunk| u64::from_be_bytes(chunk.try_into().unwrap())) + .collect(); + + let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64); + let hash_bytes = felts_to_bytes(hash); + arr.copy_from_slice(&hash_bytes[..N]); + BytesHash(arr) + } +} + +fn felts_to_bytes(f: E) -> Vec where + E: CanonicalSerialize +{ + let mut bytes = Vec::new(); + f.serialize_uncompressed(&mut bytes).expect("serialization failed"); + bytes +} + +fn bytes_to_felts(bytes: &[u8]) -> E where + E: CanonicalDeserialize +{ + let fr_res = E::deserialize_uncompressed(bytes).unwrap(); + fr_res +} + +fn felts_to_u64(f: E) -> Vec + where + E: CanonicalSerialize, +{ + let mut bytes = Vec::new(); + f.serialize_uncompressed(&mut bytes) + .expect("serialization failed"); + bytes + .chunks_exact(size_of::()) + .map(|chunk| u64::from_le_bytes(chunk.try_into().unwrap())) + .collect() +} diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs index 217c8897..741daeb7 100644 --- a/plonky2/src/plonk/config.rs +++ b/plonky2/src/plonk/config.rs @@ -19,6 +19,7 @@ use crate::field::goldilocks_field::GoldilocksField; use crate::hash::hash_types::{HashOut, RichField}; use crate::hash::hashing::PlonkyPermutation; use crate::hash::keccak::KeccakHash; +use crate::hash::poseidon2_bn254::Poseidon2BN254; use crate::hash::poseidon::PoseidonHash; use crate::iop::target::{BoolTarget, Target}; use crate::plonk::circuit_builder::CircuitBuilder; @@ -124,3 +125,13 @@ impl GenericConfig<2> for KeccakGoldilocksConfig { type Hasher = KeccakHash<25>; type InnerHasher = PoseidonHash; } + +/// Configuration using PoseidonBN254 over the Goldilocks field. 
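+/// The outer hasher (used for Merkle trees and the Fiat-Shamir transcript) is
+/// Poseidon2 over the BN254 scalar field, while the inner, in-circuit hasher
+/// remains Poseidon over Goldilocks.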
+#[derive(Debug, Copy, Clone, Default, Eq, PartialEq)] +pub struct Poseidon2BN254Config; +impl GenericConfig<2> for Poseidon2BN254Config { + type F = GoldilocksField; + type FE = QuadraticExtension; + type Hasher = Poseidon2BN254; + type InnerHasher = PoseidonHash; +} From 972c3c06453eb7b8e248186bf0a221eaf1d98065 Mon Sep 17 00:00:00 2001 From: M Alghazwi Date: Thu, 27 Feb 2025 10:46:39 +0100 Subject: [PATCH 2/5] impl duplex and hasher for GenericField --- plonky2/Cargo.toml | 2 + plonky2/examples/poseidon2_bn254_example.rs | 3 +- plonky2/src/fri/challenges.rs | 8 +- plonky2/src/fri/prover.rs | 42 +-- plonky2/src/hash/batch_merkle_tree.rs | 23 +- plonky2/src/hash/duplex.rs | 121 ++++++++ plonky2/src/hash/hash_types.rs | 71 ++++- plonky2/src/hash/keccak.rs | 63 +++- plonky2/src/hash/merkle_proofs.rs | 11 +- plonky2/src/hash/merkle_tree.rs | 16 +- plonky2/src/hash/mod.rs | 3 +- plonky2/src/hash/path_compression.rs | 5 +- plonky2/src/hash/poseidon.rs | 63 +++- plonky2/src/hash/poseidon2_bn254.rs | 324 +++++++++++++++----- plonky2/src/iop/challenger.rs | 86 ++---- plonky2/src/plonk/circuit_builder.rs | 7 +- plonky2/src/plonk/config.rs | 115 +++++-- plonky2/src/plonk/proof.rs | 8 +- plonky2/src/plonk/prover.rs | 5 +- 19 files changed, 728 insertions(+), 248 deletions(-) create mode 100644 plonky2/src/hash/duplex.rs diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml index 7d7de547..854a595e 100644 --- a/plonky2/Cargo.toml +++ b/plonky2/Cargo.toml @@ -37,6 +37,8 @@ strum = "0.26" strum_macros = "0.26" rust-bn254-hash = {git = "https://github.com/codex-storage/rust-bn254-hash.git"} ark-serialize = {version = "0.5.0"} +ark-bn254 = "0.5.0" +ark-ff = "0.5.0" # Local dependencies plonky2_field = { version = "1.0.0", path = "../field", default-features = false } diff --git a/plonky2/examples/poseidon2_bn254_example.rs b/plonky2/examples/poseidon2_bn254_example.rs index 9d408dee..a783eadf 100644 --- a/plonky2/examples/poseidon2_bn254_example.rs +++ b/plonky2/examples/poseidon2_bn254_example.rs @@ -56,9 +56,9 @@ fn main() -> Result<()> { // fs::write("fibonacci_common_k.json" , common_circuit_data_serialized) .expect("Unable to write file"); // fs::write("fibonacci_vkey_k.json" , verifier_only_circuit_data_serialized).expect("Unable to write file"); // fs::write("fibonacci_proof_k.json" , proof_serialized) .expect("Unable to write file"); - // println!("const sigma: {:?}", data.verifier_only.constants_sigmas_cap); // println!("circ digest: {:?}", data.verifier_only.circuit_digest); + // println!("proof part: {:?}", proof.proof.wires_cap.0); println!( "100th Fibonacci number mod |F| (starting with {}, {}) is: {}", @@ -69,5 +69,4 @@ fn main() -> Result<()> { print_hash_statistics: HashStatisticsPrintLevel::Summary, }; data.verify_with_options(proof, &verifier_opts) - } diff --git a/plonky2/src/fri/challenges.rs b/plonky2/src/fri/challenges.rs index b261a55e..2a69b656 100644 --- a/plonky2/src/fri/challenges.rs +++ b/plonky2/src/fri/challenges.rs @@ -13,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap; use crate::iop::challenger::{Challenger, RecursiveChallenger}; use crate::iop::target::Target; use crate::plonk::circuit_builder::CircuitBuilder; -use crate::plonk::config::{AlgebraicHasher, GenericConfig, Hasher}; +use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, Hasher}; impl> Challenger { pub fn observe_openings(&mut self, openings: &FriOpenings) @@ -57,8 +57,10 @@ impl> Challenger { if let Some(step_count) = max_num_query_steps { let cap_len = (1 << config.cap_height) * 
NUM_HASH_OUT_ELTS; let zero_cap = vec![F::ZERO; cap_len]; + let zero_cap_felts: Vec> = zero_cap.into_iter().map(GenericField::Goldilocks).collect(); for _ in commit_phase_merkle_caps.len()..step_count { - self.observe_elements(&zero_cap); + // self.observe_elements(&zero_cap); + self.observe_elements(&zero_cap_felts); self.get_extension_challenge::(); } } @@ -73,7 +75,7 @@ impl> Challenger { } } - self.observe_element(pow_witness); + self.observe_element(GenericField::Goldilocks(pow_witness)); let fri_pow_response = self.get_challenge(); let fri_query_indices = (0..num_fri_queries) diff --git a/plonky2/src/fri/prover.rs b/plonky2/src/fri/prover.rs index d478ae5d..916f4fca 100644 --- a/plonky2/src/fri/prover.rs +++ b/plonky2/src/fri/prover.rs @@ -14,7 +14,7 @@ use crate::hash::hash_types::{RichField, NUM_HASH_OUT_ELTS}; use crate::hash::hashing::*; use crate::hash::merkle_tree::MerkleTree; use crate::iop::challenger::Challenger; -use crate::plonk::config::GenericConfig; +use crate::plonk::config::{GenericConfig, GenericField}; use crate::plonk::plonk_common::reduce_with_powers; use crate::plonk::prover::ProverOptions; use crate::plonk::verifier::HashStatisticsPrintLevel; @@ -136,8 +136,9 @@ fn fri_committed_trees, C: GenericConfig, if let Some(step_count) = max_num_query_steps { let cap_len = (1 << fri_params.config.cap_height) * NUM_HASH_OUT_ELTS; let zero_cap = vec![F::ZERO; cap_len]; + let zero_cap_felts: Vec> = zero_cap.into_iter().map(GenericField::Goldilocks).collect(); for _ in fri_params.reduction_arity_bits.len()..step_count { - challenger.observe_elements(&zero_cap); + challenger.observe_elements(&zero_cap_felts); challenger.get_extension_challenge::(); } } @@ -171,45 +172,12 @@ pub(crate) fn fri_proof_of_work< ) -> F { let min_leading_zeros = config.proof_of_work_bits + (64 - F::order().bits()) as u32; - // The easiest implementation would be repeatedly clone our Challenger. With each clone, we'd - // observe an incrementing PoW witness, then get the PoW response. If it contained sufficient - // leading zeros, we'd end the search, and store this clone as our new challenger. - // - // However, performance is critical here. We want to avoid cloning Challenger, particularly - // since it stores vectors, which means allocations. We'd like a more compact state to clone. - // - // We know that a duplex will be performed right after we send the PoW witness, so we can ignore - // any output_buffer, which will be invalidated. We also know - // input_buffer.len() < H::Permutation::WIDTH, an invariant of Challenger. - // - // We separate the duplex operation into two steps, one which can be performed now, and the - // other which depends on the PoW witness candidate. The first step is the overwrite our sponge - // state with any inputs (excluding the PoW witness candidate). The second step is to overwrite - // one more element of our sponge state with the candidate, then apply the permutation, - // obtaining our duplex's post-state which contains the PoW response. 
- let mut duplex_intermediate_state = challenger.sponge_state; - let witness_input_pos = challenger.input_buffer.len(); - duplex_intermediate_state.set_from_iter(challenger.input_buffer.clone(), 0); - - // println!("duplex_intermediate_state = {:?}", duplex_intermediate_state); - - let pow_witness = (0..=F::NEG_ONE.to_canonical_u64()) - .into_par_iter() - .find_any(|&candidate| { - let mut duplex_state = duplex_intermediate_state; - duplex_state.set_elt(F::from_canonical_u64(candidate), witness_input_pos); - duplex_state.permute(); - let pow_response = duplex_state.squeeze().iter().last().unwrap(); - let leading_zeros = pow_response.to_canonical_u64().leading_zeros(); - leading_zeros >= min_leading_zeros - }) - .map(F::from_canonical_u64) - .expect("Proof of work failed. This is highly unlikely!"); + let pow_witness = challenger.grind(min_leading_zeros); // println!("pow_witness = {:?}",pow_witness); // Recompute pow_response using our normal Challenger code, and make sure it matches. - challenger.observe_element(pow_witness); + challenger.observe_element(GenericField::Goldilocks(pow_witness)); let pow_response = challenger.get_challenge(); let leading_zeros = pow_response.to_canonical_u64().leading_zeros(); assert!(leading_zeros >= min_leading_zeros); diff --git a/plonky2/src/hash/batch_merkle_tree.rs b/plonky2/src/hash/batch_merkle_tree.rs index eaa49977..64f85258 100644 --- a/plonky2/src/hash/batch_merkle_tree.rs +++ b/plonky2/src/hash/batch_merkle_tree.rs @@ -10,7 +10,7 @@ use crate::hash::merkle_proofs::MerkleProof; use crate::hash::merkle_tree::{ capacity_up_to_mut, fill_digests_buf, merkle_tree_prove, MerkleCap, }; -use crate::plonk::config::{GenericHashOut, Hasher}; +use crate::plonk::config::{GenericField, GenericHashOut, Hasher}; use crate::util::log2_strict; #[derive(Clone, Debug, Default, Eq, PartialEq)] @@ -56,9 +56,20 @@ impl> BatchMerkleTree { let mut digests_buf_pos = 0; let mut cap = vec![]; - let dummy_leaves = vec![vec![F::ZERO]; 1 << cap_height]; - leaves.push(dummy_leaves); - for window in leaves.windows(2) { + let dummy_leaves_felts = vec![vec![GenericField::Goldilocks(F::ZERO)]; 1 << cap_height]; + let mut leaves_felts: Vec>>> = leaves.clone().into_iter() + .map(|matrix| { + matrix.into_iter() + .map(|vec| { + vec.into_iter() + .map(|f| GenericField::Goldilocks(f)) + .collect() + }) + .collect() + }) + .collect(); + leaves_felts.push(dummy_leaves_felts); + for window in leaves_felts.windows(2) { let cur = &window[0]; let next = &window[1]; @@ -82,7 +93,7 @@ impl> BatchMerkleTree { ); } else { // The rest leaf layers - let new_leaves: Vec> = cap + let new_leaves: Vec>> = cap .iter() .enumerate() .map(|(i, cap_hash)| { @@ -118,8 +129,6 @@ impl> BatchMerkleTree { digests.set_len(num_digests); } - // remove dummy leaves - leaves.pop(); Self { leaves, diff --git a/plonky2/src/hash/duplex.rs b/plonky2/src/hash/duplex.rs new file mode 100644 index 00000000..17f9bef1 --- /dev/null +++ b/plonky2/src/hash/duplex.rs @@ -0,0 +1,121 @@ +use plonky2_maybe_rayon::ParallelIterator; +use plonky2_maybe_rayon::rayon::iter::IntoParallelIterator; +use crate::hash::hash_types::RichField; +use crate::hash::hashing::PlonkyPermutation; +use crate::plonk::config::{GenericField, Hasher, HasherField}; +use plonky2_field::types::PrimeField64; + +#[derive(Debug, Clone)] +pub enum DuplexState> { + Absorbing { + state: H::Permutation, + buf: Vec>, // Buffer for absorbing inputs. + }, + Squeezing { + state: H::Permutation, + buf: Vec, // Buffer holding squeezed outputs. 
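+        // Note: challenges are always squeezed as Goldilocks (`F`) elements, even
+        // when the permutation runs over BN254; see `Hasher::squeeze_goldilocks`.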
+ }, +} + +impl> DuplexState { + /// creates a new duplex state in absorbing mode with an initial zero state. + pub fn new() -> Self { + DuplexState::Absorbing { + state: H::Permutation::new(core::iter::repeat(H::HF::get_zero())), + buf: Vec::new(), + } + } + + /// absorb a generic field element. + /// In absorbing mode: the element is appended to the buffer. + /// In squeezing mode: we trash any current outputs and switch back to absorbing. + pub fn absorb(&mut self, element: GenericField) { + match self { + DuplexState::Absorbing { buf, .. } => { + buf.push(element); + } + DuplexState::Squeezing { state, .. } => { + let mut buf = Vec::new(); + buf.push(element); + *self = DuplexState::Absorbing { + state: state.clone(), + buf, + }; + } + } + } + + /// Squeeze out a single challenge element (Goldilocks field elements) + /// In absorbing mode: the buffer elements are absorbed by calling the sponge + /// and switching the state to `Squeezing` and filling the output buffer with Goldilocks elems + /// In squeezing mode: we take elements from the buffer, if buffer is empty we permute and fill the buffer. + pub fn squeeze(&mut self) -> F { + match self { + DuplexState::Absorbing { state, buf, .. } => { + let input: Vec> = buf.drain(..).collect(); + H::sponge(state, input); + let out_buf: Vec = Self::squeeze_f(state); + // switch + *self = DuplexState::Squeezing { + state: state.clone(), + buf: out_buf, + }; + // fall back to squeezing. + self.squeeze() + } + DuplexState::Squeezing { state, buf, .. } => { + if buf.is_empty() { + // If the buffer is empty, permute to refill it. + state.permute(); + *buf = Self::squeeze_f(state); + } + let e = buf.pop().expect("Output buffer should not be empty"); + e + } + } + } + + /// squeeze out goldilocks field elements from the state + fn squeeze_f(state: &mut H::Permutation) -> Vec{ + let out = H::squeeze_goldilocks(state); + assert!(out.len()>0); + out + } + + /// grind moved here from the FRI prover + /// it handles both modes (`Absorbing` and `Squeezing`) + pub fn grind(&mut self, min_leading_zeros: u32) -> F { + match self { + DuplexState::Absorbing { state, buf, .. } => { + + let duplex_intermediate_state = state.clone(); + let buf_felts: Vec> = buf.clone(); + + Self::grind_helper(duplex_intermediate_state, buf_felts, min_leading_zeros) + } + DuplexState::Squeezing { state, .. } => { + let duplex_intermediate_state = state.clone(); + let buf_felts = vec![]; + Self::grind_helper(duplex_intermediate_state, buf_felts, min_leading_zeros) + } + } + } + + fn grind_helper(state: H::Permutation, input: Vec>, min_leading_zeros: u32) -> F { + let pow_witness = (0..=F::NEG_ONE.to_canonical_u64()) + .into_par_iter() + .find_any(|&candidate| { + let mut duplex_state = state.clone(); + let mut sponge_input = input.clone(); + sponge_input.push(GenericField::Goldilocks(F::from_canonical_u64(candidate))); + H::sponge(&mut duplex_state, sponge_input); + let temp_buf = Self::squeeze_f(&mut duplex_state); + let pow_response = temp_buf.iter().last().unwrap(); + let leading_zeros = PrimeField64::to_canonical_u64(pow_response).leading_zeros(); + leading_zeros >= min_leading_zeros + }) + .map(F::from_canonical_u64) + .expect("Proof of work failed. 
This is highly unlikely!"); + pow_witness + } +} \ No newline at end of file diff --git a/plonky2/src/hash/hash_types.rs b/plonky2/src/hash/hash_types.rs index 9f9d976b..6ca645ba 100644 --- a/plonky2/src/hash/hash_types.rs +++ b/plonky2/src/hash/hash_types.rs @@ -10,13 +10,67 @@ use crate::field::goldilocks_field::GoldilocksField; use crate::field::types::{Field, PrimeField64, Sample}; use crate::hash::poseidon::Poseidon; use crate::iop::target::Target; -use crate::plonk::config::GenericHashOut; +use crate::plonk::config::{GenericField, GenericHashOut}; +use ark_bn254::Fr as BN254Fr; +use crate::hash::poseidon2_bn254::{bytes_to_felts, felts_to_bytes}; /// A prime order field with the features we need to use it as a base field in our argument system. pub trait RichField: PrimeField64 + Poseidon {} impl RichField for GoldilocksField {} +/// Hash digest for the BN254 field, contains single Fr element +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] +pub struct BN254HashOut{ + pub element: BN254Fr +} + +/// Serialize the bn254 field , uses Arkworks +impl Serialize for BN254HashOut { + fn serialize < S > ( & self, serializer: S) -> Result < S::Ok, S::Error > + where S: Serializer { + + let element_to_bytes = felts_to_bytes(&self.element); + serializer.serialize_bytes( & element_to_bytes) + } +} +/// Deserialize the bn254 field , uses Arkworks +impl<'de> Deserialize<'de> for BN254HashOut { + fn deserialize< + D>(deserializer: D) -> Result< Self, + D::Error> + where D: Deserializer<'de> { + let element_as_bytes = < [u8; 32] >::deserialize(deserializer) ?; + let mut element_array = < [u8; 32] >::default(); + element_array.copy_from_slice( & element_as_bytes[0..32]); + + let deserialized_element = bytes_to_felts(&element_array); + + Ok( Self { + element: deserialized_element, + }) + } +} + +/// implement GenericHashOut for the BN254 hash digest +/// `F` here is the goldilocks not the BN254 field +impl GenericHashOut for BN254HashOut { + fn to_bytes(&self) -> Vec { + felts_to_bytes(&self.element) + } + + fn from_bytes(bytes: &[u8]) -> Self { + assert_eq!(bytes.len(), 32); + BN254HashOut{ + element: bytes_to_felts(bytes) + } + } + + fn to_vec(&self) -> Vec> { + vec![GenericField::BN254(self.element.clone())] + } +} + pub const NUM_HASH_OUT_ELTS: usize = 4; /// Represents a ~256 bit hash output. @@ -103,8 +157,12 @@ impl GenericHashOut for HashOut { } } - fn to_vec(&self) -> Vec { - self.elements.to_vec() + fn to_vec(&self) -> Vec> { + self.elements + .iter() + .copied() + .map(GenericField::::Goldilocks) + .collect() } } @@ -181,14 +239,15 @@ impl GenericHashOut for BytesHash { Self(bytes.try_into().unwrap()) } - fn to_vec(&self) -> Vec { + fn to_vec(&self) -> Vec> { self.0 // Chunks of 7 bytes since 8 bytes would allow collisions. 
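+            // (A 7-byte chunk is < 2^56, which is below the Goldilocks order
+            // 2^64 - 2^32 + 1, so `from_canonical_u64` cannot collide here.)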
.chunks(7) .map(|bytes| { - let mut arr = [0; 8]; + let mut arr = [0u8; 8]; arr[..bytes.len()].copy_from_slice(bytes); - F::from_canonical_u64(u64::from_le_bytes(arr)) + let raw = F::from_canonical_u64(u64::from_le_bytes(arr)); + GenericField::::Goldilocks(raw) }) .collect() } diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs index d3fa8c4b..0ab9ee20 100644 --- a/plonky2/src/hash/keccak.rs +++ b/plonky2/src/hash/keccak.rs @@ -7,7 +7,7 @@ use keccak_hash::keccak; use crate::hash::hash_types::{BytesHash, RichField}; use crate::hash::hashing::PlonkyPermutation; -use crate::plonk::config::Hasher; +use crate::plonk::config::{GenericField, GenericHashOut, Hasher}; use crate::util::serialization::Write; pub const SPONGE_RATE: usize = 8; @@ -102,19 +102,76 @@ impl PlonkyPermutation for KeccakPermutation { #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct KeccakHash; impl Hasher for KeccakHash { + type HF = F; const HASH_SIZE: usize = N; type Hash = BytesHash; type Permutation = KeccakPermutation; - fn hash_no_pad(input: &[F]) -> Self::Hash { + fn hash_pad(input: &[GenericField]) -> Self::Hash { + let mut padded_input = input.to_vec(); + padded_input.push(GenericField::Goldilocks(F::ONE)); + while (padded_input.len() + 1) % Self::Permutation::RATE != 0 { + padded_input.push(GenericField::Goldilocks(F::ZERO)); + } + padded_input.push(GenericField::Goldilocks(F::ONE)); + Self::hash_no_pad(&padded_input) + } + + fn hash_or_noop(inputs: &[GenericField]) -> Self::Hash { + let hash_size = 4 * 8; + if inputs.len() * 8 <= hash_size { + let mut inputs_bytes = vec![0u8; hash_size]; + for i in 0..inputs.len() { + let goldilocks_felt = match inputs[i].clone() { + GenericField::Goldilocks(v) => { v } + GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")} + }; + inputs_bytes[i * 8..(i + 1) * 8] + .copy_from_slice(&goldilocks_felt.to_canonical_u64().to_le_bytes()); + } + as GenericHashOut>::from_bytes(&inputs_bytes) + } else { + Self::hash_no_pad(inputs) + } + } + + fn hash_no_pad(input: &[GenericField]) -> Self::Hash { let mut buffer = Vec::with_capacity(input.len()); - buffer.write_field_vec(input).unwrap(); + let mut goldilocks_felts = vec![]; + for e in input { + // only accept goldilocks (for now!) + match e { + GenericField::Goldilocks(v) => {goldilocks_felts.push(*v)} + GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")} + } + } + + buffer.write_field_vec(&goldilocks_felts).unwrap(); let mut arr = [0; N]; let hash_bytes = keccak(buffer).0; arr.copy_from_slice(&hash_bytes[..N]); BytesHash(arr) } + fn sponge(state: &mut Self::Permutation, input: Vec>) { + let mut goldilocks_felts = vec![]; + for e in input { + //only accept goldilocks (for now!) 
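+            // The chunk loop below absorbs in overwrite mode: each RATE-sized
+            // chunk overwrites the front of the state before permuting.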
+ match e { + GenericField::Goldilocks(v) => {goldilocks_felts.push(v)} + GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")} + } + } + for chunk in goldilocks_felts.chunks(Self::Permutation::RATE) { + state.set_from_slice(chunk, 0); + state.permute(); + } + } + + fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec { + state.squeeze().to_vec() + } + fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash { let mut v = vec![0; N * 2]; v[0..N].copy_from_slice(&left.0); diff --git a/plonky2/src/hash/merkle_proofs.rs b/plonky2/src/hash/merkle_proofs.rs index cb9b0b21..4bbfb2a8 100644 --- a/plonky2/src/hash/merkle_proofs.rs +++ b/plonky2/src/hash/merkle_proofs.rs @@ -13,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap; use crate::iop::target::{BoolTarget, Target}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::VerifierCircuitTarget; -use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher}; +use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher}; #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] #[serde(bound = "")] @@ -77,7 +77,12 @@ pub fn verify_batch_merkle_proof_to_cap>( proof: &MerkleProof, ) -> Result<()> { assert_eq!(leaf_data.len(), leaf_heights.len()); - let mut current_digest = H::hash_or_noop(&leaf_data[0]); + let leaf_data_felts: Vec>> = leaf_data.into_iter() + .map(|inner| { + inner.into_iter().map(|f| GenericField::Goldilocks(f.clone())).collect() + }) + .collect(); + let mut current_digest = H::hash_or_noop(&leaf_data_felts[0]); let mut current_height = leaf_heights[0]; let mut leaf_data_index = 1; for &sibling_digest in &proof.siblings { @@ -92,7 +97,7 @@ pub fn verify_batch_merkle_proof_to_cap>( if leaf_data_index < leaf_heights.len() && current_height == leaf_heights[leaf_data_index] { let mut new_leaves = current_digest.to_vec(); - new_leaves.extend_from_slice(&leaf_data[leaf_data_index]); + new_leaves.extend_from_slice(&leaf_data_felts[leaf_data_index]); current_digest = H::hash_or_noop(&new_leaves); leaf_data_index += 1; } diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs index 31bcf5e3..12cf1e39 100644 --- a/plonky2/src/hash/merkle_tree.rs +++ b/plonky2/src/hash/merkle_tree.rs @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; use crate::hash::hash_types::RichField; use crate::hash::merkle_proofs::MerkleProof; -use crate::plonk::config::{GenericHashOut, Hasher}; +use crate::plonk::config::{GenericField, GenericHashOut, Hasher}; use crate::util::log2_strict; /// The Merkle cap of height `h` of a Merkle tree is the `h`-th layer (from the root) of the tree. @@ -37,7 +37,7 @@ impl> MerkleCap { log2_strict(self.len()) } - pub fn flatten(&self) -> Vec { + pub fn flatten(&self) -> Vec> { self.0.iter().flat_map(|&h| h.to_vec()).collect() } } @@ -85,7 +85,7 @@ pub(crate) fn capacity_up_to_mut(v: &mut Vec, len: usize) -> &mut [MaybeUn pub(crate) fn fill_subtree>( digests_buf: &mut [MaybeUninit], - leaves: &[Vec], + leaves: &[Vec>], ) -> H::Hash { assert_eq!(leaves.len(), digests_buf.len() / 2 + 1); if digests_buf.is_empty() { @@ -115,7 +115,7 @@ pub(crate) fn fill_subtree>( pub(crate) fn fill_digests_buf>( digests_buf: &mut [MaybeUninit], cap_buf: &mut [MaybeUninit], - leaves: &[Vec], + leaves: &[Vec>], cap_height: usize, ) { // Special case of a tree that's all cap. 
The usual case will panic because we'll try to split @@ -207,8 +207,12 @@ impl> MerkleTree { let digests_buf = capacity_up_to_mut(&mut digests, num_digests); let cap_buf = capacity_up_to_mut(&mut cap, len_cap); - fill_digests_buf::(digests_buf, cap_buf, &leaves[..], cap_height); - + let leaves_felts: Vec>> = leaves.clone().into_iter() + .map(|inner| { + inner.into_iter().map(|f| GenericField::Goldilocks(f)).collect() + }) + .collect(); + fill_digests_buf::(digests_buf, cap_buf, &leaves_felts[..], cap_height); unsafe { // SAFETY: `fill_digests_buf` and `cap` initialized the spare capacity up to // `num_digests` and `len_cap`, resp. diff --git a/plonky2/src/hash/mod.rs b/plonky2/src/hash/mod.rs index d20f0250..3d0b3a57 100644 --- a/plonky2/src/hash/mod.rs +++ b/plonky2/src/hash/mod.rs @@ -11,4 +11,5 @@ pub mod merkle_tree; pub mod path_compression; pub mod poseidon; pub mod poseidon_goldilocks; -pub mod poseidon2_bn254; +pub mod duplex; +pub mod poseidon2_bn254; \ No newline at end of file diff --git a/plonky2/src/hash/path_compression.rs b/plonky2/src/hash/path_compression.rs index 517576bf..18fe470c 100644 --- a/plonky2/src/hash/path_compression.rs +++ b/plonky2/src/hash/path_compression.rs @@ -6,7 +6,7 @@ use num::Integer; use crate::hash::hash_types::RichField; use crate::hash::merkle_proofs::MerkleProof; -use crate::plonk::config::Hasher; +use crate::plonk::config::{GenericField, Hasher}; /// Compress multiple Merkle proofs on the same tree by removing redundancy in the Merkle paths. pub(crate) fn compress_merkle_proofs>( @@ -68,7 +68,8 @@ pub(crate) fn decompress_merkle_proofs>( for (&i, v) in leaves_indices.iter().zip(leaves_data) { // Observe the leaves. - seen.insert(i + num_leaves, H::hash_or_noop(v)); + let v_felts: Vec> = v.clone().into_iter().map(GenericField::Goldilocks).collect(); + seen.insert(i + num_leaves, H::hash_or_noop(&v_felts)); } // Iterators over the siblings. diff --git a/plonky2/src/hash/poseidon.rs b/plonky2/src/hash/poseidon.rs index aa7936ee..51455695 100644 --- a/plonky2/src/hash/poseidon.rs +++ b/plonky2/src/hash/poseidon.rs @@ -19,7 +19,7 @@ use crate::hash::hashing::{HashUsage, increment_given_hash_counter}; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::{BoolTarget, Target}; use crate::plonk::circuit_builder::CircuitBuilder; -use crate::plonk::config::{AlgebraicHasher, Hasher}; +use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher}; pub const SPONGE_RATE: usize = 8; pub const SPONGE_CAPACITY: usize = 4; @@ -875,17 +875,74 @@ impl PlonkyPermutation< #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct PoseidonHash; impl Hasher for PoseidonHash { + type HF = F; const HASH_SIZE: usize = 4 * 8; type Hash = HashOut; type Permutation = PoseidonPermutation; - fn hash_no_pad(input: &[F]) -> Self::Hash { - hash_n_to_hash_no_pad::(input) + fn hash_no_pad(input: &[GenericField]) -> Self::Hash { + let mut goldilocks_felts = vec![]; + for e in input { + // for goldilocks only accept goldilocks (for now!) 
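+            // Unwrap the enum; the collected Goldilocks values are then hashed
+            // with the standard Poseidon sponge below.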
+ match e { + GenericField::Goldilocks(v) => {goldilocks_felts.push(*v)} + GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")} + } + } + hash_n_to_hash_no_pad::(&goldilocks_felts) + } + + fn hash_pad(input: &[GenericField]) -> Self::Hash { + let mut padded_input = input.to_vec(); + padded_input.push(GenericField::Goldilocks(F::ONE)); + while (padded_input.len() + 1) % Self::Permutation::RATE != 0 { + padded_input.push(GenericField::Goldilocks(F::ZERO)); + } + padded_input.push(GenericField::Goldilocks(F::ONE)); + Self::hash_no_pad(&padded_input) + } + + fn hash_or_noop(inputs: &[GenericField]) -> Self::Hash { + let hash_size = 4 * 8; + if inputs.len() * 8 <= hash_size { + let mut inputs_bytes = vec![0u8; hash_size]; + for i in 0..inputs.len() { + // only accept goldilocks (for now!) + let goldilocks_felt = match inputs[i].clone() { + GenericField::Goldilocks(v) => { v } + GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")} + }; + inputs_bytes[i * 8..(i + 1) * 8] + .copy_from_slice(&goldilocks_felt.to_canonical_u64().to_le_bytes()); + } + Self::Hash::from_bytes(&inputs_bytes) + } else { + Self::hash_no_pad(inputs) + } + } + + fn sponge(state: &mut Self::Permutation, input: Vec>) { + let mut goldilocks_felts = vec![]; + for e in input { + // only accept goldilocks (for now!) + match e { + GenericField::Goldilocks(v) => {goldilocks_felts.push(v)} + GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")} + } + } + for chunk in goldilocks_felts.chunks(Self::Permutation::RATE) { + state.set_from_slice(chunk, 0); + state.permute(); + } } fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash { compress::(left, right) } + + fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec { + state.squeeze().to_vec() + } } impl AlgebraicHasher for PoseidonHash { diff --git a/plonky2/src/hash/poseidon2_bn254.rs b/plonky2/src/hash/poseidon2_bn254.rs index 6d9b66b0..b2b60175 100644 --- a/plonky2/src/hash/poseidon2_bn254.rs +++ b/plonky2/src/hash/poseidon2_bn254.rs @@ -1,134 +1,308 @@ #[cfg(not(feature = "std"))] use alloc::{vec, vec::Vec}; +use core::fmt::Debug; use core::mem::size_of; -use rust_bn254_hash::hash::Hash; -use crate::hash::hash_types::{BytesHash, RichField}; +use crate::hash::hash_types::{BN254HashOut, RichField}; use crate::hash::hashing::PlonkyPermutation; -use crate::plonk::config::Hasher; -use rust_bn254_hash::sponge::{sponge_u64_pad, sponge_u64_no_pad}; +use crate::plonk::config::{GenericField, Hasher}; +use rust_bn254_hash::state::State; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_bn254::{Fr as BN254Fr}; +use rust_bn254_hash::poseidon2::permutation::permute_inplace as permute_bn254_inplace; +use ark_ff::{BigInt, PrimeField, Zero}; +use rust_bn254_hash::hash::Hash; +use rust_bn254_hash::sponge::{sponge_felts_no_pad, sponge_felts_pad}; -pub const SPONGE_RATE: usize = 8; -pub const SPONGE_CAPACITY: usize = 4; +pub const SPONGE_RATE: usize = 2; +pub const SPONGE_CAPACITY: usize = 1; pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY; -#[derive(Copy, Clone, Default, Debug, PartialEq)] -pub struct Poseidon2BN254Permutation { - state: [F; SPONGE_WIDTH], +/// Poseidon2 state with BN254 elements +#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)] +pub struct Poseidon2BN254Perm { + state: [BN254Fr; SPONGE_WIDTH], } -impl Eq for Poseidon2BN254Permutation {} - -impl AsRef<[F]> for Poseidon2BN254Permutation { - fn as_ref(&self) -> &[F] { +/// needed 
for PlonkyPermutation +impl AsRef<[BN254Fr]> for Poseidon2BN254Perm { + fn as_ref(&self) -> &[BN254Fr] { &self.state } } -impl PlonkyPermutation for Poseidon2BN254Permutation { +impl PlonkyPermutation for Poseidon2BN254Perm { const RATE: usize = SPONGE_RATE; const WIDTH: usize = SPONGE_WIDTH; - fn new>(elts: I) -> Self { + fn new>(elts: I) -> Self { let mut perm = Self { - state: [F::default(); SPONGE_WIDTH], + state: [BN254Fr::default(); SPONGE_WIDTH], }; perm.set_from_iter(elts, 0); perm } - fn set_elt(&mut self, elt: F, idx: usize) { + fn set_elt(&mut self, elt: BN254Fr, idx: usize) { self.state[idx] = elt; } - fn set_from_slice(&mut self, elts: &[F], start_idx: usize) { + fn set_from_slice(&mut self, elts: &[BN254Fr], start_idx: usize) { let begin = start_idx; let end = start_idx + elts.len(); self.state[begin..end].copy_from_slice(elts); } - fn set_from_iter>(&mut self, elts: I, start_idx: usize) { + fn set_from_iter>(&mut self, elts: I, start_idx: usize) { for (s, e) in self.state[start_idx..].iter_mut().zip(elts) { *s = e; } } + /// calls the permutation in `rust-bn254-hash` + /// we can probably refactor the state and eliminate the conversion in this fn. fn permute(&mut self) { - // convert state of Goldilocks elems to u64 - let mut state_u64 = vec![0u64; SPONGE_WIDTH ]; - for i in 0..SPONGE_WIDTH { - state_u64[i] - = self.state[i].to_canonical_u64(); - } + let mut s = State{ + x: self.state[0].clone(), + y: self.state[1].clone(), + z: self.state[2].clone(), + }; - // Create an iterator that repeatedly applies the sponge permutation. - let hash_onion = core::iter::repeat_with(|| { - // Compute the next hash layer. - let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64.clone()); - // Convert the sponge output to u64. - let output = felts_to_u64(hash); - // Update the state for the next iteration. - state_u64 = output.clone(); - output.into_iter() - }).flatten(); + permute_bn254_inplace(&mut s); + + self.state = [ + s.x, + s.y, + s.z, + ]; - // Parse field elements from u64 stream, using rejection sampling such that words that don't - // fit in F are ignored. - let new_state: Vec = hash_onion - .filter(|&word| word < F::ORDER) - .map(F::from_canonical_u64) - .take(SPONGE_WIDTH) - .collect(); - // update the state - self.state = new_state.try_into().expect("State length mismatch"); } - fn squeeze(&self) -> &[F] { + fn squeeze(&self) -> &[BN254Fr] { &self.state[..Self::RATE] } + } -const N: usize = 32; -/// Keccak-256 hash function. 
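+/// Poseidon2 hash over the BN254 scalar field (width-3 sponge: rate 2, capacity 1).
+///
+/// Usage sketch (with the `GenericField` wrapper from `plonk::config`):
+/// `let digest = Poseidon2BN254::hash_no_pad(&[GenericField::Goldilocks(x)]);`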
+ + #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct Poseidon2BN254; impl Hasher for Poseidon2BN254 { - const HASH_SIZE: usize = N; - type Hash = BytesHash; - type Permutation = Poseidon2BN254Permutation; + type HF = BN254Fr; + const HASH_SIZE: usize = 32; + type Hash = BN254HashOut; + type Permutation = Poseidon2BN254Perm; - fn hash_no_pad(input: &[F]) -> Self::Hash { - let mut state_u64 = vec![0u64; input.len() ]; - for i in 0..input.len() { - state_u64[i] - = input[i].to_canonical_u64(); + fn hash_no_pad(input: &[GenericField]) -> Self::Hash { + let bn_felts = generic_field_to_bn(input); + let hash = sponge_felts_no_pad(Hash::Poseidon2, bn_felts); + BN254HashOut { + element: hash, } - let mut arr = [0; N]; - let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64); - let hash_bytes = felts_to_bytes(hash); - arr.copy_from_slice(&hash_bytes[..N]); - BytesHash(arr) + } + + fn hash_pad(input: &[GenericField]) -> Self::Hash { + let bn_felts = generic_field_to_bn(input); + let hash = sponge_felts_pad(Hash::Poseidon2, bn_felts); + BN254HashOut { + element: hash, + } + } + + fn hash_or_noop(inputs: &[GenericField]) -> Self::Hash { + let hash_size = 32; + if check_len_in_bytes(inputs) <= hash_size { + if inputs.len() == 1 { + // if there is one element and it is a BN field element return it. + if let GenericField::BN254(v) = inputs[0].clone() { + return BN254HashOut{element: v}; + } + } + } + // TODO: if we get 4 or less Goldilocks -> convert to BN return? + Self::hash_no_pad(inputs) + } + + + fn sponge(state: &mut Self::Permutation, input: Vec>) { + + let bn_felts = generic_field_to_bn(&input); + + // absorb in overwrite mode + for chunk in bn_felts.chunks(2) { + state.set_from_slice(chunk, 0); + state.permute(); + } + } fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash { - let mut input_bytes = vec![0; N * 2]; - input_bytes[0..N].copy_from_slice(&left.0); - input_bytes[N..].copy_from_slice(&right.0); - let mut arr = [0; N]; - let state_u64: Vec = input_bytes - .chunks_exact(8) - .map(|chunk| u64::from_be_bytes(chunk.try_into().unwrap())) - .collect(); - let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64); - let hash_bytes = felts_to_bytes(hash); - arr.copy_from_slice(&hash_bytes[..N]); - BytesHash(arr) + let mut perm = Self::Permutation::new(core::iter::repeat(BN254Fr::zero())); + perm.set_from_slice(&[left.element], 0); + perm.set_from_slice(&[right.element], 1); + + perm.permute(); + let out = perm.squeeze(); + + BN254HashOut { + element: out[0].clone(), + } + } + + fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec { + let bn_out = state.squeeze(); + let bn_bytes: Vec = bn_out.iter().flat_map(|e| felts_to_bytes(e)).collect(); + let goldilocks_felts: Vec = bytes_to_u64(&bn_bytes).iter().map(|e| F::from_canonical_u64(*e)).collect(); + assert!(goldilocks_felts.len()>0); + goldilocks_felts } } -fn felts_to_bytes(f: E) -> Vec where +// --------- Conversion helper functions --------------------- + +/// converts a vec of goldilocks to bn254 +/// takes 7 goldilocks and converts to 2 bn254 +fn goldilocks_to_bn(input: &Vec) -> Vec{ + let u64s: Vec = input.iter().map(|x| x.to_canonical_u64()).collect(); + let l = u64s.len(); + let m = l / 7; + let mut result = Vec::new(); + + for i in 0..m { + let group: [u64; 7] = u64s[7 * i..7 * (i + 1)].try_into().unwrap(); + let (a, b) = u64s_to_felts(group); + result.push(a); + result.push(b); + } + + let r = l - 7 * m; + if r > 0 { + let mut ws = [0u64; 7]; + for i in 0..r { + ws[i] = u64s[7 * m + i]; + } + let (a, b) = 
u64s_to_felts(ws); + // check that we don't push zero field elements + if a != BN254Fr::zero() { + result.push(a); + } + if b != BN254Fr::zero() { + result.push(b); + } + } + result +} + +const BIGINT_TWO_TO_64: BigInt<4> = BigInt( [0,1,0,0] ); +const BIGINT_TWO_TO_128: BigInt<4> = BigInt( [0,0,1,0] ); +const BIGINT_TWO_TO_192: BigInt<4> = BigInt( [0,0,0,1] ); + +/// converts u64 to BN254 - taken directly from: rust-bn254-hash +pub fn u64s_to_felts(ws: [u64; 7]) -> (BN254Fr, BN254Fr) { + let hi = ws[6] >> 32; + let lo = ws[6] & 0xFFFF_FFFF; + + let field_powers_of_two_to_64: [BN254Fr;3] = + [ + BN254Fr::from_bigint(BIGINT_TWO_TO_64 ).unwrap(), + BN254Fr::from_bigint(BIGINT_TWO_TO_128).unwrap(), + BN254Fr::from_bigint(BIGINT_TWO_TO_192).unwrap() + ]; + + let x = BN254Fr::from(ws[0]) + + field_powers_of_two_to_64[0] * BN254Fr::from(ws[1]) + + field_powers_of_two_to_64[1] * BN254Fr::from(ws[2]) + + field_powers_of_two_to_64[2] * BN254Fr::from(lo); + + let y = BN254Fr::from(ws[3]) + + field_powers_of_two_to_64[0] * BN254Fr::from(ws[4]) + + field_powers_of_two_to_64[1] * BN254Fr::from(ws[5]) + + field_powers_of_two_to_64[2] * BN254Fr::from(hi); + + (x, y) +} + +/// converts a slice of bytes to 64 by taking 63 bits at a time +/// this makes it safe for conversion from bytes to Goldilocks field elems +/// this fn ignores any remaining bit that are less than 63 bits at the end +pub fn bytes_to_u64(x: &[u8]) -> Vec { + let total_bits = x.len() * 8; + let num_chunks = total_bits / 63; // ignore any leftover bits + let mut result = Vec::with_capacity(num_chunks); + + for i in 0..num_chunks { + let bit_offset = i * 63; + let first_byte = bit_offset / 8; + let shift = bit_offset % 8; + // how many bits do we need? We need (shift + 63) bits in total. + // convert that to bytes by rounding up. + let needed_bytes = ((shift + 63) + 7) / 8; + + if first_byte + needed_bytes > x.len() { + break; // break out if incomplete chunk + } + + let mut chunk: u128 = 0; + for j in 0..needed_bytes { + chunk |= (x[first_byte + j] as u128) << (8 * j); + } + // shift right with `shift` bits, then mask 63 bits. + let value = (chunk >> shift) & ((1u128 << 63) - 1); + result.push(value as u64); + } + + result +} + +/// helper function: converts a slice of GenericField into a Vec +/// the fn groups consecutive Goldilocks elements and converting them in one shot. +fn generic_field_to_bn(input: &[GenericField]) -> Vec { + let mut bn_felts = Vec::new(); + let mut temp_goldilocks = Vec::new(); + + for e in input.iter().copied() { + match e { + GenericField::Goldilocks(v) => { + // accumulate consecutive Goldilocks field elems. + temp_goldilocks.push(v); + } + GenericField::BN254(v) => { + // convert any accumulated Goldilocks elems. + if !temp_goldilocks.is_empty() { + let converted = goldilocks_to_bn(&temp_goldilocks); + bn_felts.extend(converted); + temp_goldilocks.clear(); + } + // push the BN field element directly. + bn_felts.push(v); + } + } + } + // convert any remaining Goldilocks elements. + if !temp_goldilocks.is_empty() { + let converted = goldilocks_to_bn(&temp_goldilocks); + bn_felts.extend(converted); + } + + bn_felts +} + +/// computes the length in bytes of a vector of generic field elements. 
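+/// (each BN254 element counts as 32 bytes, each Goldilocks element as 8)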
+fn check_len_in_bytes(input: &[GenericField]) -> usize{ + input.iter().map(|elem| { + match elem { + GenericField::BN254(_) => 32, + GenericField::Goldilocks(_) => 8, + } + }).sum() +} + + +//------------------ serialization for BN254 --------------------- + +pub fn felts_to_bytes(f: &E) -> Vec where E: CanonicalSerialize { let mut bytes = Vec::new(); @@ -136,14 +310,14 @@ fn felts_to_bytes(f: E) -> Vec where bytes } -fn bytes_to_felts(bytes: &[u8]) -> E where +pub fn bytes_to_felts(bytes: &[u8]) -> E where E: CanonicalDeserialize { let fr_res = E::deserialize_uncompressed(bytes).unwrap(); fr_res } -fn felts_to_u64(f: E) -> Vec +pub fn felts_to_u64(f: E) -> Vec where E: CanonicalSerialize, { diff --git a/plonky2/src/iop/challenger.rs b/plonky2/src/iop/challenger.rs index 27c827ab..4037f687 100644 --- a/plonky2/src/iop/challenger.rs +++ b/plonky2/src/iop/challenger.rs @@ -3,20 +3,20 @@ use alloc::{vec, vec::Vec}; use core::marker::PhantomData; use crate::field::extension::{Extendable, FieldExtension}; +use crate::hash::duplex::DuplexState; use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField}; use crate::hash::hashing::PlonkyPermutation; use crate::hash::merkle_tree::MerkleCap; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::Target; use crate::plonk::circuit_builder::CircuitBuilder; -use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher}; +use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher}; /// Observes prover messages, and generates challenges by hashing the transcript, a la Fiat-Shamir. #[derive(Clone, Debug)] -pub struct Challenger> { - pub(crate) sponge_state: H::Permutation, - pub(crate) input_buffer: Vec, - output_buffer: Vec, +pub struct Challenger> +{ + duplex_state: DuplexState } /// Observes prover messages, and generates verifier challenges based on the transcript. @@ -27,42 +27,36 @@ pub struct Challenger> { /// design, but it can be viewed as a duplex sponge whose inputs are sometimes zero (when we perform /// multiple squeezes) and whose outputs are sometimes ignored (when we perform multiple /// absorptions). Thus the security properties of a duplex sponge still apply to our design. -impl> Challenger { +impl> Challenger +{ pub fn new() -> Challenger { Challenger { - sponge_state: H::Permutation::new(core::iter::repeat(F::ZERO)), - input_buffer: Vec::with_capacity(H::Permutation::RATE), - output_buffer: Vec::with_capacity(H::Permutation::RATE), + duplex_state: DuplexState::::new(), } } - pub fn observe_element(&mut self, element: F) { - // Any buffered outputs are now invalid, since they wouldn't reflect this input. 
- self.output_buffer.clear(); - - self.input_buffer.push(element); - - if self.input_buffer.len() == H::Permutation::RATE { - self.duplexing(); - } + pub fn observe_element(&mut self, element: GenericField) { + self.duplex_state.absorb(element) } pub fn observe_extension_element(&mut self, element: &F::Extension) - where - F: RichField + Extendable, + where + F: RichField + Extendable, { - self.observe_elements(&element.to_basefield_array()); + let elements = element.to_basefield_array().map(|e: F|GenericField::::Goldilocks(e)); + + self.observe_elements(&elements); } - pub fn observe_elements(&mut self, elements: &[F]) { + pub fn observe_elements(&mut self, elements: &[GenericField]) { for &element in elements { self.observe_element(element); } } pub fn observe_extension_elements(&mut self, elements: &[F::Extension]) - where - F: RichField + Extendable, + where + F: RichField + Extendable, { for element in elements { self.observe_extension_element(element); @@ -80,15 +74,7 @@ impl> Challenger { } pub fn get_challenge(&mut self) -> F { - // If we have buffered inputs, we must perform a duplexing so that the challenge will - // reflect them. Or if we've run out of outputs, we must perform a duplexing to get more. - if !self.input_buffer.is_empty() || self.output_buffer.is_empty() { - self.duplexing(); - } - - self.output_buffer - .pop() - .expect("Output buffer should be non-empty") + self.duplex_state.squeeze() } pub fn get_n_challenges(&mut self, n: usize) -> Vec { @@ -107,8 +93,8 @@ impl> Challenger { } pub fn get_extension_challenge(&mut self) -> F::Extension - where - F: RichField + Extendable, + where + F: RichField + Extendable, { let mut arr = [F::ZERO; D]; arr.copy_from_slice(&self.get_n_challenges(D)); @@ -116,40 +102,18 @@ impl> Challenger { } pub fn get_n_extension_challenges(&mut self, n: usize) -> Vec - where - F: RichField + Extendable, + where + F: RichField + Extendable, { (0..n) .map(|_| self.get_extension_challenge::()) .collect() } - /// Absorb any buffered inputs. After calling this, the input buffer will be empty, and the - /// output buffer will be full. - fn duplexing(&mut self) { - assert!(self.input_buffer.len() <= H::Permutation::RATE); - - // Overwrite the first r elements with the inputs. This differs from a standard sponge, - // where we would xor or add in the inputs. This is a well-known variant, though, - // sometimes called "overwrite mode". - self.sponge_state - .set_from_iter(self.input_buffer.drain(..), 0); - - // Apply the permutation. 
- self.sponge_state.permute(); - - self.output_buffer.clear(); - self.output_buffer - .extend_from_slice(self.sponge_state.squeeze()); + pub fn grind(&mut self, min_leading_zeros: u32) -> F { + self.duplex_state.grind(min_leading_zeros) } - pub fn compact(&mut self) -> H::Permutation { - if !self.input_buffer.is_empty() { - self.duplexing(); - } - self.output_buffer.clear(); - self.sponge_state - } } impl> Default for Challenger { diff --git a/plonky2/src/plonk/circuit_builder.rs b/plonky2/src/plonk/circuit_builder.rs index f0c0c885..930ee3ae 100644 --- a/plonky2/src/plonk/circuit_builder.rs +++ b/plonky2/src/plonk/circuit_builder.rs @@ -44,7 +44,7 @@ use crate::plonk::circuit_data::{ CircuitConfig, CircuitData, CommonCircuitData, MockCircuitData, ProverCircuitData, ProverOnlyCircuitData, VerifierCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, }; -use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericHashOut, Hasher}; +use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, GenericHashOut, Hasher}; use crate::plonk::copy_constraint::CopyConstraint; use crate::plonk::permutation_argument::Forest; use crate::plonk::plonk_common::PlonkOracle; @@ -1256,13 +1256,14 @@ impl, const D: usize> CircuitBuilder { }; let constants_sigmas_cap = constants_sigmas_commitment.merkle_tree.cap.clone(); let domain_separator = self.domain_separator.unwrap_or_default(); - let domain_separator_digest = C::Hasher::hash_pad(&domain_separator); + let ds_felts: Vec> = domain_separator.clone().into_iter().map(GenericField::Goldilocks).collect(); + let domain_separator_digest = C::Hasher::hash_pad(&ds_felts); // TODO: This should also include an encoding of gate constraints. let circuit_digest_parts = [ constants_sigmas_cap.flatten(), domain_separator_digest.to_vec(), vec![ - F::from_canonical_usize(degree_bits), + GenericField::Goldilocks(F::from_canonical_usize(degree_bits)), /* Add other circuit data here */ ], ]; diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs index 741daeb7..77b2355e 100644 --- a/plonky2/src/plonk/config.rs +++ b/plonky2/src/plonk/config.rs @@ -8,7 +8,7 @@ #[cfg(not(feature = "std"))] use alloc::{vec, vec::Vec}; -use core::fmt::Debug; +use core::fmt::{Debug}; use serde::de::DeserializeOwned; use serde::Serialize; @@ -19,22 +19,86 @@ use crate::field::goldilocks_field::GoldilocksField; use crate::hash::hash_types::{HashOut, RichField}; use crate::hash::hashing::PlonkyPermutation; use crate::hash::keccak::KeccakHash; -use crate::hash::poseidon2_bn254::Poseidon2BN254; use crate::hash::poseidon::PoseidonHash; use crate::iop::target::{BoolTarget, Target}; use crate::plonk::circuit_builder::CircuitBuilder; +use ark_bn254::Fr as BN254Fr; +use ark_ff::{One, Zero}; +use crate::hash::poseidon2_bn254::{bytes_to_felts, felts_to_bytes, Poseidon2BN254}; pub trait GenericHashOut: - Copy + Clone + Debug + Eq + PartialEq + Send + Sync + Serialize + DeserializeOwned +Copy + Clone + Debug + Eq + PartialEq + Send + Sync + Serialize + DeserializeOwned { fn to_bytes(&self) -> Vec; fn from_bytes(bytes: &[u8]) -> Self; - fn to_vec(&self) -> Vec; + fn to_vec(&self) -> Vec>; +} + +/// generic field enum - supports only 2 fields for now +/// Supported fields: Goldilocks , BN254 Fr (from Arkworks) +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub enum GenericField { + Goldilocks(F), + BN254(BN254Fr), +} + +/// hasher field trait to cover fields in `GenericField` enum +pub trait HasherField: Default + Sized + Copy + Debug + Eq + PartialEq + Sync + Send { + fn 
get_one() -> Self; + fn get_zero() -> Self; + fn to_bytes(&self) -> Vec; + + fn from_bytes(b: &[u8]) -> Self; + +} + +/// BN254 as Hasherfield +impl HasherField for BN254Fr { + fn get_one() -> Self { + BN254Fr::one() + } + + fn get_zero() -> Self { + BN254Fr::zero() + } + + fn to_bytes(&self) -> Vec { + felts_to_bytes::(&self) + } + + fn from_bytes(b: &[u8]) -> Self { + bytes_to_felts::(b) + } +} + +/// RichField (Goldilocks) as Hasherfield +impl HasherField for T { + fn get_one() -> Self { + T::ONE + } + + fn get_zero() -> Self { + T::ZERO + } + + fn to_bytes(&self) -> Vec { + self.to_canonical_u64().to_le_bytes().to_vec() + } + + fn from_bytes(b: &[u8]) -> Self { + assert_eq!(b.len(), 8, "Input vector must have exactly 8 bytes"); + let arr: [u8; 8] = b.try_into().expect("Conversion to array failed"); + let element = u64::from_le_bytes(arr); + T::from_canonical_u64(element) + } } /// Trait for hash functions. pub trait Hasher: Sized + Copy + Debug + Eq + PartialEq { + + type HF: HasherField; + /// Size of `Hash` in bytes. const HASH_SIZE: usize; @@ -42,39 +106,27 @@ pub trait Hasher: Sized + Copy + Debug + Eq + PartialEq { type Hash: GenericHashOut; /// Permutation used in the sponge construction. - type Permutation: PlonkyPermutation; + type Permutation: PlonkyPermutation; /// Hash a message without any padding step. Note that this can enable length-extension attacks. /// However, it is still collision-resistant in cases where the input has a fixed length. - fn hash_no_pad(input: &[F]) -> Self::Hash; + fn hash_no_pad(input: &[GenericField]) -> Self::Hash; /// Pad the message using the `pad10*1` rule, then hash it. - fn hash_pad(input: &[F]) -> Self::Hash { - let mut padded_input = input.to_vec(); - padded_input.push(F::ONE); - while (padded_input.len() + 1) % Self::Permutation::RATE != 0 { - padded_input.push(F::ZERO); - } - padded_input.push(F::ONE); - Self::hash_no_pad(&padded_input) - } + fn hash_pad(input: &[GenericField]) -> Self::Hash; /// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a /// no-op. - fn hash_or_noop(inputs: &[F]) -> Self::Hash { - if inputs.len() * 8 <= Self::HASH_SIZE { - let mut inputs_bytes = vec![0u8; Self::HASH_SIZE]; - for i in 0..inputs.len() { - inputs_bytes[i * 8..(i + 1) * 8] - .copy_from_slice(&inputs[i].to_canonical_u64().to_le_bytes()); - } - Self::Hash::from_bytes(&inputs_bytes) - } else { - Self::hash_no_pad(inputs) - } - } + fn hash_or_noop(inputs: &[GenericField]) -> Self::Hash; + /// absorb the input into the given state + fn sponge(state: &mut Self::Permutation, input: Vec>); + + /// 2-to-1 compression fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash; + + /// squeeze out a vec of Goldilocks field elements (used for duplex/challenger) + fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec; } /// Trait for algebraic hash functions, built from a permutation using the sponge construction. @@ -88,16 +140,17 @@ pub trait AlgebraicHasher: Hasher> { swap: BoolTarget, builder: &mut CircuitBuilder, ) -> Self::AlgebraicPermutation - where - F: RichField + Extendable; + where + F: RichField + Extendable; } /// Generic configuration trait. pub trait GenericConfig: - Debug + Clone + Sync + Sized + Send + Eq + PartialEq +Debug + Clone + Sync + Sized + Send + Eq + PartialEq { /// Main field. type F: RichField + Extendable; + /// Field extension of degree D of the main field. type FE: FieldExtension; /// Hash function used for building Merkle trees. 
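+/// (it also drives the Fiat-Shamir transcript; challenges squeezed from it are
+/// always Goldilocks elements, whatever the hasher's native field)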
@@ -126,7 +179,7 @@ impl GenericConfig<2> for KeccakGoldilocksConfig { type InnerHasher = PoseidonHash; } -/// Configuration using PoseidonBN254 over the Goldilocks field. +/// Configuration using Poseidon2BN254 as hasher over the Goldilocks field. #[derive(Debug, Copy, Clone, Default, Eq, PartialEq)] pub struct Poseidon2BN254Config; impl GenericConfig<2> for Poseidon2BN254Config { diff --git a/plonky2/src/plonk/proof.rs b/plonky2/src/plonk/proof.rs index c76c3112..63b59ce6 100644 --- a/plonky2/src/plonk/proof.rs +++ b/plonky2/src/plonk/proof.rs @@ -25,7 +25,7 @@ use crate::hash::merkle_tree::MerkleCap; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::Target; use crate::plonk::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData}; -use crate::plonk::config::{GenericConfig, Hasher}; +use crate::plonk::config::{GenericConfig, GenericField, Hasher}; use crate::plonk::verifier::{verify_with_challenges, DEFAULT_VERIFIER_OPTIONS}; use crate::util::serialization::{Buffer, Read, Write}; @@ -104,7 +104,8 @@ impl, C: GenericConfig, const D: usize> pub fn get_public_inputs_hash( &self, ) -> <>::InnerHasher as Hasher>::Hash { - C::InnerHasher::hash_no_pad(&self.public_inputs) + let pi_felts: Vec> = self.public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect(); + C::InnerHasher::hash_no_pad(&pi_felts) } pub fn to_bytes(&self) -> Vec { @@ -234,7 +235,8 @@ impl, C: GenericConfig, const D: usize> pub(crate) fn get_public_inputs_hash( &self, ) -> <>::InnerHasher as Hasher>::Hash { - C::InnerHasher::hash_no_pad(&self.public_inputs) + let pi_felts: Vec> = self.public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect(); + C::InnerHasher::hash_no_pad(&pi_felts) } pub fn to_bytes(&self) -> Vec { diff --git a/plonky2/src/plonk/prover.rs b/plonky2/src/plonk/prover.rs index acc0f003..99a7ff8f 100644 --- a/plonky2/src/plonk/prover.rs +++ b/plonky2/src/plonk/prover.rs @@ -30,7 +30,7 @@ use crate::iop::target::Target; use crate::iop::witness::{MatrixWitness, PartialWitness, PartitionWitness, Witness, WitnessWrite}; use crate::plonk::circuit_builder::NUM_COINS_LOOKUP; use crate::plonk::circuit_data::{CommonCircuitData, ProverOnlyCircuitData}; -use crate::plonk::config::{GenericConfig, Hasher}; +use crate::plonk::config::{GenericConfig, GenericField, Hasher}; use crate::plonk::plonk_common::PlonkOracle; use crate::plonk::proof::{OpeningSet, Proof, ProofWithPublicInputs}; use crate::plonk::vanishing_poly::{eval_vanishing_poly_base_batch, get_lut_poly}; @@ -269,7 +269,8 @@ where set_lookup_wires(prover_data, common_data, &mut partition_witness)?; let public_inputs = partition_witness.get_targets(&prover_data.public_inputs); - let public_inputs_hash = C::InnerHasher::hash_no_pad(&public_inputs); + let pi_felts: Vec> = public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect(); + let public_inputs_hash = C::InnerHasher::hash_no_pad(&pi_felts); let witness = timed!( timing, From db9c63095f1b94ce9be1e96b61414ffb7635642b Mon Sep 17 00:00:00 2001 From: M Alghazwi Date: Tue, 11 Mar 2025 13:09:05 +0100 Subject: [PATCH 3/5] add benchmarks --- plonky2/benches/bn254_hash.rs | 107 ++++++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 plonky2/benches/bn254_hash.rs diff --git a/plonky2/benches/bn254_hash.rs b/plonky2/benches/bn254_hash.rs new file mode 100644 index 00000000..8418676a --- /dev/null +++ b/plonky2/benches/bn254_hash.rs @@ -0,0 +1,107 @@ +use std::any::type_name; +use anyhow::{anyhow, Result}; +use 
criterion::{criterion_group, criterion_main, Criterion};
+use plonky2::gates::noop::NoopGate;
+use plonky2::hash::hash_types::RichField;
+use plonky2::iop::witness::PartialWitness;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use plonky2::plonk::circuit_data::CircuitConfig;
+use plonky2::plonk::config::{GenericConfig, KeccakGoldilocksConfig, Poseidon2BN254Config, PoseidonGoldilocksConfig};
+use plonky2_field::extension::Extendable;
+use plonky2_field::goldilocks_field::GoldilocksField;
+
+/// Benchmark for building, proving, and verifying the Plonky2 circuit.
+fn bench_circuit<F: RichField + Extendable<D>, const D: usize, C: GenericConfig<D, F = F>>(c: &mut Criterion, circuit_size: usize) -> Result<()> {
+
+    // Create the circuit configuration
+    let config = CircuitConfig::standard_recursion_config();
+    let mut builder = CircuitBuilder::<F, D>::new(config);
+
+    let num_dummy_gates = match circuit_size {
+        0 => return Err(anyhow!("size must be at least 1")),
+        1 => 0,
+        2 => 1,
+        n => (1 << (n - 1)) + 1,
+    };
+
+    for _ in 0..num_dummy_gates {
+        builder.add_gate(NoopGate, vec![]);
+    }
+
+    // Benchmark Group
+    let mut group = c.benchmark_group(format!("Circuit Benchmark size {} for hasher: {}", circuit_size, type_name::<C>()));
+
+    // Benchmark the Circuit Building Phase
+    group.bench_function("Build Circuit", |b| {
+        b.iter(|| {
+            let config = CircuitConfig::standard_recursion_config();
+            let mut local_builder = CircuitBuilder::<F, D>::new(config);
+            for _ in 0..num_dummy_gates {
+                local_builder.add_gate(NoopGate, vec![]);
+            }
+            let _data = local_builder.build::<C>();
+        })
+    });
+
+    let data = builder.build::<C>();
+    println!("Circuit size (degree bits): {:?}", data.common.degree_bits());
+
+    // Create a PartialWitness
+    let mut pw = PartialWitness::new();
+
+    // Benchmark the Proving Phase
+    group.bench_function("Prove Circuit", |b| {
+        b.iter(|| {
+            let local_pw = pw.clone();
+            data.prove(local_pw).expect("Failed to prove circuit")
+        })
+    });
+
+    // Generate the proof once for verification benchmarking
+    let proof_with_pis = data.prove(pw.clone()).expect("Failed to prove circuit");
+    let verifier_data = data.verifier_data();
+
+    println!("Proof size: {} bytes", proof_with_pis.to_bytes().len());
+
+    // Benchmark the Verifying Phase
+    group.bench_function("Verify Proof", |b| {
+        b.iter(|| {
+            verifier_data.verify(proof_with_pis.clone()).expect("Failed to verify proof");
+        })
+    });
+
+    group.finish();
+    Ok(())
+}
+
+fn bench_multiple_hashers(c: &mut Criterion) {
+    const D: usize = 2;
+    type C1 = PoseidonGoldilocksConfig;
+    type C2 = KeccakGoldilocksConfig;
+    type C3 = Poseidon2BN254Config;
+    type F = GoldilocksField;
+
+    bench_circuit::<F, D, C1>(c, 10).expect("failed");
+    bench_circuit::<F, D, C2>(c, 10).expect("failed");
+    bench_circuit::<F, D, C3>(c, 10).expect("failed");
+
+    bench_circuit::<F, D, C1>(c, 11).expect("failed");
+    bench_circuit::<F, D, C2>(c, 11).expect("failed");
+    bench_circuit::<F, D, C3>(c, 11).expect("failed");
+
+    bench_circuit::<F, D, C1>(c, 12).expect("failed");
+    bench_circuit::<F, D, C2>(c, 12).expect("failed");
+    bench_circuit::<F, D, C3>(c, 12).expect("failed");
+
+    bench_circuit::<F, D, C1>(c, 13).expect("failed");
+    bench_circuit::<F, D, C2>(c, 13).expect("failed");
+    bench_circuit::<F, D, C3>(c, 13).expect("failed");
+}
+
+/// Criterion benchmark group
+criterion_group!{
+    name = prove_verify_benches;
+    config = Criterion::default().sample_size(10);
+    targets = bench_multiple_hashers
+}
+criterion_main!(prove_verify_benches);

From b33ed53ed713b952c74ede86668d7ca4e5c5fee0 Mon Sep 17 00:00:00 2001
From: M Alghazwi
Date: Tue, 11 Mar 2025 13:12:19 +0100
Subject: [PATCH 4/5] fix bn conversion and some minor improvements.
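The benchmark file added in the previous patch is registered as a Criterion harness by the Cargo.toml hunk below, so after this patch the three configurations can be compared with:

    cargo bench --bench bn254_hash

Each run builds a dummy circuit of 2^(size-1) + 1 no-op gates for sizes 10 through 13 and times the build, prove, and verify phases separately for PoseidonGoldilocksConfig, KeccakGoldilocksConfig, and Poseidon2BN254Config.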
---
 plonky2/Cargo.toml                          |  5 +
 plonky2/examples/poseidon2_bn254_example.rs | 65 ++++------
 plonky2/src/fri/challenges.rs               |  6 +-
 plonky2/src/fri/prover.rs                   |  6 +-
 plonky2/src/hash/batch_merkle_tree.rs       | 30 ++---
 plonky2/src/hash/duplex.rs                  |  2 +-
 plonky2/src/hash/hash_types.rs              | 15 ++-
 plonky2/src/hash/keccak.rs                  |  6 +-
 plonky2/src/hash/merkle_proofs.rs           |  6 +-
 plonky2/src/hash/merkle_tree.rs             |  4 +-
 plonky2/src/hash/path_compression.rs        |  4 +-
 plonky2/src/hash/poseidon.rs                |  6 +-
 plonky2/src/hash/poseidon2_bn254.rs         | 136 ++++++++++++--------
 plonky2/src/iop/challenger.rs               | 14 +-
 plonky2/src/plonk/circuit_builder.rs        |  6 +-
 plonky2/src/plonk/config.rs                 | 51 ++++++--
 plonky2/src/plonk/proof.rs                  |  6 +-
 plonky2/src/plonk/prover.rs                 |  4 +-
 18 files changed, 212 insertions(+), 160 deletions(-)

diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml
index 854a595e..54d69bcc 100644
--- a/plonky2/Cargo.toml
+++ b/plonky2/Cargo.toml
@@ -39,6 +39,7 @@ rust-bn254-hash = {git = "https://github.com/codex-storage/rust-bn254-hash.git"}
 ark-serialize = {version = "0.5.0"}
 ark-bn254 = "0.5.0"
 ark-ff = "0.5.0"
+num-bigint = { version = "0.4", default-features = false }
 
 # Local dependencies
 plonky2_field = { version = "1.0.0", path = "../field", default-features = false }
@@ -79,6 +80,10 @@ harness = false
 name = "hashing"
 harness = false
 
+[[bench]]
+name = "bn254_hash"
+harness = false
+
 [[bench]]
 name = "merkle"
 harness = false
diff --git a/plonky2/examples/poseidon2_bn254_example.rs b/plonky2/examples/poseidon2_bn254_example.rs
index a783eadf..45e15b4b 100644
--- a/plonky2/examples/poseidon2_bn254_example.rs
+++ b/plonky2/examples/poseidon2_bn254_example.rs
@@ -1,16 +1,14 @@
-use std::fs;
+// use std::fs;
 use anyhow::Result;
-use plonky2::field::types::Field;
-use plonky2::iop::witness::{PartialWitness, WitnessWrite};
+use plonky2::gates::noop::NoopGate;
+use plonky2::iop::witness::PartialWitness;
 use plonky2::plonk::circuit_builder::CircuitBuilder;
 use plonky2::plonk::circuit_data::CircuitConfig;
 use plonky2::plonk::config::{GenericConfig, Poseidon2BN254Config};
-use plonky2::plonk::prover::ProverOptions;
-use plonky2::plonk::verifier::{VerifierOptions, HashStatisticsPrintLevel};
+use plonky2::plonk::prover::DEFAULT_PROVER_OPTIONS;
+use plonky2::plonk::verifier::{HashStatisticsPrintLevel, VerifierOptions};
 
-/// An example of using Plonky2 to prove a statement of the form
-/// "I know the 100th element of the Fibonacci sequence, starting with constants a and b."
-/// When a == 0 and b == 1, this is proving knowledge of the 100th (standard) Fibonacci number.
+/// An example of using Plonky2 over BN254 to prove a dummy circuit of size S.
 fn main() -> Result<()> {
     const D: usize = 2;
     type C = Poseidon2BN254Config;
@@ -18,34 +16,20 @@ fn main() -> Result<()> {
 
     let config = CircuitConfig::standard_recursion_config();
     let mut builder = CircuitBuilder::<F, D>::new(config);
-
-    // The arithmetic circuit.
-    let initial_a = builder.add_virtual_target();
-    let initial_b = builder.add_virtual_target();
-    let mut prev_target = initial_a;
-    let mut cur_target = initial_b;
-    for _ in 0..99 {
-        let temp = builder.add(prev_target, cur_target);
-        prev_target = cur_target;
-        cur_target = temp;
+    const S: usize = 5;
+    let num_dummy_gates = (1 << (S - 1)) + 1;
+    for _ in 0..num_dummy_gates {
+        builder.add_gate(NoopGate, vec![]);
     }
 
-    // Public inputs are the two initial values (provided below) and the result (which is generated).
- builder.register_public_input(initial_a); - builder.register_public_input(initial_b); - builder.register_public_input(cur_target); - - // Provide initial values. - let mut pw = PartialWitness::new(); - pw.set_target(initial_a, F::ZERO)?; - pw.set_target(initial_b, F::ONE)?; + let pw = PartialWitness::new(); let data = builder.build::(); + println!("circ size = {}", data.common.degree_bits()); - let prover_opts = ProverOptions { - export_witness: Some(String::from("fibonacci_witness.json")), - print_hash_statistics: HashStatisticsPrintLevel::Info, - }; + let prover_opts = DEFAULT_PROVER_OPTIONS; + + println!("proving ..."); let proof = data.prove_with_options(pw, &prover_opts)?; @@ -53,20 +37,15 @@ fn main() -> Result<()> { // let common_circuit_data_serialized = serde_json::to_string(&data.common ).unwrap(); // let verifier_only_circuit_data_serialized = serde_json::to_string(&data.verifier_only).unwrap(); // let proof_serialized = serde_json::to_string(&proof ).unwrap(); - // fs::write("fibonacci_common_k.json" , common_circuit_data_serialized) .expect("Unable to write file"); - // fs::write("fibonacci_vkey_k.json" , verifier_only_circuit_data_serialized).expect("Unable to write file"); - // fs::write("fibonacci_proof_k.json" , proof_serialized) .expect("Unable to write file"); - // println!("const sigma: {:?}", data.verifier_only.constants_sigmas_cap); - // println!("circ digest: {:?}", data.verifier_only.circuit_digest); - // println!("proof part: {:?}", proof.proof.wires_cap.0); - - println!( - "100th Fibonacci number mod |F| (starting with {}, {}) is: {}", - proof.public_inputs[0], proof.public_inputs[1], proof.public_inputs[2] - ); + // fs::write("bn_common.json" , common_circuit_data_serialized) .expect("Unable to write file"); + // fs::write("bn_vkey.json" , verifier_only_circuit_data_serialized).expect("Unable to write file"); + // fs::write("bn_proof.json" , proof_serialized) .expect("Unable to write file"); let verifier_opts = VerifierOptions { print_hash_statistics: HashStatisticsPrintLevel::Summary, }; - data.verify_with_options(proof, &verifier_opts) + + assert!(data.verify_with_options(proof, &verifier_opts).is_ok()); + + Ok(()) } diff --git a/plonky2/src/fri/challenges.rs b/plonky2/src/fri/challenges.rs index 2a69b656..543a34f2 100644 --- a/plonky2/src/fri/challenges.rs +++ b/plonky2/src/fri/challenges.rs @@ -13,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap; use crate::iop::challenger::{Challenger, RecursiveChallenger}; use crate::iop::target::Target; use crate::plonk::circuit_builder::CircuitBuilder; -use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, Hasher}; +use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, Hasher, IntoGenericFieldVec}; impl> Challenger { pub fn observe_openings(&mut self, openings: &FriOpenings) @@ -57,7 +57,7 @@ impl> Challenger { if let Some(step_count) = max_num_query_steps { let cap_len = (1 << config.cap_height) * NUM_HASH_OUT_ELTS; let zero_cap = vec![F::ZERO; cap_len]; - let zero_cap_felts: Vec> = zero_cap.into_iter().map(GenericField::Goldilocks).collect(); + let zero_cap_felts: Vec> = zero_cap.into_generic_field_vec(); for _ in commit_phase_merkle_caps.len()..step_count { // self.observe_elements(&zero_cap); self.observe_elements(&zero_cap_felts); @@ -75,7 +75,7 @@ impl> Challenger { } } - self.observe_element(GenericField::Goldilocks(pow_witness)); + self.observe_element(pow_witness.into()); let fri_pow_response = self.get_challenge(); let fri_query_indices = (0..num_fri_queries) diff --git 
a/plonky2/src/fri/prover.rs b/plonky2/src/fri/prover.rs index 916f4fca..608f3c4b 100644 --- a/plonky2/src/fri/prover.rs +++ b/plonky2/src/fri/prover.rs @@ -14,7 +14,7 @@ use crate::hash::hash_types::{RichField, NUM_HASH_OUT_ELTS}; use crate::hash::hashing::*; use crate::hash::merkle_tree::MerkleTree; use crate::iop::challenger::Challenger; -use crate::plonk::config::{GenericConfig, GenericField}; +use crate::plonk::config::{GenericConfig, GenericField, IntoGenericFieldVec}; use crate::plonk::plonk_common::reduce_with_powers; use crate::plonk::prover::ProverOptions; use crate::plonk::verifier::HashStatisticsPrintLevel; @@ -136,7 +136,7 @@ fn fri_committed_trees, C: GenericConfig, if let Some(step_count) = max_num_query_steps { let cap_len = (1 << fri_params.config.cap_height) * NUM_HASH_OUT_ELTS; let zero_cap = vec![F::ZERO; cap_len]; - let zero_cap_felts: Vec> = zero_cap.into_iter().map(GenericField::Goldilocks).collect(); + let zero_cap_felts: Vec> = zero_cap.into_generic_field_vec(); for _ in fri_params.reduction_arity_bits.len()..step_count { challenger.observe_elements(&zero_cap_felts); challenger.get_extension_challenge::(); @@ -177,7 +177,7 @@ pub(crate) fn fri_proof_of_work< // println!("pow_witness = {:?}",pow_witness); // Recompute pow_response using our normal Challenger code, and make sure it matches. - challenger.observe_element(GenericField::Goldilocks(pow_witness)); + challenger.observe_element(pow_witness.into()); let pow_response = challenger.get_challenge(); let leading_zeros = pow_response.to_canonical_u64().leading_zeros(); assert!(leading_zeros >= min_leading_zeros); diff --git a/plonky2/src/hash/batch_merkle_tree.rs b/plonky2/src/hash/batch_merkle_tree.rs index 64f85258..932b4fee 100644 --- a/plonky2/src/hash/batch_merkle_tree.rs +++ b/plonky2/src/hash/batch_merkle_tree.rs @@ -10,7 +10,7 @@ use crate::hash::merkle_proofs::MerkleProof; use crate::hash::merkle_tree::{ capacity_up_to_mut, fill_digests_buf, merkle_tree_prove, MerkleCap, }; -use crate::plonk::config::{GenericField, GenericHashOut, Hasher}; +use crate::plonk::config::{GenericField, GenericHashOut, Hasher, IntoGenericFieldVec}; use crate::util::log2_strict; #[derive(Clone, Debug, Default, Eq, PartialEq)] @@ -56,14 +56,12 @@ impl> BatchMerkleTree { let mut digests_buf_pos = 0; let mut cap = vec![]; - let dummy_leaves_felts = vec![vec![GenericField::Goldilocks(F::ZERO)]; 1 << cap_height]; + let dummy_leaves_felts = vec![vec![F::ZERO.into()]; 1 << cap_height]; let mut leaves_felts: Vec>>> = leaves.clone().into_iter() .map(|matrix| { matrix.into_iter() .map(|vec| { - vec.into_iter() - .map(|f| GenericField::Goldilocks(f)) - .collect() + vec.into_generic_field_vec() }) .collect() }) @@ -184,7 +182,7 @@ mod tests { use super::*; use crate::hash::merkle_proofs::verify_batch_merkle_proof_to_cap; - use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use crate::plonk::config::{GenericConfig, IntoGenericFieldVec, PoseidonGoldilocksConfig}; const D: usize = 2; type C = PoseidonGoldilocksConfig; @@ -208,10 +206,10 @@ mod tests { let fmt: BatchMerkleTree = BatchMerkleTree::new(vec![mat_1], 0); let mat_1_leaf_hashes = [ - H::hash_or_noop(&[F::ZERO, F::ONE]), - H::hash_or_noop(&[F::TWO, F::ONE]), - H::hash_or_noop(&[F::TWO, F::TWO]), - H::hash_or_noop(&[F::ZERO, F::ZERO]), + H::hash_or_noop(&[F::ZERO.into(), F::ONE.into()]), + H::hash_or_noop(&[F::TWO.into(), F::ONE.into()]), + H::hash_or_noop(&[F::TWO.into(), F::TWO.into()]), + H::hash_or_noop(&[F::ZERO.into(), F::ZERO.into()]), ]; 
assert_eq!(mat_1_leaf_hashes[0..2], fmt.digests[0..2]); assert_eq!(mat_1_leaf_hashes[2..4], fmt.digests[4..6]); @@ -260,10 +258,10 @@ mod tests { BatchMerkleTree::new(vec![mat_1, mat_2.clone()], 0); let mat_1_leaf_hashes = [ - H::hash_or_noop(&[F::ZERO, F::ONE]), - H::hash_or_noop(&[F::TWO, F::ONE]), - H::hash_or_noop(&[F::TWO, F::TWO]), - H::hash_or_noop(&[F::ZERO, F::ZERO]), + H::hash_or_noop(&[F::ZERO.into(), F::ONE.into()]), + H::hash_or_noop(&[F::TWO.into(), F::ONE.into()]), + H::hash_or_noop(&[F::TWO.into(), F::TWO.into()]), + H::hash_or_noop(&[F::ZERO.into(), F::ZERO.into()]), ]; assert_eq!(mat_1_leaf_hashes, fmt.digests[0..4]); @@ -276,10 +274,10 @@ mod tests { .zip(mat_2.iter()) .map(|(row1, row2)| { let mut new_row = row1.clone(); - new_row.extend_from_slice(row2); + new_row.extend_from_slice(&row2.clone().into_generic_field_vec()); new_row }) - .collect::>>(); + .collect::>>>(); let layer_1 = [ H::hash_or_noop(&new_leaves[0]), H::hash_or_noop(&new_leaves[1]), diff --git a/plonky2/src/hash/duplex.rs b/plonky2/src/hash/duplex.rs index 17f9bef1..9bb54c87 100644 --- a/plonky2/src/hash/duplex.rs +++ b/plonky2/src/hash/duplex.rs @@ -107,7 +107,7 @@ impl> DuplexState { .find_any(|&candidate| { let mut duplex_state = state.clone(); let mut sponge_input = input.clone(); - sponge_input.push(GenericField::Goldilocks(F::from_canonical_u64(candidate))); + sponge_input.push(F::from_canonical_u64(candidate).into()); H::sponge(&mut duplex_state, sponge_input); let temp_buf = Self::squeeze_f(&mut duplex_state); let pow_response = temp_buf.iter().last().unwrap(); diff --git a/plonky2/src/hash/hash_types.rs b/plonky2/src/hash/hash_types.rs index 6ca645ba..604d9d56 100644 --- a/plonky2/src/hash/hash_types.rs +++ b/plonky2/src/hash/hash_types.rs @@ -12,7 +12,7 @@ use crate::hash::poseidon::Poseidon; use crate::iop::target::Target; use crate::plonk::config::{GenericField, GenericHashOut}; use ark_bn254::Fr as BN254Fr; -use crate::hash::poseidon2_bn254::{bytes_to_felts, felts_to_bytes}; +use crate::hash::poseidon2_bn254::{bytes_le_to_felts, felts_to_bytes_le}; /// A prime order field with the features we need to use it as a base field in our argument system. 
pub trait RichField: PrimeField64 + Poseidon {} @@ -30,7 +30,7 @@ impl Serialize for BN254HashOut { fn serialize < S > ( & self, serializer: S) -> Result < S::Ok, S::Error > where S: Serializer { - let element_to_bytes = felts_to_bytes(&self.element); + let element_to_bytes = felts_to_bytes_le(&self.element); serializer.serialize_bytes( & element_to_bytes) } } @@ -44,7 +44,7 @@ impl<'de> Deserialize<'de> for BN254HashOut { let mut element_array = < [u8; 32] >::default(); element_array.copy_from_slice( & element_as_bytes[0..32]); - let deserialized_element = bytes_to_felts(&element_array); + let deserialized_element = bytes_le_to_felts(&element_array); Ok( Self { element: deserialized_element, @@ -56,18 +56,18 @@ impl<'de> Deserialize<'de> for BN254HashOut { /// `F` here is the goldilocks not the BN254 field impl GenericHashOut for BN254HashOut { fn to_bytes(&self) -> Vec { - felts_to_bytes(&self.element) + felts_to_bytes_le(&self.element) } fn from_bytes(bytes: &[u8]) -> Self { assert_eq!(bytes.len(), 32); BN254HashOut{ - element: bytes_to_felts(bytes) + element: bytes_le_to_felts(bytes) } } fn to_vec(&self) -> Vec> { - vec![GenericField::BN254(self.element.clone())] + vec![self.element.clone().into()] } } @@ -163,6 +163,7 @@ impl GenericHashOut for HashOut { .copied() .map(GenericField::::Goldilocks) .collect() + } } @@ -247,7 +248,7 @@ impl GenericHashOut for BytesHash { let mut arr = [0u8; 8]; arr[..bytes.len()].copy_from_slice(bytes); let raw = F::from_canonical_u64(u64::from_le_bytes(arr)); - GenericField::::Goldilocks(raw) + raw.into() }) .collect() } diff --git a/plonky2/src/hash/keccak.rs b/plonky2/src/hash/keccak.rs index 0ab9ee20..54259768 100644 --- a/plonky2/src/hash/keccak.rs +++ b/plonky2/src/hash/keccak.rs @@ -109,11 +109,11 @@ impl Hasher for KeccakHash { fn hash_pad(input: &[GenericField]) -> Self::Hash { let mut padded_input = input.to_vec(); - padded_input.push(GenericField::Goldilocks(F::ONE)); + padded_input.push(F::ONE.into()); while (padded_input.len() + 1) % Self::Permutation::RATE != 0 { - padded_input.push(GenericField::Goldilocks(F::ZERO)); + padded_input.push(F::ZERO.into()); } - padded_input.push(GenericField::Goldilocks(F::ONE)); + padded_input.push(F::ONE.into()); Self::hash_no_pad(&padded_input) } diff --git a/plonky2/src/hash/merkle_proofs.rs b/plonky2/src/hash/merkle_proofs.rs index 4bbfb2a8..1701db38 100644 --- a/plonky2/src/hash/merkle_proofs.rs +++ b/plonky2/src/hash/merkle_proofs.rs @@ -13,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap; use crate::iop::target::{BoolTarget, Target}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::VerifierCircuitTarget; -use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher}; +use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher, IntoGenericFieldVec}; #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] #[serde(bound = "")] @@ -79,7 +79,7 @@ pub fn verify_batch_merkle_proof_to_cap>( assert_eq!(leaf_data.len(), leaf_heights.len()); let leaf_data_felts: Vec>> = leaf_data.into_iter() .map(|inner| { - inner.into_iter().map(|f| GenericField::Goldilocks(f.clone())).collect() + inner.clone().into_generic_field_vec() }) .collect(); let mut current_digest = H::hash_or_noop(&leaf_data_felts[0]); @@ -102,7 +102,7 @@ pub fn verify_batch_merkle_proof_to_cap>( leaf_data_index += 1; } } - assert_eq!(leaf_data_index, leaf_data.len()); + assert_eq!(leaf_data_index, leaf_data_felts.len()); ensure!( current_digest == 
merkle_cap.0[leaf_index], "Invalid Merkle proof." diff --git a/plonky2/src/hash/merkle_tree.rs b/plonky2/src/hash/merkle_tree.rs index 12cf1e39..d42b87b8 100644 --- a/plonky2/src/hash/merkle_tree.rs +++ b/plonky2/src/hash/merkle_tree.rs @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; use crate::hash::hash_types::RichField; use crate::hash::merkle_proofs::MerkleProof; -use crate::plonk::config::{GenericField, GenericHashOut, Hasher}; +use crate::plonk::config::{GenericField, GenericHashOut, Hasher, IntoGenericFieldVec}; use crate::util::log2_strict; /// The Merkle cap of height `h` of a Merkle tree is the `h`-th layer (from the root) of the tree. @@ -209,7 +209,7 @@ impl> MerkleTree { let cap_buf = capacity_up_to_mut(&mut cap, len_cap); let leaves_felts: Vec>> = leaves.clone().into_iter() .map(|inner| { - inner.into_iter().map(|f| GenericField::Goldilocks(f)).collect() + inner.into_generic_field_vec() }) .collect(); fill_digests_buf::(digests_buf, cap_buf, &leaves_felts[..], cap_height); diff --git a/plonky2/src/hash/path_compression.rs b/plonky2/src/hash/path_compression.rs index 18fe470c..ed30d888 100644 --- a/plonky2/src/hash/path_compression.rs +++ b/plonky2/src/hash/path_compression.rs @@ -6,7 +6,7 @@ use num::Integer; use crate::hash::hash_types::RichField; use crate::hash::merkle_proofs::MerkleProof; -use crate::plonk::config::{GenericField, Hasher}; +use crate::plonk::config::{GenericField, Hasher, IntoGenericFieldVec}; /// Compress multiple Merkle proofs on the same tree by removing redundancy in the Merkle paths. pub(crate) fn compress_merkle_proofs>( @@ -68,7 +68,7 @@ pub(crate) fn decompress_merkle_proofs>( for (&i, v) in leaves_indices.iter().zip(leaves_data) { // Observe the leaves. - let v_felts: Vec> = v.clone().into_iter().map(GenericField::Goldilocks).collect(); + let v_felts: Vec> = v.clone().into_generic_field_vec(); seen.insert(i + num_leaves, H::hash_or_noop(&v_felts)); } diff --git a/plonky2/src/hash/poseidon.rs b/plonky2/src/hash/poseidon.rs index 51455695..638366fc 100644 --- a/plonky2/src/hash/poseidon.rs +++ b/plonky2/src/hash/poseidon.rs @@ -894,11 +894,11 @@ impl Hasher for PoseidonHash { fn hash_pad(input: &[GenericField]) -> Self::Hash { let mut padded_input = input.to_vec(); - padded_input.push(GenericField::Goldilocks(F::ONE)); + padded_input.push(F::ONE.into()); while (padded_input.len() + 1) % Self::Permutation::RATE != 0 { - padded_input.push(GenericField::Goldilocks(F::ZERO)); + padded_input.push(F::ZERO.into()); } - padded_input.push(GenericField::Goldilocks(F::ONE)); + padded_input.push(F::ONE.into()); Self::hash_no_pad(&padded_input) } diff --git a/plonky2/src/hash/poseidon2_bn254.rs b/plonky2/src/hash/poseidon2_bn254.rs index b2b60175..acf40f6f 100644 --- a/plonky2/src/hash/poseidon2_bn254.rs +++ b/plonky2/src/hash/poseidon2_bn254.rs @@ -10,9 +10,14 @@ use rust_bn254_hash::state::State; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_bn254::{Fr as BN254Fr}; use rust_bn254_hash::poseidon2::permutation::permute_inplace as permute_bn254_inplace; -use ark_ff::{BigInt, PrimeField, Zero}; +use ark_ff::{ PrimeField, Zero,}; +use num::Integer; +use num_bigint::BigUint; +use ark_ff::BigInt as arkBigInt; use rust_bn254_hash::hash::Hash; use rust_bn254_hash::sponge::{sponge_felts_no_pad, sponge_felts_pad}; +use plonky2_field::goldilocks_field::GoldilocksField; +use plonky2_field::types::Field64; pub const SPONGE_RATE: usize = 2; pub const SPONGE_CAPACITY: usize = 1; @@ -84,8 +89,6 @@ impl PlonkyPermutation for Poseidon2BN254Perm { } 
- - #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct Poseidon2BN254; impl Hasher for Poseidon2BN254 { @@ -152,16 +155,46 @@ impl Hasher for Poseidon2BN254 { } fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec { + // Squeeze out BN254 elements from the sponge state. let bn_out = state.squeeze(); - let bn_bytes: Vec = bn_out.iter().flat_map(|e| felts_to_bytes(e)).collect(); - let goldilocks_felts: Vec = bytes_to_u64(&bn_bytes).iter().map(|e| F::from_canonical_u64(*e)).collect(); - assert!(goldilocks_felts.len()>0); - goldilocks_felts + + // convert bn to goldilocks + bn_to_goldilocks(bn_out) } } // --------- Conversion helper functions --------------------- +/// Converts a slice of BN254 field elements to a vector of Goldilocks (F) +fn bn_to_goldilocks(input: &[BN254Fr]) -> Vec { + // Goldilocks order + let r: BigUint = BigUint::from(GoldilocksField::ORDER); + + let mut goldilocks_felts = Vec::new(); + // For each BN254 field element, extract 3 Goldilocks elements. + for fe in input.into_iter().cloned() { + // Convert BN254Fr -> 256-bit big integer. + let mut big: BigUint = fe.into_bigint().into(); + + // We want three remainders in [0, p_Goldilocks), each fits into a 64-bit integer. + for _ in 0..3 { + + let (quotient, remainder) = big.div_rem(&r); + let rem_u64 = remainder.to_u64_digits(); + + // check just for safety: + assert_eq!(rem_u64.len(), 1, "Remainder unexpectedly larger than 64 bits."); + + let r64 = rem_u64[0]; + goldilocks_felts.push(F::from_canonical_u64(r64)); + + // Update big to the quotient for the next remainder. + big = quotient; + } + } + goldilocks_felts +} + /// converts a vec of goldilocks to bn254 /// takes 7 goldilocks and converts to 2 bn254 fn goldilocks_to_bn(input: &Vec) -> Vec{ @@ -184,20 +217,15 @@ fn goldilocks_to_bn(input: &Vec) -> Vec{ ws[i] = u64s[7 * m + i]; } let (a, b) = u64s_to_felts(ws); - // check that we don't push zero field elements - if a != BN254Fr::zero() { - result.push(a); - } - if b != BN254Fr::zero() { - result.push(b); - } + result.push(a); + result.push(b); } result } -const BIGINT_TWO_TO_64: BigInt<4> = BigInt( [0,1,0,0] ); -const BIGINT_TWO_TO_128: BigInt<4> = BigInt( [0,0,1,0] ); -const BIGINT_TWO_TO_192: BigInt<4> = BigInt( [0,0,0,1] ); +const BIGINT_TWO_TO_64: arkBigInt<4> = arkBigInt( [0,1,0,0] ); +const BIGINT_TWO_TO_128: arkBigInt<4> = arkBigInt( [0,0,1,0] ); +const BIGINT_TWO_TO_192: arkBigInt<4> = arkBigInt( [0,0,0,1] ); /// converts u64 to BN254 - taken directly from: rust-bn254-hash pub fn u64s_to_felts(ws: [u64; 7]) -> (BN254Fr, BN254Fr) { @@ -224,38 +252,6 @@ pub fn u64s_to_felts(ws: [u64; 7]) -> (BN254Fr, BN254Fr) { (x, y) } -/// converts a slice of bytes to 64 by taking 63 bits at a time -/// this makes it safe for conversion from bytes to Goldilocks field elems -/// this fn ignores any remaining bit that are less than 63 bits at the end -pub fn bytes_to_u64(x: &[u8]) -> Vec { - let total_bits = x.len() * 8; - let num_chunks = total_bits / 63; // ignore any leftover bits - let mut result = Vec::with_capacity(num_chunks); - - for i in 0..num_chunks { - let bit_offset = i * 63; - let first_byte = bit_offset / 8; - let shift = bit_offset % 8; - // how many bits do we need? We need (shift + 63) bits in total. - // convert that to bytes by rounding up. 
-        let needed_bytes = ((shift + 63) + 7) / 8;
-
-        if first_byte + needed_bytes > x.len() {
-            break; // break out if incomplete chunk
-        }
-
-        let mut chunk: u128 = 0;
-        for j in 0..needed_bytes {
-            chunk |= (x[first_byte + j] as u128) << (8 * j);
-        }
-        // shift right with `shift` bits, then mask 63 bits.
-        let value = (chunk >> shift) & ((1u128 << 63) - 1);
-        result.push(value as u64);
-    }
-
-    result
-}
-
 /// helper function: converts a slice of GenericField<F> into a Vec<BN254Fr>
 /// the fn groups consecutive Goldilocks elements and converts them in one shot.
 fn generic_field_to_bn<F: RichField>(input: &[GenericField<F>]) -> Vec<BN254Fr> {
@@ -302,7 +298,7 @@ fn check_len_in_bytes<F: RichField>(input: &[GenericField<F>]) -> usize {
 
 //------------------ serialization for BN254 ---------------------
 
-pub fn felts_to_bytes<E>(f: &E) -> Vec<u8> where
+pub fn felts_to_bytes_le<E>(f: &E) -> Vec<u8> where
     E: CanonicalSerialize
 {
     let mut bytes = Vec::new();
@@ -310,7 +306,7 @@
-pub fn bytes_to_felts<E>(bytes: &[u8]) -> E where
+pub fn bytes_le_to_felts<E>(bytes: &[u8]) -> E where
     E: CanonicalDeserialize
 {
     let fr_res = E::deserialize_uncompressed(bytes).unwrap();
@@ -329,3 +325,41 @@
         .map(|chunk| u64::from_le_bytes(chunk.try_into().unwrap()))
         .collect()
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use ark_bn254::Fr as BN254Fr;
+    use ark_ff::{One, Zero};
+
+    /// Test converting a BN254 element to bytes and back.
+    #[test]
+    fn test_felts_bytes_roundtrip() {
+        let element = <BN254Fr as PrimeField>::from_bigint(arkBigInt::from(987654321u64)).unwrap();
+        let bytes = felts_to_bytes_le(&element);
+        assert_eq!(bytes.len(), 32, "Expected 32 bytes for BN254Fr serialization");
+        let recovered: BN254Fr = bytes_le_to_felts(&bytes);
+        assert_eq!(element, recovered, "Roundtrip conversion did not recover the original element");
+    }
+
+    /// Test roundtrip with edge cases: zero and one.
+    #[test]
+    fn test_zero_and_one_byte_conversion() {
+        let zero = BN254Fr::zero();
+        let one = BN254Fr::one();
+
+        let zero_bytes = felts_to_bytes_le(&zero);
+        let one_bytes = felts_to_bytes_le(&one);
+
+        // Check that both serializations are 32 bytes.
+ assert_eq!(zero_bytes.len(), 32, "Zero should serialize to 32 bytes"); + assert_eq!(one_bytes.len(), 32, "One should serialize to 32 bytes"); + + let zero_back: BN254Fr = bytes_le_to_felts(&zero_bytes); + let one_back: BN254Fr = bytes_le_to_felts(&one_bytes); + + assert_eq!(zero, zero_back, "Zero did not roundtrip correctly"); + assert_eq!(one, one_back, "One did not roundtrip correctly"); + } +} + diff --git a/plonky2/src/iop/challenger.rs b/plonky2/src/iop/challenger.rs index 4037f687..30549525 100644 --- a/plonky2/src/iop/challenger.rs +++ b/plonky2/src/iop/challenger.rs @@ -43,7 +43,7 @@ impl> Challenger where F: RichField + Extendable, { - let elements = element.to_basefield_array().map(|e: F|GenericField::::Goldilocks(e)); + let elements = element.to_basefield_array().map(|e: F|e.into()); self.observe_elements(&elements); } @@ -268,7 +268,7 @@ mod tests { use crate::iop::witness::{PartialWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; - use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; + use crate::plonk::config::{GenericConfig, GenericField, IntoGenericFieldVec, PoseidonGoldilocksConfig}; #[test] fn no_duplicate_challenges() { @@ -280,7 +280,7 @@ mod tests { for i in 1..10 { challenges.extend(challenger.get_n_challenges(i)); - challenger.observe_element(F::rand()); + challenger.observe_element(F::rand().into()); } let dedup_challenges = { @@ -304,10 +304,14 @@ mod tests { let num_outputs_per_round = [1, 2, 4]; // Generate random input messages. - let inputs_per_round: Vec> = num_inputs_per_round + let inputs_per_round_f: Vec> = num_inputs_per_round .iter() .map(|&n| F::rand_vec(n)) .collect(); + let inputs_per_round: Vec>> = inputs_per_round_f + .iter() + .map(|n| n.clone().into_generic_field_vec()) + .collect(); let mut challenger = Challenger::>::InnerHasher>::new(); let mut outputs_per_round: Vec> = Vec::new(); @@ -321,7 +325,7 @@ mod tests { let mut recursive_challenger = RecursiveChallenger::>::InnerHasher, D>::new(&mut builder); let mut recursive_outputs_per_round: Vec> = Vec::new(); - for (r, inputs) in inputs_per_round.iter().enumerate() { + for (r, inputs) in inputs_per_round_f.iter().enumerate() { recursive_challenger.observe_elements(&builder.constants(inputs)); recursive_outputs_per_round.push( recursive_challenger.get_n_challenges(&mut builder, num_outputs_per_round[r]), diff --git a/plonky2/src/plonk/circuit_builder.rs b/plonky2/src/plonk/circuit_builder.rs index 930ee3ae..290abaa9 100644 --- a/plonky2/src/plonk/circuit_builder.rs +++ b/plonky2/src/plonk/circuit_builder.rs @@ -44,7 +44,7 @@ use crate::plonk::circuit_data::{ CircuitConfig, CircuitData, CommonCircuitData, MockCircuitData, ProverCircuitData, ProverOnlyCircuitData, VerifierCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData, }; -use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, GenericHashOut, Hasher}; +use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, GenericHashOut, Hasher, IntoGenericFieldVec}; use crate::plonk::copy_constraint::CopyConstraint; use crate::plonk::permutation_argument::Forest; use crate::plonk::plonk_common::PlonkOracle; @@ -1256,14 +1256,14 @@ impl, const D: usize> CircuitBuilder { }; let constants_sigmas_cap = constants_sigmas_commitment.merkle_tree.cap.clone(); let domain_separator = self.domain_separator.unwrap_or_default(); - let ds_felts: Vec> = domain_separator.clone().into_iter().map(GenericField::Goldilocks).collect(); + let ds_felts: 
Vec> = domain_separator.clone().into_generic_field_vec(); let domain_separator_digest = C::Hasher::hash_pad(&ds_felts); // TODO: This should also include an encoding of gate constraints. let circuit_digest_parts = [ constants_sigmas_cap.flatten(), domain_separator_digest.to_vec(), vec![ - GenericField::Goldilocks(F::from_canonical_usize(degree_bits)), + F::from_canonical_usize(degree_bits).into(), /* Add other circuit data here */ ], ]; diff --git a/plonky2/src/plonk/config.rs b/plonky2/src/plonk/config.rs index 77b2355e..fcd9bd37 100644 --- a/plonky2/src/plonk/config.rs +++ b/plonky2/src/plonk/config.rs @@ -24,7 +24,7 @@ use crate::iop::target::{BoolTarget, Target}; use crate::plonk::circuit_builder::CircuitBuilder; use ark_bn254::Fr as BN254Fr; use ark_ff::{One, Zero}; -use crate::hash::poseidon2_bn254::{bytes_to_felts, felts_to_bytes, Poseidon2BN254}; +use crate::hash::poseidon2_bn254::{bytes_le_to_felts, felts_to_bytes_le, Poseidon2BN254}; pub trait GenericHashOut: Copy + Clone + Debug + Eq + PartialEq + Send + Sync + Serialize + DeserializeOwned @@ -43,13 +43,44 @@ pub enum GenericField { BN254(BN254Fr), } +// Convert a Goldilocks field element into a GenericField. +impl From for GenericField { + fn from(x: F) -> Self { + GenericField::Goldilocks(x) + } +} + +// Convert a BN254Fr element into a GenericField. +impl From for GenericField { + fn from(x: BN254Fr) -> Self { + GenericField::BN254(x) + } +} + +/// Extension trait to convert vectors of F or BN254Fr to Vec>. +pub trait IntoGenericFieldVec { + fn into_generic_field_vec(self) -> Vec>; +} + +impl IntoGenericFieldVec for Vec { + fn into_generic_field_vec(self) -> Vec> { + self.into_iter().map(GenericField::from).collect() + } +} + +impl IntoGenericFieldVec for Vec { + fn into_generic_field_vec(self) -> Vec> { + self.into_iter().map(GenericField::from).collect() + } +} + /// hasher field trait to cover fields in `GenericField` enum pub trait HasherField: Default + Sized + Copy + Debug + Eq + PartialEq + Sync + Send { fn get_one() -> Self; fn get_zero() -> Self; - fn to_bytes(&self) -> Vec; + fn to_bytes_le(&self) -> Vec; - fn from_bytes(b: &[u8]) -> Self; + fn from_bytes_le(b: &[u8]) -> Self; } @@ -63,12 +94,12 @@ impl HasherField for BN254Fr { BN254Fr::zero() } - fn to_bytes(&self) -> Vec { - felts_to_bytes::(&self) + fn to_bytes_le(&self) -> Vec { + felts_to_bytes_le::(&self) } - fn from_bytes(b: &[u8]) -> Self { - bytes_to_felts::(b) + fn from_bytes_le(b: &[u8]) -> Self { + bytes_le_to_felts::(b) } } @@ -82,11 +113,11 @@ impl HasherField for T { T::ZERO } - fn to_bytes(&self) -> Vec { + fn to_bytes_le(&self) -> Vec { self.to_canonical_u64().to_le_bytes().to_vec() } - fn from_bytes(b: &[u8]) -> Self { + fn from_bytes_le(b: &[u8]) -> Self { assert_eq!(b.len(), 8, "Input vector must have exactly 8 bytes"); let arr: [u8; 8] = b.try_into().expect("Conversion to array failed"); let element = u64::from_le_bytes(arr); @@ -180,7 +211,7 @@ impl GenericConfig<2> for KeccakGoldilocksConfig { } /// Configuration using Poseidon2BN254 as hasher over the Goldilocks field. 
-#[derive(Debug, Copy, Clone, Default, Eq, PartialEq)] +#[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Serialize)] pub struct Poseidon2BN254Config; impl GenericConfig<2> for Poseidon2BN254Config { type F = GoldilocksField; diff --git a/plonky2/src/plonk/proof.rs b/plonky2/src/plonk/proof.rs index 63b59ce6..461f648f 100644 --- a/plonky2/src/plonk/proof.rs +++ b/plonky2/src/plonk/proof.rs @@ -25,7 +25,7 @@ use crate::hash::merkle_tree::MerkleCap; use crate::iop::ext_target::ExtensionTarget; use crate::iop::target::Target; use crate::plonk::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData}; -use crate::plonk::config::{GenericConfig, GenericField, Hasher}; +use crate::plonk::config::{GenericConfig, GenericField, Hasher, IntoGenericFieldVec}; use crate::plonk::verifier::{verify_with_challenges, DEFAULT_VERIFIER_OPTIONS}; use crate::util::serialization::{Buffer, Read, Write}; @@ -104,7 +104,7 @@ impl, C: GenericConfig, const D: usize> pub fn get_public_inputs_hash( &self, ) -> <>::InnerHasher as Hasher>::Hash { - let pi_felts: Vec> = self.public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect(); + let pi_felts: Vec> = self.public_inputs.clone().into_generic_field_vec(); C::InnerHasher::hash_no_pad(&pi_felts) } @@ -235,7 +235,7 @@ impl, C: GenericConfig, const D: usize> pub(crate) fn get_public_inputs_hash( &self, ) -> <>::InnerHasher as Hasher>::Hash { - let pi_felts: Vec> = self.public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect(); + let pi_felts: Vec> = self.public_inputs.clone().into_generic_field_vec(); C::InnerHasher::hash_no_pad(&pi_felts) } diff --git a/plonky2/src/plonk/prover.rs b/plonky2/src/plonk/prover.rs index 99a7ff8f..85ed9394 100644 --- a/plonky2/src/plonk/prover.rs +++ b/plonky2/src/plonk/prover.rs @@ -30,7 +30,7 @@ use crate::iop::target::Target; use crate::iop::witness::{MatrixWitness, PartialWitness, PartitionWitness, Witness, WitnessWrite}; use crate::plonk::circuit_builder::NUM_COINS_LOOKUP; use crate::plonk::circuit_data::{CommonCircuitData, ProverOnlyCircuitData}; -use crate::plonk::config::{GenericConfig, GenericField, Hasher}; +use crate::plonk::config::{GenericConfig, GenericField, Hasher, IntoGenericFieldVec}; use crate::plonk::plonk_common::PlonkOracle; use crate::plonk::proof::{OpeningSet, Proof, ProofWithPublicInputs}; use crate::plonk::vanishing_poly::{eval_vanishing_poly_base_batch, get_lut_poly}; @@ -269,7 +269,7 @@ where set_lookup_wires(prover_data, common_data, &mut partition_witness)?; let public_inputs = partition_witness.get_targets(&prover_data.public_inputs); - let pi_felts: Vec> = public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect(); + let pi_felts: Vec> = public_inputs.clone().into_generic_field_vec(); let public_inputs_hash = C::InnerHasher::hash_no_pad(&pi_felts); let witness = timed!( From bdfb86b46e8f9749acc8bb8233cc78ee7eb6f4a5 Mon Sep 17 00:00:00 2001 From: M Alghazwi Date: Thu, 13 Mar 2025 10:08:27 +0100 Subject: [PATCH 5/5] add tests and descriptions for the conversions. 
---
 plonky2/Cargo.toml                  |  1 +
 plonky2/src/hash/poseidon2_bn254.rs | 80 ++++++++++++++++++++++++++---
 2 files changed, 74 insertions(+), 7 deletions(-)

diff --git a/plonky2/Cargo.toml b/plonky2/Cargo.toml
index 54d69bcc..109b86e3 100644
--- a/plonky2/Cargo.toml
+++ b/plonky2/Cargo.toml
@@ -39,6 +39,7 @@ rust-bn254-hash = {git = "https://github.com/codex-storage/rust-bn254-hash.git"}
 ark-serialize = {version = "0.5.0"}
 ark-bn254 = "0.5.0"
 ark-ff = "0.5.0"
+ark-std = "0.5.0"
 num-bigint = { version = "0.4", default-features = false }
 
 # Local dependencies
diff --git a/plonky2/src/hash/poseidon2_bn254.rs b/plonky2/src/hash/poseidon2_bn254.rs
index acf40f6f..f0559797 100644
--- a/plonky2/src/hash/poseidon2_bn254.rs
+++ b/plonky2/src/hash/poseidon2_bn254.rs
@@ -165,7 +165,18 @@ impl<F: RichField> Hasher<F> for Poseidon2BN254 {
 
 // --------- Conversion helper functions ---------------------
 
-/// Converts a slice of BN254 field elements to a vector of Goldilocks (F)
+/// Converts a slice of BN254 field elements to a vector of Goldilocks (F) by:
+///
+/// - Interpreting each BN254 element as an unsigned big integer `BigUint`.
+/// - Repeatedly taking `remainder = X mod Goldilocks::ORDER` (which fits in a `u64`)
+///   and then dividing `X` by `Goldilocks::ORDER`.
+/// - Repeating this exactly 3 times for each BN254 element in the slice, generating 3*l
+///   Goldilocks elements, where l is the length of the slice.
+///
+/// We use this primarily in hashing contexts (for Fiat-Shamir in Plonky2 circuits), where
+/// we want to safely convert a ~254-bit BN254 element into multiple 64-bit
+/// Goldilocks elements. The small leftover in `X` after extracting 3 remainders
+/// is discarded, which introduces only a negligible bias.
 fn bn_to_goldilocks<F: RichField>(input: &[BN254Fr]) -> Vec<F> {
     // Goldilocks order
     let r: BigUint = BigUint::from(GoldilocksField::ORDER);
@@ -183,10 +194,14 @@ fn bn_to_goldilocks<F: RichField>(input: &[BN254Fr]) -> Vec<F> {
             let rem_u64 = remainder.to_u64_digits();
 
             // check just for safety:
-            assert_eq!(rem_u64.len(), 1, "Remainder unexpectedly larger than 64 bits.");
-
-            let r64 = rem_u64[0];
-            goldilocks_felts.push(F::from_canonical_u64(r64));
+            if rem_u64.len() > 1 {
+                panic!("Remainder unexpectedly larger than 64 bits.")
+            } else {
+                // `to_u64_digits` returns no digits for zero, so push an explicit
+                // zero limb; every BN254 element must yield exactly 3 limbs.
+                let r64 = rem_u64.first().copied().unwrap_or(0);
+                goldilocks_felts.push(F::from_canonical_u64(r64));
+            }
 
             // Update big to the quotient for the next remainder.
             big = quotient;
@@ -195,8 +208,15 @@
     goldilocks_felts
 }
 
-/// converts a vec of goldilocks to bn254
-/// takes 7 goldilocks and converts to 2 bn254
+
+/// Converts a vec of Goldilocks elements into BN254 elements.
+/// - Packs `7` consecutive `u64` values into `2` BN254 field elements.
+/// - If the total number of Goldilocks elements is not a multiple of 7, we
+///   zero-pad the last chunk up to 7. That chunk still produces 2 BN254 field elements.
+/// - Returns: A `Vec<BN254Fr>`.
+///
+/// **Note**: This is used for packing a sequence of 64-bit words into
+/// BN254 in a safe way. It is NOT the inverse of `bn_to_goldilocks`.
 fn goldilocks_to_bn<F: RichField>(input: &Vec<F>) -> Vec<BN254Fr> {
     let u64s: Vec<u64> = input.iter().map(|x| x.to_canonical_u64()).collect();
     let l = u64s.len();
@@ -331,6 +351,8 @@ mod tests {
     use super::*;
     use ark_bn254::Fr as BN254Fr;
     use ark_ff::{One, Zero};
+    use ark_std::{test_rng, UniformRand};
+    use plonky2_field::types::Field;
 
     /// Test converting a BN254 element to bytes and back.
    #[test]
@@ -361,5 +383,49 @@
         assert_eq!(zero, zero_back, "Zero did not roundtrip correctly");
         assert_eq!(one, one_back, "One did not roundtrip correctly");
     }
+
+    /// Test that bn_to_goldilocks produces exactly 3 Goldilocks per BN254.
+    #[test]
+    fn test_bn_to_goldilocks_three_remainders() {
+        // We'll test random BN254 elements to ensure no overflow panic.
+        let num_tests = 1000;
+        let mut bn_vec = Vec::with_capacity(num_tests);
+        for _ in 0..num_tests {
+            // A random BN254 field element
+            let fe = BN254Fr::rand(&mut test_rng());
+            bn_vec.push(fe);
+        }
+
+        let goldi_vec = bn_to_goldilocks::<GoldilocksField>(&bn_vec);
+        // Should be exactly 3 * num_tests
+        assert_eq!(goldi_vec.len(), 3 * num_tests);
+    }
+
+    /// Test that exactly 7 Goldilocks produce 2 BN254, and leftover is padded for partial groups.
+    #[test]
+    fn test_goldilocks_to_bn_packing() {
+        // 7 exact Goldilocks => 2 BN254
+        let goldis7 = vec![
+            GoldilocksField::from_canonical_u64(1),
+            GoldilocksField::from_canonical_u64(2),
+            GoldilocksField::from_canonical_u64(3),
+            GoldilocksField::from_canonical_u64(4),
+            GoldilocksField::from_canonical_u64(5),
+            GoldilocksField::from_canonical_u64(6),
+            GoldilocksField::from_canonical_u64(7),
+        ];
+        let bn_out = goldilocks_to_bn(&goldis7);
+        assert_eq!(bn_out.len(), 2, "7 Goldilocks should map to 2 BN254 elements");
+
+        // Now test leftover: 8 Goldilocks => we expect 2 BN254 from the first 7, plus
+        // 2 more BN254 for the leftover 1 (padded to 7). So total 4 BN254 elements.
+        let goldis8 = {
+            let mut v = goldis7.clone();
+            v.push(GoldilocksField::from_canonical_u64(123));
+            v
+        };
+        let bn_out_8 = goldilocks_to_bn(&goldis8);
+        assert_eq!(bn_out_8.len(), 4, "8 Goldilocks -> 4 BN254");
+    }
+}
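The random-element test above essentially never exercises a zero remainder. With the zero-remainder guard in `bn_to_goldilocks` pushing an explicit zero limb (see the hunk in this patch), the all-zero input also yields exactly 3 limbs per element; a sketch of such an edge-case test, not part of the patch:

    /// Zero has all-zero remainders; it must still produce exactly 3 limbs.
    #[test]
    fn test_bn_to_goldilocks_zero() {
        let goldi = bn_to_goldilocks::<GoldilocksField>(&[BN254Fr::zero()]);
        assert_eq!(goldi.len(), 3, "zero should still yield 3 Goldilocks limbs");
        assert!(goldi.iter().all(|g| *g == GoldilocksField::ZERO));
    }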