mirror of
https://github.com/logos-storage/plonky2.git
synced 2026-01-02 13:53:07 +00:00
add support for Poseidon2 BN254
This commit is contained in:
parent
15bb9599f3
commit
eeeb99f546
@ -35,6 +35,8 @@ unroll = { workspace = true }
|
||||
web-time = { version = "1.0.0", optional = true }
|
||||
strum = "0.26"
|
||||
strum_macros = "0.26"
|
||||
rust-bn254-hash = {git = "https://github.com/codex-storage/rust-bn254-hash.git"}
|
||||
ark-serialize = {version = "0.5.0"}
|
||||
|
||||
# Local dependencies
|
||||
plonky2_field = { version = "1.0.0", path = "../field", default-features = false }
|
||||
|
||||
73
plonky2/examples/poseidon2_bn254_example.rs
Normal file
73
plonky2/examples/poseidon2_bn254_example.rs
Normal file
@ -0,0 +1,73 @@
|
||||
use std::fs;
|
||||
use anyhow::Result;
|
||||
use plonky2::field::types::Field;
|
||||
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
|
||||
use plonky2::plonk::circuit_builder::CircuitBuilder;
|
||||
use plonky2::plonk::circuit_data::CircuitConfig;
|
||||
use plonky2::plonk::config::{GenericConfig, Poseidon2BN254Config};
|
||||
use plonky2::plonk::prover::ProverOptions;
|
||||
use plonky2::plonk::verifier::{VerifierOptions, HashStatisticsPrintLevel};
|
||||
|
||||
/// An example of using Plonky2 to prove a statement of the form
|
||||
/// "I know the 100th element of the Fibonacci sequence, starting with constants a and b."
|
||||
/// When a == 0 and b == 1, this is proving knowledge of the 100th (standard) Fibonacci number.
|
||||
fn main() -> Result<()> {
|
||||
const D: usize = 2;
|
||||
type C = Poseidon2BN254Config;
|
||||
type F = <C as GenericConfig<D>>::F;
|
||||
|
||||
let config = CircuitConfig::standard_recursion_config();
|
||||
let mut builder = CircuitBuilder::<F, D>::new(config);
|
||||
|
||||
// The arithmetic circuit.
|
||||
let initial_a = builder.add_virtual_target();
|
||||
let initial_b = builder.add_virtual_target();
|
||||
let mut prev_target = initial_a;
|
||||
let mut cur_target = initial_b;
|
||||
for _ in 0..99 {
|
||||
let temp = builder.add(prev_target, cur_target);
|
||||
prev_target = cur_target;
|
||||
cur_target = temp;
|
||||
}
|
||||
|
||||
// Public inputs are the two initial values (provided below) and the result (which is generated).
|
||||
builder.register_public_input(initial_a);
|
||||
builder.register_public_input(initial_b);
|
||||
builder.register_public_input(cur_target);
|
||||
|
||||
// Provide initial values.
|
||||
let mut pw = PartialWitness::new();
|
||||
pw.set_target(initial_a, F::ZERO)?;
|
||||
pw.set_target(initial_b, F::ONE)?;
|
||||
|
||||
let data = builder.build::<C>();
|
||||
|
||||
let prover_opts = ProverOptions {
|
||||
export_witness: Some(String::from("fibonacci_witness.json")),
|
||||
print_hash_statistics: HashStatisticsPrintLevel::Info,
|
||||
};
|
||||
|
||||
let proof = data.prove_with_options(pw, &prover_opts)?;
|
||||
|
||||
// serialize circuit into JSON
|
||||
// let common_circuit_data_serialized = serde_json::to_string(&data.common ).unwrap();
|
||||
// let verifier_only_circuit_data_serialized = serde_json::to_string(&data.verifier_only).unwrap();
|
||||
// let proof_serialized = serde_json::to_string(&proof ).unwrap();
|
||||
// fs::write("fibonacci_common_k.json" , common_circuit_data_serialized) .expect("Unable to write file");
|
||||
// fs::write("fibonacci_vkey_k.json" , verifier_only_circuit_data_serialized).expect("Unable to write file");
|
||||
// fs::write("fibonacci_proof_k.json" , proof_serialized) .expect("Unable to write file");
|
||||
|
||||
// println!("const sigma: {:?}", data.verifier_only.constants_sigmas_cap);
|
||||
// println!("circ digest: {:?}", data.verifier_only.circuit_digest);
|
||||
|
||||
println!(
|
||||
"100th Fibonacci number mod |F| (starting with {}, {}) is: {}",
|
||||
proof.public_inputs[0], proof.public_inputs[1], proof.public_inputs[2]
|
||||
);
|
||||
|
||||
let verifier_opts = VerifierOptions {
|
||||
print_hash_statistics: HashStatisticsPrintLevel::Summary,
|
||||
};
|
||||
data.verify_with_options(proof, &verifier_opts)
|
||||
|
||||
}
|
||||
@ -11,3 +11,4 @@ pub mod merkle_tree;
|
||||
pub mod path_compression;
|
||||
pub mod poseidon;
|
||||
pub mod poseidon_goldilocks;
|
||||
pub mod poseidon2_bn254;
|
||||
|
||||
157
plonky2/src/hash/poseidon2_bn254.rs
Normal file
157
plonky2/src/hash/poseidon2_bn254.rs
Normal file
@ -0,0 +1,157 @@
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::{vec, vec::Vec};
|
||||
use core::mem::size_of;
|
||||
|
||||
use rust_bn254_hash::hash::Hash;
|
||||
use crate::hash::hash_types::{BytesHash, RichField};
|
||||
use crate::hash::hashing::PlonkyPermutation;
|
||||
use crate::plonk::config::Hasher;
|
||||
use rust_bn254_hash::sponge::{sponge_u64_pad, sponge_u64_no_pad};
|
||||
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
|
||||
|
||||
/// Number of state elements exposed per squeeze (see `squeeze`, which returns
/// the first `SPONGE_RATE` elements of the state).
pub const SPONGE_RATE: usize = 8;
/// Number of state elements reserved beyond the rate — presumably the sponge
/// capacity portion; confirm against the BN254 sponge parameters.
pub const SPONGE_CAPACITY: usize = 4;
/// Total sponge state width, in field elements.
pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;
|
||||
|
||||
/// Sponge state for the Poseidon2 BN254 permutation, holding `SPONGE_WIDTH`
/// elements of the Plonky2 field `F`.
#[derive(Copy, Clone, Default, Debug, PartialEq)]
pub struct Poseidon2BN254Permutation<F: RichField> {
    // Full state; the first SPONGE_RATE elements are what `squeeze` exposes.
    state: [F; SPONGE_WIDTH],
}
|
||||
|
||||
// `PartialEq` is derived above; equality on field elements is total, so the
// full `Eq` marker is sound.
impl<F: RichField> Eq for Poseidon2BN254Permutation<F> {}
|
||||
|
||||
/// Expose the full permutation state as a slice of field elements.
impl<F: RichField> AsRef<[F]> for Poseidon2BN254Permutation<F> {
    fn as_ref(&self) -> &[F] {
        &self.state[..]
    }
}
|
||||
|
||||
impl<F: RichField> PlonkyPermutation<F> for Poseidon2BN254Permutation<F> {
|
||||
const RATE: usize = SPONGE_RATE;
|
||||
const WIDTH: usize = SPONGE_WIDTH;
|
||||
|
||||
fn new<I: IntoIterator<Item = F>>(elts: I) -> Self {
|
||||
let mut perm = Self {
|
||||
state: [F::default(); SPONGE_WIDTH],
|
||||
};
|
||||
perm.set_from_iter(elts, 0);
|
||||
perm
|
||||
}
|
||||
|
||||
fn set_elt(&mut self, elt: F, idx: usize) {
|
||||
self.state[idx] = elt;
|
||||
}
|
||||
|
||||
fn set_from_slice(&mut self, elts: &[F], start_idx: usize) {
|
||||
let begin = start_idx;
|
||||
let end = start_idx + elts.len();
|
||||
self.state[begin..end].copy_from_slice(elts);
|
||||
}
|
||||
|
||||
fn set_from_iter<I: IntoIterator<Item = F>>(&mut self, elts: I, start_idx: usize) {
|
||||
for (s, e) in self.state[start_idx..].iter_mut().zip(elts) {
|
||||
*s = e;
|
||||
}
|
||||
}
|
||||
|
||||
fn permute(&mut self) {
|
||||
// convert state of Goldilocks elems to u64
|
||||
let mut state_u64 = vec![0u64; SPONGE_WIDTH ];
|
||||
for i in 0..SPONGE_WIDTH {
|
||||
state_u64[i]
|
||||
= self.state[i].to_canonical_u64();
|
||||
}
|
||||
|
||||
// Create an iterator that repeatedly applies the sponge permutation.
|
||||
let hash_onion = core::iter::repeat_with(|| {
|
||||
// Compute the next hash layer.
|
||||
let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64.clone());
|
||||
// Convert the sponge output to u64.
|
||||
let output = felts_to_u64(hash);
|
||||
// Update the state for the next iteration.
|
||||
state_u64 = output.clone();
|
||||
output.into_iter()
|
||||
}).flatten();
|
||||
|
||||
// Parse field elements from u64 stream, using rejection sampling such that words that don't
|
||||
// fit in F are ignored.
|
||||
let new_state: Vec<F> = hash_onion
|
||||
.filter(|&word| word < F::ORDER)
|
||||
.map(F::from_canonical_u64)
|
||||
.take(SPONGE_WIDTH)
|
||||
.collect();
|
||||
// update the state
|
||||
self.state = new_state.try_into().expect("State length mismatch");
|
||||
}
|
||||
|
||||
fn squeeze(&self) -> &[F] {
|
||||
&self.state[..Self::RATE]
|
||||
}
|
||||
}
|
||||
|
||||
/// Digest size in bytes (the serialized sponge output is truncated to this).
const N: usize = 32;

/// Poseidon2 hash function over the BN254 scalar field.
///
/// Hashes Goldilocks field elements via the `rust-bn254-hash` sponge and
/// truncates the serialized digest to `N` bytes.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Poseidon2BN254;
|
||||
impl<F: RichField> Hasher<F> for Poseidon2BN254 {
|
||||
const HASH_SIZE: usize = N;
|
||||
type Hash = BytesHash<N>;
|
||||
type Permutation = Poseidon2BN254Permutation<F>;
|
||||
|
||||
fn hash_no_pad(input: &[F]) -> Self::Hash {
|
||||
let mut state_u64 = vec![0u64; input.len() ];
|
||||
for i in 0..input.len() {
|
||||
state_u64[i]
|
||||
= input[i].to_canonical_u64();
|
||||
}
|
||||
let mut arr = [0; N];
|
||||
let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64);
|
||||
let hash_bytes = felts_to_bytes(hash);
|
||||
arr.copy_from_slice(&hash_bytes[..N]);
|
||||
BytesHash(arr)
|
||||
}
|
||||
|
||||
fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
|
||||
let mut input_bytes = vec![0; N * 2];
|
||||
input_bytes[0..N].copy_from_slice(&left.0);
|
||||
input_bytes[N..].copy_from_slice(&right.0);
|
||||
let mut arr = [0; N];
|
||||
let state_u64: Vec<u64> = input_bytes
|
||||
.chunks_exact(8)
|
||||
.map(|chunk| u64::from_be_bytes(chunk.try_into().unwrap()))
|
||||
.collect();
|
||||
|
||||
let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64);
|
||||
let hash_bytes = felts_to_bytes(hash);
|
||||
arr.copy_from_slice(&hash_bytes[..N]);
|
||||
BytesHash(arr)
|
||||
}
|
||||
}
|
||||
|
||||
fn felts_to_bytes<E>(f: E) -> Vec<u8> where
|
||||
E: CanonicalSerialize
|
||||
{
|
||||
let mut bytes = Vec::new();
|
||||
f.serialize_uncompressed(&mut bytes).expect("serialization failed");
|
||||
bytes
|
||||
}
|
||||
|
||||
fn bytes_to_felts<E>(bytes: &[u8]) -> E where
|
||||
E: CanonicalDeserialize
|
||||
{
|
||||
let fr_res = E::deserialize_uncompressed(bytes).unwrap();
|
||||
fr_res
|
||||
}
|
||||
|
||||
fn felts_to_u64<E>(f: E) -> Vec<u64>
|
||||
where
|
||||
E: CanonicalSerialize,
|
||||
{
|
||||
let mut bytes = Vec::new();
|
||||
f.serialize_uncompressed(&mut bytes)
|
||||
.expect("serialization failed");
|
||||
bytes
|
||||
.chunks_exact(size_of::<u64>())
|
||||
.map(|chunk| u64::from_le_bytes(chunk.try_into().unwrap()))
|
||||
.collect()
|
||||
}
|
||||
@ -19,6 +19,7 @@ use crate::field::goldilocks_field::GoldilocksField;
|
||||
use crate::hash::hash_types::{HashOut, RichField};
|
||||
use crate::hash::hashing::PlonkyPermutation;
|
||||
use crate::hash::keccak::KeccakHash;
|
||||
use crate::hash::poseidon2_bn254::Poseidon2BN254;
|
||||
use crate::hash::poseidon::PoseidonHash;
|
||||
use crate::iop::target::{BoolTarget, Target};
|
||||
use crate::plonk::circuit_builder::CircuitBuilder;
|
||||
@ -124,3 +125,13 @@ impl GenericConfig<2> for KeccakGoldilocksConfig {
|
||||
type Hasher = KeccakHash<25>;
|
||||
type InnerHasher = PoseidonHash;
|
||||
}
|
||||
|
||||
/// Configuration using Poseidon2BN254 over the Goldilocks field.
#[derive(Debug, Copy, Clone, Default, Eq, PartialEq)]
pub struct Poseidon2BN254Config;
impl GenericConfig<2> for Poseidon2BN254Config {
    type F = GoldilocksField;
    type FE = QuadraticExtension<Self::F>;
    // Outer hashing uses the Poseidon2 BN254 hasher.
    type Hasher = Poseidon2BN254;
    // NOTE(review): InnerHasher stays the Goldilocks PoseidonHash, matching
    // the other configs in this file — confirm that is the intent here.
    type InnerHasher = PoseidonHash;
}
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user