impl duplex and hasher for GenericField

This commit is contained in:
M Alghazwi 2025-02-27 10:46:39 +01:00
parent eeeb99f546
commit 972c3c0645
19 changed files with 728 additions and 248 deletions

View File

@ -37,6 +37,8 @@ strum = "0.26"
strum_macros = "0.26"
rust-bn254-hash = {git = "https://github.com/codex-storage/rust-bn254-hash.git"}
ark-serialize = {version = "0.5.0"}
ark-bn254 = "0.5.0"
ark-ff = "0.5.0"
# Local dependencies
plonky2_field = { version = "1.0.0", path = "../field", default-features = false }

View File

@ -56,9 +56,9 @@ fn main() -> Result<()> {
// fs::write("fibonacci_common_k.json" , common_circuit_data_serialized) .expect("Unable to write file");
// fs::write("fibonacci_vkey_k.json" , verifier_only_circuit_data_serialized).expect("Unable to write file");
// fs::write("fibonacci_proof_k.json" , proof_serialized) .expect("Unable to write file");
// println!("const sigma: {:?}", data.verifier_only.constants_sigmas_cap);
// println!("circ digest: {:?}", data.verifier_only.circuit_digest);
// println!("proof part: {:?}", proof.proof.wires_cap.0);
println!(
"100th Fibonacci number mod |F| (starting with {}, {}) is: {}",
@ -69,5 +69,4 @@ fn main() -> Result<()> {
print_hash_statistics: HashStatisticsPrintLevel::Summary,
};
data.verify_with_options(proof, &verifier_opts)
}

View File

@ -13,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap;
use crate::iop::challenger::{Challenger, RecursiveChallenger};
use crate::iop::target::Target;
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, Hasher};
impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
pub fn observe_openings<const D: usize>(&mut self, openings: &FriOpenings<F, D>)
@ -57,8 +57,10 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
if let Some(step_count) = max_num_query_steps {
let cap_len = (1 << config.cap_height) * NUM_HASH_OUT_ELTS;
let zero_cap = vec![F::ZERO; cap_len];
let zero_cap_felts: Vec<GenericField<F>> = zero_cap.into_iter().map(GenericField::Goldilocks).collect();
for _ in commit_phase_merkle_caps.len()..step_count {
self.observe_elements(&zero_cap);
// self.observe_elements(&zero_cap);
self.observe_elements(&zero_cap_felts);
self.get_extension_challenge::<D>();
}
}
@ -73,7 +75,7 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
}
}
self.observe_element(pow_witness);
self.observe_element(GenericField::Goldilocks(pow_witness));
let fri_pow_response = self.get_challenge();
let fri_query_indices = (0..num_fri_queries)

View File

@ -14,7 +14,7 @@ use crate::hash::hash_types::{RichField, NUM_HASH_OUT_ELTS};
use crate::hash::hashing::*;
use crate::hash::merkle_tree::MerkleTree;
use crate::iop::challenger::Challenger;
use crate::plonk::config::GenericConfig;
use crate::plonk::config::{GenericConfig, GenericField};
use crate::plonk::plonk_common::reduce_with_powers;
use crate::plonk::prover::ProverOptions;
use crate::plonk::verifier::HashStatisticsPrintLevel;
@ -136,8 +136,9 @@ fn fri_committed_trees<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>,
if let Some(step_count) = max_num_query_steps {
let cap_len = (1 << fri_params.config.cap_height) * NUM_HASH_OUT_ELTS;
let zero_cap = vec![F::ZERO; cap_len];
let zero_cap_felts: Vec<GenericField<F>> = zero_cap.into_iter().map(GenericField::Goldilocks).collect();
for _ in fri_params.reduction_arity_bits.len()..step_count {
challenger.observe_elements(&zero_cap);
challenger.observe_elements(&zero_cap_felts);
challenger.get_extension_challenge::<D>();
}
}
@ -171,45 +172,12 @@ pub(crate) fn fri_proof_of_work<
) -> F {
let min_leading_zeros = config.proof_of_work_bits + (64 - F::order().bits()) as u32;
// The easiest implementation would be repeatedly clone our Challenger. With each clone, we'd
// observe an incrementing PoW witness, then get the PoW response. If it contained sufficient
// leading zeros, we'd end the search, and store this clone as our new challenger.
//
// However, performance is critical here. We want to avoid cloning Challenger, particularly
// since it stores vectors, which means allocations. We'd like a more compact state to clone.
//
// We know that a duplex will be performed right after we send the PoW witness, so we can ignore
// any output_buffer, which will be invalidated. We also know
// input_buffer.len() < H::Permutation::WIDTH, an invariant of Challenger.
//
// We separate the duplex operation into two steps, one which can be performed now, and the
other which depends on the PoW witness candidate. The first step is to overwrite our sponge
// state with any inputs (excluding the PoW witness candidate). The second step is to overwrite
// one more element of our sponge state with the candidate, then apply the permutation,
// obtaining our duplex's post-state which contains the PoW response.
let mut duplex_intermediate_state = challenger.sponge_state;
let witness_input_pos = challenger.input_buffer.len();
duplex_intermediate_state.set_from_iter(challenger.input_buffer.clone(), 0);
// println!("duplex_intermediate_state = {:?}", duplex_intermediate_state);
let pow_witness = (0..=F::NEG_ONE.to_canonical_u64())
.into_par_iter()
.find_any(|&candidate| {
let mut duplex_state = duplex_intermediate_state;
duplex_state.set_elt(F::from_canonical_u64(candidate), witness_input_pos);
duplex_state.permute();
let pow_response = duplex_state.squeeze().iter().last().unwrap();
let leading_zeros = pow_response.to_canonical_u64().leading_zeros();
leading_zeros >= min_leading_zeros
})
.map(F::from_canonical_u64)
.expect("Proof of work failed. This is highly unlikely!");
let pow_witness = challenger.grind(min_leading_zeros);
// println!("pow_witness = {:?}",pow_witness);
// Recompute pow_response using our normal Challenger code, and make sure it matches.
challenger.observe_element(pow_witness);
challenger.observe_element(GenericField::Goldilocks(pow_witness));
let pow_response = challenger.get_challenge();
let leading_zeros = pow_response.to_canonical_u64().leading_zeros();
assert!(leading_zeros >= min_leading_zeros);

View File

@ -10,7 +10,7 @@ use crate::hash::merkle_proofs::MerkleProof;
use crate::hash::merkle_tree::{
capacity_up_to_mut, fill_digests_buf, merkle_tree_prove, MerkleCap,
};
use crate::plonk::config::{GenericHashOut, Hasher};
use crate::plonk::config::{GenericField, GenericHashOut, Hasher};
use crate::util::log2_strict;
#[derive(Clone, Debug, Default, Eq, PartialEq)]
@ -56,9 +56,20 @@ impl<F: RichField, H: Hasher<F>> BatchMerkleTree<F, H> {
let mut digests_buf_pos = 0;
let mut cap = vec![];
let dummy_leaves = vec![vec![F::ZERO]; 1 << cap_height];
leaves.push(dummy_leaves);
for window in leaves.windows(2) {
let dummy_leaves_felts = vec![vec![GenericField::Goldilocks(F::ZERO)]; 1 << cap_height];
let mut leaves_felts: Vec<Vec<Vec<GenericField<F>>>> = leaves.clone().into_iter()
.map(|matrix| {
matrix.into_iter()
.map(|vec| {
vec.into_iter()
.map(|f| GenericField::Goldilocks(f))
.collect()
})
.collect()
})
.collect();
leaves_felts.push(dummy_leaves_felts);
for window in leaves_felts.windows(2) {
let cur = &window[0];
let next = &window[1];
@ -82,7 +93,7 @@ impl<F: RichField, H: Hasher<F>> BatchMerkleTree<F, H> {
);
} else {
// The remaining leaf layers
let new_leaves: Vec<Vec<F>> = cap
let new_leaves: Vec<Vec<GenericField<F>>> = cap
.iter()
.enumerate()
.map(|(i, cap_hash)| {
@ -118,8 +129,6 @@ impl<F: RichField, H: Hasher<F>> BatchMerkleTree<F, H> {
digests.set_len(num_digests);
}
// remove dummy leaves
leaves.pop();
Self {
leaves,

121
plonky2/src/hash/duplex.rs Normal file
View File

@ -0,0 +1,121 @@
use plonky2_maybe_rayon::ParallelIterator;
use plonky2_maybe_rayon::rayon::iter::IntoParallelIterator;
use crate::hash::hash_types::RichField;
use crate::hash::hashing::PlonkyPermutation;
use crate::plonk::config::{GenericField, Hasher, HasherField};
use plonky2_field::types::PrimeField64;
#[derive(Debug, Clone)]
pub enum DuplexState<F: RichField, H: Hasher<F>> {
Absorbing {
state: H::Permutation,
buf: Vec<GenericField<F>>, // Buffer for absorbing inputs.
},
Squeezing {
state: H::Permutation,
buf: Vec<F>, // Buffer holding squeezed outputs.
},
}
impl<F: RichField, H: Hasher<F>> DuplexState<F,H> {
/// Creates a new duplex state in absorbing mode with an all-zero initial state.
pub fn new() -> Self {
DuplexState::Absorbing {
state: H::Permutation::new(core::iter::repeat(H::HF::get_zero())),
buf: Vec::new(),
}
}
/// Absorbs a generic field element.
/// In absorbing mode: the element is appended to the input buffer.
/// In squeezing mode: any pending outputs are discarded and the state switches back to absorbing.
pub fn absorb(&mut self, element: GenericField<F>) {
match self {
DuplexState::Absorbing { buf, .. } => {
buf.push(element);
}
DuplexState::Squeezing { state, .. } => {
let mut buf = Vec::new();
buf.push(element);
*self = DuplexState::Absorbing {
state: state.clone(),
buf,
};
}
}
}
/// Squeezes out a single challenge element (a Goldilocks field element).
/// In absorbing mode: the buffered inputs are absorbed by calling the sponge, the state switches
/// to `Squeezing`, and the output buffer is filled with Goldilocks elements.
/// In squeezing mode: elements are popped from the buffer; if the buffer is empty, we permute and refill it.
pub fn squeeze(&mut self) -> F {
match self {
DuplexState::Absorbing { state, buf, .. } => {
let input: Vec<GenericField<F>> = buf.drain(..).collect();
H::sponge(state, input);
let out_buf: Vec<F> = Self::squeeze_f(state);
// switch to squeezing mode
*self = DuplexState::Squeezing {
state: state.clone(),
buf: out_buf,
};
// recurse now that we are in squeezing mode.
self.squeeze()
}
DuplexState::Squeezing { state, buf, .. } => {
if buf.is_empty() {
// If the buffer is empty, permute to refill it.
state.permute();
*buf = Self::squeeze_f(state);
}
buf.pop().expect("Output buffer should not be empty")
}
}
}
/// Squeezes Goldilocks field elements out of the sponge state.
fn squeeze_f(state: &mut H::Permutation) -> Vec<F> {
let out = H::squeeze_goldilocks(state);
assert!(!out.is_empty());
out
}
/// Grinds for a proof-of-work witness; moved here from the FRI prover.
/// It handles both modes (`Absorbing` and `Squeezing`).
pub fn grind(&mut self, min_leading_zeros: u32) -> F {
match self {
DuplexState::Absorbing { state, buf, .. } => {
let duplex_intermediate_state = state.clone();
let buf_felts: Vec<GenericField<F>> = buf.clone();
Self::grind_helper(duplex_intermediate_state, buf_felts, min_leading_zeros)
}
DuplexState::Squeezing { state, .. } => {
let duplex_intermediate_state = state.clone();
let buf_felts = vec![];
Self::grind_helper(duplex_intermediate_state, buf_felts, min_leading_zeros)
}
}
}
fn grind_helper(state: H::Permutation, input: Vec<GenericField<F>>, min_leading_zeros: u32) -> F {
let pow_witness = (0..=F::NEG_ONE.to_canonical_u64())
.into_par_iter()
.find_any(|&candidate| {
let mut duplex_state = state.clone();
let mut sponge_input = input.clone();
sponge_input.push(GenericField::Goldilocks(F::from_canonical_u64(candidate)));
H::sponge(&mut duplex_state, sponge_input);
let temp_buf = Self::squeeze_f(&mut duplex_state);
let pow_response = temp_buf.iter().last().unwrap();
let leading_zeros = PrimeField64::to_canonical_u64(pow_response).leading_zeros();
leading_zeros >= min_leading_zeros
})
.map(F::from_canonical_u64)
.expect("Proof of work failed. This is highly unlikely!");
pow_witness
}
}
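For orientation, a minimal usage sketch of the new duplex state, mirroring how the refactored Challenger drives it below; the module paths and the example function are assumptions about this fork's layout, not part of the commit:

use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use plonky2::hash::duplex::DuplexState;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::plonk::config::GenericField;

fn duplex_example() -> GoldilocksField {
    // Start in absorbing mode with an all-zero sponge state.
    let mut duplex = DuplexState::<GoldilocksField, PoseidonHash>::new();
    // Absorbed elements are only buffered; no permutation happens yet.
    for i in 0..4u64 {
        duplex.absorb(GenericField::Goldilocks(GoldilocksField::from_canonical_u64(i)));
    }
    // The first squeeze runs the sponge over the buffered inputs,
    // switches to squeezing mode, and pops one Goldilocks challenge.
    duplex.squeeze()
}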

View File

@ -10,13 +10,67 @@ use crate::field::goldilocks_field::GoldilocksField;
use crate::field::types::{Field, PrimeField64, Sample};
use crate::hash::poseidon::Poseidon;
use crate::iop::target::Target;
use crate::plonk::config::GenericHashOut;
use crate::plonk::config::{GenericField, GenericHashOut};
use ark_bn254::Fr as BN254Fr;
use crate::hash::poseidon2_bn254::{bytes_to_felts, felts_to_bytes};
/// A prime order field with the features we need to use it as a base field in our argument system.
pub trait RichField: PrimeField64 + Poseidon {}
impl RichField for GoldilocksField {}
/// Hash digest for the BN254 field; contains a single Fr element.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct BN254HashOut{
pub element: BN254Fr
}
/// Serializes the BN254 hash digest; uses Arkworks serialization.
impl Serialize for BN254HashOut {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer {
let element_to_bytes = felts_to_bytes(&self.element);
serializer.serialize_bytes(&element_to_bytes)
}
}
/// Deserializes the BN254 hash digest; uses Arkworks serialization.
impl<'de> Deserialize<'de> for BN254HashOut {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de> {
let element_as_bytes = <[u8; 32]>::deserialize(deserializer)?;
let deserialized_element = bytes_to_felts(&element_as_bytes);
Ok(Self {
element: deserialized_element,
})
}
}
/// Implements `GenericHashOut` for the BN254 hash digest.
/// `F` here is the Goldilocks field, not the BN254 field.
impl<F: RichField> GenericHashOut<F> for BN254HashOut {
fn to_bytes(&self) -> Vec<u8> {
felts_to_bytes(&self.element)
}
fn from_bytes(bytes: &[u8]) -> Self {
assert_eq!(bytes.len(), 32);
BN254HashOut{
element: bytes_to_felts(bytes)
}
}
fn to_vec(&self) -> Vec<GenericField<F>> {
vec![GenericField::BN254(self.element.clone())]
}
}
pub const NUM_HASH_OUT_ELTS: usize = 4;
/// Represents a ~256 bit hash output.
@ -103,8 +157,12 @@ impl<F: RichField> GenericHashOut<F> for HashOut<F> {
}
}
fn to_vec(&self) -> Vec<F> {
self.elements.to_vec()
fn to_vec(&self) -> Vec<GenericField<F>> {
self.elements
.iter()
.copied()
.map(GenericField::<F>::Goldilocks)
.collect()
}
}
@ -181,14 +239,15 @@ impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
Self(bytes.try_into().unwrap())
}
fn to_vec(&self) -> Vec<F> {
fn to_vec(&self) -> Vec<GenericField<F>> {
self.0
// Chunks of 7 bytes since 8 bytes would allow collisions.
.chunks(7)
.map(|bytes| {
let mut arr = [0; 8];
let mut arr = [0u8; 8];
arr[..bytes.len()].copy_from_slice(bytes);
F::from_canonical_u64(u64::from_le_bytes(arr))
let raw = F::from_canonical_u64(u64::from_le_bytes(arr));
GenericField::<F>::Goldilocks(raw)
})
.collect()
}

View File

@ -7,7 +7,7 @@ use keccak_hash::keccak;
use crate::hash::hash_types::{BytesHash, RichField};
use crate::hash::hashing::PlonkyPermutation;
use crate::plonk::config::Hasher;
use crate::plonk::config::{GenericField, GenericHashOut, Hasher};
use crate::util::serialization::Write;
pub const SPONGE_RATE: usize = 8;
@ -102,19 +102,76 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct KeccakHash<const N: usize>;
impl<F: RichField, const N: usize> Hasher<F> for KeccakHash<N> {
type HF = F;
const HASH_SIZE: usize = N;
type Hash = BytesHash<N>;
type Permutation = KeccakPermutation<F>;
fn hash_no_pad(input: &[F]) -> Self::Hash {
fn hash_pad(input: &[GenericField<F>]) -> Self::Hash {
let mut padded_input = input.to_vec();
padded_input.push(GenericField::Goldilocks(F::ONE));
while (padded_input.len() + 1) % Self::Permutation::RATE != 0 {
padded_input.push(GenericField::Goldilocks(F::ZERO));
}
padded_input.push(GenericField::Goldilocks(F::ONE));
Self::hash_no_pad(&padded_input)
}
fn hash_or_noop(inputs: &[GenericField<F>]) -> Self::Hash {
// Use the declared hash size so this stays correct for any `N`.
let hash_size = Self::HASH_SIZE;
if inputs.len() * 8 <= hash_size {
let mut inputs_bytes = vec![0u8; hash_size];
for i in 0..inputs.len() {
let goldilocks_felt = match inputs[i].clone() {
GenericField::Goldilocks(v) => { v }
GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")}
};
inputs_bytes[i * 8..(i + 1) * 8]
.copy_from_slice(&goldilocks_felt.to_canonical_u64().to_le_bytes());
}
<BytesHash::<N> as GenericHashOut<F>>::from_bytes(&inputs_bytes)
} else {
Self::hash_no_pad(inputs)
}
}
fn hash_no_pad(input: &[GenericField<F>]) -> Self::Hash {
let mut buffer = Vec::with_capacity(input.len());
buffer.write_field_vec(input).unwrap();
let mut goldilocks_felts = vec![];
for e in input {
// only accept goldilocks (for now!)
match e {
GenericField::Goldilocks(v) => {goldilocks_felts.push(*v)}
GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")}
}
}
buffer.write_field_vec(&goldilocks_felts).unwrap();
let mut arr = [0; N];
let hash_bytes = keccak(buffer).0;
arr.copy_from_slice(&hash_bytes[..N]);
BytesHash(arr)
}
fn sponge(state: &mut Self::Permutation, input: Vec<GenericField<F>>) {
let mut goldilocks_felts = vec![];
for e in input {
//only accept goldilocks (for now!)
match e {
GenericField::Goldilocks(v) => {goldilocks_felts.push(v)}
GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")}
}
}
for chunk in goldilocks_felts.chunks(Self::Permutation::RATE) {
state.set_from_slice(chunk, 0);
state.permute();
}
}
fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec<F> {
state.squeeze().to_vec()
}
fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
let mut v = vec![0; N * 2];
v[0..N].copy_from_slice(&left.0);

View File

@ -13,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap;
use crate::iop::target::{BoolTarget, Target};
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::circuit_data::VerifierCircuitTarget;
use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher};
use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher};
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
#[serde(bound = "")]
@ -77,7 +77,12 @@ pub fn verify_batch_merkle_proof_to_cap<F: RichField, H: Hasher<F>>(
proof: &MerkleProof<F, H>,
) -> Result<()> {
assert_eq!(leaf_data.len(), leaf_heights.len());
let mut current_digest = H::hash_or_noop(&leaf_data[0]);
let leaf_data_felts: Vec<Vec<GenericField<F>>> = leaf_data.into_iter()
.map(|inner| {
inner.into_iter().map(|f| GenericField::Goldilocks(f.clone())).collect()
})
.collect();
let mut current_digest = H::hash_or_noop(&leaf_data_felts[0]);
let mut current_height = leaf_heights[0];
let mut leaf_data_index = 1;
for &sibling_digest in &proof.siblings {
@ -92,7 +97,7 @@ pub fn verify_batch_merkle_proof_to_cap<F: RichField, H: Hasher<F>>(
if leaf_data_index < leaf_heights.len() && current_height == leaf_heights[leaf_data_index] {
let mut new_leaves = current_digest.to_vec();
new_leaves.extend_from_slice(&leaf_data[leaf_data_index]);
new_leaves.extend_from_slice(&leaf_data_felts[leaf_data_index]);
current_digest = H::hash_or_noop(&new_leaves);
leaf_data_index += 1;
}

View File

@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
use crate::hash::hash_types::RichField;
use crate::hash::merkle_proofs::MerkleProof;
use crate::plonk::config::{GenericHashOut, Hasher};
use crate::plonk::config::{GenericField, GenericHashOut, Hasher};
use crate::util::log2_strict;
/// The Merkle cap of height `h` of a Merkle tree is the `h`-th layer (from the root) of the tree.
@ -37,7 +37,7 @@ impl<F: RichField, H: Hasher<F>> MerkleCap<F, H> {
log2_strict(self.len())
}
pub fn flatten(&self) -> Vec<F> {
pub fn flatten(&self) -> Vec<GenericField<F>> {
self.0.iter().flat_map(|&h| h.to_vec()).collect()
}
}
@ -85,7 +85,7 @@ pub(crate) fn capacity_up_to_mut<T>(v: &mut Vec<T>, len: usize) -> &mut [MaybeUn
pub(crate) fn fill_subtree<F: RichField, H: Hasher<F>>(
digests_buf: &mut [MaybeUninit<H::Hash>],
leaves: &[Vec<F>],
leaves: &[Vec<GenericField<F>>],
) -> H::Hash {
assert_eq!(leaves.len(), digests_buf.len() / 2 + 1);
if digests_buf.is_empty() {
@ -115,7 +115,7 @@ pub(crate) fn fill_subtree<F: RichField, H: Hasher<F>>(
pub(crate) fn fill_digests_buf<F: RichField, H: Hasher<F>>(
digests_buf: &mut [MaybeUninit<H::Hash>],
cap_buf: &mut [MaybeUninit<H::Hash>],
leaves: &[Vec<F>],
leaves: &[Vec<GenericField<F>>],
cap_height: usize,
) {
// Special case of a tree that's all cap. The usual case will panic because we'll try to split
@ -207,8 +207,12 @@ impl<F: RichField, H: Hasher<F>> MerkleTree<F, H> {
let digests_buf = capacity_up_to_mut(&mut digests, num_digests);
let cap_buf = capacity_up_to_mut(&mut cap, len_cap);
fill_digests_buf::<F, H>(digests_buf, cap_buf, &leaves[..], cap_height);
let leaves_felts: Vec<Vec<GenericField<F>>> = leaves.clone().into_iter()
.map(|inner| {
inner.into_iter().map(|f| GenericField::Goldilocks(f)).collect()
})
.collect();
fill_digests_buf::<F, H>(digests_buf, cap_buf, &leaves_felts[..], cap_height);
unsafe {
// SAFETY: `fill_digests_buf` and `cap` initialized the spare capacity up to
// `num_digests` and `len_cap`, resp.

View File

@ -11,4 +11,5 @@ pub mod merkle_tree;
pub mod path_compression;
pub mod poseidon;
pub mod poseidon_goldilocks;
pub mod poseidon2_bn254;
pub mod duplex;
pub mod poseidon2_bn254;

View File

@ -6,7 +6,7 @@ use num::Integer;
use crate::hash::hash_types::RichField;
use crate::hash::merkle_proofs::MerkleProof;
use crate::plonk::config::Hasher;
use crate::plonk::config::{GenericField, Hasher};
/// Compress multiple Merkle proofs on the same tree by removing redundancy in the Merkle paths.
pub(crate) fn compress_merkle_proofs<F: RichField, H: Hasher<F>>(
@ -68,7 +68,8 @@ pub(crate) fn decompress_merkle_proofs<F: RichField, H: Hasher<F>>(
for (&i, v) in leaves_indices.iter().zip(leaves_data) {
// Observe the leaves.
seen.insert(i + num_leaves, H::hash_or_noop(v));
let v_felts: Vec<GenericField<F>> = v.clone().into_iter().map(GenericField::Goldilocks).collect();
seen.insert(i + num_leaves, H::hash_or_noop(&v_felts));
}
// Iterators over the siblings.

View File

@ -19,7 +19,7 @@ use crate::hash::hashing::{HashUsage, increment_given_hash_counter};
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::{BoolTarget, Target};
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::config::{AlgebraicHasher, Hasher};
use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher};
pub const SPONGE_RATE: usize = 8;
pub const SPONGE_CAPACITY: usize = 4;
@ -875,17 +875,74 @@ impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct PoseidonHash;
impl<F: RichField> Hasher<F> for PoseidonHash {
type HF = F;
const HASH_SIZE: usize = 4 * 8;
type Hash = HashOut<F>;
type Permutation = PoseidonPermutation<F>;
fn hash_no_pad(input: &[F]) -> Self::Hash {
hash_n_to_hash_no_pad::<F, Self::Permutation>(input)
fn hash_no_pad(input: &[GenericField<F>]) -> Self::Hash {
let mut goldilocks_felts = vec![];
for e in input {
// for goldilocks only accept goldilocks (for now!)
match e {
GenericField::Goldilocks(v) => {goldilocks_felts.push(*v)}
GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")}
}
}
hash_n_to_hash_no_pad::<F, Self::Permutation>(&goldilocks_felts)
}
fn hash_pad(input: &[GenericField<F>]) -> Self::Hash {
let mut padded_input = input.to_vec();
padded_input.push(GenericField::Goldilocks(F::ONE));
while (padded_input.len() + 1) % Self::Permutation::RATE != 0 {
padded_input.push(GenericField::Goldilocks(F::ZERO));
}
padded_input.push(GenericField::Goldilocks(F::ONE));
Self::hash_no_pad(&padded_input)
}
fn hash_or_noop(inputs: &[GenericField<F>]) -> Self::Hash {
let hash_size = Self::HASH_SIZE;
if inputs.len() * 8 <= hash_size {
let mut inputs_bytes = vec![0u8; hash_size];
for i in 0..inputs.len() {
// only accept goldilocks (for now!)
let goldilocks_felt = match inputs[i].clone() {
GenericField::Goldilocks(v) => { v }
GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")}
};
inputs_bytes[i * 8..(i + 1) * 8]
.copy_from_slice(&goldilocks_felt.to_canonical_u64().to_le_bytes());
}
Self::Hash::from_bytes(&inputs_bytes)
} else {
Self::hash_no_pad(inputs)
}
}
fn sponge(state: &mut Self::Permutation, input: Vec<GenericField<F>>) {
let mut goldilocks_felts = vec![];
for e in input {
// only accept goldilocks (for now!)
match e {
GenericField::Goldilocks(v) => {goldilocks_felts.push(v)}
GenericField::BN254(_) => {panic!("BN input is not supported for goldilocks hasher")}
}
}
for chunk in goldilocks_felts.chunks(Self::Permutation::RATE) {
state.set_from_slice(chunk, 0);
state.permute();
}
}
fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
compress::<F, Self::Permutation>(left, right)
}
fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec<F> {
state.squeeze().to_vec()
}
}
impl<F: RichField> AlgebraicHasher<F> for PoseidonHash {

View File

@ -1,134 +1,308 @@
#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::fmt::Debug;
use core::mem::size_of;
use rust_bn254_hash::hash::Hash;
use crate::hash::hash_types::{BytesHash, RichField};
use crate::hash::hash_types::{BN254HashOut, RichField};
use crate::hash::hashing::PlonkyPermutation;
use crate::plonk::config::Hasher;
use rust_bn254_hash::sponge::{sponge_u64_pad, sponge_u64_no_pad};
use crate::plonk::config::{GenericField, Hasher};
use rust_bn254_hash::state::State;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_bn254::{Fr as BN254Fr};
use rust_bn254_hash::poseidon2::permutation::permute_inplace as permute_bn254_inplace;
use ark_ff::{BigInt, PrimeField, Zero};
use rust_bn254_hash::hash::Hash;
use rust_bn254_hash::sponge::{sponge_felts_no_pad, sponge_felts_pad};
pub const SPONGE_RATE: usize = 8;
pub const SPONGE_CAPACITY: usize = 4;
pub const SPONGE_RATE: usize = 2;
pub const SPONGE_CAPACITY: usize = 1;
pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;
#[derive(Copy, Clone, Default, Debug, PartialEq)]
pub struct Poseidon2BN254Permutation<F: RichField> {
state: [F; SPONGE_WIDTH],
/// Poseidon2 state with BN254 elements
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]
pub struct Poseidon2BN254Perm {
state: [BN254Fr; SPONGE_WIDTH],
}
impl<F: RichField> Eq for Poseidon2BN254Permutation<F> {}
impl<F: RichField> AsRef<[F]> for Poseidon2BN254Permutation<F> {
fn as_ref(&self) -> &[F] {
/// needed for PlonkyPermutation
impl AsRef<[BN254Fr]> for Poseidon2BN254Perm {
fn as_ref(&self) -> &[BN254Fr] {
&self.state
}
}
impl<F: RichField> PlonkyPermutation<F> for Poseidon2BN254Permutation<F> {
impl PlonkyPermutation<BN254Fr> for Poseidon2BN254Perm {
const RATE: usize = SPONGE_RATE;
const WIDTH: usize = SPONGE_WIDTH;
fn new<I: IntoIterator<Item = F>>(elts: I) -> Self {
fn new<I: IntoIterator<Item = BN254Fr>>(elts: I) -> Self {
let mut perm = Self {
state: [F::default(); SPONGE_WIDTH],
state: [BN254Fr::default(); SPONGE_WIDTH],
};
perm.set_from_iter(elts, 0);
perm
}
fn set_elt(&mut self, elt: F, idx: usize) {
fn set_elt(&mut self, elt: BN254Fr, idx: usize) {
self.state[idx] = elt;
}
fn set_from_slice(&mut self, elts: &[F], start_idx: usize) {
fn set_from_slice(&mut self, elts: &[BN254Fr], start_idx: usize) {
let begin = start_idx;
let end = start_idx + elts.len();
self.state[begin..end].copy_from_slice(elts);
}
fn set_from_iter<I: IntoIterator<Item = F>>(&mut self, elts: I, start_idx: usize) {
fn set_from_iter<I: IntoIterator<Item = BN254Fr>>(&mut self, elts: I, start_idx: usize) {
for (s, e) in self.state[start_idx..].iter_mut().zip(elts) {
*s = e;
}
}
/// Calls the permutation in `rust-bn254-hash`.
/// We could probably refactor the state type to eliminate the conversion in this function.
fn permute(&mut self) {
// convert state of Goldilocks elems to u64
let mut state_u64 = vec![0u64; SPONGE_WIDTH ];
for i in 0..SPONGE_WIDTH {
state_u64[i]
= self.state[i].to_canonical_u64();
}
let mut s = State{
x: self.state[0].clone(),
y: self.state[1].clone(),
z: self.state[2].clone(),
};
// Create an iterator that repeatedly applies the sponge permutation.
let hash_onion = core::iter::repeat_with(|| {
// Compute the next hash layer.
let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64.clone());
// Convert the sponge output to u64.
let output = felts_to_u64(hash);
// Update the state for the next iteration.
state_u64 = output.clone();
output.into_iter()
}).flatten();
permute_bn254_inplace(&mut s);
self.state = [
s.x,
s.y,
s.z,
];
// Parse field elements from u64 stream, using rejection sampling such that words that don't
// fit in F are ignored.
let new_state: Vec<F> = hash_onion
.filter(|&word| word < F::ORDER)
.map(F::from_canonical_u64)
.take(SPONGE_WIDTH)
.collect();
// update the state
self.state = new_state.try_into().expect("State length mismatch");
}
fn squeeze(&self) -> &[F] {
fn squeeze(&self) -> &[BN254Fr] {
&self.state[..Self::RATE]
}
}
const N: usize = 32;
/// Poseidon2 hash function over the BN254 scalar field.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Poseidon2BN254;
impl<F: RichField> Hasher<F> for Poseidon2BN254 {
const HASH_SIZE: usize = N;
type Hash = BytesHash<N>;
type Permutation = Poseidon2BN254Permutation<F>;
type HF = BN254Fr;
const HASH_SIZE: usize = 32;
type Hash = BN254HashOut;
type Permutation = Poseidon2BN254Perm;
fn hash_no_pad(input: &[F]) -> Self::Hash {
let mut state_u64 = vec![0u64; input.len() ];
for i in 0..input.len() {
state_u64[i]
= input[i].to_canonical_u64();
fn hash_no_pad(input: &[GenericField<F>]) -> Self::Hash {
let bn_felts = generic_field_to_bn(input);
let hash = sponge_felts_no_pad(Hash::Poseidon2, bn_felts);
BN254HashOut {
element: hash,
}
let mut arr = [0; N];
let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64);
let hash_bytes = felts_to_bytes(hash);
arr.copy_from_slice(&hash_bytes[..N]);
BytesHash(arr)
}
fn hash_pad(input: &[GenericField<F>]) -> Self::Hash {
let bn_felts = generic_field_to_bn(input);
let hash = sponge_felts_pad(Hash::Poseidon2, bn_felts);
BN254HashOut {
element: hash,
}
}
fn hash_or_noop(inputs: &[GenericField<F>]) -> Self::Hash {
let hash_size = 32;
if check_len_in_bytes(inputs) <= hash_size {
if inputs.len() == 1 {
// if there is one element and it is a BN field element return it.
if let GenericField::BN254(v) = inputs[0].clone() {
return BN254HashOut{element: v};
}
}
}
// TODO: if we get 4 or fewer Goldilocks elements, convert them to a BN254 element and return?
Self::hash_no_pad(inputs)
}
fn sponge(state: &mut Self::Permutation, input: Vec<GenericField<F>>) {
let bn_felts = generic_field_to_bn(&input);
// absorb in overwrite mode
for chunk in bn_felts.chunks(2) {
state.set_from_slice(chunk, 0);
state.permute();
}
}
fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
let mut input_bytes = vec![0; N * 2];
input_bytes[0..N].copy_from_slice(&left.0);
input_bytes[N..].copy_from_slice(&right.0);
let mut arr = [0; N];
let state_u64: Vec<u64> = input_bytes
.chunks_exact(8)
.map(|chunk| u64::from_be_bytes(chunk.try_into().unwrap()))
.collect();
let hash = sponge_u64_no_pad(Hash::Poseidon2, state_u64);
let hash_bytes = felts_to_bytes(hash);
arr.copy_from_slice(&hash_bytes[..N]);
BytesHash(arr)
let mut perm = Self::Permutation::new(core::iter::repeat(BN254Fr::zero()));
perm.set_from_slice(&[left.element], 0);
perm.set_from_slice(&[right.element], 1);
perm.permute();
let out = perm.squeeze();
BN254HashOut {
element: out[0].clone(),
}
}
fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec<F> {
let bn_out = state.squeeze();
let bn_bytes: Vec<u8> = bn_out.iter().flat_map(|e| felts_to_bytes(e)).collect();
let goldilocks_felts: Vec<F> = bytes_to_u64(&bn_bytes).iter().map(|e| F::from_canonical_u64(*e)).collect();
assert!(!goldilocks_felts.is_empty());
goldilocks_felts
}
}
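A hypothetical smoke-test sketch for the new BN254 hasher, showing that Goldilocks transcript elements are packed into BN254 elements before being absorbed; the test name and placement are assumptions:

#[test]
fn poseidon2_bn254_hashes_goldilocks_inputs() {
    use crate::field::goldilocks_field::GoldilocksField;
    use crate::field::types::Field;
    use crate::plonk::config::{GenericField, Hasher};

    let input: Vec<GenericField<GoldilocksField>> = (0..4u64)
        .map(|i| GenericField::Goldilocks(GoldilocksField::from_canonical_u64(i)))
        .collect();
    // The four Goldilocks elements are packed into BN254 elements via generic_field_to_bn
    // and absorbed by the Poseidon2 sponge; the digest is a single BN254 element.
    let digest = <Poseidon2BN254 as Hasher<GoldilocksField>>::hash_no_pad(&input);
    let _ = digest.element;
}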
fn felts_to_bytes<E>(f: E) -> Vec<u8> where
// --------- Conversion helper functions ---------------------
/// Converts a vector of Goldilocks elements to BN254 elements.
/// Packs each group of 7 Goldilocks elements into 2 BN254 elements.
fn goldilocks_to_bn<F: RichField>(input: &Vec<F>) -> Vec<BN254Fr>{
let u64s: Vec<u64> = input.iter().map(|x| x.to_canonical_u64()).collect();
let l = u64s.len();
let m = l / 7;
let mut result = Vec::new();
for i in 0..m {
let group: [u64; 7] = u64s[7 * i..7 * (i + 1)].try_into().unwrap();
let (a, b) = u64s_to_felts(group);
result.push(a);
result.push(b);
}
let r = l - 7 * m;
if r > 0 {
let mut ws = [0u64; 7];
for i in 0..r {
ws[i] = u64s[7 * m + i];
}
let (a, b) = u64s_to_felts(ws);
// check that we don't push zero field elements
if a != BN254Fr::zero() {
result.push(a);
}
if b != BN254Fr::zero() {
result.push(b);
}
}
result
}
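A hedged example of the packing arithmetic (as a hypothetical unit test in the same module, since the function is private): ten Goldilocks elements form one full group of seven plus a remainder of three, and the remainder's all-zero second limb is dropped, so three BN254 elements come out.

#[test]
fn goldilocks_to_bn_length() {
    use crate::field::goldilocks_field::GoldilocksField;
    use crate::field::types::Field;

    // 10 inputs = one group of 7 (-> 2 BN254 elements) + a remainder of 3.
    // The remainder fits entirely in the first BN254 limb, so its zero second
    // limb is dropped: 3 BN254 elements in total.
    let input: Vec<GoldilocksField> = (1..=10u64).map(GoldilocksField::from_canonical_u64).collect();
    assert_eq!(goldilocks_to_bn(&input).len(), 3);
}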
const BIGINT_TWO_TO_64: BigInt<4> = BigInt( [0,1,0,0] );
const BIGINT_TWO_TO_128: BigInt<4> = BigInt( [0,0,1,0] );
const BIGINT_TWO_TO_192: BigInt<4> = BigInt( [0,0,0,1] );
/// Converts 7 u64 words into 2 BN254 field elements; taken directly from `rust-bn254-hash`.
pub fn u64s_to_felts(ws: [u64; 7]) -> (BN254Fr, BN254Fr) {
let hi = ws[6] >> 32;
let lo = ws[6] & 0xFFFF_FFFF;
let field_powers_of_two_to_64: [BN254Fr;3] =
[
BN254Fr::from_bigint(BIGINT_TWO_TO_64 ).unwrap(),
BN254Fr::from_bigint(BIGINT_TWO_TO_128).unwrap(),
BN254Fr::from_bigint(BIGINT_TWO_TO_192).unwrap()
];
let x = BN254Fr::from(ws[0])
+ field_powers_of_two_to_64[0] * BN254Fr::from(ws[1])
+ field_powers_of_two_to_64[1] * BN254Fr::from(ws[2])
+ field_powers_of_two_to_64[2] * BN254Fr::from(lo);
let y = BN254Fr::from(ws[3])
+ field_powers_of_two_to_64[0] * BN254Fr::from(ws[4])
+ field_powers_of_two_to_64[1] * BN254Fr::from(ws[5])
+ field_powers_of_two_to_64[2] * BN254Fr::from(hi);
(x, y)
}
/// Converts a slice of bytes to u64 values by taking 63 bits at a time.
/// This makes the result safe to convert into Goldilocks field elements.
/// Any trailing bits that do not form a full 63-bit chunk are ignored.
pub fn bytes_to_u64(x: &[u8]) -> Vec<u64> {
let total_bits = x.len() * 8;
let num_chunks = total_bits / 63; // ignore any leftover bits
let mut result = Vec::with_capacity(num_chunks);
for i in 0..num_chunks {
let bit_offset = i * 63;
let first_byte = bit_offset / 8;
let shift = bit_offset % 8;
// how many bits do we need? We need (shift + 63) bits in total.
// convert that to bytes by rounding up.
let needed_bytes = ((shift + 63) + 7) / 8;
if first_byte + needed_bytes > x.len() {
break; // break out if incomplete chunk
}
let mut chunk: u128 = 0;
for j in 0..needed_bytes {
chunk |= (x[first_byte + j] as u128) << (8 * j);
}
// shift right with `shift` bits, then mask 63 bits.
let value = (chunk >> shift) & ((1u128 << 63) - 1);
result.push(value as u64);
}
result
}
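A small sketch of the chunking behavior (hypothetical test): 16 bytes are 128 bits, which yields two full 63-bit chunks, and the trailing 2 bits are ignored.

#[test]
fn bytes_to_u64_takes_63_bit_chunks() {
    let bytes = [0xFFu8; 16];
    let words = bytes_to_u64(&bytes);
    assert_eq!(words.len(), 2);
    // Each chunk is masked to 63 bits, so it is always below the Goldilocks modulus.
    assert!(words.iter().all(|&w| w < (1u64 << 63)));
}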
/// Helper function: converts a slice of `GenericField<F>` into a `Vec<BN254Fr>`.
/// Consecutive Goldilocks elements are grouped and converted in one shot.
fn generic_field_to_bn<F: RichField>(input: &[GenericField<F>]) -> Vec<BN254Fr> {
let mut bn_felts = Vec::new();
let mut temp_goldilocks = Vec::new();
for e in input.iter().copied() {
match e {
GenericField::Goldilocks(v) => {
// accumulate consecutive Goldilocks field elems.
temp_goldilocks.push(v);
}
GenericField::BN254(v) => {
// convert any accumulated Goldilocks elems.
if !temp_goldilocks.is_empty() {
let converted = goldilocks_to_bn(&temp_goldilocks);
bn_felts.extend(converted);
temp_goldilocks.clear();
}
// push the BN field element directly.
bn_felts.push(v);
}
}
}
// convert any remaining Goldilocks elements.
if !temp_goldilocks.is_empty() {
let converted = goldilocks_to_bn(&temp_goldilocks);
bn_felts.extend(converted);
}
bn_felts
}
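And a sketch of the grouping behavior across mixed inputs (again a hypothetical same-module test): Goldilocks runs on either side of a BN254 element are converted separately, while the BN254 element passes through unchanged.

#[test]
fn generic_field_to_bn_groups_consecutive_goldilocks() {
    use crate::field::goldilocks_field::GoldilocksField;
    use crate::field::types::Field;
    use crate::plonk::config::GenericField;

    let g = |v: u64| GenericField::Goldilocks(GoldilocksField::from_canonical_u64(v));
    // [G, G, BN, G]: the two leading Goldilocks elements pack into one BN254 element,
    // the BN254 element is passed through, and the trailing Goldilocks element packs
    // into one more BN254 element: 3 outputs.
    let input = vec![g(1), g(2), GenericField::BN254(BN254Fr::from(7u64)), g(3)];
    let out = generic_field_to_bn(&input);
    assert_eq!(out.len(), 3);
    assert_eq!(out[1], BN254Fr::from(7u64));
}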
/// computes the length in bytes of a vector of generic field elements.
fn check_len_in_bytes<F: RichField>(input: &[GenericField<F>]) -> usize{
input.iter().map(|elem| {
match elem {
GenericField::BN254(_) => 32,
GenericField::Goldilocks(_) => 8,
}
}).sum()
}
//------------------ serialization for BN254 ---------------------
pub fn felts_to_bytes<E>(f: &E) -> Vec<u8> where
E: CanonicalSerialize
{
let mut bytes = Vec::new();
@ -136,14 +310,14 @@ fn felts_to_bytes<E>(f: E) -> Vec<u8> where
bytes
}
fn bytes_to_felts<E>(bytes: &[u8]) -> E where
pub fn bytes_to_felts<E>(bytes: &[u8]) -> E where
E: CanonicalDeserialize
{
let fr_res = E::deserialize_uncompressed(bytes).unwrap();
fr_res
}
fn felts_to_u64<E>(f: E) -> Vec<u64>
pub fn felts_to_u64<E>(f: E) -> Vec<u64>
where
E: CanonicalSerialize,
{

View File

@ -3,20 +3,20 @@ use alloc::{vec, vec::Vec};
use core::marker::PhantomData;
use crate::field::extension::{Extendable, FieldExtension};
use crate::hash::duplex::DuplexState;
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::hashing::PlonkyPermutation;
use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::Target;
use crate::plonk::circuit_builder::CircuitBuilder;
use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher};
use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher};
/// Observes prover messages, and generates challenges by hashing the transcript, a la Fiat-Shamir.
#[derive(Clone, Debug)]
pub struct Challenger<F: RichField, H: Hasher<F>> {
pub(crate) sponge_state: H::Permutation,
pub(crate) input_buffer: Vec<F>,
output_buffer: Vec<F>,
pub struct Challenger<F: RichField, H: Hasher<F>>
{
duplex_state: DuplexState<F, H>
}
/// Observes prover messages, and generates verifier challenges based on the transcript.
@ -27,42 +27,36 @@ pub struct Challenger<F: RichField, H: Hasher<F>> {
/// design, but it can be viewed as a duplex sponge whose inputs are sometimes zero (when we perform
/// multiple squeezes) and whose outputs are sometimes ignored (when we perform multiple
/// absorptions). Thus the security properties of a duplex sponge still apply to our design.
impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
impl<F: RichField, H: Hasher<F>> Challenger<F, H>
{
pub fn new() -> Challenger<F, H> {
Challenger {
sponge_state: H::Permutation::new(core::iter::repeat(F::ZERO)),
input_buffer: Vec::with_capacity(H::Permutation::RATE),
output_buffer: Vec::with_capacity(H::Permutation::RATE),
duplex_state: DuplexState::<F, H>::new(),
}
}
pub fn observe_element(&mut self, element: F) {
// Any buffered outputs are now invalid, since they wouldn't reflect this input.
self.output_buffer.clear();
self.input_buffer.push(element);
if self.input_buffer.len() == H::Permutation::RATE {
self.duplexing();
}
pub fn observe_element(&mut self, element: GenericField<F>) {
self.duplex_state.absorb(element)
}
pub fn observe_extension_element<const D: usize>(&mut self, element: &F::Extension)
where
F: RichField + Extendable<D>,
where
F: RichField + Extendable<D>,
{
self.observe_elements(&element.to_basefield_array());
let elements = element.to_basefield_array().map(|e: F|GenericField::<F>::Goldilocks(e));
self.observe_elements(&elements);
}
pub fn observe_elements(&mut self, elements: &[F]) {
pub fn observe_elements(&mut self, elements: &[GenericField<F>]) {
for &element in elements {
self.observe_element(element);
}
}
pub fn observe_extension_elements<const D: usize>(&mut self, elements: &[F::Extension])
where
F: RichField + Extendable<D>,
where
F: RichField + Extendable<D>,
{
for element in elements {
self.observe_extension_element(element);
@ -80,15 +74,7 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
}
pub fn get_challenge(&mut self) -> F {
// If we have buffered inputs, we must perform a duplexing so that the challenge will
// reflect them. Or if we've run out of outputs, we must perform a duplexing to get more.
if !self.input_buffer.is_empty() || self.output_buffer.is_empty() {
self.duplexing();
}
self.output_buffer
.pop()
.expect("Output buffer should be non-empty")
self.duplex_state.squeeze()
}
pub fn get_n_challenges(&mut self, n: usize) -> Vec<F> {
@ -107,8 +93,8 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
}
pub fn get_extension_challenge<const D: usize>(&mut self) -> F::Extension
where
F: RichField + Extendable<D>,
where
F: RichField + Extendable<D>,
{
let mut arr = [F::ZERO; D];
arr.copy_from_slice(&self.get_n_challenges(D));
@ -116,40 +102,18 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
}
pub fn get_n_extension_challenges<const D: usize>(&mut self, n: usize) -> Vec<F::Extension>
where
F: RichField + Extendable<D>,
where
F: RichField + Extendable<D>,
{
(0..n)
.map(|_| self.get_extension_challenge::<D>())
.collect()
}
/// Absorb any buffered inputs. After calling this, the input buffer will be empty, and the
/// output buffer will be full.
fn duplexing(&mut self) {
assert!(self.input_buffer.len() <= H::Permutation::RATE);
// Overwrite the first r elements with the inputs. This differs from a standard sponge,
// where we would xor or add in the inputs. This is a well-known variant, though,
// sometimes called "overwrite mode".
self.sponge_state
.set_from_iter(self.input_buffer.drain(..), 0);
// Apply the permutation.
self.sponge_state.permute();
self.output_buffer.clear();
self.output_buffer
.extend_from_slice(self.sponge_state.squeeze());
pub fn grind(&mut self, min_leading_zeros: u32) -> F {
self.duplex_state.grind(min_leading_zeros)
}
pub fn compact(&mut self) -> H::Permutation {
if !self.input_buffer.is_empty() {
self.duplexing();
}
self.output_buffer.clear();
self.sponge_state
}
}
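A minimal usage sketch of the refactored Challenger; the paths are assumed from this fork's layout and the function is illustrative only:

use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericField;

fn fiat_shamir_example() -> GoldilocksField {
    let mut challenger = Challenger::<GoldilocksField, PoseidonHash>::new();
    // Prover messages are now observed as GenericField wrappers.
    challenger.observe_element(GenericField::Goldilocks(GoldilocksField::from_canonical_u64(42)));
    // Challenges are still plain Goldilocks elements, squeezed through the duplex state.
    challenger.get_challenge()
}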
impl<F: RichField, H: AlgebraicHasher<F>> Default for Challenger<F, H> {

View File

@ -44,7 +44,7 @@ use crate::plonk::circuit_data::{
CircuitConfig, CircuitData, CommonCircuitData, MockCircuitData, ProverCircuitData,
ProverOnlyCircuitData, VerifierCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData,
};
use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericHashOut, Hasher};
use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, GenericHashOut, Hasher};
use crate::plonk::copy_constraint::CopyConstraint;
use crate::plonk::permutation_argument::Forest;
use crate::plonk::plonk_common::PlonkOracle;
@ -1256,13 +1256,14 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
};
let constants_sigmas_cap = constants_sigmas_commitment.merkle_tree.cap.clone();
let domain_separator = self.domain_separator.unwrap_or_default();
let domain_separator_digest = C::Hasher::hash_pad(&domain_separator);
let ds_felts: Vec<GenericField<F>> = domain_separator.clone().into_iter().map(GenericField::Goldilocks).collect();
let domain_separator_digest = C::Hasher::hash_pad(&ds_felts);
// TODO: This should also include an encoding of gate constraints.
let circuit_digest_parts = [
constants_sigmas_cap.flatten(),
domain_separator_digest.to_vec(),
vec![
F::from_canonical_usize(degree_bits),
GenericField::Goldilocks(F::from_canonical_usize(degree_bits)),
/* Add other circuit data here */
],
];

View File

@ -8,7 +8,7 @@
#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::fmt::Debug;
use core::fmt::{Debug};
use serde::de::DeserializeOwned;
use serde::Serialize;
@ -19,22 +19,86 @@ use crate::field::goldilocks_field::GoldilocksField;
use crate::hash::hash_types::{HashOut, RichField};
use crate::hash::hashing::PlonkyPermutation;
use crate::hash::keccak::KeccakHash;
use crate::hash::poseidon2_bn254::Poseidon2BN254;
use crate::hash::poseidon::PoseidonHash;
use crate::iop::target::{BoolTarget, Target};
use crate::plonk::circuit_builder::CircuitBuilder;
use ark_bn254::Fr as BN254Fr;
use ark_ff::{One, Zero};
use crate::hash::poseidon2_bn254::{bytes_to_felts, felts_to_bytes, Poseidon2BN254};
pub trait GenericHashOut<F: RichField>:
Copy + Clone + Debug + Eq + PartialEq + Send + Sync + Serialize + DeserializeOwned
Copy + Clone + Debug + Eq + PartialEq + Send + Sync + Serialize + DeserializeOwned
{
fn to_bytes(&self) -> Vec<u8>;
fn from_bytes(bytes: &[u8]) -> Self;
fn to_vec(&self) -> Vec<F>;
fn to_vec(&self) -> Vec<GenericField<F>>;
}
/// Generic field element enum; supports only two fields for now.
/// Supported fields: Goldilocks and BN254 Fr (from Arkworks).
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum GenericField<F: RichField> {
Goldilocks(F),
BN254(BN254Fr),
}
/// Hasher field trait covering the fields in the `GenericField` enum.
pub trait HasherField: Default + Sized + Copy + Debug + Eq + PartialEq + Sync + Send {
fn get_one() -> Self;
fn get_zero() -> Self;
fn to_bytes(&self) -> Vec<u8>;
fn from_bytes(b: &[u8]) -> Self;
}
/// BN254 Fr as `HasherField`.
impl HasherField for BN254Fr {
fn get_one() -> Self {
BN254Fr::one()
}
fn get_zero() -> Self {
BN254Fr::zero()
}
fn to_bytes(&self) -> Vec<u8> {
felts_to_bytes::<BN254Fr>(&self)
}
fn from_bytes(b: &[u8]) -> Self {
bytes_to_felts::<BN254Fr>(b)
}
}
/// RichField (Goldilocks) as `HasherField`.
impl <T: RichField> HasherField for T {
fn get_one() -> Self {
T::ONE
}
fn get_zero() -> Self {
T::ZERO
}
fn to_bytes(&self) -> Vec<u8> {
self.to_canonical_u64().to_le_bytes().to_vec()
}
fn from_bytes(b: &[u8]) -> Self {
assert_eq!(b.len(), 8, "Input vector must have exactly 8 bytes");
let arr: [u8; 8] = b.try_into().expect("Conversion to array failed");
let element = u64::from_le_bytes(arr);
T::from_canonical_u64(element)
}
}
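A short round-trip sketch for the HasherField byte encoding of a Goldilocks element (the test itself is hypothetical):

#[test]
fn goldilocks_hasher_field_roundtrip() {
    use crate::field::goldilocks_field::GoldilocksField;
    use crate::field::types::Field;

    let x = GoldilocksField::from_canonical_u64(0xDEAD_BEEF);
    let bytes = HasherField::to_bytes(&x);
    assert_eq!(bytes.len(), 8); // canonical u64, little-endian
    assert_eq!(<GoldilocksField as HasherField>::from_bytes(&bytes), x);
}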
/// Trait for hash functions.
pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
type HF: HasherField;
/// Size of `Hash` in bytes.
const HASH_SIZE: usize;
@ -42,39 +106,27 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
type Hash: GenericHashOut<F>;
/// Permutation used in the sponge construction.
type Permutation: PlonkyPermutation<F>;
type Permutation: PlonkyPermutation<Self::HF>;
/// Hash a message without any padding step. Note that this can enable length-extension attacks.
/// However, it is still collision-resistant in cases where the input has a fixed length.
fn hash_no_pad(input: &[F]) -> Self::Hash;
fn hash_no_pad(input: &[GenericField<F>]) -> Self::Hash;
/// Pad the message using the `pad10*1` rule, then hash it.
fn hash_pad(input: &[F]) -> Self::Hash {
let mut padded_input = input.to_vec();
padded_input.push(F::ONE);
while (padded_input.len() + 1) % Self::Permutation::RATE != 0 {
padded_input.push(F::ZERO);
}
padded_input.push(F::ONE);
Self::hash_no_pad(&padded_input)
}
fn hash_pad(input: &[GenericField<F>]) -> Self::Hash;
/// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a
/// no-op.
fn hash_or_noop(inputs: &[F]) -> Self::Hash {
if inputs.len() * 8 <= Self::HASH_SIZE {
let mut inputs_bytes = vec![0u8; Self::HASH_SIZE];
for i in 0..inputs.len() {
inputs_bytes[i * 8..(i + 1) * 8]
.copy_from_slice(&inputs[i].to_canonical_u64().to_le_bytes());
}
Self::Hash::from_bytes(&inputs_bytes)
} else {
Self::hash_no_pad(inputs)
}
}
fn hash_or_noop(inputs: &[GenericField<F>]) -> Self::Hash;
/// Absorbs the input into the given sponge state (overwrite mode).
fn sponge(state: &mut Self::Permutation, input: Vec<GenericField<F>>);
/// 2-to-1 compression
fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash;
/// Squeezes out a vector of Goldilocks field elements (used by the duplex challenger).
fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec<F>;
}
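To make the pad10*1 rule used by the hash_pad implementations above concrete, here is a hedged sketch of the length arithmetic (the helper name is hypothetical; RATE = 8 as for the Poseidon permutation):

fn pad10star1_len(msg_len: usize, rate: usize) -> usize {
    // Mirrors the loop in `hash_pad`: one ONE, zero or more ZEROs, then a final ONE,
    // so the padded length lands on a multiple of `rate`.
    let mut len = msg_len + 1; // the leading ONE
    while (len + 1) % rate != 0 {
        len += 1; // a ZERO
    }
    len + 1 // the trailing ONE
}

#[test]
fn pad_lengths() {
    assert_eq!(pad10star1_len(5, 8), 8);  // 5 message elements -> one rate-8 block
    assert_eq!(pad10star1_len(8, 8), 16); // a full block still gets a fresh padding block
}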
/// Trait for algebraic hash functions, built from a permutation using the sponge construction.
@ -88,16 +140,17 @@ pub trait AlgebraicHasher<F: RichField>: Hasher<F, Hash = HashOut<F>> {
swap: BoolTarget,
builder: &mut CircuitBuilder<F, D>,
) -> Self::AlgebraicPermutation
where
F: RichField + Extendable<D>;
where
F: RichField + Extendable<D>;
}
/// Generic configuration trait.
pub trait GenericConfig<const D: usize>:
Debug + Clone + Sync + Sized + Send + Eq + PartialEq
Debug + Clone + Sync + Sized + Send + Eq + PartialEq
{
/// Main field.
type F: RichField + Extendable<D, Extension = Self::FE>;
/// Field extension of degree D of the main field.
type FE: FieldExtension<D, BaseField = Self::F>;
/// Hash function used for building Merkle trees.
@ -126,7 +179,7 @@ impl GenericConfig<2> for KeccakGoldilocksConfig {
type InnerHasher = PoseidonHash;
}
/// Configuration using PoseidonBN254 over the Goldilocks field.
/// Configuration using Poseidon2BN254 as hasher over the Goldilocks field.
#[derive(Debug, Copy, Clone, Default, Eq, PartialEq)]
pub struct Poseidon2BN254Config;
impl GenericConfig<2> for Poseidon2BN254Config {

View File

@ -25,7 +25,7 @@ use crate::hash::merkle_tree::MerkleCap;
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::target::Target;
use crate::plonk::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData};
use crate::plonk::config::{GenericConfig, Hasher};
use crate::plonk::config::{GenericConfig, GenericField, Hasher};
use crate::plonk::verifier::{verify_with_challenges, DEFAULT_VERIFIER_OPTIONS};
use crate::util::serialization::{Buffer, Read, Write};
@ -104,7 +104,8 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
pub fn get_public_inputs_hash(
&self,
) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash {
C::InnerHasher::hash_no_pad(&self.public_inputs)
let pi_felts: Vec<GenericField<F>> = self.public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect();
C::InnerHasher::hash_no_pad(&pi_felts)
}
pub fn to_bytes(&self) -> Vec<u8> {
@ -234,7 +235,8 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
pub(crate) fn get_public_inputs_hash(
&self,
) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash {
C::InnerHasher::hash_no_pad(&self.public_inputs)
let pi_felts: Vec<GenericField<F>> = self.public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect();
C::InnerHasher::hash_no_pad(&pi_felts)
}
pub fn to_bytes(&self) -> Vec<u8> {

View File

@ -30,7 +30,7 @@ use crate::iop::target::Target;
use crate::iop::witness::{MatrixWitness, PartialWitness, PartitionWitness, Witness, WitnessWrite};
use crate::plonk::circuit_builder::NUM_COINS_LOOKUP;
use crate::plonk::circuit_data::{CommonCircuitData, ProverOnlyCircuitData};
use crate::plonk::config::{GenericConfig, Hasher};
use crate::plonk::config::{GenericConfig, GenericField, Hasher};
use crate::plonk::plonk_common::PlonkOracle;
use crate::plonk::proof::{OpeningSet, Proof, ProofWithPublicInputs};
use crate::plonk::vanishing_poly::{eval_vanishing_poly_base_batch, get_lut_poly};
@ -269,7 +269,8 @@ where
set_lookup_wires(prover_data, common_data, &mut partition_witness)?;
let public_inputs = partition_witness.get_targets(&prover_data.public_inputs);
let public_inputs_hash = C::InnerHasher::hash_no_pad(&public_inputs);
let pi_felts: Vec<GenericField<F>> = public_inputs.clone().into_iter().map(GenericField::Goldilocks).collect();
let public_inputs_hash = C::InnerHasher::hash_no_pad(&pi_felts);
let witness = timed!(
timing,