Mirror of https://github.com/logos-storage/plonky2.git, synced 2026-01-02 13:53:07 +00:00

Commit aa0c6221c6: Merge bdfb86b46e8f9749acc8bb8233cc78ee7eb6f4a5 into 32cd13dedef02d22be33f4f521ba41f942092d07
@@ -35,6 +35,12 @@ unroll = { workspace = true }
 web-time = { version = "1.0.0", optional = true }
 strum = "0.26"
 strum_macros = "0.26"
+rust-bn254-hash = { git = "https://github.com/codex-storage/rust-bn254-hash.git" }
+ark-serialize = { version = "0.5.0" }
+ark-bn254 = "0.5.0"
+ark-ff = "0.5.0"
+ark-std = "0.5.0"
+num-bigint = { version = "0.4", default-features = false }
 
 # Local dependencies
 plonky2_field = { version = "1.0.0", path = "../field", default-features = false }
@@ -75,6 +81,10 @@ harness = false
 name = "hashing"
 harness = false
 
+[[bench]]
+name = "bn254_hash"
+harness = false
+
 [[bench]]
 name = "merkle"
 harness = false
plonky2/benches/bn254_hash.rs — 107 lines (new file)

@@ -0,0 +1,107 @@
use std::any::type_name;

use anyhow::{anyhow, Result};
use criterion::{criterion_group, criterion_main, Criterion};
use plonky2::gates::noop::NoopGate;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{
    GenericConfig, KeccakGoldilocksConfig, Poseidon2BN254Config, PoseidonGoldilocksConfig,
};
use plonky2_field::extension::Extendable;
use plonky2_field::goldilocks_field::GoldilocksField;

/// Benchmark for building, proving, and verifying a Plonky2 circuit.
fn bench_circuit<F: RichField + Extendable<D>, const D: usize, C: GenericConfig<D, F = F>>(
    c: &mut Criterion,
    circuit_size: usize,
) -> Result<()> {
    // Create the circuit configuration.
    let config = CircuitConfig::standard_recursion_config();
    let mut builder = CircuitBuilder::<F, D>::new(config);

    let num_dummy_gates = match circuit_size {
        0 => return Err(anyhow!("size must be at least 1")),
        1 => 0,
        2 => 1,
        n => (1 << (n - 1)) + 1,
    };

    for _ in 0..num_dummy_gates {
        builder.add_gate(NoopGate, vec![]);
    }

    // Benchmark group.
    let mut group = c.benchmark_group(format!(
        "Circuit Benchmark size {} for hasher: {}",
        circuit_size,
        type_name::<C::Hasher>()
    ));

    // Benchmark the circuit-building phase.
    group.bench_function("Build Circuit", |b| {
        b.iter(|| {
            let config = CircuitConfig::standard_recursion_config();
            let mut local_builder = CircuitBuilder::<F, D>::new(config);
            for _ in 0..num_dummy_gates {
                local_builder.add_gate(NoopGate, vec![]);
            }
            let _data = local_builder.build::<C>();
        })
    });

    let data = builder.build::<C>();
    println!("Circuit size (degree bits): {:?}", data.common.degree_bits());

    // Create a PartialWitness.
    let mut pw = PartialWitness::new();

    // Benchmark the proving phase.
    group.bench_function("Prove Circuit", |b| {
        b.iter(|| {
            let local_pw = pw.clone();
            data.prove(local_pw).expect("Failed to prove circuit")
        })
    });

    // Generate the proof once for verification benchmarking.
    let proof_with_pis = data.prove(pw.clone()).expect("Failed to prove circuit");
    let verifier_data = data.verifier_data();

    println!("Proof size: {} bytes", proof_with_pis.to_bytes().len());

    // Benchmark the verifying phase.
    group.bench_function("Verify Proof", |b| {
        b.iter(|| {
            verifier_data
                .verify(proof_with_pis.clone())
                .expect("Failed to verify proof");
        })
    });

    group.finish();
    Ok(())
}

fn bench_multiple_hashers(c: &mut Criterion) {
    const D: usize = 2;
    type C1 = PoseidonGoldilocksConfig;
    type C2 = KeccakGoldilocksConfig;
    type C3 = Poseidon2BN254Config;
    type F = GoldilocksField;

    bench_circuit::<F, D, C1>(c, 10).expect("failed");
    bench_circuit::<F, D, C2>(c, 10).expect("failed");
    bench_circuit::<F, D, C3>(c, 10).expect("failed");

    bench_circuit::<F, D, C1>(c, 11).expect("failed");
    bench_circuit::<F, D, C2>(c, 11).expect("failed");
    bench_circuit::<F, D, C3>(c, 11).expect("failed");

    bench_circuit::<F, D, C1>(c, 12).expect("failed");
    bench_circuit::<F, D, C2>(c, 12).expect("failed");
    bench_circuit::<F, D, C3>(c, 12).expect("failed");

    bench_circuit::<F, D, C1>(c, 13).expect("failed");
    bench_circuit::<F, D, C2>(c, 13).expect("failed");
    bench_circuit::<F, D, C3>(c, 13).expect("failed");
}

/// Criterion benchmark group.
criterion_group! {
    name = prove_verify_benches;
    config = Criterion::default().sample_size(10);
    targets = bench_multiple_hashers
}
criterion_main!(prove_verify_benches);
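With the `[[bench]]` target registered in Cargo.toml above, this should run under the standard Criterion workflow (a plain Cargo invocation; nothing project-specific assumed):

cargo bench --bench bn254_hash

Note that `sample_size(10)` in `criterion_group!` keeps runs short, since every sample proves a full circuit.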
plonky2/examples/poseidon2_bn254_example.rs — 51 lines (new file)

@@ -0,0 +1,51 @@
// use std::fs;
use anyhow::Result;
use plonky2::gates::noop::NoopGate;
use plonky2::iop::witness::PartialWitness;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, Poseidon2BN254Config};
use plonky2::plonk::prover::DEFAULT_PROVER_OPTIONS;
use plonky2::plonk::verifier::{HashStatisticsPrintLevel, VerifierOptions};

/// An example of using Plonky2 with the BN254 Poseidon2 hasher on a dummy circuit of size S.
fn main() -> Result<()> {
    const D: usize = 2;
    type C = Poseidon2BN254Config;
    type F = <C as GenericConfig<D>>::F;

    let config = CircuitConfig::standard_recursion_config();
    let mut builder = CircuitBuilder::<F, D>::new(config);
    const S: usize = 5;
    let num_dummy_gates = (1 << (S - 1)) + 1;
    for _ in 0..num_dummy_gates {
        builder.add_gate(NoopGate, vec![]);
    }

    let pw = PartialWitness::new();

    let data = builder.build::<C>();
    println!("circ size = {}", data.common.degree_bits());

    let prover_opts = DEFAULT_PROVER_OPTIONS;

    println!("proving ...");

    let proof = data.prove_with_options(pw, &prover_opts)?;

    // Serialize the circuit into JSON:
    // let common_circuit_data_serialized = serde_json::to_string(&data.common).unwrap();
    // let verifier_only_circuit_data_serialized = serde_json::to_string(&data.verifier_only).unwrap();
    // let proof_serialized = serde_json::to_string(&proof).unwrap();
    // fs::write("bn_common.json", common_circuit_data_serialized).expect("Unable to write file");
    // fs::write("bn_vkey.json", verifier_only_circuit_data_serialized).expect("Unable to write file");
    // fs::write("bn_proof.json", proof_serialized).expect("Unable to write file");

    let verifier_opts = VerifierOptions {
        print_hash_statistics: HashStatisticsPrintLevel::Summary,
    };

    assert!(data.verify_with_options(proof, &verifier_opts).is_ok());

    Ok(())
}
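To try the example end to end (standard Cargo; release mode matters because proving in debug builds is very slow):

cargo run --example poseidon2_bn254_example --release

Re-enabling the commented-out JSON dump additionally assumes `serde_json` is available to the crate.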
@@ -13,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap;
 use crate::iop::challenger::{Challenger, RecursiveChallenger};
 use crate::iop::target::Target;
 use crate::plonk::circuit_builder::CircuitBuilder;
-use crate::plonk::config::{AlgebraicHasher, GenericConfig, Hasher};
+use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, Hasher, IntoGenericFieldVec};
 
 impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
     pub fn observe_openings<const D: usize>(&mut self, openings: &FriOpenings<F, D>)
@@ -57,8 +57,10 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
         if let Some(step_count) = max_num_query_steps {
             let cap_len = (1 << config.cap_height) * NUM_HASH_OUT_ELTS;
             let zero_cap = vec![F::ZERO; cap_len];
+            let zero_cap_felts: Vec<GenericField<F>> = zero_cap.into_generic_field_vec();
             for _ in commit_phase_merkle_caps.len()..step_count {
-                self.observe_elements(&zero_cap);
+                // self.observe_elements(&zero_cap);
+                self.observe_elements(&zero_cap_felts);
                 self.get_extension_challenge::<D>();
             }
         }
@@ -73,7 +75,7 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
         }
     }
 
-    self.observe_element(pow_witness);
+    self.observe_element(pow_witness.into());
     let fri_pow_response = self.get_challenge();
 
     let fri_query_indices = (0..num_fri_queries)
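This changeset leans heavily on `GenericField<F>` and `IntoGenericFieldVec`, which live in plonk/config.rs and are not included in the hunks shown here. Judging from their uses (`F::ZERO.into()`, `GenericField::Goldilocks(v)`, `GenericField::BN254(v)`, `vec.into_generic_field_vec()`), the wrapper is presumably shaped roughly like the sketch below; the names match the diff, but the real definition may differ:

use ark_bn254::Fr as BN254Fr;

/// A transcript element that is either a native Goldilocks element or a BN254 scalar.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum GenericField<F> {
    Goldilocks(F),
    BN254(BN254Fr),
}

impl<F> From<F> for GenericField<F> {
    fn from(v: F) -> Self {
        GenericField::Goldilocks(v)
    }
}

/// Bulk conversion used throughout the diff: Vec<F> -> Vec<GenericField<F>>.
pub trait IntoGenericFieldVec<F> {
    fn into_generic_field_vec(self) -> Vec<GenericField<F>>;
}

impl<F> IntoGenericFieldVec<F> for Vec<F> {
    fn into_generic_field_vec(self) -> Vec<GenericField<F>> {
        self.into_iter().map(GenericField::Goldilocks).collect()
    }
}

(`BN254HashOut::to_vec` further down also calls `.into()` on a BN254Fr, so a corresponding BN254-side conversion presumably exists as well.)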
@@ -14,7 +14,7 @@ use crate::hash::hash_types::{RichField, NUM_HASH_OUT_ELTS};
 use crate::hash::hashing::*;
 use crate::hash::merkle_tree::MerkleTree;
 use crate::iop::challenger::Challenger;
-use crate::plonk::config::GenericConfig;
+use crate::plonk::config::{GenericConfig, GenericField, IntoGenericFieldVec};
 use crate::plonk::plonk_common::reduce_with_powers;
 use crate::plonk::prover::ProverOptions;
 use crate::plonk::verifier::HashStatisticsPrintLevel;
@@ -136,8 +136,9 @@ fn fri_committed_trees<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>,
     if let Some(step_count) = max_num_query_steps {
         let cap_len = (1 << fri_params.config.cap_height) * NUM_HASH_OUT_ELTS;
         let zero_cap = vec![F::ZERO; cap_len];
+        let zero_cap_felts: Vec<GenericField<F>> = zero_cap.into_generic_field_vec();
         for _ in fri_params.reduction_arity_bits.len()..step_count {
-            challenger.observe_elements(&zero_cap);
+            challenger.observe_elements(&zero_cap_felts);
             challenger.get_extension_challenge::<D>();
         }
     }
@@ -171,45 +172,12 @@ pub(crate) fn fri_proof_of_work<
 ) -> F {
     let min_leading_zeros = config.proof_of_work_bits + (64 - F::order().bits()) as u32;
 
-    // The easiest implementation would be repeatedly clone our Challenger. With each clone, we'd
-    // observe an incrementing PoW witness, then get the PoW response. If it contained sufficient
-    // leading zeros, we'd end the search, and store this clone as our new challenger.
-    //
-    // However, performance is critical here. We want to avoid cloning Challenger, particularly
-    // since it stores vectors, which means allocations. We'd like a more compact state to clone.
-    //
-    // We know that a duplex will be performed right after we send the PoW witness, so we can ignore
-    // any output_buffer, which will be invalidated. We also know
-    // input_buffer.len() < H::Permutation::WIDTH, an invariant of Challenger.
-    //
-    // We separate the duplex operation into two steps, one which can be performed now, and the
-    // other which depends on the PoW witness candidate. The first step is the overwrite our sponge
-    // state with any inputs (excluding the PoW witness candidate). The second step is to overwrite
-    // one more element of our sponge state with the candidate, then apply the permutation,
-    // obtaining our duplex's post-state which contains the PoW response.
-    let mut duplex_intermediate_state = challenger.sponge_state;
-    let witness_input_pos = challenger.input_buffer.len();
-    duplex_intermediate_state.set_from_iter(challenger.input_buffer.clone(), 0);
-
-    // println!("duplex_intermediate_state = {:?}", duplex_intermediate_state);
-
-    let pow_witness = (0..=F::NEG_ONE.to_canonical_u64())
-        .into_par_iter()
-        .find_any(|&candidate| {
-            let mut duplex_state = duplex_intermediate_state;
-            duplex_state.set_elt(F::from_canonical_u64(candidate), witness_input_pos);
-            duplex_state.permute();
-            let pow_response = duplex_state.squeeze().iter().last().unwrap();
-            let leading_zeros = pow_response.to_canonical_u64().leading_zeros();
-            leading_zeros >= min_leading_zeros
-        })
-        .map(F::from_canonical_u64)
-        .expect("Proof of work failed. This is highly unlikely!");
+    let pow_witness = challenger.grind(min_leading_zeros);
 
+    // println!("pow_witness = {:?}", pow_witness);
 
     // Recompute pow_response using our normal Challenger code, and make sure it matches.
-    challenger.observe_element(pow_witness);
+    challenger.observe_element(pow_witness.into());
     let pow_response = challenger.get_challenge();
     let leading_zeros = pow_response.to_canonical_u64().leading_zeros();
     assert!(leading_zeros >= min_leading_zeros);
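The removed block above moves, in spirit, into `Challenger::grind` and `DuplexState::grind_helper` later in this diff. The core idea is a parallel search for a witness whose hash response has enough leading zero bits. A minimal self-contained sketch of that pattern using rayon (illustrative only; `toy_response` is a stand-in mixer, not the real sponge permutation):

use rayon::iter::{IntoParallelIterator, ParallelIterator};

/// Toy stand-in for one duplex step: mix the candidate into a copy of the
/// state and return the would-be PoW response.
fn toy_response(state: u64, candidate: u64) -> u64 {
    // splitmix64-style mixing; the real code runs the sponge permutation instead.
    let mut z = state ^ candidate.wrapping_mul(0x9E3779B97F4A7C15);
    z = (z ^ (z >> 30)).wrapping_mul(0xBF58476D1CE4E5B9);
    z ^ (z >> 31)
}

fn grind(state: u64, min_leading_zeros: u32) -> u64 {
    (0u64..=u64::MAX)
        .into_par_iter()
        .find_any(|&candidate| {
            toy_response(state, candidate).leading_zeros() >= min_leading_zeros
        })
        .expect("Proof of work failed. This is highly unlikely!")
}

fn main() {
    let witness = grind(0xDEAD_BEEF, 20);
    println!("found witness {witness}");
}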
@@ -10,7 +10,7 @@ use crate::hash::merkle_proofs::MerkleProof;
 use crate::hash::merkle_tree::{
     capacity_up_to_mut, fill_digests_buf, merkle_tree_prove, MerkleCap,
 };
-use crate::plonk::config::{GenericHashOut, Hasher};
+use crate::plonk::config::{GenericField, GenericHashOut, Hasher, IntoGenericFieldVec};
 use crate::util::log2_strict;
 
 #[derive(Clone, Debug, Default, Eq, PartialEq)]
@@ -56,9 +56,18 @@ impl<F: RichField, H: Hasher<F>> BatchMerkleTree<F, H> {
         let mut digests_buf_pos = 0;
 
         let mut cap = vec![];
-        let dummy_leaves = vec![vec![F::ZERO]; 1 << cap_height];
-        leaves.push(dummy_leaves);
-        for window in leaves.windows(2) {
+        let dummy_leaves_felts = vec![vec![F::ZERO.into()]; 1 << cap_height];
+        let mut leaves_felts: Vec<Vec<Vec<GenericField<F>>>> = leaves
+            .clone()
+            .into_iter()
+            .map(|matrix| {
+                matrix
+                    .into_iter()
+                    .map(|vec| vec.into_generic_field_vec())
+                    .collect()
+            })
+            .collect();
+        leaves_felts.push(dummy_leaves_felts);
+        for window in leaves_felts.windows(2) {
             let cur = &window[0];
             let next = &window[1];
 
@@ -82,7 +91,7 @@ impl<F: RichField, H: Hasher<F>> BatchMerkleTree<F, H> {
             );
         } else {
             // The rest leaf layers
-            let new_leaves: Vec<Vec<F>> = cap
+            let new_leaves: Vec<Vec<GenericField<F>>> = cap
                 .iter()
                 .enumerate()
                 .map(|(i, cap_hash)| {
@@ -118,8 +127,6 @@ impl<F: RichField, H: Hasher<F>> BatchMerkleTree<F, H> {
             digests.set_len(num_digests);
         }
 
-        // remove dummy leaves
-        leaves.pop();
 
         Self {
             leaves,
@@ -175,7 +182,7 @@ mod tests {
 
     use super::*;
    use crate::hash::merkle_proofs::verify_batch_merkle_proof_to_cap;
-    use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
+    use crate::plonk::config::{GenericConfig, IntoGenericFieldVec, PoseidonGoldilocksConfig};
 
     const D: usize = 2;
     type C = PoseidonGoldilocksConfig;
@@ -199,10 +206,10 @@ mod tests {
         let fmt: BatchMerkleTree<GoldilocksField, H> = BatchMerkleTree::new(vec![mat_1], 0);
 
         let mat_1_leaf_hashes = [
-            H::hash_or_noop(&[F::ZERO, F::ONE]),
-            H::hash_or_noop(&[F::TWO, F::ONE]),
-            H::hash_or_noop(&[F::TWO, F::TWO]),
-            H::hash_or_noop(&[F::ZERO, F::ZERO]),
+            H::hash_or_noop(&[F::ZERO.into(), F::ONE.into()]),
+            H::hash_or_noop(&[F::TWO.into(), F::ONE.into()]),
+            H::hash_or_noop(&[F::TWO.into(), F::TWO.into()]),
+            H::hash_or_noop(&[F::ZERO.into(), F::ZERO.into()]),
         ];
         assert_eq!(mat_1_leaf_hashes[0..2], fmt.digests[0..2]);
         assert_eq!(mat_1_leaf_hashes[2..4], fmt.digests[4..6]);
@@ -251,10 +258,10 @@ mod tests {
             BatchMerkleTree::new(vec![mat_1, mat_2.clone()], 0);
 
         let mat_1_leaf_hashes = [
-            H::hash_or_noop(&[F::ZERO, F::ONE]),
-            H::hash_or_noop(&[F::TWO, F::ONE]),
-            H::hash_or_noop(&[F::TWO, F::TWO]),
-            H::hash_or_noop(&[F::ZERO, F::ZERO]),
+            H::hash_or_noop(&[F::ZERO.into(), F::ONE.into()]),
+            H::hash_or_noop(&[F::TWO.into(), F::ONE.into()]),
+            H::hash_or_noop(&[F::TWO.into(), F::TWO.into()]),
+            H::hash_or_noop(&[F::ZERO.into(), F::ZERO.into()]),
         ];
         assert_eq!(mat_1_leaf_hashes, fmt.digests[0..4]);
 
@@ -267,10 +274,10 @@ mod tests {
             .zip(mat_2.iter())
             .map(|(row1, row2)| {
                 let mut new_row = row1.clone();
-                new_row.extend_from_slice(row2);
+                new_row.extend_from_slice(&row2.clone().into_generic_field_vec());
                 new_row
             })
-            .collect::<Vec<Vec<F>>>();
+            .collect::<Vec<Vec<GenericField<F>>>>();
         let layer_1 = [
             H::hash_or_noop(&new_leaves[0]),
             H::hash_or_noop(&new_leaves[1]),
plonky2/src/hash/duplex.rs — 121 lines (new file)

@@ -0,0 +1,121 @@
use plonky2_maybe_rayon::rayon::iter::IntoParallelIterator;
use plonky2_maybe_rayon::ParallelIterator;
use plonky2_field::types::PrimeField64;

use crate::hash::hash_types::RichField;
use crate::hash::hashing::PlonkyPermutation;
use crate::plonk::config::{GenericField, Hasher, HasherField};

#[derive(Debug, Clone)]
pub enum DuplexState<F: RichField, H: Hasher<F>> {
    Absorbing {
        state: H::Permutation,
        buf: Vec<GenericField<F>>, // Buffer for absorbing inputs.
    },
    Squeezing {
        state: H::Permutation,
        buf: Vec<F>, // Buffer holding squeezed outputs.
    },
}

impl<F: RichField, H: Hasher<F>> DuplexState<F, H> {
    /// Creates a new duplex state in absorbing mode with an initial zero state.
    pub fn new() -> Self {
        DuplexState::Absorbing {
            state: H::Permutation::new(core::iter::repeat(H::HF::get_zero())),
            buf: Vec::new(),
        }
    }

    /// Absorb a generic field element.
    /// In absorbing mode: the element is appended to the buffer.
    /// In squeezing mode: we trash any current outputs and switch back to absorbing.
    pub fn absorb(&mut self, element: GenericField<F>) {
        match self {
            DuplexState::Absorbing { buf, .. } => {
                buf.push(element);
            }
            DuplexState::Squeezing { state, .. } => {
                let buf = vec![element];
                *self = DuplexState::Absorbing {
                    state: state.clone(),
                    buf,
                };
            }
        }
    }

    /// Squeeze out a single challenge element (a Goldilocks field element).
    /// In absorbing mode: the buffered elements are absorbed by running the sponge,
    /// the state switches to `Squeezing`, and the output buffer is filled with Goldilocks elements.
    /// In squeezing mode: we take elements from the buffer; if the buffer is empty, we permute and refill it.
    pub fn squeeze(&mut self) -> F {
        match self {
            DuplexState::Absorbing { state, buf, .. } => {
                let input: Vec<GenericField<F>> = buf.drain(..).collect();
                H::sponge(state, input);
                let out_buf: Vec<F> = Self::squeeze_f(state);
                // Switch modes.
                *self = DuplexState::Squeezing {
                    state: state.clone(),
                    buf: out_buf,
                };
                // Fall through to squeezing.
                self.squeeze()
            }
            DuplexState::Squeezing { state, buf, .. } => {
                if buf.is_empty() {
                    // If the buffer is empty, permute to refill it.
                    state.permute();
                    *buf = Self::squeeze_f(state);
                }
                buf.pop().expect("Output buffer should not be empty")
            }
        }
    }

    /// Squeeze Goldilocks field elements out of the state.
    fn squeeze_f(state: &mut H::Permutation) -> Vec<F> {
        let out = H::squeeze_goldilocks(state);
        assert!(!out.is_empty());
        out
    }

    /// Grind, moved here from the FRI prover; it handles both modes
    /// (`Absorbing` and `Squeezing`).
    pub fn grind(&mut self, min_leading_zeros: u32) -> F {
        match self {
            DuplexState::Absorbing { state, buf, .. } => {
                let duplex_intermediate_state = state.clone();
                let buf_felts: Vec<GenericField<F>> = buf.clone();

                Self::grind_helper(duplex_intermediate_state, buf_felts, min_leading_zeros)
            }
            DuplexState::Squeezing { state, .. } => {
                let duplex_intermediate_state = state.clone();
                let buf_felts = vec![];
                Self::grind_helper(duplex_intermediate_state, buf_felts, min_leading_zeros)
            }
        }
    }

    fn grind_helper(state: H::Permutation, input: Vec<GenericField<F>>, min_leading_zeros: u32) -> F {
        (0..=F::NEG_ONE.to_canonical_u64())
            .into_par_iter()
            .find_any(|&candidate| {
                let mut duplex_state = state.clone();
                let mut sponge_input = input.clone();
                sponge_input.push(F::from_canonical_u64(candidate).into());
                H::sponge(&mut duplex_state, sponge_input);
                let temp_buf = Self::squeeze_f(&mut duplex_state);
                let pow_response = temp_buf.iter().last().unwrap();
                let leading_zeros = PrimeField64::to_canonical_u64(pow_response).leading_zeros();
                leading_zeros >= min_leading_zeros
            })
            .map(F::from_canonical_u64)
            .expect("Proof of work failed. This is highly unlikely!")
    }
}
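For intuition about the mode switching above, a minimal usage sketch (hypothetical driver code, assuming the `PoseidonHash` hasher and the `From<F> for GenericField<F>` conversion used throughout this PR):

use plonky2::hash::duplex::DuplexState;
use plonky2::hash::poseidon::PoseidonHash;
use plonky2_field::goldilocks_field::GoldilocksField;
use plonky2_field::types::Field;

type F = GoldilocksField;

fn main() {
    // Starts in absorbing mode; inputs are only buffered here.
    let mut duplex: DuplexState<F, PoseidonHash> = DuplexState::new();
    duplex.absorb(F::from_canonical_u64(1).into());
    duplex.absorb(F::from_canonical_u64(2).into());

    // The first squeeze runs the sponge over the buffered inputs and switches
    // to squeezing mode; later squeezes drain the output buffer, permuting
    // again only once it is empty.
    let c1 = duplex.squeeze();
    let c2 = duplex.squeeze();
    println!("challenges: {c1} {c2}");

    // Absorbing again trashes any unread outputs and switches back.
    duplex.absorb(F::from_canonical_u64(3).into());
    let c3 = duplex.squeeze();
    println!("{c3}");
}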
@@ -10,13 +10,67 @@ use crate::field::goldilocks_field::GoldilocksField;
 use crate::field::types::{Field, PrimeField64, Sample};
 use crate::hash::poseidon::Poseidon;
 use crate::iop::target::Target;
-use crate::plonk::config::GenericHashOut;
+use crate::plonk::config::{GenericField, GenericHashOut};
+use ark_bn254::Fr as BN254Fr;
+use crate::hash::poseidon2_bn254::{bytes_le_to_felts, felts_to_bytes_le};
 
 /// A prime order field with the features we need to use it as a base field in our argument system.
 pub trait RichField: PrimeField64 + Poseidon {}
 
 impl RichField for GoldilocksField {}
 
+/// Hash digest for the BN254 field; contains a single Fr element.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
+pub struct BN254HashOut {
+    pub element: BN254Fr,
+}
+
+/// Serialize the BN254 field element (uses arkworks).
+impl Serialize for BN254HashOut {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let element_to_bytes = felts_to_bytes_le(&self.element);
+        serializer.serialize_bytes(&element_to_bytes)
+    }
+}
+
+/// Deserialize the BN254 field element (uses arkworks).
+impl<'de> Deserialize<'de> for BN254HashOut {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let element_as_bytes = <[u8; 32]>::deserialize(deserializer)?;
+        let mut element_array = <[u8; 32]>::default();
+        element_array.copy_from_slice(&element_as_bytes[0..32]);
+
+        let deserialized_element = bytes_le_to_felts(&element_array);
+
+        Ok(Self {
+            element: deserialized_element,
+        })
+    }
+}
+
+/// Implement GenericHashOut for the BN254 hash digest.
+/// `F` here is the Goldilocks field, not the BN254 field.
+impl<F: RichField> GenericHashOut<F> for BN254HashOut {
+    fn to_bytes(&self) -> Vec<u8> {
+        felts_to_bytes_le(&self.element)
+    }
+
+    fn from_bytes(bytes: &[u8]) -> Self {
+        assert_eq!(bytes.len(), 32);
+        BN254HashOut {
+            element: bytes_le_to_felts(bytes),
+        }
+    }
+
+    fn to_vec(&self) -> Vec<GenericField<F>> {
+        vec![self.element.clone().into()]
+    }
+}
+
 pub const NUM_HASH_OUT_ELTS: usize = 4;
 
 /// Represents a ~256 bit hash output.
@@ -103,8 +157,13 @@ impl<F: RichField> GenericHashOut<F> for HashOut<F> {
         }
     }
 
-    fn to_vec(&self) -> Vec<F> {
-        self.elements.to_vec()
+    fn to_vec(&self) -> Vec<GenericField<F>> {
+        self.elements
+            .iter()
+            .copied()
+            .map(GenericField::<F>::Goldilocks)
+            .collect()
     }
 }
 
@@ -181,14 +240,15 @@ impl<F: RichField, const N: usize> GenericHashOut<F> for BytesHash<N> {
         Self(bytes.try_into().unwrap())
     }
 
-    fn to_vec(&self) -> Vec<F> {
+    fn to_vec(&self) -> Vec<GenericField<F>> {
         self.0
             // Chunks of 7 bytes since 8 bytes would allow collisions.
             .chunks(7)
             .map(|bytes| {
-                let mut arr = [0; 8];
+                let mut arr = [0u8; 8];
                 arr[..bytes.len()].copy_from_slice(bytes);
-                F::from_canonical_u64(u64::from_le_bytes(arr))
+                let raw = F::from_canonical_u64(u64::from_le_bytes(arr));
+                raw.into()
             })
            .collect()
    }
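The "chunks of 7 bytes" comment in `BytesHash::to_vec` is worth spelling out: 7 bytes give values up to 2^56 - 1, strictly below the Goldilocks order p = 2^64 - 2^32 + 1, so every chunk maps to a distinct canonical field element, while a full 8-byte chunk could exceed p and collide after reduction. A quick standalone check of that bound (constants only; not tied to the plonky2 API):

fn main() {
    const GOLDILOCKS_ORDER: u128 = 0xFFFF_FFFF_0000_0001; // 2^64 - 2^32 + 1
    let max_7_bytes: u128 = (1u128 << 56) - 1;
    let max_8_bytes: u128 = (1u128 << 64) - 1;
    // 7-byte chunks are always canonical field elements...
    assert!(max_7_bytes < GOLDILOCKS_ORDER);
    // ...while 8-byte chunks can wrap around and collide.
    assert!(max_8_bytes >= GOLDILOCKS_ORDER);
    println!("7-byte packing is injective into the Goldilocks field");
}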
@@ -7,7 +7,7 @@ use keccak_hash::keccak;
 
 use crate::hash::hash_types::{BytesHash, RichField};
 use crate::hash::hashing::PlonkyPermutation;
-use crate::plonk::config::Hasher;
+use crate::plonk::config::{GenericField, GenericHashOut, Hasher};
 use crate::util::serialization::Write;
 
 pub const SPONGE_RATE: usize = 8;
@@ -102,19 +102,76 @@ impl<F: RichField> PlonkyPermutation<F> for KeccakPermutation<F> {
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub struct KeccakHash<const N: usize>;
 impl<F: RichField, const N: usize> Hasher<F> for KeccakHash<N> {
+    type HF = F;
     const HASH_SIZE: usize = N;
     type Hash = BytesHash<N>;
     type Permutation = KeccakPermutation<F>;
 
-    fn hash_no_pad(input: &[F]) -> Self::Hash {
+    fn hash_pad(input: &[GenericField<F>]) -> Self::Hash {
+        let mut padded_input = input.to_vec();
+        padded_input.push(F::ONE.into());
+        while (padded_input.len() + 1) % Self::Permutation::RATE != 0 {
+            padded_input.push(F::ZERO.into());
+        }
+        padded_input.push(F::ONE.into());
+        Self::hash_no_pad(&padded_input)
+    }
+
+    fn hash_or_noop(inputs: &[GenericField<F>]) -> Self::Hash {
+        let hash_size = 4 * 8;
+        if inputs.len() * 8 <= hash_size {
+            let mut inputs_bytes = vec![0u8; hash_size];
+            for i in 0..inputs.len() {
+                let goldilocks_felt = match inputs[i].clone() {
+                    GenericField::Goldilocks(v) => v,
+                    GenericField::BN254(_) => panic!("BN input is not supported for goldilocks hasher"),
+                };
+                inputs_bytes[i * 8..(i + 1) * 8]
+                    .copy_from_slice(&goldilocks_felt.to_canonical_u64().to_le_bytes());
+            }
+            <BytesHash<N> as GenericHashOut<F>>::from_bytes(&inputs_bytes)
+        } else {
+            Self::hash_no_pad(inputs)
+        }
+    }
+
+    fn hash_no_pad(input: &[GenericField<F>]) -> Self::Hash {
         let mut buffer = Vec::with_capacity(input.len());
-        buffer.write_field_vec(input).unwrap();
+        let mut goldilocks_felts = vec![];
+        for e in input {
+            // Only accept Goldilocks (for now!).
+            match e {
+                GenericField::Goldilocks(v) => goldilocks_felts.push(*v),
+                GenericField::BN254(_) => panic!("BN input is not supported for goldilocks hasher"),
+            }
+        }
+
+        buffer.write_field_vec(&goldilocks_felts).unwrap();
         let mut arr = [0; N];
         let hash_bytes = keccak(buffer).0;
         arr.copy_from_slice(&hash_bytes[..N]);
         BytesHash(arr)
     }
 
+    fn sponge(state: &mut Self::Permutation, input: Vec<GenericField<F>>) {
+        let mut goldilocks_felts = vec![];
+        for e in input {
+            // Only accept Goldilocks (for now!).
+            match e {
+                GenericField::Goldilocks(v) => goldilocks_felts.push(v),
+                GenericField::BN254(_) => panic!("BN input is not supported for goldilocks hasher"),
+            }
+        }
+        for chunk in goldilocks_felts.chunks(Self::Permutation::RATE) {
+            state.set_from_slice(chunk, 0);
+            state.permute();
+        }
+    }
+
+    fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec<F> {
+        state.squeeze().to_vec()
+    }
+
     fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
         let mut v = vec![0; N * 2];
         v[0..N].copy_from_slice(&left.0);
@@ -13,7 +13,7 @@ use crate::hash::merkle_tree::MerkleCap;
 use crate::iop::target::{BoolTarget, Target};
 use crate::plonk::circuit_builder::CircuitBuilder;
 use crate::plonk::circuit_data::VerifierCircuitTarget;
-use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher};
+use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher, IntoGenericFieldVec};
 
 #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
 #[serde(bound = "")]
@@ -77,7 +77,12 @@ pub fn verify_batch_merkle_proof_to_cap<F: RichField, H: Hasher<F>>(
     proof: &MerkleProof<F, H>,
 ) -> Result<()> {
     assert_eq!(leaf_data.len(), leaf_heights.len());
-    let mut current_digest = H::hash_or_noop(&leaf_data[0]);
+    let leaf_data_felts: Vec<Vec<GenericField<F>>> = leaf_data
+        .into_iter()
+        .map(|inner| inner.clone().into_generic_field_vec())
+        .collect();
+    let mut current_digest = H::hash_or_noop(&leaf_data_felts[0]);
     let mut current_height = leaf_heights[0];
     let mut leaf_data_index = 1;
     for &sibling_digest in &proof.siblings {
@@ -92,12 +97,12 @@ pub fn verify_batch_merkle_proof_to_cap<F: RichField, H: Hasher<F>>(
 
         if leaf_data_index < leaf_heights.len() && current_height == leaf_heights[leaf_data_index] {
             let mut new_leaves = current_digest.to_vec();
-            new_leaves.extend_from_slice(&leaf_data[leaf_data_index]);
+            new_leaves.extend_from_slice(&leaf_data_felts[leaf_data_index]);
             current_digest = H::hash_or_noop(&new_leaves);
             leaf_data_index += 1;
         }
     }
-    assert_eq!(leaf_data_index, leaf_data.len());
+    assert_eq!(leaf_data_index, leaf_data_felts.len());
     ensure!(
         current_digest == merkle_cap.0[leaf_index],
         "Invalid Merkle proof."
@@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
 
 use crate::hash::hash_types::RichField;
 use crate::hash::merkle_proofs::MerkleProof;
-use crate::plonk::config::{GenericHashOut, Hasher};
+use crate::plonk::config::{GenericField, GenericHashOut, Hasher, IntoGenericFieldVec};
 use crate::util::log2_strict;
 
 /// The Merkle cap of height `h` of a Merkle tree is the `h`-th layer (from the root) of the tree.
@@ -37,7 +37,7 @@ impl<F: RichField, H: Hasher<F>> MerkleCap<F, H> {
         log2_strict(self.len())
     }
 
-    pub fn flatten(&self) -> Vec<F> {
+    pub fn flatten(&self) -> Vec<GenericField<F>> {
         self.0.iter().flat_map(|&h| h.to_vec()).collect()
     }
 }
@@ -85,7 +85,7 @@ pub(crate) fn capacity_up_to_mut<T>(v: &mut Vec<T>, len: usize) -> &mut [MaybeUn
 
 pub(crate) fn fill_subtree<F: RichField, H: Hasher<F>>(
     digests_buf: &mut [MaybeUninit<H::Hash>],
-    leaves: &[Vec<F>],
+    leaves: &[Vec<GenericField<F>>],
 ) -> H::Hash {
     assert_eq!(leaves.len(), digests_buf.len() / 2 + 1);
     if digests_buf.is_empty() {
@@ -115,7 +115,7 @@ pub(crate) fn fill_subtree<F: RichField, H: Hasher<F>>(
 pub(crate) fn fill_digests_buf<F: RichField, H: Hasher<F>>(
     digests_buf: &mut [MaybeUninit<H::Hash>],
     cap_buf: &mut [MaybeUninit<H::Hash>],
-    leaves: &[Vec<F>],
+    leaves: &[Vec<GenericField<F>>],
     cap_height: usize,
 ) {
     // Special case of a tree that's all cap. The usual case will panic because we'll try to split
@@ -207,8 +207,12 @@ impl<F: RichField, H: Hasher<F>> MerkleTree<F, H> {
 
         let digests_buf = capacity_up_to_mut(&mut digests, num_digests);
         let cap_buf = capacity_up_to_mut(&mut cap, len_cap);
-        fill_digests_buf::<F, H>(digests_buf, cap_buf, &leaves[..], cap_height);
 
+        let leaves_felts: Vec<Vec<GenericField<F>>> = leaves
+            .clone()
+            .into_iter()
+            .map(|inner| inner.into_generic_field_vec())
+            .collect();
+        fill_digests_buf::<F, H>(digests_buf, cap_buf, &leaves_felts[..], cap_height);
         unsafe {
             // SAFETY: `fill_digests_buf` and `cap` initialized the spare capacity up to
             // `num_digests` and `len_cap`, resp.
@@ -11,3 +11,5 @@ pub mod merkle_tree;
 pub mod path_compression;
 pub mod poseidon;
 pub mod poseidon_goldilocks;
+pub mod duplex;
+pub mod poseidon2_bn254;
@@ -6,7 +6,7 @@ use num::Integer;
 
 use crate::hash::hash_types::RichField;
 use crate::hash::merkle_proofs::MerkleProof;
-use crate::plonk::config::Hasher;
+use crate::plonk::config::{GenericField, Hasher, IntoGenericFieldVec};
 
 /// Compress multiple Merkle proofs on the same tree by removing redundancy in the Merkle paths.
 pub(crate) fn compress_merkle_proofs<F: RichField, H: Hasher<F>>(
@@ -68,7 +68,8 @@ pub(crate) fn decompress_merkle_proofs<F: RichField, H: Hasher<F>>(
 
     for (&i, v) in leaves_indices.iter().zip(leaves_data) {
         // Observe the leaves.
-        seen.insert(i + num_leaves, H::hash_or_noop(v));
+        let v_felts: Vec<GenericField<F>> = v.clone().into_generic_field_vec();
+        seen.insert(i + num_leaves, H::hash_or_noop(&v_felts));
     }
 
     // Iterators over the siblings.
@@ -19,7 +19,7 @@ use crate::hash::hashing::{HashUsage, increment_given_hash_counter};
 use crate::iop::ext_target::ExtensionTarget;
 use crate::iop::target::{BoolTarget, Target};
 use crate::plonk::circuit_builder::CircuitBuilder;
-use crate::plonk::config::{AlgebraicHasher, Hasher};
+use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher};
 
 pub const SPONGE_RATE: usize = 8;
 pub const SPONGE_CAPACITY: usize = 4;
@@ -875,17 +875,74 @@ impl<T: Copy + Debug + Default + Eq + Permuter + Send + Sync> PlonkyPermutation<
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub struct PoseidonHash;
 impl<F: RichField> Hasher<F> for PoseidonHash {
+    type HF = F;
     const HASH_SIZE: usize = 4 * 8;
     type Hash = HashOut<F>;
     type Permutation = PoseidonPermutation<F>;
 
-    fn hash_no_pad(input: &[F]) -> Self::Hash {
-        hash_n_to_hash_no_pad::<F, Self::Permutation>(input)
+    fn hash_no_pad(input: &[GenericField<F>]) -> Self::Hash {
+        let mut goldilocks_felts = vec![];
+        for e in input {
+            // For the Goldilocks hasher, only accept Goldilocks (for now!).
+            match e {
+                GenericField::Goldilocks(v) => goldilocks_felts.push(*v),
+                GenericField::BN254(_) => panic!("BN input is not supported for goldilocks hasher"),
+            }
+        }
+        hash_n_to_hash_no_pad::<F, Self::Permutation>(&goldilocks_felts)
     }
 
+    fn hash_pad(input: &[GenericField<F>]) -> Self::Hash {
+        let mut padded_input = input.to_vec();
+        padded_input.push(F::ONE.into());
+        while (padded_input.len() + 1) % Self::Permutation::RATE != 0 {
+            padded_input.push(F::ZERO.into());
+        }
+        padded_input.push(F::ONE.into());
+        Self::hash_no_pad(&padded_input)
+    }
+
+    fn hash_or_noop(inputs: &[GenericField<F>]) -> Self::Hash {
+        let hash_size = 4 * 8;
+        if inputs.len() * 8 <= hash_size {
+            let mut inputs_bytes = vec![0u8; hash_size];
+            for i in 0..inputs.len() {
+                // Only accept Goldilocks (for now!).
+                let goldilocks_felt = match inputs[i].clone() {
+                    GenericField::Goldilocks(v) => v,
+                    GenericField::BN254(_) => panic!("BN input is not supported for goldilocks hasher"),
+                };
+                inputs_bytes[i * 8..(i + 1) * 8]
+                    .copy_from_slice(&goldilocks_felt.to_canonical_u64().to_le_bytes());
+            }
+            Self::Hash::from_bytes(&inputs_bytes)
+        } else {
+            Self::hash_no_pad(inputs)
+        }
+    }
+
+    fn sponge(state: &mut Self::Permutation, input: Vec<GenericField<F>>) {
+        let mut goldilocks_felts = vec![];
+        for e in input {
+            // Only accept Goldilocks (for now!).
+            match e {
+                GenericField::Goldilocks(v) => goldilocks_felts.push(v),
+                GenericField::BN254(_) => panic!("BN input is not supported for goldilocks hasher"),
+            }
+        }
+        for chunk in goldilocks_felts.chunks(Self::Permutation::RATE) {
+            state.set_from_slice(chunk, 0);
+            state.permute();
+        }
+    }
+
     fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
         compress::<F, Self::Permutation>(left, right)
     }
+
+    fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec<F> {
+        state.squeeze().to_vec()
+    }
 }
 
 impl<F: RichField> AlgebraicHasher<F> for PoseidonHash {
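The `hash_pad` rule above (push `1`, pad with `0`s until the length is one short of a multiple of the rate, then push a final `1`) is the classic 10*1 sponge padding, and always produces an input whose length is an exact multiple of `RATE`. A quick standalone check of the length arithmetic (plain integers; `rate = 8` mirrors `SPONGE_RATE` here):

fn padded_len(input_len: usize, rate: usize) -> usize {
    let mut len = input_len + 1; // the leading 1
    while (len + 1) % rate != 0 {
        len += 1; // zero padding
    }
    len + 1 // the trailing 1
}

fn main() {
    for n in 0..20 {
        assert_eq!(padded_len(n, 8) % 8, 0);
    }
    // e.g. 3 inputs -> one 1, three 0s, one 1 = 8 absorbed elements total.
    assert_eq!(padded_len(3, 8), 8);
}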
plonky2/src/hash/poseidon2_bn254.rs — 431 lines (new file)

@@ -0,0 +1,431 @@
#[cfg(not(feature = "std"))]
use alloc::{vec, vec::Vec};
use core::fmt::Debug;
use core::mem::size_of;

use crate::hash::hash_types::{BN254HashOut, RichField};
use crate::hash::hashing::PlonkyPermutation;
use crate::plonk::config::{GenericField, Hasher};
use ark_bn254::Fr as BN254Fr;
use ark_ff::BigInt as arkBigInt;
use ark_ff::{PrimeField, Zero};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use num::Integer;
use num_bigint::BigUint;
use plonky2_field::goldilocks_field::GoldilocksField;
use plonky2_field::types::Field64;
use rust_bn254_hash::hash::Hash;
use rust_bn254_hash::poseidon2::permutation::permute_inplace as permute_bn254_inplace;
use rust_bn254_hash::sponge::{sponge_felts_no_pad, sponge_felts_pad};
use rust_bn254_hash::state::State;

pub const SPONGE_RATE: usize = 2;
pub const SPONGE_CAPACITY: usize = 1;
pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;

/// Poseidon2 state with BN254 elements.
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]
pub struct Poseidon2BN254Perm {
    state: [BN254Fr; SPONGE_WIDTH],
}

/// Needed for PlonkyPermutation.
impl AsRef<[BN254Fr]> for Poseidon2BN254Perm {
    fn as_ref(&self) -> &[BN254Fr] {
        &self.state
    }
}

impl PlonkyPermutation<BN254Fr> for Poseidon2BN254Perm {
    const RATE: usize = SPONGE_RATE;
    const WIDTH: usize = SPONGE_WIDTH;

    fn new<I: IntoIterator<Item = BN254Fr>>(elts: I) -> Self {
        let mut perm = Self {
            state: [BN254Fr::default(); SPONGE_WIDTH],
        };
        perm.set_from_iter(elts, 0);
        perm
    }

    fn set_elt(&mut self, elt: BN254Fr, idx: usize) {
        self.state[idx] = elt;
    }

    fn set_from_slice(&mut self, elts: &[BN254Fr], start_idx: usize) {
        let begin = start_idx;
        let end = start_idx + elts.len();
        self.state[begin..end].copy_from_slice(elts);
    }

    fn set_from_iter<I: IntoIterator<Item = BN254Fr>>(&mut self, elts: I, start_idx: usize) {
        for (s, e) in self.state[start_idx..].iter_mut().zip(elts) {
            *s = e;
        }
    }

    /// Calls the permutation in `rust-bn254-hash`.
    /// We can probably refactor the state and eliminate the conversion in this fn.
    fn permute(&mut self) {
        let mut s = State {
            x: self.state[0].clone(),
            y: self.state[1].clone(),
            z: self.state[2].clone(),
        };

        permute_bn254_inplace(&mut s);

        self.state = [s.x, s.y, s.z];
    }

    fn squeeze(&self) -> &[BN254Fr] {
        &self.state[..Self::RATE]
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Poseidon2BN254;
impl<F: RichField> Hasher<F> for Poseidon2BN254 {
    type HF = BN254Fr;
    const HASH_SIZE: usize = 32;
    type Hash = BN254HashOut;
    type Permutation = Poseidon2BN254Perm;

    fn hash_no_pad(input: &[GenericField<F>]) -> Self::Hash {
        let bn_felts = generic_field_to_bn(input);
        let hash = sponge_felts_no_pad(Hash::Poseidon2, bn_felts);
        BN254HashOut { element: hash }
    }

    fn hash_pad(input: &[GenericField<F>]) -> Self::Hash {
        let bn_felts = generic_field_to_bn(input);
        let hash = sponge_felts_pad(Hash::Poseidon2, bn_felts);
        BN254HashOut { element: hash }
    }

    fn hash_or_noop(inputs: &[GenericField<F>]) -> Self::Hash {
        let hash_size = 32;
        if check_len_in_bytes(inputs) <= hash_size {
            if inputs.len() == 1 {
                // If there is one element and it is a BN field element, return it.
                if let GenericField::BN254(v) = inputs[0].clone() {
                    return BN254HashOut { element: v };
                }
            }
        }
        // TODO: if we get 4 or fewer Goldilocks elements, convert to BN and return?
        Self::hash_no_pad(inputs)
    }

    fn sponge(state: &mut Self::Permutation, input: Vec<GenericField<F>>) {
        let bn_felts = generic_field_to_bn(&input);

        // Absorb in overwrite mode.
        for chunk in bn_felts.chunks(2) {
            state.set_from_slice(chunk, 0);
            state.permute();
        }
    }

    fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash {
        let mut perm = Self::Permutation::new(core::iter::repeat(BN254Fr::zero()));
        perm.set_from_slice(&[left.element], 0);
        perm.set_from_slice(&[right.element], 1);

        perm.permute();
        let out = perm.squeeze();

        BN254HashOut {
            element: out[0].clone(),
        }
    }

    fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec<F> {
        // Squeeze out BN254 elements from the sponge state.
        let bn_out = state.squeeze();

        // Convert BN254 to Goldilocks.
        bn_to_goldilocks(bn_out)
    }
}

// --------- Conversion helper functions ---------------------

/// Converts a slice of BN254 field elements to a vector of Goldilocks (F) by:
///
/// - Interpreting each BN254 element as an unsigned big integer `BigUint`.
/// - Repeatedly taking `remainder = X mod Goldilocks::ORDER` (which fits in a `u64`)
///   and then dividing `X` by `Goldilocks::ORDER`.
/// - Repeating this exactly 3 times for each BN254 element in the slice, generating 3*l
///   Goldilocks elements, where l is the length of the slice.
///
/// We use this primarily in hashing contexts (for Fiat-Shamir in Plonky2 circuits), where
/// we want to safely convert a ~254-bit BN254 element into multiple 64-bit
/// Goldilocks elements. The little leftover in `X` after extracting 3 remainders
/// is trashed, so there is a negligible bias.
fn bn_to_goldilocks<F: RichField>(input: &[BN254Fr]) -> Vec<F> {
    // Goldilocks order.
    let r: BigUint = BigUint::from(GoldilocksField::ORDER);

    let mut goldilocks_felts = Vec::new();
    // For each BN254 field element, extract 3 Goldilocks elements.
    for fe in input.into_iter().cloned() {
        // Convert BN254Fr -> 256-bit big integer.
        let mut big: BigUint = fe.into_bigint().into();

        // We want three remainders in [0, p_Goldilocks); each fits into a 64-bit integer.
        for _ in 0..3 {
            let (quotient, remainder) = big.div_rem(&r);
            let rem_u64 = remainder.to_u64_digits();

            // Check, just for safety:
            if rem_u64.len() > 1 {
                panic!("Remainder unexpectedly larger than 64 bits.")
            } else if rem_u64.len() == 1 {
                let r64 = rem_u64[0];
                goldilocks_felts.push(F::from_canonical_u64(r64));
            } else {
                // A zero remainder has no u64 digits but still contributes an element;
                // skipping it would silently shorten the output.
                goldilocks_felts.push(F::ZERO);
            }

            // Update big to the quotient for the next remainder.
            big = quotient;
        }
    }
    goldilocks_felts
}

/// Converts a vec of Goldilocks elements into BN254 elements.
/// - Packs `7` consecutive `u64` values into `2` BN254 field elements.
/// - If the total number of Goldilocks elements is not a multiple of 7, we
///   zero-pad the last chunk up to 7. That chunk still produces 2 BN254 field elements.
/// - Returns: a `Vec<BN254Fr>`.
///
/// **Note**: This is used for packing a sequence of 64-bit words into
/// BN254 in a safe way. It is NOT the inverse of `bn_to_goldilocks`.
fn goldilocks_to_bn<F: RichField>(input: &Vec<F>) -> Vec<BN254Fr> {
    let u64s: Vec<u64> = input.iter().map(|x| x.to_canonical_u64()).collect();
    let l = u64s.len();
    let m = l / 7;
    let mut result = Vec::new();

    for i in 0..m {
        let group: [u64; 7] = u64s[7 * i..7 * (i + 1)].try_into().unwrap();
        let (a, b) = u64s_to_felts(group);
        result.push(a);
        result.push(b);
    }

    let r = l - 7 * m;
    if r > 0 {
        let mut ws = [0u64; 7];
        for i in 0..r {
            ws[i] = u64s[7 * m + i];
        }
        let (a, b) = u64s_to_felts(ws);
        result.push(a);
        result.push(b);
    }
    result
}

const BIGINT_TWO_TO_64: arkBigInt<4> = arkBigInt([0, 1, 0, 0]);
const BIGINT_TWO_TO_128: arkBigInt<4> = arkBigInt([0, 0, 1, 0]);
const BIGINT_TWO_TO_192: arkBigInt<4> = arkBigInt([0, 0, 0, 1]);

/// Converts u64s to BN254 elements — taken directly from rust-bn254-hash.
pub fn u64s_to_felts(ws: [u64; 7]) -> (BN254Fr, BN254Fr) {
    let hi = ws[6] >> 32;
    let lo = ws[6] & 0xFFFF_FFFF;

    let field_powers_of_two_to_64: [BN254Fr; 3] = [
        BN254Fr::from_bigint(BIGINT_TWO_TO_64).unwrap(),
        BN254Fr::from_bigint(BIGINT_TWO_TO_128).unwrap(),
        BN254Fr::from_bigint(BIGINT_TWO_TO_192).unwrap(),
    ];

    let x = BN254Fr::from(ws[0])
        + field_powers_of_two_to_64[0] * BN254Fr::from(ws[1])
        + field_powers_of_two_to_64[1] * BN254Fr::from(ws[2])
        + field_powers_of_two_to_64[2] * BN254Fr::from(lo);

    let y = BN254Fr::from(ws[3])
        + field_powers_of_two_to_64[0] * BN254Fr::from(ws[4])
        + field_powers_of_two_to_64[1] * BN254Fr::from(ws[5])
        + field_powers_of_two_to_64[2] * BN254Fr::from(hi);

    (x, y)
}

/// Helper function: converts a slice of GenericField<F> into a Vec<BN254Fr>.
/// The fn groups consecutive Goldilocks elements and converts them in one shot.
fn generic_field_to_bn<F: RichField>(input: &[GenericField<F>]) -> Vec<BN254Fr> {
    let mut bn_felts = Vec::new();
    let mut temp_goldilocks = Vec::new();

    for e in input.iter().copied() {
        match e {
            GenericField::Goldilocks(v) => {
                // Accumulate consecutive Goldilocks field elements.
                temp_goldilocks.push(v);
            }
            GenericField::BN254(v) => {
                // Convert any accumulated Goldilocks elements.
                if !temp_goldilocks.is_empty() {
                    let converted = goldilocks_to_bn(&temp_goldilocks);
                    bn_felts.extend(converted);
                    temp_goldilocks.clear();
                }
                // Push the BN field element directly.
                bn_felts.push(v);
            }
        }
    }
    // Convert any remaining Goldilocks elements.
    if !temp_goldilocks.is_empty() {
        let converted = goldilocks_to_bn(&temp_goldilocks);
        bn_felts.extend(converted);
    }

    bn_felts
}

/// Computes the length in bytes of a vector of generic field elements.
fn check_len_in_bytes<F: RichField>(input: &[GenericField<F>]) -> usize {
    input
        .iter()
        .map(|elem| match elem {
            GenericField::BN254(_) => 32,
            GenericField::Goldilocks(_) => 8,
        })
        .sum()
}

//------------------ serialization for BN254 ---------------------

pub fn felts_to_bytes_le<E>(f: &E) -> Vec<u8>
where
    E: CanonicalSerialize,
{
    let mut bytes = Vec::new();
    f.serialize_uncompressed(&mut bytes).expect("serialization failed");
    bytes
}

pub fn bytes_le_to_felts<E>(bytes: &[u8]) -> E
where
    E: CanonicalDeserialize,
{
    E::deserialize_uncompressed(bytes).unwrap()
}

pub fn felts_to_u64<E>(f: E) -> Vec<u64>
where
    E: CanonicalSerialize,
{
    let mut bytes = Vec::new();
    f.serialize_uncompressed(&mut bytes)
        .expect("serialization failed");
    bytes
        .chunks_exact(size_of::<u64>())
        .map(|chunk| u64::from_le_bytes(chunk.try_into().unwrap()))
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;
    use ark_bn254::Fr as BN254Fr;
    use ark_ff::{One, Zero};
    use ark_std::{test_rng, UniformRand};
    use plonky2_field::types::Field;

    /// Test converting a BN254 element to bytes and back.
    #[test]
    fn test_felts_bytes_roundtrip() {
        let element = <BN254Fr as PrimeField>::from_bigint(arkBigInt::from(987654321u64)).unwrap();
        let bytes = felts_to_bytes_le(&element);
        assert_eq!(bytes.len(), 32, "Expected 32 bytes for BN254Fr serialization");
        let recovered: BN254Fr = bytes_le_to_felts(&bytes);
        assert_eq!(element, recovered, "Roundtrip conversion did not recover the original element");
    }

    /// Test roundtrip with edge cases: zero and one.
    #[test]
    fn test_zero_and_one_byte_conversion() {
        let zero = BN254Fr::zero();
        let one = BN254Fr::one();

        let zero_bytes = felts_to_bytes_le(&zero);
        let one_bytes = felts_to_bytes_le(&one);

        // Check that both serializations are 32 bytes.
        assert_eq!(zero_bytes.len(), 32, "Zero should serialize to 32 bytes");
        assert_eq!(one_bytes.len(), 32, "One should serialize to 32 bytes");

        let zero_back: BN254Fr = bytes_le_to_felts(&zero_bytes);
        let one_back: BN254Fr = bytes_le_to_felts(&one_bytes);

        assert_eq!(zero, zero_back, "Zero did not roundtrip correctly");
        assert_eq!(one, one_back, "One did not roundtrip correctly");
    }

    /// Test that bn_to_goldilocks produces exactly 3 Goldilocks per BN254 element.
    #[test]
    fn test_bn_to_goldilocks_three_remainders() {
        // We'll test random BN254 elements to ensure no overflow panic.
        let num_tests = 1000;
        let mut bn_vec = Vec::with_capacity(num_tests);
        for _ in 0..num_tests {
            // A random BN254 field element.
            let fe = BN254Fr::rand(&mut test_rng());
            bn_vec.push(fe);
        }

        let goldi_vec = bn_to_goldilocks::<GoldilocksField>(&bn_vec);
        // Should be exactly 3 * num_tests.
        assert_eq!(goldi_vec.len(), 3 * num_tests);
    }

    /// Test that exactly 7 Goldilocks produce 2 BN254, and that the leftover is padded for partial groups.
    #[test]
    fn test_goldilocks_to_bn_packing() {
        // 7 exact Goldilocks => 2 BN254.
        let goldis7 = vec![
            GoldilocksField::from_canonical_u64(1),
            GoldilocksField::from_canonical_u64(2),
            GoldilocksField::from_canonical_u64(3),
            GoldilocksField::from_canonical_u64(4),
            GoldilocksField::from_canonical_u64(5),
            GoldilocksField::from_canonical_u64(6),
            GoldilocksField::from_canonical_u64(7),
        ];
        let bn_out = goldilocks_to_bn(&goldis7);
        assert_eq!(bn_out.len(), 2, "7 Goldilocks should map to 2 BN254 elements");

        // Now test leftover: 8 Goldilocks => we expect 2 BN254 from the first 7, plus
        // 2 more BN254 for the leftover 1 (padded to 7). So 4 BN254 elements in total.
        let goldis8 = {
            let mut v = goldis7.clone();
            v.push(GoldilocksField::from_canonical_u64(123));
            v
        };
        let bn_out_8 = goldilocks_to_bn(&goldis8);
        assert_eq!(bn_out_8.len(), 4, "8 Goldilocks -> 4 BN254");
    }
}
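To make the `bn_to_goldilocks` extraction concrete, here is a minimal standalone sketch of the same 3-remainder scheme using only `num-bigint` and `num-integer` (hypothetical driver code, not part of the PR; the big decimal literal stands in for a BN254 element):

use num_bigint::BigUint;
use num_integer::Integer;

fn main() {
    let goldilocks_order: BigUint = BigUint::from(0xFFFF_FFFF_0000_0001u64);

    // Stand-in for a BN254 element: an arbitrary ~250-bit integer.
    let mut x = BigUint::parse_bytes(
        b"1234567890123456789012345678901234567890123456789012345678901234567890123456",
        10,
    )
    .unwrap();

    // Extract three base-p digits; the remaining high bits of the quotient are
    // discarded, which introduces only a negligible bias.
    let mut digits = Vec::new();
    for _ in 0..3 {
        let (q, rem) = x.div_rem(&goldilocks_order);
        // A zero remainder is still a valid digit (0).
        digits.push(rem.to_u64_digits().first().copied().unwrap_or(0));
        x = q;
    }
    assert_eq!(digits.len(), 3);
    println!("digits = {:?}", digits);
}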
@ -3,20 +3,20 @@ use alloc::{vec, vec::Vec};
|
||||
use core::marker::PhantomData;
|
||||
|
||||
use crate::field::extension::{Extendable, FieldExtension};
|
||||
use crate::hash::duplex::DuplexState;
|
||||
use crate::hash::hash_types::{HashOut, HashOutTarget, MerkleCapTarget, RichField};
|
||||
use crate::hash::hashing::PlonkyPermutation;
|
||||
use crate::hash::merkle_tree::MerkleCap;
|
||||
use crate::iop::ext_target::ExtensionTarget;
|
||||
use crate::iop::target::Target;
|
||||
use crate::plonk::circuit_builder::CircuitBuilder;
|
||||
use crate::plonk::config::{AlgebraicHasher, GenericHashOut, Hasher};
|
||||
use crate::plonk::config::{AlgebraicHasher, GenericField, GenericHashOut, Hasher};
|
||||
|
||||
/// Observes prover messages, and generates challenges by hashing the transcript, a la Fiat-Shamir.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Challenger<F: RichField, H: Hasher<F>> {
|
||||
pub(crate) sponge_state: H::Permutation,
|
||||
pub(crate) input_buffer: Vec<F>,
|
||||
output_buffer: Vec<F>,
|
||||
pub struct Challenger<F: RichField, H: Hasher<F>>
|
||||
{
|
||||
duplex_state: DuplexState<F, H>
|
||||
}
|
||||
|
||||
/// Observes prover messages, and generates verifier challenges based on the transcript.
|
||||
@ -27,42 +27,36 @@ pub struct Challenger<F: RichField, H: Hasher<F>> {
|
||||
/// design, but it can be viewed as a duplex sponge whose inputs are sometimes zero (when we perform
|
||||
/// multiple squeezes) and whose outputs are sometimes ignored (when we perform multiple
|
||||
/// absorptions). Thus the security properties of a duplex sponge still apply to our design.
|
||||
impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
|
||||
impl<F: RichField, H: Hasher<F>> Challenger<F, H>
|
||||
{
|
||||
pub fn new() -> Challenger<F, H> {
|
||||
Challenger {
|
||||
sponge_state: H::Permutation::new(core::iter::repeat(F::ZERO)),
|
||||
input_buffer: Vec::with_capacity(H::Permutation::RATE),
|
||||
output_buffer: Vec::with_capacity(H::Permutation::RATE),
|
||||
duplex_state: DuplexState::<F, H>::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn observe_element(&mut self, element: F) {
|
||||
// Any buffered outputs are now invalid, since they wouldn't reflect this input.
|
||||
self.output_buffer.clear();
|
||||
|
||||
self.input_buffer.push(element);
|
||||
|
||||
if self.input_buffer.len() == H::Permutation::RATE {
|
||||
self.duplexing();
|
||||
}
|
||||
pub fn observe_element(&mut self, element: GenericField<F>) {
|
||||
self.duplex_state.absorb(element)
|
||||
}
|
||||
|
||||
pub fn observe_extension_element<const D: usize>(&mut self, element: &F::Extension)
|
||||
where
|
||||
F: RichField + Extendable<D>,
|
||||
where
|
||||
F: RichField + Extendable<D>,
|
||||
{
|
||||
self.observe_elements(&element.to_basefield_array());
|
||||
let elements = element.to_basefield_array().map(|e: F|e.into());
|
||||
|
||||
self.observe_elements(&elements);
|
||||
}
|
||||
|
||||
pub fn observe_elements(&mut self, elements: &[F]) {
|
||||
pub fn observe_elements(&mut self, elements: &[GenericField<F>]) {
|
||||
for &element in elements {
|
||||
self.observe_element(element);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn observe_extension_elements<const D: usize>(&mut self, elements: &[F::Extension])
|
||||
where
|
||||
F: RichField + Extendable<D>,
|
||||
where
|
||||
F: RichField + Extendable<D>,
|
||||
{
|
||||
for element in elements {
|
||||
self.observe_extension_element(element);
|
||||
@ -80,15 +74,7 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
|
||||
}
|
||||
|
||||
pub fn get_challenge(&mut self) -> F {
|
||||
// If we have buffered inputs, we must perform a duplexing so that the challenge will
|
||||
// reflect them. Or if we've run out of outputs, we must perform a duplexing to get more.
|
||||
if !self.input_buffer.is_empty() || self.output_buffer.is_empty() {
|
||||
self.duplexing();
|
||||
}
|
||||
|
||||
self.output_buffer
|
||||
.pop()
|
||||
.expect("Output buffer should be non-empty")
|
||||
self.duplex_state.squeeze()
|
||||
}
|
||||
|
||||
pub fn get_n_challenges(&mut self, n: usize) -> Vec<F> {
|
||||
@@ -107,8 +93,8 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
     }

     pub fn get_extension_challenge<const D: usize>(&mut self) -> F::Extension
     where
         F: RichField + Extendable<D>,
     {
         let mut arr = [F::ZERO; D];
         arr.copy_from_slice(&self.get_n_challenges(D));
@@ -116,40 +102,18 @@ impl<F: RichField, H: Hasher<F>> Challenger<F, H> {
     }

     pub fn get_n_extension_challenges<const D: usize>(&mut self, n: usize) -> Vec<F::Extension>
     where
         F: RichField + Extendable<D>,
     {
         (0..n)
             .map(|_| self.get_extension_challenge::<D>())
             .collect()
     }

-    /// Absorb any buffered inputs. After calling this, the input buffer will be empty, and the
-    /// output buffer will be full.
-    fn duplexing(&mut self) {
-        assert!(self.input_buffer.len() <= H::Permutation::RATE);
-
-        // Overwrite the first r elements with the inputs. This differs from a standard sponge,
-        // where we would xor or add in the inputs. This is a well-known variant, though,
-        // sometimes called "overwrite mode".
-        self.sponge_state
-            .set_from_iter(self.input_buffer.drain(..), 0);
-
-        // Apply the permutation.
-        self.sponge_state.permute();
-
-        self.output_buffer.clear();
-        self.output_buffer
-            .extend_from_slice(self.sponge_state.squeeze());
+    pub fn grind(&mut self, min_leading_zeros: u32) -> F {
+        self.duplex_state.grind(min_leading_zeros)
     }

-    pub fn compact(&mut self) -> H::Permutation {
-        if !self.input_buffer.is_empty() {
-            self.duplexing();
-        }
-        self.output_buffer.clear();
-        self.sponge_state
-    }
 }

 impl<F: RichField, H: AlgebraicHasher<F>> Default for Challenger<F, H> {
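
The absorb/squeeze discipline that the doc comment above describes can be made concrete with a toy state. The sketch below is illustrative only: the `DuplexState` type this commit delegates to is not shown in these hunks, so `ToyDuplex` is a stand-in with a made-up permutation rather than Poseidon.

    /// Toy duplex sponge over u64, illustrating the absorb-then-squeeze
    /// pattern the new `Challenger` delegates to `DuplexState`.
    struct ToyDuplex {
        state: [u64; 4], // a real sponge would split this into rate and capacity
        pending: Vec<u64>,
    }

    impl ToyDuplex {
        fn new() -> Self {
            Self { state: [0; 4], pending: Vec::new() }
        }

        fn absorb(&mut self, x: u64) {
            self.pending.push(x);
        }

        fn squeeze(&mut self) -> u64 {
            // Fold any pending inputs into the state, then permute.
            for (i, x) in self.pending.drain(..).enumerate() {
                self.state[i % 4] ^= x;
            }
            for limb in self.state.iter_mut() {
                *limb = limb.wrapping_mul(6364136223846793005).rotate_left(17);
            }
            self.state[0]
        }
    }

    fn main() {
        let mut duplex = ToyDuplex::new();
        duplex.absorb(1);
        duplex.absorb(2);
        let c1 = duplex.squeeze(); // reflects both inputs
        let c2 = duplex.squeeze(); // no new input: "absorbing zero", per the comment above
        println!("c1 = {c1}, c2 = {c2}");
    }
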
@@ -304,7 +268,7 @@ mod tests {
     use crate::iop::witness::{PartialWitness, Witness};
     use crate::plonk::circuit_builder::CircuitBuilder;
     use crate::plonk::circuit_data::CircuitConfig;
-    use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
+    use crate::plonk::config::{GenericConfig, GenericField, IntoGenericFieldVec, PoseidonGoldilocksConfig};

     #[test]
     fn no_duplicate_challenges() {
@@ -316,7 +280,7 @@ mod tests {

         for i in 1..10 {
             challenges.extend(challenger.get_n_challenges(i));
-            challenger.observe_element(F::rand());
+            challenger.observe_element(F::rand().into());
         }

         let dedup_challenges = {
@@ -340,10 +304,14 @@ mod tests {
         let num_outputs_per_round = [1, 2, 4];

         // Generate random input messages.
-        let inputs_per_round: Vec<Vec<F>> = num_inputs_per_round
+        let inputs_per_round_f: Vec<Vec<F>> = num_inputs_per_round
             .iter()
             .map(|&n| F::rand_vec(n))
             .collect();
+        let inputs_per_round: Vec<Vec<GenericField<F>>> = inputs_per_round_f
+            .iter()
+            .map(|n| n.clone().into_generic_field_vec())
+            .collect();

         let mut challenger = Challenger::<F, <C as GenericConfig<D>>::InnerHasher>::new();
         let mut outputs_per_round: Vec<Vec<F>> = Vec::new();
@@ -357,7 +325,7 @@ mod tests {
         let mut recursive_challenger =
             RecursiveChallenger::<F, <C as GenericConfig<D>>::InnerHasher, D>::new(&mut builder);
         let mut recursive_outputs_per_round: Vec<Vec<Target>> = Vec::new();
-        for (r, inputs) in inputs_per_round.iter().enumerate() {
+        for (r, inputs) in inputs_per_round_f.iter().enumerate() {
             recursive_challenger.observe_elements(&builder.constants(inputs));
             recursive_outputs_per_round.push(
                 recursive_challenger.get_n_challenges(&mut builder, num_outputs_per_round[r]),
@@ -44,7 +44,7 @@ use crate::plonk::circuit_data::{
     CircuitConfig, CircuitData, CommonCircuitData, MockCircuitData, ProverCircuitData,
     ProverOnlyCircuitData, VerifierCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData,
 };
-use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericHashOut, Hasher};
+use crate::plonk::config::{AlgebraicHasher, GenericConfig, GenericField, GenericHashOut, Hasher, IntoGenericFieldVec};
 use crate::plonk::copy_constraint::CopyConstraint;
 use crate::plonk::permutation_argument::Forest;
 use crate::plonk::plonk_common::PlonkOracle;
@@ -1256,13 +1256,14 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
         };
         let constants_sigmas_cap = constants_sigmas_commitment.merkle_tree.cap.clone();
         let domain_separator = self.domain_separator.unwrap_or_default();
-        let domain_separator_digest = C::Hasher::hash_pad(&domain_separator);
+        let ds_felts: Vec<GenericField<F>> = domain_separator.clone().into_generic_field_vec();
+        let domain_separator_digest = C::Hasher::hash_pad(&ds_felts);
         // TODO: This should also include an encoding of gate constraints.
         let circuit_digest_parts = [
             constants_sigmas_cap.flatten(),
             domain_separator_digest.to_vec(),
             vec![
-                F::from_canonical_usize(degree_bits),
+                F::from_canonical_usize(degree_bits).into(),
                 /* Add other circuit data here */
             ],
         ];

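The `Vec<F>` to `Vec<GenericField<F>>` conversion seen here recurs below for public-input hashing in proof.rs and prover.rs: plain Goldilocks elements are wrapped before any `C::Hasher` call. A minimal sketch of the pattern, assuming the top-level `plonky2::` crate paths that this diff's `crate::` imports suggest:

    use plonky2::field::goldilocks_field::GoldilocksField as F;
    use plonky2::field::types::Field;
    use plonky2::plonk::config::{GenericField, IntoGenericFieldVec};

    fn main() {
        // Wrap plain Goldilocks elements so a field-generic hasher can consume them.
        let felts: Vec<F> = vec![F::ONE, F::TWO];
        let generic: Vec<GenericField<F>> = felts.into_generic_field_vec();
        assert!(matches!(generic[0], GenericField::Goldilocks(_)));
    }
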
@@ -8,7 +8,7 @@

 #[cfg(not(feature = "std"))]
 use alloc::{vec, vec::Vec};
-use core::fmt::Debug;
+use core::fmt::{Debug};

 use serde::de::DeserializeOwned;
 use serde::Serialize;
@@ -22,18 +22,114 @@ use crate::hash::keccak::KeccakHash;
 use crate::hash::poseidon::PoseidonHash;
 use crate::iop::target::{BoolTarget, Target};
 use crate::plonk::circuit_builder::CircuitBuilder;
+use ark_bn254::Fr as BN254Fr;
+use ark_ff::{One, Zero};
+use crate::hash::poseidon2_bn254::{bytes_le_to_felts, felts_to_bytes_le, Poseidon2BN254};

 pub trait GenericHashOut<F: RichField>:
     Copy + Clone + Debug + Eq + PartialEq + Send + Sync + Serialize + DeserializeOwned
 {
     fn to_bytes(&self) -> Vec<u8>;
     fn from_bytes(bytes: &[u8]) -> Self;

-    fn to_vec(&self) -> Vec<F>;
+    fn to_vec(&self) -> Vec<GenericField<F>>;
 }

+/// Generic field enum; supports only two fields for now.
+/// Supported fields: Goldilocks and BN254 Fr (from Arkworks).
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum GenericField<F: RichField> {
+    Goldilocks(F),
+    BN254(BN254Fr),
+}
+
+// Convert a Goldilocks field element into a GenericField.
+impl<F: RichField> From<F> for GenericField<F> {
+    fn from(x: F) -> Self {
+        GenericField::Goldilocks(x)
+    }
+}
+
+// Convert a BN254Fr element into a GenericField.
+impl<F: RichField> From<BN254Fr> for GenericField<F> {
+    fn from(x: BN254Fr) -> Self {
+        GenericField::BN254(x)
+    }
+}
+
+/// Extension trait to convert vectors of F or BN254Fr to Vec<GenericField<F>>.
+pub trait IntoGenericFieldVec<F: RichField> {
+    fn into_generic_field_vec(self) -> Vec<GenericField<F>>;
+}
+
+impl<F: RichField> IntoGenericFieldVec<F> for Vec<F> {
+    fn into_generic_field_vec(self) -> Vec<GenericField<F>> {
+        self.into_iter().map(GenericField::from).collect()
+    }
+}
+
+impl<F: RichField> IntoGenericFieldVec<F> for Vec<BN254Fr> {
+    fn into_generic_field_vec(self) -> Vec<GenericField<F>> {
+        self.into_iter().map(GenericField::from).collect()
+    }
+}

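Both variants flow through the same enum, so call sites never need to know which field a hasher works over. A small illustrative sketch (the `ark_bn254`/`ark_ff` imports match the ones added above; the top-level `plonky2::` paths are an assumption):

    use ark_bn254::Fr as BN254Fr;
    use ark_ff::One;
    use plonky2::field::goldilocks_field::GoldilocksField;
    use plonky2::field::types::Field;
    use plonky2::plonk::config::{GenericField, IntoGenericFieldVec};

    fn main() {
        // Either field wraps into the same enum...
        let g: GenericField<GoldilocksField> = GoldilocksField::ONE.into();
        let b: GenericField<GoldilocksField> = BN254Fr::one().into();
        assert_ne!(g, b); // different variants compare unequal

        // ...and whole vectors convert via the extension trait.
        let v: Vec<GenericField<GoldilocksField>> =
            vec![BN254Fr::one(), BN254Fr::one()].into_generic_field_vec();
        assert_eq!(v.len(), 2);
    }
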
+/// Hasher field trait covering the fields in the `GenericField` enum.
+pub trait HasherField: Default + Sized + Copy + Debug + Eq + PartialEq + Sync + Send {
+    fn get_one() -> Self;
+    fn get_zero() -> Self;
+    fn to_bytes_le(&self) -> Vec<u8>;
+
+    fn from_bytes_le(b: &[u8]) -> Self;
+}
+
+/// BN254 Fr as a `HasherField`.
+impl HasherField for BN254Fr {
+    fn get_one() -> Self {
+        BN254Fr::one()
+    }
+
+    fn get_zero() -> Self {
+        BN254Fr::zero()
+    }
+
+    fn to_bytes_le(&self) -> Vec<u8> {
+        felts_to_bytes_le::<BN254Fr>(self)
+    }
+
+    fn from_bytes_le(b: &[u8]) -> Self {
+        bytes_le_to_felts::<BN254Fr>(b)
+    }
+}
+
+/// RichField (Goldilocks) as a `HasherField`.
+impl<T: RichField> HasherField for T {
+    fn get_one() -> Self {
+        T::ONE
+    }
+
+    fn get_zero() -> Self {
+        T::ZERO
+    }
+
+    fn to_bytes_le(&self) -> Vec<u8> {
+        self.to_canonical_u64().to_le_bytes().to_vec()
+    }
+
+    fn from_bytes_le(b: &[u8]) -> Self {
+        assert_eq!(b.len(), 8, "input slice must have exactly 8 bytes");
+        let arr: [u8; 8] = b.try_into().expect("conversion to array failed");
+        let element = u64::from_le_bytes(arr);
+        T::from_canonical_u64(element)
+    }
+}

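As a sanity check on the little-endian encoding above, a Goldilocks element should round-trip through exactly 8 bytes. A minimal sketch (the `plonky2::plonk::config::HasherField` path assumes this diff's module layout):

    use plonky2::field::goldilocks_field::GoldilocksField;
    use plonky2::field::types::Field;
    use plonky2::plonk::config::HasherField;

    fn main() {
        let x = GoldilocksField::from_canonical_u64(0xDEAD_BEEF);
        let bytes = x.to_bytes_le();
        assert_eq!(bytes.len(), 8); // one canonical u64, little-endian
        assert_eq!(GoldilocksField::from_bytes_le(&bytes), x);
    }
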
 /// Trait for hash functions.
 pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
+    type HF: HasherField;
+
     /// Size of `Hash` in bytes.
     const HASH_SIZE: usize;

@@ -41,39 +137,27 @@ pub trait Hasher<F: RichField>: Sized + Copy + Debug + Eq + PartialEq {
     type Hash: GenericHashOut<F>;

     /// Permutation used in the sponge construction.
-    type Permutation: PlonkyPermutation<F>;
+    type Permutation: PlonkyPermutation<Self::HF>;

     /// Hash a message without any padding step. Note that this can enable length-extension attacks.
     /// However, it is still collision-resistant in cases where the input has a fixed length.
-    fn hash_no_pad(input: &[F]) -> Self::Hash;
+    fn hash_no_pad(input: &[GenericField<F>]) -> Self::Hash;

     /// Pad the message using the `pad10*1` rule, then hash it.
-    fn hash_pad(input: &[F]) -> Self::Hash {
-        let mut padded_input = input.to_vec();
-        padded_input.push(F::ONE);
-        while (padded_input.len() + 1) % Self::Permutation::RATE != 0 {
-            padded_input.push(F::ZERO);
-        }
-        padded_input.push(F::ONE);
-        Self::hash_no_pad(&padded_input)
-    }
+    fn hash_pad(input: &[GenericField<F>]) -> Self::Hash;

     /// Hash the slice if necessary to reduce its length to ~256 bits. If it already fits, this is a
     /// no-op.
-    fn hash_or_noop(inputs: &[F]) -> Self::Hash {
-        if inputs.len() * 8 <= Self::HASH_SIZE {
-            let mut inputs_bytes = vec![0u8; Self::HASH_SIZE];
-            for i in 0..inputs.len() {
-                inputs_bytes[i * 8..(i + 1) * 8]
-                    .copy_from_slice(&inputs[i].to_canonical_u64().to_le_bytes());
-            }
-            Self::Hash::from_bytes(&inputs_bytes)
-        } else {
-            Self::hash_no_pad(inputs)
-        }
-    }
+    fn hash_or_noop(inputs: &[GenericField<F>]) -> Self::Hash;

+    /// Absorb the input into the given state.
+    fn sponge(state: &mut Self::Permutation, input: Vec<GenericField<F>>);
+
     /// 2-to-1 compression.
     fn two_to_one(left: Self::Hash, right: Self::Hash) -> Self::Hash;
+
+    /// Squeeze out a vec of Goldilocks field elements (used for duplex/challenger).
+    fn squeeze_goldilocks(state: &mut Self::Permutation) -> Vec<F>;
 }
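
The `sponge`/`squeeze_goldilocks` pair is the surface the reworked `Challenger` drives. A hypothetical helper (not part of this diff) showing one duplex round against the trait exactly as declared above:

    use plonky2::hash::hash_types::RichField;
    use plonky2::plonk::config::{GenericField, Hasher};

    /// Absorb `inputs` into a sponge state and squeeze one Goldilocks
    /// challenge. Assumes the squeeze produces at least one element.
    fn one_duplex_round<F: RichField, H: Hasher<F>>(
        state: &mut H::Permutation,
        inputs: Vec<GenericField<F>>,
    ) -> F {
        H::sponge(state, inputs); // absorb phase
        H::squeeze_goldilocks(state)[0] // squeeze phase
    }

Because the helper is generic over any `Hasher` implementation, the same round works whether the permutation runs over Goldilocks or BN254 limbs.
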
 /// Trait for algebraic hash functions, built from a permutation using the sponge construction.
@@ -87,16 +171,17 @@ pub trait AlgebraicHasher<F: RichField>: Hasher<F, Hash = HashOut<F>> {
         swap: BoolTarget,
         builder: &mut CircuitBuilder<F, D>,
     ) -> Self::AlgebraicPermutation
     where
         F: RichField + Extendable<D>;
 }

 /// Generic configuration trait.
 pub trait GenericConfig<const D: usize>:
     Debug + Clone + Sync + Sized + Send + Eq + PartialEq
 {
     /// Main field.
     type F: RichField + Extendable<D, Extension = Self::FE>;

     /// Field extension of degree D of the main field.
     type FE: FieldExtension<D, BaseField = Self::F>;
     /// Hash function used for building Merkle trees.
@@ -124,3 +209,13 @@ impl GenericConfig<2> for KeccakGoldilocksConfig {
     type Hasher = KeccakHash<25>;
     type InnerHasher = PoseidonHash;
 }
+
+/// Configuration using Poseidon2BN254 as hasher over the Goldilocks field.
+#[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Serialize)]
+pub struct Poseidon2BN254Config;
+impl GenericConfig<2> for Poseidon2BN254Config {
+    type F = GoldilocksField;
+    type FE = QuadraticExtension<Self::F>;
+    type Hasher = Poseidon2BN254;
+    type InnerHasher = PoseidonHash;
+}

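A minimal usage sketch of the new configuration: build a trivial circuit so Merkle caps and the circuit digest go through Poseidon2 over BN254, while the inner (in-circuit) hasher remains PoseidonHash. The `plonky2::` paths and the `anyhow` return type follow the crate's usual layout and are my assumption:

    use plonky2::iop::witness::PartialWitness;
    use plonky2::plonk::circuit_builder::CircuitBuilder;
    use plonky2::plonk::circuit_data::CircuitConfig;
    use plonky2::plonk::config::{GenericConfig, Poseidon2BN254Config};

    fn main() -> anyhow::Result<()> {
        const D: usize = 2;
        type C = Poseidon2BN254Config;
        type F = <C as GenericConfig<D>>::F;

        let config = CircuitConfig::standard_recursion_config();
        let builder = CircuitBuilder::<F, D>::new(config);

        // Merkle hashing now uses Poseidon2BN254; prove and verify as usual.
        let data = builder.build::<C>();
        let proof = data.prove(PartialWitness::new())?;
        data.verify(proof)
    }
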
@@ -25,7 +25,7 @@ use crate::hash::merkle_tree::MerkleCap;
 use crate::iop::ext_target::ExtensionTarget;
 use crate::iop::target::Target;
 use crate::plonk::circuit_data::{CommonCircuitData, VerifierOnlyCircuitData};
-use crate::plonk::config::{GenericConfig, Hasher};
+use crate::plonk::config::{GenericConfig, GenericField, Hasher, IntoGenericFieldVec};
 use crate::plonk::verifier::{verify_with_challenges, DEFAULT_VERIFIER_OPTIONS};
 use crate::util::serialization::{Buffer, Read, Write};

@@ -104,7 +104,8 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
     pub fn get_public_inputs_hash(
         &self,
     ) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash {
-        C::InnerHasher::hash_no_pad(&self.public_inputs)
+        let pi_felts: Vec<GenericField<F>> = self.public_inputs.clone().into_generic_field_vec();
+        C::InnerHasher::hash_no_pad(&pi_felts)
     }

     pub fn to_bytes(&self) -> Vec<u8> {
@@ -234,7 +235,8 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
     pub(crate) fn get_public_inputs_hash(
         &self,
     ) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash {
-        C::InnerHasher::hash_no_pad(&self.public_inputs)
+        let pi_felts: Vec<GenericField<F>> = self.public_inputs.clone().into_generic_field_vec();
+        C::InnerHasher::hash_no_pad(&pi_felts)
     }

     pub fn to_bytes(&self) -> Vec<u8> {

@@ -30,7 +30,7 @@ use crate::iop::target::Target;
 use crate::iop::witness::{MatrixWitness, PartialWitness, PartitionWitness, Witness, WitnessWrite};
 use crate::plonk::circuit_builder::NUM_COINS_LOOKUP;
 use crate::plonk::circuit_data::{CommonCircuitData, ProverOnlyCircuitData};
-use crate::plonk::config::{GenericConfig, Hasher};
+use crate::plonk::config::{GenericConfig, GenericField, Hasher, IntoGenericFieldVec};
 use crate::plonk::plonk_common::PlonkOracle;
 use crate::plonk::proof::{OpeningSet, Proof, ProofWithPublicInputs};
 use crate::plonk::vanishing_poly::{eval_vanishing_poly_base_batch, get_lut_poly};
@@ -269,7 +269,8 @@ where
     set_lookup_wires(prover_data, common_data, &mut partition_witness)?;

     let public_inputs = partition_witness.get_targets(&prover_data.public_inputs);
-    let public_inputs_hash = C::InnerHasher::hash_no_pad(&public_inputs);
+    let pi_felts: Vec<GenericField<F>> = public_inputs.clone().into_generic_field_vec();
+    let public_inputs_hash = C::InnerHasher::hash_no_pad(&pi_felts);

     let witness = timed!(
         timing,