extract poseidon to its own package (#8)

* extract poseidon to its own package
* move license to the bottom
parent ebef300064
commit f8e4b3e2da
@@ -36,3 +36,4 @@ serde = "1.0.156"
 serde_json = "1.0.94"
 num-traits = "0.2.15"
 ark-relations = { version = "0.4.0", features = ["std", "tracing-subscriber"] }
+rs-poseidon = {git = "https://github.com/status-im/rs-poseidon" }
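With the dependency added above, call sites switch from the local `crate::poseidon::hash` to `rs_poseidon::poseidon::hash`, as the diffs below show. The following is only a minimal sketch of the new call site, assuming the `rs-poseidon` git dependency and the `ruint` crate already used in the code; the expected value is the single-input test vector from the removed poseidon module's tests.

```rust
use rs_poseidon::poseidon::hash;
use ruint::{aliases::U256, uint};

fn main() {
    // Same single-input test vector as in the removed poseidon module's tests.
    uint! {
        let input: U256 = 0_U256;
        assert_eq!(
            hash(&[input]),
            0x2a09a9fd93c590c26b91effbb2499f07e8f7aa12e2b4940a3aed2411cb65e11c_U256
        );
    }
    println!("poseidon hash of [0] matches the expected test vector");
}
```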
README.md (24 lines changed)
@@ -5,18 +5,6 @@
 
 WIP Zero Knowledge tooling for the Codex project
 
-## License
-
-Licensed and distributed under either of
-
-* MIT license: [LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT
-
-or
-
-* Apache License, Version 2.0, ([LICENSE-APACHEv2](LICENSE-APACHEv2) or http://www.apache.org/licenses/LICENSE-2.0)
-
-at your option. These files may not be copied, modified, or distributed except according to those terms.
-
 ## Usage
 
 First, clone the repo and install the circom components:
@@ -39,3 +27,15 @@ Running the tests:
 ```sh
 cargo test # don't run in release mode as it disables circuit assets
 ```
+
+## License
+
+Licensed and distributed under either of
+
+* MIT license: [LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT
+
+or
+
+* Apache License, Version 2.0, ([LICENSE-APACHEv2](LICENSE-APACHEv2) or http://www.apache.org/licenses/LICENSE-2.0)
+
+at your option. These files may not be copied, modified, or distributed except according to those terms.
@@ -9,11 +9,11 @@ mod test {
         prepare_verifying_key, verify_proof_with_prepared_inputs, ProvingKey,
     };
     use ark_std::rand::{distributions::Alphanumeric, rngs::ThreadRng, Rng};
+    use rs_poseidon::poseidon::hash;
     use ruint::aliases::U256;
 
     use crate::{
-        circuit_tests::utils::{digest, merkelize},
-        poseidon::hash,
+        circuit_tests::utils::{digest, treehash},
         storage_proofs::StorageProofs,
     };
 
@@ -129,7 +129,7 @@ mod test {
             parent_hash_l,
         ];
 
-        let root = merkelize(hashes.as_slice());
+        let root = treehash(hashes.as_slice());
         let proof_bytes = &mut Vec::new();
         let public_inputs_bytes = &mut Vec::new();
 
@@ -1,6 +1,6 @@
 #![allow(dead_code)]
 
-use crate::poseidon::hash;
+use rs_poseidon::poseidon::hash;
 use ruint::{aliases::U256, uint};
 
 pub fn digest(input: &[U256], chunk_size: Option<usize>) -> U256 {
@@ -25,7 +25,7 @@ pub fn digest(input: &[U256], chunk_size: Option<usize>) -> U256 {
     concat[0]
 }
 
-pub fn merkelize(leafs: &[U256]) -> U256 {
+pub fn treehash(leafs: &[U256]) -> U256 {
     // simple merkle root (treehash) generator
     // unbalanced trees will have the last leaf duplicated
     let mut merkle: Vec<U256> = leafs.to_vec();
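The comments above describe `treehash` (renamed from `merkelize`) as a simple Merkle-root generator that duplicates the last leaf of an unbalanced level. The function body sits outside the diff context, so the following is only a hedged sketch of that behaviour built on `rs_poseidon::poseidon::hash`, not the crate's actual implementation; `treehash_sketch` is a hypothetical name used here for illustration.

```rust
// Hypothetical sketch of a treehash-style Merkle root, based only on the
// comments shown in the diff; the real function body is not part of this hunk.
use rs_poseidon::poseidon::hash;
use ruint::aliases::U256;

fn treehash_sketch(leafs: &[U256]) -> U256 {
    assert!(!leafs.is_empty());
    let mut level: Vec<U256> = leafs.to_vec();
    while level.len() > 1 {
        if level.len() % 2 != 0 {
            // unbalanced level: duplicate the last leaf
            let last = *level.last().expect("non-empty level");
            level.push(last);
        }
        // hash adjacent pairs to form the next level
        level = level
            .chunks(2)
            .map(|pair| hash(&[pair[0], pair[1]]))
            .collect();
    }
    level[0]
}
```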
@@ -186,11 +186,11 @@ pub unsafe extern "C" fn free_proof_ctx(ctx: *mut ProofCtx) {
 #[cfg(test)]
 mod tests {
     use ark_std::rand::{distributions::Alphanumeric, rngs::ThreadRng, Rng};
+    use rs_poseidon::poseidon::hash;
     use ruint::aliases::U256;
 
     use crate::{
-        circuit_tests::utils::{digest, merkelize},
-        poseidon::hash,
+        circuit_tests::utils::{digest, treehash},
         storage_proofs::StorageProofs,
     };
 
     use super::{init, prove, Buffer};
@@ -247,7 +247,7 @@ mod tests {
            .flatten()
            .collect();
 
-        let root = merkelize(hashes.as_slice());
+        let root = treehash(hashes.as_slice());
         let chunks_buff = Buffer {
            data: chunks.as_ptr() as *const u8,
            len: chunks.len(),
@@ -1,4 +1,3 @@
 pub mod ffi;
-pub mod poseidon;
 pub mod storage_proofs;
 mod circuit_tests;
@@ -1,118 +0,0 @@
-use ark_bn254::Fr;
-use num_bigint::BigUint;
-use once_cell::sync::Lazy;
-use num_traits::Num;
-
-const CONSTANTS_STR: &str = include_str!("poseidon_constants_opt.json");
-
-pub static CONSTANTS: Lazy<serde_json::Value> = Lazy::new(|| {
-    serde_json::from_str(CONSTANTS_STR).unwrap()
-});
-
-pub static C_CONST: Lazy<Vec<Vec<Fr>>> = Lazy::new(|| {
-    CONSTANTS["C"]
-        .as_array()
-        .unwrap()
-        .iter()
-        .map(|row| {
-            row.as_array()
-                .unwrap()
-                .iter()
-                .map(|c| {
-                    Fr::try_from(
-                        BigUint::from_str_radix(
-                            c.as_str().unwrap().strip_prefix("0x").unwrap(),
-                            16,
-                        )
-                        .unwrap(),
-                    )
-                })
-                .collect::<Result<Vec<Fr>, _>>()
-                .unwrap()
-        })
-        .collect::<Vec<Vec<Fr>>>()
-});
-
-pub static S_CONST: Lazy<Vec<Vec<Fr>>> = Lazy::new(|| {
-    CONSTANTS["S"]
-        .as_array()
-        .unwrap()
-        .iter()
-        .map(|row| {
-            row.as_array()
-                .unwrap()
-                .iter()
-                .map(|c| {
-                    Fr::try_from(
-                        BigUint::from_str_radix(
-                            c.as_str().unwrap().strip_prefix("0x").unwrap(),
-                            16,
-                        )
-                        .unwrap(),
-                    )
-                })
-                .collect::<Result<Vec<Fr>, _>>()
-                .unwrap()
-        })
-        .collect::<Vec<Vec<Fr>>>()
-});
-
-pub static M_CONST: Lazy<Vec<Vec<Vec<Fr>>>> = Lazy::new(|| {
-    CONSTANTS["M"]
-        .as_array()
-        .unwrap()
-        .iter()
-        .map(|row| {
-            row.as_array()
-                .unwrap()
-                .iter()
-                .map(|c| {
-                    c.as_array()
-                        .unwrap()
-                        .iter()
-                        .map(|c| {
-                            Fr::try_from(
-                                BigUint::from_str_radix(
-                                    c.as_str().unwrap().strip_prefix("0x").unwrap(),
-                                    16,
-                                )
-                                .unwrap(),
-                            )
-                        })
-                        .collect::<Result<Vec<Fr>, _>>()
-                        .unwrap()
-                })
-                .collect()
-        })
-        .collect::<Vec<Vec<Vec<Fr>>>>()
-});
-
-pub static P_CONST: Lazy<Vec<Vec<Vec<Fr>>>> = Lazy::new(|| {
-    CONSTANTS["P"]
-        .as_array()
-        .unwrap()
-        .iter()
-        .map(|row| {
-            row.as_array()
-                .unwrap()
-                .iter()
-                .map(|c| {
-                    c.as_array()
-                        .unwrap()
-                        .iter()
-                        .map(|c| {
-                            Fr::try_from(
-                                BigUint::from_str_radix(
-                                    c.as_str().unwrap().strip_prefix("0x").unwrap(),
-                                    16,
-                                )
-                                .unwrap(),
-                            )
-                        })
-                        .collect::<Result<Vec<Fr>, _>>()
-                        .unwrap()
-                })
-                .collect()
-        })
-        .collect::<Vec<Vec<Vec<Fr>>>>()
-});
@@ -1,154 +0,0 @@
-mod constants;
-
-use ark_bn254::Fr;
-use ark_ff::{Field, Zero};
-use ruint::aliases::U256;
-
-const N_ROUNDS_F: u8 = 8;
-const N_ROUNDS_P: [i32; 16] = [
-    56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68,
-];
-
-// Compute a Poseidon hash function of the input vector.
-//
-// # Panics
-//
-// Panics if `input` is not a valid field element.
-#[must_use]
-pub fn hash(inputs: &[U256]) -> U256 {
-    assert!(!inputs.is_empty());
-    assert!(inputs.len() <= N_ROUNDS_P.len());
-
-    let t = inputs.len() + 1;
-    let n_rounds_f = N_ROUNDS_F as usize;
-    let n_rounds_p = N_ROUNDS_P[t - 2] as usize;
-    let c = constants::C_CONST[t - 2].clone();
-    let s = constants::S_CONST[t - 2].clone();
-    let m = constants::M_CONST[t - 2].clone();
-    let p = constants::P_CONST[t - 2].clone();
-
-    let mut state: Vec<Fr> = inputs.iter().map(|f| f.try_into().unwrap()).collect();
-    state.insert(0, Fr::zero());
-
-    state = state.iter().enumerate().map(|(j, a)| *a + c[j]).collect();
-
-    for r in 0..(n_rounds_f / 2 - 1) {
-        state = state
-            .iter()
-            .map(|a| a.pow([5]))
-            .enumerate()
-            .map(|(i, a)| a + c[(r + 1) * t + i])
-            .collect();
-
-        state = state
-            .iter()
-            .enumerate()
-            .map(|(i, _)| {
-                state
-                    .iter()
-                    .enumerate()
-                    .fold((0, Fr::zero()), |acc, item| {
-                        (0, (acc.1 + m[item.0][i] * item.1))
-                    })
-                    .1
-            })
-            .collect();
-    }
-
-    state = state
-        .iter()
-        .map(|a| a.pow([5]))
-        .enumerate()
-        .map(|(i, a)| a + c[(n_rounds_f / 2 - 1 + 1) * t + i])
-        .collect();
-
-    state = state
-        .iter()
-        .enumerate()
-        .map(|(i, _)| {
-            state
-                .iter()
-                .enumerate()
-                .fold((0, Fr::zero()), |acc, item| {
-                    (0, (acc.1 + p[item.0][i] * item.1))
-                })
-                .1
-        })
-        .collect();
-
-    for r in 0..n_rounds_p {
-        state[0] = state[0].pow([5]);
-        state[0] += c[(n_rounds_f / 2 + 1) * t + r];
-
-        let s0 = state
-            .iter()
-            .enumerate()
-            .fold((0, Fr::zero()), |acc, item| {
-                (0, acc.1 + s[(t * 2 - 1) * r + item.0] * item.1)
-            })
-            .1;
-
-        for k in 1..t {
-            state[k] = state[k] + state[0] * s[(t * 2 - 1) * r + t + k - 1];
-        }
-        state[0] = s0;
-    }
-
-    for r in 0..(n_rounds_f / 2 - 1) {
-        state = state
-            .iter()
-            .map(|a| a.pow([5]))
-            .enumerate()
-            .map(|(i, a)| a + c[(n_rounds_f / 2 + 1) * t + n_rounds_p + r * t + i])
-            .collect();
-
-        state = state
-            .iter()
-            .enumerate()
-            .map(|(i, _)| {
-                state
-                    .iter()
-                    .enumerate()
-                    .fold((0, Fr::zero()), |acc, item| {
-                        (0, acc.1 + m[item.0][i] * item.1)
-                    })
-                    .1
-            })
-            .collect();
-    }
-
-    state = state.iter().map(|a| a.pow([5])).collect();
-    state = state
-        .iter()
-        .enumerate()
-        .map(
-            |(i, _)| {
-                state
-                    .iter()
-                    .enumerate()
-                    .fold((0, Fr::zero()), |acc, item| {
-                        (0, acc.1 + m[item.0][i] * item.1)
-                    })
-                    .1
-            },
-        )
-        .collect();
-
-    state[0].into()
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use ruint::uint;
-
-    #[test]
-    fn test_hash_inputs() {
-        uint! {
-            assert_eq!(hash(&[0_U256]), 0x2a09a9fd93c590c26b91effbb2499f07e8f7aa12e2b4940a3aed2411cb65e11c_U256);
-            assert_eq!(hash(&[0_U256, 0_U256]), 0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864_U256);
-            assert_eq!(hash(&[0_U256, 0_U256, 0_U256]), 0xbc188d27dcceadc1dcfb6af0a7af08fe2864eecec96c5ae7cee6db31ba599aa_U256);
-            assert_eq!(hash(&[31213_U256, 132_U256]), 0x303f59cd0831b5633bcda50514521b33776b5d4280eb5868ba1dbbe2e4d76ab5_U256);
-        }
-    }
-}
File diff suppressed because it is too large