mirror of
https://github.com/logos-storage/logos-storage-proofs.git
synced 2026-01-04 22:43:10 +00:00
adding quick&dirty poseidon implementation
This commit is contained in:
parent
3896fddaa2
commit
c389d6d1e9
40 Cargo.toml
@@ -7,23 +7,35 @@ edition = "2021"

[lib]
crate-type = [
"staticlib", # Ensure it gets compiled as a (static) C library
# "cdylib", # If you want a shared/dynamic C library (advanced)
"lib", # For downstream Rust dependents: `examples/`, `tests/` etc.
]

[dependencies]
ark-bn254 = { version = "0.3" }
ark-ec = { version = "0.4", default-features = false, features = ["parallel"] }
ark-groth16 = { version = "0.3", features = ["parallel"] }
ark-std = { version = "0.3", default-features = false, features = ["parallel"] }
ark-serialize = { version = "0.3", default-features = false }
ark-bn254 = { version = "0.3.0" }
ark-ec = { version = "0.3.0", default-features = false, features = [
"parallel",
] }
ark-groth16 = { version = "0.3.0", features = ["parallel"] }
ark-std = { version = "0.3.0", default-features = false, features = [
"parallel",
] }
# ark-serialize = { version = "0.3.0", default-features = false }

num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
ark-circom = { git = "https://github.com/gakonst/ark-circom.git#master", features = ["circom-2"] }
arkworks-native-gadgets = "1.2.0"
arkworks-utils = { version = "1.0.1", features = ["parallel", "poseidon_bn254_x5_3", "poseidon_bn254_x5_5"] }
ark-ff = { version = "0.4.1", features = ["std"] }

[dev-dependencies]
ff = { package="ff_ce", version="0.11", features = ["derive"] }
ark-circom = { git = "https://github.com/gakonst/ark-circom.git", rev = "35ce5a9", features = [
"circom-2",
] }
# arkworks-native-gadgets = "1.2.0"
# arkworks-utils = { version = "1.0.1", features = [
# "parallel",
# "poseidon_bn254_x5_3",
# "poseidon_bn254_x5_5",
# ] }
ark-ff = { version = "0.3.0", features = ["std"] }
ruint = { version = "1.7.0", features = ["serde", "num-bigint", "ark-ff"] }
once_cell = "1.17.1"
serde = "1.0.156"
serde_json = "1.0.94"
num-traits = "0.2.15"
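Note that `ruint` is pulled in with its `ark-ff` feature, which is what gives the new poseidon module its `U256` to `ark_bn254::Fr` conversions. A minimal sketch of those conversions, mirroring how `src/poseidon/mod.rs` uses them (the helper names below are illustrative, not part of the commit):

use ark_bn254::Fr;
use ruint::aliases::U256;

// U256 -> Fr fails if the value is not below the BN254 scalar-field modulus;
// this is the same conversion poseidon::hash() applies to each of its inputs.
fn to_field_elements(inputs: &[U256]) -> Vec<Fr> {
    inputs
        .iter()
        .map(|x| x.try_into().expect("input is not a valid field element"))
        .collect()
}

// Fr -> U256 is infallible; poseidon::hash() uses it for its return value.
fn to_u256(f: Fr) -> U256 {
    f.into()
}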
55 circuits/poseidon-hasher.circom Normal file
@@ -0,0 +1,55 @@
pragma circom 2.1.0;

include "../node_modules/circomlib/circuits/poseidon.circom";

function roundUpDiv(x, n) {
    var last = x % n; // get the last digit
    var div = x \ n; // get the division

    if (last > 0) {
        return div + 1;
    }

    return div;
}

template parallel PoseidonHasher(BLOCK_SIZE, CHUNK_SIZE) {
    // BLOCK_SIZE - size of the input block array
    // CHUNK_SIZE - number of elements to hash at once
    signal input block[BLOCK_SIZE]; // Input block array
    signal output hash; // Output hash

    // Split array into chunks of size CHUNK_SIZE, usually 2
    var NUM_CHUNKS = roundUpDiv(BLOCK_SIZE, CHUNK_SIZE);

    // Initialize an array to store hashes of each block
    component hashes[NUM_CHUNKS];

    // Loop over chunks and hash them using Poseidon()
    for (var i = 0; i < NUM_CHUNKS; i++) {
        hashes[i] = Poseidon(CHUNK_SIZE);

        var start = i * CHUNK_SIZE;
        var end = start + CHUNK_SIZE;
        for (var j = start; j < end; j++) {
            if (j >= BLOCK_SIZE) {
                hashes[i].inputs[j - start] <== 0;
            } else {
                hashes[i].inputs[j - start] <== block[j];
            }
        }
    }

    // Concatenate hashes into a single block
    var concat[NUM_CHUNKS];
    for (var i = 0; i < NUM_CHUNKS; i++) {
        concat[i] = hashes[i].out;
    }

    // Hash concatenated array using Poseidon() again
    component h = Poseidon(NUM_CHUNKS);
    h.inputs <== concat;

    // Assign output to hash signal
    hash <== h.out;
}
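For cross-checking this template off-circuit, the same two-level hashing can be written against the `hash` function this commit adds in `src/poseidon/mod.rs`. This is only a sketch under the assumption that it lives inside this crate; `poseidon_hasher` is an illustrative name, not something the commit defines. Like the template, it re-hashes the chunk hashes even when there is a single chunk:

use crate::poseidon::hash;
use ruint::aliases::U256;

// Off-circuit mirror of PoseidonHasher(BLOCK_SIZE, CHUNK_SIZE): hash the block
// in CHUNK_SIZE-sized chunks (zero-padding the last one), then hash the
// concatenated chunk hashes once more.
fn poseidon_hasher(block: &[U256], chunk_size: usize) -> U256 {
    // round-up division, like roundUpDiv() in the template
    let num_chunks = (block.len() + chunk_size - 1) / chunk_size;

    let mut chunk_hashes = Vec::with_capacity(num_chunks);
    for i in 0..num_chunks {
        let mut chunk = vec![U256::ZERO; chunk_size];
        for j in 0..chunk_size {
            if i * chunk_size + j < block.len() {
                chunk[j] = block[i * chunk_size + j];
            }
        }
        chunk_hashes.push(hash(&chunk));
    }

    // second Poseidon over the chunk hashes, as the template does unconditionally
    hash(&chunk_hashes)
}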
@@ -5,8 +5,11 @@ template SimpleHasher(SIZE) {
signal input hash;

component hasher = Poseidon(SIZE);
hasher.inputs[0] <== in;
for(var i = 0; i < SIZE; i++) {
hasher.inputs[i] <== in[i];
}

hasher.out === hash;
}

component main = SimpleHasher(2);
component main = SimpleHasher(1);
@@ -3,6 +3,7 @@ pragma circom 2.1.0;
include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/switcher.circom";
include "../node_modules/circomlib/circuits/bitify.circom";
include "./poseidon-hasher.circom";

template parallel MerkleProof(LEVELS) {
signal input leaf;
@@ -32,56 +33,6 @@ template parallel MerkleProof(LEVELS) {
root <== hasher[LEVELS - 1].out;
}

function roundUpDiv(x, n) {
var last = x % n; // get the last digit
var div = x \ n; // get the division

if (last > 0) {
return div + 1;
}

return div;
}

template parallel HashCheck(BLOCK_SIZE, CHUNK_SIZE) {
signal input block[BLOCK_SIZE];
signal output hash;

// Split array into chunks of size CHUNK_SIZE
var NUM_CHUNKS = roundUpDiv(BLOCK_SIZE, CHUNK_SIZE);

// Initialize an array to store hashes of each block
component hashes[NUM_CHUNKS];

// Loop over chunks and hash them using Poseidon()
for (var i = 0; i < NUM_CHUNKS; i++) {
hashes[i] = Poseidon(CHUNK_SIZE);

var start = i * CHUNK_SIZE;
var end = start + CHUNK_SIZE;
for (var j = start; j < end; j++) {
if (j >= BLOCK_SIZE) {
hashes[i].inputs[j - start] <== 0;
} else {
hashes[i].inputs[j - start] <== block[j];
}
}
}

// Concatenate hashes into a single block
var concat[NUM_CHUNKS];
for (var i = 0; i < NUM_CHUNKS; i++) {
concat[i] = hashes[i].out;
}

// Hash concatenated array using Poseidon() again
component h = Poseidon(NUM_CHUNKS);
h.inputs <== concat;

// Assign output to hash signal
hash <== h.out;
}

template StorageProver(BLOCK_SIZE, QUERY_LEN, LEVELS, CHUNK_SIZE) {
// BLOCK_SIZE: size of block in symbols
// QUERY_LEN: query length, i.e. number if indices to be proven
@@ -98,7 +49,7 @@ template StorageProver(BLOCK_SIZE, QUERY_LEN, LEVELS, CHUNK_SIZE) {

component hashers[QUERY_LEN];
for (var i = 0; i < QUERY_LEN; i++) {
hashers[i] = HashCheck(BLOCK_SIZE, CHUNK_SIZE);
hashers[i] = PoseidonHasher(BLOCK_SIZE, CHUNK_SIZE);
hashers[i].block <== chunks[i];
hashers[i].hash === hashes[i];
}
66 src/ffi.rs
@@ -1,40 +1,40 @@
use crate::storageproofs::StorageProofs;
use std::str;
// use crate::storageproofs::StorageProofs;
// use std::str;

#[no_mangle]
pub extern "C" fn init(
r1cs: *const u8,
r1cs_len: usize,
wasm: *const u8,
wasm_len: usize,
) -> *mut StorageProofs {
let r1cs = unsafe {
let slice = std::slice::from_raw_parts(r1cs, r1cs_len);
str::from_utf8(slice).unwrap()
};
// #[no_mangle]
// pub extern "C" fn init(
// r1cs: *const u8,
// r1cs_len: usize,
// wasm: *const u8,
// wasm_len: usize,
// ) -> *mut StorageProofs {
// let r1cs = unsafe {
// let slice = std::slice::from_raw_parts(r1cs, r1cs_len);
// str::from_utf8(slice).unwrap()
// };

let wasm = unsafe {
let slice = std::slice::from_raw_parts(wasm, wasm_len);
str::from_utf8(slice).unwrap()
};
// let wasm = unsafe {
// let slice = std::slice::from_raw_parts(wasm, wasm_len);
// str::from_utf8(slice).unwrap()
// };

let storage_proofs = Box::into_raw(Box::new(StorageProofs::new(
wasm.to_string(),
r1cs.to_string(),
)));
// let storage_proofs = Box::into_raw(Box::new(StorageProofs::new(
// wasm.to_string(),
// r1cs.to_string(),
// )));

return storage_proofs;
}
// return storage_proofs;
// }

#[cfg(test)]
mod tests {
use super::init;
// #[cfg(test)]
// mod tests {
// use super::init;

#[test]
fn should_prove() {
let r1cs = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test.r1cs";
let wasm = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test_js/storer_test.wasm";
// #[test]
// fn should_prove() {
// let r1cs = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test.r1cs";
// let wasm = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test_js/storer_test.wasm";

let prover = init(r1cs.as_ptr(), r1cs.len(), wasm.as_ptr(), wasm.len());
}
}
// let prover = init(r1cs.as_ptr(), r1cs.len(), wasm.as_ptr(), wasm.len());
// }
// }
@@ -1,3 +1,5 @@
pub mod ffi;
pub mod storageproofs;
pub mod hash;
pub mod poseidon;
// pub mod storageproofs;
mod simple_hasher;
141 src/poseidon/constants.rs Normal file
@@ -0,0 +1,141 @@
use std::{fs::File, io::BufReader};

use ark_bn254::Fr;
use num_bigint::BigUint;
use once_cell::sync::Lazy;
use num_traits::Num;

pub static CONSTANTS: Lazy<serde_json::Value> = Lazy::new(|| {
    let file = File::open(
        "./src/poseidon/poseidon_constants_opt.json",
    )
    .unwrap();
    // Read the JSON contents of the file as an instance of `User`.
    serde_json::from_reader(BufReader::new(file)).unwrap()
});

pub static C_CONST: Lazy<Vec<Vec<Fr>>> = Lazy::new(|| {
    CONSTANTS["C"]
        .as_array()
        .unwrap()
        .iter()
        .map(|row| {
            row.as_array()
                .unwrap()
                .iter()
                .map(|c| {
                    Fr::try_from(
                        BigUint::from_str_radix(
                            c.as_str().unwrap().strip_prefix("0x").unwrap(),
                            16,
                        )
                        .unwrap(),
                    )
                })
                .collect::<Result<Vec<Fr>, _>>()
                .unwrap()
                .try_into()
                .unwrap()
        })
        .collect::<Vec<Vec<Fr>>>()
        .try_into()
        .unwrap()
});

pub static S_CONST: Lazy<Vec<Vec<Fr>>> = Lazy::new(|| {
    CONSTANTS["S"]
        .as_array()
        .unwrap()
        .iter()
        .map(|row| {
            row.as_array()
                .unwrap()
                .iter()
                .map(|c| {
                    Fr::try_from(
                        BigUint::from_str_radix(
                            c.as_str().unwrap().strip_prefix("0x").unwrap(),
                            16,
                        )
                        .unwrap(),
                    )
                })
                .collect::<Result<Vec<Fr>, _>>()
                .unwrap()
                .try_into()
                .unwrap()
        })
        .collect::<Vec<Vec<Fr>>>()
        .try_into()
        .unwrap()
});

pub static M_CONST: Lazy<Vec<Vec<Vec<Fr>>>> = Lazy::new(|| {
    CONSTANTS["M"]
        .as_array()
        .unwrap()
        .iter()
        .map(|row| {
            row.as_array()
                .unwrap()
                .iter()
                .map(|c| {
                    c.as_array()
                        .unwrap()
                        .iter()
                        .map(|c| {
                            Fr::try_from(
                                BigUint::from_str_radix(
                                    c.as_str().unwrap().strip_prefix("0x").unwrap(),
                                    16,
                                )
                                .unwrap(),
                            )
                        })
                        .collect::<Result<Vec<Fr>, _>>()
                        .unwrap()
                        .try_into()
                        .unwrap()
                })
                .collect()
        })
        // .flatten()
        .collect::<Vec<Vec<Vec<Fr>>>>()
        .try_into()
        .unwrap()
});

pub static P_CONST: Lazy<Vec<Vec<Vec<Fr>>>> = Lazy::new(|| {
    CONSTANTS["P"]
        .as_array()
        .unwrap()
        .iter()
        .map(|row| {
            row.as_array()
                .unwrap()
                .iter()
                .map(|c| {
                    c.as_array()
                        .unwrap()
                        .iter()
                        .map(|c| {
                            Fr::try_from(
                                BigUint::from_str_radix(
                                    c.as_str().unwrap().strip_prefix("0x").unwrap(),
                                    16,
                                )
                                .unwrap(),
                            )
                        })
                        .collect::<Result<Vec<Fr>, _>>()
                        .unwrap()
                        .try_into()
                        .unwrap()
                })
                .collect()
        })
        // .flatten()
        .collect::<Vec<Vec<Vec<Fr>>>>()
        .try_into()
        .unwrap()
});
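The four tables above repeat the same hex-string-to-field-element parsing; a small helper in the same style could factor it out. This is just a sketch, and `fr_from_hex` is a hypothetical name relying on the same `Fr::try_from(BigUint)` conversion the tables already use:

use ark_bn254::Fr;
use num_bigint::BigUint;
use num_traits::Num;

// Parse one "0x..."-prefixed hex constant into a BN254 scalar-field element.
fn fr_from_hex(c: &str) -> Fr {
    let digits = c.strip_prefix("0x").unwrap_or(c);
    Fr::try_from(BigUint::from_str_radix(digits, 16).unwrap()).unwrap()
}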
154 src/poseidon/mod.rs Normal file
@@ -0,0 +1,154 @@
mod constants;

use ark_bn254::Fr;
use ark_ff::{Field, Zero};
use ruint::aliases::U256;

const N_ROUNDS_F: u8 = 8;
const N_ROUNDS_P: [i32; 16] = [
    56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68,
];

// Compute a Poseidon hash function of the input vector.
//
// # Panics
//
// Panics if `input` is not a valid field element.
#[must_use]
pub fn hash(inputs: &[U256]) -> U256 {
    assert!(inputs.len() > 0);
    assert!(inputs.len() <= N_ROUNDS_P.len());

    let t = inputs.len() + 1;
    let n_rounds_f = N_ROUNDS_F as usize;
    let n_rounds_p = N_ROUNDS_P[t - 2] as usize;
    let c = constants::C_CONST[t - 2].clone();
    let s = constants::S_CONST[t - 2].clone();
    let m = constants::M_CONST[t - 2].clone();
    let p = constants::P_CONST[t - 2].clone();

    let mut state: Vec<Fr> = inputs.iter().map(|f| f.try_into().unwrap()).collect();
    state.insert(0, Fr::zero());

    state = state.iter().enumerate().map(|(j, a)| *a + c[j]).collect();

    for r in 0..(n_rounds_f / 2 - 1) {
        state = state
            .iter()
            .map(|a| a.pow(&[5]))
            .enumerate()
            .map(|(i, a)| a + c[(r + 1) * t + i])
            .collect();

        state = state
            .iter()
            .enumerate()
            .map(|(i, _)| {
                state
                    .iter()
                    .enumerate()
                    .fold((0, Fr::zero()), |acc, item| {
                        (0, (acc.1 + m[item.0][i] * item.1))
                    })
                    .1
            })
            .collect();
    }

    state = state
        .iter()
        .map(|a| a.pow(&[5]))
        .enumerate()
        .map(|(i, a)| a + c[(n_rounds_f / 2 - 1 + 1) * t + i])
        .collect();

    state = state
        .iter()
        .enumerate()
        .map(|(i, _)| {
            state
                .iter()
                .enumerate()
                .fold((0, Fr::zero()), |acc, item| {
                    (0, (acc.1 + p[item.0][i] * item.1))
                })
                .1
        })
        .collect();

    for r in 0..n_rounds_p as usize {
        state[0] = state[0].pow(&[5]);
        state[0] = state[0] + c[(n_rounds_f / 2 + 1) * t + r];

        let s0 = state
            .iter()
            .enumerate()
            .fold((0, Fr::zero()), |acc, item| {
                (0, acc.1 + s[(t * 2 - 1) * r + item.0] * item.1)
            })
            .1;

        for k in 1..t {
            state[k] = state[k] + state[0] * s[(t * 2 - 1) * r + t + k - 1];
        }
        state[0] = s0;
    }

    for r in 0..(n_rounds_f / 2 - 1) as usize {
        state = state
            .iter()
            .map(|a| a.pow(&[5]))
            .enumerate()
            .map(|(i, a)| a + c[(n_rounds_f / 2 + 1) * t + n_rounds_p + r * t + i])
            .collect();

        state = state
            .iter()
            .enumerate()
            .map(|(i, _)| {
                state
                    .iter()
                    .enumerate()
                    .fold((0, Fr::zero()), |acc, item| {
                        (0, acc.1 + m[item.0][i] * item.1)
                    })
                    .1
            })
            .collect();
    }

    state = state.iter().map(|a| a.pow(&[5])).collect();
    state = state
        .iter()
        .enumerate()
        .map(
            |(i, _)| {
                state
                    .iter()
                    .enumerate()
                    .fold((0, Fr::zero()), |acc, item| {
                        (0, acc.1 + m[item.0][i] * item.1)
                    })
                    .1
            }, // reduce((acc, a, j) => F.add(acc, F.mul(M[j][i], a)), F.zero)
        )
        .collect();

    state[0].into()
}

#[cfg(test)]
mod tests {
    use super::*;
    use ruint::uint;

    #[test]
    fn test_hash_inputs() {
        uint! {
            assert_eq!(hash(&[0_U256]), 0x2a09a9fd93c590c26b91effbb2499f07e8f7aa12e2b4940a3aed2411cb65e11c_U256);
            assert_eq!(hash(&[0_U256, 0_U256]), 0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864_U256);
            assert_eq!(hash(&[0_U256, 0_U256, 0_U256]), 0xbc188d27dcceadc1dcfb6af0a7af08fe2864eecec96c5ae7cee6db31ba599aa_U256);
            assert_eq!(hash(&[31213_U256, 132_U256]), 0x303f59cd0831b5633bcda50514521b33776b5d4280eb5868ba1dbbe2e4d76ab5_U256);
        }
    }
}
24806 src/poseidon/poseidon_constants_opt.json Normal file
File diff suppressed because it is too large.
@@ -1,239 +1,210 @@
use ark_std::rand::rngs::ThreadRng;
use arkworks_native_gadgets::prelude::ark_ff::PrimeField;
use num_bigint::{BigInt, Sign};
// use ark_bn254::Bn254;
// use ark_std::rand::rngs::ThreadRng;
// use ark_circom::{CircomBuilder, CircomConfig};
// use ark_groth16::{ProvingKey, generate_random_parameters, prepare_verifying_key, create_random_proof as prove};
// use ruint::aliases::U256;
// use crate::poseidon::hash1;

use ark_bn254::{Bn254, Fq};
use ark_circom::{CircomBuilder, CircomConfig};
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
Proof, ProvingKey,
};
// #[derive(Debug, Clone)]
// #[repr(C)]
// pub struct StorageProofs {
// builder: CircomBuilder<Bn254>,
// pvk: ProvingKey<Bn254>,
// rng: ThreadRng,
// }

#[derive(Debug, Clone)]
#[repr(C)]
pub struct StorageProofs {
builder: CircomBuilder<Bn254>,
pvk: ProvingKey<Bn254>,
rng: ThreadRng,
}
// impl StorageProofs {
// pub fn new(wtns: String, r1cs: String) -> Self {
// let mut rng = ThreadRng::default();
// let builder = CircomBuilder::new(CircomConfig::<Bn254>::new(wtns, r1cs).unwrap());
// let pvk = generate_random_parameters::<Bn254, _, _>(builder.setup(), &mut rng).unwrap();

impl StorageProofs {
pub fn new(wtns: String, r1cs: String) -> Self {
let mut rng = ThreadRng::default();
let builder = CircomBuilder::new(CircomConfig::<Bn254>::new(wtns, r1cs).unwrap());
let pvk = generate_random_parameters::<Bn254, _, _>(builder.setup(), &mut rng).unwrap();
// Self { builder, pvk, rng }
// }

Self { builder, pvk, rng }
}
// pub fn prove(
// &mut self,
// chunks: Vec<Vec<Fq>>,
// siblings: Vec<Vec<Fq>>,
// hashes: Vec<Fq>,
// path: Vec<u32>,
// root: Fq,
// salt: Fq,
// proof_bytes: Vec<u8>,
// public_inputs_bytes: Vec<u8>,
// ) -> Result<(), String> {
// let mut builder = self.builder.clone();

pub fn prove(
&mut self,
chunks: Vec<Vec<Fq>>,
siblings: Vec<Vec<Fq>>,
hashes: Vec<Fq>,
path: Vec<u32>,
root: Fq,
salt: Fq,
proof_bytes: Vec<u8>,
public_inputs_bytes: Vec<u8>,
) -> Result<(), String> {
let mut builder = self.builder.clone();
// chunks.iter().flat_map(|c| c.into_iter()).for_each(|c| {
// builder.push_input(
// "chunks",
// BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
// )
// });

chunks.iter().flat_map(|c| c.into_iter()).for_each(|c| {
builder.push_input(
"chunks",
BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
)
});
// siblings.iter().flat_map(|c| c.into_iter()).for_each(|c| {
// builder.push_input(
// "siblings",
// BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
// )
// });

siblings.iter().flat_map(|c| c.into_iter()).for_each(|c| {
builder.push_input(
"siblings",
BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
)
});
// hashes.iter().for_each(|c| {
// builder.push_input(
// "hashes",
// BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
// )
// });

hashes.iter().for_each(|c| {
builder.push_input(
"hashes",
BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
)
});
// path.iter()
// .for_each(|c| builder.push_input("path", BigInt::new(Sign::Plus, vec![*c])));

path.iter()
.for_each(|c| builder.push_input("path", BigInt::new(Sign::Plus, vec![*c])));
// builder.push_input(
// "root",
// BigInt::from_biguint(Sign::Plus, root.into_repr().into()),
// );

builder.push_input(
"root",
BigInt::from_biguint(Sign::Plus, root.into_repr().into()),
);
// builder.push_input(
// "salt",
// BigInt::from_biguint(Sign::Plus, salt.into_repr().into()),
// );

builder.push_input(
"salt",
BigInt::from_biguint(Sign::Plus, salt.into_repr().into()),
);
// let circuit = builder.build().unwrap();
// let inputs = circuit.get_public_inputs().unwrap();
// let proof = prove(circuit, &self.pvk, &mut self.rng).unwrap();
// let vk = prepare_verifying_key(&self.pvk.vk);

let circuit = builder.build().unwrap();
let inputs = circuit.get_public_inputs().unwrap();
let proof = prove(circuit, &self.pvk, &mut self.rng).unwrap();
let vk = prepare_verifying_key(&self.pvk.vk);
// // proof.serialize(proof_bytes).unwrap();
// // inputs.serialize(public_inputs_bytes).unwrap();

// proof.serialize(proof_bytes).unwrap();
// inputs.serialize(public_inputs_bytes).unwrap();
// Ok(())
// }

Ok(())
}
// // fn verify<R: Read>(self, hashes: Vec<i32>, root: i32, salt: i32,vk_bytes: R, proof_bytes: R) -> Result<(), String> {
// // let vk = ProvingKey::<Bn254>::deserialize(vk_bytes).unwrap();
// // let proof = Proof::<Bn254>::deserialize(proof_bytes).unwrap();

// fn verify<R: Read>(self, hashes: Vec<i32>, root: i32, salt: i32,vk_bytes: R, proof_bytes: R) -> Result<(), String> {
// let vk = ProvingKey::<Bn254>::deserialize(vk_bytes).unwrap();
// let proof = Proof::<Bn254>::deserialize(proof_bytes).unwrap();
// // let vk = prepare_verifying_key(&self.pvk.vk);
// // verify_proof(&vk, &proof, &public_inputs).unwrap();

// let vk = prepare_verifying_key(&self.pvk.vk);
// verify_proof(&vk, &proof, &public_inputs).unwrap();
// // Ok(())
// // }
// }

// Ok(())
// }
}
// #[cfg(test)]
// mod test {
// use super::StorageProofs;
// use ark_bn254::Fq;
// use ark_ff::{UniformRand, Zero};
// use ark_std::rand::{rngs::ThreadRng, Rng};
// use arkworks_native_gadgets::{
// poseidon::{sbox::PoseidonSbox, *},
// prelude::ark_ff::PrimeField,
// };

#[cfg(test)]
mod test {
use super::StorageProofs;
use ark_bn254::Fq;
use ark_ff::{UniformRand, Zero};
use ark_std::rand::{rngs::ThreadRng, Rng};
use arkworks_native_gadgets::{
poseidon::{sbox::PoseidonSbox, *},
prelude::ark_ff::PrimeField,
};
// use arkworks_utils::{
// bytes_matrix_to_f, bytes_vec_to_f, poseidon_params::setup_poseidon_params, Curve,
// };

use arkworks_utils::{
bytes_matrix_to_f, bytes_vec_to_f, poseidon_params::setup_poseidon_params, Curve,
};
// fn digest(input: Vec<Fq>, chunk_size: Option<usize>) -> Result<Fq, PoseidonError> {
// let chunk_size = chunk_size.unwrap_or(4);
// let chunks = ((input.len() as f32) / (chunk_size as f32)).ceil() as usize;
// let mut concat = vec![];
// let hasher = hash1(Curve::Bn254, 5, (chunk_size + 1) as u8);

type PoseidonHasher = Poseidon<Fq>;
type Hasher = Box<dyn Fn(Vec<Fq>) -> Result<Fq, PoseidonError>>;
pub fn setup_params<F: PrimeField>(curve: Curve, exp: i8, width: u8) -> PoseidonParameters<F> {
let pos_data = setup_poseidon_params(curve, exp, width).unwrap();
// let mut i: usize = 0;
// while i < chunks {
// let range = (i * chunk_size)..std::cmp::min((i + 1) * chunk_size, input.len());

let mds_f = bytes_matrix_to_f(&pos_data.mds);
let rounds_f = bytes_vec_to_f(&pos_data.rounds);
// let mut chunk: Vec<Fq> = input[range].to_vec();

PoseidonParameters {
mds_matrix: mds_f,
round_keys: rounds_f,
full_rounds: pos_data.full_rounds,
partial_rounds: pos_data.partial_rounds,
sbox: PoseidonSbox(pos_data.exp),
width: pos_data.width,
}
}
// if chunk.len() < chunk_size {
// chunk.resize(chunk_size as usize, Fq::zero());
// }

fn hasher(curve: Curve, exp: i8, width: u8) -> Hasher {
let params = setup_params(curve, exp, width);
let poseidon = PoseidonHasher::new(params);
// concat.push(hasher(chunk)?);
// i += chunk_size;
// }

return Box::new(move |inputs| poseidon.hash(&inputs));
}
// if concat.len() > 1 {
// return hasher(concat);
// }

fn digest(input: Vec<Fq>, chunk_size: Option<usize>) -> Result<Fq, PoseidonError> {
let chunk_size = chunk_size.unwrap_or(4);
let chunks = ((input.len() as f32) / (chunk_size as f32)).ceil() as usize;
let mut concat = vec![];
let hasher = hasher(Curve::Bn254, 5, (chunk_size + 1) as u8);
// return Ok(concat[0]);
// }

let mut i: usize = 0;
while i < chunks {
let range = (i * chunk_size)..std::cmp::min((i + 1) * chunk_size, input.len());
// fn merkelize(leafs: Vec<Fq>) -> Fq {
// // simple merkle root (treehash) generator
// // unbalanced trees will have the last leaf duplicated
// let mut merkle: Vec<Fq> = leafs;
// let hasher = hasher(Curve::Bn254, 5, 3);

let mut chunk: Vec<Fq> = input[range].to_vec();
// while merkle.len() > 1 {
// let mut new_merkle = Vec::new();
// let mut i = 0;
// while i < merkle.len() {
// new_merkle.push(hasher(vec![merkle[i], merkle[i + 1]]).unwrap());
// i += 2;
// }

if chunk.len() < chunk_size {
chunk.resize(chunk_size as usize, Fq::zero());
}
// if merkle.len() % 2 == 1 {
// new_merkle.push(
// hasher(vec![merkle[merkle.len() - 2], merkle[merkle.len() - 2]]).unwrap(),
// );
// }

concat.push(hasher(chunk)?);
i += chunk_size;
}
// merkle = new_merkle;
// }

if concat.len() > 1 {
return hasher(concat);
}
// return merkle[0];
// }

return Ok(concat[0]);
}
// #[test]
// fn should_proove() {
// let mut rng = ThreadRng::default();
// let data: Vec<(Vec<Fq>, Fq)> = (0..4)
// .map(|_| {
// let preimages = vec![Fq::rand(&mut rng); 32];
// let hash = digest(preimages.clone(), None).unwrap();
// return (preimages, hash);
// })
// .collect();

fn merkelize(leafs: Vec<Fq>) -> Fq {
// simple merkle root (treehash) generator
// unbalanced trees will have the last leaf duplicated
let mut merkle: Vec<Fq> = leafs;
let hasher = hasher(Curve::Bn254, 5, 3);
// let chunks: Vec<Vec<Fq>> = data.iter().map(|c| c.0.to_vec()).collect();
// let hashes: Vec<Fq> = data.iter().map(|c| c.1).collect();
// let path = [0, 1, 2, 3].to_vec();

while merkle.len() > 1 {
let mut new_merkle = Vec::new();
let mut i = 0;
while i < merkle.len() {
new_merkle.push(hasher(vec![merkle[i], merkle[i + 1]]).unwrap());
i += 2;
}
// let hash2 = hasher(Curve::Bn254, 5, 3);
// let parent_hash_l = hash2(vec![hashes[0], hashes[1]]).unwrap();
// let parent_hash_r = hash2(vec![hashes[2], hashes[3]]).unwrap();

if merkle.len() % 2 == 1 {
new_merkle.push(
hasher(vec![merkle[merkle.len() - 2], merkle[merkle.len() - 2]]).unwrap(),
);
}
// let siblings = [
// [hashes[1], parent_hash_r].to_vec(),
// [hashes[1], parent_hash_r].to_vec(),
// [hashes[3], parent_hash_l].to_vec(),
// [hashes[2], parent_hash_l].to_vec(),
// ]
// .to_vec();

merkle = new_merkle;
}
// let root = merkelize(hashes.clone());
// let mut proof_bytes: Vec<u8> = Vec::new();
// let mut public_inputs_bytes: Vec<u8> = Vec::new();

return merkle[0];
}
// let r1cs = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test.r1cs";
// let wasm = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test_js/storer_test.wasm";

#[test]
fn should_proove() {
let mut rng = ThreadRng::default();
let data: Vec<(Vec<Fq>, Fq)> = (0..4)
.map(|_| {
let preimages = vec![Fq::rand(&mut rng); 32];
let hash = digest(preimages.clone(), None).unwrap();
return (preimages, hash);
})
.collect();

let chunks: Vec<Vec<Fq>> = data.iter().map(|c| c.0.to_vec()).collect();
let hashes: Vec<Fq> = data.iter().map(|c| c.1).collect();
let path = [0, 1, 2, 3].to_vec();

let hash2 = hasher(Curve::Bn254, 5, 3);
let parent_hash_l = hash2(vec![hashes[0], hashes[1]]).unwrap();
let parent_hash_r = hash2(vec![hashes[2], hashes[3]]).unwrap();

let siblings = [
[hashes[1], parent_hash_r].to_vec(),
[hashes[1], parent_hash_r].to_vec(),
[hashes[3], parent_hash_l].to_vec(),
[hashes[2], parent_hash_l].to_vec(),
]
.to_vec();

let root = merkelize(hashes.clone());
let mut proof_bytes: Vec<u8> = Vec::new();
let mut public_inputs_bytes: Vec<u8> = Vec::new();

let r1cs = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test.r1cs";
let wasm = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test_js/storer_test.wasm";

let mut prover = StorageProofs::new(wasm.to_string(), r1cs.to_string());
prover
.prove(
chunks,
siblings,
hashes,
path,
root,
root, // random salt
proof_bytes,
public_inputs_bytes,
)
.unwrap();
}
}
// let mut prover = StorageProofs::new(wasm.to_string(), r1cs.to_string());
// prover
// .prove(
// chunks,
// siblings,
// hashes,
// path,
// root,
// root, // random salt
// proof_bytes,
// public_inputs_bytes,
// )
// .unwrap();
// }
// }
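The `proof.serialize(proof_bytes)` / `inputs.serialize(public_inputs_bytes)` calls left commented out in `prove()` would need `ark-serialize` (also commented out in this commit's Cargo.toml). A minimal sketch of how that step could be completed, assuming the `CanonicalSerialize` impls that ark-groth16 0.3 provides; `serialize_proof` is an illustrative helper, not part of the commit:

use ark_bn254::{Bn254, Fr};
use ark_groth16::Proof;
use ark_serialize::CanonicalSerialize;

// Write the Groth16 proof and the circuit's public inputs into byte buffers.
fn serialize_proof(
    proof: &Proof<Bn254>,
    public_inputs: &[Fr],
    proof_bytes: &mut Vec<u8>,
    public_inputs_bytes: &mut Vec<u8>,
) -> Result<(), ark_serialize::SerializationError> {
    proof.serialize(&mut *proof_bytes)?;
    for input in public_inputs {
        input.serialize(&mut *public_inputs_bytes)?;
    }
    Ok(())
}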
26 src/utils.rs Normal file
@@ -0,0 +1,26 @@
use crate::poseidon::hash;
use ruint::aliases::U256;

fn digest(input: &[U256], chunk_size: Option<usize>) -> U256 {
    let chunk_size = chunk_size.unwrap_or(4);
    let chunks = ((input.len() as f32) / (chunk_size as f32)).ceil() as usize;
    let mut concat = vec![];

    // hash the input in chunk_size-sized chunks, zero-padding the last one
    let mut i: usize = 0;
    while i < chunks {
        let range = (i * chunk_size)..std::cmp::min((i + 1) * chunk_size, input.len());
        let mut chunk: Vec<U256> = input[range].to_vec();
        if chunk.len() < chunk_size {
            chunk.resize(chunk_size, U256::ZERO);
        }

        concat.push(hash(&chunk));
        i += 1;
    }

    // hash the concatenated chunk hashes once more, unless there was a single chunk
    if concat.len() > 1 {
        return hash(&concat);
    }

    concat[0]
}
@@ -134,4 +134,8 @@ describe("Storer test", function () {
/Error: Error: Assert Failed.\nError in template StorageProver_7 line: 75/);

}).timeout(100000);

// it("Should should hash item", async () => {
//   console.log(poseidon([0, 0, 0]).toString(16));
// });
});