deploy: 3ee5f606b1b76acd9029c96d2cfd1047afcf8ef4

This commit is contained in:
staheri14 2021-03-02 21:22:31 +00:00
parent 73a79a1759
commit ef9337c871
25 changed files with 88 additions and 3313 deletions

40
.gitignore vendored
View File

@ -1,8 +1,32 @@
/target
/pkg
/examples/www
node_modules
*.key
Cargo.lock
.cargo
tmp_wasm
/nimcache
# Executables shall be put in an ignored build/ directory
/build
# Nimble packages
/vendor/.nimble
# Generated Files
*.generated.nim
# ntags/ctags output
/tags
# a symlink that can't be added to the repo because of Windows
/waku.nims
# Ignore dynamic, static libs and libtool archive files
*.so
*.dylib
*.a
*.la
*.exe
*.dll
.DS_Store
# Ignore simulation generated metrics files
/metrics/prometheus
/metrics/waku-sim-all-nodes-grafana-dashboard.json
rln

4
.gitmodules vendored
View File

@ -106,3 +106,7 @@
[submodule "vendor/nim-web3"]
path = vendor/nim-web3
url = https://github.com/status-im/nim-web3.git
[submodule "vendor/rln"]
path = vendor/rln
url = https://github.com/kilic/rln
branch = full-node

View File

@ -1 +1 @@
1614608398
1614718668

View File

@ -1,58 +0,0 @@
# Manifest for the RLN (rate limit nullifier) zkSNARK crate.
[package]
name = "rln"
version = "0.1.0"
authors = ["Onur Kılıç <kiliconu@itu.edu.tr>"]
edition = "2018"

# Build both a C-compatible dynamic library (for FFI consumers such as
# nim-waku) and a regular Rust library.
[lib]
crate-type = ["cdylib", "rlib"]

[features]
# Multi-threaded proving in the upstream crypto crates.
multicore = ["sapling-crypto/multicore", "bellman/multicore"]
# wasm target support (nolog disables bellman's logging).
wasm = ["sapling-crypto/wasm", "bellman/wasm", "bellman/nolog"]
bench = []

[dependencies]
rand = "0.4"
blake2 = "0.8.1"
sapling-crypto = { package = "sapling-crypto_ce", version = "0.1.3", default-features = false }
# sapling-crypto = {package = "sapling-crypto_ce", path = "../sapling-crypto", default-features = false }
bellman = { package = "bellman_ce", version = "0.3.4", default-features = false }
# bellman = {package = "bellman_ce", path = "../bellman", default-features = false }

# Dependencies pulled in only when compiling for wasm32.
[target.'cfg(target_arch = "wasm32")'.dependencies]
hex = "0.4"
console_error_panic_hook = { version = "0.1.1" }
wasm-bindgen = "=0.2.60"
# wee_alloc = "0.4.5"
web-sys = {version = "0.3", features = ["console", "Performance", "Window"]}
js-sys = "0.3.37"

[target.'cfg(target_arch = "wasm32")'.dev-dependencies]
wasm-bindgen-test = "0.3"

[profile.release]
opt-level = 3
lto = "thin"
incremental = true

# build all our deps in release mode
[profile.dev.package."*"]
opt-level = 3

[profile.bench]
opt-level = 3
debug = false
rpath = false
lto = "thin"
incremental = true
debug-assertions = false

[profile.test]
opt-level = 3
incremental = true
debug-assertions = true
debug = true

View File

@ -119,8 +119,7 @@ installganache:
npm install ganache-cli; npx ganache-cli -p 8540 -g 0 -l 3000000000000&
rlnlib:
#cargo clean --manifest-path rln/Cargo.toml #TODO may need to clean the rln directory before cloning the rln repo
rm -rf rln; git clone --branch full-node https://github.com/kilic/rln; git --git-dir=rln/.git reset --hard a80f5d0; cargo build --manifest-path rln/Cargo.toml;
cargo build --manifest-path vendor/rln/Cargo.toml
test2: | build deps installganache
echo -e $(BUILD_MSG) "build/$@" && \

View File

@ -1,41 +1,56 @@
# RLN
# nim-waku
This is the development repo of rate limit nullifier zkSNARK circuits.
## Introduction
For details, see work in progress document [here](https://hackmd.io/tMTLMYmTR5eynw2lwK9n1w?view)
The nim-waku repository implements Waku v1 and v2, and provides tools related to it.
## Test
- A Nim implementation of the [Waku v1 protocol](https://specs.vac.dev/waku/waku.html).
- A Nim implementation of the [Waku v2 protocol](https://specs.vac.dev/specs/waku/v2/waku-v2.html).
- CLI applications `wakunode` and `wakunode2` that allow you to run a Waku v1 or v2 node.
- Examples of Waku v1 and v2 usage.
- Various tests of above.
```
cargo test --release --features multicore rln_32 -- --nocapture
For more details on Waku v1 and v2, see their respective home folders:
- [Waku v1](waku/v1/README.md)
- [Waku v2](waku/v2/README.md)
## How to Build & Run
These instructions are generic and apply to both Waku v1 and v2. For more
detailed instructions, see Waku v1 and v2 home above.
### Prerequisites
* GNU Make, Bash and the usual POSIX utilities. Git 2.9.4 or newer.
* PCRE
More information on the installation of these can be found [here](https://github.com/status-im/nimbus#prerequisites).
### Wakunode
```bash
# The first `make` invocation will update all Git submodules.
# You'll run `make update` after each `git pull`, in the future, to keep those submodules up to date.
make wakunode1 wakunode2
# See available command line options
./build/wakunode --help
./build/wakunode2 --help
# Connect the client directly with the Status test fleet
./build/wakunode --log-level:debug --discovery:off --fleet:test --log-metrics
# TODO Equivalent for v2
```
## Generate Test Keys
### Waku Protocol Test Suite
```
cargo run --release --example export_test_keys
```bash
# Run all the Waku v1 and v2 tests
make test
```
## Wasm Support
### Examples
### Build
```
wasm-pack build --release --target=nodejs --scope=rln --out-name=$PACKAGE --out-dir=$PACKAGE_DIR -- --features wasm
```
### Test
With wasm-pack:
```
wasm-pack test --release --node -- --features wasm
```
With cargo:
Follow the steps [here](https://rustwasm.github.io/docs/wasm-bindgen/wasm-bindgen-test/usage.html#appendix-using-wasm-bindgen-test-without-wasm-pack) before running the test, then run:
```
cargo test --release --target wasm32-unknown-unknown --features wasm
```
Examples can be found in the examples folder. For Waku v2, there is a fully
featured chat example.

View File

@ -1,38 +0,0 @@
// Exports Groth16 keys for the RLN circuit at a fixed merkle depth of 32.
#[cfg(not(target_arch = "wasm32"))]
fn main() {
    use sapling_crypto::bellman::pairing::bn256::Bn256;
    let merkle_depth = 32usize;
    test_keys::export::<Bn256>(merkle_depth);
}

// Key export uses the native prover and filesystem; it is explicitly not
// supported under wasm.
#[cfg(target_arch = "wasm32")]
fn main() {
    panic!("should not be run in wasm");
}

#[cfg(not(target_arch = "wasm32"))]
mod test_keys {
    use sapling_crypto::bellman::pairing::Engine;

    /// Generates random Groth16 parameters for an (unassigned) RLN circuit
    /// of the given merkle depth and writes the verifying key to
    /// `verifier.key` and the full parameters to `parameters.key` in the
    /// working directory.
    pub fn export<E: Engine>(merkle_depth: usize) {
        use rand::{SeedableRng, XorShiftRng};
        use rln::circuit::poseidon::PoseidonCircuit;
        use rln::circuit::rln::{RLNCircuit, RLNInputs};
        use rln::poseidon::PoseidonParams;
        use sapling_crypto::bellman::groth16::generate_random_parameters;
        use std::fs::File;
        let poseidon_params = PoseidonParams::<E>::new(8, 55, 3, None, None, None);
        // Fixed seed: key generation is reproducible — suitable for tests
        // only, not for a production trusted setup.
        let mut rng = XorShiftRng::from_seed([0x3dbe6258, 0x8d313d76, 0x3237db17, 0xe5bc0654]);
        let hasher = PoseidonCircuit::new(poseidon_params.clone());
        // `empty` inputs: synthesis runs without witness assignments.
        let circuit = RLNCircuit::<E> {
            inputs: RLNInputs::<E>::empty(merkle_depth),
            hasher: hasher.clone(),
        };
        let parameters = generate_random_parameters(circuit, &mut rng).unwrap();
        let mut file_vk = File::create("verifier.key").unwrap();
        let vk = parameters.vk.clone();
        vk.write(&mut file_vk).unwrap();
        let mut file_paramaters = File::create("parameters.key").unwrap();
        parameters.write(&mut file_paramaters).unwrap();
    }
}

1
rln

@ -1 +0,0 @@
Subproject commit a80f5d013eb092ff18bd1d946c57565e2cdc65da

View File

@ -1,230 +0,0 @@
use crate::circuit::rln::{RLNCircuit, RLNInputs};
use crate::merkle::MerkleTree;
use crate::poseidon::{Poseidon as PoseidonHasher, PoseidonParams};
use crate::{circuit::poseidon::PoseidonCircuit, public::RLNSignal};
use rand::{Rand, SeedableRng, XorShiftRng};
use sapling_crypto::bellman::groth16::*;
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
use sapling_crypto::bellman::Circuit;
use sapling_crypto::circuit::test::TestConstraintSystem;
use std::io::{self, ErrorKind, Read, Write};
use std::thread::sleep;
use std::time::{Duration, Instant};
use std::{error::Error, hash::Hash};
use crate::public::RLN;
/// Result of one prover benchmark run: the serialized prover key size in
/// bytes and the proving time in seconds.
pub struct ProverBenchResult {
    pub prover_key_size: usize,
    pub prover_time: f64,
}

impl ProverBenchResult {
    /// Creates a zeroed result (no key, zero elapsed time).
    pub fn new() -> ProverBenchResult {
        ProverBenchResult {
            prover_key_size: 0,
            prover_time: 0f64,
        }
    }
}
/// Convenience entry point: builds an `RLNTest` harness for the given
/// merkle depth and Poseidon parameters and runs the prover benchmark.
pub fn run_rln_prover_bench<E: Engine>(
    merkle_depth: usize,
    poseidon_params: PoseidonParams<E>,
) -> ProverBenchResult {
    RLNTest::new(merkle_depth, Some(poseidon_params)).run_prover_bench()
}
/// Test/benchmark harness wrapping an `RLN` instance together with the
/// merkle tree depth it was constructed for.
pub struct RLNTest<E>
where
    E: Engine,
{
    rln: RLN<E>,
    merkle_depth: usize,
}
impl<E> RLNTest<E>
where
    E: Engine,
{
    // Deterministic RNG so benchmark inputs are reproducible across runs.
    fn rng() -> XorShiftRng {
        XorShiftRng::from_seed([0x3dbe6258, 0x8d313d76, 0x3237db17, 0xe5bc0654])
    }

    // Fixed secret key of the benchmark member.
    fn secret_key() -> E::Fr {
        E::Fr::from_str("1001").unwrap()
    }

    // Hashes the fixed secret key into a public key and registers it as
    // the next member of the wrapped RLN membership set.
    fn insert_public_key(&mut self) {
        let hasher = self.hasher();
        let public_key = hasher.hash(vec![Self::secret_key()]);
        let mut pubkey_data: Vec<u8> = Vec::new();
        public_key.into_repr().write_le(&mut pubkey_data).unwrap();
        self.rln.update_next_member(pubkey_data.as_slice()).unwrap();
    }

    // Membership index of the benchmark member (first inserted slot).
    fn id_index() -> usize {
        0
    }

    /// Builds a harness with a fresh `RLN` instance and inserts the
    /// benchmark member's public key.
    pub fn new(merkle_depth: usize, poseidon_params: Option<PoseidonParams<E>>) -> RLNTest<E> {
        let mut rln_test = RLNTest {
            rln: RLN::new(merkle_depth, poseidon_params),
            merkle_depth,
        };
        rln_test.insert_public_key();
        rln_test
    }

    /// Poseidon hasher configured for the wrapped RLN instance.
    pub fn hasher(&self) -> PoseidonHasher<E> {
        self.rln.hasher()
    }

    /// Constructs a full set of valid circuit inputs: a random identity
    /// inserted into an otherwise empty membership tree plus a share
    /// `(share_x, share_y)` and nullifier derived for a random epoch.
    /// Note this uses its own local tree, not the harness's RLN state.
    pub fn valid_inputs(&self) -> RLNInputs<E> {
        let mut rng = Self::rng();
        let hasher = self.rln.hasher();
        // Initialize empty merkle tree
        let merkle_depth = self.merkle_depth;
        let mut membership_tree = MerkleTree::empty(hasher.clone(), merkle_depth);
        // A. setup an identity
        let secret_key = E::Fr::rand(&mut rng);
        let id_comm: E::Fr = hasher.hash(vec![secret_key.clone()]);
        // B. insert to the membership tree
        let id_index = 6; // any number below 2^depth will work
        membership_tree.update(id_index, id_comm).unwrap();
        // C.1 get membership witness
        let auth_path = membership_tree.get_witness(id_index).unwrap();
        assert!(membership_tree
            .check_inclusion(auth_path.clone(), id_index)
            .unwrap());
        // C.2 prepare sss
        // get current epoch
        let epoch = E::Fr::rand(&mut rng);
        let signal_hash = E::Fr::rand(&mut rng);
        // evaluation point is the signal_hash
        let share_x = signal_hash.clone();
        // calculate current line equation
        let a_0 = secret_key.clone();
        let a_1: E::Fr = hasher.hash(vec![a_0, epoch]);
        // evaluate line equation: share_y = a_0 + a_1 * share_x
        let mut share_y = a_1.clone();
        share_y.mul_assign(&share_x);
        share_y.add_assign(&a_0);
        // calculate nullifier
        let nullifier = hasher.hash(vec![a_1]);
        // compose the circuit
        let inputs = RLNInputs::<E> {
            share_x: Some(share_x),
            share_y: Some(share_y),
            epoch: Some(epoch),
            nullifier: Some(nullifier),
            root: Some(membership_tree.get_root()),
            id_key: Some(secret_key),
            auth_path: auth_path.into_iter().map(|w| Some(w)).collect(),
        };
        inputs
    }

    /// Produces a random `RLNSignal` (epoch + signal hash).
    pub fn signal(&self) -> RLNSignal<E> {
        let mut rng = Self::rng();
        let epoch = E::Fr::rand(&mut rng);
        let signal_hash = E::Fr::rand(&mut rng);
        RLNSignal {
            epoch,
            hash: signal_hash,
        }
    }

    /// Synthesizes the RLN circuit against a `TestConstraintSystem`,
    /// panicking on unsatisfied or unconstrained variables, and returns
    /// the number of constraints.
    pub fn synthesize(&self) -> usize {
        let hasher = PoseidonCircuit::new(self.rln.poseidon_params());
        println!("{}", self.merkle_depth);
        let inputs = self.valid_inputs();
        let circuit = RLNCircuit::<E> {
            inputs: inputs.clone(),
            hasher: hasher.clone(),
        };
        let mut cs = TestConstraintSystem::<E>::new();
        let circuit = circuit.clone();
        match circuit.synthesize(&mut cs) {
            Ok(_) => (),
            Err(e) => {
                println!("err\n{}", e);
            }
        }
        let unsatisfied = cs.which_is_unsatisfied();
        if unsatisfied.is_some() {
            panic!("unsatisfied\n{}", unsatisfied.unwrap());
        }
        let unconstrained = cs.find_unconstrained();
        if !unconstrained.is_empty() {
            panic!("unconstrained\n{}", unconstrained);
        }
        assert!(cs.is_satisfied());
        cs.num_constraints()
    }

    /// Generates a proof for a random signal with the fixed secret key,
    /// verifies it, and reports proving time (seconds) and the exported
    /// prover key size (bytes).
    pub fn run_prover_bench(&self) -> ProverBenchResult {
        let mut signal_data: Vec<u8> = Vec::new();
        let signal = self.signal();
        signal.write(&mut signal_data).unwrap();
        let mut proof: Vec<u8> = Vec::new();
        // Timer starts here, so the measured interval also covers the
        // secret-key serialization below, not just proof generation.
        let now = Instant::now();
        let mut secret_key_data: Vec<u8> = Vec::new();
        let secret_key = Self::secret_key();
        secret_key
            .into_repr()
            .write_le(&mut secret_key_data)
            .unwrap();
        let id_index = Self::id_index();
        self.rln
            .generate_proof(
                signal_data.as_slice(),
                secret_key_data.as_slice(),
                id_index,
                &mut proof,
            )
            .unwrap();
        let prover_time = now.elapsed().as_millis() as f64 / 1000.0;
        // NOTE(review): the second argument of `assert!` is a format
        // message, so this only asserts `verify(..) == true` by accident
        // of the first argument; `assert_eq!` was presumably intended.
        assert!(self.rln.verify(proof.as_slice()).unwrap(), true);
        let mut circuit_parameters: Vec<u8> = Vec::new();
        self.rln
            .export_circuit_parameters(&mut circuit_parameters)
            .unwrap();
        let prover_key_size = circuit_parameters.len();
        ProverBenchResult {
            prover_time,
            prover_key_size,
        }
    }

    /// Writes the circuit parameters (prover key) to `w`.
    pub fn export_circuit_parameters<W: Write>(&self, w: W) -> io::Result<()> {
        self.rln.export_circuit_parameters(w)
    }
}

View File

@ -1,6 +0,0 @@
// Circuit module layout: polynomial helpers stay private; the poseidon
// and rln circuits are public. The bench harness is compiled only for
// tests or when the `bench` feature is enabled.
mod polynomial;
pub mod poseidon;
pub mod rln;
#[cfg(any(test, feature = "bench"))]
pub mod bench;

View File

@ -1,46 +0,0 @@
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
use sapling_crypto::bellman::{Circuit, ConstraintSystem, SynthesisError, Variable};
use sapling_crypto::circuit::{boolean, ecc, num, Assignment};
// helper for horner evaluation methods
// b = a_0 + a_1 * x
/// Allocates and constrains `y = a0 + a1 * x` in the constraint system
/// and returns the allocated result. Adds two constraints: one for the
/// product `a1 * x` and one for the final sum.
pub fn allocate_add_with_coeff<CS, E>(
    mut cs: CS,
    a1: &num::AllocatedNum<E>,
    x: &num::AllocatedNum<E>,
    a0: &num::AllocatedNum<E>,
) -> Result<num::AllocatedNum<E>, SynthesisError>
where
    E: Engine,
    CS: ConstraintSystem<E>,
{
    // Witness the product a1 * x.
    let ax = num::AllocatedNum::alloc(cs.namespace(|| "a1x"), || {
        let mut ax_val = *a1.get_value().get()?;
        let x_val = *x.get_value().get()?;
        ax_val.mul_assign(&x_val);
        Ok(ax_val)
    })?;
    // Enforce a1 * x == ax.
    cs.enforce(
        || "a1*x",
        |lc| lc + a1.get_variable(),
        |lc| lc + x.get_variable(),
        |lc| lc + ax.get_variable(),
    );
    // Witness the sum y = a0 + ax.
    let y = num::AllocatedNum::alloc(cs.namespace(|| "y"), || {
        let ax_val = *ax.get_value().get()?;
        let mut y_val = *a0.get_value().get()?;
        y_val.add_assign(&ax_val);
        Ok(y_val)
    })?;
    // Enforce (ax + a0) * 1 == y.
    cs.enforce(
        || "enforce y",
        |lc| lc + ax.get_variable() + a0.get_variable(),
        |lc| lc + CS::one(),
        |lc| lc + y.get_variable(),
    );
    Ok(y)
}

View File

@ -1,403 +0,0 @@
use crate::poseidon::{Poseidon as PoseidonHasher, PoseidonParams};
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
use sapling_crypto::bellman::{Circuit, ConstraintSystem, LinearCombination, SynthesisError};
use sapling_crypto::circuit::{boolean, ecc, num, Assignment};
// A state element holds either an allocated variable (`an`) or a symbolic
// linear combination (`nu`); only one side is populated at a time.
#[derive(Clone)]
struct Element<E>
where
    E: Engine,
{
    an: Option<num::AllocatedNum<E>>,
    nu: Option<num::Num<E>>,
}

// Classification of a round by its position in the round schedule.
enum RoundType {
    Full,
    Partial,
    // Past the last round: the permutation is finished.
    Exhausted,
}

// Tracks the current round number against the Poseidon parameters.
struct RoundCtx<'a, E>
where
    E: Engine,
{
    number: usize,
    params: &'a PoseidonParams<E>,
}

// The permutation state: `width()` elements.
struct State<E>
where
    E: Engine,
{
    elements: Vec<Element<E>>,
}

/// In-circuit Poseidon hash, parameterized by `PoseidonParams`.
#[derive(Clone)]
pub struct PoseidonCircuit<E>
where
    E: Engine,
{
    params: PoseidonParams<E>,
}
impl<E> Element<E>
where
    E: Engine,
{
    // Wraps an already-allocated variable.
    pub fn new_from_alloc(an: num::AllocatedNum<E>) -> Self {
        return Element {
            an: Some(an),
            nu: None,
        };
    }

    // Wraps a symbolic linear combination.
    pub fn new_from_num(nu: num::Num<E>) -> Self {
        return Element {
            an: None,
            nu: Some(nu),
        };
    }

    pub fn is_allocated(&self) -> bool {
        return self.an.is_some();
    }

    pub fn is_number(&self) -> bool {
        return self.nu.is_some();
    }

    // Switches to allocated form, dropping the numeric form.
    pub fn update_with_allocated(&mut self, an: num::AllocatedNum<E>) {
        self.an = Some(an);
        self.nu = None;
    }

    // Switches to numeric form, dropping the allocated form.
    pub fn update_with_num(&mut self, nu: num::Num<E>) {
        self.nu = Some(nu);
        self.an = None;
    }

    // Returns the element as a `Num`, converting from the allocated form
    // when needed. Panics if the element holds neither representation.
    pub fn num(&self) -> num::Num<E> {
        if let Some(nu) = self.nu.clone() {
            nu
        } else {
            match self.an.clone() {
                Some(an) => num::Num::from(an),
                None => panic!("element not exist"),
            }
        }
    }

    // Allocates the numeric form as a variable and enforces equality with
    // its linear combination. Panics if the element is not numeric.
    pub fn allocate<CS: ConstraintSystem<E>>(
        &self,
        mut cs: CS,
    ) -> Result<num::AllocatedNum<E>, SynthesisError> {
        match self.nu.clone() {
            Some(nu) => {
                let v = num::AllocatedNum::alloc(cs.namespace(|| "allocate num"), || {
                    nu.get_value()
                        .ok_or_else(|| SynthesisError::AssignmentMissing)
                })?;
                // Enforce nu * 1 == v.
                cs.enforce(
                    || format!("enforce allocated"),
                    |_| nu.lc(E::Fr::one()),
                    |lc| lc + CS::one(),
                    |lc| lc + v.get_variable(),
                );
                Ok(v)
            }
            None => panic!(""),
        }
    }

    // Returns the allocated form, if any.
    pub fn allocated(&self) -> Option<num::AllocatedNum<E>> {
        self.an.clone()
    }
}
impl<'a, E> RoundCtx<'a, E>
where
    E: Engine,
{
    // Starts the schedule at round 0.
    pub fn new(params: &'a PoseidonParams<E>) -> Self {
        RoundCtx {
            params,
            number: 0usize,
        }
    }

    // State width (number of elements) from the parameters.
    pub fn width(&self) -> usize {
        self.params.width()
    }

    pub fn round_number(&self) -> usize {
        self.number
    }

    pub fn is_full_round(&self) -> bool {
        match self.round_type() {
            RoundType::Full => true,
            _ => false,
        }
    }

    pub fn is_exhausted(&self) -> bool {
        match self.round_type() {
            RoundType::Exhausted => true,
            _ => false,
        }
    }

    pub fn is_last_round(&self) -> bool {
        self.number == self.params.total_rounds() - 1
    }

    // True on the final round of the first full-round half and on the
    // final partial round, i.e. just before the schedule switches phase.
    pub fn in_transition(&self) -> bool {
        let a1 = self.params.full_round_half_len();
        let a2 = a1 + self.params.partial_round_len();
        self.number == a1 - 1 || self.number == a2 - 1
    }

    pub fn round_constant(&self) -> E::Fr {
        self.params.round_constant(self.number)
    }

    // Row `i` of the MDS matrix (stored row-major, width x width).
    pub fn mds_matrix_row(&self, i: usize) -> Vec<E::Fr> {
        let w = self.width();
        let matrix = self.params.mds_matrix();
        matrix[i * w..(i + 1) * w].to_vec()
    }

    // Schedule: full rounds, then partial rounds, then full rounds again;
    // anything at or past `total_rounds` is Exhausted.
    pub fn round_type(&self) -> RoundType {
        let a1 = self.params.full_round_half_len();
        let (a2, a3) = (
            a1 + self.params.partial_round_len(),
            self.params.total_rounds(),
        );
        if self.number < a1 {
            RoundType::Full
        } else if self.number >= a1 && self.number < a2 {
            RoundType::Partial
        } else if self.number >= a2 && self.number < a3 {
            RoundType::Full
        } else {
            RoundType::Exhausted
        }
    }

    // Advances to the next round.
    pub fn round_end(&mut self) {
        self.number += 1;
    }
}
impl<E> State<E>
where
    E: Engine,
{
    pub fn new(elements: Vec<Element<E>>) -> Self {
        Self { elements }
    }

    // Returns the first state element as an allocated variable (allocating
    // it if it is still in numeric form); this is the hash output once the
    // permutation has finished.
    pub fn first_allocated<CS: ConstraintSystem<E>>(
        &mut self,
        mut cs: CS,
    ) -> Result<num::AllocatedNum<E>, SynthesisError> {
        let el = match self.elements[0].allocated() {
            Some(an) => an,
            None => self.elements[0].allocate(cs.namespace(|| format!("alloc first")))?,
        };
        Ok(el)
    }

    // Applies the s-box `(s + c)^5` to every element in a full round and
    // only to the first element in a partial round.
    fn sbox<CS: ConstraintSystem<E>>(
        &mut self,
        mut cs: CS,
        ctx: &mut RoundCtx<E>,
    ) -> Result<(), SynthesisError> {
        assert_eq!(ctx.width(), self.elements.len());
        for i in 0..if ctx.is_full_round() { ctx.width() } else { 1 } {
            let round_constant = ctx.round_constant();
            let si = {
                match self.elements[i].allocated() {
                    Some(an) => an,
                    None => self.elements[i]
                        .allocate(cs.namespace(|| format!("alloc sbox input {}", i)))?,
                }
            };
            // si2 = (si + c)^2
            let si2 = num::AllocatedNum::alloc(
                cs.namespace(|| format!("square with round constant {}", i)),
                || {
                    let mut val = *si.get_value().get()?;
                    val.add_assign(&round_constant);
                    val.square();
                    Ok(val)
                },
            )?;
            cs.enforce(
                || format!("constraint square with round constant {}", i),
                |lc| lc + si.get_variable() + (round_constant, CS::one()),
                |lc| lc + si.get_variable() + (round_constant, CS::one()),
                |lc| lc + si2.get_variable(),
            );
            // si4 = (si + c)^4
            let si4 = si2.square(cs.namespace(|| format!("si^4 {}", i)))?;
            // si5 = (si + c)^5
            let si5 = num::AllocatedNum::alloc(cs.namespace(|| format!("si^5 {}", i)), || {
                let mut val = *si4.get_value().get()?;
                let mut si_val = *si.get_value().get()?;
                si_val.add_assign(&round_constant);
                val.mul_assign(&si_val);
                Ok(val)
            })?;
            cs.enforce(
                || format!("constraint sbox result {}", i),
                |lc| lc + si.get_variable() + (round_constant, CS::one()),
                |lc| lc + si4.get_variable(),
                |lc| lc + si5.get_variable(),
            );
            self.elements[i].update_with_allocated(si5);
        }
        Ok(())
    }

    // Multiplies the state by the MDS matrix symbolically (linear
    // combinations only — no constraints added) and advances the round
    // counter; round constants for the next round may be folded in here.
    fn mul_mds_matrix<CS: ConstraintSystem<E>>(
        &mut self,
        ctx: &mut RoundCtx<E>,
    ) -> Result<(), SynthesisError> {
        assert_eq!(ctx.width(), self.elements.len());
        if !ctx.is_last_round() {
            // skip mds multiplication in last round
            let mut new_state: Vec<num::Num<E>> = Vec::new();
            let w = ctx.width();
            for i in 0..w {
                let row = ctx.mds_matrix_row(i);
                let mut acc = num::Num::<E>::zero();
                for j in 0..w {
                    let mut r = self.elements[j].num();
                    r.scale(row[j]);
                    acc.add_assign(&r);
                }
                new_state.push(acc);
            }
            // round ends here
            let is_full_round = ctx.is_full_round();
            let in_transition = ctx.in_transition();
            ctx.round_end();
            // add round constants just after mds if
            // first full round has just ended
            // or in partial rounds except the last one.
            if in_transition == is_full_round {
                // add round constants for elements in {1, t}
                let round_constant = ctx.round_constant();
                for i in 1..w {
                    let mut constant_as_num = num::Num::<E>::zero();
                    constant_as_num = constant_as_num.add_bool_with_coeff(
                        CS::one(),
                        &boolean::Boolean::Constant(true),
                        round_constant,
                    );
                    new_state[i].add_assign(&constant_as_num);
                }
            }
            for (s0, s1) in self.elements.iter_mut().zip(new_state) {
                s0.update_with_num(s1);
            }
        } else {
            // terminates hades
            ctx.round_end();
        }
        Ok(())
    }
}
impl<E> PoseidonCircuit<E>
where
    E: Engine,
{
    pub fn new(params: PoseidonParams<E>) -> Self {
        Self { params: params }
    }

    pub fn width(&self) -> usize {
        self.params.width()
    }

    /// Hashes the allocated inputs with the Poseidon permutation inside
    /// the constraint system and returns the allocated digest (the first
    /// state element). The input length must be strictly smaller than the
    /// state width; the remaining slots are padded with zero.
    pub fn alloc<CS: ConstraintSystem<E>>(
        &self,
        mut cs: CS,
        input: Vec<num::AllocatedNum<E>>,
    ) -> Result<num::AllocatedNum<E>, SynthesisError> {
        assert!(input.len() < self.params.width());
        let mut elements: Vec<Element<E>> = input
            .iter()
            .map(|el| Element::new_from_alloc(el.clone()))
            .collect();
        elements.resize(self.width(), Element::new_from_num(num::Num::zero()));
        let mut state = State::new(elements);
        let mut ctx = RoundCtx::new(&self.params);
        // Apply s-box + MDS rounds until the schedule is exhausted.
        loop {
            match ctx.round_type() {
                RoundType::Exhausted => {
                    break;
                }
                _ => {
                    let round_number = ctx.round_number();
                    state.sbox(cs.namespace(|| format!("sbox {}", round_number)), &mut ctx)?;
                    state.mul_mds_matrix::<CS>(&mut ctx)?;
                }
            }
        }
        state.first_allocated(cs.namespace(|| format!("allocate result")))
    }
}
// Checks that the in-circuit Poseidon hash of [0, 0] matches the native
// hasher's output and that every constraint is satisfied.
#[test]
fn test_poseidon_circuit() {
    use sapling_crypto::bellman::pairing::bn256::{Bn256, Fr};
    use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
    use sapling_crypto::circuit::test::TestConstraintSystem;
    let mut cs = TestConstraintSystem::<Bn256>::new();
    let params = PoseidonParams::new(8, 55, 3, None, None, None);
    let inputs: Vec<Fr> = ["0", "0"]
        .iter()
        .map(|e| Fr::from_str(e).unwrap())
        .collect();
    let allocated_inputs = inputs
        .clone()
        .into_iter()
        .enumerate()
        .map(|(i, e)| {
            let a = num::AllocatedNum::alloc(cs.namespace(|| format!("input {}", i)), || Ok(e));
            a.unwrap()
        })
        .collect();
    let circuit = PoseidonCircuit::<Bn256>::new(params.clone());
    let res_allocated = circuit
        .alloc(cs.namespace(|| "hash alloc"), allocated_inputs)
        .unwrap();
    let result = res_allocated.get_value().unwrap();
    // Compare against the native (non-circuit) Poseidon implementation.
    let poseidon = PoseidonHasher::new(params.clone());
    let expected = poseidon.hash(inputs);
    assert_eq!(result, expected);
    assert!(cs.is_satisfied());
    println!(
        "number of constraints for (t: {}, rf: {}, rp: {}), {}",
        params.width(),
        params.full_round_half_len() * 2,
        params.partial_round_len(),
        cs.num_constraints()
    );
}

View File

@ -1,480 +0,0 @@
use crate::circuit::polynomial::allocate_add_with_coeff;
use crate::circuit::poseidon::PoseidonCircuit;
use crate::poseidon::{Poseidon as PoseidonHasher, PoseidonParams};
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
use sapling_crypto::bellman::{Circuit, ConstraintSystem, SynthesisError, Variable};
use sapling_crypto::circuit::{boolean, ecc, num, Assignment};
use sapling_crypto::jubjub::{JubjubEngine, JubjubParams, PrimeOrder};
use std::io::{self, Read, Write};
// Rate Limit Nullifier
/// Witness and public-input assignments for the RLN circuit. Fields are
/// `Option` so the same type serves both parameter generation (`empty`,
/// all `None`) and proving (fully assigned).
#[derive(Clone)]
pub struct RLNInputs<E>
where
    E: Engine,
{
    // Public inputs
    // share, (x, y),
    // where x should be hash of the signal
    // and y is the evaluation
    pub share_x: Option<E::Fr>,
    pub share_y: Option<E::Fr>,
    // epoch is the external nullifier
    // we derive the line equation and the nullifier from epoch
    pub epoch: Option<E::Fr>,
    // nullifier
    pub nullifier: Option<E::Fr>,
    // root is the current state of membership set
    pub root: Option<E::Fr>,
    // Private inputs
    // id_key must be a preimage of a leaf in membership tree.
    // id_key also together with epoch will be used to construct
    // a secret line equation together with the epoch
    pub id_key: Option<E::Fr>,
    // authentication path of the member
    pub auth_path: Vec<Option<(E::Fr, bool)>>,
}
impl<E> RLNInputs<E>
where
    E: Engine,
{
    /// Public inputs in the order the circuit exposes them:
    /// root, epoch, share_x, share_y, nullifier. Panics if unassigned.
    pub fn public_inputs(&self) -> Vec<E::Fr> {
        vec![
            self.root.unwrap(),
            self.epoch.unwrap(),
            self.share_x.unwrap(),
            self.share_y.unwrap(),
            self.nullifier.unwrap(),
        ]
    }

    /// Merkle depth implied by the authentication path length.
    pub fn merkle_depth(&self) -> usize {
        self.auth_path.len()
    }

    /// Fully unassigned inputs (e.g. for parameter generation).
    pub fn empty(merkle_depth: usize) -> RLNInputs<E> {
        RLNInputs::<E> {
            share_x: None,
            share_y: None,
            epoch: None,
            nullifier: None,
            root: None,
            id_key: None,
            auth_path: vec![None; merkle_depth],
        }
    }

    /// Reads a full input set: six field elements in little-endian repr
    /// order (share_x, share_y, epoch, nullifier, root, id_key) followed
    /// by the encoded auth path.
    pub fn read<R: Read>(mut reader: R) -> io::Result<RLNInputs<E>> {
        let mut buf = <E::Fr as PrimeField>::Repr::default();
        buf.read_le(&mut reader)?;
        let share_x =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let share_y =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let epoch =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let nullifier =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let root =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let id_key =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        let auth_path = Self::decode_auth_path(&mut reader)?;
        Ok(RLNInputs {
            share_x: Some(share_x),
            share_y: Some(share_y),
            epoch: Some(epoch),
            nullifier: Some(nullifier),
            root: Some(root),
            id_key: Some(id_key),
            auth_path,
        })
    }

    /// Writes a full input set in the same order `read` expects.
    /// Panics (via `unwrap`) if any field is unassigned.
    pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
        self.share_x
            .unwrap()
            .into_repr()
            .write_le(&mut writer)
            .unwrap();
        self.share_y
            .unwrap()
            .into_repr()
            .write_le(&mut writer)
            .unwrap();
        self.epoch
            .unwrap()
            .into_repr()
            .write_le(&mut writer)
            .unwrap();
        self.nullifier
            .unwrap()
            .into_repr()
            .write_le(&mut writer)
            .unwrap();
        self.root
            .unwrap()
            .into_repr()
            .write_le(&mut writer)
            .unwrap();
        self.id_key
            .unwrap()
            .into_repr()
            .write_le(&mut writer)
            .unwrap();
        Self::encode_auth_path(&mut writer, self.auth_path.clone()).unwrap();
        Ok(())
    }

    /// Reads only the public inputs, in circuit order:
    /// root, epoch, share_x, share_y, nullifier.
    pub fn read_public_inputs<R: Read>(mut reader: R) -> io::Result<Vec<E::Fr>> {
        let mut buf = <E::Fr as PrimeField>::Repr::default();
        buf.read_le(&mut reader)?;
        let root =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let epoch =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let share_x =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let share_y =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let nullifier =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        Ok(vec![root, epoch, share_x, share_y, nullifier])
    }

    /// Writes only the public inputs, in circuit order.
    pub fn write_public_inputs<W: Write>(&self, mut writer: W) -> io::Result<()> {
        self.root.unwrap().into_repr().write_le(&mut writer)?;
        self.epoch.unwrap().into_repr().write_le(&mut writer)?;
        self.share_x.unwrap().into_repr().write_le(&mut writer)?;
        self.share_y.unwrap().into_repr().write_le(&mut writer)?;
        self.nullifier.unwrap().into_repr().write_le(&mut writer)?;
        Ok(())
    }

    /// Encodes the auth path as a one-byte length, then for each node a
    /// direction byte (1 = true, 0 = false) and the node value in
    /// little-endian repr. Panics on unassigned path entries.
    pub fn encode_auth_path<W: Write>(
        mut writer: W,
        auth_path: Vec<Option<(E::Fr, bool)>>,
    ) -> io::Result<()> {
        let path_len = auth_path.len() as u8;
        writer.write(&[path_len])?;
        for el in auth_path.iter() {
            let c = el.unwrap();
            if c.1 {
                writer.write(&[1])?;
            } else {
                writer.write(&[0])?;
            }
            c.0.into_repr().write_le(&mut writer).unwrap();
        }
        Ok(())
    }

    /// Inverse of `encode_auth_path`. Rejects path lengths below 2 and
    /// direction bytes other than 0/1 with `InvalidInput`.
    pub fn decode_auth_path<R: Read>(mut reader: R) -> io::Result<Vec<Option<(E::Fr, bool)>>> {
        let mut byte_buf = vec![0u8; 1];
        let mut el_buf = <E::Fr as PrimeField>::Repr::default();
        let mut auth_path: Vec<Option<(E::Fr, bool)>> = vec![];
        reader.read_exact(&mut byte_buf)?;
        let path_len = byte_buf[0];
        if path_len < 2 {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "invalid path length",
            ));
        }
        for _ in 0..path_len {
            reader.read_exact(&mut byte_buf)?;
            let path_dir = match byte_buf[0] {
                0u8 => false,
                1u8 => true,
                _ => {
                    return Err(io::Error::new(
                        io::ErrorKind::InvalidInput,
                        "invalid path direction",
                    ))
                }
            };
            el_buf.read_le(&mut reader)?;
            let node = E::Fr::from_repr(el_buf)
                .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
            auth_path.push(Some((node, path_dir)));
        }
        Ok(auth_path)
    }
}
/// The RLN circuit: a merkle membership proof, a share/line-equation
/// check, and a nullifier derivation, all over the same Poseidon hasher.
#[derive(Clone)]
pub struct RLNCircuit<E>
where
    E: Engine,
{
    pub inputs: RLNInputs<E>,
    pub hasher: PoseidonCircuit<E>,
}
impl<E> Circuit<E> for RLNCircuit<E>
where
    E: Engine,
{
    /// Synthesizes three constraint groups: (1) merkle membership of the
    /// identity commitment, (2) the line-equation share check
    /// `share_y == a_0 + a_1 * share_x`, and (3) the nullifier check
    /// `nullifier == hash(a_1)`.
    fn synthesize<CS: ConstraintSystem<E>>(self, cs: &mut CS) -> Result<(), SynthesisError> {
        // 1. Part
        // Membership constraints
        // root == merkle_proof(auth_path, preimage_of_leaf)
        let root = num::AllocatedNum::alloc(cs.namespace(|| "root"), || {
            let value = self.inputs.root.clone();
            Ok(*value.get()?)
        })?;
        root.inputize(cs.namespace(|| "root is public"))?;
        let preimage = num::AllocatedNum::alloc(cs.namespace(|| "preimage"), || {
            let value = self.inputs.id_key;
            Ok(*value.get()?)
        })?;
        // identity is a leaf of membership tree
        let identity = self
            .hasher
            .alloc(cs.namespace(|| "identity"), vec![preimage.clone()])?;
        // accumulator up to the root
        let mut acc = identity.clone();
        // ascend the tree
        let auth_path_witness = self.inputs.auth_path.clone();
        for (i, e) in auth_path_witness.into_iter().enumerate() {
            let cs = &mut cs.namespace(|| format!("auth path {}", i));
            let position = boolean::Boolean::from(boolean::AllocatedBit::alloc(
                cs.namespace(|| "position bit"),
                e.map(|e| e.1),
            )?);
            let path_element =
                num::AllocatedNum::alloc(cs.namespace(|| "path element"), || Ok(e.get()?.0))?;
            // Order the pair according to the position bit before hashing.
            let (xr, xl) = num::AllocatedNum::conditionally_reverse(
                cs.namespace(|| "conditional reversal of preimage"),
                &acc,
                &path_element,
                &position,
            )?;
            acc = self
                .hasher
                .alloc(cs.namespace(|| "hash couple"), vec![xl, xr])?;
        }
        // see if it is a member
        cs.enforce(
            || "enforce membership",
            |lc| lc + acc.get_variable(),
            |lc| lc + CS::one(),
            |lc| lc + root.get_variable(),
        );
        // 2. Part
        // Line Equation Constraints
        // a_1 = hash(a_0, epoch)
        // share_y == a_0 + a_1 * share_x
        let epoch = num::AllocatedNum::alloc(cs.namespace(|| "epoch"), || {
            let value = self.inputs.epoch.clone();
            Ok(*value.get()?)
        })?;
        epoch.inputize(cs.namespace(|| "epoch is public"))?;
        let a_0 = preimage.clone();
        // a_1 == h(a_0, epoch)
        let a_1 = self
            .hasher
            .alloc(cs.namespace(|| "a_1"), vec![a_0.clone(), epoch])?;
        let share_x = num::AllocatedNum::alloc(cs.namespace(|| "share x"), || {
            let value = self.inputs.share_x.clone();
            Ok(*value.get()?)
        })?;
        share_x.inputize(cs.namespace(|| "share x is public"))?;
        // constrain the evaluation of the line equation
        let eval = allocate_add_with_coeff(cs.namespace(|| "eval"), &a_1, &share_x, &a_0)?;
        let share_y = num::AllocatedNum::alloc(cs.namespace(|| "share y"), || {
            let value = self.inputs.share_y.clone();
            Ok(*value.get()?)
        })?;
        share_y.inputize(cs.namespace(|| "share y is public"))?;
        // see if share satisfies the line equation
        cs.enforce(
            || "enforce lookup",
            |lc| lc + share_y.get_variable(),
            |lc| lc + CS::one(),
            |lc| lc + eval.get_variable(),
        );
        // 3. Part
        // Nullifier constraints
        // hashing secret twice with epoch ingredient
        // a_1 == hash(a_0, epoch) is already constrained
        // nullifier == hash(a_1)
        let nullifier_calculated = self
            .hasher
            .alloc(cs.namespace(|| "calculated nullifier"), vec![a_1.clone()])?;
        let nullifier = num::AllocatedNum::alloc(cs.namespace(|| "nullifier"), || {
            let value = self.inputs.nullifier.clone();
            Ok(*value.get()?)
        })?;
        nullifier.inputize(cs.namespace(|| "nullifier is public"))?;
        // check if correct nullifier supplied
        cs.enforce(
            || "enforce nullifier",
            |lc| lc + nullifier_calculated.get_variable(),
            |lc| lc + CS::one(),
            |lc| lc + nullifier.get_variable(),
        );
        Ok(())
    }
}
#[cfg(test)]
mod test {
use super::RLNInputs;
use crate::circuit::bench;
use crate::poseidon::PoseidonParams;
use sapling_crypto::bellman::pairing::bls12_381::Bls12;
use sapling_crypto::bellman::pairing::bn256::Bn256;
use sapling_crypto::bellman::pairing::Engine;
    // One benchmark configuration: tree depth plus Poseidon parameters.
    struct TestSuite<E: Engine> {
        merkle_depth: usize,
        poseidon_parameters: PoseidonParams<E>,
    }
    // Benchmark matrix: merkle depths 3/24/32 with PoseidonParams
    // (8, 55, 3, ...) and depths 16/24/32 with (8, 33, 3, ...).
    fn cases<E: Engine>() -> Vec<TestSuite<E>> {
        vec![
            TestSuite {
                merkle_depth: 3,
                poseidon_parameters: PoseidonParams::new(8, 55, 3, None, None, None),
            },
            TestSuite {
                merkle_depth: 24,
                poseidon_parameters: PoseidonParams::new(8, 55, 3, None, None, None),
            },
            TestSuite {
                merkle_depth: 32,
                poseidon_parameters: PoseidonParams::new(8, 55, 3, None, None, None),
            },
            TestSuite {
                merkle_depth: 16,
                poseidon_parameters: PoseidonParams::new(8, 33, 3, None, None, None),
            },
            TestSuite {
                merkle_depth: 24,
                poseidon_parameters: PoseidonParams::new(8, 33, 3, None, None, None),
            },
            TestSuite {
                merkle_depth: 32,
                poseidon_parameters: PoseidonParams::new(8, 33, 3, None, None, None),
            },
        ]
    }
#[test]
fn test_rln_bn() {
use sapling_crypto::bellman::pairing::bn256::Bn256;
let cases = cases::<Bn256>();
for case in cases.iter() {
let rln_test = bench::RLNTest::<Bn256>::new(
case.merkle_depth,
Some(case.poseidon_parameters.clone()),
);
let num_constraints = rln_test.synthesize();
let result = rln_test.run_prover_bench();
println!(
"bn256, t: {}, rf: {}, rp: {}, merkle depth: {}",
case.poseidon_parameters.width(),
case.poseidon_parameters.full_round_half_len() * 2,
case.poseidon_parameters.partial_round_len(),
case.merkle_depth,
);
println!("number of constatins:\t{}", num_constraints);
println!("prover key size:\t{}", result.prover_key_size);
println!("prover time:\t{}", result.prover_time);
}
}
#[test]
fn test_input_serialization() {
use sapling_crypto::bellman::pairing::bn256::{Bn256, Fr};
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
let share_x = Fr::from_str("1").unwrap();
let share_y = Fr::from_str("2").unwrap();
let epoch = Fr::from_str("3").unwrap();
let nullifier = Fr::from_str("4").unwrap();
let root = Fr::from_str("5").unwrap();
let id_key = Fr::from_str("6").unwrap();
let auth_path = vec![
Some((Fr::from_str("20").unwrap(), false)),
Some((Fr::from_str("21").unwrap(), true)),
Some((Fr::from_str("22").unwrap(), true)),
Some((Fr::from_str("23").unwrap(), false)),
];
let input0 = RLNInputs::<Bn256> {
share_x: Some(share_x),
share_y: Some(share_y),
epoch: Some(epoch),
nullifier: Some(nullifier),
root: Some(root),
id_key: Some(id_key),
auth_path,
};
let mut raw_inputs: Vec<u8> = Vec::new();
input0.write(&mut raw_inputs).unwrap();
let mut reader = raw_inputs.as_slice();
let input1 = RLNInputs::<Bn256>::read(&mut reader).unwrap();
assert_eq!(input0.share_x, input1.share_x);
assert_eq!(input0.share_y, input1.share_y);
assert_eq!(input0.epoch, input1.epoch);
assert_eq!(input0.nullifier, input1.nullifier);
assert_eq!(input0.root, input1.root);
assert_eq!(input0.id_key, input1.id_key);
assert_eq!(input0.auth_path, input1.auth_path);
}
}

View File

@ -1,375 +0,0 @@
use crate::{circuit::rln, public::RLN};
use bellman::pairing::bn256::Bn256;
use std::slice;
/// Borrowed (pointer, length) view of a byte slice, used to pass data across
/// the C FFI boundary. `#[repr(C)]` so the layout matches the C-side struct.
/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
    pub ptr: *const u8,
    pub len: usize,
}

impl From<&[u8]> for Buffer {
    fn from(src: &[u8]) -> Self {
        Self {
            // Bug fix: the original `&src[0] as *const u8` panicked on an
            // empty slice; `as_ptr()` is well-defined (non-null, possibly
            // dangling) even when `src` is empty.
            ptr: src.as_ptr(),
            len: src.len(),
        }
    }
}

impl<'a> From<&Buffer> for &'a [u8] {
    fn from(src: &Buffer) -> &'a [u8] {
        // SAFETY: the caller must guarantee `ptr` points to `len` readable
        // bytes that outlive the returned slice — note the unconstrained 'a.
        unsafe { slice::from_raw_parts(src.ptr, src.len) }
    }
}
/// Authentication data handed in over the FFI: a pointer to the member's
/// serialized secret key plus the member's leaf index in the membership tree.
#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Auth {
    // Raw pointer to a Buffer holding the serialized secret key
    // (presumably a 32-byte LE field element, as read by
    // RLN::generate_proof — confirm with callers).
    secret_buffer: *const Buffer,
    pub index: usize,
}
impl Auth {
    /// Returns the secret key bytes behind `secret_buffer`.
    /// NOTE(review): dereferences a raw pointer — the caller must pass a
    /// valid, live Buffer; nothing is checked here.
    fn get_secret(&self) -> &[u8] {
        let secret_data = <&[u8]>::from(unsafe { &*self.secret_buffer });
        secret_data
    }
}
/// FFI: constructs an RLN instance from pre-generated, serialized circuit
/// parameters. On success writes a heap-allocated pointer into `ctx` and
/// returns true; returns false (leaving `ctx` untouched) on a parse failure.
/// The caller owns the pointer (produced via `Box::into_raw`).
#[no_mangle]
pub extern "C" fn new_circuit_from_params(
    merkle_depth: usize,
    parameters_buffer: *const Buffer,
    ctx: *mut *mut RLN<Bn256>,
) -> bool {
    // NOTE(review): assumes `parameters_buffer` and `ctx` are valid pointers.
    let buffer = <&[u8]>::from(unsafe { &*parameters_buffer });
    let rln = match RLN::<Bn256>::new_with_raw_params(merkle_depth, buffer, None) {
        Ok(rln) => rln,
        Err(_) => return false,
    };
    unsafe { *ctx = Box::into_raw(Box::new(rln)) };
    true
}
/// FFI: appends a new member public key (serialized field element) at the
/// next free leaf of the membership tree. Returns true on success.
#[no_mangle]
pub extern "C" fn update_next_member(ctx: *mut RLN<Bn256>, input_buffer: *const Buffer) -> bool {
    let rln = unsafe { &mut *ctx };
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    // Collapse the match-to-bool into `is_ok()`; same true/false mapping.
    rln.update_next_member(input_data).is_ok()
}
/// FFI: deletes the member at `index` (its leaf is zeroed out).
/// Returns true on success.
#[no_mangle]
pub extern "C" fn delete_member(ctx: *mut RLN<Bn256>, index: usize) -> bool {
    let rln = unsafe { &mut *ctx };
    // Equivalent to matching Ok => true / Err => false.
    rln.delete_member(index).is_ok()
}
/// FFI: generates an RLN proof for a serialized signal.
/// * `input_buffer` — serialized RLNSignal (see RLNSignal::read)
/// * `auth` — member secret key and leaf index
/// * on success `output_buffer` receives the serialized proof bytes.
/// Returns false if proof generation fails.
#[no_mangle]
pub extern "C" fn generate_proof(
    ctx: *const RLN<Bn256>,
    input_buffer: *const Buffer,
    auth: *const Auth,
    output_buffer: *mut Buffer,
) -> bool {
    let rln = unsafe { &*ctx };
    let auth = unsafe { &*auth };
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    let mut output_data: Vec<u8> = Vec::new();
    match rln.generate_proof(input_data, auth.get_secret(), auth.index, &mut output_data) {
        Ok(proof_data) => proof_data,
        Err(_) => return false,
    };
    // Hand the allocation over to the caller by leaking the Vec.
    // NOTE(review): the capacity is lost in the (ptr, len) Buffer, so the
    // memory can never be reclaimed correctly — leaked by design.
    unsafe { *output_buffer = Buffer::from(&output_data[..]) };
    std::mem::forget(output_data);
    true
}
/// FFI: verifies a serialized proof produced by `generate_proof`.
/// Writes a C-style status into `result_ptr`: 0 when the proof verifies,
/// 1 when it does not. Returns false only when verification itself errored
/// (e.g. malformed proof bytes), in which case `result_ptr` is not written.
#[no_mangle]
pub extern "C" fn verify(
    ctx: *const RLN<Bn256>,
    proof_buffer: *mut Buffer,
    result_ptr: *mut u32,
) -> bool {
    let rln = unsafe { &*ctx };
    // NOTE(review): `proof_buffer` is only read; *const would suffice.
    let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
    if match rln.verify(proof_data) {
        Ok(verified) => verified,
        Err(_) => return false,
    } {
        unsafe { *result_ptr = 0 };
    } else {
        unsafe { *result_ptr = 1 };
    };
    true
}
/// FFI: Poseidon-hashes `input_len` serialized field elements from
/// `inputs_buffer` and writes the serialized digest into `output_buffer`.
/// Returns false on a deserialization/hash error.
#[no_mangle]
pub extern "C" fn hash(
    ctx: *const RLN<Bn256>,
    inputs_buffer: *const Buffer,
    input_len: usize,
    output_buffer: *mut Buffer,
) -> bool {
    let rln = unsafe { &*ctx };
    let input_data = <&[u8]>::from(unsafe { &*inputs_buffer });
    let mut output_data: Vec<u8> = Vec::new();
    match rln.hash(input_data, input_len, &mut output_data) {
        Ok(output_data) => output_data,
        Err(_) => return false,
    };
    // Ownership of the digest bytes is handed to the caller (see
    // `generate_proof` for the same leak-by-design pattern).
    unsafe { *output_buffer = Buffer::from(&output_data[..]) };
    std::mem::forget(output_data);
    true
}
/// FFI: generates a (secret, public) key pair, serialized as
/// |secret<32>|public<32>| into `keypair_buffer`. Returns true on success;
/// the backing allocation is handed over to the caller.
#[no_mangle]
pub extern "C" fn key_gen(ctx: *const RLN<Bn256>, keypair_buffer: *mut Buffer) -> bool {
    let rln = unsafe { &*ctx };
    let mut output_data: Vec<u8> = Vec::new();
    // Early-return guard instead of the match-and-discard form.
    if rln.key_gen(&mut output_data).is_err() {
        return false;
    }
    unsafe { *keypair_buffer = Buffer::from(&output_data[..]) };
    std::mem::forget(output_data);
    true
}
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
use std::io::{self, Read, Write};
#[cfg(test)]
mod tests {
    use crate::{circuit::bench, public::RLNSignal};
    use crate::{poseidon::PoseidonParams, public};
    use bellman::pairing::bn256::{Bn256, Fr};
    use rand::{Rand, SeedableRng, XorShiftRng};

    use super::*;
    use std::mem::MaybeUninit;

    /// Merkle tree depth shared by every FFI test.
    fn merkle_depth() -> usize {
        3usize
    }
    /// Leaf index where the test member's public key is registered.
    fn index() -> usize {
        2usize
    }
    /// Builds the bench harness used to run the setup and export parameters.
    fn rln_test() -> bench::RLNTest<Bn256> {
        let merkle_depth = merkle_depth();
        let poseidon_params = PoseidonParams::<Bn256>::new(8, 55, 3, None, None, None);
        let rln_test = bench::RLNTest::<Bn256>::new(merkle_depth, Some(poseidon_params));
        rln_test
    }
    /// Feeds serialized circuit parameters through the C constructor and
    /// returns the raw RLN pointer exactly as a C caller would hold it.
    fn rln_pointer(circuit_parameters: Vec<u8>) -> MaybeUninit<*mut RLN<Bn256>> {
        // restore this new circuit through the FFI bindings
        let merkle_depth = merkle_depth();
        let circuit_parameters_buffer = &Buffer::from(circuit_parameters.as_ref());
        let mut rln_pointer = MaybeUninit::<*mut RLN<Bn256>>::uninit();
        let success = new_circuit_from_params(
            merkle_depth,
            circuit_parameters_buffer,
            rln_pointer.as_mut_ptr(),
        );
        assert!(success, "cannot init rln instance");
        rln_pointer
    }
    /// End-to-end FFI flow: key generation, member registration, proof
    /// generation and verification — then again after deleting an unrelated
    /// member to check the proof tracks the updated root.
    #[test]
    fn test_proof_ffi() {
        let mut rng = XorShiftRng::from_seed([0x3dbe6258, 0x8d313d76, 0x3237db17, 0xe5bc0654]);
        // setup new rln instance
        let rln_test = rln_test();
        let mut circuit_parameters: Vec<u8> = Vec::new();
        rln_test
            .export_circuit_parameters(&mut circuit_parameters)
            .unwrap();
        let rln_pointer = rln_pointer(circuit_parameters);
        let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
        let index = index();
        // generate new key pair
        let mut keypair_buffer = MaybeUninit::<Buffer>::uninit();
        let success = key_gen(rln_pointer, keypair_buffer.as_mut_ptr());
        assert!(success, "key generation failed");
        let keypair_buffer = unsafe { keypair_buffer.assume_init() };
        let mut keypair_data = <&[u8]>::from(&keypair_buffer);
        // read keypair, serialized as |secret<32>|public<32>|
        let mut buf = <Fr as PrimeField>::Repr::default();
        buf.read_le(&mut keypair_data).unwrap();
        let id_key = Fr::from_repr(buf).unwrap();
        buf.read_le(&mut keypair_data).unwrap();
        let public_key = Fr::from_repr(buf).unwrap();
        // insert members: random fillers, with our public key at `index`
        for i in 0..index + 1 {
            let new_member: Fr;
            if i == index {
                new_member = public_key;
            } else {
                new_member = Fr::rand(&mut rng);
            }
            let mut input_data: Vec<u8> = Vec::new();
            new_member.into_repr().write_le(&mut input_data).unwrap();
            let input_buffer = &Buffer::from(input_data.as_ref());
            let success = update_next_member(rln_pointer, input_buffer);
            assert!(success, "update with new pubkey failed");
        }
        let mut gen_proof_and_verify = |rln_pointer: *const RLN<Bn256>| {
            // create signal
            let epoch = Fr::rand(&mut rng);
            let signal_hash = Fr::rand(&mut rng);
            let inputs = RLNSignal::<Bn256> {
                epoch: epoch,
                hash: signal_hash,
            };
            // serialize signal
            let mut inputs_data: Vec<u8> = Vec::new();
            inputs.write(&mut inputs_data).unwrap();
            let inputs_buffer = &Buffer::from(inputs_data.as_ref());
            // construct auth object (raw pointer, as a C caller would)
            let mut secret_data: Vec<u8> = Vec::new();
            id_key.into_repr().write_le(&mut secret_data).unwrap();
            let secret_buffer = &Buffer::from(secret_data.as_ref());
            let auth = &Auth {
                secret_buffer,
                index,
            } as *const Auth;
            // generate proof
            let mut proof_buffer = MaybeUninit::<Buffer>::uninit();
            let success =
                generate_proof(rln_pointer, inputs_buffer, auth, proof_buffer.as_mut_ptr());
            assert!(success, "proof generation failed");
            let mut proof_buffer = unsafe { proof_buffer.assume_init() };
            // verify proof (0 == verified; see `verify`)
            let mut result = 0u32;
            let result_ptr = &mut result as *mut u32;
            let success = verify(rln_pointer, &mut proof_buffer, result_ptr);
            assert!(success, "verification failed");
            assert_eq!(0, result);
        };
        gen_proof_and_verify(rln_pointer);
        // delete 0th member; proofs for the member at `index` must still verify
        let success = delete_member(rln_pointer, 0);
        assert!(success, "deletion failed");
        // gen proof & verify once more
        gen_proof_and_verify(rln_pointer);
    }
    /// Checks the FFI `hash` export against a direct Poseidon evaluation.
    #[test]
    fn test_hash_ffi() {
        let rln_test = rln_test();
        let mut circuit_parameters: Vec<u8> = Vec::new();
        rln_test
            .export_circuit_parameters(&mut circuit_parameters)
            .unwrap();
        let hasher = rln_test.hasher();
        let rln_pointer = rln_pointer(circuit_parameters);
        let rln_pointer = unsafe { &*rln_pointer.assume_init() };
        let mut input_data: Vec<u8> = Vec::new();
        let inputs: Vec<Fr> = ["1", "2"]
            .iter()
            .map(|e| Fr::from_str(e).unwrap())
            .collect();
        inputs.iter().for_each(|e| {
            e.into_repr().write_le(&mut input_data).unwrap();
        });
        let input_buffer = &Buffer::from(input_data.as_ref());
        let input_len: usize = 2;
        let expected = hasher.hash(inputs);
        let mut expected_data: Vec<u8> = Vec::new();
        expected.into_repr().write_le(&mut expected_data).unwrap();
        let mut result_buffer = MaybeUninit::<Buffer>::uninit();
        let success = hash(
            rln_pointer,
            input_buffer,
            input_len,
            result_buffer.as_mut_ptr(),
        );
        assert!(success, "hash ffi call failed");
        let result_buffer = unsafe { result_buffer.assume_init() };
        let result_data = <&[u8]>::from(&result_buffer);
        assert_eq!(expected_data.as_slice(), result_data);
    }
    /// Checks that the FFI keypair satisfies public == poseidon(secret).
    #[test]
    fn test_keygen_ffi() {
        let rln_test = rln_test();
        let mut circuit_parameters: Vec<u8> = Vec::new();
        rln_test
            .export_circuit_parameters(&mut circuit_parameters)
            .unwrap();
        let hasher = rln_test.hasher();
        let rln_pointer = rln_pointer(circuit_parameters);
        let rln_pointer = unsafe { &*rln_pointer.assume_init() };
        let mut keypair_buffer = MaybeUninit::<Buffer>::uninit();
        let success = key_gen(rln_pointer, keypair_buffer.as_mut_ptr());
        assert!(success, "proof generation failed");
        let keypair_buffer = unsafe { keypair_buffer.assume_init() };
        let mut keypair_data = <&[u8]>::from(&keypair_buffer);
        let mut buf = <Fr as PrimeField>::Repr::default();
        buf.read_le(&mut keypair_data).unwrap();
        let secret = Fr::from_repr(buf).unwrap();
        buf.read_le(&mut keypair_data).unwrap();
        let public = Fr::from_repr(buf).unwrap();
        let expected_public: Fr = hasher.hash(vec![secret]);
        assert_eq!(public, expected_public);
    }
    /// Loads circuit parameters from a local file; #[ignore]d because the
    /// parameters file is not checked into the repository.
    #[test]
    #[ignore]
    fn test_parameters_from_file() {
        use hex;
        use std::fs;
        let data = fs::read("./parameters.key").expect("Unable to read file");
        let merkle_depth = merkle_depth();
        let circuit_parameters_buffer = &Buffer::from(data.as_ref());
        let mut rln_pointer = MaybeUninit::<*mut RLN<Bn256>>::uninit();
        let success = new_circuit_from_params(
            merkle_depth,
            circuit_parameters_buffer,
            rln_pointer.as_mut_ptr(),
        );
        assert!(success, "creating failed");
    }
}

View File

@ -1,14 +0,0 @@
// Crate-wide lints: parts of the API are only reached through the FFI/wasm
// surfaces, so dead-code and unused-import warnings are silenced wholesale.
#![allow(dead_code)]
#![allow(unused_imports)]
pub mod circuit;
pub mod merkle;
pub mod poseidon;
pub mod public;
mod utils;
// Native targets get the C FFI bindings ...
#[cfg(not(target_arch = "wasm32"))]
pub mod ffi;
// ... wasm targets get the wasm-bindgen bindings instead.
#[cfg(target_arch = "wasm32")]
mod wasm;

View File

@ -1,229 +0,0 @@
use crate::poseidon::{Poseidon as Hasher, PoseidonParams};
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
use std::io::{self, Error, ErrorKind};
use std::{collections::HashMap, hash::Hash};
/// Tree synchronization mode (currently unused scaffolding — no references
/// elsewhere in this module).
enum SyncMode {
    // Typo fix: variant was misspelled `Bootstarp`. The enum is private and
    // unreferenced, so the rename cannot break callers.
    Bootstrap,
    Maintain,
}
/// Append-only wrapper around `MerkleTree` that tracks the next free leaf
/// slot, as required by the RLN membership set.
pub struct IncrementalMerkleTree<E>
where
    E: Engine,
{
    /// Index of the next unused leaf (== number of insertions so far).
    pub current_index: usize,
    merkle_tree: MerkleTree<E>,
}
impl<E> IncrementalMerkleTree<E>
where
E: Engine,
{
pub fn empty(hasher: Hasher<E>, depth: usize) -> Self {
let mut zero: Vec<E::Fr> = Vec::with_capacity(depth + 1);
zero.push(E::Fr::from_str("0").unwrap());
for i in 0..depth {
zero.push(hasher.hash([zero[i]; 2].to_vec()));
}
zero.reverse();
let merkle_tree = MerkleTree {
hasher: hasher,
zero: zero.clone(),
depth: depth,
nodes: HashMap::new(),
};
let current_index: usize = 0;
IncrementalMerkleTree {
current_index,
merkle_tree,
}
}
pub fn update_next(&mut self, leaf: E::Fr) -> io::Result<()> {
self.merkle_tree.update(self.current_index, leaf)?;
self.current_index += 1;
Ok(())
}
pub fn delete(&mut self, index: usize) -> io::Result<()> {
let zero = E::Fr::from_str("0").unwrap();
self.merkle_tree.update(index, zero)?;
Ok(())
}
pub fn get_witness(&self, index: usize) -> io::Result<Vec<(E::Fr, bool)>> {
if index >= self.current_index {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds incremental index",
));
}
self.merkle_tree.get_witness(index)
}
pub fn hash(&self, inputs: Vec<E::Fr>) -> E::Fr {
self.merkle_tree.hasher.hash(inputs)
}
pub fn check_inclusion(
&self,
witness: Vec<(E::Fr, bool)>,
leaf_index: usize,
) -> io::Result<bool> {
if leaf_index >= self.current_index {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds incremental index",
));
}
self.merkle_tree.check_inclusion(witness, leaf_index)
}
pub fn get_root(&self) -> E::Fr {
return self.merkle_tree.get_root();
}
}
/// Sparse fixed-depth Merkle tree over a Poseidon hash.
/// Only explicitly written nodes are stored; any absent node defaults to the
/// precomputed hash of an all-zero subtree at its level.
pub struct MerkleTree<E>
where
    E: Engine,
{
    pub hasher: Hasher<E>,
    pub depth: usize,
    /// zero[d] = root of an all-zero subtree whose root sits at depth d
    /// (zero[depth] is the zero leaf, zero[0] the all-zero root).
    zero: Vec<E::Fr>,
    /// Explicitly-set nodes keyed by (depth, index-within-level).
    nodes: HashMap<(usize, usize), E::Fr>,
}
impl<E> MerkleTree<E>
where
    E: Engine,
{
    /// Creates an empty tree: precomputes the zero-subtree hash for every
    /// level so untouched nodes never need to be stored.
    pub fn empty(hasher: Hasher<E>, depth: usize) -> Self {
        // Build zero hashes bottom-up, then reverse so the vector is indexed
        // by depth (0 = root level, depth = leaf level).
        let mut zero: Vec<E::Fr> = Vec::with_capacity(depth + 1);
        zero.push(E::Fr::from_str("0").unwrap());
        for i in 0..depth {
            zero.push(hasher.hash([zero[i]; 2].to_vec()));
        }
        zero.reverse();
        MerkleTree {
            hasher: hasher,
            zero: zero.clone(),
            depth: depth,
            nodes: HashMap::new(),
        }
    }
    /// Number of leaf slots (2^depth).
    pub fn set_size(&self) -> usize {
        1 << self.depth
    }
    /// Writes `leaf` at `index` and rehashes the path up to the root.
    pub fn update(&mut self, index: usize, leaf: E::Fr) -> io::Result<()> {
        if index >= self.set_size() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "index exceeds set size",
            ));
        }
        self.nodes.insert((self.depth, index), leaf);
        self.recalculate_from(index);
        Ok(())
    }
    /// Folds `witness` over the stored leaf at `index` and compares the
    /// result with the current root. The bool in each entry says whether the
    /// sibling is the right-hand child (true => sibling on the right).
    pub fn check_inclusion(&self, witness: Vec<(E::Fr, bool)>, index: usize) -> io::Result<bool> {
        if index >= self.set_size() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "index exceeds set size",
            ));
        }
        let mut acc = self.get_node(self.depth, index);
        for w in witness.into_iter() {
            if w.1 {
                acc = self.hasher.hash(vec![acc, w.0]);
            } else {
                acc = self.hasher.hash(vec![w.0, acc]);
            }
        }
        Ok(acc.eq(&self.get_root()))
    }
    /// Root node (depth 0, index 0).
    pub fn get_root(&self) -> E::Fr {
        return self.get_node(0, 0);
    }
    /// Authentication path for the leaf at `index`: one (sibling, is-right)
    /// pair per level, leaf-adjacent first.
    pub fn get_witness(&self, index: usize) -> io::Result<Vec<(E::Fr, bool)>> {
        if index >= self.set_size() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "index exceeds set size",
            ));
        }
        let mut witness = Vec::<(E::Fr, bool)>::with_capacity(self.depth);
        let mut i = index;
        let mut depth = self.depth;
        loop {
            // i ^= 1 flips to the sibling; the low bit of the sibling index
            // tells which side it sits on.
            i ^= 1;
            witness.push((self.get_node(depth, i), (i & 1 == 1)));
            i >>= 1;
            depth -= 1;
            if depth == 0 {
                break;
            }
        }
        assert_eq!(i, 0);
        Ok(witness)
    }
    /// Node at (depth, index), falling back to the zero-subtree hash for
    /// nodes that were never written.
    fn get_node(&self, depth: usize, index: usize) -> E::Fr {
        let node = *self
            .nodes
            .get(&(depth, index))
            .unwrap_or_else(|| &self.zero[depth]);
        node
    }
    fn get_leaf(&self, index: usize) -> E::Fr {
        self.get_node(self.depth, index)
    }
    /// Hashes the (left, right) pair containing `index` at `depth`.
    fn hash_couple(&mut self, depth: usize, index: usize) -> E::Fr {
        // Clear the low bit to get the left sibling's index.
        let b = index & !1;
        self.hasher
            .hash([self.get_node(depth, b), self.get_node(depth, b + 1)].to_vec())
    }
    /// Rehashes every ancestor of leaf `index` up to (and including) the root.
    fn recalculate_from(&mut self, index: usize) {
        let mut i = index;
        let mut depth = self.depth;
        loop {
            let h = self.hash_couple(depth, i);
            i >>= 1;
            depth -= 1;
            self.nodes.insert((depth, i), h);
            if depth == 0 {
                break;
            }
        }
        assert_eq!(depth, 0);
        assert_eq!(i, 0);
    }
}
#[test]
fn test_merkle_set() {
    use sapling_crypto::bellman::pairing::bn256::{Bn256, Fr, FrRepr};
    // Place poseidon("0") at leaf 6 of a depth-3 tree, then check that the
    // generated witness proves inclusion against the recomputed root.
    let params = PoseidonParams::<Bn256>::new(8, 55, 3, None, None, None);
    let hasher = Hasher::new(params);
    let mut set = MerkleTree::empty(hasher.clone(), 3);
    let data: Vec<Fr> = (0..8)
        .map(|s| Fr::from_str(&format!("{}", s)).unwrap())
        .collect();
    let leaf_index = 6;
    let leaf = hasher.hash(vec![data[0]]);
    set.update(leaf_index, leaf).unwrap();
    let witness = set.get_witness(leaf_index).unwrap();
    assert!(set.check_inclusion(witness, leaf_index).unwrap());
}

View File

@ -1,227 +0,0 @@
use blake2::{Blake2s, Digest};
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
/// Poseidon permutation parameters.
#[derive(Clone)]
pub struct PoseidonParams<E: Engine> {
    /// Total number of full rounds (half run before, half after the partials).
    rf: usize,
    /// Number of partial rounds.
    rp: usize,
    /// State width in field elements.
    t: usize,
    /// One constant per round (rf + rp entries; see `add_round_constants`).
    round_constants: Vec<E::Fr>,
    /// t x t MDS matrix stored row-major.
    mds_matrix: Vec<E::Fr>,
}
/// Stateless Poseidon hasher driven by a `PoseidonParams` configuration.
#[derive(Clone)]
pub struct Poseidon<E: Engine> {
    params: PoseidonParams<E>,
}
impl<E: Engine> PoseidonParams<E> {
    /// Builds a parameter set. Missing round constants / MDS matrix are
    /// derived deterministically from `seed` (empty by default) via Blake2s
    /// with the personas "drlnhdsc" / "drlnhdsm".
    pub fn new(
        rf: usize,
        rp: usize,
        t: usize,
        round_constants: Option<Vec<E::Fr>>,
        mds_matrix: Option<Vec<E::Fr>>,
        seed: Option<Vec<u8>>,
    ) -> PoseidonParams<E> {
        let seed = match seed {
            Some(seed) => seed,
            None => b"".to_vec(),
        };
        let _round_constants = match round_constants {
            Some(round_constants) => round_constants,
            None => PoseidonParams::<E>::generate_constants(b"drlnhdsc", seed.clone(), rf + rp),
        };
        // Exactly one constant per round is expected (not one per element).
        assert_eq!(rf + rp, _round_constants.len());
        let _mds_matrix = match mds_matrix {
            Some(mds_matrix) => mds_matrix,
            None => PoseidonParams::<E>::generate_mds_matrix(b"drlnhdsm", seed.clone(), t),
        };
        PoseidonParams {
            rf,
            rp,
            t,
            round_constants: _round_constants,
            mds_matrix: _mds_matrix,
        }
    }
    /// State width t.
    pub fn width(&self) -> usize {
        return self.t;
    }
    /// Number of partial rounds rp.
    pub fn partial_round_len(&self) -> usize {
        return self.rp;
    }
    /// Half of the full rounds (the count run on each side of the partials).
    pub fn full_round_half_len(&self) -> usize {
        return self.rf / 2;
    }
    /// rf + rp.
    pub fn total_rounds(&self) -> usize {
        return self.rf + self.rp;
    }
    /// The single constant applied in `round`.
    pub fn round_constant(&self, round: usize) -> E::Fr {
        return self.round_constants[round];
    }
    /// Row `i` of the MDS matrix.
    pub fn mds_matrix_row(&self, i: usize) -> Vec<E::Fr> {
        let w = self.width();
        self.mds_matrix[i * w..(i + 1) * w].to_vec()
    }
    /// Whole MDS matrix, row-major.
    pub fn mds_matrix(&self) -> Vec<E::Fr> {
        self.mds_matrix.clone()
    }
    /// Cauchy-style construction: entry (i, j) = (x_i + y_j)^-1 over 2t
    /// deterministically generated field elements.
    /// NOTE(review): `inverse().unwrap()` panics if x_i + y_j == 0 —
    /// astronomically unlikely for hash-derived elements, but not impossible.
    pub fn generate_mds_matrix(persona: &[u8; 8], seed: Vec<u8>, t: usize) -> Vec<E::Fr> {
        let v: Vec<E::Fr> = PoseidonParams::<E>::generate_constants(persona, seed, t * 2);
        let mut matrix: Vec<E::Fr> = Vec::with_capacity(t * t);
        for i in 0..t {
            for j in 0..t {
                let mut tmp = v[i];
                tmp.add_assign(&v[t + j]);
                let entry = tmp.inverse().unwrap();
                matrix.insert((i * t) + j, entry);
            }
        }
        matrix
    }
    /// Deterministic constant generation: iterate Blake2s over
    /// persona || state, keeping only outputs that decode to canonical field
    /// elements (rejection sampling).
    pub fn generate_constants(persona: &[u8; 8], seed: Vec<u8>, len: usize) -> Vec<E::Fr> {
        let mut constants: Vec<E::Fr> = Vec::new();
        let mut source = seed.clone();
        loop {
            let mut hasher = Blake2s::new();
            hasher.input(persona);
            hasher.input(source);
            // Feed each digest back in as the next source.
            source = hasher.result().to_vec();
            let mut candidate_repr = <E::Fr as PrimeField>::Repr::default();
            candidate_repr.read_le(&source[..]).unwrap();
            if let Ok(candidate) = E::Fr::from_repr(candidate_repr) {
                constants.push(candidate);
                if constants.len() == len {
                    break;
                }
            }
        }
        constants
    }
}
impl<E: Engine> Poseidon<E> {
    pub fn new(params: PoseidonParams<E>) -> Poseidon<E> {
        Poseidon { params }
    }
    /// Hashes `inputs` by zero-padding the state to width t and running the
    /// full permutation; the first state element is the digest.
    /// NOTE(review): assumes inputs.len() <= t — `resize` would otherwise
    /// silently truncate the input. TODO confirm callers uphold this.
    pub fn hash(&self, inputs: Vec<E::Fr>) -> E::Fr {
        let mut state = inputs.clone();
        state.resize(self.t(), E::Fr::zero());
        let mut round_counter: usize = 0;
        loop {
            self.round(&mut state, round_counter);
            round_counter += 1;
            if round_counter == self.params.total_rounds() {
                break;
            }
        }
        state[0]
    }
    fn t(&self) -> usize {
        self.params.t
    }
    /// Round schedule: rf/2 full rounds, rp partial rounds, rf/2 full rounds;
    /// the very last full round skips the MDS mix.
    fn round(&self, state: &mut Vec<E::Fr>, round: usize) {
        let a1 = self.params.full_round_half_len();
        let a2 = a1 + self.params.partial_round_len();
        let a3 = self.params.total_rounds();
        if round < a1 {
            self.full_round(state, round);
        } else if round >= a1 && round < a2 {
            self.partial_round(state, round);
        } else if round >= a2 && round < a3 {
            if round == a3 - 1 {
                self.full_round_last(state);
            } else {
                self.full_round(state, round);
            }
        } else {
            panic!("should not be here")
        }
    }
    /// Full round: add constant, S-box every element, MDS mix.
    fn full_round(&self, state: &mut Vec<E::Fr>, round: usize) {
        self.add_round_constants(state, round);
        self.apply_quintic_sbox(state, true);
        self.mul_mds_matrix(state);
    }
    /// Final full round: no MDS mix afterwards.
    fn full_round_last(&self, state: &mut Vec<E::Fr>) {
        let last_round = self.params.total_rounds() - 1;
        self.add_round_constants(state, last_round);
        self.apply_quintic_sbox(state, true);
    }
    /// Partial round: add constant, S-box the first element only, MDS mix.
    fn partial_round(&self, state: &mut Vec<E::Fr>, round: usize) {
        self.add_round_constants(state, round);
        self.apply_quintic_sbox(state, false);
        self.mul_mds_matrix(state);
    }
    /// Adds this round's constant to every state element.
    /// NOTE(review): the reference Poseidon spec uses a distinct constant per
    /// state element; this implementation reuses one constant per round —
    /// presumably intentional here, verify against the paired circuit code.
    fn add_round_constants(&self, state: &mut Vec<E::Fr>, round: usize) {
        for (_, b) in state.iter_mut().enumerate() {
            let c = self.params.round_constants[round];
            b.add_assign(&c);
        }
    }
    /// x -> x^5 S-box. With `full == false` the `break` exits after index 0,
    /// i.e. partial rounds S-box only the first state element.
    fn apply_quintic_sbox(&self, state: &mut Vec<E::Fr>, full: bool) {
        for s in state.iter_mut() {
            let mut b = s.clone();
            b.square();
            b.square();
            s.mul_assign(&b);
            if !full {
                break;
            }
        }
    }
    /// state <- MDS * state (row-major matrix-vector product).
    fn mul_mds_matrix(&self, state: &mut Vec<E::Fr>) {
        let w = self.params.t;
        let mut new_state = vec![E::Fr::zero(); w];
        for (i, ns) in new_state.iter_mut().enumerate() {
            for (j, s) in state.iter().enumerate() {
                let mut tmp = s.clone();
                tmp.mul_assign(&self.params.mds_matrix[i * w + j]);
                ns.add_assign(&tmp);
            }
        }
        for (i, ns) in new_state.iter_mut().enumerate() {
            state[i].clone_from(ns);
        }
    }
}
#[test]
fn test_poseidon_hash() {
    use sapling_crypto::bellman::pairing::bn256::{Bn256, Fr};
    // Hashing [0] and [0, 0] must agree: the state is zero-padded to width t,
    // so both calls start the permutation from the same internal state.
    let params = PoseidonParams::<Bn256>::new(8, 55, 3, None, None, None);
    let hasher = Poseidon::<Bn256>::new(params);
    let single: Vec<Fr> = ["0"].iter().map(|e| Fr::from_str(e).unwrap()).collect();
    let double: Vec<Fr> = ["0", "0"]
        .iter()
        .map(|e| Fr::from_str(e).unwrap())
        .collect();
    let r1: Fr = hasher.hash(single);
    let r2: Fr = hasher.hash(double);
    // println!("{:?}", r1);
    assert_eq!(r1, r2, "just to see if internal state resets");
}

View File

@ -1,254 +0,0 @@
use crate::circuit::rln::{RLNCircuit, RLNInputs};
use crate::merkle::MerkleTree;
use crate::poseidon::{Poseidon as PoseidonHasher, PoseidonParams};
use crate::utils::{read_fr, read_uncompressed_proof, write_uncompressed_proof};
use crate::{circuit::poseidon::PoseidonCircuit, merkle::IncrementalMerkleTree};
use bellman::groth16::generate_random_parameters;
use bellman::groth16::{create_proof, prepare_verifying_key, verify_proof};
use bellman::groth16::{create_random_proof, Parameters, Proof};
use bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use bellman::pairing::{CurveAffine, EncodedPoint, Engine};
use bellman::{Circuit, ConstraintSystem, SynthesisError};
use rand::{Rand, SeedableRng, XorShiftRng};
use std::{
io::{self, Error, ErrorKind, Read, Write},
ptr::null,
};
// Rate Limit Nullifier
/// One signal a member emits: the epoch identifier and the signal hash.
#[derive(Clone)]
pub struct RLNSignal<E>
where
    E: Engine,
{
    pub epoch: E::Fr,
    pub hash: E::Fr,
}
impl<E> RLNSignal<E>
where
    E: Engine,
{
    /// Deserializes a signal from |epoch<32>|hash<32>| (little-endian reprs).
    ///
    /// Bug fix: the fields were previously read in the opposite order from
    /// `write` (hash first), so a write/read round-trip swapped `epoch` and
    /// `hash`, and the input no longer matched the documented
    /// |epoch<32>|signal_hash<32>| layout expected by `RLN::generate_proof`.
    pub fn read<R: Read>(mut reader: R) -> io::Result<RLNSignal<E>> {
        let mut buf = <E::Fr as PrimeField>::Repr::default();
        buf.read_le(&mut reader)?;
        let epoch =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        buf.read_le(&mut reader)?;
        let hash =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        Ok(RLNSignal { epoch, hash })
    }
    /// Serializes the signal as |epoch<32>|hash<32>| (little-endian reprs).
    /// Write errors are now propagated with `?` instead of unwrapped.
    pub fn write<W: Write>(&self, mut writer: W) -> io::Result<()> {
        self.epoch.into_repr().write_le(&mut writer)?;
        self.hash.into_repr().write_le(&mut writer)?;
        Ok(())
    }
}
/// Top-level RLN state: Groth16 circuit parameters, the Poseidon
/// configuration, and the incremental membership tree.
pub struct RLN<E>
where
    E: Engine,
{
    circuit_parameters: Parameters<E>,
    poseidon_params: PoseidonParams<E>,
    tree: IncrementalMerkleTree<E>,
}
impl<E> RLN<E>
where
    E: Engine,
{
    /// Default Poseidon configuration: rf = 8, rp = 55, width t = 3.
    fn default_poseidon_params() -> PoseidonParams<E> {
        PoseidonParams::<E>::new(8, 55, 3, None, None, None)
    }
    /// Runs the Groth16 parameter generation (trusted setup) for the circuit.
    /// SECURITY NOTE(review): the RNG seed is hard-coded, so the setup's
    /// toxic waste is reproducible by anyone — fine for tests/benchmarks,
    /// never for production parameters.
    fn new_circuit(merkle_depth: usize, poseidon_params: PoseidonParams<E>) -> Parameters<E> {
        let mut rng = XorShiftRng::from_seed([0x3dbe6258, 0x8d313d76, 0x3237db17, 0xe5bc0654]);
        let inputs = RLNInputs::<E>::empty(merkle_depth);
        let circuit = RLNCircuit::<E> {
            inputs,
            hasher: PoseidonCircuit::new(poseidon_params.clone()),
        };
        generate_random_parameters(circuit, &mut rng).unwrap()
    }
    /// Assembles an RLN instance from already-built circuit parameters and an
    /// empty membership tree.
    fn new_with_params(
        merkle_depth: usize,
        circuit_parameters: Parameters<E>,
        poseidon_params: PoseidonParams<E>,
    ) -> RLN<E> {
        let hasher = PoseidonHasher::new(poseidon_params.clone());
        let tree = IncrementalMerkleTree::empty(hasher, merkle_depth);
        RLN {
            circuit_parameters,
            poseidon_params,
            tree,
        }
    }
    /// Creates an instance, running the setup in-process.
    pub fn new(merkle_depth: usize, poseidon_params: Option<PoseidonParams<E>>) -> RLN<E> {
        let poseidon_params = match poseidon_params {
            Some(params) => params,
            None => Self::default_poseidon_params(),
        };
        let circuit_parameters = Self::new_circuit(merkle_depth, poseidon_params.clone());
        Self::new_with_params(merkle_depth, circuit_parameters, poseidon_params)
    }
    /// Creates an instance from serialized circuit parameters (checked read).
    pub fn new_with_raw_params<R: Read>(
        merkle_depth: usize,
        raw_circuit_parameters: R,
        poseidon_params: Option<PoseidonParams<E>>,
    ) -> io::Result<RLN<E>> {
        let circuit_parameters = Parameters::<E>::read(raw_circuit_parameters, true)?;
        let poseidon_params = match poseidon_params {
            Some(params) => params,
            None => Self::default_poseidon_params(),
        };
        Ok(Self::new_with_params(
            merkle_depth,
            circuit_parameters,
            poseidon_params,
        ))
    }
    //// inserts new member with given public key
    /// * `public_key_data` is a 32 scalar field element in 32 bytes
    pub fn update_next_member<R: Read>(&mut self, public_key_data: R) -> io::Result<()> {
        let mut buf = <E::Fr as PrimeField>::Repr::default();
        buf.read_le(public_key_data)?;
        let leaf =
            E::Fr::from_repr(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
        self.tree.update_next(leaf)?;
        Ok(())
    }
    //// deletes member with given index (zeroes the leaf; see tree.delete)
    pub fn delete_member(&mut self, index: usize) -> io::Result<()> {
        self.tree.delete(index)?;
        Ok(())
    }
    /// hashes scalar field elements
    /// * expect numbers of scalar field element in 32 bytes in `input_data`
    /// * expect `result_data` is a scalar field element in 32 bytes
    /// * `n` is number of scalar field elemends stored in `input`
    pub fn hash<R: Read, W: Write>(
        &self,
        input_data: R,
        n: usize,
        mut result_data: W,
    ) -> io::Result<()> {
        let hasher = self.hasher();
        let input: Vec<E::Fr> = read_fr::<R, E>(input_data, n)?;
        let result = hasher.hash(input);
        result.into_repr().write_le(&mut result_data)?;
        Ok(())
    }
    /// given public inputs and authorization data generates public inputs and proof
    /// * expect `input` serialized as |epoch<32>|signal_hash<32>|
    /// * expect `id_key_data` is a scalar field element in 32 bytes
    /// * `output_data` is proof data serialized as |proof<416>|root<32>|epoch<32>|share_x<32>|share_y<32>|nullifier<32>|
    pub fn generate_proof<R: Read, W: Write>(
        &self,
        input_data: R,
        id_key_data: R,
        member_index: usize,
        mut output_data: W,
    ) -> io::Result<()> {
        use rand::chacha::ChaChaRng;
        use rand::SeedableRng;
        // SECURITY NOTE(review): `new_unseeded` is deterministic, so the
        // Groth16 proof randomness is predictable/reused across calls —
        // should come from a CSPRNG in production.
        let mut rng = ChaChaRng::new_unseeded();
        let signal = RLNSignal::<E>::read(input_data)?;
        // prepare inputs
        let hasher = self.hasher();
        let share_x = signal.hash.clone();
        let id_key: E::Fr = read_fr::<R, E>(id_key_data, 1)?[0];
        // line equation: a_0 = secret, a_1 = H(a_0, epoch)
        let a_0 = id_key.clone();
        let a_1: E::Fr = hasher.hash(vec![a_0, signal.epoch]);
        // evaluate line equation: share_y = a_1 * share_x + a_0
        let mut share_y = a_1.clone();
        share_y.mul_assign(&share_x);
        share_y.add_assign(&a_0);
        let nullifier = hasher.hash(vec![a_1]);
        let root = self.tree.get_root();
        // TODO: check id key here
        let auth_path = self.tree.get_witness(member_index)?;
        let inputs = RLNInputs::<E> {
            share_x: Some(share_x),
            share_y: Some(share_y),
            epoch: Some(signal.epoch),
            nullifier: Some(nullifier),
            root: Some(root),
            id_key: Some(id_key),
            auth_path: auth_path.into_iter().map(|w| Some(w)).collect(),
        };
        let circuit = RLNCircuit {
            inputs: inputs.clone(),
            hasher: PoseidonCircuit::new(self.poseidon_params.clone()),
        };
        // TODO: handle create proof error
        let proof = create_random_proof(circuit, &self.circuit_parameters, &mut rng).unwrap();
        // proof first, then the public inputs in the documented order
        write_uncompressed_proof(proof.clone(), &mut output_data)?;
        root.into_repr().write_le(&mut output_data)?;
        signal.epoch.into_repr().write_le(&mut output_data)?;
        share_x.into_repr().write_le(&mut output_data)?;
        share_y.into_repr().write_le(&mut output_data)?;
        nullifier.into_repr().write_le(&mut output_data)?;
        Ok(())
    }
    /// given proof and public data verifies the signal
    /// * expect `proof_data` is serialized as:
    /// |proof<416>|root<32>|epoch<32>|share_x<32>|share_y<32>|nullifier<32>|
    pub fn verify<R: Read>(&self, mut proof_data: R) -> io::Result<bool> {
        let proof = read_uncompressed_proof(&mut proof_data)?;
        let public_inputs = RLNInputs::<E>::read_public_inputs(&mut proof_data)?;
        // TODO: root must be checked here
        let verifing_key = prepare_verifying_key(&self.circuit_parameters.vk);
        let success = verify_proof(&verifing_key, &proof, &public_inputs).unwrap();
        Ok(success)
    }
    /// generates public private key pair
    /// * `key_pair_data` is seralized as |secret<32>|public<32>|
    /// SECURITY NOTE(review): the RNG seed is hard-coded, so every call
    /// produces the same "secret" — must use a CSPRNG outside of tests.
    pub fn key_gen<W: Write>(&self, mut key_pair_data: W) -> io::Result<()> {
        let mut rng = XorShiftRng::from_seed([0x3dbe6258, 0x8d313d76, 0x3237db17, 0xe5bc0654]);
        let hasher = self.hasher();
        let secret = E::Fr::rand(&mut rng);
        let public: E::Fr = hasher.hash(vec![secret.clone()]);
        secret.into_repr().write_le(&mut key_pair_data)?;
        public.into_repr().write_le(&mut key_pair_data)?;
        Ok(())
    }
    /// Serializes only the verifying key.
    pub fn export_verifier_key<W: Write>(&self, w: W) -> io::Result<()> {
        self.circuit_parameters.vk.write(w)
    }
    /// Serializes the full prover/verifier parameter set.
    pub fn export_circuit_parameters<W: Write>(&self, w: W) -> io::Result<()> {
        self.circuit_parameters.write(w)
    }
    /// Fresh Poseidon hasher built from this instance's parameters.
    pub fn hasher(&self) -> PoseidonHasher<E> {
        PoseidonHasher::new(self.poseidon_params.clone())
    }
    pub fn poseidon_params(&self) -> PoseidonParams<E> {
        self.poseidon_params.clone()
    }
}

View File

@ -1,80 +0,0 @@
use bellman::groth16::Proof;
use bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use bellman::pairing::{CurveAffine, EncodedPoint, Engine};
use rand::{Rand, SeedableRng, XorShiftRng};
use std::io::{self, Error, ErrorKind, Read, Write};
/// Reads `n` little-endian field-element representations from `reader`,
/// rejecting any that are not canonical elements of E::Fr.
pub fn read_fr<R: Read, E: Engine>(mut reader: R, n: usize) -> io::Result<Vec<E::Fr>> {
    let mut repr = <E::Fr as PrimeField>::Repr::default();
    (0..n)
        .map(|_| {
            repr.read_le(&mut reader)?;
            E::Fr::from_repr(repr).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
        })
        .collect()
}
pub fn write_uncompressed_proof<W: Write, E: Engine>(
proof: Proof<E>,
mut writer: W,
) -> io::Result<()> {
writer.write_all(proof.a.into_uncompressed().as_ref())?;
writer.write_all(proof.b.into_uncompressed().as_ref())?;
writer.write_all(proof.c.into_uncompressed().as_ref())?;
Ok(())
}
/// Reads a Groth16 proof in the uncompressed wire format produced by
/// `write_uncompressed_proof`: G1 `a`, then G2 `b`, then G1 `c`.
///
/// Returns `InvalidData` if a point does not decode to a curve element or is
/// the point at infinity (the identity is never a valid proof element).
pub fn read_uncompressed_proof<R: Read, E: Engine>(mut reader: R) -> io::Result<Proof<E>> {
    // Shared by all three reads: the identity element is rejected as malformed.
    fn reject_infinity<C: CurveAffine>(point: C) -> io::Result<C> {
        if point.is_zero() {
            Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "point at infinity",
            ))
        } else {
            Ok(point)
        }
    }

    let mut g1_repr = <E::G1Affine as CurveAffine>::Uncompressed::empty();
    let mut g2_repr = <E::G2Affine as CurveAffine>::Uncompressed::empty();

    reader.read_exact(g1_repr.as_mut())?;
    let a = g1_repr
        .into_affine()
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
        .and_then(reject_infinity)?;

    reader.read_exact(g2_repr.as_mut())?;
    let b = g2_repr
        .into_affine()
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
        .and_then(reject_infinity)?;

    // g1_repr is reused for `c`; `into_affine` borrows the encoding.
    reader.read_exact(g1_repr.as_mut())?;
    let c = g1_repr
        .into_affine()
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
        .and_then(reject_infinity)?;

    Ok(Proof { a, b, c })
}

View File

@ -1,132 +0,0 @@
use crate::public::RLN;
use std::io::{self, Error, ErrorKind, Read, Write};
use wasm_bindgen::prelude::*;
use js_sys::Array;
use sapling_crypto::bellman::pairing::bn256::{Bn256, Fr};
/// Installs `console_error_panic_hook` so Rust panics are forwarded to the
/// browser console instead of surfacing as an opaque wasm trap.
pub fn set_panic_hook() {
    // When the `console_error_panic_hook` feature is enabled, we can call the
    // `set_panic_hook` function at least once during initialization, and then
    // we will get better error messages if our code ever panics.
    //
    // For more details see
    // https://github.com/rustwasm/console_error_panic_hook#readme
    // #[cfg(feature = "console_error_panic_hook")]
    console_error_panic_hook::set_once();
}
/// wasm-bindgen wrapper exposing the RLN prover/verifier API (over BN256) to JavaScript.
#[wasm_bindgen]
pub struct RLNWasm {
    // Native RLN instance; every exported method delegates to it.
    api: RLN<Bn256>,
}
#[wasm_bindgen]
impl RLNWasm {
#[wasm_bindgen]
pub fn new(merkle_depth: usize) -> RLNWasm {
set_panic_hook();
RLNWasm {
api: RLN::<Bn256>::new(merkle_depth, None),
}
}
#[wasm_bindgen]
pub fn new_with_raw_params(
merkle_depth: usize,
raw_circuit_parameters: &[u8],
) -> Result<RLNWasm, JsValue> {
set_panic_hook();
let api = match RLN::new_with_raw_params(merkle_depth, raw_circuit_parameters, None) {
Ok(api) => api,
Err(e) => return Err(e.to_string().into()),
};
Ok(RLNWasm { api })
}
#[wasm_bindgen]
pub fn generate_proof(&self, input: &[u8]) -> Result<Vec<u8>, JsValue> {
let proof = match self.api.generate_proof(input) {
Ok(proof) => proof,
Err(e) => return Err(e.to_string().into()),
};
Ok(proof)
}
#[wasm_bindgen]
pub fn verify(
&self,
uncompresed_proof: &[u8],
raw_public_inputs: &[u8],
) -> Result<bool, JsValue> {
let success = match self.api.verify(uncompresed_proof, raw_public_inputs) {
Ok(success) => success,
Err(e) => return Err(e.to_string().into()),
};
Ok(success)
}
#[wasm_bindgen]
pub fn export_verifier_key(&self) -> Result<Vec<u8>, JsValue> {
let mut output: Vec<u8> = Vec::new();
match self.api.export_verifier_key(&mut output) {
Ok(_) => (),
Err(e) => return Err(e.to_string().into()),
};
Ok(output)
}
#[wasm_bindgen]
pub fn export_circuit_parameters(&self) -> Result<Vec<u8>, JsValue> {
let mut output: Vec<u8> = Vec::new();
match self.api.export_circuit_parameters(&mut output) {
Ok(_) => (),
Err(e) => return Err(e.to_string().into()),
};
Ok(output)
}
}
#[cfg(test)]
mod test {
    use crate::circuit::bench;
    use crate::poseidon::PoseidonParams;
    use bellman::pairing::bn256::Bn256;
    use wasm_bindgen_test::*;

    /// End-to-end: produce a proof through the wasm wrapper for a known-valid
    /// witness and check that the same wrapper verifies it.
    #[wasm_bindgen_test]
    fn test_rln_wasm() {
        let merkle_depth = 3usize;
        let poseidon_params = PoseidonParams::<Bn256>::new(8, 55, 3, None, None, None);
        let rln_test = bench::RLNTest::<Bn256>::new(merkle_depth, Some(poseidon_params));
        let rln_wasm = super::RLNWasm::new(merkle_depth);

        // Serialize a valid witness; the write result was previously ignored.
        let mut raw_inputs: Vec<u8> = Vec::new();
        let inputs = rln_test.valid_inputs();
        inputs.write(&mut raw_inputs).unwrap();

        let proof = rln_wasm.generate_proof(raw_inputs.as_slice()).unwrap();

        let mut raw_public_inputs: Vec<u8> = Vec::new();
        inputs.write_public_inputs(&mut raw_public_inputs).unwrap();

        assert!(rln_wasm
            .verify(proof.as_slice(), raw_public_inputs.as_slice())
            .unwrap());
    }
}

BIN
tests/.DS_Store vendored

Binary file not shown.

View File

@ -21,7 +21,8 @@ requires "nim >= 1.2.0",
"stint",
"metrics",
"libp2p", # Only for Waku v2
"web3"
"web3",
"rln"
### Helper functions
proc buildBinary(name: string, srcDir = "./", params = "", lang = "c") =

View File

@ -1,88 +0,0 @@
mode = ScriptMode.Verbose

### Package
version = "0.1.0"
author = "Status Research & Development GmbH"
description = "Waku, Private P2P Messaging for Resource-Restricted Devices"
license = "MIT or Apache License 2.0"
srcDir = "src"
#bin = @["build/waku"]

### Dependencies
requires "nim >= 1.2.0",
  "chronicles",
  "confutils",
  "chronos",
  "eth",
  "json_rpc",
  "libbacktrace",
  "nimcrypto",
  "stew",
  "stint",
  "metrics",
  "libp2p", # Only for Waku v2
  "web3"

### Helper functions
# Compiles `srcDir & name & ".nim"` into build/<name> with backend `lang`,
# forwarding any extra command-line parameters appended after the task name.
proc buildBinary(name: string, srcDir = "./", params = "", lang = "c") =
  if not dirExists "build":
    mkDir "build"
  # allow something like "nim nimbus --verbosity:0 --hints:off nimbus.nims"
  var extra_params = params
  for i in 2..<paramCount():
    extra_params &= " " & paramStr(i)
  exec "nim " & lang & " --out:build/" & name & " " & extra_params & " " & srcDir & name & ".nim"
# Builds a test binary from tests/<name>.nim and immediately runs it.
proc test(name: string, lang = "c") =
  # XXX: When running `> NIM_PARAMS="-d:chronicles_log_level=INFO" make test2`
  # I expect compiler flag to be overridden, however it stays with whatever is
  # specified here.
  buildBinary name, "tests/", "-d:chronicles_log_level=DEBUG"
  #buildBinary name, "tests/", "-d:chronicles_log_level=ERROR"
  exec "build/" & name
### Waku v1 tasks
task wakunode1, "Build Waku v1 cli node":
  buildBinary "wakunode1", "waku/v1/node/", "-d:chronicles_log_level=TRACE"

task sim1, "Build Waku v1 simulation tools":
  buildBinary "quicksim", "waku/v1/node/", "-d:chronicles_log_level=INFO"
  buildBinary "start_network", "waku/v1/node/", "-d:chronicles_log_level=DEBUG"

task example1, "Build Waku v1 example":
  buildBinary "example", "examples/v1/", "-d:chronicles_log_level=DEBUG"

task test1, "Build & run Waku v1 tests":
  test "all_tests_v1"

### Waku v2 tasks
task wakunode2, "Build Waku v2 (experimental) cli node":
  buildBinary "wakunode2", "waku/v2/node/", "-d:chronicles_log_level=TRACE"

task sim2, "Build Waku v2 simulation tools":
  buildBinary "quicksim2", "waku/v2/node/", "-d:chronicles_log_level=DEBUG"
  buildBinary "start_network2", "waku/v2/node/", "-d:chronicles_log_level=TRACE"

task example2, "Build Waku v2 example":
  let name = "basic2"
  buildBinary name, "examples/v2/", "-d:chronicles_log_level=DEBUG"

task test2, "Build & run Waku v2 tests":
  test "all_tests_v2"

task scripts2, "Build Waku v2 scripts":
  buildBinary "rpc_publish", "waku/v2/node/scripts/", "-d:chronicles_log_level=DEBUG"
  buildBinary "rpc_subscribe", "waku/v2/node/scripts/", "-d:chronicles_log_level=DEBUG"
  buildBinary "rpc_subscribe_filter", "waku/v2/node/scripts/", "-d:chronicles_log_level=DEBUG"
  buildBinary "rpc_query", "waku/v2/node/scripts/", "-d:chronicles_log_level=DEBUG"
  buildBinary "rpc_info", "waku/v2/node/scripts/", "-d:chronicles_log_level=DEBUG"

task chat2, "Build example Waku v2 chat usage":
  let name = "chat2"
  # NOTE For debugging, set debug level. For chat usage we want minimal log
  # output to STDOUT. Can be fixed by redirecting logs to file (e.g.)
  #buildBinary name, "examples/v2/", "-d:chronicles_log_level=WARN"
  buildBinary name, "examples/v2/", "-d:chronicles_log_level=DEBUG"

task bridge, "Build Waku v1 - v2 bridge":
  buildBinary "wakubridge", "waku/common/", "-d:chronicles_log_level=DEBUG"

View File

@ -1,608 +0,0 @@
## Generated at line 228
type
Waku* = object
template State*(PROTO: type Waku): type =
ref[WakuPeer:ObjectType]
template NetworkState*(PROTO: type Waku): type =
ref[WakuNetwork:ObjectType]
type
statusObj* = object
options*: StatusOptions
template status*(PROTO: type Waku): type =
statusObj
template msgProtocol*(MSG: type statusObj): type =
Waku
template RecType*(MSG: type statusObj): untyped =
statusObj
template msgId*(MSG: type statusObj): int =
0
type
messagesObj* = object
envelopes*: seq[Envelope]
template messages*(PROTO: type Waku): type =
messagesObj
template msgProtocol*(MSG: type messagesObj): type =
Waku
template RecType*(MSG: type messagesObj): untyped =
messagesObj
template msgId*(MSG: type messagesObj): int =
1
type
statusOptionsObj* = object
options*: StatusOptions
template statusOptions*(PROTO: type Waku): type =
statusOptionsObj
template msgProtocol*(MSG: type statusOptionsObj): type =
Waku
template RecType*(MSG: type statusOptionsObj): untyped =
statusOptionsObj
template msgId*(MSG: type statusOptionsObj): int =
22
type
p2pRequestObj* = object
envelope*: Envelope
template p2pRequest*(PROTO: type Waku): type =
p2pRequestObj
template msgProtocol*(MSG: type p2pRequestObj): type =
Waku
template RecType*(MSG: type p2pRequestObj): untyped =
p2pRequestObj
template msgId*(MSG: type p2pRequestObj): int =
126
type
p2pMessageObj* = object
envelopes*: seq[Envelope]
template p2pMessage*(PROTO: type Waku): type =
p2pMessageObj
template msgProtocol*(MSG: type p2pMessageObj): type =
Waku
template RecType*(MSG: type p2pMessageObj): untyped =
p2pMessageObj
template msgId*(MSG: type p2pMessageObj): int =
127
type
batchAcknowledgedObj* = object
template batchAcknowledged*(PROTO: type Waku): type =
batchAcknowledgedObj
template msgProtocol*(MSG: type batchAcknowledgedObj): type =
Waku
template RecType*(MSG: type batchAcknowledgedObj): untyped =
batchAcknowledgedObj
template msgId*(MSG: type batchAcknowledgedObj): int =
11
type
messageResponseObj* = object
template messageResponse*(PROTO: type Waku): type =
messageResponseObj
template msgProtocol*(MSG: type messageResponseObj): type =
Waku
template RecType*(MSG: type messageResponseObj): untyped =
messageResponseObj
template msgId*(MSG: type messageResponseObj): int =
12
type
p2pSyncResponseObj* = object
template p2pSyncResponse*(PROTO: type Waku): type =
p2pSyncResponseObj
template msgProtocol*(MSG: type p2pSyncResponseObj): type =
Waku
template RecType*(MSG: type p2pSyncResponseObj): untyped =
p2pSyncResponseObj
template msgId*(MSG: type p2pSyncResponseObj): int =
124
type
p2pSyncRequestObj* = object
template p2pSyncRequest*(PROTO: type Waku): type =
p2pSyncRequestObj
template msgProtocol*(MSG: type p2pSyncRequestObj): type =
Waku
template RecType*(MSG: type p2pSyncRequestObj): untyped =
p2pSyncRequestObj
template msgId*(MSG: type p2pSyncRequestObj): int =
123
type
p2pRequestCompleteObj* = object
requestId*: Hash
lastEnvelopeHash*: Hash
cursor*: seq[byte]
template p2pRequestComplete*(PROTO: type Waku): type =
p2pRequestCompleteObj
template msgProtocol*(MSG: type p2pRequestCompleteObj): type =
Waku
template RecType*(MSG: type p2pRequestCompleteObj): untyped =
p2pRequestCompleteObj
template msgId*(MSG: type p2pRequestCompleteObj): int =
125
var WakuProtocolObj = initProtocol("waku", 1, createPeerState[Peer,
ref[WakuPeer:ObjectType]], createNetworkState[EthereumNode,
ref[WakuNetwork:ObjectType]])
var WakuProtocol = addr WakuProtocolObj
template protocolInfo*(PROTO: type Waku): auto =
WakuProtocol
proc statusRawSender(peerOrResponder: Peer; options: StatusOptions;
timeout: Duration = milliseconds(10000'i64)): Future[void] {.
gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 0
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 0)
append(writer, perPeerMsgId)
append(writer, options)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
template status*(peer: Peer; options: StatusOptions;
timeout: Duration = milliseconds(10000'i64)): Future[statusObj] =
let peer_185365056 = peer
let sendingFuture`gensym185365057 = statusRawSender(peer, options)
handshakeImpl(peer_185365056, sendingFuture`gensym185365057,
nextMsg(peer_185365056, statusObj), timeout)
proc messages*(peerOrResponder: Peer; envelopes: openarray[Envelope]): Future[void] {.
gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 1
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 1)
append(writer, perPeerMsgId)
append(writer, envelopes)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
proc statusOptions*(peerOrResponder: Peer; options: StatusOptions): Future[void] {.
gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 22
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 22)
append(writer, perPeerMsgId)
append(writer, options)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
proc p2pRequest*(peerOrResponder: Peer; envelope: Envelope): Future[void] {.gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 126
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 126)
append(writer, perPeerMsgId)
append(writer, envelope)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
proc p2pMessage*(peerOrResponder: Peer; envelopes: openarray[Envelope]): Future[void] {.
gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 127
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 127)
append(writer, perPeerMsgId)
append(writer, envelopes)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
proc batchAcknowledged*(peerOrResponder: Peer): Future[void] {.gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 11
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 11)
append(writer, perPeerMsgId)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
proc messageResponse*(peerOrResponder: Peer): Future[void] {.gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 12
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 12)
append(writer, perPeerMsgId)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
proc p2pSyncResponse*(peerOrResponder: ResponderWithId[p2pSyncResponseObj]): Future[
void] {.gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 124
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 124)
append(writer, perPeerMsgId)
append(writer, peerOrResponder.reqId)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
template send*(r`gensym185365072: ResponderWithId[p2pSyncResponseObj];
args`gensym185365073: varargs[untyped]): auto =
p2pSyncResponse(r`gensym185365072, args`gensym185365073)
proc p2pSyncRequest*(peerOrResponder: Peer;
timeout: Duration = milliseconds(10000'i64)): Future[
Option[p2pSyncResponseObj]] {.gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 123
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 123)
append(writer, perPeerMsgId)
initFuture result
let reqId = registerRequest(peer, timeout, result, perPeerMsgId + 1)
append(writer, reqId)
let msgBytes = finish(writer)
linkSendFailureToReqFuture(sendMsg(peer, msgBytes), result)
proc p2pRequestComplete*(peerOrResponder: Peer; requestId: Hash;
lastEnvelopeHash: Hash; cursor: seq[byte]): Future[void] {.
gcsafe.} =
let peer = getPeer(peerOrResponder)
var writer = initRlpWriter()
const
perProtocolMsgId = 125
let perPeerMsgId = perPeerMsgIdImpl(peer, WakuProtocol, 125)
append(writer, perPeerMsgId)
startList(writer, 3)
append(writer, requestId)
append(writer, lastEnvelopeHash)
append(writer, cursor)
let msgBytes = finish(writer)
return sendMsg(peer, msgBytes)
proc messagesUserHandler(peer: Peer; envelopes: seq[Envelope]) {.gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 1
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
if not peer.state.initialized:
warn "Handshake not completed yet, discarding messages"
return
for envelope in envelopes:
if not envelope.valid():
warn "Expired or future timed envelope", peer
continue
peer.state.accounting.received += 1
let msg = initMessage(envelope)
if not msg.allowed(peer.networkState.config):
continue
if peer.state.received.containsOrIncl(msg.hash):
envelopes_dropped.inc(labelValues = ["duplicate"])
trace "Peer sending duplicate messages", peer, hash = $msg.hash
continue
if peer.networkState.queue[].add(msg):
peer.networkState.filters.notify(msg)
proc statusOptionsUserHandler(peer: Peer; options: StatusOptions) {.gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 22
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
if not peer.state.initialized:
warn "Handshake not completed yet, discarding statusOptions"
return
if options.topicInterest.isSome():
peer.state.topics = options.topicInterest
elif options.bloomFilter.isSome():
peer.state.bloom = options.bloomFilter.get()
peer.state.topics = none(seq[Topic])
if options.powRequirement.isSome():
peer.state.powRequirement = options.powRequirement.get()
if options.lightNode.isSome():
peer.state.isLightNode = options.lightNode.get()
proc p2pRequestUserHandler(peer: Peer; envelope: Envelope) {.gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 126
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
if not peer.networkState.p2pRequestHandler.isNil():
peer.networkState.p2pRequestHandler(peer, envelope)
proc p2pMessageUserHandler(peer: Peer; envelopes: seq[Envelope]) {.gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 127
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
if peer.state.trusted:
for envelope in envelopes:
let msg = Message(env: envelope, isP2P: true)
peer.networkState.filters.notify(msg)
proc batchAcknowledgedUserHandler(peer: Peer) {.gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 11
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
discard
proc messageResponseUserHandler(peer: Peer) {.gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 12
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
discard
proc p2pSyncResponseUserHandler(peer: Peer; reqId: int) {.gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 124
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
discard
proc p2pSyncRequestUserHandler(peer: Peer; reqId: int) {.gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 123
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
var response = init(ResponderWithId[p2pSyncResponseObj], peer, reqId)
discard
proc p2pRequestCompleteUserHandler(peer: Peer; requestId: Hash;
lastEnvelopeHash: Hash; cursor: seq[byte]) {.
gcsafe, async.} =
type
CurrentProtocol = Waku
const
perProtocolMsgId = 125
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
discard
proc statusThunk(peer: Peer; _`gensym185365033: int; data`gensym185365034: Rlp) {.
async, gcsafe.} =
var rlp = data`gensym185365034
var msg {.noinit.}: statusObj
msg.options = checkedRlpRead(peer, rlp, StatusOptions)
proc messagesThunk(peer: Peer; _`gensym185365058: int; data`gensym185365059: Rlp) {.
async, gcsafe.} =
var rlp = data`gensym185365059
var msg {.noinit.}: messagesObj
msg.envelopes = checkedRlpRead(peer, rlp, openarray[Envelope])
await(messagesUserHandler(peer, msg.envelopes))
proc statusOptionsThunk(peer: Peer; _`gensym185365060: int; data`gensym185365061: Rlp) {.
async, gcsafe.} =
var rlp = data`gensym185365061
var msg {.noinit.}: statusOptionsObj
msg.options = checkedRlpRead(peer, rlp, StatusOptions)
await(statusOptionsUserHandler(peer, msg.options))
proc p2pRequestThunk(peer: Peer; _`gensym185365062: int; data`gensym185365063: Rlp) {.
async, gcsafe.} =
var rlp = data`gensym185365063
var msg {.noinit.}: p2pRequestObj
msg.envelope = checkedRlpRead(peer, rlp, Envelope)
await(p2pRequestUserHandler(peer, msg.envelope))
proc p2pMessageThunk(peer: Peer; _`gensym185365064: int; data`gensym185365065: Rlp) {.
async, gcsafe.} =
var rlp = data`gensym185365065
var msg {.noinit.}: p2pMessageObj
msg.envelopes = checkedRlpRead(peer, rlp, openarray[Envelope])
await(p2pMessageUserHandler(peer, msg.envelopes))
proc batchAcknowledgedThunk(peer: Peer; _`gensym185365066: int;
data`gensym185365067: Rlp) {.async, gcsafe.} =
var rlp = data`gensym185365067
var msg {.noinit.}: batchAcknowledgedObj
await(batchAcknowledgedUserHandler(peer))
proc messageResponseThunk(peer: Peer; _`gensym185365068: int;
data`gensym185365069: Rlp) {.async, gcsafe.} =
var rlp = data`gensym185365069
var msg {.noinit.}: messageResponseObj
await(messageResponseUserHandler(peer))
proc p2pSyncResponseThunk(peer: Peer; _`gensym185365070: int;
data`gensym185365071: Rlp) {.async, gcsafe.} =
var rlp = data`gensym185365071
var msg {.noinit.}: p2pSyncResponseObj
let reqId = read(rlp, int)
await(p2pSyncResponseUserHandler(peer, reqId))
resolveResponseFuture(peer, perPeerMsgId(peer, p2pSyncResponseObj), addr(msg),
reqId)
proc p2pSyncRequestThunk(peer: Peer; _`gensym185365074: int;
data`gensym185365075: Rlp) {.async, gcsafe.} =
var rlp = data`gensym185365075
var msg {.noinit.}: p2pSyncRequestObj
let reqId = read(rlp, int)
await(p2pSyncRequestUserHandler(peer, reqId))
proc p2pRequestCompleteThunk(peer: Peer; _`gensym185365076: int;
data`gensym185365077: Rlp) {.async, gcsafe.} =
var rlp = data`gensym185365077
var msg {.noinit.}: p2pRequestCompleteObj
tryEnterList(rlp)
msg.requestId = checkedRlpRead(peer, rlp, Hash)
msg.lastEnvelopeHash = checkedRlpRead(peer, rlp, Hash)
msg.cursor = checkedRlpRead(peer, rlp, seq[byte])
await(p2pRequestCompleteUserHandler(peer, msg.requestId, msg.lastEnvelopeHash,
msg.cursor))
registerMsg(WakuProtocol, 0, "status", statusThunk, messagePrinter[statusObj],
requestResolver[statusObj], nextMsgResolver[statusObj])
registerMsg(WakuProtocol, 1, "messages", messagesThunk, messagePrinter[messagesObj],
requestResolver[messagesObj], nextMsgResolver[messagesObj])
registerMsg(WakuProtocol, 22, "statusOptions", statusOptionsThunk,
messagePrinter[statusOptionsObj], requestResolver[statusOptionsObj],
nextMsgResolver[statusOptionsObj])
registerMsg(WakuProtocol, 126, "p2pRequest", p2pRequestThunk,
messagePrinter[p2pRequestObj], requestResolver[p2pRequestObj],
nextMsgResolver[p2pRequestObj])
registerMsg(WakuProtocol, 127, "p2pMessage", p2pMessageThunk,
messagePrinter[p2pMessageObj], requestResolver[p2pMessageObj],
nextMsgResolver[p2pMessageObj])
registerMsg(WakuProtocol, 11, "batchAcknowledged", batchAcknowledgedThunk,
messagePrinter[batchAcknowledgedObj],
requestResolver[batchAcknowledgedObj],
nextMsgResolver[batchAcknowledgedObj])
registerMsg(WakuProtocol, 12, "messageResponse", messageResponseThunk,
messagePrinter[messageResponseObj],
requestResolver[messageResponseObj],
nextMsgResolver[messageResponseObj])
registerMsg(WakuProtocol, 124, "p2pSyncResponse", p2pSyncResponseThunk,
messagePrinter[p2pSyncResponseObj],
requestResolver[p2pSyncResponseObj],
nextMsgResolver[p2pSyncResponseObj])
registerMsg(WakuProtocol, 123, "p2pSyncRequest", p2pSyncRequestThunk,
messagePrinter[p2pSyncRequestObj],
requestResolver[p2pSyncRequestObj],
nextMsgResolver[p2pSyncRequestObj])
registerMsg(WakuProtocol, 125, "p2pRequestComplete", p2pRequestCompleteThunk,
messagePrinter[p2pRequestCompleteObj],
requestResolver[p2pRequestCompleteObj],
nextMsgResolver[p2pRequestCompleteObj])
proc WakuPeerConnected(peer: Peer) {.gcsafe, async.} =
type
CurrentProtocol = Waku
template state(peer: Peer): ref[WakuPeer:ObjectType] =
cast[ref[WakuPeer:ObjectType]](getState(peer, WakuProtocol))
template networkState(peer: Peer): ref[WakuNetwork:ObjectType] =
cast[ref[WakuNetwork:ObjectType]](getNetworkState(peer.network, WakuProtocol))
trace "onPeerConnected Waku"
let
wakuNet = peer.networkState
wakuPeer = peer.state
let options = StatusOptions(powRequirement: some(wakuNet.config.powRequirement),
bloomFilter: wakuNet.config.bloom,
lightNode: some(wakuNet.config.isLightNode), confirmationsEnabled: some(
wakuNet.config.confirmationsEnabled),
rateLimits: wakuNet.config.rateLimits,
topicInterest: wakuNet.config.topics)
let m = await peer.status(options, timeout = chronos.milliseconds(5000))
wakuPeer.powRequirement = m.options.powRequirement.get(defaultMinPow)
wakuPeer.bloom = m.options.bloomFilter.get(fullBloom())
wakuPeer.isLightNode = m.options.lightNode.get(false)
if wakuPeer.isLightNode and wakuNet.config.isLightNode:
raise newException(UselessPeerError, "Two light nodes connected")
wakuPeer.topics = m.options.topicInterest
if wakuPeer.topics.isSome():
if wakuPeer.topics.get().len > topicInterestMax:
raise newException(UselessPeerError, "Topic-interest is too large")
if wakuNet.config.topics.isSome():
raise newException(UselessPeerError,
"Two Waku nodes with topic-interest connected")
wakuPeer.received.init()
wakuPeer.trusted = false
wakuPeer.accounting = Accounting(sent: 0, received: 0)
wakuPeer.initialized = true
if not wakuNet.config.isLightNode:
traceAsyncErrors peer.run()
debug "Waku peer initialized", peer
setEventHandlers(WakuProtocol, WakuPeerConnected, nil)
registerProtocol(WakuProtocol)

View File

@ -5,7 +5,8 @@ import os
# librln.dylib is the rln library taken from https://github.com/kilic/rln (originally implemented in rust with an exposed C API)
# contains the key generation and other relevant functions
const libPath = "rln/target/debug/"
const libPath = "vendor/rln/target/debug/"
when defined(Windows):
const libName* = libPath / "rln.dll"
elif defined(Linux):