cl: restructure cl into a workspace and integrate nullifier proof

This commit is contained in:
David Rusu 2024-07-12 09:24:58 +04:00
parent 122efb39dc
commit 51f6c66a64
32 changed files with 527 additions and 327 deletions

View File

@ -1,21 +1,11 @@
[package]
name = "cl"
version = "0.1.0"
edition = "2021"
[workspace]
resolver = "2"
members = [ "cl", "ledger", "proof_statements", "risc0_proofs"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = {version="1.0", features = ["derive"]}
bincode = "1.3.3"
risc0-groth16 = "1.0.1"
blake2 = "0.10.6"
# jubjub = "0.10.0"
group = "0.13.0"
rand_core = "0.6.0"
rand_chacha = "0.3.1"
lazy_static = "1.4.0"
hex = "0.4.3"
curve25519-dalek = {version = "4.1", features = ["serde", "digest", "rand_core"]}
sha2 = "0.10"
# Always optimize; building and running the risc0_proofs takes much longer without optimization.
[profile.dev]
opt-level = 3
[profile.release]
debug = 1
lto = true

20
cl/cl/Cargo.toml Normal file
View File

@ -0,0 +1,20 @@
[package]
name = "cl"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = {version="1.0", features = ["derive"]}
bincode = "1.3.3"
risc0-groth16 = "1.0.1"
blake2 = "0.10.6"
# jubjub = "0.10.0"
group = "0.13.0"
rand_core = "0.6.0"
rand_chacha = "0.3.1"
lazy_static = "1.4.0"
hex = "0.4.3"
curve25519-dalek = {version = "4.1", features = ["serde", "digest", "rand_core"]}
sha2 = "0.10"

View File

@ -1,14 +1,8 @@
use std::collections::BTreeSet;
use serde::{Deserialize, Serialize};
use curve25519_dalek::{constants::RISTRETTO_BASEPOINT_POINT, ristretto::RistrettoPoint, Scalar};
use crate::{
error::Error,
note::NoteCommitment,
partial_tx::{PartialTx, PartialTxProof},
};
use crate::partial_tx::PartialTx;
/// The transaction bundle is a collection of partial transactions.
/// The goal in bundling transactions is to produce a set of partial transactions
@ -24,11 +18,11 @@ pub struct BundleWitness {
pub balance_blinding: Scalar,
}
#[derive(Debug)]
pub struct BundleProof {
pub partials: Vec<PartialTxProof>,
pub balance_blinding: Scalar,
}
// #[derive(Debug)]
// pub struct BundleProof {
// pub partials: Vec<PartialTxProof>,
// pub balance_blinding: Scalar,
// }
impl Bundle {
pub fn balance(&self) -> RistrettoPoint {
@ -40,53 +34,53 @@ impl Bundle {
== crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, balance_blinding_witness)
}
pub fn prove(
&self,
w: BundleWitness,
ptx_proofs: Vec<PartialTxProof>,
) -> Result<BundleProof, Error> {
if ptx_proofs.len() == self.partials.len() {
return Err(Error::ProofFailed);
}
let input_notes: Vec<NoteCommitment> = self
.partials
.iter()
.flat_map(|ptx| ptx.inputs.iter().map(|i| i.note_comm))
.collect();
if input_notes.len() != BTreeSet::from_iter(input_notes.iter()).len() {
return Err(Error::ProofFailed);
}
// pub fn prove(
// &self,
// w: BundleWitness,
// ptx_proofs: Vec<PartialTxProof>,
// ) -> Result<BundleProof, Error> {
// if ptx_proofs.len() == self.partials.len() {
// return Err(Error::ProofFailed);
// }
// let input_notes: Vec<NoteCommitment> = self
// .partials
// .iter()
// .flat_map(|ptx| ptx.inputs.iter().map(|i| i.note_comm))
// .collect();
// if input_notes.len() != BTreeSet::from_iter(input_notes.iter()).len() {
// return Err(Error::ProofFailed);
// }
let output_notes: Vec<NoteCommitment> = self
.partials
.iter()
.flat_map(|ptx| ptx.outputs.iter().map(|o| o.note_comm))
.collect();
if output_notes.len() != BTreeSet::from_iter(output_notes.iter()).len() {
return Err(Error::ProofFailed);
}
// let output_notes: Vec<NoteCommitment> = self
// .partials
// .iter()
// .flat_map(|ptx| ptx.outputs.iter().map(|o| o.note_comm))
// .collect();
// if output_notes.len() != BTreeSet::from_iter(output_notes.iter()).len() {
// return Err(Error::ProofFailed);
// }
if self.balance()
!= crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, w.balance_blinding)
{
return Err(Error::ProofFailed);
}
// if self.balance()
// != crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, w.balance_blinding)
// {
// return Err(Error::ProofFailed);
// }
Ok(BundleProof {
partials: ptx_proofs,
balance_blinding: w.balance_blinding,
})
}
// Ok(BundleProof {
// partials: ptx_proofs,
// balance_blinding: w.balance_blinding,
// })
// }
pub fn verify(&self, proof: BundleProof) -> bool {
proof.partials.len() == self.partials.len()
&& self.is_balanced(proof.balance_blinding)
&& self
.partials
.iter()
.zip(&proof.partials)
.all(|(p, p_proof)| p.verify(p_proof))
}
// pub fn verify(&self, proof: BundleProof) -> bool {
// proof.partials.len() == self.partials.len()
// && self.is_balanced(proof.balance_blinding)
// && self
// .partials
// .iter()
// .zip(&proof.partials)
// .all(|(p, p_proof)| p.verify(p_proof))
// }
}
#[cfg(test)]

View File

@ -4,10 +4,8 @@
/// which on their own may not balance (i.e. \sum inputs != \sum outputs)
use crate::{
balance::Balance,
error::Error,
note::{NoteCommitment, NoteWitness},
nullifier::{Nullifier, NullifierNonce, NullifierSecret},
partial_tx::PtxRoot,
};
use rand_core::RngCore;
// use risc0_groth16::{PublicInputsJson, Verifier};
@ -20,7 +18,7 @@ pub struct Input {
pub balance: Balance,
}
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct InputWitness {
pub note: NoteWitness,
pub nf_sk: NullifierSecret,
@ -43,67 +41,67 @@ impl InputWitness {
balance: self.note.balance(),
}
}
}
// as we don't have SNARKS hooked up yet, the witness will be our proof
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct InputProof {
input: InputWitness,
ptx_root: PtxRoot,
death_proof: Vec<u8>,
pub fn to_output_witness(&self) -> crate::OutputWitness {
crate::OutputWitness {
note: self.note.clone(),
nf_pk: self.nf_sk.commit(),
nonce: self.nonce,
}
}
}
impl Input {
pub fn prove(
&self,
w: &InputWitness,
ptx_root: PtxRoot,
death_proof: Vec<u8>,
) -> Result<InputProof, Error> {
if bincode::serialize(&w.commit()).unwrap() != bincode::serialize(&self).unwrap() {
Err(Error::ProofFailed)
} else {
Ok(InputProof {
input: w.clone(),
ptx_root,
death_proof,
})
}
}
// pub fn prove(
// &self,
// w: &InputWitness,
// ptx_root: PtxRoot,
// death_proof: Vec<u8>,
// ) -> Result<InputProof, Error> {
// if bincode::serialize(&w.commit()).unwrap() != bincode::serialize(&self).unwrap() {
// Err(Error::ProofFailed)
// } else {
// Ok(InputProof {
// input: w.clone(),
// ptx_root,
// death_proof,
// })
// }
// }
pub fn verify(&self, ptx_root: PtxRoot, proof: &InputProof) -> bool {
// verification checks the relation
// - nf_pk == hash(nf_sk)
// - note_comm == commit(note || nf_pk)
// - nullifier == hash(nf_sk || nonce)
// - balance == v * hash_to_curve(Unit) + blinding * H
// - ptx_root is the same one that was used in proving.
// pub fn verify(&self, ptx_root: PtxRoot, proof: &InputProof) -> bool {
// // verification checks the relation
// // - nf_pk == hash(nf_sk)
// // - note_comm == commit(note || nf_pk)
// // - nullifier == hash(nf_sk || nonce)
// // - balance == v * hash_to_curve(Unit) + blinding * H
// // - ptx_root is the same one that was used in proving.
let witness = &proof.input;
// let witness = &proof.input;
let nf_pk = witness.nf_sk.commit();
// let nf_pk = witness.nf_sk.commit();
// let death_constraint_was_committed_to =
// witness.note.death_constraint == bincode::serialize(&death_constraint).unwrap();
// // let death_constraint_was_committed_to =
// // witness.note.death_constraint == bincode::serialize(&death_constraint).unwrap();
// let death_constraint_is_satisfied: bool = Verifier::from_json(
// bincode::deserialize(&proof.death_proof).unwrap(),
// PublicInputsJson {
// values: vec![ptx_root.hex()],
// },
// bincode::deserialize(&witness.note.death_constraint).unwrap(),
// )
// .unwrap()
// .verify()
// .is_ok();
let death_constraint_is_satisfied = true;
self.note_comm == witness.note.commit(nf_pk, witness.nonce)
&& self.nullifier == Nullifier::new(witness.nf_sk, witness.nonce)
&& self.balance == witness.note.balance()
&& ptx_root == proof.ptx_root
// && death_constraint_was_committed_to
&& death_constraint_is_satisfied
}
// // let death_constraint_is_satisfied: bool = Verifier::from_json(
// // bincode::deserialize(&proof.death_proof).unwrap(),
// // PublicInputsJson {
// // values: vec![ptx_root.hex()],
// // },
// // bincode::deserialize(&witness.note.death_constraint).unwrap(),
// // )
// // .unwrap()
// // .verify()
// // .is_ok();
// let death_constraint_is_satisfied = true;
// self.note_comm == witness.note.commit(nf_pk, witness.nonce)
// && self.nullifier == Nullifier::new(witness.nf_sk, witness.nonce)
// && self.balance == witness.note.balance()
// && ptx_root == proof.ptx_root
// // && death_constraint_was_committed_to
// && death_constraint_is_satisfied
// }
pub fn to_bytes(&self) -> [u8; 96] {
let mut bytes = [0u8; 96];

View File

@ -49,7 +49,7 @@ pub enum PathNode {
Right([u8; 32]),
}
pub fn verify_path(leaf: [u8; 32], path: &[PathNode], root: [u8; 32]) -> bool {
pub fn path_root(leaf: [u8; 32], path: &[PathNode]) -> [u8; 32] {
let mut computed_hash = leaf;
for path_node in path {
@ -63,7 +63,7 @@ pub fn verify_path(leaf: [u8; 32], path: &[PathNode], root: [u8; 32]) -> bool {
}
}
computed_hash == root
computed_hash
}
pub fn path<const N: usize>(leaves: [[u8; 32]; N], idx: usize) -> Vec<PathNode> {
@ -163,7 +163,7 @@ mod test {
let p = path::<1>(leaves, 0);
let expected = vec![];
assert_eq!(p, expected);
assert!(verify_path(leaf(b"desert"), &p, r));
assert_eq!(path_root(leaf(b"desert"), &p), r);
}
#[test]
@ -176,14 +176,14 @@ mod test {
let p0 = path(leaves, 0);
let expected0 = vec![PathNode::Right(leaf(b"sand"))];
assert_eq!(p0, expected0);
assert!(verify_path(leaf(b"desert"), &p0, r));
assert_eq!(path_root(leaf(b"desert"), &p0), r);
// --- proof for element at idx 1
let p1 = path(leaves, 1);
let expected1 = vec![PathNode::Left(leaf(b"desert"))];
assert_eq!(p1, expected1);
assert!(verify_path(leaf(b"sand"), &p1, r));
assert_eq!(path_root(leaf(b"sand"), &p1), r);
}
#[test]
@ -204,7 +204,7 @@ mod test {
PathNode::Right(node(leaf(b"feels"), leaf(b"warm"))),
];
assert_eq!(p0, expected0);
assert!(verify_path(leaf(b"desert"), &p0, r));
assert!(path_root(leaf(b"desert"), &p0), r);
// --- proof for element at idx 1
@ -214,7 +214,7 @@ mod test {
PathNode::Right(node(leaf(b"feels"), leaf(b"warm"))),
];
assert_eq!(p1, expected1);
assert!(verify_path(leaf(b"sand"), &p1, r));
assert_eq!(path_root(leaf(b"sand"), &p1), r);
// --- proof for element at idx 2
@ -224,7 +224,7 @@ mod test {
PathNode::Left(node(leaf(b"desert"), leaf(b"sand"))),
];
assert_eq!(p2, expected2);
assert!(verify_path(leaf(b"feels"), &p2, r));
assert_eq!(path_root(leaf(b"feels"), &p2), r);
// --- proof for element at idx 3
@ -234,6 +234,6 @@ mod test {
PathNode::Left(node(leaf(b"desert"), leaf(b"sand"))),
];
assert_eq!(p3, expected3);
assert!(verify_path(leaf(b"warm"), &p3, r));
assert_eq!(path_root(leaf(b"warm"), &p3), r);
}
}

View File

@ -14,7 +14,7 @@ pub struct Output {
pub balance: Balance,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct OutputWitness {
pub note: NoteWitness,
pub nf_pk: NullifierCommitment,

View File

@ -1,14 +1,11 @@
use std::collections::BTreeSet;
use rand_core::RngCore;
// use risc0_groth16::ProofJson;
use curve25519_dalek::ristretto::RistrettoPoint;
use serde::{Deserialize, Serialize};
use crate::error::Error;
use crate::input::{Input, InputProof, InputWitness};
use crate::input::{Input, InputWitness};
use crate::merkle;
use crate::output::{Output, OutputProof, OutputWitness};
use crate::output::{Output, OutputWitness};
const MAX_INPUTS: usize = 8;
const MAX_OUTPUTS: usize = 8;
@ -42,11 +39,11 @@ pub struct PartialTxWitness {
pub outputs: Vec<OutputWitness>,
}
#[derive(Debug)]
pub struct PartialTxProof {
pub inputs: Vec<InputProof>,
pub outputs: Vec<OutputProof>,
}
// #[derive(Debug)]
// pub struct PartialTxProof {
// pub inputs: Vec<InputProof>,
// pub outputs: Vec<OutputProof>,
// }
impl PartialTx {
pub fn from_witness(w: PartialTxWitness) -> Self {
@ -99,63 +96,63 @@ impl PartialTx {
PtxRoot(root)
}
pub fn prove(
&self,
w: PartialTxWitness,
death_proofs: Vec<Vec<u8>>,
) -> Result<PartialTxProof, Error> {
if bincode::serialize(&Self::from_witness(w.clone())).unwrap()
!= bincode::serialize(&self).unwrap()
{
return Err(Error::ProofFailed);
}
let input_note_comms = BTreeSet::from_iter(self.inputs.iter().map(|i| i.note_comm));
let output_note_comms = BTreeSet::from_iter(self.outputs.iter().map(|o| o.note_comm));
// pub fn prove(
// &self,
// w: PartialTxWitness,
// death_proofs: Vec<Vec<u8>>,
// ) -> Result<PartialTxProof, Error> {
// if bincode::serialize(&Self::from_witness(w.clone())).unwrap()
// != bincode::serialize(&self).unwrap()
// {
// return Err(Error::ProofFailed);
// }
// let input_note_comms = BTreeSet::from_iter(self.inputs.iter().map(|i| i.note_comm));
// let output_note_comms = BTreeSet::from_iter(self.outputs.iter().map(|o| o.note_comm));
if input_note_comms.len() != self.inputs.len()
|| output_note_comms.len() != self.outputs.len()
{
return Err(Error::ProofFailed);
}
// if input_note_comms.len() != self.inputs.len()
// || output_note_comms.len() != self.outputs.len()
// {
// return Err(Error::ProofFailed);
// }
let ptx_root = self.root();
// let ptx_root = self.root();
let input_proofs: Vec<InputProof> = Result::from_iter(
self.inputs
.iter()
.zip(&w.inputs)
.zip(death_proofs.into_iter())
.map(|((i, i_w), death_p)| i.prove(i_w, ptx_root, death_p)),
)?;
// let input_proofs: Vec<InputProof> = Result::from_iter(
// self.inputs
// .iter()
// .zip(&w.inputs)
// .zip(death_proofs.into_iter())
// .map(|((i, i_w), death_p)| i.prove(i_w, ptx_root, death_p)),
// )?;
let output_proofs: Vec<OutputProof> = Result::from_iter(
self.outputs
.iter()
.zip(&w.outputs)
.map(|(o, o_w)| o.prove(o_w)),
)?;
// let output_proofs: Vec<OutputProof> = Result::from_iter(
// self.outputs
// .iter()
// .zip(&w.outputs)
// .map(|(o, o_w)| o.prove(o_w)),
// )?;
Ok(PartialTxProof {
inputs: input_proofs,
outputs: output_proofs,
})
}
// Ok(PartialTxProof {
// inputs: input_proofs,
// outputs: output_proofs,
// })
// }
pub fn verify(&self, proof: &PartialTxProof) -> bool {
let ptx_root = self.root();
self.inputs.len() == proof.inputs.len()
&& self.outputs.len() == proof.outputs.len()
&& self
.inputs
.iter()
.zip(&proof.inputs)
.all(|(i, p)| i.verify(ptx_root, p))
&& self
.outputs
.iter()
.zip(&proof.outputs)
.all(|(o, p)| o.verify(p))
}
// pub fn verify(&self, proof: &PartialTxProof) -> bool {
// let ptx_root = self.root();
// self.inputs.len() == proof.inputs.len()
// && self.outputs.len() == proof.outputs.len()
// && self
// .inputs
// .iter()
// .zip(&proof.inputs)
// .all(|(i, p)| i.verify(ptx_root, p))
// && self
// .outputs
// .iter()
// .zip(&proof.outputs)
// .all(|(o, p)| o.verify(p))
// }
pub fn balance(&self) -> RistrettoPoint {
let in_sum: RistrettoPoint = self.inputs.iter().map(|i| i.balance.0).sum();

13
cl/ledger/Cargo.toml Normal file
View File

@ -0,0 +1,13 @@
[package]
name = "ledger"
version = "0.1.0"
edition = "2021"
[dependencies]
cl = { path = "../cl" }
proof_statements = { path = "../proof_statements" }
nomos_cl_risc0_proofs = { path = "../risc0_proofs" }
risc0-zkvm = { version = "1.0", features = ["prove", "metal"] }
risc0-groth16 = { version = "1.0" }
rand = "0.8.5"
thiserror = "1.0.62"

9
cl/ledger/src/error.rs Normal file
View File

@ -0,0 +1,9 @@
use thiserror::Error;
/// Crate-wide result alias: ledger operations fail with [`Error`].
pub type Result<T> = core::result::Result<T, Error>;

/// Errors that can arise while producing or inspecting ledger proofs.
#[derive(Error, Debug)]
pub enum Error {
    // Raised when encoding/decoding data across the risc0 guest boundary fails
    // (e.g. decoding a receipt journal).
    #[error("risc0 failed to serde")]
    Risc0Serde(#[from] risc0_zkvm::serde::Error),
}

125
cl/ledger/src/input.rs Normal file
View File

@ -0,0 +1,125 @@
use proof_statements::nullifier::{NullifierPrivate, NullifierPublic};
use crate::error::Result;
const MAX_NOTE_COMMS: usize = 2usize.pow(8);
/// A STARK receipt attesting to the nullifier statement for one input:
/// the revealed nullifier is derived from a note whose commitment is a
/// member of the committed note-commitment set.
#[derive(Debug, Clone)]
pub struct InputNullifierProof {
    // Kept private so a proof can only be obtained via `prove_input_nullifier`
    // (or deserialization paths added deliberately later).
    receipt: risc0_zkvm::Receipt,
}
impl InputNullifierProof {
    /// Decode the public inputs (commitment-set root and nullifier) that the
    /// guest committed to the receipt journal.
    pub fn public(&self) -> Result<NullifierPublic> {
        let decoded = self.receipt.journal.decode()?;
        Ok(decoded)
    }

    /// A proof is accepted iff its journal decodes to exactly the expected
    /// public inputs AND the receipt verifies against the nullifier guest
    /// image id.
    pub fn verify(&self, expected_public_inputs: NullifierPublic) -> bool {
        match self.public() {
            Ok(public_inputs) => {
                public_inputs == expected_public_inputs
                    && self
                        .receipt
                        .verify(nomos_cl_risc0_proofs::NULLIFIER_ID)
                        .is_ok()
            }
            Err(_) => false,
        }
    }
}
/// Produce a STARK proof of the nullifier statement for `input`: the
/// nullifier derived from `input` belongs to a note whose commitment is a
/// member of `note_commitments`.
///
/// # Panics
///
/// Panics if `input`'s note commitment is not present in `note_commitments`,
/// or if the prover/executor environment fails to build.
pub fn prove_input_nullifier(
    input: &cl::InputWitness,
    note_commitments: &[cl::NoteCommitment],
) -> InputNullifierProof {
    let output = input.to_output_witness();
    let cm_leaves = note_commitment_leaves(note_commitments);

    // Locate our note commitment in the set and build its merkle membership
    // path; the guest recomputes the root from this path.
    let output_cm = output.commit_note();
    let cm_idx = note_commitments
        .iter()
        .position(|c| c == &output_cm)
        .expect("input note commitment must be present in the commitment set");
    let cm_path = cl::merkle::path(cm_leaves, cm_idx);

    // Private witness handed to the guest over the host/guest channel.
    let secrets = NullifierPrivate {
        nf_sk: input.nf_sk,
        output,
        cm_path,
    };

    let env = risc0_zkvm::ExecutorEnv::builder()
        .write(&secrets)
        .unwrap()
        .build()
        .unwrap();

    // Obtain the default prover.
    let prover = risc0_zkvm::default_prover();

    use std::time::Instant;
    let start_t = Instant::now();

    // Prove the nullifier guest ELF. Succinct opts produce a compact receipt
    // (at the cost of extra proving time).
    let opts = risc0_zkvm::ProverOpts::succinct();
    let prove_info = prover
        .prove_with_opts(env, nomos_cl_risc0_proofs::NULLIFIER_ELF, &opts)
        .unwrap();

    println!(
        "STARK prover time: {:.2?}, total_cycles: {}",
        start_t.elapsed(),
        prove_info.stats.total_cycles
    );

    InputNullifierProof {
        receipt: prove_info.receipt,
    }
}
/// Serialize each note commitment to bytes and pad the list out to the fixed
/// merkle-tree width (`MAX_NOTE_COMMS`) expected by the nullifier statement.
fn note_commitment_leaves(note_commitments: &[cl::NoteCommitment]) -> [[u8; 32]; MAX_NOTE_COMMS] {
    let note_comm_bytes = Vec::from_iter(note_commitments.iter().map(|c| c.as_bytes().to_vec()));
    // Return the padded leaves directly (no intermediate binding needed).
    cl::merkle::padded_leaves::<MAX_NOTE_COMMS>(&note_comm_bytes)
}
#[cfg(test)]
mod test {
    use proof_statements::nullifier::NullifierPublic;
    use rand::thread_rng;

    use super::{note_commitment_leaves, prove_input_nullifier};

    /// End-to-end: prove a nullifier for a single-note commitment set, then
    /// check the proof verifies for the right statement and not for a bogus one.
    #[test]
    fn test_input_nullifier_prover() {
        let mut rng = thread_rng();

        // Witness for the note being nullified.
        let witness = cl::InputWitness {
            note: cl::NoteWitness {
                balance: cl::BalanceWitness::random(32, "NMO", &mut rng),
                death_constraint: vec![],
                state: [0u8; 32],
            },
            nf_sk: cl::NullifierSecret::random(&mut rng),
            nonce: cl::NullifierNonce::random(&mut rng),
        };

        // The commitment set contains exactly our note.
        let commitments = vec![witness.to_output_witness().commit_note()];

        let proof = prove_input_nullifier(&witness, &commitments);

        // Accepts the correct (root, nullifier) statement...
        let correct = NullifierPublic {
            cm_root: cl::merkle::root(note_commitment_leaves(&commitments)),
            nf: witness.commit().nullifier,
        };
        assert!(proof.verify(correct));

        // ...and rejects the same root paired with an unrelated nullifier.
        let bogus = NullifierPublic {
            cm_root: cl::merkle::root(note_commitment_leaves(&commitments)),
            nf: cl::Nullifier::new(
                cl::NullifierSecret::random(&mut rng),
                cl::NullifierNonce::random(&mut rng),
            ),
        };
        assert!(!proof.verify(bogus));
    }
}

17
cl/ledger/src/lib.rs Normal file
View File

@ -0,0 +1,17 @@
pub mod error;
pub mod input;
/// Returns the sum of `left` and `right`.
///
/// This is the `cargo new --lib` scaffold function, kept only so the crate
/// has an example test until a real top-level API lands.
pub fn add(left: usize, right: usize) -> usize {
    [left, right].into_iter().sum()
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Sanity check for the scaffold `add` function.
    #[test]
    fn it_works() {
        assert_eq!(add(2, 2), 4);
    }
}

View File

@ -0,0 +1,8 @@
[package]
name = "proof_statements"
version = "0.1.0"
edition = "2021"
[dependencies]
cl = { path = "../cl" }
serde = { version = "1.0", features = ["derive"] }

View File

@ -0,0 +1 @@
pub mod nullifier;

View File

@ -0,0 +1,22 @@
use serde::{Deserialize, Serialize};
/// Public inputs of the nullifier statement.
///
/// For public input `nf` (nullifier) and `cm_root` (root of the merkle tree
/// over the commitment set), the prover has knowledge of
/// `output = (note, nf_pk, nonce)` and `path` s.t. the following constraints hold:
/// 0. nf_pk = hash(nf_sk)
/// 1. nf = hash(nonce||nf_sk)
/// 2. note_cm = output_commitment(output)
/// 3. verify_merkle_path(note_cm, root, path)
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct NullifierPublic {
    /// Root of the merkle tree over the note-commitment set.
    pub cm_root: [u8; 32],
    /// The nullifier being revealed.
    pub nf: cl::Nullifier,
    // TODO: we need a way to link this statement to a particular input. i.e. prove that the nullifier is actually derived from the input note.
}
/// Private witness of the nullifier statement, known only to the prover and
/// passed into the guest over the host/guest channel.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct NullifierPrivate {
    /// Nullifier secret key; its commitment must equal `output.nf_pk`.
    pub nf_sk: cl::NullifierSecret,
    /// The output (note, nf_pk, nonce) whose commitment is in the set.
    pub output: cl::OutputWitness,
    /// Merkle membership path from the note commitment leaf up to the root.
    pub cm_path: Vec<cl::merkle::PathNode>,
}

View File

@ -0,0 +1,11 @@
[package]
name = "nomos_cl_risc0_proofs"
version = "0.1.0"
edition = "2021"
[build-dependencies]
risc0-build = { version = "1.0" }
[package.metadata.risc0]
methods = ["nullifier"]

3
cl/risc0_proofs/build.rs Normal file
View File

@ -0,0 +1,3 @@
// Build script: compiles the guest programs listed under
// [package.metadata.risc0] in Cargo.toml (here: "nullifier") and generates
// the `methods.rs` bindings (guest ELF bytes + image IDs).
fn main() {
    risc0_build::embed_methods();
}

View File

@ -8,13 +8,12 @@ edition = "2021"
[dependencies]
risc0-zkvm = { version = "1.0", default-features = false, features = ['std'] }
serde = { version = "1.0", features = ["derive"] }
bincode = "1"
cl = { path = "../../../cl" }
cl = { path = "../../cl" }
proof_statements = { path = "../../proof_statements" }
[patch.crates-io]
# Placing these patch statement in the workspace Cargo.toml will add RISC Zero SHA-256 and bigint
# multiplication accelerator support for all downstream usages of the following crates.
# add RISC Zero accelerator support for all downstream usages of the following crates.
sha2 = { git = "https://github.com/risc0/RustCrypto-hashes", tag = "sha2-v0.10.8-risczero.0" }
# k256 = { git = "https://github.com/risc0/RustCrypto-elliptic-curves", tag = "k256/v0.13.3-risczero.0" }
crypto-bigint = { git = "https://github.com/risc0/RustCrypto-crypto-bigint", tag = "v0.5.5-risczero.0" }
curve25519-dalek = { git = "https://github.com/risc0/curve25519-dalek", tag = "curve25519-4.1.2-risczero.0" }

View File

@ -0,0 +1,28 @@
/// Nullifier Proof
///
/// Our goal: prove the nullifier nf was derived from a note that had previously been committed to.
///
/// More formally, nullifier statement says:
/// for public input `nf` (nullifier) and `root_cm` (root of merkle tree over commitment set).
/// the prover has knowledge of `output = (note, nf_pk, nonce)`, `nf` and `path` s.t. the following constraints hold
/// 0. nf_pk = hash(nf_sk)
/// 1. nf = hash(nonce||nf_sk)
/// 2. note_cm = output_commitment(output)
/// 3. verify_merkle_path(note_cm, root, path)
use cl::merkle;
use cl::nullifier::Nullifier;
use proof_statements::nullifier::{NullifierPrivate, NullifierPublic};
use risc0_zkvm::guest::env;
fn main() {
    // Read the private witness from the host. The public statement is
    // re-derived inside the guest and committed to the journal, so the host
    // never supplies (and cannot forge) the public inputs directly.
    let secret: NullifierPrivate = env::read();

    // Constraint 0: the claimed nullifier public key matches the secret key.
    assert_eq!(secret.output.nf_pk, secret.nf_sk.commit());

    // Constraints 2 & 3: recompute the note commitment and fold it up the
    // merkle path; the resulting root becomes a public input.
    let cm_out = secret.output.commit_note();
    let cm_leaf = merkle::leaf(cm_out.as_bytes());
    let cm_root = merkle::path_root(cm_leaf, &secret.cm_path);

    // Constraint 1: derive the nullifier from the secret key and note nonce.
    let nf = Nullifier::new(secret.nf_sk, secret.output.nonce);

    env::commit(&NullifierPublic { cm_root, nf });
}

View File

@ -0,0 +1 @@
include!(concat!(env!("OUT_DIR"), "/methods.rs"));

View File

@ -6,8 +6,8 @@ default-run = "host"
[dependencies]
methods = { path = "../methods" }
risc0-zkvm = { version = "1.0.1", features = ["prove"] }
risc0-groth16 = { version = "1.0.1" }
risc0-zkvm = { version = "1.0", features = ["prove", "metal"] }
risc0-groth16 = { version = "1.0" }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
serde = "1.0"
blake2 = "0.10"
@ -16,4 +16,4 @@ common = { path = "../common" }
tempfile = "3"
clap = { version = "4", features = ["derive"] }
rand = "0.8.5"
cl = { path = "../../cl" }
cl = { path = "../../cl/cl" }

View File

@ -10,7 +10,8 @@ use clap::Parser;
#[command(version, about, long_about = None)]
enum Action {
Stf,
Nullifier,
// Nullifier,
Transfer,
}
fn stf_prove_stark() {
@ -101,64 +102,66 @@ fn stf_prove_stark() {
receipt.verify(methods::METHOD_ID).unwrap();
}
fn nf_prove_stark() {
let mut rng = rand::thread_rng();
// fn nf_prove_stark() {
// let mut rng = rand::thread_rng();
let nf_sk = cl::NullifierSecret::random(&mut rng);
// let nf_sk = cl::NullifierSecret::random(&mut rng);
let output = cl::OutputWitness {
note: cl::NoteWitness {
balance: cl::BalanceWitness::random(10, "NMO", &mut rng),
death_constraint: vec![],
state: [0u8; 32],
},
nf_pk: nf_sk.commit(),
nonce: cl::NullifierNonce::random(&mut rng),
};
let output_cm = output.commit_note().as_bytes().to_vec();
let cm_set = cl::merkle::padded_leaves::<64>(&[output_cm]);
let cm_root = cl::merkle::root(cm_set);
let cm_path = cl::merkle::path(cm_set, 0);
let nf = cl::Nullifier::new(nf_sk, output.nonce);
// let output = cl::OutputWitness {
// note: cl::NoteWitness {
// balance: cl::BalanceWitness::random(10, "NMO", &mut rng),
// death_constraint: vec![],
// state: [0u8; 32],
// },
// nf_pk: nf_sk.commit(),
// nonce: cl::NullifierNonce::random(&mut rng),
// };
// let output_cm = output.commit_note().as_bytes().to_vec();
// let cm_set = cl::merkle::padded_leaves::<64>(&[output_cm]);
// let cm_root = cl::merkle::root(cm_set);
// let cm_path = cl::merkle::path(cm_set, 0);
// let nf = cl::Nullifier::new(nf_sk, output.nonce);
let env = ExecutorEnv::builder()
.write(&cm_root)
.unwrap()
.write(&nf)
.unwrap()
.write(&nf_sk)
.unwrap()
.write(&output)
.unwrap()
.write(&cm_path)
.unwrap()
.build()
.unwrap();
// let env = ExecutorEnv::builder()
// .write(&cm_root)
// .unwrap()
// .write(&nf)
// .unwrap()
// .write(&nf_sk)
// .unwrap()
// .write(&output)
// .unwrap()
// .write(&cm_path)
// .unwrap()
// .build()
// .unwrap();
// Obtain the default prover.
let prover = default_prover();
// // Obtain the default prover.
// let prover = default_prover();
use std::time::Instant;
let start_t = Instant::now();
// use std::time::Instant;
// let start_t = Instant::now();
// Proof information by proving the specified ELF binary.
// This struct contains the receipt along with statistics about execution of the guest
let opts = risc0_zkvm::ProverOpts::succinct();
let prove_info = prover
.prove_with_opts(env, methods::NULLIFIER_ELF, &opts)
.unwrap();
// // Proof information by proving the specified ELF binary.
// // This struct contains the receipt along with statistics about execution of the guest
// let opts = risc0_zkvm::ProverOpts::succinct();
// let prove_info = prover
// .prove_with_opts(env, methods::NULLIFIER_ELF, &opts)
// .unwrap();
println!("STARK prover time: {:.2?}", start_t.elapsed());
// extract the receipt.
let receipt = prove_info.receipt;
// println!("STARK prover time: {:.2?}", start_t.elapsed());
// // extract the receipt.
// let receipt = prove_info.receipt;
// TODO: Implement code for retrieving receipt journal here.
// // TODO: Implement code for retrieving receipt journal here.
std::fs::write("proof.stark", bincode::serialize(&receipt).unwrap()).unwrap();
// The receipt was verified at the end of proving, but the below code is an
// example of how someone else could verify this receipt.
receipt.verify(methods::NULLIFIER_ID).unwrap();
}
// std::fs::write("proof.stark", bincode::serialize(&receipt).unwrap()).unwrap();
// // The receipt was verified at the end of proving, but the below code is an
// // example of how someone else could verify this receipt.
// receipt.verify(methods::NULLIFIER_ID).unwrap();
// }
fn transfer_prove_stark() {}
fn main() {
// Initialize tracing. In order to view logs, run `RUST_LOG=info cargo run`
@ -170,7 +173,8 @@ fn main() {
match action {
Action::Stf => stf_prove_stark(),
Action::Nullifier => nf_prove_stark(),
// Action::Nullifier => nf_prove_stark(),
Action::Transfer => transfer_prove_stark(),
}
}

View File

@ -7,4 +7,4 @@ edition = "2021"
risc0-build = { version = "1.0" }
[package.metadata.risc0]
methods = ["guest", "nullifier"]
methods = ["guest"]

View File

@ -13,7 +13,7 @@ blake2 = "0.10"
serde = { version = "1.0", features = ["derive"] }
bincode = "1"
common = { path = "../../common" }
cl = { path = "../../../cl" }
cl = { path = "../../../cl/cl" }
[patch.crates-io]
# Placing these patch statement in the workspace Cargo.toml will add RISC Zero SHA-256 and bigint

View File

@ -1,9 +1,9 @@
use blake2::{Blake2s256, Digest};
use risc0_zkvm::guest::env;
use common::*;
use cl::merkle;
use cl::input::InputWitness;
use cl::merkle;
use cl::output::OutputWitness;
use common::*;
use risc0_zkvm::guest::env;
/// Public Inputs:
/// * ptx_root: the root of the partial tx merkle tree of inputs/outputs
@ -31,8 +31,11 @@ fn execute(
// a transfer is included as part of the same transaction in the cl
let in_comm = in_note.commit().to_bytes();
eprintln!("input comm: {}", env::cycle_count());
assert!(merkle::verify_path(merkle::leaf(&in_comm), &in_ptx_path, input_root));
assert_eq!(
merkle::path_root(merkle::leaf(&in_comm), &in_ptx_path),
input_root
);
eprintln!("input merkle path: {}", env::cycle_count());
// check the commitments match the actual data
@ -61,8 +64,11 @@ fn execute(
// (this is done in the death condition to disallow burning)
let out_comm = out_note.commit().to_bytes();
eprintln!("output comm: {}", env::cycle_count());
assert!(merkle::verify_path(merkle::leaf(&out_comm), &out_ptx_path, output_root));
assert_eq!(
merkle::path_root(merkle::leaf(&out_comm), &out_ptx_path),
output_root
);
eprintln!("out merkle proof: {}", env::cycle_count());
}
@ -81,8 +87,19 @@ fn main() {
let state: State = env::read();
let journal: Journal = env::read();
eprintln!("parse input: {}", env::cycle_count());
execute(ptx_root, input_root, output_root, in_ptx_path, out_ptx_path, in_note, out_note, input, state, journal);
eprintln!("parse input: {}", env::cycle_count());
execute(
ptx_root,
input_root,
output_root,
in_ptx_path,
out_ptx_path,
in_note,
out_note,
input,
state,
journal,
);
}
fn calculate_state_hash(state: &State) -> [u8; 32] {

View File

@ -1,57 +0,0 @@
/// Nullifier Proof
///
/// Our goal: prove the nullifier nf was derived from a note that had previously been committed to.
///
/// More formally, nullifier statement says:
/// for public input `nf` (nullifier) and `root_cm` (root of merkle tree over commitment set).
/// the prover has knowledge of `output = (note, nf_pk, nonce)`, `nf` and `path` s.t. that the following constraints hold
/// 0. nf_pk = hash(nf_sk)
/// 1. nf = hash(nonce||nf_sk)
/// 2. note_cm = output_commitment(output)
/// 3. verify_merkle_path(note_cm, root, path)
use cl::merkle;
use cl::nullifier::{Nullifier, NullifierSecret};
use cl::output::OutputWitness;
use risc0_zkvm::guest::env;
fn execute(
// public
cm_root: [u8; 32],
nf: Nullifier,
// private
nf_sk: NullifierSecret,
output: OutputWitness,
cm_path: Vec<merkle::PathNode>,
) {
eprintln!("start exec: {}", env::cycle_count());
assert_eq!(output.nf_pk, nf_sk.commit());
eprintln!("output nullifier: {}", env::cycle_count());
assert_eq!(nf, Nullifier::new(nf_sk, output.nonce));
eprintln!("nullifier: {}", env::cycle_count());
let cm_out = output.commit_note();
eprintln!("out_cm: {}", env::cycle_count());
assert!(merkle::verify_path(
merkle::leaf(cm_out.as_bytes()),
&cm_path,
cm_root
));
eprintln!("nullifier merkle path: {}", env::cycle_count());
}
fn main() {
// public input
let cm_root: [u8; 32] = env::read();
let nf: Nullifier = env::read();
// private input
let nf_sk: NullifierSecret = env::read();
let output: OutputWitness = env::read();
let cm_path: Vec<merkle::PathNode> = env::read();
eprintln!("parse input: {}", env::cycle_count());
execute(cm_root, nf, nf_sk, output, cm_path);
}