Mirror of https://github.com/logos-co/nomos-pocs.git, synced 2025-02-22 14:08:23 +00:00
Sync Bedrock with latest spec (#49)
* update cl
* move zoneid into NoteWitness
* Update emmarin/cl/ledger_proof_statements/src/ledger.rs
  Co-authored-by: davidrusu <davidrusu.me@gmail.com>
* update
* update note cm order
* remove NoteWitness
* mv indexed and merkle into /ds/; rework padded_leaves interface
* ledger cross zone update test is now passing
* clippy
* pad nullifiers in cross zone update test
* print user and total cycles for each proof
* only publish sync logs for cross zone bundles

---------

Co-authored-by: davidrusu <davidrusu.me@gmail.com>
This commit is contained in:
parent 3e7c8c2351
commit dc2fd35894
@ -6,7 +6,7 @@ members = [
    "ledger_proof_statements",
    "risc0_proofs",
    "bundle_risc0_proof",
    "ptx_risc0_proof",
    "tx_risc0_proof",
    "ledger_validity_proof"
]
@ -10,7 +10,7 @@ risc0-zkvm = { version = "1.0", default-features = false, features = ['std'] }
serde = { version = "1.0", features = ["derive"] }
cl = { path = "../../cl" }
ledger_proof_statements = { path = "../../ledger_proof_statements" }
nomos_cl_ptx_risc0_proof = { path = "../../ptx_risc0_proof" }
nomos_cl_tx_risc0_proof = { path = "../../tx_risc0_proof" }


[patch.crates-io]
@ -1,49 +1,12 @@
use cl::cl::BalanceWitness;
use cl::zone_layer::notes::ZoneId;
use ledger_proof_statements::bundle::{BundlePrivate, BundlePublic, LedgerUpdate};
use cl::crust::BundleWitness;
use risc0_zkvm::{guest::env, serde};
use std::collections::BTreeMap;

fn main() {
    let bundle_private: BundlePrivate = env::read();
    let bundle_id = bundle_private.id();
    let bundle_private: BundleWitness = env::read();

    let BundlePrivate { bundle, balances } = bundle_private;
    assert_eq!(bundle.len(), balances.len());

    let mut zone_ledger_updates: BTreeMap<ZoneId, LedgerUpdate> = BTreeMap::new();

    for (ptx_public, balance) in bundle.into_iter().zip(balances.iter()) {
        assert_eq!(ptx_public.ptx.balance, balance.commit());
        env::verify(
            nomos_cl_ptx_risc0_proof::PTX_ID,
            &serde::to_vec(&ptx_public).unwrap(),
        )
        .unwrap();

        for (input, cm_mmr) in ptx_public.ptx.inputs.iter().zip(ptx_public.cm_mmrs) {
            let zone_ledger_update = zone_ledger_updates.entry(input.zone_id).or_default();

            zone_ledger_update.nullifiers.push(input.nullifier);

            zone_ledger_update
                .cm_roots
                .extend(cm_mmr.roots.iter().map(|r| r.root));
        }

        for output in &ptx_public.ptx.outputs {
            zone_ledger_updates
                .entry(output.zone_id)
                .or_default()
                .commitments
                .push(output.note_comm);
        }
    for tx in &bundle_private.txs {
        env::verify(nomos_cl_tx_risc0_proof::TX_ID, &serde::to_vec(&tx).unwrap()).unwrap();
    }

    assert!(BalanceWitness::combine(balances, [0u8; 16]).is_zero());

    env::commit(&BundlePublic {
        bundle_id,
        zone_ledger_updates,
    });
    env::commit(&bundle_private.commit());
}
@ -16,6 +16,7 @@ sha2 = "0.10"
lazy_static = "1.5.0"
risc0-zkvm = "1.2"
itertools = "0.14"
digest = "0.10"


[dev-dependencies]
@ -1,149 +0,0 @@
|
||||
use rand_core::CryptoRngCore;
|
||||
use risc0_zkvm::sha::rust_crypto::{Digest, Sha256};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::cl::PartialTxWitness;
|
||||
|
||||
pub type Value = u64;
|
||||
pub type Unit = [u8; 32];
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
|
||||
pub struct Balance([u8; 32]);
|
||||
|
||||
impl Balance {
|
||||
pub fn to_bytes(&self) -> [u8; 32] {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
|
||||
pub struct UnitBalance {
|
||||
pub unit: Unit,
|
||||
pub pos: u64,
|
||||
pub neg: u64,
|
||||
}
|
||||
|
||||
impl UnitBalance {
|
||||
pub fn is_zero(&self) -> bool {
|
||||
self.pos == self.neg
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
|
||||
pub struct BalanceWitness {
|
||||
pub balances: Vec<UnitBalance>,
|
||||
pub blinding: [u8; 16],
|
||||
}
|
||||
|
||||
impl BalanceWitness {
|
||||
pub fn random_blinding(mut rng: impl CryptoRngCore) -> [u8; 16] {
|
||||
let mut blinding = [0u8; 16];
|
||||
rng.fill_bytes(&mut blinding);
|
||||
|
||||
blinding
|
||||
}
|
||||
|
||||
pub fn zero(blinding: [u8; 16]) -> Self {
|
||||
Self {
|
||||
balances: Default::default(),
|
||||
blinding,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_ptx(ptx: &PartialTxWitness, blinding: [u8; 16]) -> Self {
|
||||
let mut balance = Self::zero(blinding);
|
||||
|
||||
for input in ptx.inputs.iter() {
|
||||
balance.insert_negative(input.note.unit, input.note.value);
|
||||
}
|
||||
|
||||
for output in ptx.outputs.iter() {
|
||||
balance.insert_positive(output.note.unit, output.note.value);
|
||||
}
|
||||
|
||||
balance.clear_zeros();
|
||||
|
||||
balance
|
||||
}
|
||||
|
||||
pub fn insert_positive(&mut self, unit: Unit, value: Value) {
|
||||
for unit_bal in self.balances.iter_mut() {
|
||||
if unit_bal.unit == unit {
|
||||
unit_bal.pos += value;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Unit was not found, so we must create one.
|
||||
self.balances.push(UnitBalance {
|
||||
unit,
|
||||
pos: value,
|
||||
neg: 0,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn insert_negative(&mut self, unit: Unit, value: Value) {
|
||||
for unit_bal in self.balances.iter_mut() {
|
||||
if unit_bal.unit == unit {
|
||||
unit_bal.neg += value;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
self.balances.push(UnitBalance {
|
||||
unit,
|
||||
pos: 0,
|
||||
neg: value,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn clear_zeros(&mut self) {
|
||||
let mut i = 0usize;
|
||||
while i < self.balances.len() {
|
||||
if self.balances[i].is_zero() {
|
||||
self.balances.swap_remove(i);
|
||||
// don't increment `i` since the last element has been swapped into the
|
||||
// `i`'th place
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn combine(balances: impl IntoIterator<Item = Self>, blinding: [u8; 16]) -> Self {
|
||||
let mut combined = BalanceWitness::zero(blinding);
|
||||
|
||||
for balance in balances {
|
||||
for unit_bal in balance.balances.iter() {
|
||||
if unit_bal.pos > unit_bal.neg {
|
||||
combined.insert_positive(unit_bal.unit, unit_bal.pos - unit_bal.neg);
|
||||
} else {
|
||||
combined.insert_negative(unit_bal.unit, unit_bal.neg - unit_bal.pos);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
combined.clear_zeros();
|
||||
|
||||
combined
|
||||
}
|
||||
|
||||
pub fn is_zero(&self) -> bool {
|
||||
self.balances.is_empty()
|
||||
}
|
||||
|
||||
pub fn commit(&self) -> Balance {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(b"NOMOS_CL_BAL_COMMIT");
|
||||
|
||||
for unit_balance in self.balances.iter() {
|
||||
hasher.update(unit_balance.unit);
|
||||
hasher.update(unit_balance.pos.to_le_bytes());
|
||||
hasher.update(unit_balance.neg.to_le_bytes());
|
||||
}
|
||||
hasher.update(self.blinding);
|
||||
|
||||
let commit_bytes: [u8; 32] = hasher.finalize().into();
|
||||
Balance(commit_bytes)
|
||||
}
|
||||
}
|
@ -1,6 +0,0 @@
use curve25519_dalek::ristretto::RistrettoPoint;
use sha2::Sha512;

pub fn hash_to_curve(bytes: &[u8]) -> RistrettoPoint {
    RistrettoPoint::hash_from_bytes::<Sha512>(bytes)
}
@ -1,4 +0,0 @@
#[derive(Debug)]
pub enum Error {
    ProofFailed,
}
@ -1,97 +0,0 @@
|
||||
/// This module defines the partial transaction structure.
|
||||
///
|
||||
/// Partial transactions, as the name suggests, are transactions
|
||||
/// which on their own may not balance (i.e. \sum inputs != \sum outputs)
|
||||
use crate::{
|
||||
cl::{
|
||||
note::{Constraint, NoteWitness},
|
||||
nullifier::{Nullifier, NullifierSecret},
|
||||
Nonce, NoteCommitment, OutputWitness,
|
||||
},
|
||||
zone_layer::notes::ZoneId,
|
||||
};
|
||||
use risc0_zkvm::sha::rust_crypto::{Digest, Sha256};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Input {
|
||||
pub nullifier: Nullifier,
|
||||
pub constraint: Constraint,
|
||||
pub zone_id: ZoneId,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct InputWitness {
|
||||
pub note: NoteWitness,
|
||||
pub nf_sk: NullifierSecret,
|
||||
pub zone_id: ZoneId,
|
||||
}
|
||||
|
||||
impl InputWitness {
|
||||
pub fn new(note: NoteWitness, nf_sk: NullifierSecret, zone_id: ZoneId) -> Self {
|
||||
Self {
|
||||
note,
|
||||
nf_sk,
|
||||
zone_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_output(output: OutputWitness, nf_sk: NullifierSecret) -> Self {
|
||||
assert_eq!(nf_sk.commit(), output.nf_pk);
|
||||
Self::new(output.note, nf_sk, output.zone_id)
|
||||
}
|
||||
|
||||
pub fn public(output: OutputWitness) -> Self {
|
||||
let nf_sk = NullifierSecret::zero();
|
||||
assert_eq!(nf_sk.commit(), output.nf_pk); // ensure the output was a public UTXO
|
||||
Self::new(output.note, nf_sk, output.zone_id)
|
||||
}
|
||||
|
||||
pub fn evolved_nonce(&self, domain: &[u8]) -> Nonce {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(b"NOMOS_COIN_EVOLVE");
|
||||
hasher.update(domain);
|
||||
hasher.update(self.nf_sk.0);
|
||||
hasher.update(self.note.commit(&self.zone_id, self.nf_sk.commit()).0);
|
||||
|
||||
let nonce_bytes: [u8; 32] = hasher.finalize().into();
|
||||
Nonce::from_bytes(nonce_bytes)
|
||||
}
|
||||
|
||||
pub fn evolve_output(&self, domain: &[u8]) -> OutputWitness {
|
||||
OutputWitness {
|
||||
note: NoteWitness {
|
||||
nonce: self.evolved_nonce(domain),
|
||||
..self.note
|
||||
},
|
||||
nf_pk: self.nf_sk.commit(),
|
||||
zone_id: self.zone_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn nullifier(&self) -> Nullifier {
|
||||
Nullifier::new(&self.zone_id, self.nf_sk, self.note_commitment())
|
||||
}
|
||||
|
||||
pub fn commit(&self) -> Input {
|
||||
Input {
|
||||
nullifier: self.nullifier(),
|
||||
constraint: self.note.constraint,
|
||||
zone_id: self.zone_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn note_commitment(&self) -> NoteCommitment {
|
||||
self.note.commit(&self.zone_id, self.nf_sk.commit())
|
||||
}
|
||||
}
|
||||
|
||||
impl Input {
|
||||
pub fn to_bytes(&self) -> [u8; 96] {
|
||||
let mut bytes = [0u8; 96];
|
||||
bytes[..32].copy_from_slice(self.nullifier.as_bytes());
|
||||
bytes[32..64].copy_from_slice(&self.constraint.0);
|
||||
bytes[64..96].copy_from_slice(&self.zone_id);
|
||||
bytes
|
||||
}
|
||||
}
|
@ -1,133 +0,0 @@
|
||||
use crate::cl::merkle;
|
||||
use risc0_zkvm::sha::rust_crypto::{Digest, Sha256};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)]
|
||||
pub struct MMR {
|
||||
pub roots: Vec<Root>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Root {
|
||||
pub root: [u8; 32],
|
||||
pub height: u8,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct MMRProof {
|
||||
pub path: Vec<merkle::PathNode>,
|
||||
}
|
||||
|
||||
impl MMRProof {
|
||||
pub fn root(&self, elem: &[u8]) -> [u8; 32] {
|
||||
let leaf = merkle::leaf(elem);
|
||||
merkle::path_root(leaf, &self.path)
|
||||
}
|
||||
}
|
||||
|
||||
impl MMR {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn push(&mut self, elem: &[u8]) -> MMRProof {
|
||||
let new_root = Root {
|
||||
root: merkle::leaf(elem),
|
||||
height: 1,
|
||||
};
|
||||
self.roots.push(new_root);
|
||||
|
||||
let mut path = vec![];
|
||||
|
||||
for i in (1..self.roots.len()).rev() {
|
||||
if self.roots[i].height == self.roots[i - 1].height {
|
||||
path.push(merkle::PathNode::Left(self.roots[i - 1].root));
|
||||
|
||||
self.roots[i - 1] = Root {
|
||||
root: merkle::node(self.roots[i - 1].root, self.roots[i].root),
|
||||
height: self.roots[i - 1].height + 1,
|
||||
};
|
||||
|
||||
self.roots.remove(i);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
MMRProof { path }
|
||||
}
|
||||
|
||||
pub fn verify_proof(&self, elem: &[u8], proof: &MMRProof) -> bool {
|
||||
let path_len = proof.path.len();
|
||||
let root = proof.root(elem);
|
||||
|
||||
for mmr_root in self.roots.iter() {
|
||||
if mmr_root.height == (path_len + 1) as u8 {
|
||||
return mmr_root.root == root;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub fn commit(&self) -> [u8; 32] {
|
||||
// TODO: bagging the peaks
|
||||
let mut hasher = Sha256::new();
|
||||
for mmr_root in self.roots.iter() {
|
||||
hasher.update(mmr_root.root);
|
||||
hasher.update(mmr_root.height.to_le_bytes());
|
||||
}
|
||||
hasher.finalize().into()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_mmr_push() {
|
||||
let mut mmr = MMR::new();
|
||||
let proof = mmr.push(b"hello");
|
||||
|
||||
assert_eq!(mmr.roots.len(), 1);
|
||||
assert_eq!(mmr.roots[0].height, 1);
|
||||
assert_eq!(mmr.roots[0].root, merkle::leaf(b"hello"));
|
||||
assert!(mmr.verify_proof(b"hello", &proof));
|
||||
|
||||
let proof = mmr.push(b"world");
|
||||
|
||||
assert_eq!(mmr.roots.len(), 1);
|
||||
assert_eq!(mmr.roots[0].height, 2);
|
||||
assert_eq!(
|
||||
mmr.roots[0].root,
|
||||
merkle::node(merkle::leaf(b"hello"), merkle::leaf(b"world"))
|
||||
);
|
||||
assert!(mmr.verify_proof(b"world", &proof));
|
||||
|
||||
let proof = mmr.push(b"!");
|
||||
|
||||
assert_eq!(mmr.roots.len(), 2);
|
||||
assert_eq!(mmr.roots[0].height, 2);
|
||||
assert_eq!(
|
||||
mmr.roots[0].root,
|
||||
merkle::node(merkle::leaf(b"hello"), merkle::leaf(b"world"))
|
||||
);
|
||||
assert_eq!(mmr.roots[1].height, 1);
|
||||
assert_eq!(mmr.roots[1].root, merkle::leaf(b"!"));
|
||||
assert!(mmr.verify_proof(b"!", &proof));
|
||||
|
||||
let proof = mmr.push(b"!");
|
||||
|
||||
assert_eq!(mmr.roots.len(), 1);
|
||||
assert_eq!(mmr.roots[0].height, 3);
|
||||
assert_eq!(
|
||||
mmr.roots[0].root,
|
||||
merkle::node(
|
||||
merkle::node(merkle::leaf(b"hello"), merkle::leaf(b"world")),
|
||||
merkle::node(merkle::leaf(b"!"), merkle::leaf(b"!"))
|
||||
)
|
||||
);
|
||||
assert!(mmr.verify_proof(b"!", &proof));
|
||||
}
|
||||
}
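The commit message's "mv indexed and merkle into /ds/" applies to this MMR as well: the new crust/tx.rs further down pulls it in as crate::ds::mmr. Below is a minimal host-side sketch of driving the API shown above, assuming the post-move path cl::ds::mmr and unchanged push/verify semantics (both are assumptions, not part of this diff).

// Sketch only: the `cl::ds::mmr` path is assumed from the `crate::ds::mmr`
// import in crust/tx.rs later in this diff.
use cl::ds::mmr::MMR;

fn frontier_example() {
    let mut mmr = MMR::new();

    // Each push returns a proof tied to the peaks at insertion time.
    let first_proof = mmr.push(b"note commitment A");
    let second_proof = mmr.push(b"note commitment B");

    // The second proof verifies against the merged height-2 peak.
    assert!(mmr.verify_proof(b"note commitment B", &second_proof));

    // The first proof was issued against a single height-1 peak that has
    // since been merged away, so it no longer verifies; callers are expected
    // to refresh their paths against the current frontier.
    assert!(!mmr.verify_proof(b"note commitment A", &first_proof));
}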
|
@ -1,21 +0,0 @@
pub mod balance;
pub mod crypto;
pub mod error;
pub mod indexed;
pub mod input;
pub mod merkle;
pub mod mmr;
pub mod note;
pub mod nullifier;
pub mod output;
pub mod partial_tx;
pub mod sparse_merkle;

pub use balance::{Balance, BalanceWitness};
pub use input::{Input, InputWitness};
pub use note::{Constraint, Nonce, NoteCommitment, NoteWitness};
pub use nullifier::{Nullifier, NullifierCommitment, NullifierSecret};
pub use output::{Output, OutputWitness};
pub use partial_tx::{
    PartialTx, PartialTxInputWitness, PartialTxOutputWitness, PartialTxWitness, PtxRoot,
};
@ -1,171 +0,0 @@
|
||||
use crate::cl::{balance::Unit, nullifier::NullifierCommitment};
|
||||
use rand::RngCore;
|
||||
use risc0_zkvm::sha::rust_crypto::{Digest, Sha256};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
pub struct Constraint(pub [u8; 32]);
|
||||
|
||||
impl Constraint {
|
||||
pub fn from_vk(constraint_vk: &[u8]) -> Self {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(b"NOMOS_CL_CONSTRAINT_COMMIT");
|
||||
hasher.update(constraint_vk);
|
||||
let constraint_cm: [u8; 32] = hasher.finalize().into();
|
||||
|
||||
Self(constraint_cm)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn derive_unit(unit: &str) -> Unit {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(b"NOMOS_CL_UNIT");
|
||||
hasher.update(unit.as_bytes());
|
||||
let unit: Unit = hasher.finalize().into();
|
||||
unit
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
pub struct NoteCommitment(pub [u8; 32]);
|
||||
|
||||
impl NoteCommitment {
|
||||
pub fn as_bytes(&self) -> &[u8; 32] {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
|
||||
pub struct NoteWitness {
|
||||
pub value: u64,
|
||||
pub unit: Unit,
|
||||
pub constraint: Constraint,
|
||||
pub state: [u8; 32],
|
||||
pub nonce: Nonce,
|
||||
}
|
||||
|
||||
impl NoteWitness {
|
||||
pub fn new(
|
||||
value: u64,
|
||||
unit: Unit,
|
||||
constraint: Constraint,
|
||||
state: [u8; 32],
|
||||
nonce: Nonce,
|
||||
) -> Self {
|
||||
Self {
|
||||
value,
|
||||
unit,
|
||||
constraint,
|
||||
state,
|
||||
nonce,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn basic(value: u64, unit: Unit, rng: impl RngCore) -> Self {
|
||||
let constraint = Constraint([0u8; 32]);
|
||||
let nonce = Nonce::random(rng);
|
||||
Self::new(value, unit, constraint, [0u8; 32], nonce)
|
||||
}
|
||||
|
||||
pub fn stateless(value: u64, unit: Unit, constraint: Constraint, rng: impl RngCore) -> Self {
|
||||
Self::new(value, unit, constraint, [0u8; 32], Nonce::random(rng))
|
||||
}
|
||||
|
||||
pub fn commit(&self, tag: &dyn AsRef<[u8]>, nf_pk: NullifierCommitment) -> NoteCommitment {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(b"NOMOS_CL_NOTE_CM");
|
||||
hasher.update(tag.as_ref());
|
||||
|
||||
// COMMIT TO BALANCE
|
||||
hasher.update(self.value.to_le_bytes());
|
||||
hasher.update(self.unit);
|
||||
// Important! we don't commit to the balance blinding factor as that may make the notes linkable.
|
||||
|
||||
// COMMIT TO STATE
|
||||
hasher.update(self.state);
|
||||
|
||||
// COMMIT TO CONSTRAINT
|
||||
hasher.update(self.constraint.0);
|
||||
|
||||
// COMMIT TO NONCE
|
||||
hasher.update(self.nonce.as_bytes());
|
||||
|
||||
// COMMIT TO NULLIFIER
|
||||
hasher.update(nf_pk.as_bytes());
|
||||
|
||||
let commit_bytes: [u8; 32] = hasher.finalize().into();
|
||||
NoteCommitment(commit_bytes)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Nonce([u8; 32]);
|
||||
|
||||
impl Nonce {
|
||||
pub fn random(mut rng: impl RngCore) -> Self {
|
||||
let mut nonce = [0u8; 32];
|
||||
rng.fill_bytes(&mut nonce);
|
||||
Self(nonce)
|
||||
}
|
||||
|
||||
pub fn as_bytes(&self) -> &[u8; 32] {
|
||||
&self.0
|
||||
}
|
||||
|
||||
pub fn from_bytes(bytes: [u8; 32]) -> Self {
|
||||
Self(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
// use super::*;
|
||||
// use crate::cl::nullifier::NullifierSecret;
|
||||
|
||||
// #[test]
|
||||
// fn test_note_commit_permutations() {
|
||||
// let (nmo, eth) = (derive_unit("NMO"), derive_unit("ETH"));
|
||||
|
||||
// let mut rng = rand::thread_rng();
|
||||
|
||||
// let nf_pk = NullifierSecret::random(&mut rng).commit();
|
||||
|
||||
// let reference_note = NoteWitness::basic(32, nmo, &mut rng);
|
||||
|
||||
// // different notes under same nullifier produce different commitments
|
||||
// let mutation_tests = [
|
||||
// NoteWitness {
|
||||
// value: 12,
|
||||
// ..reference_note
|
||||
// },
|
||||
// NoteWitness {
|
||||
// unit: eth,
|
||||
// ..reference_note
|
||||
// },
|
||||
// NoteWitness {
|
||||
// constraint: Constraint::from_vk(&[1u8; 32]),
|
||||
// ..reference_note
|
||||
// },
|
||||
// NoteWitness {
|
||||
// state: [1u8; 32],
|
||||
// ..reference_note
|
||||
// },
|
||||
// NoteWitness {
|
||||
// nonce: Nonce::random(&mut rng),
|
||||
// ..reference_note
|
||||
// },
|
||||
// ];
|
||||
|
||||
// for n in mutation_tests {
|
||||
// assert_ne!(n.commit(nf_pk), reference_note.commit(nf_pk));
|
||||
// }
|
||||
|
||||
// // commitment to same note with different nullifiers produce different commitments
|
||||
|
||||
// let other_nf_pk = NullifierSecret::random(&mut rng).commit();
|
||||
|
||||
// assert_ne!(
|
||||
// reference_note.commit(nf_pk),
|
||||
// reference_note.commit(other_nf_pk)
|
||||
// );
|
||||
// }
|
||||
}
|
@ -1,62 +0,0 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
cl::{
|
||||
note::{NoteCommitment, NoteWitness},
|
||||
nullifier::NullifierCommitment,
|
||||
NullifierSecret,
|
||||
},
|
||||
zone_layer::notes::ZoneId,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Output {
|
||||
pub zone_id: ZoneId,
|
||||
pub note_comm: NoteCommitment,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct OutputWitness {
|
||||
pub note: NoteWitness,
|
||||
pub nf_pk: NullifierCommitment,
|
||||
pub zone_id: ZoneId,
|
||||
}
|
||||
|
||||
impl OutputWitness {
|
||||
pub fn new(note: NoteWitness, nf_pk: NullifierCommitment, zone_id: ZoneId) -> Self {
|
||||
Self {
|
||||
note,
|
||||
nf_pk,
|
||||
zone_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn public(note: NoteWitness, zone_id: ZoneId) -> Self {
|
||||
let nf_pk = NullifierSecret::zero().commit();
|
||||
Self {
|
||||
note,
|
||||
nf_pk,
|
||||
zone_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn commit_note(&self) -> NoteCommitment {
|
||||
self.note.commit(&self.zone_id, self.nf_pk)
|
||||
}
|
||||
|
||||
pub fn commit(&self) -> Output {
|
||||
Output {
|
||||
zone_id: self.zone_id,
|
||||
note_comm: self.commit_note(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Output {
|
||||
pub fn to_bytes(&self) -> [u8; 64] {
|
||||
let mut bytes = [0u8; 64];
|
||||
bytes[..32].copy_from_slice(&self.zone_id);
|
||||
bytes[32..].copy_from_slice(&self.note_comm.0);
|
||||
bytes
|
||||
}
|
||||
}
|
@ -1,217 +0,0 @@
|
||||
use rand_core::{CryptoRngCore, RngCore};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::cl::{
|
||||
balance::{Balance, BalanceWitness},
|
||||
input::{Input, InputWitness},
|
||||
merkle,
|
||||
output::{Output, OutputWitness},
|
||||
};
|
||||
|
||||
pub const MAX_INPUTS: usize = 8;
|
||||
pub const MAX_OUTPUTS: usize = 8;
|
||||
|
||||
/// The partial transaction commitment couples an input to a partial transaction.
|
||||
/// Prevents partial tx unbundling.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
|
||||
pub struct PtxRoot(pub [u8; 32]);
|
||||
|
||||
impl From<[u8; 32]> for PtxRoot {
|
||||
fn from(bytes: [u8; 32]) -> Self {
|
||||
Self(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
impl PtxRoot {
|
||||
pub fn random(mut rng: impl RngCore) -> Self {
|
||||
let mut sk = [0u8; 32];
|
||||
rng.fill_bytes(&mut sk);
|
||||
Self(sk)
|
||||
}
|
||||
|
||||
pub fn hex(&self) -> String {
|
||||
hex::encode(self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct PartialTx {
|
||||
pub inputs: Vec<Input>,
|
||||
pub outputs: Vec<Output>,
|
||||
pub balance: Balance,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct PartialTxWitness {
|
||||
pub inputs: Vec<InputWitness>,
|
||||
pub outputs: Vec<OutputWitness>,
|
||||
pub balance_blinding: [u8; 16],
|
||||
}
|
||||
|
||||
impl PartialTxWitness {
|
||||
pub fn random(
|
||||
inputs: Vec<InputWitness>,
|
||||
outputs: Vec<OutputWitness>,
|
||||
mut rng: impl CryptoRngCore,
|
||||
) -> Self {
|
||||
Self {
|
||||
inputs,
|
||||
outputs,
|
||||
balance_blinding: BalanceWitness::random_blinding(&mut rng),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn balance(&self) -> BalanceWitness {
|
||||
BalanceWitness::from_ptx(self, self.balance_blinding)
|
||||
}
|
||||
|
||||
pub fn commit(&self) -> PartialTx {
|
||||
PartialTx {
|
||||
inputs: self.inputs.iter().map(InputWitness::commit).collect(),
|
||||
outputs: self.outputs.iter().map(OutputWitness::commit).collect(),
|
||||
balance: self.balance().commit(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn input_witness(&self, idx: usize) -> PartialTxInputWitness {
|
||||
let input_bytes =
|
||||
Vec::from_iter(self.inputs.iter().map(|i| i.commit().to_bytes().to_vec()));
|
||||
assert!(input_bytes.len() <= MAX_INPUTS);
|
||||
let input_merkle_leaves = merkle::padded_leaves(&input_bytes);
|
||||
|
||||
let path = merkle::path(&input_merkle_leaves, idx);
|
||||
let input = self.inputs[idx];
|
||||
PartialTxInputWitness { input, path }
|
||||
}
|
||||
|
||||
pub fn output_witness(&self, idx: usize) -> PartialTxOutputWitness {
|
||||
let output_bytes =
|
||||
Vec::from_iter(self.outputs.iter().map(|o| o.commit().to_bytes().to_vec()));
|
||||
assert!(output_bytes.len() <= MAX_OUTPUTS);
|
||||
let output_merkle_leaves = merkle::padded_leaves(&output_bytes);
|
||||
|
||||
let path = merkle::path(&output_merkle_leaves, idx);
|
||||
let output = self.outputs[idx];
|
||||
PartialTxOutputWitness { output, path }
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialTx {
|
||||
pub fn input_root(&self) -> [u8; 32] {
|
||||
let input_bytes =
|
||||
Vec::from_iter(self.inputs.iter().map(Input::to_bytes).map(Vec::from_iter));
|
||||
let input_merkle_leaves = merkle::padded_leaves(&input_bytes);
|
||||
assert!(input_merkle_leaves.len() <= MAX_INPUTS);
|
||||
merkle::root(&input_merkle_leaves)
|
||||
}
|
||||
|
||||
pub fn output_root(&self) -> [u8; 32] {
|
||||
let output_bytes = Vec::from_iter(
|
||||
self.outputs
|
||||
.iter()
|
||||
.map(Output::to_bytes)
|
||||
.map(Vec::from_iter),
|
||||
);
|
||||
let output_merkle_leaves = merkle::padded_leaves(&output_bytes);
|
||||
assert!(output_merkle_leaves.len() <= MAX_OUTPUTS);
|
||||
merkle::root(&output_merkle_leaves)
|
||||
}
|
||||
|
||||
pub fn root(&self) -> PtxRoot {
|
||||
let input_root = self.input_root();
|
||||
let output_root = self.output_root();
|
||||
let root = merkle::node(input_root, output_root);
|
||||
PtxRoot(root)
|
||||
}
|
||||
}
|
||||
|
||||
/// An input to a partial transaction
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct PartialTxInputWitness {
|
||||
pub input: InputWitness,
|
||||
pub path: Vec<merkle::PathNode>,
|
||||
}
|
||||
|
||||
impl PartialTxInputWitness {
|
||||
pub fn input_root(&self) -> [u8; 32] {
|
||||
let leaf = merkle::leaf(&self.input.commit().to_bytes());
|
||||
merkle::path_root(leaf, &self.path)
|
||||
}
|
||||
}
|
||||
|
||||
/// An output to a partial transaction
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct PartialTxOutputWitness {
|
||||
pub output: OutputWitness,
|
||||
pub path: Vec<merkle::PathNode>,
|
||||
}
|
||||
|
||||
impl PartialTxOutputWitness {
|
||||
pub fn output_root(&self) -> [u8; 32] {
|
||||
let leaf = merkle::leaf(&self.output.commit().to_bytes());
|
||||
merkle::path_root(leaf, &self.path)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
|
||||
// use crate::cl::{
|
||||
// balance::UnitBalance,
|
||||
// note::{derive_unit, NoteWitness},
|
||||
// nullifier::NullifierSecret,
|
||||
// };
|
||||
|
||||
// use super::*;
|
||||
|
||||
// #[test]
|
||||
// fn test_partial_tx_balance() {
|
||||
// let (nmo, eth, crv) = (derive_unit("NMO"), derive_unit("ETH"), derive_unit("CRV"));
|
||||
// let mut rng = rand::thread_rng();
|
||||
|
||||
// let nf_a = NullifierSecret::random(&mut rng);
|
||||
// let nf_b = NullifierSecret::random(&mut rng);
|
||||
// let nf_c = NullifierSecret::random(&mut rng);
|
||||
|
||||
// let nmo_10_utxo = OutputWitness::new(NoteWitness::basic(10, nmo, &mut rng), nf_a.commit());
|
||||
// let nmo_10 = InputWitness::from_output(nmo_10_utxo, nf_a);
|
||||
|
||||
// let eth_23_utxo = OutputWitness::new(NoteWitness::basic(23, eth, &mut rng), nf_b.commit());
|
||||
// let eth_23 = InputWitness::from_output(eth_23_utxo, nf_b);
|
||||
|
||||
// let crv_4840 = OutputWitness::new(NoteWitness::basic(4840, crv, &mut rng), nf_c.commit());
|
||||
|
||||
// let ptx_witness = PartialTxWitness {
|
||||
// inputs: vec![nmo_10, eth_23],
|
||||
// outputs: vec![crv_4840],
|
||||
// balance_blinding: BalanceWitness::random_blinding(&mut rng),
|
||||
// };
|
||||
|
||||
// let ptx = ptx_witness.commit();
|
||||
|
||||
// assert_eq!(
|
||||
// ptx.balance,
|
||||
// BalanceWitness {
|
||||
// balances: vec![
|
||||
// UnitBalance {
|
||||
// unit: nmo,
|
||||
// pos: 0,
|
||||
// neg: 10
|
||||
// },
|
||||
// UnitBalance {
|
||||
// unit: eth,
|
||||
// pos: 0,
|
||||
// neg: 23
|
||||
// },
|
||||
// UnitBalance {
|
||||
// unit: crv,
|
||||
// pos: 4840,
|
||||
// neg: 0
|
||||
// },
|
||||
// ],
|
||||
// blinding: ptx_witness.balance_blinding
|
||||
// }
|
||||
// .commit()
|
||||
// );
|
||||
// }
|
||||
}
|
@ -1,360 +0,0 @@
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use crate::cl::merkle;
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
/// absence of element is marked with all 0's
|
||||
pub static ABSENT: [u8; 32] = [0u8; 32];
|
||||
|
||||
/// presence of element is marked with all 1's
|
||||
pub static PRESENT: [u8; 32] = [255u8; 32];
|
||||
|
||||
lazy_static! {
|
||||
// the roots of empty merkle trees of different heights
|
||||
// i.e. all leaves are ABSENT
|
||||
static ref EMPTY_ROOTS: [[u8; 32]; 257] = {
|
||||
let mut roots = [ABSENT; 257];
|
||||
for h in 1..257 {
|
||||
roots[h] = merkle::node(roots[h - 1], roots[h - 1]);
|
||||
}
|
||||
|
||||
roots
|
||||
};
|
||||
}
|
||||
|
||||
pub fn sparse_root(elems: &BTreeSet<[u8; 32]>) -> [u8; 32] {
|
||||
sparse_root_rec(0, elems)
|
||||
}
|
||||
|
||||
fn sparse_root_rec(prefix: u64, elems: &BTreeSet<[u8; 32]>) -> [u8; 32] {
|
||||
if elems.is_empty() {
|
||||
return empty_tree_root(256 - prefix);
|
||||
}
|
||||
if prefix == 256 {
|
||||
assert_eq!(elems.len(), 1);
|
||||
return PRESENT;
|
||||
}
|
||||
// partition the elements
|
||||
let (left, right): (BTreeSet<_>, BTreeSet<_>) =
|
||||
elems.iter().partition(|e| !bit(prefix as u8, **e));
|
||||
|
||||
merkle::node(
|
||||
sparse_root_rec(prefix + 1, &left),
|
||||
sparse_root_rec(prefix + 1, &right),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn sparse_path(elem: [u8; 32], elems: &BTreeSet<[u8; 32]>) -> Vec<merkle::PathNode> {
|
||||
fn sparse_path_rec(
|
||||
prefix: u64,
|
||||
elem: [u8; 32],
|
||||
elems: &BTreeSet<[u8; 32]>,
|
||||
) -> Vec<merkle::PathNode> {
|
||||
if prefix == 256 {
|
||||
return Vec::new();
|
||||
}
|
||||
// partition the elements
|
||||
let (left, right): (BTreeSet<_>, BTreeSet<_>) =
|
||||
elems.iter().partition(|e| !bit(prefix as u8, **e));
|
||||
|
||||
match bit(prefix as u8, elem) {
|
||||
true => {
|
||||
let left_root = sparse_root_rec(prefix + 1, &left);
|
||||
let mut path = sparse_path_rec(prefix + 1, elem, &right);
|
||||
|
||||
path.push(merkle::PathNode::Left(left_root));
|
||||
path
|
||||
}
|
||||
false => {
|
||||
let right_root = sparse_root_rec(prefix + 1, &right);
|
||||
let mut path = sparse_path_rec(prefix + 1, elem, &left);
|
||||
|
||||
path.push(merkle::PathNode::Right(right_root));
|
||||
path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sparse_path_rec(0, elem, elems)
|
||||
}
|
||||
|
||||
pub fn path_key(path: &[merkle::PathNode]) -> [u8; 32] {
|
||||
assert_eq!(path.len(), 256);
|
||||
|
||||
let mut key = [0u8; 32];
|
||||
for byte_i in (0..32).rev() {
|
||||
let mut byte = 0u8;
|
||||
for bit_i in 0..8 {
|
||||
byte <<= 1;
|
||||
match path[byte_i * 8 + bit_i] {
|
||||
merkle::PathNode::Left(_) => byte += 1,
|
||||
merkle::PathNode::Right(_) => byte += 0,
|
||||
};
|
||||
}
|
||||
key[31 - byte_i] = byte;
|
||||
}
|
||||
|
||||
key
|
||||
}
|
||||
|
||||
fn empty_tree_root(height: u64) -> [u8; 32] {
|
||||
assert!(height <= 256);
|
||||
EMPTY_ROOTS[height as usize]
|
||||
}
|
||||
|
||||
fn bit(idx: u8, elem: [u8; 32]) -> bool {
|
||||
let byte = idx / 8;
|
||||
let bit_in_byte = idx - byte * 8;
|
||||
|
||||
(elem[byte as usize] & (1 << bit_in_byte)) != 0
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn random_hash() -> [u8; 32] {
|
||||
rand::random()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_neighbour_paths() {
|
||||
let elems = BTreeSet::from_iter([[0u8; 32]]);
|
||||
|
||||
let path_0 = sparse_path([0u8; 32], &elems);
|
||||
let mut key_1 = [0u8; 32];
|
||||
key_1[31] = 128;
|
||||
let path_1 = sparse_path(key_1, &elems);
|
||||
|
||||
assert_ne!(path_0, path_1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_path_bit_agreement() {
|
||||
fn path_bit(idx: u8, path: &[merkle::PathNode]) -> bool {
|
||||
match path[255 - idx as usize] {
|
||||
merkle::PathNode::Left(_) => true,
|
||||
merkle::PathNode::Right(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
let key = random_hash();
|
||||
let path = sparse_path(key, &BTreeSet::new());
|
||||
|
||||
for i in 0..=255 {
|
||||
let b = bit(i, key);
|
||||
let pb = path_bit(i, &path);
|
||||
assert_eq!(b, pb, "{}!={}@{}", b, pb, i);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_path_key() {
|
||||
let elems = BTreeSet::from_iter(std::iter::repeat_with(random_hash).take(10));
|
||||
|
||||
// membership proofs
|
||||
for e in elems.iter() {
|
||||
let path = sparse_path(*e, &elems);
|
||||
assert_eq!(path_key(&path), *e);
|
||||
}
|
||||
|
||||
// non-membership proofs
|
||||
for _ in 0..10 {
|
||||
let elem = random_hash();
|
||||
let path = sparse_path(elem, &elems);
|
||||
assert_eq!(path_key(&path), elem);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sparse_path() {
|
||||
let elems = BTreeSet::from_iter(std::iter::repeat_with(random_hash).take(10));
|
||||
|
||||
let root = sparse_root(&elems);
|
||||
|
||||
// membership proofs
|
||||
for e in elems.iter() {
|
||||
let path = sparse_path(*e, &elems);
|
||||
assert_eq!(merkle::path_root(PRESENT, &path), root);
|
||||
}
|
||||
|
||||
// non-membership proofs
|
||||
for _ in 0..10 {
|
||||
let elem = random_hash();
|
||||
let path = sparse_path(elem, &elems);
|
||||
assert!(!elems.contains(&elem));
|
||||
assert_eq!(merkle::path_root(ABSENT, &path), root);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sparse_non_membership_in_empty_tree() {
|
||||
let root = sparse_root(&BTreeSet::new());
|
||||
|
||||
let path = sparse_path([0u8; 32], &BTreeSet::new());
|
||||
|
||||
assert_eq!(merkle::path_root(ABSENT, &path), root);
|
||||
|
||||
for (h, node) in path.into_iter().enumerate() {
|
||||
match node {
|
||||
merkle::PathNode::Left(hash) | merkle::PathNode::Right(hash) => {
|
||||
assert_eq!(hash, empty_tree_root(h as u64))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sparse_root_left_most_occupied() {
|
||||
let root = sparse_root(&BTreeSet::from_iter([[0u8; 32]]));
|
||||
|
||||
// We are constructing the tree:
|
||||
//
|
||||
// / \
|
||||
// / \ 0 subtree
|
||||
// / \ 0 subtree
|
||||
// 1 0
|
||||
let mut expected_root = PRESENT;
|
||||
for h in 0..=255 {
|
||||
expected_root = merkle::node(expected_root, empty_tree_root(h))
|
||||
}
|
||||
|
||||
assert_eq!(root, expected_root)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sparse_root_right_most_occupied() {
|
||||
let root = sparse_root(&BTreeSet::from_iter([[255u8; 32]]));
|
||||
|
||||
// We are constructing the tree:
|
||||
//
|
||||
// /\
|
||||
// 0 /\
|
||||
// 0 /\
|
||||
// 0 1
|
||||
let mut expected_root = PRESENT;
|
||||
for h in 0..=255 {
|
||||
expected_root = merkle::node(empty_tree_root(h), expected_root)
|
||||
}
|
||||
|
||||
assert_eq!(root, expected_root)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sparse_root_middle_elem() {
|
||||
let elem = {
|
||||
let mut x = [255u8; 32];
|
||||
x[0] = 254;
|
||||
x
|
||||
};
|
||||
assert!(!bit(0, elem));
|
||||
for i in 1..=255 {
|
||||
assert!(bit(i, elem));
|
||||
}
|
||||
|
||||
let root = sparse_root(&BTreeSet::from_iter([elem]));
|
||||
|
||||
// We are constructing the tree:
|
||||
// root
|
||||
// / \
|
||||
// /\ 0
|
||||
// 0 /\
|
||||
// 0 /\
|
||||
// 0 ...
|
||||
// \
|
||||
// 1
|
||||
let mut expected_root = PRESENT;
|
||||
for h in 0..=254 {
|
||||
expected_root = merkle::node(empty_tree_root(h), expected_root)
|
||||
}
|
||||
expected_root = merkle::node(expected_root, empty_tree_root(255));
|
||||
|
||||
assert_eq!(root, expected_root)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sparse_root_middle_weave_elem() {
|
||||
let elem = [85u8; 32];
|
||||
for i in 0..=255 {
|
||||
assert_eq!(bit(i, elem), i % 2 == 0);
|
||||
}
|
||||
|
||||
let root = sparse_root(&BTreeSet::from_iter([elem]));
|
||||
|
||||
// We are constructing the tree:
|
||||
// /\
|
||||
// 0 /\
|
||||
// /\0
|
||||
// /\
|
||||
// 0 /\
|
||||
// /\0
|
||||
// 0 1
|
||||
|
||||
let mut expected_root = PRESENT;
|
||||
for h in 0..=255 {
|
||||
if h % 2 == 0 {
|
||||
expected_root = merkle::node(expected_root, empty_tree_root(h))
|
||||
} else {
|
||||
expected_root = merkle::node(empty_tree_root(h), expected_root)
|
||||
}
|
||||
}
|
||||
assert_eq!(root, expected_root)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sparse_multiple_elems() {
|
||||
let root = sparse_root(&BTreeSet::from_iter([[0u8; 32], [255u8; 32]]));
|
||||
|
||||
// We are constructing the tree:
|
||||
// root
|
||||
// / \
|
||||
// /\ /\
|
||||
// /\0 0 /\
|
||||
// 1 0 0 1
|
||||
|
||||
let mut left_root = PRESENT;
|
||||
for h in 0..=254 {
|
||||
left_root = merkle::node(left_root, empty_tree_root(h))
|
||||
}
|
||||
|
||||
let mut right_root = PRESENT;
|
||||
for h in 0..=254 {
|
||||
right_root = merkle::node(empty_tree_root(h), right_root)
|
||||
}
|
||||
let expected_root = merkle::node(left_root, right_root);
|
||||
|
||||
assert_eq!(root, expected_root)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bit() {
|
||||
for i in 0..=255 {
|
||||
assert!(!bit(i, [0u8; 32]))
|
||||
}
|
||||
|
||||
for i in 0..=255 {
|
||||
assert!(bit(i, [255u8; 32]))
|
||||
}
|
||||
|
||||
for i in 0..=255 {
|
||||
assert_eq!(bit(i, [85u8; 32]), i % 2 == 0)
|
||||
}
|
||||
}
|
||||
#[test]
|
||||
fn test_empty_tree_root() {
|
||||
assert_eq!(empty_tree_root(0), ABSENT);
|
||||
|
||||
assert_eq!(empty_tree_root(1), merkle::node(ABSENT, ABSENT));
|
||||
assert_eq!(
|
||||
empty_tree_root(2),
|
||||
merkle::node(merkle::node(ABSENT, ABSENT), merkle::node(ABSENT, ABSENT)),
|
||||
);
|
||||
assert_eq!(
|
||||
empty_tree_root(3),
|
||||
merkle::node(
|
||||
merkle::node(merkle::node(ABSENT, ABSENT), merkle::node(ABSENT, ABSENT)),
|
||||
merkle::node(merkle::node(ABSENT, ABSENT), merkle::node(ABSENT, ABSENT)),
|
||||
)
|
||||
);
|
||||
}
|
||||
}
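The module above proves both membership and non-membership against the same sparse root, as its tests exercise. A compact sketch of that flow, assuming the pre-refactor cl::cl::{merkle, sparse_merkle} paths used by the code being removed here (the post-move paths are not shown in this diff):

// Sketch only: module paths reflect the old cl::cl layout deleted in this hunk.
use cl::cl::{merkle, sparse_merkle};
use std::collections::BTreeSet;

/// Returns true if `candidate`'s (non-)membership proof matches the set root.
fn proof_checks_out(elems: &BTreeSet<[u8; 32]>, candidate: [u8; 32]) -> bool {
    let root = sparse_merkle::sparse_root(elems);
    let path = sparse_merkle::sparse_path(candidate, elems);

    // The leaf is PRESENT for members and ABSENT for non-members;
    // both cases verify against the same 256-level sparse root.
    let leaf = if elems.contains(&candidate) {
        sparse_merkle::PRESENT
    } else {
        sparse_merkle::ABSENT
    };
    merkle::path_root(leaf, &path) == root
}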
|
emmarin/cl/cl/src/crust/balance.rs (new file, 131 lines)
@ -0,0 +1,131 @@
|
||||
use crate::{Digest, Hash};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
pub type Value = u64;
|
||||
pub type Unit = [u8; 32];
|
||||
pub const NOP_COVENANT: [u8; 32] = [0u8; 32];
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
|
||||
pub struct UnitWitness {
|
||||
pub spending_covenant: [u8; 32],
|
||||
pub minting_covenant: [u8; 32],
|
||||
pub burning_covenant: [u8; 32],
|
||||
}
|
||||
|
||||
impl UnitWitness {
|
||||
pub fn unit(&self) -> Unit {
|
||||
let mut hasher = Hash::new();
|
||||
hasher.update(b"NOMOS_CL_UNIT");
|
||||
hasher.update(self.spending_covenant);
|
||||
hasher.update(self.minting_covenant);
|
||||
hasher.update(self.burning_covenant);
|
||||
|
||||
hasher.finalize().into()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
|
||||
pub struct UnitBalance {
|
||||
pub unit: Unit,
|
||||
pub pos: u64,
|
||||
pub neg: u64,
|
||||
}
|
||||
|
||||
impl UnitBalance {
|
||||
pub fn is_zero(&self) -> bool {
|
||||
self.pos == self.neg
|
||||
}
|
||||
|
||||
pub fn pos(unit: Unit, value: u64) -> Self {
|
||||
Self {
|
||||
unit,
|
||||
pos: value,
|
||||
neg: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn neg(unit: Unit, value: u64) -> Self {
|
||||
Self {
|
||||
unit,
|
||||
pos: 0,
|
||||
neg: value,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct Balance {
|
||||
pub balances: Vec<UnitBalance>,
|
||||
}
|
||||
impl Balance {
|
||||
pub fn zero() -> Self {
|
||||
Self {
|
||||
balances: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_positive(&mut self, unit: Unit, value: Value) {
|
||||
for unit_bal in self.balances.iter_mut() {
|
||||
if unit_bal.unit == unit {
|
||||
unit_bal.pos += value;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Unit was not found, so we must create one.
|
||||
self.balances.push(UnitBalance {
|
||||
unit,
|
||||
pos: value,
|
||||
neg: 0,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn insert_negative(&mut self, unit: Unit, value: Value) {
|
||||
for unit_bal in self.balances.iter_mut() {
|
||||
if unit_bal.unit == unit {
|
||||
unit_bal.neg += value;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
self.balances.push(UnitBalance {
|
||||
unit,
|
||||
pos: 0,
|
||||
neg: value,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn clear_zeros(&mut self) {
|
||||
let mut i = 0usize;
|
||||
while i < self.balances.len() {
|
||||
if self.balances[i].is_zero() {
|
||||
self.balances.swap_remove(i);
|
||||
// don't increment `i` since the last element has been swapped into the
|
||||
// `i`'th place
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn combine<'a>(balances: impl IntoIterator<Item = &'a Self>) -> Self {
|
||||
let mut combined = balances
|
||||
.into_iter()
|
||||
.fold(Balance::zero(), |mut acc, balance| {
|
||||
for unit_bal in &balance.balances {
|
||||
if unit_bal.pos > unit_bal.neg {
|
||||
acc.insert_positive(unit_bal.unit, unit_bal.pos - unit_bal.neg);
|
||||
} else {
|
||||
acc.insert_negative(unit_bal.unit, unit_bal.neg - unit_bal.pos);
|
||||
}
|
||||
}
|
||||
acc
|
||||
});
|
||||
combined.clear_zeros();
|
||||
combined
|
||||
}
|
||||
|
||||
pub fn is_zero(&self) -> bool {
|
||||
self.balances.is_empty()
|
||||
}
|
||||
}
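combine and is_zero above are the balance check that BundleWitness::commit (in crust/tx.rs below) enforces across a bundle's transactions. A minimal sketch, assuming the crate is imported as cl and using made-up unit bytes and amounts:

// Sketch only: unit bytes and amounts are illustrative.
use cl::crust::{Balance, Unit};

fn bundle_nets_to_zero() {
    let nmo: Unit = [1u8; 32];

    // One tx spends 10 NMO in zone A, another receives 10 NMO in zone B.
    let mut spend = Balance::zero();
    spend.insert_negative(nmo, 10);

    let mut receive = Balance::zero();
    receive.insert_positive(nmo, 10);

    // Matching positive and negative amounts cancel per unit, so a
    // well-formed cross-zone bundle combines to the zero balance.
    assert!(Balance::combine([&spend, &receive]).is_zero());
}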
|
emmarin/cl/cl/src/crust/iow.rs (new file, 161 lines)
@ -0,0 +1,161 @@
|
||||
use crate::{
|
||||
crust::{
|
||||
balance::{Unit, UnitWitness},
|
||||
nullifier::{Nullifier, NullifierCommitment, NullifierSecret},
|
||||
},
|
||||
mantle::ZoneId,
|
||||
Digest, Hash,
|
||||
};
|
||||
use rand::RngCore;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct InputWitness {
|
||||
pub state: [u8; 32],
|
||||
pub value: u64,
|
||||
pub unit_witness: UnitWitness,
|
||||
pub nonce: Nonce,
|
||||
pub zone_id: ZoneId,
|
||||
pub nf_sk: NullifierSecret,
|
||||
}
|
||||
|
||||
impl InputWitness {
|
||||
pub fn from_output(
|
||||
output: OutputWitness,
|
||||
nf_sk: NullifierSecret,
|
||||
unit_witness: UnitWitness,
|
||||
) -> Self {
|
||||
assert_eq!(nf_sk.commit(), output.nf_pk);
|
||||
assert_eq!(unit_witness.unit(), output.unit);
|
||||
Self {
|
||||
state: output.state,
|
||||
value: output.value,
|
||||
unit_witness,
|
||||
nonce: output.nonce,
|
||||
zone_id: output.zone_id,
|
||||
nf_sk,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn evolved_nonce(&self, domain: &[u8]) -> Nonce {
|
||||
let mut hasher = Hash::new();
|
||||
hasher.update(b"NOMOS_COIN_EVOLVE");
|
||||
hasher.update(domain);
|
||||
hasher.update(self.nf_sk.0);
|
||||
hasher.update(self.note_commitment().0);
|
||||
|
||||
let nonce_bytes: [u8; 32] = hasher.finalize().into();
|
||||
Nonce::from_bytes(nonce_bytes)
|
||||
}
|
||||
|
||||
pub fn evolve_output(&self, domain: &[u8]) -> OutputWitness {
|
||||
OutputWitness {
|
||||
state: self.state,
|
||||
value: self.value,
|
||||
unit: self.unit_witness.unit(),
|
||||
nonce: self.evolved_nonce(domain),
|
||||
zone_id: self.zone_id,
|
||||
nf_pk: self.nf_sk.commit(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn nullifier(&self) -> Nullifier {
|
||||
Nullifier::new(&self.zone_id, self.nf_sk, self.note_commitment())
|
||||
}
|
||||
|
||||
pub fn note_commitment(&self) -> NoteCommitment {
|
||||
NoteCommitment::commit(
|
||||
self.state,
|
||||
self.value,
|
||||
self.unit_witness.unit(),
|
||||
self.nonce,
|
||||
self.zone_id,
|
||||
self.nf_sk.commit(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct OutputWitness {
|
||||
pub state: [u8; 32],
|
||||
pub value: u64,
|
||||
pub unit: Unit,
|
||||
pub nonce: Nonce,
|
||||
pub zone_id: ZoneId,
|
||||
pub nf_pk: NullifierCommitment,
|
||||
}
|
||||
|
||||
impl OutputWitness {
|
||||
pub fn note_commitment(&self) -> NoteCommitment {
|
||||
NoteCommitment::commit(
|
||||
self.state,
|
||||
self.value,
|
||||
self.unit,
|
||||
self.nonce,
|
||||
self.zone_id,
|
||||
self.nf_pk,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
pub struct NoteCommitment(pub [u8; 32]);
|
||||
|
||||
impl NoteCommitment {
|
||||
fn commit(
|
||||
state: [u8; 32],
|
||||
value: u64,
|
||||
unit: Unit,
|
||||
nonce: Nonce,
|
||||
zone_id: ZoneId,
|
||||
nf_pk: NullifierCommitment,
|
||||
) -> Self {
|
||||
let mut hasher = Hash::new();
|
||||
hasher.update(b"NOMOS_NOTE_CM");
|
||||
hasher.update(state);
|
||||
hasher.update(value.to_le_bytes());
|
||||
hasher.update(unit);
|
||||
hasher.update(nonce.as_bytes());
|
||||
hasher.update(nf_pk.as_bytes());
|
||||
hasher.update(zone_id);
|
||||
let commit_bytes: [u8; 32] = hasher.finalize().into();
|
||||
Self(commit_bytes)
|
||||
}
|
||||
|
||||
pub fn as_bytes(&self) -> &[u8; 32] {
|
||||
&self.0
|
||||
}
|
||||
}
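The "move zoneid into NoteWitness" item from the commit message lands here as a zone_id field hashed into every note commitment, so two otherwise identical outputs in different zones commit differently. A hedged sketch, assuming ZoneId is a 32-byte array and the crate is imported as cl (both assumptions):

// Sketch only: checks that the reworked note commitment binds the zone id.
use cl::crust::{Nonce, NullifierSecret, OutputWitness};

fn zone_id_is_bound() {
    let mut rng = rand::thread_rng();
    let nf_pk = NullifierSecret::random(&mut rng).commit();

    let output = OutputWitness {
        state: [0u8; 32],
        value: 10,
        unit: [1u8; 32],
        nonce: Nonce::random(&mut rng),
        zone_id: [2u8; 32], // assumed ZoneId representation
        nf_pk,
    };

    // Same note, different zone: the commitments must differ.
    let other_zone = OutputWitness {
        zone_id: [3u8; 32],
        ..output
    };
    assert_ne!(output.note_commitment(), other_zone.note_commitment());
}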
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Nonce([u8; 32]);
|
||||
|
||||
impl Nonce {
|
||||
pub fn random(mut rng: impl RngCore) -> Self {
|
||||
let mut nonce = [0u8; 32];
|
||||
rng.fill_bytes(&mut nonce);
|
||||
Self(nonce)
|
||||
}
|
||||
|
||||
pub fn as_bytes(&self) -> &[u8; 32] {
|
||||
&self.0
|
||||
}
|
||||
|
||||
pub fn from_bytes(bytes: [u8; 32]) -> Self {
|
||||
Self(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct MintWitness {
|
||||
pub amount: u64,
|
||||
pub unit: UnitWitness,
|
||||
pub salt: [u8; 16],
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct BurnWitness {
|
||||
pub amount: u64,
|
||||
pub unit: UnitWitness,
|
||||
pub salt: [u8; 16],
|
||||
}
|
emmarin/cl/cl/src/crust/mod.rs (new file, 11 lines)
@ -0,0 +1,11 @@
pub mod balance;
pub mod iow;
// pub mod note;
pub mod nullifier;
pub mod tx;

pub use balance::{Balance, Unit, UnitWitness};
pub use iow::{BurnWitness, InputWitness, MintWitness, Nonce, NoteCommitment, OutputWitness};
// pub use note::{Nonce, NoteCommitment, NoteWitness};
pub use nullifier::{Nullifier, NullifierCommitment, NullifierSecret};
pub use tx::{Bundle, BundleRoot, BundleWitness, Tx, TxRoot, TxWitness};
emmarin/cl/cl/src/crust/note.rs (new file, 59 lines)
@ -0,0 +1,59 @@
|
||||
use crate::crust::{balance::Unit, nullifier::NullifierCommitment};
|
||||
use crate::mantle::ZoneId;
|
||||
use crate::{Digest, Hash};
|
||||
use rand::RngCore;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::cl::nullifier::NullifierSecret;
|
||||
|
||||
#[test]
|
||||
fn test_note_commit_permutations() {
|
||||
let (nmo, eth) = ([0; 32], [1; 32]);
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
|
||||
let nf_pk = NullifierSecret::random(&mut rng).commit();
|
||||
|
||||
let reference_note = NoteWitness::basic(32, nmo, &mut rng);
|
||||
|
||||
// different notes under same nullifier produce different commitments
|
||||
let mutation_tests = [
|
||||
NoteWitness {
|
||||
value: 12,
|
||||
..reference_note
|
||||
},
|
||||
NoteWitness {
|
||||
unit: eth,
|
||||
..reference_note
|
||||
},
|
||||
NoteWitness {
|
||||
covenant: Covenant::from_vk(&[1u8; 32]),
|
||||
..reference_note
|
||||
},
|
||||
NoteWitness {
|
||||
state: [1u8; 32],
|
||||
..reference_note
|
||||
},
|
||||
NoteWitness {
|
||||
nonce: Nonce::random(&mut rng),
|
||||
..reference_note
|
||||
},
|
||||
];
|
||||
|
||||
for n in mutation_tests {
|
||||
assert_ne!(n.commit(nf_pk), reference_note.commit(nf_pk));
|
||||
}
|
||||
|
||||
// commitment to same note with different nullifiers produce different commitments
|
||||
|
||||
let other_nf_pk = NullifierSecret::random(&mut rng).commit();
|
||||
|
||||
assert_ne!(
|
||||
reference_note.commit(nf_pk),
|
||||
reference_note.commit(other_nf_pk)
|
||||
);
|
||||
}
|
||||
}
|
@ -8,12 +8,10 @@
|
||||
|
||||
use std::cmp::PartialOrd;
|
||||
|
||||
use crate::{crust::NoteCommitment, Digest, Hash};
|
||||
use rand_core::RngCore;
|
||||
use risc0_zkvm::sha::rust_crypto::{Digest, Sha256};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::cl::NoteCommitment;
|
||||
|
||||
// Maintained privately by note holder
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct NullifierSecret(pub [u8; 16]);
|
||||
@ -29,6 +27,12 @@ pub struct NullifierCommitment([u8; 32]);
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
|
||||
pub struct Nullifier(pub [u8; 32]);
|
||||
|
||||
impl AsRef<[u8]> for Nullifier {
|
||||
fn as_ref(&self) -> &[u8] {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl NullifierSecret {
|
||||
pub fn random(mut rng: impl RngCore) -> Self {
|
||||
let mut sk = [0u8; 16];
|
||||
@ -41,7 +45,7 @@ impl NullifierSecret {
|
||||
}
|
||||
|
||||
pub fn commit(&self) -> NullifierCommitment {
|
||||
let mut hasher = Sha256::new();
|
||||
let mut hasher = Hash::new();
|
||||
hasher.update(b"NOMOS_CL_NULL_COMMIT");
|
||||
hasher.update(self.0);
|
||||
|
||||
@ -70,7 +74,7 @@ impl NullifierCommitment {
|
||||
|
||||
impl Nullifier {
|
||||
pub fn new(tag: &dyn AsRef<[u8]>, sk: NullifierSecret, note_cm: NoteCommitment) -> Self {
|
||||
let mut hasher = Sha256::new();
|
||||
let mut hasher = Hash::new();
|
||||
hasher.update(tag.as_ref());
|
||||
hasher.update(sk.0);
|
||||
hasher.update(note_cm.0);
|
emmarin/cl/cl/src/crust/tx.rs (new file, 399 lines)
@ -0,0 +1,399 @@
|
||||
use rand_core::RngCore;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use crate::{
|
||||
crust::{
|
||||
Balance, BurnWitness, InputWitness, MintWitness, NoteCommitment, Nullifier, OutputWitness,
|
||||
Unit,
|
||||
},
|
||||
ds::{
|
||||
merkle,
|
||||
mmr::{MMRProof, Root, MMR},
|
||||
},
|
||||
mantle::ZoneId,
|
||||
};
|
||||
|
||||
/// An identifier of a transaction
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
|
||||
pub struct TxRoot(pub [u8; 32]);
|
||||
|
||||
/// An identifier of a bundle
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
|
||||
pub struct BundleRoot(pub [u8; 32]);
|
||||
|
||||
impl From<[u8; 32]> for TxRoot {
|
||||
fn from(bytes: [u8; 32]) -> Self {
|
||||
Self(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<[u8; 32]> for BundleRoot {
|
||||
fn from(bytes: [u8; 32]) -> Self {
|
||||
Self(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
impl TxRoot {
|
||||
pub fn random(mut rng: impl RngCore) -> Self {
|
||||
let mut sk = [0u8; 32];
|
||||
rng.fill_bytes(&mut sk);
|
||||
Self(sk)
|
||||
}
|
||||
|
||||
pub fn hex(&self) -> String {
|
||||
hex::encode(self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Tx {
|
||||
pub root: TxRoot,
|
||||
pub balance: Balance,
|
||||
pub updates: Vec<LedgerUpdate>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct TxWitness {
|
||||
pub inputs: Vec<InputWitness>,
|
||||
pub outputs: Vec<(OutputWitness, Vec<u8>)>,
|
||||
pub data: Vec<u8>,
|
||||
pub mints: Vec<MintWitness>,
|
||||
pub burns: Vec<BurnWitness>,
|
||||
pub frontier_paths: Vec<(MMR, MMRProof)>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct LedgerUpdate {
|
||||
pub zone_id: ZoneId,
|
||||
pub frontier_nodes: Vec<Root>,
|
||||
pub inputs: Vec<Nullifier>,
|
||||
pub outputs: Vec<NoteCommitment>,
|
||||
}
|
||||
|
||||
pub struct LedgerUpdateWitness {
|
||||
pub zone_id: ZoneId,
|
||||
pub frontier_nodes: Vec<Root>,
|
||||
pub inputs: Vec<Nullifier>,
|
||||
pub outputs: Vec<(NoteCommitment, Vec<u8>)>,
|
||||
}
|
||||
|
||||
impl LedgerUpdateWitness {
|
||||
pub fn commit(self) -> (LedgerUpdate, [u8; 32]) {
|
||||
let input_root = merkle::root(&merkle::padded_leaves(&self.inputs));
|
||||
let output_root = merkle::root(&merkle::padded_leaves(self.outputs.iter().map(
|
||||
|(cm, data)| {
|
||||
cm.0.into_iter()
|
||||
.chain(data.iter().cloned())
|
||||
.collect::<Vec<_>>()
|
||||
},
|
||||
)));
|
||||
let root = merkle::root(&merkle::padded_leaves([
|
||||
input_root,
|
||||
output_root,
|
||||
self.zone_id,
|
||||
]));
|
||||
|
||||
(
|
||||
LedgerUpdate {
|
||||
zone_id: self.zone_id,
|
||||
inputs: self.inputs,
|
||||
outputs: self.outputs.into_iter().map(|(cm, _)| cm).collect(),
|
||||
frontier_nodes: self.frontier_nodes,
|
||||
},
|
||||
root,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl TxWitness {
|
||||
pub fn compute_updates(&self, inputs: &[InputDerivedFields]) -> Vec<LedgerUpdateWitness> {
|
||||
let mut updates = BTreeMap::new();
|
||||
assert_eq!(self.inputs.len(), self.frontier_paths.len());
|
||||
for (input, (mmr, path)) in inputs.iter().zip(&self.frontier_paths) {
|
||||
let entry = updates.entry(input.zone_id).or_insert(LedgerUpdateWitness {
|
||||
zone_id: input.zone_id,
|
||||
inputs: vec![],
|
||||
outputs: vec![],
|
||||
frontier_nodes: mmr.roots.clone(),
|
||||
});
|
||||
entry.inputs.push(input.nf);
|
||||
assert!(mmr.verify_proof(&input.cm.0, path));
|
||||
// ensure a single MMR per zone per tx
|
||||
assert_eq!(&mmr.roots, &entry.frontier_nodes);
|
||||
}
|
||||
|
||||
for (output, data) in &self.outputs {
|
||||
assert!(output.value > 0);
|
||||
updates
|
||||
.entry(output.zone_id)
|
||||
.or_insert(LedgerUpdateWitness {
|
||||
zone_id: output.zone_id,
|
||||
inputs: vec![],
|
||||
outputs: vec![],
|
||||
frontier_nodes: vec![],
|
||||
})
|
||||
.outputs
|
||||
.push((output.note_commitment(), data.clone())); // TODO: avoid clone
|
||||
}
|
||||
|
||||
updates.into_values().collect()
|
||||
}
|
||||
|
||||
pub fn mint_amounts(&self) -> Vec<MintAmount> {
|
||||
self.mints
|
||||
.iter()
|
||||
.map(|MintWitness { unit, amount, salt }| MintAmount {
|
||||
unit: unit.unit(),
|
||||
amount: *amount,
|
||||
salt: *salt,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn burn_amounts(&self) -> Vec<BurnAmount> {
|
||||
self.burns
|
||||
.iter()
|
||||
.map(|BurnWitness { unit, amount, salt }| BurnAmount {
|
||||
unit: unit.unit(),
|
||||
amount: *amount,
|
||||
salt: *salt,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn inputs_derived_fields(&self) -> Vec<InputDerivedFields> {
|
||||
self.inputs
|
||||
.iter()
|
||||
.map(|input| InputDerivedFields {
|
||||
nf: input.nullifier(),
|
||||
cm: input.note_commitment(),
|
||||
zone_id: input.zone_id,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn mint_burn_root(mints: &[MintAmount], burns: &[BurnAmount]) -> [u8; 32] {
|
||||
let mint_root = merkle::root(&merkle::padded_leaves(mints.iter().map(|m| m.to_bytes())));
|
||||
let burn_root = merkle::root(&merkle::padded_leaves(burns.iter().map(|b| b.to_bytes())));
|
||||
merkle::node(mint_root, burn_root)
|
||||
}
|
||||
|
||||
fn io_balance(&self) -> Balance {
|
||||
let mut balance = Balance::zero();
|
||||
for input in &self.inputs {
|
||||
balance.insert_positive(input.unit_witness.unit(), input.value);
|
||||
}
|
||||
for (output, _) in &self.outputs {
|
||||
balance.insert_negative(output.unit, output.value);
|
||||
}
|
||||
balance
|
||||
}
|
||||
|
||||
pub fn root(&self, update_root: [u8; 32], mint_burn_root: [u8; 32]) -> TxRoot {
|
||||
let data_root = merkle::leaf(&self.data);
|
||||
let root = merkle::root(&merkle::padded_leaves([
|
||||
update_root,
|
||||
mint_burn_root,
|
||||
data_root,
|
||||
]));
|
||||
TxRoot(root)
|
||||
}
|
||||
|
||||
pub fn balance(&self, mints: &[MintAmount], burns: &[BurnAmount]) -> Balance {
|
||||
let mut mint_burn_balance = Balance::zero();
|
||||
for MintAmount { unit, amount, .. } in mints {
|
||||
mint_burn_balance.insert_positive(*unit, *amount);
|
||||
}
|
||||
for BurnAmount { unit, amount, .. } in burns {
|
||||
mint_burn_balance.insert_negative(*unit, *amount);
|
||||
}
|
||||
Balance::combine(&[mint_burn_balance, self.io_balance()])
|
||||
}
|
||||
|
||||
// inputs, mints and burns are provided as separate arguments to allow code reuse
|
||||
// with the proof without having to recompute them
|
||||
pub fn commit(
|
||||
&self,
|
||||
mints: &[MintAmount],
|
||||
burns: &[BurnAmount],
|
||||
inputs: &[InputDerivedFields],
|
||||
) -> Tx {
|
||||
let mint_burn_root = Self::mint_burn_root(mints, burns);
|
||||
|
||||
let (updates, updates_roots): (Vec<_>, Vec<_>) = self
|
||||
.compute_updates(inputs)
|
||||
.into_iter()
|
||||
.map(LedgerUpdateWitness::commit)
|
||||
.unzip();
|
||||
let update_root = merkle::root(&merkle::padded_leaves(updates_roots));
|
||||
let root = self.root(update_root, mint_burn_root);
|
||||
let balance = self.balance(mints, burns);
|
||||
|
||||
Tx {
|
||||
root,
|
||||
balance,
|
||||
updates,
|
||||
}
|
||||
}
|
||||
}
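// Illustrative sketch (not part of this change): the intended call order when
// committing a TxWitness. `commit_tx` is a hypothetical helper and only uses
// the methods defined above; derived fields, mint amounts and burn amounts are
// computed once so the covenant checks and the commitment can share them.
fn commit_tx(witness: &TxWitness) -> Tx {
    let mints = witness.mint_amounts();
    let burns = witness.burn_amounts();
    let inputs = witness.inputs_derived_fields();
    witness.commit(&mints, &burns, &inputs)
}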
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Bundle {
|
||||
pub updates: Vec<LedgerUpdate>,
|
||||
pub root: BundleRoot,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct BundleWitness {
|
||||
pub txs: Vec<Tx>,
|
||||
}
|
||||
|
||||
impl BundleWitness {
|
||||
pub fn commit(self) -> Bundle {
|
||||
assert!(Balance::combine(self.txs.iter().map(|tx| &tx.balance)).is_zero());
|
||||
|
||||
let root = BundleRoot(merkle::root(&merkle::padded_leaves(
|
||||
self.txs.iter().map(|tx| tx.root.0),
|
||||
)));
|
||||
|
||||
let updates = self
|
||||
.txs
|
||||
.into_iter()
|
||||
.fold(BTreeMap::new(), |mut updates, tx| {
|
||||
for update in tx.updates {
|
||||
let entry = updates.entry(update.zone_id).or_insert(LedgerUpdate {
|
||||
zone_id: update.zone_id,
|
||||
inputs: vec![],
|
||||
outputs: vec![],
|
||||
frontier_nodes: vec![],
|
||||
});
|
||||
|
||||
entry.inputs.extend(update.inputs);
|
||||
entry.outputs.extend(update.outputs);
|
||||
entry.frontier_nodes.extend(update.frontier_nodes); // TODO: maybe merge?
|
||||
}
|
||||
|
||||
updates
|
||||
})
|
||||
.into_values()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// de-dup frontier nodes
|
||||
let updates = updates
|
||||
.into_iter()
|
||||
.map(|mut update| {
|
||||
update.frontier_nodes.sort();
|
||||
update.frontier_nodes.dedup();
|
||||
update
|
||||
})
|
||||
.collect();
|
||||
|
||||
Bundle { updates, root }
|
||||
}
|
||||
}
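// Sketch (hypothetical, assumes only the types above): a bundle is built from
// already-committed transactions; `commit` asserts that their balances cancel
// out to zero and merges the per-zone ledger updates across all of them.
fn bundle_txs(txs: Vec<Tx>) -> Bundle {
    BundleWitness { txs }.commit()
}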
|
||||
|
||||
// ----- Helper structs -----
|
||||
// To validate the unit covenants we need the tx root plus some intermediate values that are produced while
|
||||
// computing it. To avoid recomputation, we store this information in the following structs.
|
||||
|
||||
pub struct MintAmount {
|
||||
pub unit: Unit,
|
||||
pub amount: u64,
|
||||
pub salt: [u8; 16],
|
||||
}
|
||||
|
||||
impl MintAmount {
|
||||
fn to_bytes(&self) -> [u8; 56] {
|
||||
let mut bytes = [0; 56];
|
||||
bytes[..32].copy_from_slice(&self.unit);
|
||||
bytes[32..40].copy_from_slice(&self.amount.to_le_bytes());
|
||||
bytes[40..].copy_from_slice(&self.salt);
|
||||
bytes
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BurnAmount {
|
||||
pub unit: Unit,
|
||||
pub amount: u64,
|
||||
pub salt: [u8; 16],
|
||||
}
|
||||
|
||||
impl BurnAmount {
|
||||
fn to_bytes(&self) -> [u8; 56] {
|
||||
let mut bytes = [0; 56];
|
||||
bytes[..32].copy_from_slice(&self.unit);
|
||||
bytes[32..40].copy_from_slice(&self.amount.to_le_bytes());
|
||||
bytes[40..].copy_from_slice(&self.salt);
|
||||
bytes
|
||||
}
|
||||
}
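// Sketch (hypothetical check, assumes Unit = [u8; 32] as used above): both
// MintAmount and BurnAmount serialize to the same 56-byte layout,
// unit (32) || amount as little-endian u64 (8) || salt (16).
fn mint_amount_layout_example() {
    let m = MintAmount {
        unit: [1u8; 32],
        amount: 7,
        salt: [2u8; 16],
    };
    let bytes = m.to_bytes();
    assert_eq!(bytes[..32], [1u8; 32]);
    assert_eq!(bytes[32..40], 7u64.to_le_bytes());
    assert_eq!(bytes[40..], [2u8; 16]);
}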
|
||||
|
||||
pub struct InputDerivedFields {
|
||||
pub nf: Nullifier,
|
||||
pub cm: NoteCommitment,
|
||||
pub zone_id: ZoneId,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
|
||||
// use crate::cl::{
|
||||
// balance::UnitBalance,
|
||||
// note::{derive_unit, NoteWitness},
|
||||
// nullifier::NullifierSecret,
|
||||
// };
|
||||
|
||||
// use super::*;
|
||||
|
||||
// #[test]
|
||||
// fn test_partial_tx_balance() {
|
||||
// let (nmo, eth, crv) = (derive_unit("NMO"), derive_unit("ETH"), derive_unit("CRV"));
|
||||
// let mut rng = rand::thread_rng();
|
||||
|
||||
// let nf_a = NullifierSecret::random(&mut rng);
|
||||
// let nf_b = NullifierSecret::random(&mut rng);
|
||||
// let nf_c = NullifierSecret::random(&mut rng);
|
||||
|
||||
// let nmo_10_utxo = OutputWitness::new(NoteWitness::basic(10, nmo, &mut rng), nf_a.commit());
|
||||
// let nmo_10 = InputWitness::from_output(nmo_10_utxo, nf_a);
|
||||
|
||||
// let eth_23_utxo = OutputWitness::new(NoteWitness::basic(23, eth, &mut rng), nf_b.commit());
|
||||
// let eth_23 = InputWitness::from_output(eth_23_utxo, nf_b);
|
||||
|
||||
// let crv_4840 = OutputWitness::new(NoteWitness::basic(4840, crv, &mut rng), nf_c.commit());
|
||||
|
||||
// let ptx_witness = TxWitness {
|
||||
// inputs: vec![nmo_10, eth_23],
|
||||
// outputs: vec![crv_4840],
|
||||
// balance_blinding: BalanceWitness::random_blinding(&mut rng),
|
||||
// };
|
||||
|
||||
// let ptx = ptx_witness.commit();
|
||||
|
||||
// assert_eq!(
|
||||
// ptx.balance,
|
||||
// BalanceWitness {
|
||||
// balances: vec![
|
||||
// UnitBalance {
|
||||
// unit: nmo,
|
||||
// pos: 0,
|
||||
// neg: 10
|
||||
// },
|
||||
// UnitBalance {
|
||||
// unit: eth,
|
||||
// pos: 0,
|
||||
// neg: 23
|
||||
// },
|
||||
// UnitBalance {
|
||||
// unit: crv,
|
||||
// pos: 4840,
|
||||
// neg: 0
|
||||
// },
|
||||
// ],
|
||||
// blinding: ptx_witness.balance_blinding
|
||||
// }
|
||||
// .commit()
|
||||
// );
|
||||
// }
|
||||
}
|
@ -1,142 +1,14 @@
|
||||
use super::{
|
||||
merkle::{self, leaf, Path, PathNode},
|
||||
mmr::{Root, MMR},
|
||||
Nullifier,
|
||||
// TODO: generalize this IMT to support arbitrary ordered elements, not just nullifiers
|
||||
|
||||
use crate::{
|
||||
crust::Nullifier,
|
||||
ds::merkle::{self, leaf, Path, PathNode},
|
||||
ds::mmr::{Root, MMR},
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
// the roots of empty merkle trees of different heights
|
||||
static EMPTY_ROOTS: [[u8; 32]; 32] = [
|
||||
[0; 32],
|
||||
[
|
||||
177, 12, 156, 85, 54, 248, 12, 101, 211, 140, 131, 82, 142, 82, 119, 152, 90, 154, 54, 64,
|
||||
122, 123, 61, 166, 144, 13, 169, 27, 61, 69, 25, 49,
|
||||
],
|
||||
[
|
||||
192, 157, 232, 29, 199, 66, 141, 214, 64, 82, 152, 83, 59, 136, 250, 91, 209, 32, 143, 28,
|
||||
190, 109, 233, 43, 28, 58, 90, 240, 214, 89, 0, 157,
|
||||
],
|
||||
[
|
||||
234, 171, 4, 69, 194, 82, 65, 197, 220, 105, 217, 26, 78, 139, 7, 35, 123, 137, 173, 57,
|
||||
224, 85, 154, 88, 114, 245, 145, 12, 58, 131, 158, 126,
|
||||
],
|
||||
[
|
||||
200, 121, 6, 41, 76, 104, 193, 220, 1, 170, 134, 10, 156, 51, 252, 2, 116, 137, 120, 220,
|
||||
198, 203, 132, 233, 175, 242, 212, 37, 237, 112, 220, 85,
|
||||
],
|
||||
[
|
||||
243, 70, 215, 24, 17, 201, 70, 193, 170, 22, 243, 226, 154, 3, 91, 175, 130, 131, 163, 76,
|
||||
238, 174, 153, 166, 34, 53, 59, 177, 188, 93, 88, 109,
|
||||
],
|
||||
[
|
||||
47, 194, 241, 92, 49, 216, 212, 37, 215, 16, 16, 92, 141, 120, 190, 171, 192, 166, 167, 90,
|
||||
241, 16, 216, 221, 137, 26, 189, 228, 22, 8, 29, 230,
|
||||
],
|
||||
[
|
||||
114, 18, 79, 249, 200, 32, 139, 234, 8, 208, 147, 247, 248, 158, 45, 172, 74, 203, 42, 8,
|
||||
111, 32, 54, 21, 41, 79, 254, 184, 180, 21, 124, 74,
|
||||
],
|
||||
[
|
||||
145, 84, 112, 4, 33, 107, 225, 144, 128, 175, 222, 242, 151, 233, 251, 72, 111, 174, 96,
|
||||
156, 47, 199, 103, 138, 225, 136, 122, 77, 113, 155, 234, 247,
|
||||
],
|
||||
[
|
||||
11, 157, 239, 22, 43, 157, 252, 172, 170, 216, 246, 54, 17, 250, 62, 150, 56, 71, 10, 199,
|
||||
73, 149, 210, 55, 128, 177, 66, 3, 53, 117, 251, 183,
|
||||
],
|
||||
[
|
||||
185, 189, 114, 54, 194, 160, 33, 78, 253, 117, 195, 9, 8, 5, 98, 153, 232, 236, 51, 123,
|
||||
149, 89, 219, 121, 144, 24, 131, 23, 133, 185, 43, 84,
|
||||
],
|
||||
[
|
||||
112, 167, 71, 47, 253, 157, 13, 91, 220, 65, 136, 163, 159, 67, 93, 31, 20, 26, 211, 53, 3,
|
||||
87, 214, 79, 139, 91, 175, 186, 241, 96, 36, 50,
|
||||
],
|
||||
[
|
||||
194, 180, 108, 122, 130, 69, 19, 30, 123, 135, 82, 112, 184, 120, 190, 218, 243, 195, 112,
|
||||
62, 233, 93, 50, 163, 17, 113, 50, 116, 204, 0, 154, 48,
|
||||
],
|
||||
[
|
||||
148, 210, 36, 218, 105, 22, 94, 122, 161, 188, 141, 168, 111, 73, 85, 240, 124, 61, 14,
|
||||
224, 230, 127, 232, 216, 62, 226, 15, 241, 178, 214, 74, 146,
|
||||
],
|
||||
[
|
||||
40, 223, 100, 218, 109, 7, 142, 65, 131, 44, 18, 199, 189, 186, 19, 141, 26, 17, 199, 237,
|
||||
175, 131, 246, 119, 240, 208, 9, 158, 20, 61, 123, 78,
|
||||
],
|
||||
[
|
||||
201, 24, 167, 145, 146, 0, 225, 211, 222, 4, 168, 99, 66, 145, 227, 153, 137, 203, 210, 71,
|
||||
159, 65, 73, 114, 68, 95, 197, 195, 252, 157, 176, 136,
|
||||
],
|
||||
[
|
||||
48, 213, 33, 6, 16, 231, 203, 89, 97, 59, 140, 45, 122, 220, 219, 100, 28, 28, 11, 94, 152,
|
||||
121, 73, 81, 17, 43, 221, 62, 168, 253, 60, 75,
|
||||
],
|
||||
[
|
||||
235, 42, 170, 207, 251, 244, 212, 33, 244, 247, 205, 152, 200, 175, 127, 130, 29, 185, 12,
|
||||
168, 155, 181, 186, 70, 143, 116, 118, 125, 213, 61, 133, 216,
|
||||
],
|
||||
[
|
||||
114, 156, 155, 68, 120, 46, 130, 183, 148, 220, 222, 87, 255, 204, 77, 158, 109, 250, 218,
|
||||
97, 85, 113, 90, 210, 38, 127, 1, 108, 150, 234, 218, 8,
|
||||
],
|
||||
[
|
||||
23, 0, 234, 63, 219, 38, 225, 234, 86, 65, 254, 152, 99, 26, 147, 35, 220, 157, 73, 119,
|
||||
125, 42, 230, 7, 31, 193, 194, 14, 3, 66, 238, 182,
|
||||
],
|
||||
[
|
||||
98, 183, 177, 156, 96, 245, 221, 11, 101, 129, 202, 229, 95, 119, 42, 206, 89, 94, 213,
|
||||
165, 7, 78, 36, 88, 2, 102, 137, 50, 212, 33, 228, 222,
|
||||
],
|
||||
[
|
||||
72, 59, 68, 178, 17, 108, 122, 234, 144, 160, 205, 221, 106, 249, 141, 34, 247, 190, 97,
|
||||
192, 237, 171, 37, 251, 238, 87, 249, 236, 210, 120, 99, 114,
|
||||
],
|
||||
[
|
||||
199, 172, 23, 156, 51, 202, 195, 224, 29, 147, 201, 201, 224, 152, 153, 28, 175, 3, 39, 40,
|
||||
14, 98, 231, 38, 117, 171, 80, 6, 102, 236, 107, 67,
|
||||
],
|
||||
[
|
||||
130, 105, 50, 158, 64, 150, 93, 137, 190, 66, 61, 158, 243, 130, 105, 85, 76, 126, 192,
|
||||
139, 131, 236, 181, 34, 227, 186, 123, 81, 124, 83, 236, 53,
|
||||
],
|
||||
[
|
||||
29, 170, 86, 82, 122, 96, 225, 198, 251, 48, 125, 20, 235, 213, 119, 64, 95, 24, 196, 180,
|
||||
170, 18, 173, 51, 243, 126, 249, 126, 222, 136, 100, 29,
|
||||
],
|
||||
[
|
||||
144, 79, 68, 40, 85, 101, 172, 71, 165, 66, 18, 29, 183, 16, 224, 80, 32, 242, 43, 104,
|
||||
247, 113, 196, 87, 107, 148, 111, 209, 145, 145, 193, 172,
|
||||
],
|
||||
[
|
||||
247, 113, 160, 20, 26, 123, 24, 107, 219, 159, 232, 236, 212, 181, 146, 159, 254, 102, 166,
|
||||
103, 141, 17, 38, 106, 73, 250, 12, 56, 18, 126, 253, 59,
|
||||
],
|
||||
[
|
||||
161, 111, 104, 235, 136, 130, 176, 167, 161, 49, 57, 160, 91, 220, 207, 169, 208, 228, 131,
|
||||
64, 251, 123, 30, 207, 135, 64, 14, 80, 39, 91, 44, 30,
|
||||
],
|
||||
[
|
||||
213, 239, 239, 81, 151, 152, 116, 196, 117, 174, 223, 128, 213, 197, 4, 49, 154, 132, 187,
|
||||
96, 86, 68, 237, 185, 223, 205, 118, 91, 158, 98, 202, 176,
|
||||
],
|
||||
[
|
||||
52, 136, 50, 107, 42, 155, 186, 152, 251, 91, 53, 50, 239, 148, 165, 86, 84, 80, 117, 168,
|
||||
142, 47, 181, 177, 49, 210, 235, 228, 6, 189, 23, 175,
|
||||
],
|
||||
[
|
||||
40, 108, 31, 110, 180, 110, 13, 47, 169, 96, 51, 163, 201, 72, 25, 8, 134, 12, 176, 44,
|
||||
221, 250, 108, 225, 154, 236, 208, 26, 170, 126, 80, 12,
|
||||
],
|
||||
[
|
||||
185, 231, 113, 255, 127, 172, 246, 169, 177, 34, 116, 231, 131, 19, 25, 81, 91, 136, 95,
|
||||
192, 80, 179, 134, 27, 205, 18, 151, 234, 202, 116, 165, 249,
|
||||
],
|
||||
];
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct NullifierTree {
|
||||
leaves: Vec<Leaf>,
|
||||
@ -158,8 +30,16 @@ impl NullifierTree {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.leaves.is_empty()
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.leaves.len()
|
||||
}
|
||||
|
||||
fn hashed_leaves(&self) -> Vec<[u8; 32]> {
|
||||
merkle::padded_leaves(&self.leaves.iter().map(|l| l.to_bytes()).collect::<Vec<_>>())
|
||||
merkle::padded_leaves(self.leaves.iter().map(|l| l.to_bytes()))
|
||||
}
|
||||
|
||||
pub fn root(&self) -> [u8; 32] {
|
||||
@ -185,7 +65,7 @@ impl NullifierTree {
|
||||
for leaf in &self.leaves {
|
||||
mmr.push(&leaf.to_bytes());
|
||||
}
|
||||
assert_eq!(self.root(), frontier_root(&mmr.roots));
|
||||
assert_eq!(self.root(), mmr.frontier_root());
|
||||
|
||||
self.leaves.push(new_leaf);
|
||||
|
||||
@ -240,7 +120,7 @@ impl NullifierTree {
|
||||
mmr.push(&leaf.to_bytes());
|
||||
}
|
||||
|
||||
assert_eq!(self.root(), frontier_root(&mmr.roots));
|
||||
assert_eq!(self.root(), mmr.frontier_root());
|
||||
|
||||
for new_leaf in new_leaves {
|
||||
self.leaves.push(new_leaf);
|
||||
@ -263,11 +143,7 @@ pub struct Leaf {
|
||||
|
||||
impl Leaf {
|
||||
pub fn to_bytes(&self) -> Vec<u8> {
|
||||
self.value
|
||||
.0
|
||||
.into_iter()
|
||||
.chain(self.next_value.0.into_iter())
|
||||
.collect()
|
||||
self.value.0.into_iter().chain(self.next_value.0).collect()
|
||||
}
|
||||
}
|
||||
|
||||
@ -297,12 +173,12 @@ impl UpdateProof {
|
||||
updated_low_nf.next_value = self.value;
|
||||
|
||||
let updated_root = merkle::path_root(leaf(&updated_low_nf.to_bytes()), &self.low_nf_path);
|
||||
assert_eq!(updated_root, frontier_root(&self.mmr.roots));
|
||||
assert_eq!(updated_root, self.mmr.frontier_root());
|
||||
|
||||
let mut mmr = self.mmr.clone();
|
||||
mmr.push(&new_leaf.to_bytes());
|
||||
|
||||
frontier_root(&mmr.roots)
|
||||
mmr.frontier_root()
|
||||
}
|
||||
}
|
||||
|
||||
@ -317,7 +193,7 @@ struct BatchUpdateProofInner {
|
||||
/// in risc0
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct BatchUpdateProof {
|
||||
data: Vec<u8>,
|
||||
pub data: Vec<u8>,
|
||||
}
|
||||
|
||||
struct LowNfIterator<'a> {
|
||||
@ -330,7 +206,7 @@ struct PathIterator<'p> {
|
||||
path: &'p [u8],
|
||||
}
|
||||
|
||||
impl<'a> Iterator for PathIterator<'a> {
|
||||
impl Iterator for PathIterator<'_> {
|
||||
type Item = PathNode;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
@ -400,7 +276,7 @@ impl BatchUpdateProof {
|
||||
.peeking_take_while(|v| in_interval(low_nf, **v))
|
||||
.copied()
|
||||
.collect::<Vec<_>>();
|
||||
assert!(in_gap.len() >= 1, "unused low nf");
|
||||
assert!(!in_gap.is_empty(), "unused low nf");
|
||||
|
||||
for w in in_gap.windows(2) {
|
||||
new_leaves.push(Leaf {
|
||||
@ -427,13 +303,13 @@ impl BatchUpdateProof {
|
||||
}
|
||||
|
||||
assert!(values.next().is_none(), "unused values");
|
||||
assert_eq!(cur_root, frontier_root(&mmr.roots));
|
||||
assert_eq!(cur_root, mmr.frontier_root());
|
||||
|
||||
for new_leaf in new_leaves {
|
||||
mmr.push(&new_leaf.to_bytes());
|
||||
}
|
||||
|
||||
frontier_root(&mmr.roots)
|
||||
mmr.frontier_root()
|
||||
}
|
||||
|
||||
pub fn verify(&self, nfs: &[Nullifier], old_root: [u8; 32]) -> [u8; 32] {
|
||||
@ -501,57 +377,9 @@ fn in_interval(low_nf: Leaf, value: Nullifier) -> bool {
|
||||
low_nf.value < value && value < low_nf.next_value
|
||||
}
|
||||
|
||||
fn frontier_root(roots: &[Root]) -> [u8; 32] {
|
||||
if roots.is_empty() {
|
||||
return EMPTY_ROOTS[0];
|
||||
}
|
||||
if roots.len() == 1 {
|
||||
return roots[0].root;
|
||||
}
|
||||
let mut root = EMPTY_ROOTS[0];
|
||||
let mut depth = 1;
|
||||
for last in roots.iter().rev() {
|
||||
while depth < last.height {
|
||||
root = merkle::node(root, EMPTY_ROOTS[depth as usize - 1]);
|
||||
depth += 1;
|
||||
}
|
||||
root = merkle::node(last.root, root);
|
||||
depth += 1;
|
||||
}
|
||||
|
||||
root
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use proptest_macro::property_test;
|
||||
|
||||
#[test]
|
||||
fn test_empty_roots() {
|
||||
let mut root = [0; 32];
|
||||
for i in 0..32 {
|
||||
assert_eq!(root, EMPTY_ROOTS[i]);
|
||||
root = merkle::node(root, root);
|
||||
}
|
||||
}
|
||||
|
||||
#[property_test]
|
||||
fn test_frontier_root(elems: Vec<[u8; 32]>) {
|
||||
let mut mmr = MMR::new();
|
||||
for elem in &elems {
|
||||
mmr.push(elem);
|
||||
}
|
||||
assert_eq!(
|
||||
frontier_root(&mmr.roots),
|
||||
merkle::root(&merkle::padded_leaves(
|
||||
&elems
|
||||
.into_iter()
|
||||
.map(|array| array.to_vec())
|
||||
.collect::<Vec<_>>()
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
@ -2,15 +2,10 @@ use risc0_zkvm::sha::rust_crypto::{Digest, Sha256};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::borrow::Borrow;
|
||||
|
||||
pub fn padded_leaves(elements: &[Vec<u8>]) -> Vec<[u8; 32]> {
|
||||
let mut leaves = std::iter::repeat([0; 32])
|
||||
.take(elements.len().next_power_of_two())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for (i, element) in elements.iter().enumerate() {
|
||||
leaves[i] = leaf(element);
|
||||
}
|
||||
|
||||
pub fn padded_leaves(elements: impl IntoIterator<Item = impl AsRef<[u8]>>) -> Vec<[u8; 32]> {
|
||||
let mut leaves = Vec::from_iter(elements.into_iter().map(|e| leaf(e.as_ref())));
|
||||
let pad = leaves.len().next_power_of_two() - leaves.len();
|
||||
leaves.extend(std::iter::repeat([0; 32]).take(pad));
|
||||
leaves
|
||||
}
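// Sketch: `padded_leaves` hashes each element into a leaf and pads with
// all-zero leaves up to the next power of two, so three elements yield a
// four-leaf tree. (Uses only `padded_leaves` and `leaf` from this module.)
fn padded_leaves_example() {
    let leaves = padded_leaves(["a", "b", "c"]);
    assert_eq!(leaves.len(), 4);
    assert_eq!(leaves[0], leaf(b"a"));
    assert_eq!(leaves[3], [0u8; 32]);
}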
|
||||
|
||||
@ -53,7 +48,7 @@ pub enum PathNode {
|
||||
Right([u8; 32]),
|
||||
}
|
||||
|
||||
pub fn path_root<'a>(leaf: [u8; 32], path: impl IntoIterator<Item: Borrow<PathNode>>) -> [u8; 32] {
|
||||
pub fn path_root(leaf: [u8; 32], path: impl IntoIterator<Item: Borrow<PathNode>>) -> [u8; 32] {
|
||||
let mut computed_hash = leaf;
|
||||
|
||||
for path_node in path.into_iter() {
|
||||
@ -102,7 +97,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_root_height_1() {
|
||||
let r = root(&padded_leaves(&[b"sand".into()]));
|
||||
let r = root(&padded_leaves(["sand"]));
|
||||
|
||||
let expected = leaf(b"sand");
|
||||
|
||||
@ -111,7 +106,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_root_height_2() {
|
||||
let r = root(&padded_leaves(&[b"desert".into(), b"sand".into()]));
|
||||
let r = root(&padded_leaves(["desert", "sand"]));
|
||||
|
||||
let expected = node(leaf(b"desert"), leaf(b"sand"));
|
||||
|
||||
@ -120,12 +115,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_root_height_3() {
|
||||
let r = root(&padded_leaves(&[
|
||||
b"desert".into(),
|
||||
b"sand".into(),
|
||||
b"feels".into(),
|
||||
b"warm".into(),
|
||||
]));
|
||||
let r = root(&padded_leaves(["desert", "sand", "feels", "warm"]));
|
||||
|
||||
let expected = node(
|
||||
node(leaf(b"desert"), leaf(b"sand")),
|
||||
@ -137,13 +127,8 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_root_height_4() {
|
||||
let r = root(&padded_leaves(&[
|
||||
b"desert".into(),
|
||||
b"sand".into(),
|
||||
b"feels".into(),
|
||||
b"warm".into(),
|
||||
b"at".into(),
|
||||
b"night".into(),
|
||||
let r = root(&padded_leaves([
|
||||
"desert", "sand", "feels", "warm", "at", "night",
|
||||
]));
|
||||
|
||||
let expected = node(
|
||||
@ -162,7 +147,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_path_height_1() {
|
||||
let leaves = padded_leaves(&[b"desert".into()]);
|
||||
let leaves = padded_leaves(["desert"]);
|
||||
let r = root(&leaves);
|
||||
|
||||
let p = path(&leaves, 0);
|
||||
@ -173,7 +158,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_path_height_2() {
|
||||
let leaves = padded_leaves(&[b"desert".into(), b"sand".into()]);
|
||||
let leaves = padded_leaves(["desert", "sand"]);
|
||||
let r = root(&leaves);
|
||||
|
||||
// --- proof for element at idx 0
|
||||
@ -193,12 +178,7 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_path_height_3() {
|
||||
let leaves = padded_leaves(&[
|
||||
b"desert".into(),
|
||||
b"sand".into(),
|
||||
b"feels".into(),
|
||||
b"warm".into(),
|
||||
]);
|
||||
let leaves = padded_leaves(["desert", "sand", "feels", "warm"]);
|
||||
let r = root(&leaves);
|
||||
|
||||
// --- proof for element at idx 0
|
307
emmarin/cl/cl/src/ds/mmr.rs
Normal file
@ -0,0 +1,307 @@
|
||||
use crate::ds::merkle;
|
||||
use crate::{Digest, Hash};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// the roots of empty merkle trees of different heights
|
||||
static EMPTY_ROOTS: [[u8; 32]; 32] = [
|
||||
[0; 32],
|
||||
[
|
||||
177, 12, 156, 85, 54, 248, 12, 101, 211, 140, 131, 82, 142, 82, 119, 152, 90, 154, 54, 64,
|
||||
122, 123, 61, 166, 144, 13, 169, 27, 61, 69, 25, 49,
|
||||
],
|
||||
[
|
||||
192, 157, 232, 29, 199, 66, 141, 214, 64, 82, 152, 83, 59, 136, 250, 91, 209, 32, 143, 28,
|
||||
190, 109, 233, 43, 28, 58, 90, 240, 214, 89, 0, 157,
|
||||
],
|
||||
[
|
||||
234, 171, 4, 69, 194, 82, 65, 197, 220, 105, 217, 26, 78, 139, 7, 35, 123, 137, 173, 57,
|
||||
224, 85, 154, 88, 114, 245, 145, 12, 58, 131, 158, 126,
|
||||
],
|
||||
[
|
||||
200, 121, 6, 41, 76, 104, 193, 220, 1, 170, 134, 10, 156, 51, 252, 2, 116, 137, 120, 220,
|
||||
198, 203, 132, 233, 175, 242, 212, 37, 237, 112, 220, 85,
|
||||
],
|
||||
[
|
||||
243, 70, 215, 24, 17, 201, 70, 193, 170, 22, 243, 226, 154, 3, 91, 175, 130, 131, 163, 76,
|
||||
238, 174, 153, 166, 34, 53, 59, 177, 188, 93, 88, 109,
|
||||
],
|
||||
[
|
||||
47, 194, 241, 92, 49, 216, 212, 37, 215, 16, 16, 92, 141, 120, 190, 171, 192, 166, 167, 90,
|
||||
241, 16, 216, 221, 137, 26, 189, 228, 22, 8, 29, 230,
|
||||
],
|
||||
[
|
||||
114, 18, 79, 249, 200, 32, 139, 234, 8, 208, 147, 247, 248, 158, 45, 172, 74, 203, 42, 8,
|
||||
111, 32, 54, 21, 41, 79, 254, 184, 180, 21, 124, 74,
|
||||
],
|
||||
[
|
||||
145, 84, 112, 4, 33, 107, 225, 144, 128, 175, 222, 242, 151, 233, 251, 72, 111, 174, 96,
|
||||
156, 47, 199, 103, 138, 225, 136, 122, 77, 113, 155, 234, 247,
|
||||
],
|
||||
[
|
||||
11, 157, 239, 22, 43, 157, 252, 172, 170, 216, 246, 54, 17, 250, 62, 150, 56, 71, 10, 199,
|
||||
73, 149, 210, 55, 128, 177, 66, 3, 53, 117, 251, 183,
|
||||
],
|
||||
[
|
||||
185, 189, 114, 54, 194, 160, 33, 78, 253, 117, 195, 9, 8, 5, 98, 153, 232, 236, 51, 123,
|
||||
149, 89, 219, 121, 144, 24, 131, 23, 133, 185, 43, 84,
|
||||
],
|
||||
[
|
||||
112, 167, 71, 47, 253, 157, 13, 91, 220, 65, 136, 163, 159, 67, 93, 31, 20, 26, 211, 53, 3,
|
||||
87, 214, 79, 139, 91, 175, 186, 241, 96, 36, 50,
|
||||
],
|
||||
[
|
||||
194, 180, 108, 122, 130, 69, 19, 30, 123, 135, 82, 112, 184, 120, 190, 218, 243, 195, 112,
|
||||
62, 233, 93, 50, 163, 17, 113, 50, 116, 204, 0, 154, 48,
|
||||
],
|
||||
[
|
||||
148, 210, 36, 218, 105, 22, 94, 122, 161, 188, 141, 168, 111, 73, 85, 240, 124, 61, 14,
|
||||
224, 230, 127, 232, 216, 62, 226, 15, 241, 178, 214, 74, 146,
|
||||
],
|
||||
[
|
||||
40, 223, 100, 218, 109, 7, 142, 65, 131, 44, 18, 199, 189, 186, 19, 141, 26, 17, 199, 237,
|
||||
175, 131, 246, 119, 240, 208, 9, 158, 20, 61, 123, 78,
|
||||
],
|
||||
[
|
||||
201, 24, 167, 145, 146, 0, 225, 211, 222, 4, 168, 99, 66, 145, 227, 153, 137, 203, 210, 71,
|
||||
159, 65, 73, 114, 68, 95, 197, 195, 252, 157, 176, 136,
|
||||
],
|
||||
[
|
||||
48, 213, 33, 6, 16, 231, 203, 89, 97, 59, 140, 45, 122, 220, 219, 100, 28, 28, 11, 94, 152,
|
||||
121, 73, 81, 17, 43, 221, 62, 168, 253, 60, 75,
|
||||
],
|
||||
[
|
||||
235, 42, 170, 207, 251, 244, 212, 33, 244, 247, 205, 152, 200, 175, 127, 130, 29, 185, 12,
|
||||
168, 155, 181, 186, 70, 143, 116, 118, 125, 213, 61, 133, 216,
|
||||
],
|
||||
[
|
||||
114, 156, 155, 68, 120, 46, 130, 183, 148, 220, 222, 87, 255, 204, 77, 158, 109, 250, 218,
|
||||
97, 85, 113, 90, 210, 38, 127, 1, 108, 150, 234, 218, 8,
|
||||
],
|
||||
[
|
||||
23, 0, 234, 63, 219, 38, 225, 234, 86, 65, 254, 152, 99, 26, 147, 35, 220, 157, 73, 119,
|
||||
125, 42, 230, 7, 31, 193, 194, 14, 3, 66, 238, 182,
|
||||
],
|
||||
[
|
||||
98, 183, 177, 156, 96, 245, 221, 11, 101, 129, 202, 229, 95, 119, 42, 206, 89, 94, 213,
|
||||
165, 7, 78, 36, 88, 2, 102, 137, 50, 212, 33, 228, 222,
|
||||
],
|
||||
[
|
||||
72, 59, 68, 178, 17, 108, 122, 234, 144, 160, 205, 221, 106, 249, 141, 34, 247, 190, 97,
|
||||
192, 237, 171, 37, 251, 238, 87, 249, 236, 210, 120, 99, 114,
|
||||
],
|
||||
[
|
||||
199, 172, 23, 156, 51, 202, 195, 224, 29, 147, 201, 201, 224, 152, 153, 28, 175, 3, 39, 40,
|
||||
14, 98, 231, 38, 117, 171, 80, 6, 102, 236, 107, 67,
|
||||
],
|
||||
[
|
||||
130, 105, 50, 158, 64, 150, 93, 137, 190, 66, 61, 158, 243, 130, 105, 85, 76, 126, 192,
|
||||
139, 131, 236, 181, 34, 227, 186, 123, 81, 124, 83, 236, 53,
|
||||
],
|
||||
[
|
||||
29, 170, 86, 82, 122, 96, 225, 198, 251, 48, 125, 20, 235, 213, 119, 64, 95, 24, 196, 180,
|
||||
170, 18, 173, 51, 243, 126, 249, 126, 222, 136, 100, 29,
|
||||
],
|
||||
[
|
||||
144, 79, 68, 40, 85, 101, 172, 71, 165, 66, 18, 29, 183, 16, 224, 80, 32, 242, 43, 104,
|
||||
247, 113, 196, 87, 107, 148, 111, 209, 145, 145, 193, 172,
|
||||
],
|
||||
[
|
||||
247, 113, 160, 20, 26, 123, 24, 107, 219, 159, 232, 236, 212, 181, 146, 159, 254, 102, 166,
|
||||
103, 141, 17, 38, 106, 73, 250, 12, 56, 18, 126, 253, 59,
|
||||
],
|
||||
[
|
||||
161, 111, 104, 235, 136, 130, 176, 167, 161, 49, 57, 160, 91, 220, 207, 169, 208, 228, 131,
|
||||
64, 251, 123, 30, 207, 135, 64, 14, 80, 39, 91, 44, 30,
|
||||
],
|
||||
[
|
||||
213, 239, 239, 81, 151, 152, 116, 196, 117, 174, 223, 128, 213, 197, 4, 49, 154, 132, 187,
|
||||
96, 86, 68, 237, 185, 223, 205, 118, 91, 158, 98, 202, 176,
|
||||
],
|
||||
[
|
||||
52, 136, 50, 107, 42, 155, 186, 152, 251, 91, 53, 50, 239, 148, 165, 86, 84, 80, 117, 168,
|
||||
142, 47, 181, 177, 49, 210, 235, 228, 6, 189, 23, 175,
|
||||
],
|
||||
[
|
||||
40, 108, 31, 110, 180, 110, 13, 47, 169, 96, 51, 163, 201, 72, 25, 8, 134, 12, 176, 44,
|
||||
221, 250, 108, 225, 154, 236, 208, 26, 170, 126, 80, 12,
|
||||
],
|
||||
[
|
||||
185, 231, 113, 255, 127, 172, 246, 169, 177, 34, 116, 231, 131, 19, 25, 81, 91, 136, 95,
|
||||
192, 80, 179, 134, 27, 205, 18, 151, 234, 202, 116, 165, 249,
|
||||
],
|
||||
];
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)]
|
||||
pub struct MMR {
|
||||
pub roots: Vec<Root>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, PartialOrd, Ord)]
|
||||
pub struct Root {
|
||||
pub root: [u8; 32],
|
||||
pub height: u8,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct MMRProof {
|
||||
pub path: Vec<merkle::PathNode>,
|
||||
}
|
||||
|
||||
impl MMRProof {
|
||||
pub fn root(&self, elem: &[u8]) -> [u8; 32] {
|
||||
let leaf = merkle::leaf(elem);
|
||||
merkle::path_root(leaf, &self.path)
|
||||
}
|
||||
}
|
||||
|
||||
impl MMR {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn push(&mut self, elem: &[u8]) -> MMRProof {
|
||||
let new_root = Root {
|
||||
root: merkle::leaf(elem),
|
||||
height: 1,
|
||||
};
|
||||
self.roots.push(new_root);
|
||||
|
||||
let mut path = vec![];
|
||||
|
||||
for i in (1..self.roots.len()).rev() {
|
||||
if self.roots[i].height == self.roots[i - 1].height {
|
||||
path.push(merkle::PathNode::Left(self.roots[i - 1].root));
|
||||
|
||||
self.roots[i - 1] = Root {
|
||||
root: merkle::node(self.roots[i - 1].root, self.roots[i].root),
|
||||
height: self.roots[i - 1].height + 1,
|
||||
};
|
||||
|
||||
self.roots.remove(i);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
MMRProof { path }
|
||||
}
|
||||
|
||||
pub fn verify_proof(&self, elem: &[u8], proof: &MMRProof) -> bool {
|
||||
let path_len = proof.path.len();
|
||||
let root = proof.root(elem);
|
||||
|
||||
for mmr_root in self.roots.iter() {
|
||||
if mmr_root.height == (path_len + 1) as u8 {
|
||||
return mmr_root.root == root;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub fn commit(&self) -> [u8; 32] {
|
||||
// TODO: bagging the peaks
|
||||
let mut hasher = Hash::new();
|
||||
for mmr_root in self.roots.iter() {
|
||||
hasher.update(mmr_root.root);
|
||||
hasher.update(mmr_root.height.to_le_bytes());
|
||||
}
|
||||
hasher.finalize().into()
|
||||
}
|
||||
|
||||
pub fn frontier_root(&self) -> [u8; 32] {
|
||||
if self.roots.is_empty() {
|
||||
return EMPTY_ROOTS[0];
|
||||
}
|
||||
if self.roots.len() == 1 {
|
||||
return self.roots[0].root;
|
||||
}
|
||||
let mut root = EMPTY_ROOTS[0];
|
||||
let mut depth = 1;
|
||||
for last in self.roots.iter().rev() {
|
||||
while depth < last.height {
|
||||
root = merkle::node(root, EMPTY_ROOTS[depth as usize - 1]);
|
||||
depth += 1;
|
||||
}
|
||||
root = merkle::node(last.root, root);
|
||||
depth += 1;
|
||||
}
|
||||
|
||||
root
|
||||
}
|
||||
}
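// Sketch (hypothetical helper, not in this change): pushing into the MMR acts
// like incrementing a binary counter over peak heights; equal-height peaks are
// merged on the spot. After pushing 2^k elements there is a single peak of
// height k + 1 (heights are 1-based, as in `Root::height`).
fn peak_heights_after(n: u64) -> Vec<u8> {
    let mut mmr = MMR::new();
    for i in 0..n {
        mmr.push(&i.to_le_bytes());
    }
    mmr.roots.iter().map(|r| r.height).collect()
}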
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use proptest_macro::property_test;
|
||||
|
||||
#[test]
|
||||
fn test_empty_roots() {
|
||||
assert_eq!(EMPTY_ROOTS.len(), 32);
|
||||
|
||||
let mut root = [0; 32];
|
||||
for expected_root in EMPTY_ROOTS {
|
||||
assert_eq!(root, expected_root);
|
||||
root = merkle::node(root, root);
|
||||
}
|
||||
}
|
||||
|
||||
#[property_test]
|
||||
fn test_frontier_root(elems: Vec<[u8; 32]>) {
|
||||
let mut mmr = MMR::new();
|
||||
for elem in &elems {
|
||||
mmr.push(elem);
|
||||
}
|
||||
assert_eq!(
|
||||
mmr.frontier_root(),
|
||||
merkle::root(&merkle::padded_leaves(elems))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mmr_push() {
|
||||
let mut mmr = MMR::new();
|
||||
let proof = mmr.push(b"hello");
|
||||
|
||||
assert_eq!(mmr.roots.len(), 1);
|
||||
assert_eq!(mmr.roots[0].height, 1);
|
||||
assert_eq!(mmr.roots[0].root, merkle::leaf(b"hello"));
|
||||
assert!(mmr.verify_proof(b"hello", &proof));
|
||||
|
||||
let proof = mmr.push(b"world");
|
||||
|
||||
assert_eq!(mmr.roots.len(), 1);
|
||||
assert_eq!(mmr.roots[0].height, 2);
|
||||
assert_eq!(
|
||||
mmr.roots[0].root,
|
||||
merkle::node(merkle::leaf(b"hello"), merkle::leaf(b"world"))
|
||||
);
|
||||
assert!(mmr.verify_proof(b"world", &proof));
|
||||
|
||||
let proof = mmr.push(b"!");
|
||||
|
||||
assert_eq!(mmr.roots.len(), 2);
|
||||
assert_eq!(mmr.roots[0].height, 2);
|
||||
assert_eq!(
|
||||
mmr.roots[0].root,
|
||||
merkle::node(merkle::leaf(b"hello"), merkle::leaf(b"world"))
|
||||
);
|
||||
assert_eq!(mmr.roots[1].height, 1);
|
||||
assert_eq!(mmr.roots[1].root, merkle::leaf(b"!"));
|
||||
assert!(mmr.verify_proof(b"!", &proof));
|
||||
|
||||
let proof = mmr.push(b"!");
|
||||
|
||||
assert_eq!(mmr.roots.len(), 1);
|
||||
assert_eq!(mmr.roots[0].height, 3);
|
||||
assert_eq!(
|
||||
mmr.roots[0].root,
|
||||
merkle::node(
|
||||
merkle::node(merkle::leaf(b"hello"), merkle::leaf(b"world")),
|
||||
merkle::node(merkle::leaf(b"!"), merkle::leaf(b"!"))
|
||||
)
|
||||
);
|
||||
assert!(mmr.verify_proof(b"!", &proof));
|
||||
}
|
||||
}
|
3
emmarin/cl/cl/src/ds/mod.rs
Normal file
@ -0,0 +1,3 @@
|
||||
pub mod indexed;
|
||||
pub mod merkle;
|
||||
pub mod mmr;
|
@ -1,2 +1,12 @@
|
||||
pub mod cl;
|
||||
pub mod zone_layer;
|
||||
pub mod crust;
|
||||
pub mod ds;
|
||||
pub mod mantle;
|
||||
|
||||
pub type Hash = risc0_zkvm::sha::rust_crypto::Sha256;
|
||||
pub use digest::Digest;
|
||||
|
||||
pub fn hash(data: &[u8]) -> [u8; 32] {
|
||||
let mut hasher = Hash::new();
|
||||
hasher.update(data);
|
||||
hasher.finalize().into()
|
||||
}
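// Sketch (uses only the `Hash`/`Digest` re-exports above): feeding the hasher
// incrementally gives the same digest as the one-shot `hash` helper on the
// concatenated input.
fn streaming_matches_one_shot(a: &[u8], b: &[u8]) -> bool {
    let mut hasher = Hash::new();
    hasher.update(a);
    hasher.update(b);
    let streamed: [u8; 32] = hasher.finalize().into();
    streamed == hash(&[a, b].concat())
}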
|
||||
|
1
emmarin/cl/cl/src/mantle/indexed.rs
Normal file
@ -0,0 +1 @@
|
||||
|
@ -1,7 +1,7 @@
|
||||
use crate::cl::{
|
||||
indexed::{BatchUpdateProof, NullifierTree},
|
||||
mmr::{MMRProof, MMR},
|
||||
NoteCommitment, Nullifier,
|
||||
use crate::{
|
||||
crust::{NoteCommitment, Nullifier},
|
||||
ds::indexed::{BatchUpdateProof, NullifierTree},
|
||||
ds::mmr::{MMRProof, MMR},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
5
emmarin/cl/cl/src/mantle/mod.rs
Normal file
@ -0,0 +1,5 @@
|
||||
pub mod ledger;
|
||||
pub mod update;
|
||||
pub mod zone;
|
||||
|
||||
pub use zone::{Stf, ZoneId, ZoneState};
|
13
emmarin/cl/cl/src/mantle/update.rs
Normal file
@ -0,0 +1,13 @@
|
||||
use crate::mantle::ZoneState;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
pub struct BatchUpdate {
|
||||
pub updates: Vec<Update>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
pub struct Update {
|
||||
pub old: ZoneState,
|
||||
pub new: ZoneState,
|
||||
}
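// Sketch (hypothetical): a batch is just a list of per-zone transitions, each
// pairing the previous ZoneState with the one it is replaced by.
fn single_zone_batch(old: ZoneState, new: ZoneState) -> BatchUpdate {
    BatchUpdate {
        updates: vec![Update { old, new }],
    }
}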
|
@ -2,13 +2,12 @@ use super::ledger::Ledger;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
|
||||
pub struct ZoneNote {
|
||||
pub struct ZoneState {
|
||||
pub stf: Stf,
|
||||
pub state: State,
|
||||
pub zone_data: ZoneData,
|
||||
pub ledger: Ledger,
|
||||
pub id: [u8; 32],
|
||||
}
|
||||
|
||||
pub type Stf = [u8; 32];
|
||||
pub type ZoneId = [u8; 32];
|
||||
pub type State = [u8; 32];
|
||||
pub type ZoneData = [u8; 32];
|
@ -1,3 +0,0 @@
|
||||
pub mod ledger;
|
||||
pub mod notes;
|
||||
pub mod tx;
|
@ -1,24 +0,0 @@
|
||||
use super::notes::ZoneNote;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
pub struct UpdateBundle {
|
||||
pub updates: Vec<ZoneUpdate>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
pub struct ZoneUpdate {
|
||||
pub old: ZoneNote,
|
||||
pub new: ZoneNote,
|
||||
}
|
||||
|
||||
impl ZoneUpdate {
|
||||
pub fn new(old: ZoneNote, new: ZoneNote) -> Self {
|
||||
assert_eq!(old.id, new.id);
|
||||
Self { old, new }
|
||||
}
|
||||
|
||||
pub fn well_formed(&self) -> bool {
|
||||
self.old.id == self.new.id
|
||||
}
|
||||
}
|
@ -1,18 +1,16 @@
|
||||
use cl::{
|
||||
cl::{
|
||||
note::derive_unit, BalanceWitness, InputWitness, NoteWitness, NullifierCommitment,
|
||||
NullifierSecret, OutputWitness, PartialTxWitness,
|
||||
},
|
||||
zone_layer::notes::ZoneId,
|
||||
};
|
||||
use cl::crust::{InputWitness, Nonce, NullifierSecret, OutputWitness, TxWitness, UnitWitness};
|
||||
|
||||
fn receive_utxo(note: NoteWitness, nf_pk: NullifierCommitment, zone_id: ZoneId) -> OutputWitness {
|
||||
OutputWitness::new(note, nf_pk, zone_id)
|
||||
fn nmo_unit() -> UnitWitness {
|
||||
UnitWitness {
|
||||
spending_covenant: [0; 32],
|
||||
minting_covenant: [0; 32],
|
||||
burning_covenant: [0; 32],
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_simple_transfer() {
|
||||
let nmo = derive_unit("NMO");
|
||||
let nmo = nmo_unit();
|
||||
let mut rng = rand::thread_rng();
|
||||
let zone_id = [0; 32];
|
||||
|
||||
@ -22,22 +20,43 @@ fn test_simple_transfer() {
|
||||
let recipient_nf_pk = NullifierSecret::random(&mut rng).commit();
|
||||
|
||||
// Assume the sender has received an unspent output from somewhere
|
||||
let utxo = receive_utxo(NoteWitness::basic(10, nmo, &mut rng), sender_nf_pk, zone_id);
|
||||
|
||||
// and wants to send 8 NMO to some recipient and return 2 NMO to itself.
|
||||
let recipient_output = OutputWitness::new(
|
||||
NoteWitness::basic(8, nmo, &mut rng),
|
||||
recipient_nf_pk,
|
||||
let utxo = OutputWitness {
|
||||
state: [0; 32],
|
||||
value: 10,
|
||||
unit: nmo.unit(),
|
||||
nonce: Nonce::random(&mut rng),
|
||||
zone_id,
|
||||
);
|
||||
let change_output =
|
||||
OutputWitness::new(NoteWitness::basic(2, nmo, &mut rng), sender_nf_pk, zone_id);
|
||||
|
||||
let ptx_witness = PartialTxWitness {
|
||||
inputs: vec![InputWitness::from_output(utxo, sender_nf_sk)],
|
||||
outputs: vec![recipient_output, change_output],
|
||||
balance_blinding: BalanceWitness::random_blinding(&mut rng),
|
||||
nf_pk: sender_nf_pk,
|
||||
};
|
||||
|
||||
assert!(ptx_witness.balance().is_zero())
|
||||
// and wants to send 8 NMO to some recipient and return 2 NMO to itself.
|
||||
let recipient_output = OutputWitness {
|
||||
state: [0; 32],
|
||||
value: 8,
|
||||
unit: nmo.unit(),
|
||||
nonce: Nonce::random(&mut rng),
|
||||
zone_id,
|
||||
nf_pk: recipient_nf_pk,
|
||||
};
|
||||
let change_output = OutputWitness {
|
||||
state: [0; 32],
|
||||
value: 2,
|
||||
unit: nmo.unit(),
|
||||
nonce: Nonce::random(&mut rng),
|
||||
zone_id,
|
||||
nf_pk: sender_nf_pk,
|
||||
};
|
||||
|
||||
let tx_witness = TxWitness {
|
||||
inputs: vec![InputWitness::from_output(utxo, sender_nf_sk, nmo)],
|
||||
outputs: vec![(recipient_output, Vec::new()), (change_output, Vec::new())],
|
||||
data: vec![],
|
||||
mints: vec![],
|
||||
burns: vec![],
|
||||
frontier_paths: vec![],
|
||||
};
|
||||
|
||||
assert!(tx_witness
|
||||
.balance(&tx_witness.mint_amounts(), &tx_witness.burn_amounts())
|
||||
.is_zero())
|
||||
}
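// Sketch (hypothetical companion test, reusing nmo_unit() above): if the 2 NMO
// change output is dropped, the inputs and outputs no longer cancel and the
// zero-balance check fails.
#[test]
fn test_unbalanced_transfer_does_not_balance() {
    let nmo = nmo_unit();
    let mut rng = rand::thread_rng();
    let zone_id = [0; 32];

    let sender_nf_sk = NullifierSecret::random(&mut rng);
    let sender_nf_pk = sender_nf_sk.commit();
    let recipient_nf_pk = NullifierSecret::random(&mut rng).commit();

    let utxo = OutputWitness {
        state: [0; 32],
        value: 10,
        unit: nmo.unit(),
        nonce: Nonce::random(&mut rng),
        zone_id,
        nf_pk: sender_nf_pk,
    };

    // Send 8 NMO but "forget" the 2 NMO change output.
    let recipient_output = OutputWitness {
        state: [0; 32],
        value: 8,
        unit: nmo.unit(),
        nonce: Nonce::random(&mut rng),
        zone_id,
        nf_pk: recipient_nf_pk,
    };

    let tx_witness = TxWitness {
        inputs: vec![InputWitness::from_output(utxo, sender_nf_sk, nmo)],
        outputs: vec![(recipient_output, Vec::new())],
        data: vec![],
        mints: vec![],
        burns: vec![],
        frontier_paths: vec![],
    };

    assert!(!tx_witness
        .balance(&tx_witness.mint_amounts(), &tx_witness.burn_amounts())
        .is_zero())
}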
|
||||
|
@ -6,9 +6,9 @@ edition = "2021"
|
||||
[dependencies]
|
||||
cl = { path = "../cl" }
|
||||
ledger_proof_statements = { path = "../ledger_proof_statements" }
|
||||
nomos_cl_risc0_proofs = { path = "../risc0_proofs" }
|
||||
nomos_mantle_risc0_proofs = { path = "../risc0_proofs" }
|
||||
nomos_cl_bundle_risc0_proof = { path = "../bundle_risc0_proof" }
|
||||
nomos_cl_ptx_risc0_proof = { path = "../ptx_risc0_proof" }
|
||||
nomos_cl_tx_risc0_proof = { path = "../tx_risc0_proof" }
|
||||
ledger_validity_proof = { path = "../ledger_validity_proof" }
|
||||
risc0-zkvm = { version = "1.0", features = ["prove", "metal"] }
|
||||
risc0-groth16 = { version = "1.0" }
|
||||
|
@ -1,6 +1,5 @@
|
||||
use ledger_proof_statements::bundle::{BundlePrivate, BundlePublic};
|
||||
|
||||
use crate::partial_tx::ProvedPartialTx;
|
||||
use crate::tx::ProvedTx;
|
||||
use cl::crust::{Bundle, BundleWitness};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ProvedBundle {
|
||||
@ -8,12 +7,12 @@ pub struct ProvedBundle {
|
||||
}
|
||||
|
||||
impl ProvedBundle {
|
||||
pub fn prove(bundle: &BundlePrivate, partials: Vec<ProvedPartialTx>) -> Self {
|
||||
pub fn prove(bundle: &BundleWitness, txs: Vec<ProvedTx>) -> Self {
|
||||
// show that all txs are individually valid and that the bundle balances to 0
|
||||
let mut env = risc0_zkvm::ExecutorEnv::builder();
|
||||
|
||||
for proved_ptx in partials {
|
||||
env.add_assumption(proved_ptx.risc0_receipt);
|
||||
for proved_tx in txs {
|
||||
env.add_assumption(proved_tx.risc0_receipt);
|
||||
}
|
||||
|
||||
let env = env.write(&bundle).unwrap().build().unwrap();
|
||||
@ -28,8 +27,9 @@ impl ProvedBundle {
|
||||
.unwrap();
|
||||
|
||||
println!(
|
||||
"STARK 'bundle' prover time: {:.2?}, total_cycles: {}",
|
||||
"STARK 'bundle' prover time: {:.2?}, user_cycles: {}, total_cycles: {}",
|
||||
start_t.elapsed(),
|
||||
prove_info.stats.user_cycles,
|
||||
prove_info.stats.total_cycles
|
||||
);
|
||||
|
||||
@ -40,7 +40,7 @@ impl ProvedBundle {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn public(&self) -> BundlePublic {
|
||||
pub fn public(&self) -> Bundle {
|
||||
self.risc0_receipt.journal.decode().unwrap()
|
||||
}
|
||||
|
||||
|
@ -1,75 +0,0 @@
|
||||
use cl::cl::{Constraint, Nullifier, PtxRoot};
|
||||
use ledger_proof_statements::constraint::ConstraintPublic;
|
||||
|
||||
use crate::error::Result;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ConstraintProof {
|
||||
pub risc0_id: [u32; 8],
|
||||
pub risc0_receipt: risc0_zkvm::Receipt,
|
||||
}
|
||||
|
||||
pub fn risc0_constraint(risc0_id: [u32; 8]) -> Constraint {
|
||||
unsafe { Constraint(core::mem::transmute::<[u32; 8], [u8; 32]>(risc0_id)) }
|
||||
}
|
||||
|
||||
impl ConstraintProof {
|
||||
pub fn from_risc0(risc0_id: [u32; 8], risc0_receipt: risc0_zkvm::Receipt) -> Self {
|
||||
Self {
|
||||
risc0_id,
|
||||
risc0_receipt,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn constraint(&self) -> Constraint {
|
||||
risc0_constraint(self.risc0_id)
|
||||
}
|
||||
|
||||
pub fn public(&self) -> Result<ConstraintPublic> {
|
||||
Ok(self.risc0_receipt.journal.decode()?)
|
||||
}
|
||||
|
||||
pub fn verify(&self, expected_public: ConstraintPublic) -> bool {
|
||||
let Ok(public) = self.public() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
expected_public == public && self.risc0_receipt.verify(self.risc0_id).is_ok()
|
||||
}
|
||||
|
||||
pub fn nop_constraint() -> Constraint {
|
||||
risc0_constraint(nomos_cl_risc0_proofs::CONSTRAINT_NOP_ID)
|
||||
}
|
||||
|
||||
pub fn prove_nop(nf: Nullifier, ptx_root: PtxRoot) -> Self {
|
||||
let constraint_public = ConstraintPublic { nf, ptx_root };
|
||||
let env = risc0_zkvm::ExecutorEnv::builder()
|
||||
.write(&constraint_public)
|
||||
.unwrap()
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
// Obtain the default prover.
|
||||
let prover = risc0_zkvm::default_prover();
|
||||
|
||||
let start_t = std::time::Instant::now();
|
||||
|
||||
// Proof information by proving the specified ELF binary.
|
||||
// This struct contains the receipt along with statistics about execution of the guest
|
||||
let opts = risc0_zkvm::ProverOpts::succinct();
|
||||
let prove_info = prover
|
||||
.prove_with_opts(env, nomos_cl_risc0_proofs::CONSTRAINT_NOP_ELF, &opts)
|
||||
.unwrap();
|
||||
|
||||
println!(
|
||||
"STARK 'constraint-nop' prover time: {:.2?}, total_cycles: {}",
|
||||
start_t.elapsed(),
|
||||
prove_info.stats.total_cycles
|
||||
);
|
||||
|
||||
// extract the receipt.
|
||||
let receipt = prove_info.receipt;
|
||||
|
||||
Self::from_risc0(nomos_cl_risc0_proofs::CONSTRAINT_NOP_ID, receipt)
|
||||
}
|
||||
}
|
37
emmarin/cl/ledger/src/covenant.rs
Normal file
@ -0,0 +1,37 @@
|
||||
use ledger_proof_statements::covenant::{SpendingCovenantPublic, SupplyCovenantPublic};
|
||||
|
||||
use crate::error::Result;
|
||||
|
||||
macro_rules! impl_covenant_proof {
|
||||
($name:ident, $public:ident) => {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct $name {
|
||||
pub risc0_id: [u32; 8],
|
||||
pub risc0_receipt: risc0_zkvm::Receipt,
|
||||
}
|
||||
|
||||
impl $name {
|
||||
pub fn from_risc0(risc0_id: [u32; 8], risc0_receipt: risc0_zkvm::Receipt) -> Self {
|
||||
Self {
|
||||
risc0_id,
|
||||
risc0_receipt,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn public(&self) -> Result<$public> {
|
||||
Ok(self.risc0_receipt.journal.decode()?)
|
||||
}
|
||||
|
||||
pub fn verify(&self, expected_public: $public) -> bool {
|
||||
let Ok(public) = self.public() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
expected_public == public && self.risc0_receipt.verify(self.risc0_id).is_ok()
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_covenant_proof!(SupplyCovenantProof, SupplyCovenantPublic);
|
||||
impl_covenant_proof!(SpendingCovenantProof, SpendingCovenantPublic);
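// Sketch (hypothetical usage): both generated proof types expose the same
// interface, so a tx prover can check a supply-covenant receipt against the
// public inputs it expects before bundling it.
pub fn check_supply_covenant(
    proof: &SupplyCovenantProof,
    expected: SupplyCovenantPublic,
) -> bool {
    // `verify` decodes the journal and checks the receipt against `risc0_id`.
    proof.verify(expected)
}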
|
@ -3,7 +3,7 @@ use std::collections::BTreeMap;
|
||||
use ledger_proof_statements::ledger::{LedgerBundleWitness, LedgerProofPrivate, LedgerProofPublic};
|
||||
|
||||
use crate::bundle::ProvedBundle;
|
||||
use cl::zone_layer::{ledger::LedgerState, notes::ZoneId};
|
||||
use cl::mantle::{ledger::LedgerState, zone::ZoneId};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ProvedLedgerTransition {
|
||||
@ -22,20 +22,21 @@ impl ProvedLedgerTransition {
|
||||
let bundle = proved_bundle.public();
|
||||
|
||||
let zone_ledger_update = bundle
|
||||
.zone_ledger_updates
|
||||
.get(&zone_id)
|
||||
.updates
|
||||
.iter()
|
||||
.find(|update| update.zone_id == zone_id)
|
||||
.expect("why are we proving this bundle for this zone if it's not involved?");
|
||||
|
||||
let cm_root_proofs =
|
||||
BTreeMap::from_iter(zone_ledger_update.cm_roots.iter().map(|root| {
|
||||
BTreeMap::from_iter(zone_ledger_update.frontier_nodes.iter().map(|root| {
|
||||
// We make the simplifying assumption that bundle proofs
|
||||
// are done w.r.t. the latest MMR (hence, empty merkle proofs)
|
||||
//
|
||||
// We can remove this assumption by tracking old MMR roots in the LedgerState
|
||||
(*root, vec![])
|
||||
(root.root, vec![])
|
||||
}));
|
||||
|
||||
nullifiers.extend(zone_ledger_update.nullifiers.clone());
|
||||
nullifiers.extend(zone_ledger_update.inputs.clone());
|
||||
|
||||
let ledger_bundle = LedgerBundleWitness {
|
||||
bundle,
|
||||
@ -68,8 +69,9 @@ impl ProvedLedgerTransition {
|
||||
.unwrap();
|
||||
|
||||
println!(
|
||||
"STARK 'ledger' prover time: {:.2?}, total_cycles: {}",
|
||||
"STARK 'ledger' prover time: {:.2?}, user_cycles: {}, total_cycles: {}",
|
||||
start_t.elapsed(),
|
||||
prove_info.stats.user_cycles,
|
||||
prove_info.stats.total_cycles
|
||||
);
|
||||
|
||||
|
@ -1,9 +1,7 @@
|
||||
pub mod bundle;
|
||||
pub mod constraint;
|
||||
pub mod covenant;
|
||||
pub mod error;
|
||||
pub mod ledger;
|
||||
pub mod partial_tx;
|
||||
pub mod stf;
|
||||
pub mod zone_update;
|
||||
|
||||
pub use constraint::ConstraintProof;
|
||||
pub mod tx;
|
||||
pub mod update;
|
||||
|
@ -1,4 +1,4 @@
|
||||
use cl::zone_layer::notes::Stf;
|
||||
use cl::mantle::zone::Stf;
|
||||
use ledger_proof_statements::stf::StfPublic;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@ -8,7 +8,7 @@ pub struct StfProof {
|
||||
pub risc0_receipt: risc0_zkvm::Receipt,
|
||||
}
|
||||
|
||||
pub fn risc0_constraint(risc0_id: [u32; 8]) -> Stf {
|
||||
pub fn risc0_stf(risc0_id: [u32; 8]) -> Stf {
|
||||
// TODO: hash
|
||||
|
||||
unsafe { core::mem::transmute::<[u32; 8], [u8; 32]>(risc0_id) }
|
||||
@ -24,12 +24,16 @@ impl StfProof {
|
||||
}
|
||||
|
||||
pub fn stf(&self) -> Stf {
|
||||
risc0_constraint(self.risc0_id)
|
||||
risc0_stf(self.risc0_id)
|
||||
}
|
||||
pub fn verify(&self) -> bool {
|
||||
self.risc0_receipt.verify(self.risc0_id).is_ok()
|
||||
}
|
||||
|
||||
pub fn nop_stf() -> [u8; 32] {
|
||||
risc0_stf(nomos_mantle_risc0_proofs::STF_NOP_ID)
|
||||
}
|
||||
|
||||
pub fn prove_nop(public: StfPublic) -> Self {
|
||||
let env = risc0_zkvm::ExecutorEnv::builder()
|
||||
.write(&public)
|
||||
@ -43,19 +47,20 @@ impl StfProof {
|
||||
|
||||
let opts = risc0_zkvm::ProverOpts::succinct();
|
||||
let prove_info = prover
|
||||
.prove_with_opts(env, nomos_cl_risc0_proofs::STF_NOP_ELF, &opts)
|
||||
.prove_with_opts(env, nomos_mantle_risc0_proofs::STF_NOP_ELF, &opts)
|
||||
.unwrap();
|
||||
|
||||
println!(
|
||||
"STARK 'stf' prover time: {:.2?}, total_cycles: {}",
|
||||
"STARK 'stf' prover time: {:.2?}, user_cycles: {}, total_cycles: {}",
|
||||
start_t.elapsed(),
|
||||
prove_info.stats.user_cycles,
|
||||
prove_info.stats.total_cycles
|
||||
);
|
||||
|
||||
let receipt = prove_info.receipt;
|
||||
|
||||
Self {
|
||||
risc0_id: nomos_cl_risc0_proofs::STF_NOP_ID,
|
||||
risc0_id: nomos_mantle_risc0_proofs::STF_NOP_ID,
|
||||
public,
|
||||
risc0_receipt: receipt,
|
||||
}
|
||||
|
@ -1,36 +1,31 @@
|
||||
use ledger_proof_statements::ptx::{PtxPrivate, PtxPublic};
|
||||
|
||||
use crate::{
|
||||
covenant::{SpendingCovenantProof, SupplyCovenantProof},
|
||||
error::{Error, Result},
|
||||
ConstraintProof,
|
||||
};
|
||||
use cl::cl::{
|
||||
mmr::{MMRProof, MMR},
|
||||
PartialTxWitness,
|
||||
};
|
||||
use cl::crust::{Tx, TxWitness};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ProvedPartialTx {
|
||||
pub struct ProvedTx {
|
||||
pub risc0_receipt: risc0_zkvm::Receipt,
|
||||
}
|
||||
|
||||
impl ProvedPartialTx {
|
||||
impl ProvedTx {
|
||||
pub fn prove(
|
||||
ptx_witness: PartialTxWitness,
|
||||
input_cm_proofs: Vec<(MMR, MMRProof)>,
|
||||
covenant_proofs: Vec<ConstraintProof>,
|
||||
) -> Result<ProvedPartialTx> {
|
||||
let ptx_private = PtxPrivate {
|
||||
ptx: ptx_witness,
|
||||
input_cm_proofs,
|
||||
};
|
||||
|
||||
tx_witness: TxWitness,
|
||||
supply_covenant_proofs: Vec<SupplyCovenantProof>,
|
||||
spending_covenant_proofs: Vec<SpendingCovenantProof>,
|
||||
) -> Result<ProvedTx> {
|
||||
let mut env = risc0_zkvm::ExecutorEnv::builder();
|
||||
|
||||
for covenant_proof in covenant_proofs {
|
||||
env.add_assumption(covenant_proof.risc0_receipt);
|
||||
for proof in spending_covenant_proofs {
|
||||
env.add_assumption(proof.risc0_receipt);
|
||||
}
|
||||
let env = env.write(&ptx_private).unwrap().build().unwrap();
|
||||
|
||||
for proof in supply_covenant_proofs {
|
||||
env.add_assumption(proof.risc0_receipt);
|
||||
}
|
||||
|
||||
let env = env.write(&tx_witness).unwrap().build().unwrap();
|
||||
|
||||
// Obtain the default prover.
|
||||
let prover = risc0_zkvm::default_prover();
|
||||
@ -41,12 +36,13 @@ impl ProvedPartialTx {
|
||||
// This struct contains the receipt along with statistics about execution of the guest
|
||||
let opts = risc0_zkvm::ProverOpts::succinct();
|
||||
let prove_info = prover
|
||||
.prove_with_opts(env, nomos_cl_ptx_risc0_proof::PTX_ELF, &opts)
|
||||
.prove_with_opts(env, nomos_cl_tx_risc0_proof::TX_ELF, &opts)
|
||||
.map_err(|_| Error::Risc0ProofFailed)?;
|
||||
|
||||
println!(
|
||||
"STARK 'ptx' prover time: {:.2?}, total_cycles: {}",
|
||||
"STARK 'tx' prover time: {:.2?}, user_cycles: {}, total_cycles: {}",
|
||||
start_t.elapsed(),
|
||||
prove_info.stats.user_cycles,
|
||||
prove_info.stats.total_cycles
|
||||
);
|
||||
|
||||
@ -55,13 +51,13 @@ impl ProvedPartialTx {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn public(&self) -> PtxPublic {
|
||||
pub fn public(&self) -> Tx {
|
||||
self.risc0_receipt.journal.decode().unwrap()
|
||||
}
|
||||
|
||||
pub fn verify(&self) -> bool {
|
||||
self.risc0_receipt
|
||||
.verify(nomos_cl_ptx_risc0_proof::PTX_ID)
|
||||
.verify(nomos_cl_tx_risc0_proof::TX_ID)
|
||||
.is_ok()
|
||||
}
|
||||
}
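// Sketch (hypothetical helper, uses only the methods above): prove a tx
// witness with no covenant proofs, sanity-check the receipt, and read the
// committed Tx back out of the journal.
pub fn prove_and_check(tx_witness: TxWitness) -> Result<Tx> {
    let proved = ProvedTx::prove(tx_witness, vec![], vec![])?;
    assert!(proved.verify());
    Ok(proved.public())
}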
|
@ -1,16 +1,16 @@
|
||||
pub use crate::error::{Error, Result};
|
||||
use crate::{ledger::ProvedLedgerTransition, stf::StfProof};
|
||||
use cl::zone_layer::tx::UpdateBundle;
|
||||
use cl::mantle::update::BatchUpdate;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ProvedUpdateBundle {
|
||||
pub bundle: UpdateBundle,
|
||||
pub struct ProvedBatchUpdate {
|
||||
pub batch: BatchUpdate,
|
||||
pub ledger_proofs: Vec<ProvedLedgerTransition>,
|
||||
pub stf_proofs: Vec<StfProof>,
|
||||
}
|
||||
|
||||
impl ProvedUpdateBundle {
|
||||
impl ProvedBatchUpdate {
|
||||
pub fn verify(&self) -> bool {
|
||||
let mut expected_zones = HashMap::new();
|
||||
let mut actual_zones = HashMap::new();
|
||||
@ -19,10 +19,10 @@ impl ProvedUpdateBundle {
|
||||
return false;
|
||||
}
|
||||
|
||||
for bundle in &proof.public().cross_bundles {
|
||||
expected_zones.insert(bundle.id, HashSet::from_iter(bundle.zones.clone()));
|
||||
for bundle in &proof.public().sync_logs {
|
||||
expected_zones.insert(bundle.bundle.0, HashSet::from_iter(bundle.zones.clone()));
|
||||
actual_zones
|
||||
.entry(bundle.id)
|
||||
.entry(bundle.bundle.0)
|
||||
.or_insert_with(HashSet::new)
|
||||
.insert(proof.public().id);
|
||||
}
|
||||
@ -39,16 +39,12 @@ impl ProvedUpdateBundle {
|
||||
}
|
||||
|
||||
for ((update, stf_proof), ledger_proof) in self
|
||||
.bundle
|
||||
.batch
|
||||
.updates
|
||||
.iter()
|
||||
.zip(self.stf_proofs.iter())
|
||||
.zip(self.ledger_proofs.iter())
|
||||
{
|
||||
if !update.well_formed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
if ledger_proof.public().old_ledger != update.old.ledger
|
||||
|| ledger_proof.public().ledger != update.new.ledger
|
||||
{
|
@ -1,28 +1,30 @@
|
||||
use cl::{
|
||||
cl::{
|
||||
balance::Unit,
|
||||
mmr::{MMRProof, MMR},
|
||||
note::derive_unit,
|
||||
BalanceWitness, InputWitness, NoteWitness, NullifierCommitment, NullifierSecret,
|
||||
OutputWitness, PartialTxWitness,
|
||||
crust::{
|
||||
balance::{UnitWitness, NOP_COVENANT},
|
||||
BundleWitness, InputWitness, Nonce, Nullifier, NullifierCommitment, NullifierSecret,
|
||||
OutputWitness, TxWitness,
|
||||
},
|
||||
zone_layer::{
|
||||
ds::mmr::{MMRProof, MMR},
|
||||
mantle::{
|
||||
ledger::LedgerState,
|
||||
notes::{ZoneId, ZoneNote},
|
||||
tx::{UpdateBundle, ZoneUpdate},
|
||||
update::{BatchUpdate, Update},
|
||||
ZoneId, ZoneState,
|
||||
},
|
||||
};
|
||||
use ledger::{
|
||||
bundle::ProvedBundle, constraint::ConstraintProof, ledger::ProvedLedgerTransition,
|
||||
partial_tx::ProvedPartialTx, stf::StfProof, zone_update::ProvedUpdateBundle,
|
||||
bundle::ProvedBundle, ledger::ProvedLedgerTransition, stf::StfProof, tx::ProvedTx,
|
||||
update::ProvedBatchUpdate,
|
||||
};
|
||||
use ledger_proof_statements::{bundle::BundlePrivate, stf::StfPublic};
|
||||
use ledger_proof_statements::stf::StfPublic;
|
||||
use rand::Rng;
|
||||
use rand_core::CryptoRngCore;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
fn nmo() -> Unit {
|
||||
static NMO: OnceLock<Unit> = OnceLock::new();
|
||||
*NMO.get_or_init(|| derive_unit("NMO"))
|
||||
fn nmo() -> UnitWitness {
|
||||
UnitWitness {
|
||||
spending_covenant: NOP_COVENANT,
|
||||
minting_covenant: NOP_COVENANT,
|
||||
burning_covenant: NOP_COVENANT,
|
||||
}
|
||||
}
|
||||
|
||||
struct User(NullifierSecret);
|
||||
@ -41,80 +43,86 @@ impl User {
|
||||
}
|
||||
}
|
||||
|
||||
fn receive_utxo(note: NoteWitness, nf_pk: NullifierCommitment, zone_id: ZoneId) -> OutputWitness {
|
||||
OutputWitness::new(note, nf_pk, zone_id)
|
||||
}
|
||||
|
||||
fn cross_transfer_transition(
|
||||
input: InputWitness,
|
||||
input_proof: (MMR, MMRProof),
|
||||
to: User,
|
||||
amount: u64,
|
||||
zone_a: ZoneId,
|
||||
zone_b: ZoneId,
|
||||
mut ledger_a: LedgerState,
|
||||
mut ledger_b: LedgerState,
|
||||
to_zone: ZoneId,
|
||||
mut ledger_in: LedgerState,
|
    mut ledger_out: LedgerState,
) -> (ProvedLedgerTransition, ProvedLedgerTransition) {
    assert!(amount <= input.note.value);
    assert!(amount <= input.value);
    println!("nfs in zone_a: {}", ledger_in.nullifiers.len());
    println!("nfs in zone_b: {}", ledger_out.nullifiers.len());

    let mut rng = rand::thread_rng();

    let change = input.note.value - amount;
    let transfer = OutputWitness::new(NoteWitness::basic(amount, nmo(), &mut rng), to.pk(), zone_b);
    let change = OutputWitness::new(
        NoteWitness::basic(change, nmo(), &mut rng),
        input.nf_sk.commit(),
        zone_a,
    );

    // Construct the ptx consuming the input and producing the two outputs.
    let ptx_witness = PartialTxWitness {
        inputs: vec![input],
        outputs: vec![transfer, change],
        balance_blinding: BalanceWitness::random_blinding(&mut rng),
    let change = input.value - amount;
    let transfer = OutputWitness {
        state: Default::default(),
        value: amount,
        unit: nmo().unit(),
        nonce: Nonce::random(&mut rng),
        zone_id: to_zone,
        nf_pk: to.pk(),
    };
    let change = OutputWitness {
        state: Default::default(),
        value: change,
        unit: nmo().unit(),
        nonce: Nonce::random(&mut rng),
        zone_id: input.zone_id,
        nf_pk: input.nf_sk.commit(), // return change to sender
    };

    // Prove the constraints for alices input (she uses the no-op constraint)
    let constraint_proof =
        ConstraintProof::prove_nop(input.nullifier(), ptx_witness.commit().root());
    // Construct the tx consuming the input and producing the two outputs.
    let tx_witness = TxWitness {
        inputs: vec![input],
        outputs: vec![(transfer, vec![]), (change, vec![])],
        data: Default::default(),
        mints: vec![],
        burns: vec![],
        frontier_paths: vec![input_proof],
    };

    let proved_ptx = ProvedPartialTx::prove(
        ptx_witness.clone(),
        vec![input_proof],
        vec![constraint_proof.clone()],
    let proved_tx = ProvedTx::prove(
        tx_witness.clone(),
        vec![],
        vec![], // we can skip covenant proofs since NMO uses no-op spend covenants
    )
    .unwrap();

    let bundle = ProvedBundle::prove(
        &BundlePrivate {
            bundle: vec![proved_ptx.public()],
            balances: vec![ptx_witness.balance()],
        &BundleWitness {
            txs: vec![proved_tx.public()],
        },
        vec![proved_ptx],
        vec![proved_tx],
    );

    println!("proving ledger A transition");
    let ledger_a_transition =
        ProvedLedgerTransition::prove(ledger_a.clone(), zone_a, vec![bundle.clone()]);
    let ledger_in_transition =
        ProvedLedgerTransition::prove(ledger_in.clone(), input.zone_id, vec![bundle.clone()]);

    println!("proving ledger B transition");
    let ledger_b_transition = ProvedLedgerTransition::prove(ledger_b.clone(), zone_b, vec![bundle]);
    let ledger_out_transition =
        ProvedLedgerTransition::prove(ledger_out.clone(), to_zone, vec![bundle]);

    ledger_a.add_commitment(&change.commit_note());
    ledger_a.add_nullifiers(vec![input.nullifier()]);
    ledger_in.add_commitment(&change.note_commitment());
    ledger_in.add_nullifiers(vec![input.nullifier()]);

    ledger_b.add_commitment(&transfer.commit_note());
    ledger_out.add_commitment(&transfer.note_commitment());

    assert_eq!(
        ledger_a_transition.public().ledger,
        ledger_a.to_witness().commit()
        ledger_in_transition.public().ledger,
        ledger_in.to_witness().commit()
    );
    assert_eq!(
        ledger_b_transition.public().ledger,
        ledger_b.to_witness().commit()
        ledger_out_transition.public().ledger,
        ledger_out.to_witness().commit()
    );

    (ledger_a_transition, ledger_b_transition)
    (ledger_in_transition, ledger_out_transition)
}

#[test]
@ -130,31 +138,38 @@ fn zone_update_cross() {
    let bob = User::random(&mut rng);

    // Alice has an unspent note worth 10 NMO
    let utxo = receive_utxo(
        NoteWitness::stateless(10, nmo(), ConstraintProof::nop_constraint(), &mut rng),
        alice.pk(),
        zone_a_id,
    );
    let utxo = OutputWitness {
        state: Default::default(),
        value: 10,
        unit: nmo().unit(),
        nonce: Nonce::random(&mut rng),
        zone_id: zone_a_id,
        nf_pk: alice.pk(),
    };

    let alice_input = InputWitness::from_output(utxo, alice.sk());
    let alice_input = InputWitness::from_output(utxo, alice.sk(), nmo());

    let mut ledger_a = LedgerState::default();
    let alice_cm_path = ledger_a.add_commitment(&utxo.commit_note());
    ledger_a.add_nullifiers(
        std::iter::repeat_with(|| Nullifier(rng.gen()))
            .take(2_usize.pow(10))
            .collect(),
    );
    let alice_cm_path = ledger_a.add_commitment(&utxo.note_commitment());
    let alice_cm_proof = (ledger_a.commitments.clone(), alice_cm_path);

    let ledger_b = LedgerState::default();

    let zone_a_old = ZoneNote {
        id: zone_a_id,
        state: [0; 32],
    let zone_a_old = ZoneState {
        stf: StfProof::nop_stf(),
        zone_data: [0; 32],
        ledger: ledger_a.to_witness().commit(),
        stf: [0; 32],
    };

    let zone_b_old = ZoneNote {
        id: zone_b_id,
        state: [0; 32],
    let zone_b_old = ZoneState {
        stf: StfProof::nop_stf(),
        zone_data: [0; 32],
        ledger: ledger_b.to_witness().commit(),
        stf: [0; 32],
    };

    let (ledger_a_transition, ledger_b_transition) = cross_transfer_transition(
@ -162,18 +177,17 @@ fn zone_update_cross() {
        alice_cm_proof,
        bob,
        8,
        zone_a_id,
        zone_b_id,
        ledger_a,
        ledger_b,
    );

    let zone_a_new = ZoneNote {
    let zone_a_new = ZoneState {
        ledger: ledger_a_transition.public().ledger,
        ..zone_a_old
    };

    let zone_b_new = ZoneNote {
    let zone_b_new = ZoneState {
        ledger: ledger_b_transition.public().ledger,
        ..zone_b_old
    };
@ -188,24 +202,24 @@ fn zone_update_cross() {
        new: zone_b_new,
    });

    let update_bundle = UpdateBundle {
    let batch = BatchUpdate {
        updates: vec![
            ZoneUpdate {
            Update {
                old: zone_a_old,
                new: zone_a_new,
            },
            ZoneUpdate {
            Update {
                old: zone_b_old,
                new: zone_b_new,
            },
        ],
    };

    let proved_bundle = ProvedUpdateBundle {
        bundle: update_bundle,
    let proved_batch = ProvedBatchUpdate {
        batch,
        ledger_proofs: vec![ledger_a_transition, ledger_b_transition],
        stf_proofs: vec![stf_proof_a, stf_proof_b],
    };

    assert!(proved_bundle.verify());
    assert!(proved_batch.verify());
}

@ -1,48 +0,0 @@
use std::collections::{BTreeMap, BTreeSet};

use cl::{
    cl::{BalanceWitness, NoteCommitment, Nullifier},
    zone_layer::notes::ZoneId,
};
use risc0_zkvm::sha::rust_crypto::{Digest, Sha256};
use serde::{Deserialize, Serialize};

use crate::ptx::PtxPublic;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct BundleId(pub [u8; 32]);

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct BundlePublic {
    pub bundle_id: BundleId,
    pub zone_ledger_updates: BTreeMap<ZoneId, LedgerUpdate>,
}

#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)]
pub struct LedgerUpdate {
    // inputs in this bundle used the following roots in their cm membership proof.
    pub cm_roots: BTreeSet<[u8; 32]>,
    // these are the nullifiers of inputs used in this bundle.
    pub nullifiers: Vec<Nullifier>,
    // these are commitments to created notes in this bundle
    pub commitments: Vec<NoteCommitment>,
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct BundlePrivate {
    pub bundle: Vec<PtxPublic>,
    pub balances: Vec<BalanceWitness>,
}

impl BundlePrivate {
    pub fn id(&self) -> BundleId {
        // TODO: change to merkle root
        let mut hasher = Sha256::new();
        hasher.update(b"NOMOS_CL_BUNDLE_ID");
        for ptx in &self.bundle {
            hasher.update(ptx.ptx.root().0);
        }

        BundleId(hasher.finalize().into())
    }
}
@ -1,8 +0,0 @@
use cl::cl::{Nullifier, PtxRoot};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct ConstraintPublic {
    pub nf: Nullifier,
    pub ptx_root: PtxRoot,
}
emmarin/cl/ledger_proof_statements/src/covenant.rs
@ -0,0 +1,15 @@
use cl::crust::{Nullifier, TxRoot, Unit};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct SpendingCovenantPublic {
    pub nf: Nullifier,
    pub tx_root: TxRoot,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct SupplyCovenantPublic {
    pub amount: u64,
    pub unit: Unit,
    pub tx_root: TxRoot,
}
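The two structs above are the public statements that covenant guests commit to; the new tx guest later in this diff verifies covenant proofs against them. For illustration only — this program is not part of the commit, and the imports are assumed to match the crates shown here — a permissive spending covenant in the style of the removed constraint_nop guest could look like:

use ledger_proof_statements::covenant::SpendingCovenantPublic;
use risc0_zkvm::guest::env;

// Hypothetical no-op spending covenant: it accepts any spend by simply
// re-committing the public statement (nullifier + tx root) it is given.
// A real covenant would check its spending conditions before committing.
fn main() {
    let public: SpendingCovenantPublic = env::read();
    env::commit(&public);
}

A minting or burning covenant would follow the same pattern with SupplyCovenantPublic.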
@ -1,11 +1,13 @@
use std::collections::BTreeMap;

use crate::bundle::BundleId;
use crate::bundle::BundlePublic;
use cl::cl::{indexed::BatchUpdateProof, merkle, NoteCommitment};
use cl::zone_layer::{
    ledger::{Ledger, LedgerWitness},
    notes::ZoneId,
use cl::{
    crust::{Bundle, BundleRoot, NoteCommitment},
    ds::indexed::BatchUpdateProof,
    ds::merkle,
    mantle::{
        ledger::{Ledger, LedgerWitness},
        ZoneId,
    },
};
use risc0_zkvm::guest::env;
use serde::{Deserialize, Serialize};
@ -15,7 +17,7 @@ pub struct LedgerProofPublic {
    pub old_ledger: Ledger,
    pub ledger: Ledger,
    pub id: ZoneId,
    pub cross_bundles: Vec<CrossZoneBundle>,
    pub sync_logs: Vec<SyncLog>,
    pub outputs: Vec<NoteCommitment>,
}

@ -29,13 +31,13 @@ pub struct LedgerProofPrivate {

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LedgerBundleWitness {
    pub bundle: BundlePublic,
    pub bundle: Bundle,
    pub cm_root_proofs: BTreeMap<[u8; 32], merkle::Path>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CrossZoneBundle {
    pub id: BundleId,
pub struct SyncLog {
    pub bundle: BundleRoot,
    pub zones: Vec<ZoneId>,
}

@ -1,5 +1,3 @@
pub mod bundle;
pub mod constraint;
pub mod covenant;
pub mod ledger;
pub mod ptx;
pub mod stf;

@ -1,17 +0,0 @@
use cl::cl::{
    mmr::{MMRProof, MMR},
    PartialTx, PartialTxWitness,
};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PtxPublic {
    pub ptx: PartialTx,
    pub cm_mmrs: Vec<MMR>,
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PtxPrivate {
    pub ptx: PartialTxWitness,
    pub input_cm_proofs: Vec<(MMR, MMRProof)>,
}
@ -1,8 +1,8 @@
use cl::zone_layer::notes::ZoneNote;
use cl::mantle::zone::ZoneState;
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct StfPublic {
    pub old: ZoneNote,
    pub new: ZoneNote,
    pub old: ZoneState,
    pub new: ZoneState,
}

@ -1,6 +1,6 @@
use cl::cl::merkle;
use cl::ds::merkle;
use ledger_proof_statements::ledger::{
    CrossZoneBundle, LedgerBundleWitness, LedgerProofPrivate, LedgerProofPublic,
    LedgerBundleWitness, LedgerProofPrivate, LedgerProofPublic, SyncLog,
};
use risc0_zkvm::{guest::env, serde};

@ -12,13 +12,13 @@ fn main() {
        nf_proofs,
    } = LedgerProofPrivate::read();
    let old_ledger = ledger.clone();
    let mut cross_bundles = vec![];
    let mut sync_logs = vec![];
    let mut outputs = vec![];

    let mut nullifiers = vec![];

    for LedgerBundleWitness {
        mut bundle,
        bundle,
        cm_root_proofs,
    } in bundles
    {
@ -28,27 +28,36 @@ fn main() {
        )
        .unwrap();

        // TODO: do not add local updates
        cross_bundles.push(CrossZoneBundle {
            id: bundle.bundle_id,
            zones: bundle.zone_ledger_updates.keys().copied().collect(),
        });
        let zones = Vec::from_iter(bundle.updates.iter().map(|update| update.zone_id));
        if !(zones.len() == 1 && zones[0] == id) {
            // This is a cross zone bundle, add a sync log for it to ensure all zones
            // also approve it.
            sync_logs.push(SyncLog {
                bundle: bundle.root,
                zones,
            });
        }

        if let Some(ledger_update) = bundle.zone_ledger_updates.remove(&id) {
            for past_cm_root in &ledger_update.cm_roots {
        if let Some(ledger_update) = bundle
            .updates
            .into_iter()
            .filter(|update| update.zone_id == id)
            .next()
        {
            for node in &ledger_update.frontier_nodes {
                let past_cm_root_proof = cm_root_proofs
                    .get(past_cm_root)
                    .get(&node.root)
                    .expect("missing cm root proof");
                let expected_current_cm_root = merkle::path_root(*past_cm_root, past_cm_root_proof);
                let expected_current_cm_root = merkle::path_root(node.root, past_cm_root_proof);
                assert!(old_ledger.valid_cm_root(expected_current_cm_root))
            }

            for cm in &ledger_update.commitments {
            for cm in &ledger_update.outputs {
                ledger.add_commitment(cm);
                outputs.push(*cm)
            }

            nullifiers.extend(ledger_update.nullifiers);
            nullifiers.extend(ledger_update.inputs);
        }
    }

@ -60,7 +69,7 @@ fn main() {
        old_ledger: old_ledger.commit(),
        ledger: ledger.commit(),
        id,
        cross_bundles,
        sync_logs,
        outputs,
    });
}

@ -1,43 +0,0 @@
/// Input Proof
use ledger_proof_statements::{
    constraint::ConstraintPublic,
    ptx::{PtxPrivate, PtxPublic},
};
use risc0_zkvm::{guest::env, serde};

fn main() {
    let PtxPrivate {
        ptx,
        input_cm_proofs,
    } = env::read();

    let ptx_commit = ptx.commit();
    let ptx_root = ptx_commit.root();

    assert_eq!(ptx.inputs.len(), input_cm_proofs.len());
    let mut cm_mmrs = Vec::new();
    for (input, (mmr, mmr_proof)) in ptx.inputs.iter().zip(input_cm_proofs) {
        let note_cm = input.note_commitment();
        assert!(mmr.verify_proof(&note_cm.0, &mmr_proof));
        cm_mmrs.push(mmr);

        env::verify(
            input.note.constraint.0,
            &serde::to_vec(&ConstraintPublic {
                ptx_root,
                nf: input.nullifier(),
            })
            .unwrap(),
        )
        .unwrap();
    }

    for output in ptx.outputs.iter() {
        assert!(output.note.value > 0);
    }

    env::commit(&PtxPublic {
        ptx: ptx_commit,
        cm_mmrs,
    });
}
@ -1,5 +1,5 @@
[package]
name = "nomos_cl_risc0_proofs"
name = "nomos_mantle_risc0_proofs"
version = "0.1.0"
edition = "2021"

@ -7,5 +7,5 @@ edition = "2021"
risc0-build = { version = "1.0" }

[package.metadata.risc0]
methods = ["constraint_nop", "stf_nop"]
methods = ["stf_nop"]

@ -1,19 +0,0 @@
[package]
name = "constraint_nop"
version = "0.1.0"
edition = "2021"

[workspace]

[dependencies]
risc0-zkvm = { version = "1.0", default-features = false, features = ['std'] }
serde = { version = "1.0", features = ["derive"] }
cl = { path = "../../cl" }
ledger_proof_statements = { path = "../../ledger_proof_statements" }

[patch.crates-io]
# add RISC Zero accelerator support for all downstream usages of the following crates.
sha2 = { git = "https://github.com/risc0/RustCrypto-hashes", tag = "sha2-v0.10.8-risczero.0" }
crypto-bigint = { git = "https://github.com/risc0/RustCrypto-crypto-bigint", tag = "v0.5.5-risczero.0" }
curve25519-dalek = { git = "https://github.com/risc0/curve25519-dalek", tag = "curve25519-4.1.2-risczero.0" }
@ -1,8 +0,0 @@
/// Constraint No-op Proof
use ledger_proof_statements::constraint::ConstraintPublic;
use risc0_zkvm::guest::env;

fn main() {
    let public: ConstraintPublic = env::read();
    env::commit(&public);
}
@ -1,4 +1,3 @@
/// Constraint No-op Proof
use ledger_proof_statements::stf::StfPublic;
use risc0_zkvm::guest::env;

@ -1,5 +1,5 @@
[package]
name = "nomos_cl_ptx_risc0_proof"
name = "nomos_cl_tx_risc0_proof"
version = "0.1.0"
edition = "2021"

@ -7,5 +7,5 @@ edition = "2021"
risc0-build = { version = "1.0" }

[package.metadata.risc0]
methods = ["ptx"]
methods = ["tx"]

@ -1,5 +1,5 @@
[package]
name = "ptx"
name = "tx"
version = "0.1.0"
edition = "2021"

emmarin/cl/tx_risc0_proof/tx/src/main.rs
@ -0,0 +1,72 @@
use cl::crust::{
    balance::NOP_COVENANT,
    tx::{BurnAmount, InputDerivedFields, MintAmount},
    TxWitness,
};
/// Input Proof
use ledger_proof_statements::covenant::{SpendingCovenantPublic, SupplyCovenantPublic};
use risc0_zkvm::{guest::env, serde};

fn main() {
    let tx: TxWitness = env::read();

    let mints = tx.mint_amounts();
    let burns = tx.burn_amounts();
    let inputs = tx.inputs_derived_fields();
    let tx_public = tx.commit(&mints, &burns, &inputs);
    let tx_root = tx_public.root;

    for (MintAmount { amount, unit, .. }, minting_covenant) in mints
        .iter()
        .zip(tx.mints.iter().map(|m| m.unit.minting_covenant))
    {
        if minting_covenant == NOP_COVENANT {
            continue;
        }
        env::verify(
            minting_covenant,
            &serde::to_vec(&SupplyCovenantPublic {
                amount: *amount,
                unit: *unit,
                tx_root,
            })
            .unwrap(),
        )
        .unwrap();
    }

    for (BurnAmount { unit, amount, .. }, burning_covenant) in burns
        .iter()
        .zip(tx.burns.iter().map(|b| b.unit.burning_covenant))
    {
        if burning_covenant == NOP_COVENANT {
            continue;
        }
        env::verify(
            burning_covenant,
            &serde::to_vec(&SupplyCovenantPublic {
                amount: *amount,
                unit: *unit,
                tx_root,
            })
            .unwrap(),
        )
        .unwrap();
    }

    for (InputDerivedFields { nf, .. }, spending_covenant) in inputs
        .iter()
        .zip(tx.inputs.iter().map(|w| w.unit_witness.spending_covenant))
    {
        if spending_covenant == NOP_COVENANT {
            continue;
        }
        env::verify(
            spending_covenant,
            &serde::to_vec(&SpendingCovenantPublic { nf: *nf, tx_root }).unwrap(),
        )
        .unwrap();
    }

    env::commit(&tx_public);
}
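For context (not part of this file), the updated test earlier in this diff already shows how a host drives this guest: prove each TxWitness, fold the proved transactions into a BundleWitness, then prove the per-zone ledger transition. A condensed restatement of those test calls follows, with the caveat that the exact signatures live in the ledger crate rather than in this diff:

// Condensed from the updated zone_update_cross test above; tx_witness,
// ledger_in and input are as constructed there, and the empty vectors
// mirror the test's no-op covenant case.
let proved_tx = ProvedTx::prove(
    tx_witness.clone(),
    vec![],
    vec![], // covenant proofs skipped: NMO uses no-op covenants
)
.unwrap();

let bundle = ProvedBundle::prove(
    &BundleWitness {
        txs: vec![proved_tx.public()],
    },
    vec![proved_tx],
);

let ledger_in_transition =
    ProvedLedgerTransition::prove(ledger_in.clone(), input.zone_id, vec![bundle.clone()]);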