Giacomo Pasini 2024-07-16 16:45:30 +02:00
commit 221d7102a9
No known key found for this signature in database
GPG Key ID: FC08489D2D895D4B
41 changed files with 2238 additions and 0 deletions

2
goas/cl/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
Cargo.lock
target/

11
goas/cl/Cargo.toml Normal file
View File

@ -0,0 +1,11 @@
[workspace]
resolver = "2"
members = [ "cl", "ledger", "proof_statements", "risc0_proofs"]
# Always optimize; building and running the risc0_proofs takes much longer without optimization.
[profile.dev]
opt-level = 3
[profile.release]
debug = 1
lto = true

20
goas/cl/cl/Cargo.toml Normal file
View File

@ -0,0 +1,20 @@
[package]
name = "cl"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = {version="1.0", features = ["derive"]}
bincode = "1.3.3"
risc0-groth16 = "1.0.1"
blake2 = "0.10.6"
# jubjub = "0.10.0"
group = "0.13.0"
rand = "0.8.5"
rand_core = "0.6.0"
lazy_static = "1.4.0"
hex = "0.4.3"
curve25519-dalek = {version = "4.1", features = ["serde", "digest", "rand_core"]}
sha2 = "0.10"

216
goas/cl/cl/src/balance.rs Normal file
View File

@ -0,0 +1,216 @@
use curve25519_dalek::{ristretto::RistrettoPoint, traits::VartimeMultiscalarMul, Scalar};
use lazy_static::lazy_static;
use rand_core::CryptoRngCore;
use serde::{Deserialize, Serialize};
lazy_static! {
// Pedersen commitment blinding base point H, derived by hashing the domain tag below to the curve.
static ref PEDERSON_COMMITMENT_BLINDING_POINT: RistrettoPoint = crate::crypto::hash_to_curve(b"NOMOS_CL_PEDERSON_COMMITMENT_BLINDING");
}
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub struct Balance(pub RistrettoPoint);
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub struct BalanceWitness {
pub value: u64,
pub unit: RistrettoPoint,
pub blinding: Scalar,
}
impl Balance {
pub fn to_bytes(&self) -> [u8; 32] {
self.0.compress().to_bytes().into()
}
}
impl BalanceWitness {
pub fn new(value: u64, unit: impl Into<String>, blinding: Scalar) -> Self {
Self {
value,
unit: unit_point(&unit.into()),
blinding,
}
}
pub fn random(value: u64, unit: impl Into<String>, mut rng: impl CryptoRngCore) -> Self {
Self::new(value, unit, Scalar::random(&mut rng))
}
pub fn commit(&self) -> Balance {
Balance(balance(self.value, self.unit, self.blinding))
}
}
pub fn unit_point(unit: &str) -> RistrettoPoint {
crate::crypto::hash_to_curve(unit.as_bytes())
}
pub fn balance(value: u64, unit: RistrettoPoint, blinding: Scalar) -> RistrettoPoint {
let value_scalar = Scalar::from(value);
// Open question: can the vartime multiscalar multiplication leak information via the cycle count in the STARK proof?
RistrettoPoint::vartime_multiscalar_mul(
&[value_scalar, blinding],
&[unit, *PEDERSON_COMMITMENT_BLINDING_POINT],
)
}
// mod serde_scalar {
// use super::Scalar;
// use serde::de::{self, Visitor};
// use serde::{Deserializer, Serializer};
// use std::fmt;
// // Serialize a SubgroupPoint by converting it to bytes.
// pub fn serialize<S>(scalar: &Scalar, serializer: S) -> Result<S::Ok, S::Error>
// where
// S: Serializer,
// {
// let bytes = scalar.to_bytes();
// serializer.serialize_bytes(&bytes)
// }
// // Deserialize a SubgroupPoint by converting it from bytes.
// pub fn deserialize<'de, D>(deserializer: D) -> Result<Scalar, D::Error>
// where
// D: Deserializer<'de>,
// {
// struct BytesVisitor;
// impl<'de> Visitor<'de> for BytesVisitor {
// type Value = Scalar;
// fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
// formatter.write_str("a valid Scalar in byte representation")
// }
// fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
// where
// E: de::Error,
// {
// let mut bytes = <jubjub::SubgroupPoint as group::GroupEncoding>::Repr::default();
// assert_eq!(bytes.len(), v.len());
// bytes.copy_from_slice(v);
// Ok(Scalar::from_bytes(&bytes).unwrap())
// }
// }
// deserializer.deserialize_bytes(BytesVisitor)
// }
// }
// mod serde_point {
// use super::SubgroupPoint;
// use group::GroupEncoding;
// use serde::de::{self, Visitor};
// use serde::{Deserializer, Serializer};
// use std::fmt;
// // Serialize a SubgroupPoint by converting it to bytes.
// pub fn serialize<S>(point: &SubgroupPoint, serializer: S) -> Result<S::Ok, S::Error>
// where
// S: Serializer,
// {
// let bytes = point.to_bytes();
// serializer.serialize_bytes(&bytes)
// }
// // Deserialize a SubgroupPoint by converting it from bytes.
// pub fn deserialize<'de, D>(deserializer: D) -> Result<SubgroupPoint, D::Error>
// where
// D: Deserializer<'de>,
// {
// struct BytesVisitor;
// impl<'de> Visitor<'de> for BytesVisitor {
// type Value = SubgroupPoint;
// fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
// formatter.write_str("a valid SubgroupPoint in byte representation")
// }
// fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
// where
// E: de::Error,
// {
// let mut bytes = <jubjub::SubgroupPoint as group::GroupEncoding>::Repr::default();
// assert_eq!(bytes.len(), v.len());
// bytes.copy_from_slice(v);
// Ok(SubgroupPoint::from_bytes(&bytes).unwrap())
// }
// }
// deserializer.deserialize_bytes(BytesVisitor)
// }
// }
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_pederson_blinding_point_pre_compute() {
// use k256::elliptic_curve::group::GroupEncoding;
// println!("{:?}", <[u8;33]>::from((*PEDERSON_COMMITMENT_BLINDING_POINT).to_bytes()));
assert_eq!(
*PEDERSON_COMMITMENT_BLINDING_POINT,
crate::crypto::hash_to_curve(b"NOMOS_CL_PEDERSON_COMMITMENT_BLINDING")
);
}
#[test]
fn test_balance_zero_unitless() {
// Zero is the same across all units
let mut rng = rand::thread_rng();
let r = Scalar::random(&mut rng);
assert_eq!(
BalanceWitness::new(0, "NMO", r).commit(),
BalanceWitness::new(0, "ETH", r).commit(),
);
}
#[test]
fn test_balance_blinding() {
// balances are blinded
let r1 = Scalar::from(12u32);
let r2 = Scalar::from(8u32);
let a_w = BalanceWitness::new(10, "NMO", r1);
let b_w = BalanceWitness::new(10, "NMO", r2);
let a = a_w.commit();
let b = b_w.commit();
assert_ne!(a, b);
assert_eq!(a.0 - b.0, BalanceWitness::new(0, "NMO", r1 - r2).commit().0);
}
#[test]
fn test_balance_units() {
// Units differentiate between values.
let r = Scalar::from(1337u32);
let nmo = BalanceWitness::new(10, "NMO", r);
let eth = BalanceWitness::new(10, "ETH", r);
assert_ne!(nmo.commit(), eth.commit());
}
#[test]
fn test_balance_homomorphism() {
let mut rng = rand::thread_rng();
let r1 = Scalar::random(&mut rng);
let r2 = Scalar::random(&mut rng);
let ten = BalanceWitness::new(10, "NMO", 0u32.into());
let eight = BalanceWitness::new(8, "NMO", 0u32.into());
let two = BalanceWitness::new(2, "NMO", 0u32.into());
// Values of same unit are homomorphic
assert_eq!(ten.commit().0 - eight.commit().0, two.commit().0);
// Blinding factors are also homomorphic.
assert_eq!(
BalanceWitness::new(10, "NMO", r1).commit().0
- BalanceWitness::new(10, "NMO", r2).commit().0,
BalanceWitness::new(0, "NMO", r1 - r2).commit().0
);
}
}
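
The commitment scheme above is the usual Pedersen form `value * hash_to_curve(unit) + blinding * H`. A minimal sketch of recomputing it from its witness, assuming the `cl` crate added in this commit and its `rand` dependency (the `main` harness is illustrative, not one of the committed files):

```rust
// Sketch: recompute a balance commitment from its witness parts.
fn main() {
    let mut rng = rand::thread_rng();
    let w = cl::BalanceWitness::random(10, "NMO", &mut rng);

    // commit() evaluates value * hash_to_curve("NMO") + blinding * H ...
    let commitment = w.commit();

    // ... which matches the free function applied to the same witness fields.
    assert_eq!(commitment.0, cl::balance::balance(w.value, w.unit, w.blinding));
}
```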

127
goas/cl/cl/src/bundle.rs Normal file
View File

@ -0,0 +1,127 @@
use serde::{Deserialize, Serialize};
use curve25519_dalek::{constants::RISTRETTO_BASEPOINT_POINT, ristretto::RistrettoPoint, Scalar};
use crate::partial_tx::PartialTx;
/// The transaction bundle is a collection of partial transactions.
/// The goal in bundling transactions is to produce a set of partial transactions
/// that balance each other.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Bundle {
pub partials: Vec<PartialTx>,
}
#[derive(Debug, Clone)]
pub struct BundleWitness {
pub balance_blinding: Scalar,
}
impl Bundle {
pub fn balance(&self) -> RistrettoPoint {
self.partials.iter().map(|ptx| ptx.balance()).sum()
}
pub fn is_balanced(&self, balance_blinding_witness: Scalar) -> bool {
self.balance()
== crate::balance::balance(0, RISTRETTO_BASEPOINT_POINT, balance_blinding_witness)
}
}
#[cfg(test)]
mod test {
use crate::{
crypto::hash_to_curve, input::InputWitness, note::NoteWitness, nullifier::NullifierSecret,
output::OutputWitness, partial_tx::PartialTxWitness,
};
use super::*;
#[test]
fn test_bundle_balance() {
let mut rng = rand::thread_rng();
let nmo_10_in =
InputWitness::random(NoteWitness::new(10, "NMO", [0u8; 32], &mut rng), &mut rng);
let eth_23_in =
InputWitness::random(NoteWitness::new(23, "ETH", [0u8; 32], &mut rng), &mut rng);
let crv_4840_out = OutputWitness::random(
NoteWitness::new(4840, "CRV", [0u8; 32], &mut rng),
NullifierSecret::random(&mut rng).commit(), // transferring to a random owner
&mut rng,
);
let ptx_unbalanced = PartialTxWitness {
inputs: vec![nmo_10_in.clone(), eth_23_in.clone()],
outputs: vec![crv_4840_out.clone()],
};
let bundle_witness = BundleWitness {
balance_blinding: crv_4840_out.note.balance.blinding
- nmo_10_in.note.balance.blinding
- eth_23_in.note.balance.blinding,
};
let mut bundle = Bundle {
partials: vec![PartialTx::from_witness(ptx_unbalanced)],
};
assert!(!bundle.is_balanced(bundle_witness.balance_blinding));
assert_eq!(
bundle.balance(),
crate::balance::balance(
4840,
hash_to_curve(b"CRV"),
crv_4840_out.note.balance.blinding
) - (crate::balance::balance(
10,
hash_to_curve(b"NMO"),
nmo_10_in.note.balance.blinding
) + crate::balance::balance(
23,
hash_to_curve(b"ETH"),
eth_23_in.note.balance.blinding
))
);
let crv_4840_in =
InputWitness::random(NoteWitness::new(4840, "CRV", [0u8; 32], &mut rng), &mut rng);
let nmo_10_out = OutputWitness::random(
NoteWitness::new(10, "NMO", [0u8; 32], &mut rng),
NullifierSecret::random(&mut rng).commit(), // transferring to a random owner
&mut rng,
);
let eth_23_out = OutputWitness::random(
NoteWitness::new(23, "ETH", [0u8; 32], &mut rng),
NullifierSecret::random(&mut rng).commit(), // transferring to a random owner
&mut rng,
);
bundle
.partials
.push(PartialTx::from_witness(PartialTxWitness {
inputs: vec![crv_4840_in.clone()],
outputs: vec![nmo_10_out.clone(), eth_23_out.clone()],
}));
let witness = BundleWitness {
balance_blinding: -nmo_10_in.note.balance.blinding - eth_23_in.note.balance.blinding
+ crv_4840_out.note.balance.blinding
- crv_4840_in.note.balance.blinding
+ nmo_10_out.note.balance.blinding
+ eth_23_out.note.balance.blinding,
};
assert_eq!(
bundle.balance(),
crate::balance::balance(
0,
curve25519_dalek::constants::RISTRETTO_BASEPOINT_POINT,
witness.balance_blinding
)
);
assert!(bundle.is_balanced(witness.balance_blinding));
}
}
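
As a smaller companion to the test above, a hedged sketch of the balancing check: when a bundle's value terms cancel, only the aggregate blinding difference remains, and `is_balanced` verifies exactly that (types are from the `cl` crate in this commit; the `main` harness is illustrative):

```rust
// Sketch: a one-partial-tx bundle moving 10 NMO to a new owner balances
// once the blinding difference between output and input is supplied.
fn main() {
    let mut rng = rand::thread_rng();

    let input = cl::InputWitness::random(
        cl::NoteWitness::new(10, "NMO", [0u8; 32], &mut rng),
        &mut rng,
    );
    let output = cl::OutputWitness::random(
        cl::NoteWitness::new(10, "NMO", [0u8; 32], &mut rng),
        cl::NullifierSecret::random(&mut rng).commit(), // random new owner
        &mut rng,
    );

    let bundle = cl::Bundle {
        partials: vec![cl::PartialTx::from_witness(cl::PartialTxWitness {
            inputs: vec![input],
            outputs: vec![output.clone()],
        })],
    };

    // The 10 NMO value terms cancel; only the blinding difference is left.
    let blinding = output.note.balance.blinding - input.note.balance.blinding;
    assert!(bundle.is_balanced(blinding));
}
```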

6
goas/cl/cl/src/crypto.rs Normal file
View File

@ -0,0 +1,6 @@
use sha2::Sha512;
use curve25519_dalek::ristretto::RistrettoPoint;
pub fn hash_to_curve(bytes: &[u8]) -> RistrettoPoint {
RistrettoPoint::hash_from_bytes::<Sha512>(bytes)
}

4
goas/cl/cl/src/error.rs Normal file
View File

@ -0,0 +1,4 @@
#[derive(Debug)]
pub enum Error {
ProofFailed,
}

61
goas/cl/cl/src/input.rs Normal file
View File

@ -0,0 +1,61 @@
/// This module defines the input structure used in partial transactions.
///
/// Partial transactions, as the name suggests, are transactions
/// which on their own may not balance (i.e. \sum inputs != \sum outputs).
use crate::{
balance::Balance,
note::{DeathCommitment, NoteWitness},
nullifier::{Nullifier, NullifierNonce, NullifierSecret},
};
use rand_core::RngCore;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct Input {
pub nullifier: Nullifier,
pub balance: Balance,
pub death_cm: DeathCommitment,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct InputWitness {
pub note: NoteWitness,
pub nf_sk: NullifierSecret,
pub nonce: NullifierNonce,
}
impl InputWitness {
pub fn random(note: NoteWitness, mut rng: impl RngCore) -> Self {
Self {
note,
nf_sk: NullifierSecret::random(&mut rng),
nonce: NullifierNonce::random(&mut rng),
}
}
pub fn commit(&self) -> Input {
Input {
nullifier: Nullifier::new(self.nf_sk, self.nonce),
balance: self.note.balance(),
death_cm: self.note.death_commitment(),
}
}
pub fn to_output_witness(&self) -> crate::OutputWitness {
crate::OutputWitness {
note: self.note.clone(),
nf_pk: self.nf_sk.commit(),
nonce: self.nonce,
}
}
}
impl Input {
pub fn to_bytes(&self) -> [u8; 96] {
let mut bytes = [0u8; 96];
bytes[..32].copy_from_slice(self.nullifier.as_bytes());
bytes[32..64].copy_from_slice(&self.balance.to_bytes());
bytes[64..96].copy_from_slice(&self.death_cm.0);
bytes
}
}

18
goas/cl/cl/src/lib.rs Normal file
View File

@ -0,0 +1,18 @@
pub mod balance;
pub mod bundle;
pub mod crypto;
pub mod error;
pub mod input;
pub mod merkle;
pub mod note;
pub mod nullifier;
pub mod output;
pub mod partial_tx;
pub use balance::{Balance, BalanceWitness};
pub use bundle::{Bundle, BundleWitness};
pub use input::{Input, InputWitness};
pub use note::{DeathCommitment, NoteCommitment, NoteWitness};
pub use nullifier::{Nullifier, NullifierCommitment, NullifierNonce, NullifierSecret};
pub use output::{Output, OutputWitness};
pub use partial_tx::{PartialTx, PartialTxWitness, PtxRoot};

239
goas/cl/cl/src/merkle.rs Normal file
View File

@ -0,0 +1,239 @@
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
pub fn padded_leaves<const N: usize>(elements: &[Vec<u8>]) -> [[u8; 32]; N] {
let mut leaves = [[0u8; 32]; N];
for (i, element) in elements.iter().enumerate() {
assert!(i < N);
leaves[i] = leaf(element);
}
leaves
}
pub fn leaf(data: &[u8]) -> [u8; 32] {
let mut hasher = Sha256::new();
hasher.update(b"NOMOS_MERKLE_LEAF");
hasher.update(data);
hasher.finalize().into()
}
pub fn node(a: [u8; 32], b: [u8; 32]) -> [u8; 32] {
let mut hasher = Sha256::new();
hasher.update(b"NOMOS_MERKLE_NODE");
hasher.update(a);
hasher.update(b);
hasher.finalize().into()
}
pub fn root<const N: usize>(elements: [[u8; 32]; N]) -> [u8; 32] {
let n = elements.len();
assert!(n.is_power_of_two());
let mut nodes = elements;
for h in (1..=n.ilog2()).rev() {
for i in 0..2usize.pow(h - 1) {
nodes[i] = node(nodes[i * 2], nodes[i * 2 + 1]);
}
}
nodes[0]
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum PathNode {
Left([u8; 32]),
Right([u8; 32]),
}
pub fn path_root(leaf: [u8; 32], path: &[PathNode]) -> [u8; 32] {
let mut computed_hash = leaf;
for path_node in path {
match path_node {
PathNode::Left(sibling_hash) => {
computed_hash = node(*sibling_hash, computed_hash);
}
PathNode::Right(sibling_hash) => {
computed_hash = node(computed_hash, *sibling_hash);
}
}
}
computed_hash
}
pub fn path<const N: usize>(leaves: [[u8; 32]; N], idx: usize) -> Vec<PathNode> {
assert!(N.is_power_of_two());
assert!(idx < N);
let mut nodes = leaves;
let mut path = Vec::new();
let mut idx = idx;
for h in (1..=N.ilog2()).rev() {
if idx % 2 == 0 {
path.push(PathNode::Right(nodes[idx + 1]));
} else {
path.push(PathNode::Left(nodes[idx - 1]));
}
idx /= 2;
for i in 0..2usize.pow(h - 1) {
nodes[i] = node(nodes[i * 2], nodes[i * 2 + 1]);
}
}
path
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_root_height_1() {
let r = root::<1>(padded_leaves(&[b"sand".into()]));
let expected = leaf(b"sand");
assert_eq!(r, expected);
}
#[test]
fn test_root_height_2() {
let r = root::<2>(padded_leaves(&[b"desert".into(), b"sand".into()]));
let expected = node(leaf(b"desert"), leaf(b"sand"));
assert_eq!(r, expected);
}
#[test]
fn test_root_height_3() {
let r = root::<4>(padded_leaves(&[
b"desert".into(),
b"sand".into(),
b"feels".into(),
b"warm".into(),
]));
let expected = node(
node(leaf(b"desert"), leaf(b"sand")),
node(leaf(b"feels"), leaf(b"warm")),
);
assert_eq!(r, expected);
}
#[test]
fn test_root_height_4() {
let r = root::<8>(padded_leaves(&[
b"desert".into(),
b"sand".into(),
b"feels".into(),
b"warm".into(),
b"at".into(),
b"night".into(),
]));
let expected = node(
node(
node(leaf(b"desert"), leaf(b"sand")),
node(leaf(b"feels"), leaf(b"warm")),
),
node(
node(leaf(b"at"), leaf(b"night")),
node([0u8; 32], [0u8; 32]),
),
);
assert_eq!(r, expected);
}
#[test]
fn test_path_height_1() {
let leaves = padded_leaves(&[b"desert".into()]);
let r = root::<1>(leaves);
let p = path::<1>(leaves, 0);
let expected = vec![];
assert_eq!(p, expected);
assert_eq!(path_root(leaf(b"desert"), &p), r);
}
#[test]
fn test_path_height_2() {
let leaves = padded_leaves(&[b"desert".into(), b"sand".into()]);
let r = root::<2>(leaves);
// --- proof for element at idx 0
let p0 = path(leaves, 0);
let expected0 = vec![PathNode::Right(leaf(b"sand"))];
assert_eq!(p0, expected0);
assert_eq!(path_root(leaf(b"desert"), &p0), r);
// --- proof for element at idx 1
let p1 = path(leaves, 1);
let expected1 = vec![PathNode::Left(leaf(b"desert"))];
assert_eq!(p1, expected1);
assert_eq!(path_root(leaf(b"sand"), &p1), r);
}
#[test]
fn test_path_height_3() {
let leaves = padded_leaves(&[
b"desert".into(),
b"sand".into(),
b"feels".into(),
b"warm".into(),
]);
let r = root::<4>(leaves);
// --- proof for element at idx 0
let p0 = path(leaves, 0);
let expected0 = vec![
PathNode::Right(leaf(b"sand")),
PathNode::Right(node(leaf(b"feels"), leaf(b"warm"))),
];
assert_eq!(p0, expected0);
assert_eq!(path_root(leaf(b"desert"), &p0), r);
// --- proof for element at idx 1
let p1 = path(leaves, 1);
let expected1 = vec![
PathNode::Left(leaf(b"desert")),
PathNode::Right(node(leaf(b"feels"), leaf(b"warm"))),
];
assert_eq!(p1, expected1);
assert_eq!(path_root(leaf(b"sand"), &p1), r);
// --- proof for element at idx 2
let p2 = path(leaves, 2);
let expected2 = vec![
PathNode::Right(leaf(b"warm")),
PathNode::Left(node(leaf(b"desert"), leaf(b"sand"))),
];
assert_eq!(p2, expected2);
assert_eq!(path_root(leaf(b"feels"), &p2), r);
// --- proof for element at idx 3
let p3 = path(leaves, 3);
let expected3 = vec![
PathNode::Left(leaf(b"feels")),
PathNode::Left(node(leaf(b"desert"), leaf(b"sand"))),
];
assert_eq!(p3, expected3);
assert_eq!(path_root(leaf(b"warm"), &p3), r);
}
}

107
goas/cl/cl/src/note.rs Normal file
View File

@ -0,0 +1,107 @@
use rand_core::CryptoRngCore;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use crate::{
balance::{Balance, BalanceWitness},
nullifier::{NullifierCommitment, NullifierNonce},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct DeathCommitment(pub [u8; 32]);
pub fn death_commitment(death_constraint: &[u8]) -> DeathCommitment {
let mut hasher = Sha256::new();
hasher.update(b"NOMOS_CL_DEATH_COMMIT");
hasher.update(death_constraint);
let death_cm: [u8; 32] = hasher.finalize().into();
DeathCommitment(death_cm)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct NoteCommitment([u8; 32]);
impl NoteCommitment {
pub fn as_bytes(&self) -> &[u8; 32] {
&self.0
}
}
// TODO: Rename Note to NoteWitness and NoteCommitment to Note
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub struct NoteWitness {
pub balance: BalanceWitness,
pub death_constraint: [u8; 32], // death constraint verification key
pub state: [u8; 32],
}
impl NoteWitness {
pub fn new(
value: u64,
unit: impl Into<String>,
state: [u8; 32],
rng: impl CryptoRngCore,
) -> Self {
Self {
balance: BalanceWitness::random(value, unit, rng),
death_constraint: [0u8; 32],
state,
}
}
pub fn commit(&self, nf_pk: NullifierCommitment, nonce: NullifierNonce) -> NoteCommitment {
let mut hasher = Sha256::new();
hasher.update(b"NOMOS_CL_NOTE_COMMIT");
// COMMIT TO BALANCE
hasher.update(self.balance.value.to_le_bytes());
hasher.update(self.balance.unit.compress().to_bytes());
// Important! we don't commit to the balance blinding factor as that may make the notes linkable.
// COMMIT TO STATE
hasher.update(self.state);
// COMMIT TO DEATH CONSTRAINT
hasher.update(self.death_constraint);
// COMMIT TO NULLIFIER
hasher.update(nf_pk.as_bytes());
hasher.update(nonce.as_bytes());
let commit_bytes: [u8; 32] = hasher.finalize().into();
NoteCommitment(commit_bytes)
}
pub fn balance(&self) -> Balance {
self.balance.commit()
}
pub fn death_commitment(&self) -> DeathCommitment {
death_commitment(&self.death_constraint)
}
}
#[cfg(test)]
mod test {
use crate::nullifier::NullifierSecret;
use super::*;
#[test]
fn test_note_commitments_dont_commit_to_balance_blinding() {
let mut rng = rand::thread_rng();
let n1 = NoteWitness::new(12, "NMO", [0u8; 32], &mut rng);
let n2 = NoteWitness::new(12, "NMO", [0u8; 32], &mut rng);
let nf_pk = NullifierSecret::random(&mut rng).commit();
let nonce = NullifierNonce::random(&mut rng);
// Balance blinding factors are different.
assert_ne!(n1.balance.blinding, n2.balance.blinding);
// But their commitments are the same.
assert_eq!(n1.commit(nf_pk, nonce), n2.commit(nf_pk, nonce));
}
}
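
A short sketch of what the note commitment binds (and what it deliberately leaves out), using the `cl` crate from this commit: changing any committed field, such as the state root, changes the commitment, while changing only the balance blinding does not (as the test above shows). The `main` harness is illustrative:

```rust
// Sketch: the note commitment binds value, unit, state, death constraint,
// owner (nf_pk) and nonce -- but not the balance blinding factor.
fn main() {
    let mut rng = rand::thread_rng();
    let nf_pk = cl::NullifierSecret::random(&mut rng).commit();
    let nonce = cl::NullifierNonce::random(&mut rng);

    let n1 = cl::NoteWitness::new(12, "NMO", [0u8; 32], &mut rng);
    // Same note, different state commitment.
    let n2 = cl::NoteWitness { state: [1u8; 32], ..n1 };

    assert_ne!(n1.commit(nf_pk, nonce), n2.commit(nf_pk, nonce));
}
```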

125
goas/cl/cl/src/nullifier.rs Normal file
View File

@ -0,0 +1,125 @@
// The Nullifier is used to detect if a note has
// already been consumed.
// The same nullifier secret may be used across multiple
// notes to allow users to hold fewer secrets. A note
// nonce is used to disambiguate when the same nullifier
// secret is used for multiple notes.
use blake2::{Blake2s256, Digest};
use rand_core::RngCore;
use serde::{Deserialize, Serialize};
// TODO: create a nullifier witness and use it throughout.
// struct NullifierWitness {
// nf_sk: NullifierSecret,
// nonce: NullifierNonce
// }
// Maintained privately by note holder
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct NullifierSecret([u8; 16]);
// Nullifier commitment is public information that
// can be provided to anyone wishing to transfer
// you a note
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct NullifierCommitment([u8; 32]);
// To allow users to maintain fewer nullifier secrets, we
// provide a nonce to differentiate notes controlled by the same
// secret. Each note is assigned a unique nullifier nonce.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct NullifierNonce([u8; 16]);
// The nullifier attached to input notes to prove an input has not
// already been spent.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Nullifier([u8; 32]);
impl NullifierSecret {
pub fn random(mut rng: impl RngCore) -> Self {
let mut sk = [0u8; 16];
rng.fill_bytes(&mut sk);
Self(sk)
}
pub fn commit(&self) -> NullifierCommitment {
let mut hasher = Blake2s256::new();
hasher.update(b"NOMOS_CL_NULL_COMMIT");
hasher.update(self.0);
let commit_bytes: [u8; 32] = hasher.finalize().into();
NullifierCommitment(commit_bytes)
}
}
impl NullifierCommitment {
pub fn as_bytes(&self) -> &[u8; 32] {
&self.0
}
pub fn hex(&self) -> String {
hex::encode(self.0)
}
}
impl NullifierNonce {
pub fn random(mut rng: impl RngCore) -> Self {
let mut nonce = [0u8; 16];
rng.fill_bytes(&mut nonce);
Self(nonce)
}
pub fn as_bytes(&self) -> &[u8; 16] {
&self.0
}
}
impl Nullifier {
pub fn new(sk: NullifierSecret, nonce: NullifierNonce) -> Self {
let mut hasher = Blake2s256::new();
hasher.update(b"NOMOS_CL_NULLIFIER");
hasher.update(sk.0);
hasher.update(nonce.0);
let nf_bytes: [u8; 32] = hasher.finalize().into();
Self(nf_bytes)
}
pub(crate) fn as_bytes(&self) -> &[u8; 32] {
&self.0
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_nullifier_commitment_vectors() {
assert_eq!(
NullifierSecret([0u8; 16]).commit().hex(),
"384318f9864fe57647bac344e2afdc500a672dedb29d2dc63b004e940e4b382a"
);
assert_eq!(
NullifierSecret([1u8; 16]).commit().hex(),
"0fd667e6bb39fbdc35d6265726154b839638ea90bcf4e736953ccf27ca5f870b"
);
assert_eq!(
NullifierSecret([u8::MAX; 16]).commit().hex(),
"1cb78e487eb0b3116389311fdde84cd3f619a4d7f487b29bf5a002eed3784d75"
);
}
#[test]
fn test_nullifier_same_sk_different_nonce() {
let mut rng = rand::thread_rng();
let sk = NullifierSecret::random(&mut rng);
let nonce_1 = NullifierNonce::random(&mut rng);
let nonce_2 = NullifierNonce::random(&mut rng);
let nf_1 = Nullifier::new(sk, nonce_1);
let nf_2 = Nullifier::new(sk, nonce_2);
assert_ne!(nf_1, nf_2);
}
}
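
A hedged sketch of the key flow described in the comments above (types from the `cl` crate in this commit; the `main` harness is illustrative): the receiver publishes only the nullifier commitment, and the nullifier itself is revealed only when a specific note is spent.

```rust
// Sketch: one nullifier secret, many notes, one nullifier per spent note.
fn main() {
    let mut rng = rand::thread_rng();

    // Held privately by the note owner.
    let nf_sk = cl::NullifierSecret::random(&mut rng);
    // Published so senders can address notes to this owner.
    let nf_pk = nf_sk.commit();

    // Each note carries its own nonce, so one secret can back many notes.
    let nonce = cl::NullifierNonce::random(&mut rng);
    let note = cl::NoteWitness::new(1, "NMO", [0u8; 32], &mut rng);
    let _note_cm = cl::OutputWitness { note, nf_pk, nonce }.commit_note();

    // Revealed only when this particular note is consumed.
    let _nullifier = cl::Nullifier::new(nf_sk, nonce);
}
```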

123
goas/cl/cl/src/output.rs Normal file
View File

@ -0,0 +1,123 @@
use rand_core::RngCore;
use serde::{Deserialize, Serialize};
use crate::{
balance::Balance,
error::Error,
note::{NoteCommitment, NoteWitness},
nullifier::{NullifierCommitment, NullifierNonce},
};
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Output {
pub note_comm: NoteCommitment,
pub balance: Balance,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct OutputWitness {
pub note: NoteWitness,
pub nf_pk: NullifierCommitment,
pub nonce: NullifierNonce,
}
impl OutputWitness {
pub fn random(note: NoteWitness, owner: NullifierCommitment, mut rng: impl RngCore) -> Self {
Self {
note,
nf_pk: owner,
nonce: NullifierNonce::random(&mut rng),
}
}
pub fn commit_note(&self) -> NoteCommitment {
self.note.commit(self.nf_pk, self.nonce)
}
pub fn commit(&self) -> Output {
Output {
note_comm: self.commit_note(),
balance: self.note.balance(),
}
}
}
// as we don't have SNARKS hooked up yet, the witness will be our proof
#[derive(Debug, Clone)]
pub struct OutputProof(OutputWitness);
impl Output {
pub fn prove(&self, w: &OutputWitness) -> Result<OutputProof, Error> {
if &w.commit() == self {
Ok(OutputProof(w.clone()))
} else {
Err(Error::ProofFailed)
}
}
pub fn verify(&self, proof: &OutputProof) -> bool {
// verification checks the relation
// - note_comm == commit(note || nf_pk || nonce)
// - balance == v * hash_to_curve(Unit) + blinding * H
let witness = &proof.0;
self.note_comm == witness.note.commit(witness.nf_pk, witness.nonce)
&& self.balance == witness.note.balance()
}
pub fn to_bytes(&self) -> [u8; 64] {
let mut bytes = [0u8; 64];
bytes[..32].copy_from_slice(self.note_comm.as_bytes());
bytes[32..64].copy_from_slice(&self.balance.to_bytes());
bytes
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::nullifier::NullifierSecret;
#[test]
fn test_output_proof() {
let mut rng = rand::thread_rng();
let note = NoteWitness::new(10, "NMO", [0u8; 32], &mut rng);
let nf_pk = NullifierSecret::random(&mut rng).commit();
let nonce = NullifierNonce::random(&mut rng);
let witness = OutputWitness { note, nf_pk, nonce };
let output = witness.commit();
let proof = output.prove(&witness).unwrap();
assert!(output.verify(&proof));
let wrong_witnesses = [
OutputWitness {
note: NoteWitness::new(11, "NMO", [0u8; 32], &mut rng),
..witness.clone()
},
OutputWitness {
note: NoteWitness::new(10, "ETH", [0u8; 32], &mut rng),
..witness.clone()
},
OutputWitness {
nf_pk: NullifierSecret::random(&mut rng).commit(),
..witness.clone()
},
OutputWitness {
nonce: NullifierNonce::random(&mut rng),
..witness.clone()
},
];
for wrong_witness in wrong_witnesses {
assert!(output.prove(&wrong_witness).is_err());
let wrong_output = wrong_witness.commit();
let wrong_proof = wrong_output.prove(&wrong_witness).unwrap();
assert!(!output.verify(&wrong_proof));
}
}
}
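
The relation checked by `verify` above, written out directly as a native check; a sketch assuming only the `cl` crate from this commit (the `main` harness is illustrative, not an additional API):

```rust
// Sketch: what an output proof asserts, checked without the proof wrapper.
fn main() {
    let mut rng = rand::thread_rng();
    let w = cl::OutputWitness::random(
        cl::NoteWitness::new(10, "NMO", [0u8; 32], &mut rng),
        cl::NullifierSecret::random(&mut rng).commit(),
        &mut rng,
    );
    let output = w.commit();

    // note_comm == commit(note || nf_pk || nonce)
    assert_eq!(output.note_comm, w.note.commit(w.nf_pk, w.nonce));

    // balance == value * hash_to_curve(unit) + blinding * H
    assert_eq!(
        output.balance.0,
        cl::balance::balance(w.note.balance.value, w.note.balance.unit, w.note.balance.blinding)
    );
}
```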

View File

@ -0,0 +1,143 @@
use rand_core::RngCore;
// use risc0_groth16::ProofJson;
use curve25519_dalek::ristretto::RistrettoPoint;
use serde::{Deserialize, Serialize};
use crate::input::{Input, InputWitness};
use crate::merkle;
use crate::output::{Output, OutputWitness};
const MAX_INPUTS: usize = 8;
const MAX_OUTPUTS: usize = 8;
/// The partial transaction commitment couples an input to a partial transaction.
/// This prevents inputs from being unbundled from the partial transaction they were created for.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct PtxRoot(pub [u8; 32]);
impl PtxRoot {
pub fn random(mut rng: impl RngCore) -> Self {
let mut sk = [0u8; 32];
rng.fill_bytes(&mut sk);
Self(sk)
}
pub fn hex(&self) -> String {
hex::encode(self.0)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PartialTx {
pub inputs: Vec<Input>,
pub outputs: Vec<Output>,
}
#[derive(Debug, Clone)]
pub struct PartialTxWitness {
pub inputs: Vec<InputWitness>,
pub outputs: Vec<OutputWitness>,
}
impl PartialTx {
pub fn from_witness(w: PartialTxWitness) -> Self {
Self {
inputs: Vec::from_iter(w.inputs.iter().map(InputWitness::commit)),
outputs: Vec::from_iter(w.outputs.iter().map(OutputWitness::commit)),
}
}
pub fn input_root(&self) -> [u8; 32] {
let input_bytes =
Vec::from_iter(self.inputs.iter().map(Input::to_bytes).map(Vec::from_iter));
let input_merkle_leaves = merkle::padded_leaves(&input_bytes);
merkle::root::<MAX_INPUTS>(input_merkle_leaves)
}
pub fn output_root(&self) -> [u8; 32] {
let output_bytes = Vec::from_iter(
self.outputs
.iter()
.map(Output::to_bytes)
.map(Vec::from_iter),
);
let output_merkle_leaves = merkle::padded_leaves(&output_bytes);
merkle::root::<MAX_OUTPUTS>(output_merkle_leaves)
}
pub fn input_merkle_path(&self, idx: usize) -> Vec<merkle::PathNode> {
let input_bytes =
Vec::from_iter(self.inputs.iter().map(Input::to_bytes).map(Vec::from_iter));
let input_merkle_leaves = merkle::padded_leaves::<MAX_INPUTS>(&input_bytes);
merkle::path(input_merkle_leaves, idx)
}
pub fn output_merkle_path(&self, idx: usize) -> Vec<merkle::PathNode> {
let output_bytes = Vec::from_iter(
self.outputs
.iter()
.map(Output::to_bytes)
.map(Vec::from_iter),
);
let output_merkle_leaves = merkle::padded_leaves::<MAX_OUTPUTS>(&output_bytes);
merkle::path(output_merkle_leaves, idx)
}
pub fn root(&self) -> PtxRoot {
let input_root = self.input_root();
let output_root = self.output_root();
let root = merkle::node(input_root, output_root);
PtxRoot(root)
}
pub fn balance(&self) -> RistrettoPoint {
let in_sum: RistrettoPoint = self.inputs.iter().map(|i| i.balance.0).sum();
let out_sum: RistrettoPoint = self.outputs.iter().map(|o| o.balance.0).sum();
out_sum - in_sum
}
}
#[cfg(test)]
mod test {
use crate::{crypto::hash_to_curve, note::NoteWitness, nullifier::NullifierSecret};
use super::*;
#[test]
fn test_partial_tx_balance() {
let mut rng = rand::thread_rng();
let nmo_10 =
InputWitness::random(NoteWitness::new(10, "NMO", [0u8; 32], &mut rng), &mut rng);
let eth_23 =
InputWitness::random(NoteWitness::new(23, "ETH", [0u8; 32], &mut rng), &mut rng);
let crv_4840 = OutputWitness::random(
NoteWitness::new(4840, "CRV", [0u8; 32], &mut rng),
NullifierSecret::random(&mut rng).commit(), // transferring to a random owner
&mut rng,
);
let ptx_witness = PartialTxWitness {
inputs: vec![nmo_10.clone(), eth_23.clone()],
outputs: vec![crv_4840.clone()],
};
let ptx = PartialTx::from_witness(ptx_witness.clone());
assert_eq!(
ptx.balance(),
crate::balance::balance(4840, hash_to_curve(b"CRV"), crv_4840.note.balance.blinding)
- (crate::balance::balance(
10,
hash_to_curve(b"NMO"),
nmo_10.note.balance.blinding
) + crate::balance::balance(
23,
hash_to_curve(b"ETH"),
eth_23.note.balance.blinding
))
);
}
}
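
A sketch of how the ptx root couples an individual input to the whole partial transaction (using the `cl` crate from this commit; the `main` harness is illustrative): the input's merkle path recomputes the input subtree root, and the ptx root is simply the node over the input and output roots.

```rust
// Sketch: tie input 0 to the partial transaction via its merkle path.
fn main() {
    let mut rng = rand::thread_rng();
    let input = cl::InputWitness::random(
        cl::NoteWitness::new(10, "NMO", [0u8; 32], &mut rng),
        &mut rng,
    );
    let output = cl::OutputWitness::random(
        cl::NoteWitness::new(10, "NMO", [0u8; 32], &mut rng),
        cl::NullifierSecret::random(&mut rng).commit(),
        &mut rng,
    );
    let ptx = cl::PartialTx::from_witness(cl::PartialTxWitness {
        inputs: vec![input],
        outputs: vec![output],
    });

    // The path for input 0 recomputes the input subtree root ...
    let leaf = cl::merkle::leaf(&input.commit().to_bytes());
    let path = ptx.input_merkle_path(0);
    assert_eq!(cl::merkle::path_root(leaf, &path), ptx.input_root());

    // ... and the ptx root is the node over the input and output roots.
    assert_eq!(ptx.root().0, cl::merkle::node(ptx.input_root(), ptx.output_root()));
}
```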

13
goas/cl/ledger/Cargo.toml Normal file
View File

@ -0,0 +1,13 @@
[package]
name = "ledger"
version = "0.1.0"
edition = "2021"
[dependencies]
cl = { path = "../cl" }
proof_statements = { path = "../proof_statements" }
nomos_cl_risc0_proofs = { path = "../risc0_proofs" }
risc0-zkvm = { version = "1.0", features = ["prove", "metal"] }
risc0-groth16 = { version = "1.0" }
rand = "0.8.5"
thiserror = "1.0.62"

View File

@ -0,0 +1,9 @@
use thiserror::Error;
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Error, Debug)]
pub enum Error {
#[error("risc0 failed to serde")]
Risc0Serde(#[from] risc0_zkvm::serde::Error),
}

208
goas/cl/ledger/src/input.rs Normal file
View File

@ -0,0 +1,208 @@
use proof_statements::input::{InputPrivate, InputPublic};
use crate::error::Result;
const MAX_NOTE_COMMS: usize = 2usize.pow(8);
#[derive(Debug, Clone)]
pub struct InputProof {
receipt: risc0_zkvm::Receipt,
}
impl InputProof {
pub fn public(&self) -> Result<InputPublic> {
Ok(self.receipt.journal.decode()?)
}
pub fn verify(&self, expected_public_inputs: &InputPublic) -> bool {
let Ok(public_inputs) = self.public() else {
return false;
};
&public_inputs == expected_public_inputs
&& self.receipt.verify(nomos_cl_risc0_proofs::INPUT_ID).is_ok()
}
}
pub fn prove_input(input: cl::InputWitness, note_commitments: &[cl::NoteCommitment]) -> InputProof {
let output_cm = input.to_output_witness().commit_note();
let cm_leaves = note_commitment_leaves(note_commitments);
let cm_idx = note_commitments
.iter()
.position(|c| c == &output_cm)
.unwrap();
let cm_path = cl::merkle::path(cm_leaves, cm_idx);
let secrets = InputPrivate { input, cm_path };
let env = risc0_zkvm::ExecutorEnv::builder()
.write(&secrets)
.unwrap()
.build()
.unwrap();
// Obtain the default prover.
let prover = risc0_zkvm::default_prover();
use std::time::Instant;
let start_t = Instant::now();
// Produce the proof by running the specified ELF binary in the prover.
// The returned struct contains the receipt along with statistics about execution of the guest.
let opts = risc0_zkvm::ProverOpts::succinct();
let prove_info = prover
.prove_with_opts(env, nomos_cl_risc0_proofs::INPUT_ELF, &opts)
.unwrap();
println!(
"STARK prover time: {:.2?}, total_cycles: {}",
start_t.elapsed(),
prove_info.stats.total_cycles
);
// extract the receipt.
let receipt = prove_info.receipt;
InputProof { receipt }
}
fn note_commitment_leaves(note_commitments: &[cl::NoteCommitment]) -> [[u8; 32]; MAX_NOTE_COMMS] {
let note_comm_bytes = Vec::from_iter(note_commitments.iter().map(|c| c.as_bytes().to_vec()));
let cm_leaves = cl::merkle::padded_leaves::<MAX_NOTE_COMMS>(&note_comm_bytes);
cm_leaves
}
#[cfg(test)]
mod test {
use rand::thread_rng;
use super::*;
#[test]
fn test_input_nullifier_prover() {
let mut rng = thread_rng();
let input = cl::InputWitness {
note: cl::NoteWitness {
balance: cl::BalanceWitness::random(32, "NMO", &mut rng),
death_constraint: [0u8; 32],
state: [0u8; 32],
},
nf_sk: cl::NullifierSecret::random(&mut rng),
nonce: cl::NullifierNonce::random(&mut rng),
};
let notes = vec![input.to_output_witness().commit_note()];
let proof = prove_input(input, &notes);
let expected_public_inputs = InputPublic {
cm_root: cl::merkle::root(note_commitment_leaves(&notes)),
input: input.commit(),
};
assert!(proof.verify(&expected_public_inputs));
let wrong_public_inputs = [
InputPublic {
cm_root: cl::merkle::root([cl::merkle::leaf(b"bad_root")]),
..expected_public_inputs
},
InputPublic {
input: cl::Input {
nullifier: cl::Nullifier::new(
cl::NullifierSecret::random(&mut rng),
cl::NullifierNonce::random(&mut rng),
),
..expected_public_inputs.input
},
..expected_public_inputs
},
InputPublic {
input: cl::Input {
death_cm: cl::note::death_commitment(b"wrong death vk"),
..expected_public_inputs.input
},
..expected_public_inputs
},
InputPublic {
input: cl::Input {
balance: cl::BalanceWitness::random(32, "NMO", &mut rng).commit(),
..expected_public_inputs.input
},
..expected_public_inputs
},
];
for wrong_input in wrong_public_inputs {
assert!(!proof.verify(&wrong_input));
}
}
// ----- The following tests still need to be built. -----
//
// #[test]
// fn test_input_proof() {
// let mut rng = rand::thread_rng();
// let ptx_root = cl::PtxRoot::default();
// let note = cl::NoteWitness::new(10, "NMO", [0u8; 32], &mut rng);
// let nf_sk = cl::NullifierSecret::random(&mut rng);
// let nonce = cl::NullifierNonce::random(&mut rng);
// let input_witness = cl::InputWitness { note, nf_sk, nonce };
// let input = input_witness.commit();
// let proof = input.prove(&input_witness, ptx_root, vec![]).unwrap();
// assert!(input.verify(ptx_root, &proof));
// let wrong_witnesses = [
// cl::InputWitness {
// note: cl::NoteWitness::new(11, "NMO", [0u8; 32], &mut rng),
// ..input_witness.clone()
// },
// cl::InputWitness {
// note: cl::NoteWitness::new(10, "ETH", [0u8; 32], &mut rng),
// ..input_witness.clone()
// },
// cl::InputWitness {
// nf_sk: cl::NullifierSecret::random(&mut rng),
// ..input_witness.clone()
// },
// cl::InputWitness {
// nonce: cl::NullifierNonce::random(&mut rng),
// ..input_witness.clone()
// },
// ];
// for wrong_witness in wrong_witnesses {
// assert!(input.prove(&wrong_witness, ptx_root, vec![]).is_err());
// let wrong_input = wrong_witness.commit();
// let wrong_proof = wrong_input.prove(&wrong_witness, ptx_root, vec![]).unwrap();
// assert!(!input.verify(ptx_root, &wrong_proof));
// }
// }
// #[test]
// fn test_input_ptx_coupling() {
// let mut rng = rand::thread_rng();
// let note = cl::NoteWitness::new(10, "NMO", [0u8; 32], &mut rng);
// let nf_sk = cl::NullifierSecret::random(&mut rng);
// let nonce = cl::NullifierNonce::random(&mut rng);
// let witness = cl::InputWitness { note, nf_sk, nonce };
// let input = witness.commit();
// let ptx_root = cl::PtxRoot::random(&mut rng);
// let proof = input.prove(&witness, ptx_root, vec![]).unwrap();
// assert!(input.verify(ptx_root, &proof));
// // The same input proof can not be used in another partial transaction.
// let another_ptx_root = cl::PtxRoot::random(&mut rng);
// assert!(!input.verify(another_ptx_root, &proof));
// }
}

View File

@ -0,0 +1,2 @@
pub mod error;
pub mod input;

View File

@ -0,0 +1,8 @@
[package]
name = "proof_statements"
version = "0.1.0"
edition = "2021"
[dependencies]
cl = { path = "../cl" }
serde = { version = "1.0", features = ["derive"] }

View File

@ -0,0 +1,21 @@
use serde::{Deserialize, Serialize};
/// For public inputs `nf` (nullifier), `cm_root` (root of the merkle tree over the commitment set)
/// and `death_cm` (commitment to the death constraint), the prover knows
/// `output = (note, nf_pk, nonce)`, `nf_sk` and `path` such that the following constraints hold:
/// 0. nf_pk = hash(nf_sk)
/// 1. nf = hash(nonce||nf_sk)
/// 2. note_cm = output_commitment(output)
/// 3. verify_merkle_path(note_cm, cm_root, path)
/// 4. death_cm = death_commitment(note.death_constraint)
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct InputPublic {
pub cm_root: [u8; 32],
pub input: cl::Input,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct InputPrivate {
pub input: cl::InputWitness,
pub cm_path: Vec<cl::merkle::PathNode>,
}
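
A hedged sketch of the statement spelled out in the comment above, checked natively for illustration (the actual proof is produced inside the risc0 guest). It uses only types from the `cl` and `proof_statements` crates in this commit; the helper function itself is hypothetical:

```rust
// Sketch: the relation an input proof must satisfy, as a plain function.
use proof_statements::input::{InputPrivate, InputPublic};

fn statement_holds(public: &InputPublic, private: &InputPrivate) -> bool {
    // Constraints 0 and 2: derive nf_pk from nf_sk and recompute the note commitment.
    let note_cm = private.input.to_output_witness().commit_note();
    let leaf = cl::merkle::leaf(note_cm.as_bytes());

    // Constraint 3: the note commitment sits under the public commitment root.
    cl::merkle::path_root(leaf, &private.cm_path) == public.cm_root
        // Constraints 1 and 4: the published nullifier, balance and death
        // commitment all match what the private witness commits to.
        && private.input.commit() == public.input
}
```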

View File

@ -0,0 +1 @@
pub mod input;

View File

@ -0,0 +1,11 @@
[package]
name = "nomos_cl_risc0_proofs"
version = "0.1.0"
edition = "2021"
[build-dependencies]
risc0-build = { version = "1.0" }
[package.metadata.risc0]
methods = ["input"]

View File

@ -0,0 +1,3 @@
fn main() {
risc0_build::embed_methods();
}

View File

@ -0,0 +1,19 @@
[package]
name = "input"
version = "0.1.0"
edition = "2021"
[workspace]
[dependencies]
risc0-zkvm = { version = "1.0", default-features = false, features = ['std'] }
serde = { version = "1.0", features = ["derive"] }
cl = { path = "../../cl" }
proof_statements = { path = "../../proof_statements" }
[patch.crates-io]
# add RISC Zero accelerator support for all downstream usages of the following crates.
sha2 = { git = "https://github.com/risc0/RustCrypto-hashes", tag = "sha2-v0.10.8-risczero.0" }
crypto-bigint = { git = "https://github.com/risc0/RustCrypto-crypto-bigint", tag = "v0.5.5-risczero.0" }
curve25519-dalek = { git = "https://github.com/risc0/curve25519-dalek", tag = "curve25519-4.1.2-risczero.0" }

View File

@ -0,0 +1,17 @@
/// Input Proof
use cl::merkle;
use proof_statements::input::{InputPrivate, InputPublic};
use risc0_zkvm::guest::env;
fn main() {
let secret: InputPrivate = env::read();
let out_cm = secret.input.to_output_witness().commit_note();
let cm_leaf = merkle::leaf(out_cm.as_bytes());
let cm_root = merkle::path_root(cm_leaf, &secret.cm_path);
env::commit(&InputPublic {
input: secret.input.commit(),
cm_root,
});
}

View File

@ -0,0 +1 @@
include!(concat!(env!("OUT_DIR"), "/methods.rs"));

6
goas/zone/.gitignore vendored Normal file
View File

@ -0,0 +1,6 @@
.DS_Store
Cargo.lock
methods/guest/Cargo.lock
target/
output/
proof.stark

18
goas/zone/Cargo.toml Normal file
View File

@ -0,0 +1,18 @@
[workspace]
resolver = "2"
members = [ "common","host", "methods"]
# Always optimize; building and running the guest takes much longer without optimization.
[profile.dev]
opt-level = 3
[profile.release]
debug = 1
lto = true
[patch.crates-io]
# Placing these patch statements in the workspace Cargo.toml will add RISC Zero SHA-256 and bigint
# multiplication accelerator support for all downstream usages of the following crates.
#sha2 = { git = "https://github.com/risc0/RustCrypto-hashes", tag = "sha2-v0.10.8-risczero.0" }
#k256 = { git = "https://github.com/risc0/RustCrypto-elliptic-curves", tag = "k256/v0.13.3-risczero.0" }
#crypto-bigint = { git = "https://github.com/risc0/RustCrypto-crypto-bigint", tag = "v0.5.5-risczero.0" }

201
goas/zone/LICENSE Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

111
goas/zone/README.md Normal file
View File

@ -0,0 +1,111 @@
# RISC Zero Rust Starter Template
Welcome to the RISC Zero Rust Starter Template! This template is intended to
give you a starting point for building a project using the RISC Zero zkVM.
Throughout the template (including in this README), you'll find comments
labelled `TODO` in places where you'll need to make changes. To better
understand the concepts behind this template, check out the [zkVM
Overview][zkvm-overview].
## Quick Start
First, make sure [rustup] is installed. The
[`rust-toolchain.toml`][rust-toolchain] file will be used by `cargo` to
automatically install the correct version.
To build all methods and execute the method within the zkVM, run the following
command:
```bash
cargo run
```
This is an empty template, and so there is no expected output (until you modify
the code).
### Executing the project locally in development mode
During development, faster iteration on code changes can be achieved by leveraging [dev-mode]; we strongly suggest activating it during your early development phase. Furthermore, you might want insight into the execution statistics of your project, which you can get by setting the environment variable `RUST_LOG="[executor]=info"` before running your project.
Put together, the command to run your project in development mode while getting execution statistics is:
```bash
RUST_LOG="[executor]=info" RISC0_DEV_MODE=1 cargo run
```
### Running proofs remotely on Bonsai
_Note: The Bonsai proving service is still in early Alpha; an API key is
required for access. [Click here to request access][bonsai access]._
If you have access to the URL and API key to Bonsai you can run your proofs
remotely. To prove in Bonsai mode, invoke `cargo run` with two additional
environment variables:
```bash
BONSAI_API_KEY="YOUR_API_KEY" BONSAI_API_URL="BONSAI_URL" cargo run
```
## How to create a project based on this template
Search this template for the string `TODO`, and make the necessary changes to
implement the required feature described by the `TODO` comment. Some of these
changes will be complex, and so we have a number of instructional resources to
assist you in learning how to write your own code for the RISC Zero zkVM:
- The [RISC Zero Developer Docs][dev-docs] is a great place to get started.
- Example projects are available in the [examples folder][examples] of
[`risc0`][risc0-repo] repository.
- Reference documentation is available at [https://docs.rs][docs.rs], including
[`risc0-zkvm`][risc0-zkvm], [`cargo-risczero`][cargo-risczero],
[`risc0-build`][risc0-build], and [others][crates].
## Directory Structure
It is possible to organize the files for these components in various ways.
However, in this starter template we use a standard directory structure for zkVM
applications, which we think is a good starting point for your applications.
```text
project_name
├── Cargo.toml
├── host
│ ├── Cargo.toml
│ └── src
│ └── main.rs <-- [Host code goes here]
└── methods
├── Cargo.toml
├── build.rs
├── guest
│ ├── Cargo.toml
│ └── src
│ └── method_name.rs <-- [Guest code goes here]
└── src
└── lib.rs
```
## Video Tutorial
For a walk-through of how to build with this template, check out this [excerpt
from our workshop at ZK HACK III][zkhack-iii].
## Questions, Feedback, and Collaborations
We'd love to hear from you on [Discord][discord] or [Twitter][twitter].
[bonsai access]: https://bonsai.xyz/apply
[cargo-risczero]: https://docs.rs/cargo-risczero
[crates]: https://github.com/risc0/risc0/blob/main/README.md#rust-binaries
[dev-docs]: https://dev.risczero.com
[dev-mode]: https://dev.risczero.com/api/generating-proofs/dev-mode
[discord]: https://discord.gg/risczero
[docs.rs]: https://docs.rs/releases/search?query=risc0
[examples]: https://github.com/risc0/risc0/tree/main/examples
[risc0-build]: https://docs.rs/risc0-build
[risc0-repo]: https://www.github.com/risc0/risc0
[risc0-zkvm]: https://docs.rs/risc0-zkvm
[rustup]: https://rustup.rs
[rust-toolchain]: rust-toolchain.toml
[twitter]: https://twitter.com/risczero
[zkvm-overview]: https://dev.risczero.com/zkvm
[zkhack-iii]: https://www.youtube.com/watch?v=Yg_BGqj_6lg&list=PLcPzhUaCxlCgig7ofeARMPwQ8vbuD6hC5&index=5

View File

@ -0,0 +1,7 @@
[package]
name = "common"
version = "0.1.0"
edition = "2021"
[dependencies]
serde = { version = "1", features = ["derive"] }

View File

@ -0,0 +1,28 @@
use serde::{Serialize, Deserialize};
use std::collections::BTreeMap;
// state of the zone
pub type State = BTreeMap<u32, u32>;
// list of all inputs that were executed up to this point
pub type Journal = Vec<Input>;
#[derive(Clone, Copy, Serialize, Deserialize)]
pub enum Input {
Transfer { from: u32, to: u32, amount: u32 },
None,
}
/// State transition function of the zone
pub fn stf(mut state: State, input: Input) -> State {
match input {
Input::Transfer { from, to, amount } => {
// compute transfer
let from = state.entry(from).or_insert(0);
*from = from.checked_sub(amount).unwrap();
*state.entry(to).or_insert(0) += amount;
}
Input::None => {}
}
state
}
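
A small sketch of the state transition function above in use, assuming the `common` crate defined here (the `main` harness is illustrative):

```rust
// Sketch: apply a single transfer to the zone state.
use common::{stf, Input, State};

fn main() {
    let state: State = [(0u32, 1000u32)].into_iter().collect();
    let state = stf(state, Input::Transfer { from: 0, to: 1, amount: 10 });

    assert_eq!(state[&0], 990);
    assert_eq!(state[&1], 10);
}
```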

19
goas/zone/host/Cargo.toml Normal file
View File

@ -0,0 +1,19 @@
[package]
name = "host"
version = "0.1.0"
edition = "2021"
default-run = "host"
[dependencies]
methods = { path = "../methods" }
risc0-zkvm = { version = "1.0", features = ["prove", "metal"] }
risc0-groth16 = { version = "1.0" }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
serde = "1.0"
blake2 = "0.10"
bincode = "1"
common = { path = "../common" }
tempfile = "3"
clap = { version = "4", features = ["derive"] }
rand = "0.8.5"
cl = { path = "../../cl/cl" }

View File

@ -0,0 +1,54 @@
/// Wrapping a STARK proof in groth16 in RISC0 should be as easy as specifying ProverOpts::groth16,
/// but unfortunately a permission issue seems to get in the way, at least on the machine we're
/// currently testing this on.
/// This workaround manually calls into docker after creating a directory with the required permissions.
/// In addition, splitting the process into separate stages better highlights the distinct pieces of
/// work involved, which could be carried out by different actors.
use std::path::PathBuf;
use clap::Parser;
use risc0_zkvm::{get_prover_server, ProverOpts, Receipt};
const WORK_DIR_ENV: &str = "RISC0_WORK_DIR";
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
/// Path to the bincode-encoded STARK proof
#[arg(short, long, default_value = "proof.stark")]
input: PathBuf,
/// Where to put the output artifacts
#[arg(short, long, default_value = "output")]
output_dir: PathBuf,
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
let args = Args::parse();
// Initialize tracing. In order to view logs, run `RUST_LOG=info cargo run`
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::filter::EnvFilter::from_default_env())
.init();
let work_dir = tempfile::tempdir()?;
// give permissions to the docker user to write to the work dir
let mut perms = std::fs::metadata(&work_dir)?.permissions();
// on unix this is for all users
perms.set_readonly(false);
std::fs::set_permissions(&work_dir, perms)?;
let proof: Receipt = bincode::deserialize(&std::fs::read(&args.input)?)?;
let server = get_prover_server(&ProverOpts::groth16())?;
let converted = server.identity_p254(proof.inner.succinct()?)?;
let work_dir_path = work_dir.path();
std::env::set_var(WORK_DIR_ENV, work_dir_path);
risc0_groth16::docker::stark_to_snark(&converted.get_seal_bytes())?;
std::fs::create_dir_all(&args.output_dir)?;
std::fs::copy(work_dir_path.join("proof.json"), args.output_dir.join("proof.json"))?;
std::fs::copy(work_dir_path.join("public.json"), args.output_dir.join("public.json"))?;
Ok(())
}
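// Hypothetical end-to-end flow (an assumption based on the defaults above, not part of
// the original commit): first run the host prover to produce `proof.stark`
// (e.g. `cargo run --release -- stf` from goas/zone/host, assuming clap's default
// kebab-case subcommand name), then run this binary to wrap the STARK in a Groth16
// proof; `proof.json` and `public.json` end up in the directory passed via `--output-dir`.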

124
goas/zone/host/src/main.rs Normal file
View File

@ -0,0 +1,124 @@
// The RISC-V ELF and the image ID generated by risc0-build are provided by the `methods` crate.
// The ELF is used for proving and the ID is used for verification.
use blake2::{Blake2s256, Digest};
use common::*;
use risc0_zkvm::{default_prover, ExecutorEnv};
use clap::Parser;
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
enum Action {
Stf,
}
fn stf_prove_stark() {
let mut rng = rand::thread_rng();
let state: State = [(0, 1000)].into_iter().collect();
let journal = vec![];
let zone_input = Input::Transfer {
from: 0,
to: 1,
amount: 10,
};
let in_state_cm = calculate_state_hash(&state);
let in_journal_cm = calculate_journal_hash(&journal);
let in_state_root = cl::merkle::node(in_state_cm, in_journal_cm);
let in_note = cl::NoteWitness::new(1, "ZONE", in_state_root, &mut rng);
let mut out_journal = journal.clone();
out_journal.push(zone_input);
let out_state_cm = calculate_state_hash(&stf(state.clone(), zone_input));
let out_journal_cm = calculate_journal_hash(&out_journal);
let out_state_root = cl::merkle::node(out_state_cm, out_journal_cm);
let out_note = cl::NoteWitness::new(1, "ZONE", out_state_root, &mut rng);
let input = cl::InputWitness::random(in_note, &mut rng);
let output = cl::OutputWitness::random(
out_note,
cl::NullifierSecret::random(&mut rng).commit(),
&mut rng,
);
let ptx = cl::PartialTx::from_witness(cl::PartialTxWitness {
inputs: vec![input.clone()],
outputs: vec![output.clone()],
});
let ptx_root = ptx.root().0;
let in_ptx_path = ptx.input_merkle_path(0);
let out_ptx_path = ptx.output_merkle_path(0);
let env = ExecutorEnv::builder()
.write(&ptx_root)
.unwrap()
.write(&ptx.input_root())
.unwrap()
.write(&ptx.output_root())
.unwrap()
.write(&in_ptx_path)
.unwrap()
.write(&out_ptx_path)
.unwrap()
.write(&input)
.unwrap()
.write(&output)
.unwrap()
.write(&zone_input)
.unwrap()
.write(&state)
.unwrap()
.write(&journal)
.unwrap()
.build()
.unwrap();
// Obtain the default prover.
let prover = default_prover();
use std::time::Instant;
let start_t = Instant::now();
// Prove the specified ELF binary.
// The returned struct contains the receipt along with statistics about the guest execution.
let opts = risc0_zkvm::ProverOpts::succinct();
let prove_info = prover
.prove_with_opts(env, methods::METHOD_ELF, &opts)
.unwrap();
println!("STARK prover time: {:.2?}", start_t.elapsed());
// extract the receipt.
let receipt = prove_info.receipt;
// TODO: Implement code for retrieving receipt journal here.
std::fs::write("proof.stark", bincode::serialize(&receipt).unwrap()).unwrap();
// The receipt was verified at the end of proving, but the below code is an
// example of how someone else could verify this receipt.
receipt.verify(methods::METHOD_ID).unwrap();
}
fn main() {
// Initialize tracing. In order to view logs, run `RUST_LOG=info cargo run`
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::filter::EnvFilter::from_default_env())
.init();
let action = Action::parse();
match action {
Action::Stf => stf_prove_stark(),
}
}
fn calculate_state_hash(state: &State) -> [u8; 32] {
let bytes = bincode::serialize(state).unwrap();
Blake2s256::digest(&bytes).into()
}
fn calculate_journal_hash(journal: &Journal) -> [u8; 32] {
let bytes = bincode::serialize(journal).unwrap();
Blake2s256::digest(&bytes).into()
}
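// A minimal verification sketch (hypothetical addition, not part of the original commit):
// a third party holding only the serialized receipt and the image ID can check the
// STARK proof without re-running the prover.
#[allow(dead_code)]
fn verify_stark_from_file(path: &std::path::Path) -> Result<(), Box<dyn std::error::Error>> {
    let receipt: risc0_zkvm::Receipt = bincode::deserialize(&std::fs::read(path)?)?;
    receipt.verify(methods::METHOD_ID)?;
    Ok(())
}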

View File

@ -0,0 +1,10 @@
[package]
name = "methods"
version = "0.1.0"
edition = "2021"
[build-dependencies]
risc0-build = { version = "1.0" }
[package.metadata.risc0]
methods = ["guest"]

View File

@ -0,0 +1,3 @@
fn main() {
risc0_build::embed_methods();
}

View File

@ -0,0 +1,24 @@
[package]
name = "method"
version = "0.1.0"
edition = "2021"
[workspace]
[dependencies]
risc0-zkvm = { version = "1.0", default-features = false, features = ['std'] }
blake2 = "0.10"
serde = { version = "1.0", features = ["derive"] }
bincode = "1"
common = { path = "../../common" }
cl = { path = "../../../cl/cl" }
[patch.crates-io]
# Placing these patch statement in the workspace Cargo.toml will add RISC Zero SHA-256 and bigint
# multiplication accelerator support for all downstream usages of the following crates.
sha2 = { git = "https://github.com/risc0/RustCrypto-hashes", tag = "sha2-v0.10.8-risczero.0" }
# k256 = { git = "https://github.com/risc0/RustCrypto-elliptic-curves", tag = "k256/v0.13.3-risczero.0" }
crypto-bigint = { git = "https://github.com/risc0/RustCrypto-crypto-bigint", tag = "v0.5.5-risczero.0" }
curve25519-dalek = { git = "https://github.com/risc0/curve25519-dalek", tag = "curve25519-4.1.2-risczero.0" }

View File

@ -0,0 +1,113 @@
use blake2::{Blake2s256, Digest};
use cl::input::InputWitness;
use cl::merkle;
use cl::output::OutputWitness;
use common::*;
use risc0_zkvm::guest::env;
/// Public inputs:
/// * ptx_root: the root of the partial tx merkle tree of inputs/outputs
///
/// Private inputs:
/// * input_root / output_root: the roots of the input and output subtrees of the partial tx
/// * in_ptx_path / out_ptx_path: merkle paths proving inclusion of the input/output notes
/// * in_note / out_note: the input and output note witnesses carrying the zone state roots
/// * input: the zone input (state transition) being executed
/// * state / journal: the pre-transition zone state and journal
fn execute(
ptx_root: [u8; 32],
input_root: [u8; 32],
output_root: [u8; 32],
in_ptx_path: Vec<merkle::PathNode>,
out_ptx_path: Vec<merkle::PathNode>,
in_note: InputWitness,
out_note: OutputWitness,
input: Input,
state: State,
mut journal: Journal,
) {
// verify ptx/cl preconditions
eprintln!("start exec: {}", env::cycle_count());
assert_eq!(ptx_root, merkle::node(input_root, output_root));
eprintln!("ptx_root: {}", env::cycle_count());
// Glue the zone and the cl together: verify that the note requesting the transfer
// is included as part of the same transaction in the cl.
let in_comm = in_note.commit().to_bytes();
eprintln!("input comm: {}", env::cycle_count());
assert_eq!(
merkle::path_root(merkle::leaf(&in_comm), &in_ptx_path),
input_root
);
eprintln!("input merkle path: {}", env::cycle_count());
// check the commitments match the actual data
let state_cm = calculate_state_hash(&state);
let journal_cm = calculate_journal_hash(&journal);
let state_root = merkle::node(state_cm, journal_cm);
assert_eq!(state_root, in_note.note.state);
eprintln!("input state root: {}", env::cycle_count());
// then run the state transition function
let state = stf(state, input);
journal.push(input);
eprintln!("stf: {}", env::cycle_count());
// verifying ptx/cl postconditions
let out_state_cm = calculate_state_hash(&state);
let out_journal_cm = calculate_journal_hash(&journal);
let out_state_root = merkle::node(out_state_cm, out_journal_cm);
// TODO: verify death constraints are propagated
assert_eq!(out_state_root, out_note.note.state);
eprintln!("out state root: {}", env::cycle_count());
// Glue the zone and the cl together: verify that an output note containing the new
// zone state is included as part of the same transaction in the cl
// (this is done in the death constraint to disallow burning).
let out_comm = out_note.commit().to_bytes();
eprintln!("output comm: {}", env::cycle_count());
assert_eq!(
merkle::path_root(merkle::leaf(&out_comm), &out_ptx_path),
output_root
);
eprintln!("out merkle proof: {}", env::cycle_count());
}
fn main() {
// public input
let ptx_root: [u8; 32] = env::read();
// private input
let input_root: [u8; 32] = env::read();
let output_root: [u8; 32] = env::read();
let in_ptx_path: Vec<merkle::PathNode> = env::read();
let out_ptx_path: Vec<merkle::PathNode> = env::read();
let in_note: InputWitness = env::read();
let out_note: OutputWitness = env::read();
let input: Input = env::read();
let state: State = env::read();
let journal: Journal = env::read();
eprintln!("parse input: {}", env::cycle_count());
execute(
ptx_root,
input_root,
output_root,
in_ptx_path,
out_ptx_path,
in_note,
out_note,
input,
state,
journal,
);
}
fn calculate_state_hash(state: &State) -> [u8; 32] {
let bytes = bincode::serialize(state).unwrap();
Blake2s256::digest(&bytes).into()
}
fn calculate_journal_hash(journal: &Journal) -> [u8; 32] {
let bytes = bincode::serialize(journal).unwrap();
Blake2s256::digest(&bytes).into()
}

View File

@ -0,0 +1 @@
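// Pulls in the constants generated by risc0-build for the guest methods
// (e.g. `METHOD_ELF` and `METHOD_ID`, as used by the host prover).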
include!(concat!(env!("OUT_DIR"), "/methods.rs"));

View File

@ -0,0 +1,4 @@
[toolchain]
channel = "stable"
components = ["rustfmt", "rust-src"]
profile = "minimal"