Compare commits

..

No commits in common. "master" and "v0.5.0" have entirely different histories.

52 changed files with 3435 additions and 9141 deletions

View File

@@ -4,7 +4,7 @@ on:
     branches:
       - master
 
-name: Code Coverage
+name: Codecov
 
 jobs:
   test:

View File

@@ -10,24 +10,26 @@ name: CI
 jobs:
   check:
-    name: Build
+    name: Check
     strategy:
       matrix:
         include:
           - os: ubuntu-latest
-            toolchain: stable
-          #- os: windows-latest
-          #  toolchain: stable-x86_64-pc-windows-gnu
+            toolchain: stable-x86_64-unknown-linux-gnu
+          - os: windows-latest
+            toolchain: stable-x86_64-pc-windows-gnu
           - os: macos-latest
-            toolchain: stable
+            toolchain: stable-x86_64-apple-darwin
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
     steps:
       - uses: actions/checkout@v3
         with:
           submodules: true
       - name: Checkout submodules
         run: git submodule update --init --recursive
+      - uses: actions/setup-go@v3 # we need go to build go-waku
+        with:
+          go-version: '1.20'
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
@@ -40,24 +42,26 @@ jobs:
           command: check
   test:
-    name: Test
+    name: Test Suite
     strategy:
       matrix:
         include:
           - os: ubuntu-latest
-            toolchain: stable
-          #- os: windows-latest
-          #  toolchain: stable-x86_64-pc-windows-gnu
+            toolchain: stable-x86_64-unknown-linux-gnu
+          - os: windows-latest
+            toolchain: stable-x86_64-pc-windows-gnu
           - os: macos-latest
-            toolchain: stable
+            toolchain: stable-x86_64-apple-darwin
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
     steps:
       - uses: actions/checkout@v3
         with:
           submodules: true
       - name: Checkout submodules
         run: git submodule update --init --recursive
+      - uses: actions/setup-go@v3 # we need go to build go-waku
+        with:
+          go-version: '1.20'
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
@@ -74,7 +78,7 @@ jobs:
           command: test
   lints:
-    name: Lint
+    name: Rust lints
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
@@ -82,6 +86,9 @@ jobs:
           submodules: true
       - name: Checkout submodules
         run: git submodule update --init --recursive
+      - uses: actions/setup-go@v3 # we need go to build go-waku
+        with:
+          go-version: '1.20'
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal

3
.gitignore vendored
View File

@@ -1,5 +1,4 @@
 **target
 /Cargo.lock
 /.idea
 /.fleet
-nimcache/

2
.gitmodules vendored
View File

@@ -1,3 +1,3 @@
 [submodule "waku-sys/vendor"]
 	path = waku-sys/vendor
-	url = https://github.com/logos-messaging/logos-messaging-nim
+	url = https://github.com/status-im/go-waku.git

2202
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -3,5 +3,5 @@
 members = [
     "waku-bindings",
     "waku-sys",
-    "examples/basic"
+    "examples/toy-chat"
 ]

View File

@@ -9,12 +9,12 @@
 [crates-url]: https://crates.io/crates/waku-bindings
 [docs-badge]: https://docs.rs/waku-bindings/badge.svg
 [docs-url]: https://docs.rs/waku-bindings
-[actions-badge]: https://github.com/logos-messaging/logos-messaging-rust-bindings/workflows/CI/badge.svg
-[actions-url]: https://github.com/logos-messaging/logos-messaging-rust-bindings/actions/workflows/main.yml?query=workflow%3ACI+branch%3Amaster
-[codecov-badge]: https://codecov.io/github/logos-messaging/logos-messaging-rust-bindings/branch/main/graph/badge.svg?token=H4CQWRUCUS
-[codecov-url]: https://codecov.io/github/logos-messaging/logos-messaging-rust-bindings
+[actions-badge]: https://github.com/waku-org/waku-rust-bindings/workflows/CI/badge.svg
+[actions-url]: https://github.com/waku-org/waku-rust-bindings/actions/workflows/main.yml?query=workflow%3ACI+branch%3Amaster
+[codecov-badge]: https://codecov.io/github/waku-org/waku-rust-bindings/branch/main/graph/badge.svg?token=H4CQWRUCUS
+[codecov-url]: https://codecov.io/github/waku-org/waku-rust-bindings
 
-Rust layer on top of [`logos-messaging-nim`](https://github.com/logos-messaging/logos-messaging-nim) [C FFI bindings](https://github.com/logos-messaging/logos-messaging-nim/blob/master/library/libwaku.h).
+Rust layer on top of [`go-waku`](https://github.com/waku-org/go-waku) [C FFI bindings](https://github.com/waku-org/go-waku/blob/master/library/README.md).
 
 ## About Waku
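For orientation, the master-side API that this README documents is driven roughly as follows; a condensed sketch based on the `examples/basic` code further down in this compare (the port number and topic names are just that example's values):

```rust
use waku::{
    general::pubsubtopic::PubsubTopic, waku_new, Encoding, WakuContentTopic, WakuMessage,
    WakuNodeConfig,
};

#[tokio::main]
async fn main() {
    // Create and start a node (the TCP port is just the example's value).
    let node = waku_new(Some(WakuNodeConfig {
        tcp_port: Some(60010),
        ..Default::default()
    }))
    .await
    .expect("node should instantiate");
    let node = node.start().await.expect("node should start");

    // Join a pubsub topic over relay and publish a single message on it.
    let topic = PubsubTopic::new("test");
    node.relay_subscribe(&topic).await.expect("subscribe should succeed");
    let content_topic = WakuContentTopic::new("waku", "2", "test", Encoding::Proto);
    let message = WakuMessage::new("Hello world", content_topic, 0, Vec::new(), false);
    node.relay_publish_message(&message, &topic, None)
        .await
        .expect("publish should succeed");

    // Stop the node and free the underlying libwaku resources.
    let node = node.stop().await.expect("node should stop");
    node.waku_destroy().await.expect("node should deallocate");
}
```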

5093
examples/Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -1,7 +1,5 @@
 [workspace]
 members = [
-    "basic",
-    "tic-tac-toe-gui",
     "toy-chat"
 ]

View File

@ -1,13 +0,0 @@
[package]
name = "basic"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
futures = "0.3.30"
tokio = { version = "1.36.0", features = ["full"] }
tokio-util = { version = "0.7.10", features = ["rt"] }
waku = { path = "../../waku-bindings", package = "waku-bindings" }
serde_json = "1.0"

View File

@ -1,19 +0,0 @@
### Description
This is a minimal example in which two Waku nodes are instantiated within the same Rust app.
### What it does
1. Instantiates two Waku nodes
2. Each node registers an event callback (waku message event, connection change event, etc.)
3. Each node starts
4. Each node performs a relay subscription
5. "node1" publishes a waku message
6. Both nodes are stopped
### How to run
From within the `examples/basic/` folder run:
```code
cargo run
```

View File

@ -1,147 +0,0 @@
use std::io::Error;
use std::str::from_utf8;
use tokio::time::{sleep, Duration};
use waku::{
general::pubsubtopic::PubsubTopic, waku_new, Encoding, LibwakuResponse, WakuContentTopic,
WakuEvent, WakuMessage, WakuNodeConfig,
};
#[tokio::main]
async fn main() -> Result<(), Error> {
let node1 = waku_new(Some(WakuNodeConfig {
tcp_port: Some(60010), // TODO: use any available port.
..Default::default()
}))
.await
.expect("should instantiate");
let node2 = waku_new(Some(WakuNodeConfig {
tcp_port: Some(60020), // TODO: use any available port.
..Default::default()
}))
.await
.expect("should instantiate");
// ========================================================================
// Setting an event callback to be executed each time a message is received
node2
.set_event_callback(|response| {
if let LibwakuResponse::Success(v) = response {
let event: WakuEvent =
serde_json::from_str(v.unwrap().as_str()).expect("Parsing event to succeed");
match event {
WakuEvent::WakuMessage(evt) => {
// println!("WakuMessage event received: {:?}", evt.waku_message);
let message = evt.waku_message;
let payload = message.payload.to_vec();
let msg = from_utf8(&payload).expect("should be valid message");
println!("::::::::::::::::::::::::::::::::::::::::::::::::::::");
println!("Message Received in NODE 2: {}", msg);
println!("::::::::::::::::::::::::::::::::::::::::::::::::::::");
}
WakuEvent::RelayTopicHealthChange(_evt) => {
// dbg!("Relay topic change evt", evt);
}
WakuEvent::ConnectionChange(_evt) => {
// dbg!("Conn change evt", evt);
}
WakuEvent::Unrecognized(err) => panic!("Unrecognized waku event: {:?}", err),
_ => panic!("event case not expected"),
};
}
})
.expect("set event call back working");
node1
.set_event_callback(|response| {
if let LibwakuResponse::Success(v) = response {
let event: WakuEvent =
serde_json::from_str(v.unwrap().as_str()).expect("Parsing event to succeed");
match event {
WakuEvent::WakuMessage(evt) => {
// println!("WakuMessage event received: {:?}", evt.waku_message);
let message = evt.waku_message;
let payload = message.payload.to_vec();
let msg = from_utf8(&payload).expect("should be valid message");
println!("::::::::::::::::::::::::::::::::::::::::::::::::::::");
println!("Message Received in NODE 1: {}", msg);
println!("::::::::::::::::::::::::::::::::::::::::::::::::::::");
}
WakuEvent::RelayTopicHealthChange(_evt) => {
// dbg!("Relay topic change evt", evt);
}
WakuEvent::ConnectionChange(_evt) => {
// dbg!("Conn change evt", evt);
}
WakuEvent::Unrecognized(err) => panic!("Unrecognized waku event: {:?}", err),
_ => panic!("event case not expected"),
};
}
})
.expect("set event call back working");
let node1 = node1.start().await.expect("node1 should start");
let node2 = node2.start().await.expect("node2 should start");
// ========================================================================
// Subscribe to pubsub topic
let topic = PubsubTopic::new("test");
node1
.relay_subscribe(&topic)
.await
.expect("node1 should subscribe");
node2
.relay_subscribe(&topic)
.await
.expect("node2 should subscribe");
// ========================================================================
// Connect nodes with each other
let addresses2 = node2
.listen_addresses()
.await
.expect("should obtain the addresses");
node1
.connect(&addresses2[0], None)
.await
.expect("node1 should connect to node2");
// ========================================================================
// Wait for gossipsub mesh to form
sleep(Duration::from_secs(2)).await;
// ========================================================================
// Publish a message
let content_topic = WakuContentTopic::new("waku", "2", "test", Encoding::Proto);
let message = WakuMessage::new("Hello world", content_topic, 0, Vec::new(), false);
node1
.relay_publish_message(&message, &topic, None)
.await
.expect("should have sent the message");
// ========================================================================
// Waiting for message to arrive
sleep(Duration::from_secs(1)).await;
// ========================================================================
// Stop both instances
let node1 = node1.stop().await.expect("should stop");
let node2 = node2.stop().await.expect("should stop");
// ========================================================================
// Free resources
node1.waku_destroy().await.expect("should deallocate");
node2.waku_destroy().await.expect("should deallocate");
Ok(())
}

View File

@ -1,17 +0,0 @@
[package]
name = "tic-tac-toe-gui"
version = "0.1.0"
edition = "2021"
[dependencies]
waku = { path = "../../waku-bindings", package = "waku-bindings" }
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
ark-std = "0.4"
ctrlc = "3.2.4"
tokio = { version = "1", features = ["full"] }
tokio-util = "0.6" # for utility functions if needed
egui = "0.22"
eframe = "0.22"
secp256k1 = { version = "0.26", features = ["rand", "recovery", "serde"] }

View File

@ -1,24 +0,0 @@
### Description
This tic-tac-toe example shows how to drive a Waku node from an app with a UI. The example is deliberately naïve: it assumes only two tic-tac-toe instances are running globally, so game messages may collide with other ongoing games.
The game board is shown within a Rust eframe.
### What it does
1. Instantiates one Waku node
2. Starts the Waku node
3. Registers the node's event callback for Waku events (messages, connection change, topic health, etc.)
4. Subscribes the node to the `game_topic` pubsub topic
### How to run
From within the `examples/tic-tac-toe/` folder run:
```code
cargo run
```
Another player can start their own instance in another terminal or on another machine.

View File

@ -1,409 +0,0 @@
use eframe::egui;
use serde::{Deserialize, Serialize};
use std::str::from_utf8;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use tokio::task;
use tokio::sync::mpsc;
use waku::{
waku_new, Encoding, WakuEvent, LibwakuResponse, WakuContentTopic,
WakuMessage, WakuNodeConfig, WakuNodeHandle, Initialized, Running,
general::pubsubtopic::PubsubTopic,
};
#[derive(Serialize, Deserialize, PartialEq, Debug, Copy, Clone)]
enum Player {
X,
O,
}
#[derive(Serialize, Deserialize, Clone)]
struct GameState {
board: [[Option<Player>; 3]; 3],
current_turn: Player,
moves_left: usize,
}
struct TicTacToeApp<State> {
game_state: Arc<Mutex<GameState>>,
waku: WakuNodeHandle<State>,
game_topic: PubsubTopic,
tx: mpsc::Sender<String>, // Sender to send `msg` to main thread
player_role: Option<Player>, // Store the player's role (X or O)
}
impl TicTacToeApp<Initialized> {
fn new(
waku: WakuNodeHandle<Initialized>,
game_topic: PubsubTopic,
game_state: Arc<Mutex<GameState>>,
tx: mpsc::Sender<String>,
) -> Self {
Self {
game_state,
waku,
game_topic,
tx,
player_role: None,
}
}
async fn start(self) -> TicTacToeApp<Running> {
let tx_clone = self.tx.clone();
let game_content_topic = WakuContentTopic::new("waku", "2", "tictactoegame", Encoding::Proto);
let my_closure = move |response| {
if let LibwakuResponse::Success(v) = response {
let event: WakuEvent =
serde_json::from_str(v.unwrap().as_str()).expect("Parsing event to succeed");
match event {
WakuEvent::WakuMessage(evt) => {
let message = evt.waku_message;
// Filter: only process messages for our game content topic
if message.content_topic != game_content_topic {
return; // Skip messages from other apps
}
let payload = message.payload.to_vec();
match from_utf8(&payload) {
Ok(msg) => {
// Lock succeeded, proceed to send the message
if tx_clone.blocking_send(msg.to_string()).is_err() {
eprintln!("Failed to send message to async task");
}
}
Err(e) => {
eprintln!("Failed to decode payload as UTF-8: {}", e);
// Handle the error as needed, or just log and skip
}
}
},
WakuEvent::RelayTopicHealthChange(_evt) => {
// dbg!("Relay topic change evt", evt);
},
WakuEvent::ConnectionChange(_evt) => {
// dbg!("Conn change evt", evt);
},
WakuEvent::Unrecognized(err) => panic!("Unrecognized waku event: {:?}", err),
_ => panic!("event case not expected"),
};
}
};
// Establish a closure that handles the incoming messages
self.waku.set_event_callback(my_closure).expect("set event call back working");
// Start the waku node
let waku = self.waku.start().await.expect("waku should start");
// Subscribe to desired topic using the relay protocol
waku.relay_subscribe(&self.game_topic).await.expect("waku should subscribe");
// Example filter subscription. This is needed in edge nodes (resource-restricted devices)
// Nodes usually use either relay or lightpush/filter protocols
// let ctopic = WakuContentTopic::new("waku", "2", "tictactoegame", Encoding::Proto);
// let content_topics = vec![ctopic];
// waku.filter_subscribe(&self.game_topic, content_topics).await.expect("waku should subscribe");
// End filter example ----------------------------------------
// Example to establish direct connection to a well-known node
// Connect to hard-coded node
// let target_node_multi_addr =
// "/ip4/159.223.242.94/tcp/30303/p2p/16Uiu2HAmAUdrQ3uwzuE4Gy4D56hX6uLKEeerJAnhKEHZ3DxF1EfT"
// // "/dns4/store-01.do-ams3.status.prod.status.im/tcp/30303/p2p/16Uiu2HAmAUdrQ3uwzuE4Gy4D56hX6uLKEeerJAnhKEHZ3DxF1EfT"
// // "/ip4/24.144.78.119/tcp/30303/p2p/16Uiu2HAm3xVDaz6SRJ6kErwC21zBJEZjavVXg7VSkoWzaV1aMA3F"
// .parse::<Multiaddr>().expect("parse multiaddress");
// self.waku.connect(&target_node_multi_addr, None)
// .expect("waku should connect to other node");
// End example direct connection
TicTacToeApp {
game_state: self.game_state,
waku,
game_topic: self.game_topic,
tx: self.tx,
player_role: self.player_role,
}
}
}
impl TicTacToeApp<Running> {
async fn send_game_state(&self, game_state: &GameState) {
let serialized_game_state = serde_json::to_string(game_state).unwrap();
let content_topic = WakuContentTopic::new("waku", "2", "tictactoegame", Encoding::Proto);
let message = WakuMessage::new(
&serialized_game_state,
content_topic,
0,
Vec::new(),
false,
);
if let Ok(msg_hash) = self.waku.relay_publish_message(&message, &self.game_topic, None).await {
dbg!(format!("message hash published: {}", msg_hash));
}
// Example lightpush publish message. This is needed in edge nodes (resource-restricted devices)
// Nodes usually use either relay or lightpush/filter protocols
//
// let msg_hash_ret = self.waku.lightpush_publish_message(&message, &self.game_topic).await;
// match msg_hash_ret {
// Ok(msg_hash) => println!("Published message hash {:?}", msg_hash.to_string()),
// Err(error) => println!("Failed to publish with lightpush: {}", error)
// }
// End example lightpush publish message
}
fn make_move(&mut self, row: usize, col: usize) {
if let Ok(mut game_state) = self.game_state.try_lock() {
if let Some(my_role) = self.player_role {
if game_state.current_turn != my_role {
return; // skip click if not my turn
}
}
if game_state.board[row][col].is_none() && game_state.moves_left > 0 {
game_state.board[row][col] = Some(game_state.current_turn);
game_state.moves_left -= 1;
if let Some(winner) = self.check_winner(&game_state) {
game_state.current_turn = winner;
} else {
game_state.current_turn = match game_state.current_turn {
Player::X => Player::O,
Player::O => Player::X,
};
}
// Call the async function in a blocking context
task::block_in_place(|| {
// Obtain the current runtime handle
let handle = tokio::runtime::Handle::current();
// Block on the async function
handle.block_on(async {
// Assuming `self` is available in the current context
self.send_game_state(&game_state).await;
});
});
}
}
}
fn check_winner(&self, game_state: &GameState) -> Option<Player> {
// Check rows, columns, and diagonals
for i in 0..3 {
if game_state.board[i][0] == game_state.board[i][1] &&
game_state.board[i][1] == game_state.board[i][2] {
if let Some(player) = game_state.board[i][0] {
return Some(player);
}
}
if game_state.board[0][i] == game_state.board[1][i] &&
game_state.board[1][i] == game_state.board[2][i] {
if let Some(player) = game_state.board[0][i] {
return Some(player);
}
}
}
if game_state.board[0][0] == game_state.board[1][1] &&
game_state.board[1][1] == game_state.board[2][2] {
if let Some(player) = game_state.board[0][0] {
return Some(player);
}
}
if game_state.board[0][2] == game_state.board[1][1] &&
game_state.board[1][1] == game_state.board[2][0] {
if let Some(player) = game_state.board[0][2] {
return Some(player);
}
}
None
}
fn reset_game(&mut self) {
self.game_state = Arc::new(Mutex::new(GameState {
board: [[None; 3]; 3],
current_turn: Player::X,
moves_left: 9,
}));
self.player_role = None
}
}
impl eframe::App for TicTacToeApp<Running> {
fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
// Request a repaint every second
ctx.request_repaint_after(Duration::from_secs(1));
egui::CentralPanel::default().show(ctx, |ui| {
ui.heading("Tic-Tac-Toe");
// If the player hasn't selected a role, show the role selection buttons
if self.player_role.is_none() {
ui.label("Select your role:");
if ui.button("Play as X").clicked() {
self.player_role = Some(Player::X);
}
if ui.button("Play as O").clicked() {
self.player_role = Some(Player::O);
// Player O waits for Player X to make the first move
// No need to change current_turn as it's already X
}
return; // Exit early until a role is selected
}
let player_role = self.player_role.unwrap(); // Safe to unwrap because we've ensured it's Some
// Main game UI
ui.label(format!("You are playing as: {:?}", player_role));
// Draw the game board and handle the game state
let board_size = ui.available_size();
let cell_size = board_size.x / 4.0;
ui.horizontal(|ui| {
for row in 0..3 {
ui.vertical(|ui| {
for col in 0..3 {
let label;
{
if let Ok(game_state) = self.game_state.try_lock() {
label = match game_state.board[row][col] {
Some(Player::X) => "X",
Some(Player::O) => "O",
None => "-",
};
}
else {
label = "#";
}
}
let button = ui.add(egui::Button::new(label).min_size(egui::vec2(cell_size, cell_size)).sense(egui::Sense::click()));
if button.clicked() {
self.make_move(row, col);
}
}
});
if row < 2 {
ui.add_space(4.0);
}
}
});
if let Ok(game_state) = self.game_state.try_lock() {
if let Some(winner) = self.check_winner(&game_state) {
ui.label(format!(
"Player {} wins!",
match winner {
Player::X => "X",
Player::O => "O",
}
));
} else if game_state.moves_left == 0 {
ui.label("It's a tie!");
} else {
ui.label(format!(
"Player {}'s turn",
match game_state.current_turn {
Player::X => "X",
Player::O => "O",
}
));
}
}
if ui.add(egui::Button::new("Restart Game")).clicked() {
self.reset_game();
}
});
}
fn on_exit(&mut self, _gl: Option<&eframe::glow::Context>) {
// TODO: implement cleanup and a proper stop of the waku node
}
}
#[tokio::main]
async fn main() -> eframe::Result<()> {
let (tx, mut rx) = mpsc::channel::<String>(3200); // Channel to communicate between threads
let game_topic = PubsubTopic::new("/waku/2/rs/16/32");
// Create a Waku instance
let waku = waku_new(Some(WakuNodeConfig {
tcp_port: Some(60010),
cluster_id: Some(16),
shards: vec![1, 32, 64, 128, 256],
// node_key: Some(SecretKey::from_str("2fc0515879e52b7b73297cfd6ab3abf7c344ef84b7a90ff6f4cc19e05a198027").unwrap()),
max_message_size: Some("1024KiB".to_string()),
relay_topics: vec![String::from(&game_topic)],
log_level: Some("FATAL"), // Supported: TRACE, DEBUG, INFO, NOTICE, WARN, ERROR or FATAL
keep_alive: Some(true),
// Discovery
dns_discovery: Some(true),
dns_discovery_url: Some("enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.prod.status.nodes.status.im"),
// discv5_discovery: Some(true),
// discv5_udp_port: Some(9001),
// discv5_enr_auto_update: Some(false),
..Default::default()
})).await
.expect("should instantiate");
let game_state = GameState {
board: [[None; 3]; 3],
current_turn: Player::X,
moves_left: 9,
};
let shared_state = Arc::new(Mutex::new(game_state));
let clone = shared_state.clone();
let app = TicTacToeApp::new(waku, game_topic, clone, tx);
let app = app.start().await;
let clone = shared_state.clone();
// Listen for messages in the main thread
tokio::spawn(async move {
while let Some(msg) = rx.recv().await {
// println!("MSG received: {}", msg);
// Handle the received message, e.g., update the UI or game state
if let Ok(parsed_value) = serde_json::from_str::<GameState>(&msg)
{
if let Ok(mut unclocked_game_state) = clone.lock(){
*unclocked_game_state = parsed_value;
}
}
else {
eprintln!("Failed to parse JSON");
}
}
});
eframe::run_native(
"Tic-Tac-Toe Multiplayer via Waku",
eframe::NativeOptions {
initial_window_size: Some(egui::vec2(400.0, 400.0)),
..Default::default()
},
Box::new(|_cc| Box::new(app)),
)?;
Ok(())
}

View File

@@ -8,12 +8,9 @@ authors = [
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [dependencies]
-waku = { path = "../../waku-bindings", package = "waku-bindings" }
-serde_json = "1.0"
-serde = { version = "1.0", features = ["derive"] }
+waku-bindings = { path = "../../waku-bindings" }
 tui = "0.19"
 crossterm = "0.25"
 unicode-width = "0.1"
 prost = "0.11"
 chrono = "0.4"
-tokio = { version = "1", features = ["full"] }

View File

@ -1,21 +0,0 @@
### Description
This is a chat example where multiple participants can talk within the same room.
### What it does
1. Instantiates one Waku node
2. Starts the Waku node
3. Registers the node's event callback for Waku events (messages, connection change, topic health, etc.)
4. Subscribes the node to the chat pubsub topic
5. Retrieves previous chat messages from a store node on startup (a sketch of this query follows this section)
### How to run
From within the `examples/toy-chat/` folder, run the following to start a chat using the given nickname.
e.g.:
```code
cargo run "Alice"
```
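Step 5 corresponds, on the master side, to a `store_query` against a store node. A condensed sketch based on the chat code below, with a placeholder store-node multiaddress:

```rust
use std::time::Duration;

use chrono::Utc;
use waku::{Running, WakuNodeHandle};

use crate::protocol::TOY_CHAT_CONTENT_TOPIC;

// Placeholder multiaddress for illustration; the example hard-codes its own STORE_NODE.
const STORE_NODE: &str = "/dns4/example-store-node/tcp/30303/p2p/16Uiu2ExamplePeerId";

async fn retrieve_last_day(waku: &WakuNodeHandle<Running>) {
    // Lower time bound: 24 hours ago, expressed as a nanosecond timestamp.
    let one_day_in_secs = 60 * 60 * 24;
    let time_start = (Duration::from_secs(Utc::now().timestamp() as u64)
        - Duration::from_secs(one_day_in_secs))
    .as_nanos() as u64;

    // Ask the store node for full messages (include_data == true) on the toy-chat
    // content topic; the remaining optional arguments stay unset, as in the example.
    let _messages = waku
        .store_query(
            None,
            vec![TOY_CHAT_CONTENT_TOPIC.clone()],
            STORE_NODE,
            true,
            Some(time_start),
            None,
            None,
        )
        .await
        .expect("store query should succeed");
    // Each returned entry is then decoded with prost into a Chat2Message, as the chat does.
}
```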

View File

@ -1,18 +1,18 @@
mod protocol; mod protocol;
use crate::protocol::{Chat2Message, TOY_CHAT_CONTENT_TOPIC}; use crate::protocol::{Chat2Message, TOY_CHAT_CONTENT_TOPIC};
use tokio::task; use chrono::Utc;
use crossterm::{ use crossterm::{
event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode}, event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode},
execute, execute,
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
}; };
use prost::Message; use prost::Message;
use chrono::Utc;
use std::io::Write; use std::io::Write;
use std::str::FromStr;
use std::sync::{Arc, RwLock}; use std::sync::{Arc, RwLock};
use std::{error::Error, io};
use std::time::Duration; use std::time::Duration;
use std::{error::Error, io};
use tui::{ use tui::{
backend::{Backend, CrosstermBackend}, backend::{Backend, CrosstermBackend},
layout::{Constraint, Direction, Layout}, layout::{Constraint, Direction, Layout},
@ -22,9 +22,9 @@ use tui::{
Frame, Terminal, Frame, Terminal,
}; };
use unicode_width::UnicodeWidthStr; use unicode_width::UnicodeWidthStr;
use waku::{ use waku_bindings::{
general::pubsubtopic::PubsubTopic, general::Result, waku_new, Initialized, LibwakuResponse, Running, WakuEvent, waku_default_pubsub_topic, waku_new, waku_set_event_callback, ContentFilter, Multiaddr,
WakuMessage, WakuNodeConfig, WakuNodeHandle, PagingOptions, ProtocolId, Running, StoreQuery, WakuMessage, WakuNodeHandle,
}; };
enum InputMode { enum InputMode {
@ -32,12 +32,14 @@ enum InputMode {
Editing, Editing,
} }
const STORE_NODE: &str = "/dns4/store-01.do-ams3.status.staging.status.im/tcp/30303/p2p/16Uiu2HAm3xVDaz6SRJ6kErwC21zBJEZjavVXg7VSkoWzaV1aMA3F"; const NODES: &[&str] = &[
"/dns4/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/30303/p2p/16Uiu2HAkvWiyFsgRhuJEb9JfjYxEkoHLgnUQmr1N5mKWnYjxYRVm",
const DEFAULT_PUBSUB_TOPIC: &str = "/waku/2/rs/16/32"; "/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/30303/p2p/16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ",
"/dns4/node-01.gc-us-central1-a.wakuv2.test.statusim.net/tcp/30303/p2p/16Uiu2HAmJb2e28qLXxT5kZxVUUoJt72EMzNGXB47Rxx5hw3q4YjS"
];
/// App holds the state of the application /// App holds the state of the application
struct App<State> { struct App {
/// Current value of the input box /// Current value of the input box
input: String, input: String,
nick: String, nick: String,
@ -45,212 +47,77 @@ struct App<State> {
input_mode: InputMode, input_mode: InputMode,
/// History of recorded messages /// History of recorded messages
messages: Arc<RwLock<Vec<Chat2Message>>>, messages: Arc<RwLock<Vec<Chat2Message>>>,
waku: WakuNodeHandle<State>,
node_handle: WakuNodeHandle<Running>,
} }
impl App<Initialized> { impl App {
async fn new(nick: String) -> Result<App<Initialized>> { fn new(nick: String, node_handle: WakuNodeHandle<Running>) -> App {
let pubsub_topic = PubsubTopic::new(DEFAULT_PUBSUB_TOPIC); App {
let waku = waku_new(Some(WakuNodeConfig {
tcp_port: Some(60010),
cluster_id: Some(16),
shards: vec![1, 32, 64, 128, 256],
// node_key: Some(SecretKey::from_str("2fc0515879e52b7b73297cfd6ab3abf7c344ef84b7a90ff6f4cc19e05a198027").unwrap()),
max_message_size: Some("1024KiB".to_string()),
relay_topics: vec![String::from(&pubsub_topic)],
log_level: Some("FATAL"), // Supported: TRACE, DEBUG, INFO, NOTICE, WARN, ERROR or FATAL
keep_alive: Some(true),
// Discovery
dns_discovery: Some(true),
dns_discovery_url: Some("enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.prod.status.nodes.status.im"),
// discv5_discovery: Some(true),
// discv5_udp_port: Some(9001),
// discv5_enr_auto_update: Some(false),
..Default::default()
})).await?;
Ok(App {
input: String::new(), input: String::new(),
input_mode: InputMode::Normal, input_mode: InputMode::Normal,
messages: Arc::new(RwLock::new(Vec::new())), messages: Arc::new(RwLock::new(Vec::new())),
node_handle,
nick, nick,
waku,
})
}
async fn start_waku_node(self) -> Result<App<Running>> {
let shared_messages = Arc::clone(&self.messages);
self.waku.set_event_callback(move|response| {
if let LibwakuResponse::Success(v) = response {
let event: WakuEvent =
serde_json::from_str(v.unwrap().as_str()).expect("failed parsing event in set_event_callback");
match event {
WakuEvent::WakuMessage(evt) => {
if evt.waku_message.content_topic != TOY_CHAT_CONTENT_TOPIC {
return; // skip the messages that don't belong to the toy chat
}
match <Chat2Message as Message>::decode(evt.waku_message.payload()) {
Ok(chat_message) => {
// Add the new message to the front
{
let mut messages_lock = shared_messages.write().unwrap();
messages_lock.insert(0, chat_message); // Insert at the front (index 0)
}
}
Err(e) => {
let mut out = std::io::stderr();
write!(out, "{e:?}").unwrap();
}
}
},
WakuEvent::RelayTopicHealthChange(_evt) => {
// dbg!("Relay topic change evt", evt);
},
WakuEvent::ConnectionChange(_evt) => {
// dbg!("Conn change evt", evt);
},
WakuEvent::Unrecognized(err) => eprintln!("Unrecognized waku event: {:?}", err),
_ => eprintln!("event case not expected"),
};
}
})?;
let waku = self.waku.start().await?;
Ok(App {
input: self.input,
nick: self.nick,
input_mode: self.input_mode,
messages: self.messages,
waku,
})
}
}
impl App<Running> {
async fn retrieve_history(&mut self) {
let one_day_in_secs = 60 * 60 * 24;
let time_start = (Duration::from_secs(Utc::now().timestamp() as u64)
- Duration::from_secs(one_day_in_secs))
.as_nanos() as u64;
let include_data = true;
let messages = self.waku.store_query(None,
vec![TOY_CHAT_CONTENT_TOPIC.clone()],
STORE_NODE,
include_data,
Some(time_start),
None,
None).await.unwrap();
let messages: Vec<_> = messages
.into_iter()
// we expect messages because the query was passed with include_data == true
.filter(|item| item.message.is_some())
.map(|store_resp_msg| {
<Chat2Message as Message>::decode(store_resp_msg.message.unwrap().payload())
.expect("Toy chat messages should be decodeable")
})
.collect();
if !messages.is_empty() {
*self.messages.write().unwrap() = messages;
} }
} }
}
fn retrieve_history(
node_handle: &WakuNodeHandle<Running>,
) -> waku_bindings::Result<Vec<Chat2Message>> {
let self_id = node_handle.peer_id().unwrap();
let peer = node_handle
.peers()?
.iter()
.find(|&peer| peer.peer_id() != &self_id)
.cloned()
.unwrap();
fn run_main_loop<B: Backend>( let result = node_handle.store_query(
&mut self, &StoreQuery {
terminal: &mut Terminal<B>, pubsub_topic: None,
) -> std::result::Result<(), Box<dyn Error>> { content_topics: vec![TOY_CHAT_CONTENT_TOPIC.clone()],
loop { start_time: Some(
terminal.draw(|f| ui(f, self))?; (Duration::from_secs(Utc::now().timestamp() as u64)
- Duration::from_secs(60 * 60 * 24))
.as_nanos() as usize,
),
end_time: None,
paging_options: Some(PagingOptions {
page_size: 25,
cursor: None,
forward: true,
}),
},
peer.peer_id(),
Some(Duration::from_secs(10)),
)?;
if event::poll(Duration::from_millis(500)).unwrap() { Ok(result
if let Event::Key(key) = event::read()? { .messages()
match self.input_mode { .iter()
InputMode::Normal => match key.code { .map(|waku_message| {
KeyCode::Char('e') => { <Chat2Message as Message>::decode(waku_message.payload())
self.input_mode = InputMode::Editing; .expect("Toy chat messages should be decodeable")
} })
KeyCode::Char('q') => { .collect())
return Ok(());
}
_ => {}
},
InputMode::Editing => match key.code {
KeyCode::Enter => {
let message_content: String = self.input.drain(..).collect();
let message = Chat2Message::new(&self.nick, &message_content);
let mut buff = Vec::new();
let meta = Vec::new();
Message::encode(&message, &mut buff)?;
let waku_message = WakuMessage::new(
buff,
TOY_CHAT_CONTENT_TOPIC.clone(),
1,
meta,
false,
);
// Call the async function in a blocking context
task::block_in_place(|| {
// Obtain the current runtime handle
let handle = tokio::runtime::Handle::current();
// Block on the async function
handle.block_on(async {
// Assuming `self` is available in the current context
let pubsub_topic = PubsubTopic::new(DEFAULT_PUBSUB_TOPIC);
if let Err(e) = self.waku.relay_publish_message(
&waku_message,
&pubsub_topic,
None,
).await {
let mut out = std::io::stderr();
write!(out, "{e:?}").unwrap();
}
});
});
}
KeyCode::Char(c) => {
self.input.push(c);
}
KeyCode::Backspace => {
self.input.pop();
}
KeyCode::Esc => {
self.input_mode = InputMode::Normal;
}
_ => {}
},
}
}
}
}
}
async fn stop_app(self) {
self.waku.stop().await.expect("the node should stop properly");
}
} }
#[tokio::main] fn setup_node_handle() -> std::result::Result<WakuNodeHandle<Running>, Box<dyn Error>> {
async fn main() -> std::result::Result<(), Box<dyn Error>> { let node_handle = waku_new(None)?;
let node_handle = node_handle.start()?;
for address in NODES.iter().map(|a| Multiaddr::from_str(a).unwrap()) {
let peerid = node_handle.add_peer(&address, ProtocolId::Relay)?;
node_handle.connect_peer_with_id(&peerid, None)?;
}
let content_filter = ContentFilter::new(Some(waku_default_pubsub_topic()), vec![]);
node_handle.relay_subscribe(&content_filter)?;
Ok(node_handle)
}
fn main() -> std::result::Result<(), Box<dyn Error>> {
let nick = std::env::args().nth(1).expect("Nick to be set"); let nick = std::env::args().nth(1).expect("Nick to be set");
let app = App::new(nick).await?;
let mut app = app.start_waku_node().await?;
// setup terminal // setup terminal
enable_raw_mode()?; enable_raw_mode()?;
let mut stdout = io::stdout(); let mut stdout = io::stdout();
@ -258,9 +125,40 @@ async fn main() -> std::result::Result<(), Box<dyn Error>> {
let backend = CrosstermBackend::new(stdout); let backend = CrosstermBackend::new(stdout);
let mut terminal = Terminal::new(backend)?; let mut terminal = Terminal::new(backend)?;
app.retrieve_history().await; let node_handle = setup_node_handle()?;
let res = app.run_main_loop(&mut terminal);
app.stop_app().await; // create app and run it
let mut app = App::new(nick, node_handle);
let history = retrieve_history(&app.node_handle)?;
if !history.is_empty() {
*app.messages.write().unwrap() = history;
}
let shared_messages = Arc::clone(&app.messages);
waku_set_event_callback(move |signal| match signal.event() {
waku_bindings::Event::WakuMessage(event) => {
if event.waku_message().content_topic() != &TOY_CHAT_CONTENT_TOPIC {
return;
}
match <Chat2Message as Message>::decode(event.waku_message().payload()) {
Ok(chat_message) => {
shared_messages.write().unwrap().push(chat_message);
}
Err(e) => {
let mut out = std::io::stderr();
write!(out, "{e:?}").unwrap();
}
}
}
waku_bindings::Event::Unrecognized(data) => {
let mut out = std::io::stderr();
write!(out, "Error, received unrecognized event {data}").unwrap();
}
_ => {}
});
// app.node_handle.relay_publish_message(&WakuMessage::new(Chat2Message::new(&app.nick, format!(""))))
let res = run_app(&mut terminal, &mut app);
// restore terminal // restore terminal
disable_raw_mode()?; disable_raw_mode()?;
@ -270,6 +168,7 @@ async fn main() -> std::result::Result<(), Box<dyn Error>> {
DisableMouseCapture DisableMouseCapture
)?; )?;
terminal.show_cursor()?; terminal.show_cursor()?;
app.node_handle.stop()?;
if let Err(err) = res { if let Err(err) = res {
println!("{err:?}") println!("{err:?}")
@ -277,7 +176,65 @@ async fn main() -> std::result::Result<(), Box<dyn Error>> {
Ok(()) Ok(())
} }
fn ui<B: Backend, State>(f: &mut Frame<B>, app: &App<State>) { fn run_app<B: Backend>(
terminal: &mut Terminal<B>,
app: &mut App,
) -> std::result::Result<(), Box<dyn Error>> {
loop {
terminal.draw(|f| ui(f, app))?;
if let Event::Key(key) = event::read()? {
match app.input_mode {
InputMode::Normal => match key.code {
KeyCode::Char('e') => {
app.input_mode = InputMode::Editing;
}
KeyCode::Char('q') => {
return Ok(());
}
_ => {}
},
InputMode::Editing => match key.code {
KeyCode::Enter => {
let message_content: String = app.input.drain(..).collect();
let message = Chat2Message::new(&app.nick, &message_content);
let mut buff = Vec::new();
let meta = Vec::new();
Message::encode(&message, &mut buff)?;
let waku_message = WakuMessage::new(
buff,
TOY_CHAT_CONTENT_TOPIC.clone(),
1,
Utc::now().timestamp_nanos() as usize,
meta,
false,
);
if let Err(e) = app.node_handle.relay_publish_message(
&waku_message,
Some(waku_default_pubsub_topic()),
None,
) {
let mut out = std::io::stderr();
write!(out, "{e:?}").unwrap();
}
}
KeyCode::Char(c) => {
app.input.push(c);
}
KeyCode::Backspace => {
app.input.pop();
}
KeyCode::Esc => {
app.input_mode = InputMode::Normal;
}
_ => {}
},
}
}
}
}
fn ui<B: Backend>(f: &mut Frame<B>, app: &App) {
let chunks = Layout::default() let chunks = Layout::default()
.direction(Direction::Vertical) .direction(Direction::Vertical)
.margin(2) .margin(2)

View File

@@ -1,6 +1,6 @@
 use chrono::{DateTime, LocalResult, TimeZone, Utc};
 use prost::Message;
-use waku::{Encoding, WakuContentTopic};
+use waku_bindings::{Encoding, WakuContentTopic};
 
 pub static TOY_CHAT_CONTENT_TOPIC: WakuContentTopic =
     WakuContentTopic::new("toy-chat", "2", "huilong", Encoding::Proto);

View File

@@ -1,15 +1,13 @@
 [package]
 name = "waku-bindings"
-version = "1.0.0"
+version = "0.5.0"
 edition = "2021"
 authors = [
-    "Daniel Sanchez Quiros <danielsq@status.im>",
-    "Richard Ramos <richard@waku.org>",
-    "Ivan Folgueira Bande <ivansete@status.im>"
+    "Daniel Sanchez Quiros <danielsq@status.im>"
 ]
 description = "Waku networking library"
 license = "MIT OR Apache-2.0"
-repository = "https://github.com/logos-messaging/logos-messaging-rust-bindings"
+repository = "https://github.com/waku-org/waku-rust-bindings"
 keywords = ["waku", "peer-to-peer", "libp2p", "networking"]
 categories = ["network-programming"]
@@ -28,14 +26,9 @@ serde_json = "1.0"
 sscanf = "0.4"
 smart-default = "0.6"
 url = "2.3"
-waku-sys = { version = "1.0.0", path = "../waku-sys" }
+waku-sys = { version = "0.5.0", path = "../waku-sys" }
 libc = "0.2"
 serde-aux = "4.3.1"
-rln = "0.3.4"
-tokio = { version = "1", features = ["full"] }
-regex = "1"
-chrono = "0.4"
-uuid = { version = "1.3", features = ["v4"] }
 
 [dev-dependencies]
 futures = "0.3.25"

View File

@@ -1,9 +1,9 @@
 # Waku Rust bindings
 
-[<img alt="github" src="https://img.shields.io/badge/github-Github-red?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/logos-messaging/logos-messaging-rust-bindings)
+[<img alt="github" src="https://img.shields.io/badge/github-Github-red?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/waku-org/waku-rust-bindings)
 [<img alt="crates.io" src="https://img.shields.io/crates/v/waku-bindings.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/waku-bindings)
 [<img alt="docs.rs" src="https://img.shields.io/badge/doc/waku-bindings-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/waku-bindings)
-[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/logos-messaging/logos-messaging-rust-bindings/main.yml?branch=master" height="20">](https://github.com/logos-messaging/logos-messaging-rust-bindings/actions/workflows/main.yml?query=branch%3Amaster)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/waku-org/waku-rust-bindings/main.yml?branch=master" height="20">](https://github.com/waku-org/waku-rust-bindings/actions/workflows/main.yml?query=branch%3Amaster)
 
 Rust api on top of [`waku-sys`](https://crates.io/crates/waku-sys) bindgen bindings to [c ffi bindings](https://github.com/status-im/go-waku/blob/v0.2.2/library/README.md).

View File

@ -0,0 +1,91 @@
//! Symmetric and asymmetric waku messages [decrypting](https://rfc.vac.dev/spec/36/#decrypting-messages) methods
// std
use std::ffi::CString;
// crates
use aes_gcm::{Aes256Gcm, Key};
use libc::*;
use secp256k1::SecretKey;
// internal
use crate::general::{DecodedPayload, Result, WakuMessage};
use crate::utils::{get_trampoline, handle_json_response};
/// Decrypt a message using a symmetric key
///
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_decode_symmetricchar-messagejson-char-symmetrickey)
pub fn waku_decode_symmetric(
message: &WakuMessage,
symmetric_key: &Key<Aes256Gcm>,
) -> Result<DecodedPayload> {
let symk = hex::encode(symmetric_key.as_slice());
let message_ptr = CString::new(
serde_json::to_string(&message)
.expect("WakuMessages should always be able to success serializing"),
)
.expect("CString should build properly from the serialized waku message")
.into_raw();
let symk_ptr = CString::new(symk)
.expect("CString should build properly from hex encoded symmetric key")
.into_raw();
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_decode_symmetric(
message_ptr,
symk_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(message_ptr));
drop(CString::from_raw(symk_ptr));
out
};
handle_json_response(code, &result)
}
/// Decrypt a message using an asymmetric private key
///
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_decode_asymmetricchar-messagejson-char-privatekey)
pub fn waku_decode_asymmetric(
message: &WakuMessage,
asymmetric_key: &SecretKey,
) -> Result<DecodedPayload> {
let sk = hex::encode(asymmetric_key.secret_bytes());
let message_ptr = CString::new(
serde_json::to_string(&message)
.expect("WakuMessages should always be able to success serializing"),
)
.expect("CString should build properly from the serialized waku message")
.into_raw();
let sk_ptr = CString::new(sk)
.expect("CString should build properly from hex encoded symmetric key")
.into_raw();
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_decode_asymmetric(
message_ptr,
sk_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(message_ptr));
drop(CString::from_raw(sk_ptr));
out
};
handle_json_response(code, &result)
}
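A minimal usage sketch for the symmetric variant, assuming `waku_decode_symmetric` and `DecodedPayload` are re-exported at the crate root:

```rust
use aes_gcm::{Aes256Gcm, Key};
use waku_bindings::{waku_decode_symmetric, DecodedPayload, WakuMessage};

// Decrypt a received version-1 message with a 32-byte symmetric key.
fn decrypt_received(
    message: &WakuMessage,
    key_bytes: &[u8; 32],
) -> Result<DecodedPayload, String> {
    let key = Key::<Aes256Gcm>::from_slice(key_bytes);
    waku_decode_symmetric(message, key)
}
```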

View File

@ -0,0 +1,101 @@
// std
use std::ffi::CString;
// crates
use aes_gcm::{Aes256Gcm, Key};
use libc::*;
use secp256k1::{PublicKey, SecretKey};
// internal
use crate::general::{Result, WakuMessage};
use crate::utils::{get_trampoline, handle_json_response};
/// Optionally sign and encrypt a message using asymmetric encryption
pub fn waku_encode_asymmetric(
message: &WakuMessage,
public_key: &PublicKey,
signing_key: Option<&SecretKey>,
) -> Result<WakuMessage> {
let pk = hex::encode(public_key.serialize_uncompressed());
let sk = signing_key
.map(|signing_key| hex::encode(signing_key.secret_bytes()))
.unwrap_or_default();
let message_ptr = CString::new(
serde_json::to_string(&message)
.expect("WakuMessages should always be able to success serializing"),
)
.expect("CString should build properly from the serialized waku message")
.into_raw();
let pk_ptr = CString::new(pk)
.expect("CString should build properly from hex encoded public key")
.into_raw();
let sk_ptr = CString::new(sk)
.expect("CString should build properly from hex encoded signing key")
.into_raw();
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_encode_asymmetric(
message_ptr,
pk_ptr,
sk_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(message_ptr));
drop(CString::from_raw(pk_ptr));
drop(CString::from_raw(sk_ptr));
out
};
handle_json_response(code, &result)
}
/// Optionally sign and encrypt a message using symmetric encryption
pub fn waku_encode_symmetric(
message: &WakuMessage,
symmetric_key: &Key<Aes256Gcm>,
signing_key: Option<&SecretKey>,
) -> Result<WakuMessage> {
let symk = hex::encode(symmetric_key.as_slice());
let sk = signing_key
.map(|signing_key| hex::encode(signing_key.secret_bytes()))
.unwrap_or_default();
let message_ptr = CString::new(
serde_json::to_string(&message)
.expect("WakuMessages should always be able to success serializing"),
)
.expect("CString should build properly from the serialized waku message")
.into_raw();
let symk_ptr = CString::new(symk)
.expect("CString should build properly from hex encoded symmetric key")
.into_raw();
let sk_ptr = CString::new(sk)
.expect("CString should build properly from hex encoded signing key")
.into_raw();
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_encode_symmetric(
message_ptr,
symk_ptr,
sk_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(message_ptr));
drop(CString::from_raw(symk_ptr));
drop(CString::from_raw(sk_ptr));
out
};
handle_json_response(code, &result)
}
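A minimal usage sketch for the asymmetric variant, under the same re-export assumption as above:

```rust
use secp256k1::{PublicKey, SecretKey};
use waku_bindings::{waku_encode_asymmetric, WakuMessage};

// Encrypt a message for a recipient's secp256k1 public key before publishing it;
// pass Some(signing_key) to also sign the payload, or None to skip signing.
fn encrypt_for_recipient(
    message: &WakuMessage,
    recipient: &PublicKey,
    signing_key: Option<&SecretKey>,
) -> Result<WakuMessage, String> {
    waku_encode_asymmetric(message, recipient, signing_key)
}
```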

View File

@ -0,0 +1,123 @@
//! Waku message [event](https://rfc.vac.dev/spec/36/#events) related items
//!
//! Asynchronous events require a callback to be registered.
//! An example of an asynchronous event that might be emitted is receiving a message.
//! When an event is emitted, this callback will be triggered receiving a [`Signal`]
// std
use std::ffi::{c_char, c_int, c_void, CStr};
use std::ops::Deref;
use std::sync::Mutex;
// crates
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
// internal
use crate::general::{WakuMessage, WakuPubSubTopic};
use crate::MessageId;
/// Event signal
#[derive(Serialize, Deserialize)]
pub struct Signal {
/// Type of signal being emitted. Currently, only message is available
#[serde(alias = "type")]
_type: String,
/// Format depends on the type of signal
event: Event,
}
impl Signal {
pub fn event(&self) -> &Event {
&self.event
}
}
/// Waku event
/// For now just WakuMessage is supported
#[non_exhaustive]
#[derive(Serialize, Deserialize)]
#[serde(untagged, rename_all = "camelCase")]
pub enum Event {
WakuMessage(WakuMessageEvent),
Unrecognized(serde_json::Value),
}
/// Type of `event` field for a `message` event
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct WakuMessageEvent {
/// The pubsub topic on which the message was received
pubsub_topic: WakuPubSubTopic,
/// The message id
message_id: MessageId,
/// The message in [`WakuMessage`] format
waku_message: WakuMessage,
}
impl WakuMessageEvent {
pub fn pubsub_topic(&self) -> &WakuPubSubTopic {
&self.pubsub_topic
}
pub fn message_id(&self) -> &String {
&self.message_id
}
pub fn waku_message(&self) -> &WakuMessage {
&self.waku_message
}
}
/// Shared callback slot. Callbacks are registered here so they can be accessed by the extern "C" wrapper callback below
#[allow(clippy::type_complexity)]
static CALLBACK: Lazy<Mutex<Box<dyn FnMut(Signal) + Send + Sync>>> =
Lazy::new(|| Mutex::new(Box::new(|_| {})));
/// Register global callback
fn set_callback<F: FnMut(Signal) + Send + Sync + 'static>(f: F) {
*CALLBACK.lock().unwrap() = Box::new(f);
}
/// Wrapper callback; it transforms the `*const c_char` into a [`Signal`]
/// and executes the [`CALLBACK`] function with it
extern "C" fn callback(_ret_code: c_int, data: *const c_char, _user_data: *mut c_void) {
let raw_response = unsafe { CStr::from_ptr(data) }
.to_str()
.expect("Not null ptr");
let data: Signal = serde_json::from_str(raw_response).expect("Parsing signal to succeed");
(CALLBACK
.deref()
.lock()
.expect("Access to the shared callback")
.as_mut())(data)
}
/// Register callback to act as event handler and receive application signals,
/// which are used to react to asynchronous events in Waku
pub fn waku_set_event_callback<F: FnMut(Signal) + Send + Sync + 'static>(f: F) {
set_callback(f);
unsafe { waku_sys::waku_set_event_callback(Some(callback)) };
}
#[cfg(test)]
mod tests {
use crate::events::waku_set_event_callback;
use crate::{Event, Signal};
// TODO: how to actually send a signal and check if the callback is run?
#[test]
fn set_event_callback() {
waku_set_event_callback(|_signal| {});
}
#[test]
fn deserialize_signal() {
let s = "{\"type\":\"message\",\"event\":{\"messageId\":\"0x26ff3d7fbc950ea2158ce62fd76fd745eee0323c9eac23d0713843b0f04ea27c\",\"pubsubTopic\":\"/waku/2/default-waku/proto\",\"wakuMessage\":{\"payload\":\"SGkgZnJvbSDwn6aAIQ==\",\"contentTopic\":\"/toychat/2/huilong/proto\",\"timestamp\":1665580926660}}}";
let _: Signal = serde_json::from_str(s).unwrap();
}
#[test]
fn deserialize_event() {
let e = "{\"messageId\":\"0x26ff3d7fbc950ea2158ce62fd76fd745eee0323c9eac23d0713843b0f04ea27c\",\"pubsubTopic\":\"/waku/2/default-waku/proto\",\"wakuMessage\":{\"payload\":\"SGkgZnJvbSDwn6aAIQ==\",\"contentTopic\":\"/toychat/2/huilong/proto\",\"timestamp\":1665580926660}}";
let _: Event = serde_json::from_str(e).unwrap();
}
}
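In applications the registered callback is typically a match over the event type, as the toy-chat diff earlier in this compare does; a small sketch:

```rust
use waku_bindings::{waku_set_event_callback, Event};

// Register a handler that reacts to incoming messages and logs anything unexpected.
fn listen_for_messages() {
    waku_set_event_callback(|signal| match signal.event() {
        Event::WakuMessage(event) => {
            println!(
                "message {} on {} ({} payload bytes)",
                event.message_id(),
                event.pubsub_topic(),
                event.waku_message().payload().len()
            );
        }
        Event::Unrecognized(data) => eprintln!("unrecognized waku event: {data}"),
        _ => {}
    });
}
```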

View File

@ -1,142 +0,0 @@
// std
use crate::general::waku_decode::WakuDecode;
use crate::general::Result;
use std::borrow::Cow;
use std::fmt::{Display, Formatter};
use std::str::FromStr;
use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer};
use sscanf::{scanf, RegexRepresentation};
/// WakuMessage encoding scheme
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub enum Encoding {
#[default]
Proto,
Rlp,
Rfc26,
Unknown(String),
}
impl Display for Encoding {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let s = match self {
Encoding::Proto => "proto",
Encoding::Rlp => "rlp",
Encoding::Rfc26 => "rfc26",
Encoding::Unknown(value) => value,
};
f.write_str(s)
}
}
impl FromStr for Encoding {
type Err = std::io::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"proto" => Ok(Self::Proto),
"rlp" => Ok(Self::Rlp),
"rfc26" => Ok(Self::Rfc26),
encoding => Ok(Self::Unknown(encoding.to_string())),
}
}
}
impl RegexRepresentation for Encoding {
const REGEX: &'static str = r"\w";
}
/// A waku content topic `/{application_name}/{version}/{content_topic_name}/{encoding}`
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct WakuContentTopic {
pub application_name: Cow<'static, str>,
pub version: Cow<'static, str>,
pub content_topic_name: Cow<'static, str>,
pub encoding: Encoding,
}
impl WakuContentTopic {
pub const fn new(
application_name: &'static str,
version: &'static str,
content_topic_name: &'static str,
encoding: Encoding,
) -> Self {
Self {
application_name: Cow::Borrowed(application_name),
version: Cow::Borrowed(version),
content_topic_name: Cow::Borrowed(content_topic_name),
encoding,
}
}
pub fn join_content_topics(topics: Vec<WakuContentTopic>) -> String {
topics
.iter()
.map(|topic| topic.to_string())
.collect::<Vec<_>>()
.join(",")
}
}
impl WakuDecode for WakuContentTopic {
fn decode(input: &str) -> Result<Self> {
Ok(serde_json::from_str(input).expect("could not parse store resp"))
}
}
impl FromStr for WakuContentTopic {
type Err = String;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
if let Ok((application_name, version, content_topic_name, encoding)) =
scanf!(s, "/{}/{}/{}/{:/.+?/}", String, String, String, Encoding)
{
Ok(WakuContentTopic {
application_name: Cow::Owned(application_name),
version: Cow::Owned(version),
content_topic_name: Cow::Owned(content_topic_name),
encoding,
})
} else {
Err(
format!(
"Wrong pub-sub topic format. Should be `/{{application-name}}/{{version-of-the-application}}/{{content-topic-name}}/{{encoding}}`. Got: {s}"
)
)
}
}
}
impl Display for WakuContentTopic {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"/{}/{}/{}/{}",
self.application_name, self.version, self.content_topic_name, self.encoding
)
}
}
impl Serialize for WakuContentTopic {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
self.to_string().serialize(serializer)
}
}
impl<'de> Deserialize<'de> for WakuContentTopic {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let as_string: String = String::deserialize(deserializer)?;
as_string
.parse::<WakuContentTopic>()
.map_err(D::Error::custom)
}
}
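The format above in practice; a small sketch using the master-side types (the crate is imported as `waku`, as in the examples):

```rust
use std::str::FromStr;

use waku::{Encoding, WakuContentTopic};

fn content_topic_roundtrip() {
    // Build a topic from its parts...
    let topic = WakuContentTopic::new("toy-chat", "2", "huilong", Encoding::Proto);
    assert_eq!(topic.to_string(), "/toy-chat/2/huilong/proto");

    // ...and parse the same string back into a typed topic.
    let parsed = WakuContentTopic::from_str("/toy-chat/2/huilong/proto")
        .expect("well-formed content topic should parse");
    assert_eq!(parsed, topic);
}
```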

View File

@ -1,64 +0,0 @@
use crate::general::waku_decode::WakuDecode;
use crate::general::Result;
use std::convert::TryFrom;
use std::str;
use waku_sys::{RET_ERR, RET_MISSING_CALLBACK, RET_OK};
#[derive(Debug, Clone, Default, PartialEq)]
pub enum LibwakuResponse {
Success(Option<String>),
Failure(String),
MissingCallback,
#[default]
Undefined,
}
impl TryFrom<(u32, &str)> for LibwakuResponse {
type Error = String;
fn try_from((ret_code, response): (u32, &str)) -> std::result::Result<Self, Self::Error> {
let opt_value = Some(response.to_string()).filter(|s| !s.is_empty());
match ret_code {
RET_OK => Ok(LibwakuResponse::Success(opt_value)),
RET_ERR => Ok(LibwakuResponse::Failure(format!(
"waku error: {}",
response
))),
RET_MISSING_CALLBACK => Ok(LibwakuResponse::MissingCallback),
_ => Err(format!("undefined return code {}", ret_code)),
}
}
}
/// Used in cases where the FFI call doesn't return additional information in the
/// callback. Instead, it returns RET_OK, RET_ERR, etc.
pub fn handle_no_response(code: i32, result: LibwakuResponse) -> Result<()> {
if result == LibwakuResponse::Undefined && code as u32 == RET_OK {
// Some functions will only execute the callback on error
return Ok(());
}
match result {
LibwakuResponse::Success(_) => Ok(()),
LibwakuResponse::Failure(v) => Err(v),
LibwakuResponse::MissingCallback => panic!("callback is required"),
LibwakuResponse::Undefined => panic!(
"undefined ffi state: code({}) was returned but callback was not executed",
code
),
}
}
/// Used in cases where the FFI function returns a code (RET_OK, RET_ERR, etc) plus additional
/// information, i.e. LibwakuResponse
pub fn handle_response<F: WakuDecode>(code: i32, result: LibwakuResponse) -> Result<F> {
match result {
LibwakuResponse::Success(v) => WakuDecode::decode(&v.unwrap_or_default()),
LibwakuResponse::Failure(v) => Err(v),
LibwakuResponse::MissingCallback => panic!("callback is required"),
LibwakuResponse::Undefined => panic!(
"undefined ffi state: code({}) was returned but callback was not executed",
code
),
}
}
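A sketch of how these helpers fit together for an FFI call that returns no payload, assuming the module is reachable at the path below:

```rust
use waku::general::libwaku_response::{handle_no_response, LibwakuResponse};

// Turn a raw (return code, callback payload) pair coming back from libwaku into a
// typed response, then handle a call that is not expected to return extra data.
fn check_ffi_call(ret_code: u32, response: &str) -> Result<(), String> {
    let result = LibwakuResponse::try_from((ret_code, response))?;
    handle_no_response(ret_code as i32, result)
}
```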

View File

@ -1,30 +0,0 @@
use crate::general::waku_decode::WakuDecode;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::hash::Hash;
use std::str::FromStr;
/// Waku message hash, hex encoded sha256 digest of the message
#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, Clone, Hash)]
pub struct MessageHash(String);
impl FromStr for MessageHash {
type Err = String;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(MessageHash(s.to_string()))
}
}
impl WakuDecode for MessageHash {
fn decode(input: &str) -> Result<Self, String> {
MessageHash::from_str(input)
}
}
// Implement the Display trait
impl fmt::Display for MessageHash {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}

View File

@ -1,73 +1,94 @@
//! Waku [general](https://rfc.vac.dev/spec/36/#general) types //! Waku [general](https://rfc.vac.dev/spec/36/#general) types
pub mod contenttopic; // std
pub mod libwaku_response; use std::borrow::Cow;
pub mod messagehash; use std::fmt::{Display, Formatter};
pub mod pubsubtopic; use std::str::FromStr;
pub mod time;
pub mod waku_decode;
// crates // crates
use crate::general::time::get_now_in_nanosecs; use aes_gcm::{Aes256Gcm, Key};
use contenttopic::WakuContentTopic; use base64::Engine;
use serde::{Deserialize, Serialize}; use secp256k1::{ecdsa::Signature, PublicKey, SecretKey};
use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer};
use serde_aux::prelude::*; use serde_aux::prelude::*;
use sscanf::{scanf, RegexRepresentation};
// internal
use crate::decrypt::{waku_decode_asymmetric, waku_decode_symmetric};
use crate::encrypt::{waku_encode_asymmetric, waku_encode_symmetric};
/// Waku message version /// Waku message version
pub type WakuMessageVersion = usize; pub type WakuMessageVersion = usize;
/// Base58 encoded peer id
pub type PeerId = String;
/// Waku message id, hex encoded sha256 digest of the message
pub type MessageId = String;
/// Waku pubsub topic
pub type WakuPubSubTopic = String;
/// Protocol identifiers
#[non_exhaustive]
#[derive(Debug, Clone, Copy)]
pub enum ProtocolId {
Store,
Lightpush,
Filter,
Relay,
}
impl ProtocolId {
pub fn as_string_with_version(&self, version: &str) -> String {
format!("{self}/{version}")
}
}
impl Display for ProtocolId {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let tag = match self {
ProtocolId::Store => "/vac/waku/store",
ProtocolId::Lightpush => "/vac/waku/lightpush",
ProtocolId::Filter => "/vac/waku/filter",
ProtocolId::Relay => "/vac/waku/relay",
#[allow(unreachable_patterns)]
_ => unreachable!(),
};
write!(f, "{tag}")
}
}
/// Waku response, just a `Result` with an `String` error.
pub type Result<T> = std::result::Result<T, String>; pub type Result<T> = std::result::Result<T, String>;
// TODO: Properly type and deserialize payload from base64 encoded string // TODO: Properly type and deserialize payload from base64 encoded string
/// Waku message in JSON format. /// Waku message in JSON format.
/// as per the [specification](https://rfc.vac.dev/spec/36/#jsonmessage-type) /// as per the [specification](https://rfc.vac.dev/spec/36/#jsonmessage-type)
#[derive(Clone, Serialize, Deserialize, Debug, Default)] #[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct WakuMessage { pub struct WakuMessage {
#[serde(with = "base64_serde", default = "Vec::new")] #[serde(with = "base64_serde", default = "Vec::new")]
pub payload: Vec<u8>, payload: Vec<u8>,
/// The content topic to be set on the message /// The content topic to be set on the message
pub content_topic: WakuContentTopic, content_topic: WakuContentTopic,
// TODO: check if missing default should be 0
/// The Waku Message version number /// The Waku Message version number
#[serde(default)] #[serde(default)]
pub version: WakuMessageVersion, version: WakuMessageVersion,
/// Unix timestamp in nanoseconds /// Unix timestamp in nanoseconds
#[serde(deserialize_with = "deserialize_number_from_string")] #[serde(deserialize_with = "deserialize_number_from_string")]
pub timestamp: u64, timestamp: usize,
#[serde(with = "base64_serde", default = "Vec::new")] #[serde(with = "base64_serde", default = "Vec::new")]
pub meta: Vec<u8>, meta: Vec<u8>,
#[serde(default)] #[serde(default)]
pub ephemeral: bool, ephemeral: bool,
// TODO: implement RLN fields // TODO: implement RLN fields
#[serde(flatten)] #[serde(flatten)]
_extras: serde_json::Value, _extras: serde_json::Value,
} }
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase")]
pub struct WakuStoreRespMessage {
// #[serde(with = "base64_serde", default = "Vec::new")]
pub payload: Vec<u8>,
/// The content topic to be set on the message
// #[serde(rename = "contentTopic")]
pub content_topic: String,
// #[serde(with = "base64_serde", default = "Vec::new")]
pub meta: Vec<u8>,
/// The Waku Message version number
#[serde(default)]
pub version: WakuMessageVersion,
/// Unix timestamp in nanoseconds
pub timestamp: usize,
#[serde(default)]
pub ephemeral: bool,
pub proof: Vec<u8>,
}
impl WakuMessage { impl WakuMessage {
pub fn new<PAYLOAD: AsRef<[u8]>, META: AsRef<[u8]>>( pub fn new<PAYLOAD: AsRef<[u8]>, META: AsRef<[u8]>>(
payload: PAYLOAD, payload: PAYLOAD,
content_topic: WakuContentTopic, content_topic: WakuContentTopic,
version: WakuMessageVersion, version: WakuMessageVersion,
timestamp: usize,
meta: META, meta: META,
ephemeral: bool, ephemeral: bool,
) -> Self { ) -> Self {
@ -78,7 +99,7 @@ impl WakuMessage {
payload, payload,
content_topic, content_topic,
version, version,
timestamp: get_now_in_nanosecs(), timestamp,
meta, meta,
ephemeral, ephemeral,
_extras: Default::default(), _extras: Default::default(),
@ -88,11 +109,413 @@ impl WakuMessage {
pub fn payload(&self) -> &[u8] { pub fn payload(&self) -> &[u8] {
&self.payload &self.payload
} }
pub fn content_topic(&self) -> &WakuContentTopic {
&self.content_topic
}
pub fn version(&self) -> WakuMessageVersion {
self.version
}
pub fn timestamp(&self) -> usize {
self.timestamp
}
pub fn meta(&self) -> &[u8] {
&self.meta
}
pub fn ephemeral(&self) -> bool {
self.ephemeral
}
/// Optionally sign and encrypt a message using symmetric encryption
pub fn encode_symmetric(
&self,
symmetric_key: &Key<Aes256Gcm>,
signing_key: Option<&SecretKey>,
) -> Result<WakuMessage> {
waku_encode_symmetric(self, symmetric_key, signing_key)
}
/// Try decode the message with an expected symmetric key
///
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_decode_symmetricchar-messagejson-char-symmetrickey)
pub fn try_decode_symmetric(&self, symmetric_key: &Key<Aes256Gcm>) -> Result<DecodedPayload> {
waku_decode_symmetric(self, symmetric_key)
}
/// Optionally sign and encrypt a message using asymmetric encryption
pub fn encode_asymmetric(
&self,
public_key: &PublicKey,
signing_key: Option<&SecretKey>,
) -> Result<WakuMessage> {
waku_encode_asymmetric(self, public_key, signing_key)
}
/// Try decode the message with an expected asymmetric key
///
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_decode_asymmetricchar-messagejson-char-privatekey)
pub fn try_decode_asymmetric(&self, asymmetric_key: &SecretKey) -> Result<DecodedPayload> {
waku_decode_asymmetric(self, asymmetric_key)
}
} }
impl WakuStoreRespMessage { /// A payload once decoded, used when a received Waku Message is encrypted
pub fn payload(&self) -> &[u8] { #[derive(Deserialize, Debug)]
&self.payload #[serde(rename_all = "camelCase")]
pub struct DecodedPayload {
/// Public key that signed the message (optional), hex encoded with 0x prefix
#[serde(deserialize_with = "deserialize_optional_pk", default)]
public_key: Option<PublicKey>,
/// Message signature (optional), hex encoded with 0x prefix
#[serde(deserialize_with = "deserialize_optional_signature", default)]
signature: Option<Signature>,
/// Decrypted message payload base64 encoded
#[serde(with = "base64_serde")]
data: Vec<u8>,
/// Padding base64 encoded
#[serde(with = "base64_serde")]
padding: Vec<u8>,
}
impl DecodedPayload {
pub fn public_key(&self) -> Option<&PublicKey> {
self.public_key.as_ref()
}
pub fn signature(&self) -> Option<&Signature> {
self.signature.as_ref()
}
pub fn data(&self) -> &[u8] {
&self.data
}
pub fn padding(&self) -> &[u8] {
&self.padding
}
}
/// The content topic of a Waku message
/// as per the [specification](https://rfc.vac.dev/spec/36/#contentfilter-type)
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct LegacyContentFilter {
/// The content topic of a Waku message
content_topic: WakuContentTopic,
}
impl LegacyContentFilter {
pub fn new(content_topic: WakuContentTopic) -> Self {
Self { content_topic }
}
pub fn content_topic(&self) -> &WakuContentTopic {
&self.content_topic
}
}
/// The criteria to create subscription to a light node in JSON Format
/// as per the [specification](https://rfc.vac.dev/spec/36/#filtersubscription-type)
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct LegacyFilterSubscription {
/// Array of [`ContentFilter`] being subscribed to / unsubscribed from
content_filters: Vec<ContentFilter>,
/// Optional pubsub topic
pubsub_topic: Option<WakuPubSubTopic>,
}
impl LegacyFilterSubscription {
pub fn new(content_filters: Vec<ContentFilter>, pubsub_topic: Option<WakuPubSubTopic>) -> Self {
Self {
content_filters,
pubsub_topic,
}
}
pub fn content_filters(&self) -> &[ContentFilter] {
&self.content_filters
}
pub fn pubsub_topic(&self) -> Option<&WakuPubSubTopic> {
self.pubsub_topic.as_ref()
}
}
/// The criteria to create subscription to a filter full node matching a content filter.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ContentFilter {
/// optional if using autosharding, mandatory if using static or named sharding.
pubsub_topic: Option<WakuPubSubTopic>,
/// mandatory, at least one required, with a max of 10
content_topics: Vec<WakuContentTopic>,
}
impl ContentFilter {
pub fn new(
pubsub_topic: Option<WakuPubSubTopic>,
content_topics: Vec<WakuContentTopic>,
) -> Self {
Self {
content_topics,
pubsub_topic,
}
}
pub fn content_topics(&self) -> &[WakuContentTopic] {
&self.content_topics
}
pub fn pubsub_topic(&self) -> Option<&WakuPubSubTopic> {
self.pubsub_topic.as_ref()
}
}
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FilterSubscriptionDetail {
#[serde(rename = "peerID")]
peer_id: PeerId,
content_topics: Vec<WakuContentTopic>,
pubsub_topic: WakuPubSubTopic,
}
impl FilterSubscriptionDetail {
pub fn new(
peer_id: PeerId,
content_topics: Vec<WakuContentTopic>,
pubsub_topic: WakuPubSubTopic,
) -> Self {
Self {
peer_id,
content_topics,
pubsub_topic,
}
}
pub fn peer_id(&self) -> &PeerId {
&self.peer_id
}
pub fn content_topics(&self) -> &[WakuContentTopic] {
&self.content_topics
}
pub fn pubsub_topic(&self) -> &WakuPubSubTopic {
&self.pubsub_topic
}
}
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FilterSubscriptionResult {
subscriptions: Vec<FilterSubscriptionDetail>,
error: Option<String>,
}
impl FilterSubscriptionResult {
pub fn new(subscriptions: Vec<FilterSubscriptionDetail>, error: Option<String>) -> Self {
Self {
subscriptions,
error,
}
}
pub fn subscriptions(&self) -> &[FilterSubscriptionDetail] {
&self.subscriptions
}
pub fn error(&self) -> &Option<String> {
&self.error
}
}
/// Criteria used to retrieve historical messages
#[derive(Clone, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct StoreQuery {
/// The pubsub topic on which messages are published
pub pubsub_topic: Option<WakuPubSubTopic>,
/// Array of [`WakuContentTopic`] to query for historical messages
pub content_topics: Vec<WakuContentTopic>,
/// The inclusive lower bound on the timestamp of queried messages.
/// This field holds the Unix epoch time in nanoseconds
pub start_time: Option<usize>,
/// The inclusive upper bound on the timestamp of queried messages.
/// This field holds the Unix epoch time in nanoseconds
pub end_time: Option<usize>,
/// Paging information in [`PagingOptions`] format
pub paging_options: Option<PagingOptions>,
}
/// The response received after doing a query to a store node
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct StoreResponse {
/// Array of retrieved historical messages in [`WakuMessage`] format
#[serde(default)]
pub messages: Vec<WakuMessage>,
/// Paging information in [`PagingOptions`] format from which to resume further historical queries
pub paging_options: Option<PagingOptions>,
}
impl StoreResponse {
pub fn messages(&self) -> &[WakuMessage] {
&self.messages
}
pub fn paging_options(&self) -> Option<&PagingOptions> {
self.paging_options.as_ref()
}
}
/// Paging information
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct PagingOptions {
/// Number of messages to retrieve per page
pub page_size: usize,
/// Message Index from which to perform pagination.
/// If not included and forward is set to `true`, paging will be performed from the beginning of the list.
/// If not included and forward is set to `false`, paging will be performed from the end of the list
pub cursor: Option<MessageIndex>,
/// `true` if paging forward, `false` if paging backward
pub forward: bool,
}
/// Pagination index type
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MessageIndex {
/// Hash of the message at this [`MessageIndex`]
pub digest: String,
/// UNIX timestamp in nanoseconds at which the message at this [`MessageIndex`] was received
pub receiver_time: usize,
/// UNIX timestamp in nanoseconds at which the message is generated by its sender
pub sender_time: usize,
/// The pubsub topic of the message at this [`MessageIndex`]
pub pubsub_topic: WakuPubSubTopic,
}
/// WakuMessage encoding scheme
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Encoding {
Proto,
Rlp,
Rfc26,
Unknown(String),
}
impl Display for Encoding {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let s = match self {
Encoding::Proto => "proto",
Encoding::Rlp => "rlp",
Encoding::Rfc26 => "rfc26",
Encoding::Unknown(value) => value,
};
f.write_str(s)
}
}
impl FromStr for Encoding {
type Err = std::io::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"proto" => Ok(Self::Proto),
"rlp" => Ok(Self::Rlp),
"rfc26" => Ok(Self::Rfc26),
encoding => Ok(Self::Unknown(encoding.to_string())),
}
}
}
impl RegexRepresentation for Encoding {
const REGEX: &'static str = r"\w";
}
/// A waku content topic `/{application_name}/{version}/{content_topic_name}/{encoding}`
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct WakuContentTopic {
pub application_name: Cow<'static, str>,
pub version: Cow<'static, str>,
pub content_topic_name: Cow<'static, str>,
pub encoding: Encoding,
}
impl WakuContentTopic {
pub const fn new(
application_name: &'static str,
version: &'static str,
content_topic_name: &'static str,
encoding: Encoding,
) -> Self {
Self {
application_name: Cow::Borrowed(application_name),
version: Cow::Borrowed(version),
content_topic_name: Cow::Borrowed(content_topic_name),
encoding,
}
}
}
impl FromStr for WakuContentTopic {
type Err = String;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
if let Ok((application_name, version, content_topic_name, encoding)) =
scanf!(s, "/{}/{}/{}/{:/.+?/}", String, String, String, Encoding)
{
Ok(WakuContentTopic {
application_name: Cow::Owned(application_name),
version: Cow::Owned(version),
content_topic_name: Cow::Owned(content_topic_name),
encoding,
})
} else {
Err(
format!(
"Wrong pub-sub topic format. Should be `/{{application-name}}/{{version-of-the-application}}/{{content-topic-name}}/{{encoding}}`. Got: {s}"
)
)
}
}
}
impl Display for WakuContentTopic {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"/{}/{}/{}/{}",
self.application_name, self.version, self.content_topic_name, self.encoding
)
}
}
impl Serialize for WakuContentTopic {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
self.to_string().serialize(serializer)
}
}
impl<'de> Deserialize<'de> for WakuContentTopic {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let as_string: String = String::deserialize(deserializer)?;
as_string
.parse::<WakuContentTopic>()
.map_err(D::Error::custom)
} }
} }
@ -121,13 +544,93 @@ mod base64_serde {
} }
} }
pub fn deserialize_optional_pk<'de, D>(
deserializer: D,
) -> std::result::Result<Option<PublicKey>, D::Error>
where
D: Deserializer<'de>,
{
let base64_str: Option<String> = Option::<String>::deserialize(deserializer)?;
base64_str
.map(|base64_str| {
let raw_bytes = base64::engine::general_purpose::STANDARD
.decode(base64_str)
.map_err(D::Error::custom)?;
PublicKey::from_slice(&raw_bytes).map_err(D::Error::custom)
})
.transpose()
}
pub fn deserialize_optional_signature<'de, D>(
deserializer: D,
) -> std::result::Result<Option<Signature>, D::Error>
where
D: Deserializer<'de>,
{
let hex_str: Option<String> = Option::<String>::deserialize(deserializer)?;
hex_str
.map(|hex_str| {
let raw_bytes = hex::decode(hex_str.strip_prefix("0x").unwrap_or(&hex_str))
.map_err(D::Error::custom)?;
if ![64, 65].contains(&raw_bytes.len()) {
return Err(D::Error::custom(
"Invalid signature, only 64 or 65 bytes len are supported",
));
}
Signature::from_compact(&raw_bytes[..64]).map_err(D::Error::custom)
})
.transpose()
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::WakuPubSubTopic;
use secp256k1::{rand, Secp256k1};
use std::time::SystemTime;
#[test]
fn parse_waku_topic() {
let s = "/waku/2/default-waku/proto";
let _: WakuPubSubTopic = s.parse().unwrap();
}
#[test] #[test]
fn deserialize_waku_message() { fn deserialize_waku_message() {
let message = "{\"payload\":\"SGkgZnJvbSDwn6aAIQ==\",\"contentTopic\":\"/toychat/2/huilong/proto\",\"timestamp\":1665580926660,\"ephemeral\":true,\"meta\":\"SGkgZnJvbSDwn6aAIQ==\"}"; let message = "{\"payload\":\"SGkgZnJvbSDwn6aAIQ==\",\"contentTopic\":\"/toychat/2/huilong/proto\",\"timestamp\":1665580926660,\"ephemeral\":true,\"meta\":\"SGkgZnJvbSDwn6aAIQ==\"}";
let _: WakuMessage = serde_json::from_str(message).unwrap(); let _: WakuMessage = serde_json::from_str(message).unwrap();
} }
#[test]
fn encode_decode() {
let content_topic = WakuContentTopic::new("hello", "2", "world", Encoding::Proto);
let message = WakuMessage::new(
"hello",
content_topic,
1,
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_millis()
.try_into()
.unwrap(),
Vec::new(),
false,
);
let secp = Secp256k1::new();
let signing_key = SecretKey::new(&mut rand::thread_rng());
let encrypt_key = SecretKey::new(&mut rand::thread_rng());
let public_key = PublicKey::from_secret_key(&secp, &encrypt_key);
let encoded_message = message
.encode_asymmetric(&public_key, Some(&signing_key))
.expect("could not encode");
let decoded_message = encoded_message
.try_decode_asymmetric(&encrypt_key)
.expect("could not decode");
assert!(message.payload() != encoded_message.payload());
assert!(encoded_message.version() == 1);
assert!(message.payload() == decoded_message.data());
}
} }
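// Illustrative sketch (not part of the crate): building a content topic and parsing
// one back from its canonical `/{application-name}/{version}/{content-topic-name}/{encoding}` form.
#[cfg(test)]
mod content_topic_sketch {
    use super::*;

    #[test]
    fn build_and_parse_content_topic() {
        let topic = WakuContentTopic::new("toychat", "2", "huilong", Encoding::Proto);
        assert_eq!(topic.to_string(), "/toychat/2/huilong/proto");

        let parsed: WakuContentTopic = "/toychat/2/huilong/proto".parse().unwrap();
        assert_eq!(parsed, topic);
    }
}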

View File

@ -1,19 +0,0 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PubsubTopic(String);
impl PubsubTopic {
// Constructor to create a new MyString
pub fn new(value: &str) -> Self {
PubsubTopic(value.to_string())
}
}
// to allow conversion from `PubsubTopic` to `String`
impl From<&PubsubTopic> for String {
fn from(topic: &PubsubTopic) -> Self {
topic.0.to_string()
}
}
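// Illustrative sketch (not part of the crate): `PubsubTopic` is a thin newtype over
// `String`; conversion back to a string goes through the `From<&PubsubTopic>` impl above.
#[cfg(test)]
mod pubsub_topic_sketch {
    use super::*;

    #[test]
    fn wraps_and_unwraps() {
        let topic = PubsubTopic::new("/waku/2/rs/16/32");
        assert_eq!(String::from(&topic), "/waku/2/rs/16/32");
    }
}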

View File

@ -1,7 +0,0 @@
use std::time::{SystemTime, UNIX_EPOCH};
pub fn get_now_in_nanosecs() -> u64 {
let now = SystemTime::now();
let since_epoch = now.duration_since(UNIX_EPOCH).expect("Time went backwards");
since_epoch.as_secs() * 1_000_000_000 + since_epoch.subsec_nanos() as u64
}
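// Illustrative sketch (not part of the crate): the helper returns Unix time in
// nanoseconds, the resolution expected by `WakuMessage`'s timestamp field.
#[cfg(test)]
mod time_sketch {
    use super::*;

    #[test]
    fn returns_nanosecond_resolution() {
        let now = get_now_in_nanosecs();
        // Any instant after September 2020, expressed in nanoseconds, exceeds this bound.
        assert!(now > 1_600_000_000_000_000_000);
    }
}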

View File

@ -1,26 +0,0 @@
use crate::general::Result;
use multiaddr::Multiaddr;
// Define the WakuDecode trait
pub trait WakuDecode: Sized {
fn decode(input: &str) -> Result<Self>;
}
impl WakuDecode for String {
fn decode(input: &str) -> Result<Self> {
Ok(input.to_string())
}
}
pub fn decode<T: WakuDecode>(input: String) -> Result<T> {
T::decode(input.as_str())
}
impl WakuDecode for Vec<Multiaddr> {
fn decode(input: &str) -> Result<Self> {
input
.split(',')
.map(|s| s.trim().parse::<Multiaddr>().map_err(|err| err.to_string()))
.collect::<Result<Vec<Multiaddr>>>() // Collect results into a Vec
.map_err(|err| format!("could not parse Multiaddr: {}", err))
}
}
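// Illustrative sketch (not part of the crate): implementing `WakuDecode` for a
// hypothetical JSON-backed type, reusing `serde_json` as the crate does elsewhere.
#[allow(dead_code)]
#[derive(serde::Deserialize)]
struct PeerCountSketch {
    count: usize,
}

impl WakuDecode for PeerCountSketch {
    fn decode(input: &str) -> Result<Self> {
        serde_json::from_str(input).map_err(|err| err.to_string())
    }
}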

View File

@ -1,23 +1,25 @@
//! # Waku //! # Waku
//! //!
//! Implementation on top of [`waku-bindings`](https://rfc.vac.dev/spec/36/) //! Implementation on top of [`waku-bindings`](https://rfc.vac.dev/spec/36/)
pub mod general; mod decrypt;
mod macros; mod encrypt;
pub mod node; mod events;
mod general;
// Re-export the LibwakuResponse type to make it accessible outside this module mod node;
pub use general::libwaku_response::LibwakuResponse; mod utils;
// Required so functions inside libwaku can call RLN functions even if we
// use it within the bindings functions
#[allow(clippy::single_component_path_imports)]
#[allow(unused)]
use rln;
pub use node::{ pub use node::{
waku_create_content_topic, waku_new, Initialized, Key, Multiaddr, PublicKey, RLNConfig, waku_create_content_topic, waku_default_pubsub_topic, waku_discv5_update_bootnodes,
Running, SecretKey, WakuEvent, WakuMessageEvent, WakuNodeConfig, WakuNodeHandle, waku_dns_discovery, waku_new, Aes256Gcm, DnsInfo, GossipSubParams, Initialized, Key, Multiaddr,
Protocol, PublicKey, Running, SecretKey, WakuLogLevel, WakuNodeConfig, WakuNodeHandle,
WakuPeerData, WakuPeers, WebsocketParams,
}; };
pub use general::contenttopic::{Encoding, WakuContentTopic}; pub use general::{
pub use general::{messagehash::MessageHash, Result, WakuMessage, WakuMessageVersion}; ContentFilter, DecodedPayload, Encoding, FilterSubscriptionDetail, FilterSubscriptionResult,
LegacyContentFilter, LegacyFilterSubscription, MessageId, MessageIndex, PagingOptions, PeerId,
ProtocolId, Result, StoreQuery, StoreResponse, WakuContentTopic, WakuMessage,
WakuMessageVersion, WakuPubSubTopic,
};
pub use events::{waku_set_event_callback, Event, Signal, WakuMessageEvent};

View File

@ -1,73 +0,0 @@
use crate::general::libwaku_response::LibwakuResponse;
use std::{slice, str};
use waku_sys::WakuCallBack;
unsafe extern "C" fn trampoline<F>(
ret_code: ::std::os::raw::c_int,
data: *const ::std::os::raw::c_char,
data_len: usize,
user_data: *mut ::std::os::raw::c_void,
) where
F: FnMut(LibwakuResponse),
{
let closure = &mut *(user_data as *mut F);
let response = if data.is_null() {
""
} else {
str::from_utf8(slice::from_raw_parts(data as *mut u8, data_len))
.expect("could not retrieve response")
};
let result = LibwakuResponse::try_from((ret_code as u32, response))
.expect("invalid response obtained from libwaku");
closure(result);
}
pub fn get_trampoline<F>(_closure: &F) -> WakuCallBack
where
F: FnMut(LibwakuResponse),
{
Some(trampoline::<F>)
}
#[macro_export]
macro_rules! handle_ffi_call {
// Case: With or without additional arguments
($waku_fn:expr, $resp_hndlr:expr, $ctx:expr $(, $($arg:expr),*)?) => {{
use $crate::macros::get_trampoline;
use std::sync::Arc;
use tokio::sync::Notify;
use libc::*;
let mut result = LibwakuResponse::default();
let notify = Arc::new(Notify::new());
let notify_clone = notify.clone();
// Callback to update the result and notify the waiter
let result_cb = |r: LibwakuResponse| {
result = r;
notify_clone.notify_one();
};
let mut closure = result_cb;
// Create trampoline and invoke the `waku_sys` function
let code = unsafe {
let cb = get_trampoline(&closure);
$waku_fn(
$ctx, // Pass the context
$($($arg),*,)? // Expand the variadic arguments if provided
cb, // Pass the callback trampoline
&mut closure as *mut _ as *mut c_void
)
};
// Wait for the callback to notify us
notify.notified().await;
// Handle the response
$resp_hndlr(code, result)
}};
}
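// Illustrative sketch (not part of the crate): `get_trampoline` turns a Rust closure
// into the C callback libwaku expects; `handle_ffi_call!` above automates exactly
// this, passing the closure's address as `user_data` and awaiting the callback.
#[allow(dead_code)]
fn trampoline_usage_sketch() -> WakuCallBack {
    let on_response = |response: LibwakuResponse| {
        // A real binding would store the response or notify an async waiter here.
        let _ = response;
    };
    // The matching user_data pointer would be `&mut on_response as *mut _ as *mut c_void`,
    // kept alive for the duration of the FFI call.
    get_trampoline(&on_response)
}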

View File

@ -1,7 +1,11 @@
//! Waku node [configuration](https://rfc.vac.dev/spec/36/#jsonconfig-type) related items //! Waku node [configuration](https://rfc.vac.dev/spec/36/#jsonconfig-type) related items
use std::fmt::{Display, Formatter};
use std::str::FromStr;
// std // std
// crates // crates
use crate::WakuPubSubTopic;
use multiaddr::Multiaddr;
use secp256k1::SecretKey; use secp256k1::SecretKey;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use smart_default::SmartDefault; use smart_default::SmartDefault;
@ -16,92 +20,253 @@ pub struct WakuNodeConfig {
pub host: Option<std::net::IpAddr>, pub host: Option<std::net::IpAddr>,
/// Libp2p TCP listening port. Default `60000`. Use `0` for **random** /// Libp2p TCP listening port. Default `60000`. Use `0` for **random**
#[default(Some(60000))] #[default(Some(60000))]
pub tcp_port: Option<usize>, pub port: Option<usize>,
/// External address to advertise to other nodes. Can be ip4, ip6 or dns4, dns6.
/// If null, the multiaddress(es) generated from the ip and port specified in the config (or default ones) will be used.
/// Default: null
pub advertise_addr: Option<Multiaddr>,
/// Secp256k1 private key in Hex format (`0x123...abc`). Default random /// Secp256k1 private key in Hex format (`0x123...abc`). Default random
#[serde(with = "secret_key_serde", rename = "key")] #[serde(with = "secret_key_serde")]
pub node_key: Option<SecretKey>, pub node_key: Option<SecretKey>,
/// Cluster id that the node is running in /// Interval in seconds for pinging peers to keep the connection alive. Default `20`
#[default(Some(0))] #[default(Some(20))]
pub cluster_id: Option<usize>, pub keep_alive_interval: Option<usize>,
/// Enable relay protocol. Default `true`
/// Relay protocol
#[default(Some(true))] #[default(Some(true))]
pub relay: Option<bool>, pub relay: Option<bool>,
pub relay_topics: Vec<String>, /// Enable store protocol to persist message history
#[default(vec![1])]
pub shards: Vec<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub max_message_size: Option<String>,
/// Store protocol
#[serde(skip_serializing_if = "Option::is_none")]
pub storenode: Option<&'static str>,
/// RLN configuration
#[serde(skip_serializing_if = "Option::is_none")]
pub rln_relay: Option<RLNConfig>,
// Discovery
#[default(Some(false))] #[default(Some(false))]
pub dns_discovery: Option<bool>, pub store: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")] /// Url connection string. Accepts SQLite and PostgreSQL connection strings
pub dns_discovery_url: Option<&'static str>, #[default(Some("sqlite3://store.db".to_string()))]
pub database_url: Option<String>,
#[serde( /// Max number of messages to store in the database
skip_serializing_if = "Option::is_none", #[default(Some(1000))]
rename = "discV5BootstrapNodes" pub store_retention_max_messages: Option<usize>,
)] /// Max number of seconds that a message will be persisted in the database, default 1 day
pub discv5_bootstrap_nodes: Option<Vec<String>>, #[default(Some(86400))]
pub store_retention_max_seconds: Option<usize>,
pub relay_topics: Vec<WakuPubSubTopic>,
/// The minimum number of peers required on a topic to allow broadcasting a message. Default `0`
#[default(Some(0))]
pub min_peers_to_publish: Option<usize>,
/// Enable filter protocol. Default `false`
#[default(Some(false))] #[default(Some(false))]
pub discv5_discovery: Option<bool>, #[serde(rename = "legacyFilter")]
#[serde(skip_serializing_if = "Option::is_none")] pub filter: Option<bool>,
pub discv5_udp_port: Option<usize>, /// Set the log level. Default `INFO`. Allowed values "DEBUG", "INFO", "WARN", "ERROR", "DPANIC", "PANIC", "FATAL"
#[default(Some(WakuLogLevel::Info))]
pub log_level: Option<WakuLogLevel>,
/// Enable DiscoveryV5. Default `false`
#[default(Some(false))] #[default(Some(false))]
pub discv5_enr_auto_update: Option<bool>, #[serde(rename = "discV5")]
pub discv5: Option<bool>,
// other settings /// Array of bootstrap nodes ENR.
#[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "discV5BootstrapNodes", default)]
pub log_level: Option<&'static str>, pub discv5_bootstrap_nodes: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")] /// UDP port for DiscoveryV5. Default `9000`.
pub keep_alive: Option<bool>, #[default(Some(9000))]
#[serde(rename = "discV5UDPPort")]
pub discv5_udp_port: Option<u16>,
/// Gossipsub custom configuration.
pub gossipsub_params: Option<GossipSubParams>,
/// The domain name resolving to the node's public IPv4 address.
#[serde(rename = "dns4DomainName")]
pub dns4_domain_name: Option<String>,
/// Custom websocket support parameters
#[serde(rename = "websockets")]
pub websocket_params: Option<WebsocketParams>,
} }
/// RLN Relay configuration
#[derive(Clone, SmartDefault, Serialize, Deserialize, Debug)] #[derive(Clone, SmartDefault, Serialize, Deserialize, Debug)]
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "camelCase")]
pub struct RLNConfig { pub struct GossipSubParams {
/// Indicates if RLN support will be enabled. /// Sets the optimal degree for a GossipSub topic mesh. For example, if D == 6,
pub enabled: bool, /// each peer will want to have about six peers in their mesh for each topic they're subscribed to.
/// Index of the onchain commitment to use /// `d` should be set somewhere between `dlo` and `dhi`.
#[serde(skip_serializing_if = "Option::is_none", rename = "membership-index")] #[serde(rename = "d")]
pub membership_index: Option<usize>, pub d: Option<i32>,
/// On-chain dynamic group management /// Sets the lower bound on the number of peers we keep in a GossipSub topic mesh.
#[serde(skip_serializing_if = "Option::is_none")] /// If we have fewer than dlo peers, we will attempt to graft some more into the mesh at
pub dynamic: Option<bool>, /// the next heartbeat.
/// Path to the RLN merkle tree sled db (https://github.com/spacejam/sled) #[serde(rename = "d_low")]
#[serde(skip_serializing_if = "Option::is_none")] pub dlo: Option<i32>,
pub tree_path: Option<String>, /// Sets the upper bound on the number of peers we keep in a GossipSub topic mesh.
/// Message rate in bytes/sec after which verification of proofs should happen /// If we have more than dhi peers, we will select some to prune from the mesh at the next heartbeat.
#[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "d_high")]
pub bandwidth_threshold: Option<usize>, pub dhi: Option<i32>,
/// Path for persisting rln-relay credential /// `dscore` affects how peers are selected when pruning a mesh due to over subscription.
#[serde(skip_serializing_if = "Option::is_none")] /// At least dscore of the retained peers will be high-scoring, while the remainder are
pub cred_path: Option<String>, /// chosen randomly.
/// HTTP address of an Ethereum testnet client e.g., http://localhost:8540/ #[serde(rename = "d_score")]
#[serde(skip_serializing_if = "Option::is_none")] pub dscore: Option<i32>,
pub eth_client_address: Option<String>, /// Sets the quota for the number of outbound connections to maintain in a topic mesh.
/// Address of membership contract on an Ethereum testnet /// When the mesh is pruned due to over subscription, we make sure that we have outbound connections
#[serde(skip_serializing_if = "Option::is_none")] /// to at least dout of the survivor peers. This prevents sybil attackers from overwhelming
pub eth_contract_address: Option<String>, /// our mesh with incoming connections.
/// Password for encrypting RLN credentials ///
#[serde(skip_serializing_if = "Option::is_none")] /// dout must be set below Dlo, and must not exceed D / 2.
pub cred_password: Option<String>, #[serde(rename = "d_out")]
/// Set a user message limit for the rln membership registration pub dout: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")] /// Controls the size of the message cache used for gossip.
pub user_message_limit: Option<u64>, /// The message cache will remember messages for history_length heartbeats.
/// Epoch size in seconds used to rate limit RLN memberships pub history_length: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")] /// Controls how many cached message ids we will advertise in
pub epoch_sec: Option<u64>, /// IHAVE gossip messages. When asked for our seen message IDs, we will return
/// only those from the most recent history_gossip heartbeats. The slack between
/// history_gossip and history_length allows us to avoid advertising messages
/// that will be expired by the time they're requested.
///
/// history_gossip must be less than or equal to history_length to
/// avoid a runtime panic.
pub history_gossip: Option<i32>,
/// dlazy affects how many peers we will emit gossip to at each heartbeat.
/// We will send gossip to at least dlazy peers outside our mesh. The actual
/// number may be more, depending on gossip_factor and how many peers we're
/// connected to.
pub dlazy: Option<i32>,
/// `gossip_factor` affects how many peers we will emit gossip to at each heartbeat.
/// We will send gossip to gossip_factor * (total number of non-mesh peers), or
/// Dlazy, whichever is greater.
pub gossip_factor: Option<f64>,
/// Controls how many times we will allow a peer to request
/// the same message id through IWANT gossip before we start ignoring them. This is designed
/// to prevent peers from spamming us with requests and wasting our resources.
pub gossip_retransmission: Option<i32>,
/// Short delay before the heartbeat timer begins
/// after the router is initialized.
pub heartbeat_initial_delay_ms: Option<i32>,
/// Controls the time between heartbeats.
pub heartbeat_interval_seconds: Option<i32>,
/// Duration threshold for heartbeat processing before emitting
/// a warning; this would be indicative of an overloaded peer.
pub slow_heartbeat_warning: Option<f64>,
/// Controls how long we keep track of the fanout state. If it's been
/// fanout_ttl_seconds since we've published to a topic that we're not subscribed to,
/// we'll delete the fanout map for that topic.
pub fanout_ttl_seconds: Option<i32>,
/// Controls the number of peers to include in prune Peer eXchange.
/// When we prune a peer that's eligible for PX (has a good score, etc), we will try to
/// send them signed peer records for up to prune_peers other peers that we
/// know of.
pub prune_peers: Option<i32>,
/// Controls the backoff time for pruned peers. This is how long
/// a peer must wait before attempting to graft into our mesh again after being pruned.
/// When pruning a peer, we send them our value of PruneBackoff so they know
/// the minimum time to wait. Peers running older versions may not send a backoff time,
/// so if we receive a prune message without one, we will wait at least PruneBackoff
/// before attempting to re-graft.
pub prune_backoff_seconds: Option<i32>,
/// Controls the backoff time to use when unsubscribing
/// from a topic. A peer should not resubscribe to this topic before this
/// duration.
pub unsubscribe_backoff_seconds: Option<i32>,
/// Controls the number of active connection attempts for peers obtained through PX.
pub connectors: Option<i32>,
/// Sets the maximum number of pending connections for peers attempted through px.
pub max_pending_connections: Option<i32>,
/// Controls the timeout for connection attempts.
pub connection_timeout_seconds: Option<i32>,
/// Number of heartbeat ticks for attempting to reconnect direct peers
/// that are not currently connected.
pub direct_connect_ticks: Option<u64>,
/// Initial delay before opening connections to direct peers
pub direct_connect_initial_delay_seconds: Option<i32>,
/// Number of heartbeat ticks for attempting to improve the mesh
/// with opportunistic grafting. Every opportunistic_graft_ticks we will attempt to select some
/// high-scoring mesh peers to replace lower-scoring ones, if the median score of our mesh peers falls
/// below a threshold (see https://godoc.org/github.com/libp2p/go-libp2p-pubsub#PeerScoreThresholds).
pub opportunistic_graft_ticks: Option<u64>,
/// Number of peers to opportunistically graft.
pub opportunistic_graft_peers: Option<i32>,
/// If a GRAFT comes before graft_flood_threshold_seconds has elapsed since the last PRUNE,
/// then there is an extra score penalty applied to the peer through P7.
pub graft_flood_threshold_seconds: Option<i32>,
/// Maximum number of messages to include in an IHAVE message.
/// Also controls the maximum number of IHAVE ids we will accept and request with IWANT from a
/// peer within a heartbeat, to protect from IHAVE floods. You should adjust this value from the
/// default if your system is pushing more than 5000 messages in history_gossip heartbeats;
/// with the defaults this is 1666 messages/s.
#[serde(rename = "maxIHaveLength")]
pub max_ihave_length: Option<i32>,
/// Maximum number of IHAVE messages to accept from a peer within a heartbeat.
#[serde(rename = "maxIHaveMessages")]
pub max_ihave_messages: Option<i32>,
/// Time to wait for a message requested through IWANT following an IHAVE advertisement.
/// If the message is not received within this window, a broken promise is declared and
/// the router may apply behavioural penalties.
#[serde(rename = "iwantFollowupTimeSeconds")]
pub iwant_followup_time_seconds: Option<i32>,
/// Time until a previously seen message ID can be forgotten about.
#[serde(rename = "seenMessagesTTLSeconds")]
pub seen_messages_ttl_seconds: Option<i32>,
}
#[derive(Clone, SmartDefault, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct WebsocketParams {
/// Indicates if websockets support will be enabled
#[default(Some(false))]
pub enabled: Option<bool>,
/// Listening address for websocket connections. Default `0.0.0.0`
#[default(Some(std::net::IpAddr::V4(std::net::Ipv4Addr::new(0, 0, 0, 0))))]
pub host: Option<std::net::IpAddr>,
/// TCP listening port for websocket connections. Use `0` for **random**. Default `60001`; if secure websockets support is enabled, the default is `6443`.
pub port: Option<usize>,
/// Enable secure websockets support
#[default(Some(false))]
pub secure: Option<bool>,
/// Secure websocket certificate path. Mandatory if secure websockets support is enabled.
pub cert_path: Option<String>,
/// Secure websocket key path. Mandatory if secure websockets support is enabled.
pub key_path: Option<String>,
}
#[derive(Clone, Default, Serialize, Deserialize, Debug)]
pub enum WakuLogLevel {
#[default]
Info,
Debug,
Warn,
Error,
DPanic,
Panic,
Fatal,
}
impl FromStr for WakuLogLevel {
type Err = std::io::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"info" => Ok(Self::Info),
"debug" => Ok(Self::Debug),
"warn" => Ok(Self::Warn),
"error" => Ok(Self::Error),
"dpanic" => Ok(Self::DPanic),
"panic" => Ok(Self::Panic),
"fatal" => Ok(Self::Fatal),
_ => Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!("Unrecognized waku log level: {s}. Allowed values \"DEBUG\", \"INFO\", \"WARN\", \"ERROR\", \"DPANIC\", \"PANIC\", \"FATAL\""),
)),
}
}
}
impl Display for WakuLogLevel {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let tag = match self {
WakuLogLevel::Info => "INFO",
WakuLogLevel::Debug => "DEBUG",
WakuLogLevel::Warn => "WARN",
WakuLogLevel::Error => "ERROR",
WakuLogLevel::DPanic => "DPANIC",
WakuLogLevel::Panic => "PANIC",
WakuLogLevel::Fatal => "FATAL",
};
write!(f, "{tag}")
}
} }
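// Illustrative sketch (not part of the crate): building a node configuration with the
// master-side (left-hand) field layout of this diff, overriding a few fields and
// keeping the SmartDefault values for the rest; the values are placeholders.
#[allow(dead_code)]
fn example_node_config() -> WakuNodeConfig {
    WakuNodeConfig {
        tcp_port: Some(60010),
        cluster_id: Some(16),
        shards: vec![1, 32],
        ..Default::default()
    }
}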
mod secret_key_serde { mod secret_key_serde {

View File

@ -1,63 +0,0 @@
use std::ffi::c_void;
use std::ptr::null_mut;
use std::sync::{Arc, Mutex};
use crate::general::libwaku_response::LibwakuResponse;
use crate::macros::get_trampoline;
type LibwakuResponseClosure = dyn FnMut(LibwakuResponse) + Send + Sync;
pub struct WakuNodeContext {
obj_ptr: *mut c_void,
msg_observer: Arc<Mutex<Box<LibwakuResponseClosure>>>,
}
impl WakuNodeContext {
pub fn new(obj_ptr: *mut c_void) -> Self {
let me = Self {
obj_ptr,
msg_observer: Arc::new(Mutex::new(Box::new(|_| {}))),
};
// By default we set a callback that will panic if the user didn't specify a valid callback.
// And by valid callback we mean a callback that can properly handle the waku events.
me.waku_set_event_callback(WakuNodeContext::panic_callback)
.expect("correctly set default callback");
me
}
// Default callback that panics; a valid callback must be set via waku_set_event_callback.
fn panic_callback(_response: LibwakuResponse) {
panic!("callback not set. Please use waku_set_event_callback to set a valid callback")
}
pub fn get_ptr(&self) -> *mut c_void {
self.obj_ptr
}
pub fn reset_ptr(mut self) {
self.obj_ptr = null_mut();
}
/// Register callback to act as event handler and receive application events,
/// which are used to react to asynchronous events in Waku
pub fn waku_set_event_callback<F: FnMut(LibwakuResponse) + 'static + Sync + Send>(
&self,
closure: F,
) -> Result<(), String> {
if let Ok(mut boxed_closure) = self.msg_observer.lock() {
*boxed_closure = Box::new(closure);
unsafe {
let cb = get_trampoline(&(*boxed_closure));
waku_sys::waku_set_event_callback(
self.obj_ptr,
cb,
&mut (*boxed_closure) as *mut _ as *mut c_void,
)
};
Ok(())
} else {
Err("Failed to acquire lock in waku_set_event_callback!".to_string())
}
}
}
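// Illustrative sketch (not part of the crate): installing an application event handler.
// The closure must be `Send + Sync + 'static` and receives every `LibwakuResponse`
// emitted asynchronously by libwaku.
#[allow(dead_code)]
fn install_event_logger(ctx: &WakuNodeContext) -> Result<(), String> {
    ctx.waku_set_event_callback(|response| {
        if let LibwakuResponse::Success(Some(json)) = response {
            println!("waku event payload: {json}");
        }
    })
}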

View File

@ -0,0 +1,108 @@
// std
use std::ffi::CString;
use std::time::Duration;
// crates
use enr::Enr;
use libc::*;
use multiaddr::Multiaddr;
use serde::Deserialize;
use url::{Host, Url};
// internal
use crate::utils::{get_trampoline, handle_json_response, handle_no_response};
use crate::{PeerId, Result};
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct DnsInfo {
#[serde(alias = "peerID")]
pub peer_id: PeerId,
#[serde(default, alias = "multiaddrs")]
pub addresses: Vec<Multiaddr>,
pub enr: Option<Enr<enr::secp256k1::SecretKey>>,
}
/// RetrieveNodes returns a list of multiaddresses given a URL to a DNS-discoverable ENR tree.
/// The nameserver can optionally be specified to resolve the enrtree URL; otherwise the default system DNS is used.
pub fn waku_dns_discovery(
url: &Url,
server: Option<&Host>,
timeout: Option<Duration>,
) -> Result<Vec<DnsInfo>> {
let url = CString::new(url.to_string())
.expect("CString should build properly from a valid Url")
.into_raw();
let server = CString::new(server.map(|host| host.to_string()).unwrap_or_default())
.expect("CString should build properly from a String nameserver")
.into_raw();
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_dns_discovery(
url,
server,
timeout
.map(|timeout| {
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32")
})
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(url));
drop(CString::from_raw(server));
out
};
handle_json_response(code, &result)
}
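// Illustrative sketch (not part of the crate): resolving a DNS-discoverable ENR tree
// and printing the discovered peers; the enrtree URL matches the one used in the test below.
#[allow(dead_code)]
fn discover_test_fleet() -> Result<()> {
    let enrtree: Url =
        "enrtree://AO47IDOLBKH72HIZZOXQP6NMRESAN7CHYWIBNXDXWRJRZWLODKII6@test.wakuv2.nodes.status.im"
            .parse()
            .map_err(|err: url::ParseError| err.to_string())?;
    for dns_info in waku_dns_discovery(&enrtree, None, None)? {
        println!("peer {} reachable at {:?}", dns_info.peer_id, dns_info.addresses);
    }
    Ok(())
}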
/// Update the bootnodes used by DiscoveryV5 by passing a list of ENRs
pub fn waku_discv5_update_bootnodes(bootnodes: Vec<String>) -> Result<()> {
let bootnodes_ptr = CString::new(
serde_json::to_string(&bootnodes)
.expect("Serialization from properly built bootnode array should never fail"),
)
.expect("CString should build properly from the string vector")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_discv5_update_bootnodes(
bootnodes_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(bootnodes_ptr));
out
};
handle_no_response(code, &error)
}
#[cfg(test)]
mod test {
use url::Url;
#[test]
fn test_dns_discovery() {
let enrtree: Url =
"enrtree://AO47IDOLBKH72HIZZOXQP6NMRESAN7CHYWIBNXDXWRJRZWLODKII6@test.wakuv2.nodes.status.im".parse().unwrap();
let result = super::waku_dns_discovery(&enrtree, None, None);
assert!(result.is_ok());
assert!(!result.as_ref().unwrap().is_empty());
println!("{result:?}");
}
}

View File

@ -1,105 +0,0 @@
//! Waku message [event](https://rfc.vac.dev/spec/36/#events) related items
//!
//! Asynchronous events require a callback to be registered.
//! An example of an asynchronous event that might be emitted is receiving a message.
//! When an event is emitted, this callback will be triggered, receiving a [`WakuEvent`].
// crates
use serde::{Deserialize, Serialize};
// internal
use crate::general::WakuMessage;
use std::str;
use crate::MessageHash;
/// Waku event
/// For now just WakuMessage is supported
#[non_exhaustive]
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "eventType", rename_all = "camelCase")]
pub enum WakuEvent {
#[serde(rename = "message")]
WakuMessage(WakuMessageEvent),
#[serde(rename = "relay_topic_health_change")]
RelayTopicHealthChange(TopicHealthEvent),
#[serde(rename = "connection_change")]
ConnectionChange(ConnectionChangeEvent),
Unrecognized(serde_json::Value),
}
/// Type of `event` field for a `message` event
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct WakuMessageEvent {
/// The pubsub topic on which the message was received
pub pubsub_topic: String,
/// The message hash
pub message_hash: MessageHash,
/// The message in [`WakuMessage`] format
pub waku_message: WakuMessage,
}
/// Type of `event` field for a `topic health` event
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct TopicHealthEvent {
/// The pubsub topic whose health changed
pub pubsub_topic: String,
/// The new health state of the topic
pub topic_health: String,
}
/// Type of `event` field for a `connection change` event
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ConnectionChangeEvent {
/// The peer whose connection state changed
pub peer_id: String,
/// The connection event for the peer (e.g. "Joined")
pub peer_event: String,
}
#[cfg(test)]
mod tests {
use crate::WakuEvent;
use crate::WakuEvent::{ConnectionChange, RelayTopicHealthChange};
#[test]
fn deserialize_message_event() {
let s = "{\"eventType\":\"message\",\"messageHash\":\"0xd40aa51bbb4867fe40329a255575cfc9ef4000358cc7321b2668b008cba94b30\",\"pubsubTopic\":\"/waku/2/default-waku/proto\",\"wakuMessage\":{\"payload\":\"SGkgZnJvbSDwn6aAIQ==\",\"contentTopic\":\"/toychat/2/huilong/proto\",\"timestamp\":1665580926660}}";
let evt: WakuEvent = serde_json::from_str(s).unwrap();
assert!(matches!(evt, WakuEvent::WakuMessage(_)));
}
#[test]
fn deserialize_topic_health_change_event() {
let s = "{\"eventType\":\"relay_topic_health_change\", \"pubsubTopic\":\"/waku/2/rs/16/1\",\"topicHealth\":\"MinimallyHealthy\"}";
let evt: WakuEvent = serde_json::from_str(s).unwrap();
match evt {
RelayTopicHealthChange(topic_health_event) => {
assert_eq!(topic_health_event.pubsub_topic, "/waku/2/rs/16/1");
assert_eq!(topic_health_event.topic_health, "MinimallyHealthy");
}
_ => panic!("Expected RelayTopicHealthChange event, but got {:?}", evt),
}
}
#[test]
fn deserialize_connection_change_event() {
let s = "{\"eventType\":\"connection_change\", \"peerId\":\"16Uiu2HAmAR24Mbb6VuzoyUiGx42UenDkshENVDj4qnmmbabLvo31\",\"peerEvent\":\"Joined\"}";
let evt: WakuEvent = serde_json::from_str(s).unwrap();
match evt {
ConnectionChange(conn_change_event) => {
assert_eq!(
conn_change_event.peer_id,
"16Uiu2HAmAR24Mbb6VuzoyUiGx42UenDkshENVDj4qnmmbabLvo31"
);
assert_eq!(conn_change_event.peer_event, "Joined");
}
_ => panic!("Expected RelayTopicHealthChange event, but got {:?}", evt),
}
}
}
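// Illustrative sketch (not part of the crate): dispatching on a deserialized event,
// mirroring the JSON fixtures exercised in the tests above.
#[allow(dead_code)]
fn on_raw_event(json: &str) {
    match serde_json::from_str::<WakuEvent>(json) {
        Ok(WakuEvent::WakuMessage(evt)) => {
            println!("message {} on {}", evt.message_hash, evt.pubsub_topic)
        }
        Ok(WakuEvent::ConnectionChange(evt)) => {
            println!("peer {} is now {}", evt.peer_id, evt.peer_event)
        }
        Ok(other) => println!("unhandled waku event: {other:?}"),
        Err(err) => eprintln!("could not parse waku event: {err}"),
    }
}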

View File

@ -1,59 +1,182 @@
//! Waku filter protocol related methods //! Waku [filter](https://rfc.vac.dev/spec/36/#waku-filter) protocol related methods
// std // std
use std::ffi::CString; use std::ffi::CString;
use std::time::Duration;
// crates
use libc::*;
// internal // internal
use crate::general::contenttopic::WakuContentTopic;
use crate::general::libwaku_response::{handle_no_response, LibwakuResponse};
use crate::general::pubsubtopic::PubsubTopic;
use crate::general::Result; use crate::general::Result;
use crate::handle_ffi_call; use crate::general::{ContentFilter, FilterSubscriptionResult, PeerId};
use crate::node::context::WakuNodeContext; use crate::utils::{get_trampoline, handle_json_response, handle_no_response};
pub async fn waku_filter_subscribe( /// Creates a subscription in a light node for messages that match a content filter and optionally a [`WakuPubSubTopic`](`crate::general::WakuPubSubTopic`)
ctx: &WakuNodeContext, /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_filter_subscribechar-filterjson-char-peerid-int-timeoutms)
pubsub_topic: &PubsubTopic, pub fn waku_filter_subscribe(
content_topics: Vec<WakuContentTopic>, content_filter: &ContentFilter,
peer_id: Option<PeerId>,
timeout: Option<Duration>,
) -> Result<FilterSubscriptionResult> {
let content_filter_ptr = CString::new(
serde_json::to_string(content_filter)
.expect("ContentFilter should always succeed to serialize"),
)
.expect("ContentFilter should always be able to be serialized")
.into_raw();
let peer_id_ptr = match peer_id {
None => CString::new(""),
Some(t) => CString::new(t),
}
.expect("CString should build properly from peer id")
.into_raw();
let mut response: String = Default::default();
let response_cb = |v: &str| response = v.to_string();
let code = unsafe {
let mut closure = response_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_filter_subscribe(
content_filter_ptr,
peer_id_ptr,
timeout
.map(|timeout| {
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32")
})
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(content_filter_ptr));
drop(CString::from_raw(peer_id_ptr));
out
};
handle_json_response(code, &response)
}
/// Used to know if a service node has an active subscription for this client
/// peerID should contain the ID of a peer we are subscribed to, supporting the filter protocol
pub fn waku_filter_ping(peer_id: PeerId, timeout: Option<Duration>) -> Result<()> {
let peer_id_ptr = CString::new(peer_id)
.expect("PeerId should always be able to be serialized")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_filter_ping(
peer_id_ptr,
timeout
.map(|timeout| {
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32")
})
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(peer_id_ptr));
out
};
handle_no_response(code, &error)
}
/// Sends a request to a service node to stop pushing messages matching this filter to this client.
/// It might be used to modify an existing subscription by providing a subset of the original filter
/// criteria
pub fn waku_filter_unsubscribe(
content_filter: &ContentFilter,
peer_id: PeerId,
timeout: Option<Duration>,
) -> Result<()> { ) -> Result<()> {
let pubsub_topic = CString::new(String::from(pubsub_topic)) let content_filter_ptr = CString::new(
.expect("CString should build properly from pubsub topic"); serde_json::to_string(content_filter)
let content_topics = WakuContentTopic::join_content_topics(content_topics); .expect("ContentFilter should always succeed to serialize"),
let content_topics =
CString::new(content_topics).expect("CString should build properly from content topic");
handle_ffi_call!(
waku_sys::waku_filter_subscribe,
handle_no_response,
ctx.get_ptr(),
pubsub_topic.as_ptr(),
content_topics.as_ptr()
) )
.expect("CString should build properly from the serialized filter subscription")
.into_raw();
let peer_id_ptr = CString::new(peer_id)
.expect("PeerId should always be able to be serialized")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_filter_unsubscribe(
content_filter_ptr,
peer_id_ptr,
timeout
.map(|timeout| {
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32")
})
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(content_filter_ptr));
drop(CString::from_raw(peer_id_ptr));
out
};
handle_no_response(code, &error)
} }
pub async fn waku_filter_unsubscribe( /// Sends a request to a service node (or all service nodes) to stop pushing messages
ctx: &WakuNodeContext, /// peerID should contain the ID of a peer this client is subscribed to, or can be None
pubsub_topic: &PubsubTopic, /// to stop all active subscriptions
content_topics: Vec<WakuContentTopic>, // comma-separated list of content topics pub fn waku_filter_unsubscribe_all(
peer_id: Option<PeerId>,
timeout: Option<Duration>,
) -> Result<()> { ) -> Result<()> {
let pubsub_topic = CString::new(String::from(pubsub_topic)) let peer_id_ptr = match peer_id {
.expect("CString should build properly from pubsub topic"); None => CString::new(""),
let content_topics = WakuContentTopic::join_content_topics(content_topics); Some(t) => CString::new(t),
let content_topics = }
CString::new(content_topics).expect("CString should build properly from content topic"); .expect("CString should build properly from peer id")
.into_raw();
handle_ffi_call!( let mut error: String = Default::default();
waku_sys::waku_filter_unsubscribe, let error_cb = |v: &str| error = v.to_string();
handle_no_response, let code = unsafe {
ctx.get_ptr(), let mut closure = error_cb;
pubsub_topic.as_ptr(), let cb = get_trampoline(&closure);
content_topics.as_ptr() let out = waku_sys::waku_filter_unsubscribe_all(
) peer_id_ptr,
} timeout
.map(|timeout| {
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32")
})
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
pub async fn waku_filter_unsubscribe_all(ctx: &WakuNodeContext) -> Result<()> { drop(CString::from_raw(peer_id_ptr));
handle_ffi_call!(
waku_sys::waku_filter_unsubscribe_all, out
handle_no_response, };
ctx.get_ptr()
) handle_no_response(code, &error)
} }
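// Illustrative sketch (not part of the crate): subscribing through the context-based
// async API shown on the left-hand (master) side of this diff; the topic names are placeholders.
#[allow(dead_code)]
async fn filter_subscribe_sketch(ctx: &WakuNodeContext) -> Result<()> {
    use crate::general::contenttopic::Encoding;

    let pubsub_topic = PubsubTopic::new("/waku/2/rs/16/32");
    let content_topics = vec![WakuContentTopic::new("toychat", "2", "huilong", Encoding::Proto)];
    waku_filter_subscribe(ctx, &pubsub_topic, content_topics).await
}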

View File

@ -0,0 +1,89 @@
//! Waku [filter](https://rfc.vac.dev/spec/36/#waku-filter) protocol related methods
// std
use std::ffi::CString;
use std::time::Duration;
// crates
use libc::*;
// internal
use crate::general::Result;
use crate::general::{LegacyFilterSubscription, PeerId};
use crate::utils::{get_trampoline, handle_no_response};
/// Creates a subscription in a light node for messages that match a content filter and optionally a [`WakuPubSubTopic`](`crate::general::WakuPubSubTopic`)
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_legacy_filter_subscribechar-filterjson-char-peerid-int-timeoutms)
pub fn waku_legacy_filter_subscribe(
filter_subscription: &LegacyFilterSubscription,
peer_id: PeerId,
timeout: Duration,
) -> Result<()> {
let filter_subscription_ptr = CString::new(
serde_json::to_string(filter_subscription)
.expect("FilterSubscription should always succeed to serialize"),
)
.expect("FilterSubscription should always be able to be serialized")
.into_raw();
let peer_id_ptr = CString::new(peer_id)
.expect("PeerId should always be able to be serialized")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_legacy_filter_subscribe(
filter_subscription_ptr,
peer_id_ptr,
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32"),
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(filter_subscription_ptr));
drop(CString::from_raw(peer_id_ptr));
out
};
handle_no_response(code, &error)
}
/// Removes subscriptions in a light node matching a content filter and, optionally, a [`WakuPubSubTopic`](`crate::general::WakuPubSubTopic`)
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_filter_unsubscribechar-filterjson-int-timeoutms)
pub fn waku_legacy_filter_unsubscribe(
filter_subscription: &LegacyFilterSubscription,
timeout: Duration,
) -> Result<()> {
let filter_subscription_ptr = CString::new(
serde_json::to_string(filter_subscription)
.expect("FilterSubscription should always succeed to serialize"),
)
.expect("CString should build properly from the serialized filter subscription")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_legacy_filter_unsubscribe(
filter_subscription_ptr,
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32"),
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(filter_subscription_ptr));
out
};
handle_no_response(code, &error)
}

View File

@ -1,34 +1,66 @@
//! Waku lightpush protocol related methods //! Waku [lightpush](https://rfc.vac.dev/spec/36/#waku-lightpush) protocol related methods
// std // std
use std::ffi::CString; use std::ffi::CString;
use std::time::Duration;
// crates
use libc::*;
// internal // internal
use crate::general::libwaku_response::{handle_response, LibwakuResponse}; use crate::general::{MessageId, PeerId, Result, WakuMessage, WakuPubSubTopic};
use crate::general::{messagehash::MessageHash, Result, WakuMessage}; use crate::node::waku_default_pubsub_topic;
use crate::handle_ffi_call; use crate::utils::{get_trampoline, handle_response};
use crate::node::context::WakuNodeContext;
use crate::general::pubsubtopic::PubsubTopic; /// Publish a message using Waku Lightpush
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_lightpush_publishchar-messagejson-char-topic-char-peerid-int-timeoutms)
pub async fn waku_lightpush_publish_message( pub fn waku_lightpush_publish(
ctx: &WakuNodeContext,
message: &WakuMessage, message: &WakuMessage,
pubsub_topic: &PubsubTopic, pubsub_topic: Option<WakuPubSubTopic>,
) -> Result<MessageHash> { peer_id: PeerId,
let message = CString::new( timeout: Option<Duration>,
) -> Result<MessageId> {
let pubsub_topic = pubsub_topic
.unwrap_or_else(waku_default_pubsub_topic)
.to_string();
let message_ptr = CString::new(
serde_json::to_string(&message) serde_json::to_string(&message)
.expect("WakuMessages should always be able to success serializing"), .expect("WakuMessages should always be able to success serializing"),
) )
.expect("CString should build properly from the serialized waku message"); .expect("CString should build properly from the serialized waku message")
.into_raw();
let topic_ptr = CString::new(pubsub_topic)
.expect("CString should build properly from pubsub topic")
.into_raw();
let peer_id_ptr = CString::new(peer_id)
.expect("CString should build properly from peer id")
.into_raw();
let pubsub_topic = CString::new(String::from(pubsub_topic)) let mut result: String = Default::default();
.expect("CString should build properly from pubsub topic"); let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_lightpush_publish(
message_ptr,
topic_ptr,
peer_id_ptr,
timeout
.map(|timeout| {
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32")
})
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
handle_ffi_call!( drop(CString::from_raw(message_ptr));
waku_sys::waku_lightpush_publish, drop(CString::from_raw(topic_ptr));
handle_response, drop(CString::from_raw(peer_id_ptr));
ctx.get_ptr(),
pubsub_topic.as_ptr(), out
message.as_ptr() };
)
handle_response(code, &result)
} }
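
To make the call pattern concrete, here is a small sketch of lightpush publishing through the v0.5.0 handle API; it mirrors the test helper that appears later in this diff and assumes the node is already running and connected to at least one other peer:

// Sketch: pick any peer other than ourselves and push `msg` through it via lightpush.
use waku_bindings::{MessageId, Running, WakuMessage, WakuNodeHandle};

fn lightpush_via_first_peer(
    node: &WakuNodeHandle<Running>,
    msg: &WakuMessage,
) -> Result<MessageId, String> {
    let self_id = node.peer_id()?;
    let peer_id = node
        .peers()?
        .iter()
        .map(|peer| peer.peer_id().clone())
        .find(|id| id != &self_id)
        .ok_or_else(|| "no other peer available for lightpush".to_string())?;
    // None selects the default pubsub topic; a None timeout maps to 0 (no timeout).
    node.lightpush_publish(msg, None, peer_id, None)
}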

View File

@ -1,120 +1,123 @@
//! Node lifecycle [management](https://rfc.vac.dev/spec/36/#node-management) related methods //! Node lifecycle [management](https://rfc.vac.dev/spec/36/#node-management) related methods
// std // std
use multiaddr::Multiaddr;
use std::ffi::CString; use std::ffi::CString;
// crates // crates
use libc::c_void; use libc::*;
use multiaddr::Multiaddr;
use std::sync::Arc;
use tokio::sync::Notify;
// internal // internal
use super::config::WakuNodeConfig; use super::config::WakuNodeConfig;
use crate::general::libwaku_response::{handle_no_response, handle_response, LibwakuResponse}; use crate::general::{PeerId, Result};
use crate::general::Result; use crate::utils::{get_trampoline, handle_json_response, handle_no_response, handle_response};
use crate::handle_ffi_call;
use crate::macros::get_trampoline;
use crate::node::context::WakuNodeContext;
/// Instantiates a Waku node /// Instantiates a Waku node
/// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_newchar-jsonconfig) /// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_newchar-jsonconfig)
pub async fn waku_new(config: Option<WakuNodeConfig>) -> Result<WakuNodeContext> { pub fn waku_new(config: Option<WakuNodeConfig>) -> Result<()> {
let config = config.unwrap_or_default(); let config = config.unwrap_or_default();
let config = CString::new(
let config_ptr = CString::new(
serde_json::to_string(&config) serde_json::to_string(&config)
.expect("Serialization from properly built NodeConfig should never fail"), .expect("Serialization from properly built NodeConfig should never fail"),
) )
.expect("CString should build properly from the config"); .expect("CString should build properly from the config")
let config_ptr = config.as_ptr(); .into_raw();
let notify = Arc::new(Notify::new()); let mut error: String = Default::default();
let notify_clone = notify.clone(); let error_cb = |v: &str| error = v.to_string();
let mut result = LibwakuResponse::default(); let code = unsafe {
let result_cb = |r: LibwakuResponse| { let mut closure = error_cb;
result = r;
notify_clone.notify_one(); // Notify that the value has been updated
};
let mut closure = result_cb;
let obj_ptr = unsafe {
let cb = get_trampoline(&closure); let cb = get_trampoline(&closure);
waku_sys::waku_new(config_ptr, cb, &mut closure as *mut _ as *mut c_void) let out = waku_sys::waku_new(config_ptr, cb, &mut closure as *mut _ as *mut c_void);
drop(CString::from_raw(config_ptr));
out
}; };
notify.notified().await; // Wait until a result is received handle_no_response(code, &error)
match result {
LibwakuResponse::MissingCallback => panic!("callback is required"),
LibwakuResponse::Failure(v) => Err(v),
_ => Ok(WakuNodeContext::new(obj_ptr)),
}
}
pub async fn waku_destroy(ctx: &WakuNodeContext) -> Result<()> {
handle_ffi_call!(waku_sys::waku_destroy, handle_no_response, ctx.get_ptr())
} }
/// Start a Waku node mounting all the protocols that were enabled during the Waku node instantiation. /// Start a Waku node mounting all the protocols that were enabled during the Waku node instantiation.
/// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_start) /// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_start)
pub async fn waku_start(ctx: &WakuNodeContext) -> Result<()> { pub fn waku_start() -> Result<()> {
handle_ffi_call!(waku_sys::waku_start, handle_no_response, ctx.get_ptr()) let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
waku_sys::waku_start(cb, &mut closure as *mut _ as *mut c_void)
};
handle_no_response(code, &error)
} }
/// Stops a Waku node /// Stops a Waku node
/// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_stop) /// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_stop)
pub async fn waku_stop(ctx: &WakuNodeContext) -> Result<()> { pub fn waku_stop() -> Result<()> {
handle_ffi_call!(waku_sys::waku_stop, handle_no_response, ctx.get_ptr()) let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
waku_sys::waku_stop(cb, &mut closure as *mut _ as *mut c_void)
};
handle_no_response(code, &error)
} }
/// nwaku version /// If the execution is successful, the result is the peer ID as a string (base58 encoded)
pub async fn waku_version(ctx: &WakuNodeContext) -> Result<String> { /// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_stop)
handle_ffi_call!(waku_sys::waku_version, handle_response, ctx.get_ptr()) pub fn waku_peer_id() -> Result<PeerId> {
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
waku_sys::waku_peerid(cb, &mut closure as *mut _ as *mut c_void)
};
handle_response(code, &result)
} }
/// Get the multiaddresses the Waku node is listening to /// Get the multiaddresses the Waku node is listening to
/// as per [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_listen_addresses) /// as per [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_listen_addresses)
pub async fn waku_listen_addresses(ctx: &WakuNodeContext) -> Result<Vec<Multiaddr>> { pub fn waku_listen_addresses() -> Result<Vec<Multiaddr>> {
handle_ffi_call!( let mut result: String = Default::default();
waku_sys::waku_listen_addresses, let result_cb = |v: &str| result = v.to_string();
handle_response, let code = unsafe {
ctx.get_ptr() let mut closure = result_cb;
) let cb = get_trampoline(&closure);
waku_sys::waku_listen_addresses(cb, &mut closure as *mut _ as *mut c_void)
};
handle_json_response(code, &result)
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::waku_new; use super::waku_new;
use crate::node::management::{ use crate::node::management::{waku_listen_addresses, waku_peer_id, waku_start, waku_stop};
waku_destroy, waku_listen_addresses, waku_start, waku_stop, waku_version, use crate::node::peers::waku_peer_count;
};
use serial_test::serial; use serial_test::serial;
#[tokio::test] #[test]
#[serial] #[serial]
async fn waku_flow() { fn waku_flow() {
let node = waku_new(None).await.unwrap(); waku_new(None).unwrap();
waku_start().unwrap();
// test peer id call, since we cannot start different instances of the node
let id = waku_peer_id().unwrap();
dbg!(&id);
assert!(!id.is_empty());
waku_start(&node).await.unwrap(); let peer_cnt = waku_peer_count().unwrap();
dbg!(peer_cnt);
// test addresses // test addresses, since we cannot start different instances of the node
let addresses = waku_listen_addresses(&node).await.unwrap(); let addresses = waku_listen_addresses().unwrap();
dbg!(&addresses); dbg!(&addresses);
assert!(!addresses.is_empty()); assert!(!addresses.is_empty());
waku_stop(&node).await.unwrap(); waku_stop().unwrap();
waku_destroy(&node).await.unwrap();
}
#[tokio::test]
#[serial]
async fn nwaku_version() {
let node = waku_new(None).await.unwrap();
let version = waku_version(&node)
.await
.expect("should return the version");
print!("Current version: {}", version);
assert!(!version.is_empty());
waku_destroy(&node).await.unwrap();
} }
} }
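
Outside of the test harness, the same master-side lifecycle looks like this from an application entry point; a sketch only, using the async handle API that wraps the functions above:

// Sketch: create, start, inspect, stop and destroy a node with the async (master) API.
use waku_bindings::waku_new;

#[tokio::main]
async fn main() -> Result<(), String> {
    let node = waku_new(None).await?;                 // WakuNodeHandle<Initialized>
    let node = node.start().await?;                   // WakuNodeHandle<Running>
    println!("nwaku version: {}", node.version().await?);
    for addr in node.listen_addresses().await? {
        println!("listening on {addr}");
    }
    let node = node.stop().await?;                    // back to Initialized
    node.waku_destroy().await                         // release the underlying node context
}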

View File

@ -1,9 +1,9 @@
//! Waku node implementation //! Waku node implementation
mod config; mod config;
mod context; mod discovery;
mod events;
mod filter; mod filter;
mod legacyfilter;
mod lightpush; mod lightpush;
mod management; mod management;
mod peers; mod peers;
@ -11,97 +11,102 @@ mod relay;
mod store; mod store;
// std // std
pub use aes_gcm::Key; pub use aes_gcm::{Aes256Gcm, Key};
pub use multiaddr::Multiaddr; pub use multiaddr::Multiaddr;
pub use secp256k1::{PublicKey, SecretKey}; pub use secp256k1::{PublicKey, SecretKey};
use std::marker::PhantomData; use std::marker::PhantomData;
use std::sync::Mutex;
use std::time::Duration; use std::time::Duration;
use store::{StoreQueryRequest, StoreWakuMessageResponse}; // crates
// internal // internal
use crate::general::contenttopic::{Encoding, WakuContentTopic};
use crate::general::libwaku_response::LibwakuResponse;
pub use crate::general::pubsubtopic::PubsubTopic;
use crate::general::{messagehash::MessageHash, Result, WakuMessage};
use crate::node::context::WakuNodeContext; use crate::general::{
pub use config::RLNConfig; ContentFilter, FilterSubscriptionResult, LegacyFilterSubscription, MessageId, PeerId,
pub use config::WakuNodeConfig; ProtocolId, Result, StoreQuery, StoreResponse, WakuMessage, WakuPubSubTopic,
pub use events::{WakuEvent, WakuMessageEvent}; };
pub use relay::waku_create_content_topic;
// Define state marker types pub use config::{GossipSubParams, WakuLogLevel, WakuNodeConfig, WebsocketParams};
pub use discovery::{waku_discv5_update_bootnodes, waku_dns_discovery, DnsInfo};
pub use peers::{Protocol, WakuPeerData, WakuPeers};
pub use relay::{waku_create_content_topic, waku_default_pubsub_topic};
pub use store::{waku_local_store_query, waku_store_query};
/// Shared flag to check if a waku node is already running in the current process
static WAKU_NODE_INITIALIZED: Mutex<bool> = Mutex::new(false);
/// Marker trait to disallow undesired waku node states in the handle
pub trait WakuNodeState {}
/// Waku node initialized state
pub struct Initialized; pub struct Initialized;
/// Waku node running state
pub struct Running; pub struct Running;
impl WakuNodeState for Initialized {}
impl WakuNodeState for Running {}
/// Handle to the underlying waku node /// Handle to the underlying waku node
pub struct WakuNodeHandle<State> { /// Safe to send to/through threads.
ctx: WakuNodeContext, /// Only a waku node can be running at a time.
_state: PhantomData<State>, /// References (`&`) to the handle can call queries and perform operations in a thread-safe way.
/// Only an owned version of the handle can `start` or `stop` the node.
pub struct WakuNodeHandle<State: WakuNodeState>(PhantomData<State>);
/// We do not have any inner state, so the handle should be safe to be sent among threads.
unsafe impl<State: WakuNodeState> Send for WakuNodeHandle<State> {}
/// References to the handle are safe to share, as they do not mutate the handle itself and
/// operations are performed by the bindings backend, which is supposed to be thread safe.
unsafe impl<State: WakuNodeState> Sync for WakuNodeHandle<State> {}
impl<State: WakuNodeState> WakuNodeHandle<State> {
/// If the execution is successful, the result is the peer ID as a string (base58 encoded)
/// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_stop)
pub fn peer_id(&self) -> Result<PeerId> {
management::waku_peer_id()
}
/// Get the multiaddresses the Waku node is listening to
/// as per [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_listen_addresses)
pub fn listen_addresses(&self) -> Result<Vec<Multiaddr>> {
management::waku_listen_addresses()
}
/// Add a node multiaddress and protocol to the waku nodes peerstore.
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_add_peerchar-address-char-protocolid)
pub fn add_peer(&self, address: &Multiaddr, protocol_id: ProtocolId) -> Result<PeerId> {
peers::waku_add_peers(address, protocol_id)
}
} }
/// Spawn a new Waku node with the given configuration (default configuration if `None` provided) fn stop_node() -> Result<()> {
/// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_newchar-jsonconfig) let mut node_initialized = WAKU_NODE_INITIALIZED
pub async fn waku_new(config: Option<WakuNodeConfig>) -> Result<WakuNodeHandle<Initialized>> { .lock()
Ok(WakuNodeHandle { .expect("Access to the mutex at some point");
ctx: management::waku_new(config).await?, *node_initialized = false;
_state: PhantomData, management::waku_stop().map(|_| ())
})
}
impl<State> WakuNodeHandle<State> {
/// Get the nwaku version
pub async fn version(&self) -> Result<String> {
management::waku_version(&self.ctx).await
}
pub async fn waku_destroy(self) -> Result<()> {
let res = management::waku_destroy(&self.ctx).await;
self.ctx.reset_ptr();
res
}
/// Subscribe to WakuRelay to receive messages matching a content filter.
pub async fn relay_subscribe(&self, pubsub_topic: &PubsubTopic) -> Result<()> {
relay::waku_relay_subscribe(&self.ctx, pubsub_topic).await
}
} }
impl WakuNodeHandle<Initialized> { impl WakuNodeHandle<Initialized> {
/// Start a Waku node mounting all the protocols that were enabled during the Waku node instantiation. /// Start a Waku node mounting all the protocols that were enabled during the Waku node instantiation.
/// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_start) /// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_start)
pub async fn start(self) -> Result<WakuNodeHandle<Running>> { pub fn start(self) -> Result<WakuNodeHandle<Running>> {
management::waku_start(&self.ctx) management::waku_start().map(|_| WakuNodeHandle(Default::default()))
.await
.map(|_| WakuNodeHandle {
ctx: self.ctx,
_state: PhantomData,
})
} }
pub fn set_event_callback<F: FnMut(LibwakuResponse) + 'static + Sync + Send>( /// Stops a Waku node
&self, /// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_stop)
closure: F, pub fn stop(self) -> Result<()> {
) -> Result<()> { stop_node()
self.ctx.waku_set_event_callback(closure)
} }
} }
impl WakuNodeHandle<Running> { impl WakuNodeHandle<Running> {
/// Stops a Waku node /// Stops a Waku node
/// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_stop) /// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_stop)
pub async fn stop(self) -> Result<WakuNodeHandle<Initialized>> { pub fn stop(self) -> Result<()> {
management::waku_stop(&self.ctx) stop_node()
.await
.map(|_| WakuNodeHandle {
ctx: self.ctx,
_state: PhantomData,
})
}
/// Get the multiaddresses the Waku node is listening to
/// as per [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_listen_addresses)
pub async fn listen_addresses(&self) -> Result<Vec<Multiaddr>> {
management::waku_listen_addresses(&self.ctx).await
} }
/// Dial peer using a multiaddress /// Dial peer using a multiaddress
@ -109,105 +114,198 @@ impl WakuNodeHandle<Running> {
/// If the function execution takes longer than `timeout` value, the execution will be canceled and an error returned. /// If the function execution takes longer than `timeout` value, the execution will be canceled and an error returned.
/// Use 0 for no timeout /// Use 0 for no timeout
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_connect_peerchar-address-int-timeoutms) /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_connect_peerchar-address-int-timeoutms)
pub async fn connect(&self, address: &Multiaddr, timeout: Option<Duration>) -> Result<()> { pub fn connect_peer_with_address(
peers::waku_connect(&self.ctx, address, timeout).await &self,
address: &Multiaddr,
timeout: Option<Duration>,
) -> Result<()> {
peers::waku_connect_peer_with_address(address, timeout)
} }
pub async fn relay_publish_txt( /// Dial peer using a peer id
&self, /// If `timeout` as milliseconds doesn't fit into a `i32` it is clamped to [`i32::MAX`]
pubsub_topic: &PubsubTopic, /// The peer must be already known.
msg_txt: &String, /// It must have been added before with [`WakuNodeHandle::add_peer`] or previously dialed with [`WakuNodeHandle::connect_peer_with_address`]
content_topic_name: &'static str, /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_connect_peeridchar-peerid-int-timeoutms)
timeout: Option<Duration>, pub fn connect_peer_with_id(&self, peer_id: &PeerId, timeout: Option<Duration>) -> Result<()> {
) -> Result<MessageHash> { peers::waku_connect_peer_with_id(peer_id, timeout)
let content_topic = WakuContentTopic::new("waku", "2", content_topic_name, Encoding::Proto); }
let message = WakuMessage::new(msg_txt, content_topic, 0, Vec::new(), false);
relay::waku_relay_publish_message(&self.ctx, &message, pubsub_topic, timeout).await /// Disconnect a peer using its peer id
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_disconnect_peerchar-peerid)
pub fn disconnect_peer_with_id(&self, peer_id: &PeerId) -> Result<()> {
peers::waku_disconnect_peer_with_id(peer_id)
}
/// Get number of connected peers
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_peer_count)
pub fn peer_count(&self) -> Result<usize> {
peers::waku_peer_count()
}
/// Retrieve the list of peers known by the Waku node
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_peers)
pub fn peers(&self) -> Result<WakuPeers> {
peers::waku_peers()
} }
/// Publish a message using Waku Relay. /// Publish a message using Waku Relay.
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_relay_publishchar-messagejson-char-pubsubtopic-int-timeoutms) /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_relay_publishchar-messagejson-char-pubsubtopic-int-timeoutms)
/// The pubsub_topic parameter is optional and if not specified it will be derived from the contentTopic. /// The pubsub_topic parameter is optional and if not specified it will be derived from the contentTopic.
pub async fn relay_publish_message( pub fn relay_publish_message(
&self, &self,
message: &WakuMessage, message: &WakuMessage,
pubsub_topic: &PubsubTopic, pubsub_topic: Option<WakuPubSubTopic>,
timeout: Option<Duration>, timeout: Option<Duration>,
) -> Result<MessageHash> { ) -> Result<MessageId> {
relay::waku_relay_publish_message(&self.ctx, message, pubsub_topic, timeout).await relay::waku_relay_publish_message(message, pubsub_topic, timeout)
}
/// Determine if there are enough peers to publish a message on a given pubsub topic
pub fn relay_enough_peers(&self, pubsub_topic: Option<WakuPubSubTopic>) -> Result<bool> {
relay::waku_enough_peers(pubsub_topic)
}
/// Subscribe to WakuRelay to receive messages matching a content filter.
pub fn relay_subscribe(&self, content_filter: &ContentFilter) -> Result<()> {
relay::waku_relay_subscribe(content_filter)
} }
/// Closes the pubsub subscription to stop receiving messages matching a content filter. No more messages will be received from this pubsub topic /// Closes the pubsub subscription to stop receiving messages matching a content filter. No more messages will be received from this pubsub topic
pub async fn relay_unsubscribe(&self, pubsub_topic: &PubsubTopic) -> Result<()> { pub fn relay_unsubscribe(&self, content_filter: &ContentFilter) -> Result<()> {
relay::waku_relay_unsubscribe(&self.ctx, pubsub_topic).await relay::waku_relay_unsubscribe(content_filter)
} }
pub async fn filter_subscribe( /// Returns the list of pubsub topics the node is subscribed to in Waku Relay
pub fn relay_topics(&self) -> Result<Vec<String>> {
relay::waku_relay_topics()
}
/// Retrieves historical messages on specific content topics. This method may be called with [`PagingOptions`](`crate::general::PagingOptions`),
/// to retrieve historical messages on a per-page basis. If the request included [`PagingOptions`](`crate::general::PagingOptions`),
/// the node must return messages on a per-page basis and include [`PagingOptions`](`crate::general::PagingOptions`) in the response.
/// These [`PagingOptions`](`crate::general::PagingOptions`) must contain a cursor pointing to the Index from which a new page can be requested
pub fn store_query(
&self, &self,
pubsub_topic: &PubsubTopic, query: &StoreQuery,
content_topics: Vec<WakuContentTopic>, peer_id: &PeerId,
) -> Result<()> { timeout: Option<Duration>,
filter::waku_filter_subscribe(&self.ctx, pubsub_topic, content_topics).await ) -> Result<StoreResponse> {
store::waku_store_query(query, peer_id, timeout)
} }
pub async fn filter_unsubscribe( /// Retrieves locally stored historical messages on specific content topics. This method may be called with [`PagingOptions`](`crate::general::PagingOptions`),
&self, /// to retrieve historical messages on a per-page basis. If the request included [`PagingOptions`](`crate::general::PagingOptions`),
pubsub_topic: &PubsubTopic, /// the node must return messages on a per-page basis and include [`PagingOptions`](`crate::general::PagingOptions`) in the response.
content_topics: Vec<WakuContentTopic>, /// These [`PagingOptions`](`crate::general::PagingOptions`) must contain a cursor pointing to the Index from which a new page can be requested
) -> Result<()> { pub fn local_store_query(&self, query: &StoreQuery) -> Result<StoreResponse> {
filter::waku_filter_unsubscribe(&self.ctx, pubsub_topic, content_topics).await store::waku_local_store_query(query)
} }
pub async fn filter_unsubscribe_all(&self) -> Result<()> { /// Publish a message using Waku Lightpush.
filter::waku_filter_unsubscribe_all(&self.ctx).await /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_lightpush_publishchar-messagejson-char-topic-char-peerid-int-timeoutms)
} /// The pubsub_topic parameter is optional and if not specified it will be derived from the contentTopic.
pub fn lightpush_publish(
pub async fn lightpush_publish_message(
&self, &self,
message: &WakuMessage, message: &WakuMessage,
pubsub_topic: &PubsubTopic, pubsub_topic: Option<WakuPubSubTopic>,
) -> Result<MessageHash> { peer_id: PeerId,
lightpush::waku_lightpush_publish_message(&self.ctx, message, pubsub_topic).await timeout: Option<Duration>,
) -> Result<MessageId> {
lightpush::waku_lightpush_publish(message, pubsub_topic, peer_id, timeout)
} }
#[allow(clippy::too_many_arguments)] /// Creates a subscription in a light node for messages that match a content filter and optionally a [`WakuPubSubTopic`](`crate::general::WakuPubSubTopic`)
pub async fn store_query( /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_filter_subscribechar-filterjson-char-peerid-int-timeoutms)
#[deprecated]
pub fn legacy_filter_subscribe(
&self, &self,
pubsub_topic: Option<PubsubTopic>, filter_subscription: &LegacyFilterSubscription,
content_topics: Vec<WakuContentTopic>, peer_id: PeerId,
peer_addr: &str, timeout: Duration,
include_data: bool, // if true, resp contains payload, etc. Only msg_hashes otherwise ) -> Result<()> {
time_start: Option<u64>, // unix time nanoseconds legacyfilter::waku_legacy_filter_subscribe(filter_subscription, peer_id, timeout)
time_end: Option<u64>, // unix time nanoseconds }
timeout_millis: Option<Duration>,
) -> Result<Vec<StoreWakuMessageResponse>> {
let mut cursor: Option<MessageHash> = None;
let mut messages: Vec<StoreWakuMessageResponse> = Vec::new(); /// Removes subscriptions in a light node matching a content filter and, optionally, a [`WakuPubSubTopic`](`crate::general::WakuPubSubTopic`)
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_filter_unsubscribechar-filterjson-int-timeoutms)
#[deprecated]
pub fn legacy_filter_unsubscribe(
&self,
filter_subscription: &LegacyFilterSubscription,
timeout: Duration,
) -> Result<()> {
legacyfilter::waku_legacy_filter_unsubscribe(filter_subscription, timeout)
}
loop { /// Creates a subscription to a filter full node matching a content filter.
let query = StoreQueryRequest::new() /// Returns the PeerId on which the filter subscription was created
.with_pubsub_topic(pubsub_topic.clone()) pub fn filter_subscribe(
.with_content_topics(content_topics.clone()) &self,
.with_include_data(include_data) content_filter: &ContentFilter,
.with_time_start(time_start) peer_id: Option<PeerId>,
.with_time_end(time_end) timeout: Option<Duration>,
.with_pagination_cursor(cursor) ) -> Result<FilterSubscriptionResult> {
.with_pagination_forward(true); filter::waku_filter_subscribe(content_filter, peer_id, timeout)
}
let response = /// Used to know if a service node has an active subscription for this client
store::waku_store_query(&self.ctx, query, peer_addr, timeout_millis).await?; pub fn filter_ping(&self, peer_id: PeerId, timeout: Option<Duration>) -> Result<()> {
filter::waku_filter_ping(peer_id, timeout)
}
messages.extend(response.messages); /// Sends a request to a service node to stop pushing messages matching this filter to this client.
/// It might be used to modify an existing subscription by providing a subset of the original filter
/// criteria
pub fn filter_unsubscribe(
&self,
content_filter: &ContentFilter,
peer_id: PeerId,
timeout: Option<Duration>,
) -> Result<()> {
filter::waku_filter_unsubscribe(content_filter, peer_id, timeout)
}
if response.pagination_cursor.is_none() { /// Sends a request to a service node (or all service nodes) to stop pushing messages
break; pub fn filter_unsubscribe_all(
} &self,
cursor = response.pagination_cursor; peer_id: Option<PeerId>,
} timeout: Option<Duration>,
) -> Result<()> {
filter::waku_filter_unsubscribe_all(peer_id, timeout)
}
messages.reverse(); /// Update the bootnodes used by DiscoveryV5 by passing a list of ENRs
pub fn discv5_update_bootnodes(bootnodes: Vec<String>) -> Result<()> {
Ok(messages) discovery::waku_discv5_update_bootnodes(bootnodes)
}
}
/// Spawn a new Waku node with the given configuration (default configuration if `None` provided)
/// as per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_newchar-jsonconfig)
pub fn waku_new(config: Option<WakuNodeConfig>) -> Result<WakuNodeHandle<Initialized>> {
let mut node_initialized = WAKU_NODE_INITIALIZED
.lock()
.expect("Access to the mutex at some point");
if *node_initialized {
return Err("Waku node is already initialized".into());
}
*node_initialized = true;
management::waku_new(config).map(|_| WakuNodeHandle(Default::default()))
}
#[cfg(test)]
mod tests {
use super::waku_new;
use serial_test::serial;
#[test]
#[serial]
fn exclusive_running() {
let handle1 = waku_new(None).unwrap();
let handle2 = waku_new(None);
assert!(handle2.is_err());
let stop_handle = handle1.start().unwrap();
stop_handle.stop().unwrap();
} }
} }
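
The v0.5.0 handle encodes the node lifecycle in the type system: `start` consumes the Initialized handle and returns a Running one, so protocol calls are only reachable once the node is actually up, and the process-wide flag keeps a second node from being created. A small sketch of that flow (error handling kept minimal):

// Sketch: typestate flow on the v0.5.0 side of this diff.
use waku_bindings::waku_new;

fn run_node() -> Result<(), String> {
    let node = waku_new(None)?;            // WakuNodeHandle<Initialized>
    // node.peer_count() would not compile here: it only exists on WakuNodeHandle<Running>.
    let node = node.start()?;              // WakuNodeHandle<Running>
    println!("connected peers: {}", node.peer_count()?);
    for addr in node.listen_addresses()? {
        println!("listening on {addr}");
    }
    node.stop()                            // consumes the handle and clears the running flag
}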

View File

@ -4,33 +4,224 @@
use std::ffi::CString; use std::ffi::CString;
use std::time::Duration; use std::time::Duration;
// crates // crates
use libc::*;
use multiaddr::Multiaddr; use multiaddr::Multiaddr;
use serde::Deserialize;
// internal // internal
use crate::general::libwaku_response::{handle_no_response, LibwakuResponse}; use crate::general::{PeerId, ProtocolId, Result};
use crate::general::Result; use crate::utils::{get_trampoline, handle_json_response, handle_no_response, handle_response};
use crate::handle_ffi_call;
use crate::node::context::WakuNodeContext; /// Add a node multiaddress and protocol to the waku nodes peerstore.
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_add_peerchar-address-char-protocolid)
pub fn waku_add_peers(address: &Multiaddr, protocol_id: ProtocolId) -> Result<PeerId> {
let address_ptr = CString::new(address.to_string())
.expect("CString should build properly from the address")
.into_raw();
let protocol_id_ptr = CString::new(protocol_id.to_string())
.expect("CString should build properly from the protocol id")
.into_raw();
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_add_peer(
address_ptr,
protocol_id_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(address_ptr));
drop(CString::from_raw(protocol_id_ptr));
out
};
handle_response(code, &result)
}
/// Dial peer using a multiaddress /// Dial peer using a multiaddress
/// If `timeout` as milliseconds doesn't fit into a `i32` it is clamped to [`i32::MAX`] /// If `timeout` as milliseconds doesn't fit into a `i32` it is clamped to [`i32::MAX`]
/// If the function execution takes longer than `timeout` value, the execution will be canceled and an error returned. /// If the function execution takes longer than `timeout` value, the execution will be canceled and an error returned.
/// Use 0 for no timeout /// Use 0 for no timeout
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_connect_peerchar-address-int-timeoutms) /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_connect_peerchar-address-int-timeoutms)
pub async fn waku_connect( pub fn waku_connect_peer_with_address(
ctx: &WakuNodeContext,
address: &Multiaddr, address: &Multiaddr,
timeout: Option<Duration>, timeout: Option<Duration>,
) -> Result<()> { ) -> Result<()> {
let address = let address_ptr = CString::new(address.to_string())
CString::new(address.to_string()).expect("CString should build properly from multiaddress"); .expect("CString should build properly from multiaddress")
.into_raw();
handle_ffi_call!( let mut error: String = Default::default();
waku_sys::waku_connect, let error_cb = |v: &str| error = v.to_string();
handle_no_response, let code = unsafe {
ctx.get_ptr(), let mut closure = error_cb;
address.as_ptr(), let cb = get_trampoline(&closure);
timeout let out = waku_sys::waku_connect(
.map(|duration| duration.as_millis().try_into().unwrap_or(u32::MAX)) address_ptr,
.unwrap_or(0) timeout
) .map(|duration| duration.as_millis().try_into().unwrap_or(i32::MAX))
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(address_ptr));
out
};
handle_no_response(code, &error)
}
/// Dial peer using a peer id
/// If `timeout` as milliseconds doesn't fit into a `i32` it is clamped to [`i32::MAX`]
/// The peer must be already known.
/// It must have been added before with [`waku_add_peers`] or previously dialed with [`waku_connect_peer_with_address`]
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_connect_peeridchar-peerid-int-timeoutms)
pub fn waku_connect_peer_with_id(peer_id: &PeerId, timeout: Option<Duration>) -> Result<()> {
let peer_id_ptr = CString::new(peer_id.as_bytes())
.expect("CString should build properly from peer id")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_connect_peerid(
peer_id_ptr,
timeout
.map(|duration| duration.as_millis().try_into().unwrap_or(i32::MAX))
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(peer_id_ptr));
out
};
handle_no_response(code, &error)
}
/// Disconnect a peer using its peer id
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_disconnect_peerchar-peerid)
pub fn waku_disconnect_peer_with_id(peer_id: &PeerId) -> Result<()> {
let peer_id_ptr = CString::new(peer_id.as_bytes())
.expect("CString should build properly from peer id")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_disconnect(peer_id_ptr, cb, &mut closure as *mut _ as *mut c_void);
drop(CString::from_raw(peer_id_ptr));
out
};
handle_no_response(code, &error)
}
/// Get number of connected peers
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_peer_count)
pub fn waku_peer_count() -> Result<usize> {
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
waku_sys::waku_peer_cnt(cb, &mut closure as *mut _ as *mut c_void)
};
handle_response(code, &result)
}
/// Waku peer supported protocol
///
/// Examples:
/// `"/ipfs/id/1.0.0"`
/// `"/vac/waku/relay/2.0.0"`
/// `"/ipfs/ping/1.0.0"`
pub type Protocol = String;
/// Peer data from known/connected waku nodes
#[derive(Deserialize, Clone, Debug)]
#[serde(rename_all = "camelCase")]
pub struct WakuPeerData {
/// Waku peer id
#[serde(alias = "peerID")]
peer_id: PeerId,
/// Supported node protocols
protocols: Vec<Protocol>,
/// Node available addresses
#[serde(alias = "addrs")]
addresses: Vec<Multiaddr>,
/// Already connected flag
connected: bool,
}
impl WakuPeerData {
pub fn peer_id(&self) -> &PeerId {
&self.peer_id
}
pub fn protocols(&self) -> &[Protocol] {
&self.protocols
}
pub fn addresses(&self) -> &[Multiaddr] {
&self.addresses
}
pub fn connected(&self) -> bool {
self.connected
}
}
/// List of [`WakuPeerData`]
pub type WakuPeers = Vec<WakuPeerData>;
/// Retrieve the list of peers known by the Waku node
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_peers)
pub fn waku_peers() -> Result<WakuPeers> {
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
waku_sys::waku_peers(cb, &mut closure as *mut _ as *mut c_void)
};
handle_json_response(code, &result)
}
#[cfg(test)]
mod tests {
use crate::node::peers::WakuPeerData;
#[test]
fn deserialize_waku_peer_data() {
let json_str = r#"{
"peerID": "16Uiu2HAmJb2e28qLXxT5kZxVUUoJt72EMzNGXB47RedcBafeDCBA",
"protocols": [
"/ipfs/id/1.0.0",
"/vac/waku/relay/2.0.0",
"/ipfs/ping/1.0.0"
],
"addrs": [
"/ip4/1.2.3.4/tcp/30303"
],
"connected": true
}"#;
let _: WakuPeerData = serde_json::from_str(json_str).unwrap();
}
} }
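
Putting the peer management pieces together, here is a sketch of dialing a node by multiaddress and then inspecting the peer store through the v0.5.0 handle; the address below is illustrative, not one of the project's bootstrap nodes:

// Sketch: dial a peer by multiaddress, then list what the peer store knows about it.
use std::time::Duration;
use multiaddr::Multiaddr;
use waku_bindings::{Running, WakuNodeHandle};

fn dial_and_list(node: &WakuNodeHandle<Running>) -> Result<(), String> {
    let addr: Multiaddr = "/ip4/127.0.0.1/tcp/60000"   // illustrative address only
        .parse()
        .map_err(|e| format!("invalid multiaddress: {e}"))?;
    node.connect_peer_with_address(&addr, Some(Duration::from_secs(10)))?;
    for peer in node.peers()? {
        println!(
            "{} connected={} protocols={:?} addresses={:?}",
            peer.peer_id(),
            peer.connected(),
            peer.protocols(),
            peer.addresses(),
        );
    }
    Ok(())
}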

View File

@ -3,99 +3,214 @@
// std // std
use std::ffi::CString; use std::ffi::CString;
use std::time::Duration; use std::time::Duration;
// crates
use libc::*;
// internal // internal
use crate::general::contenttopic::{Encoding, WakuContentTopic}; use crate::general::{
use crate::general::libwaku_response::{handle_no_response, handle_response, LibwakuResponse}; ContentFilter, Encoding, MessageId, Result, WakuContentTopic, WakuMessage, WakuPubSubTopic,
use crate::general::pubsubtopic::PubsubTopic; };
use crate::general::{messagehash::MessageHash, Result, WakuMessage}; use crate::utils::{get_trampoline, handle_json_response, handle_no_response, handle_response};
use crate::handle_ffi_call;
use crate::node::context::WakuNodeContext;
/// Create a content topic according to [RFC 23](https://rfc.vac.dev/spec/23/) /// Create a content topic according to [RFC 23](https://rfc.vac.dev/spec/23/)
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_content_topicchar-applicationname-unsigned-int-applicationversion-char-contenttopicname-char-encoding) /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_content_topicchar-applicationname-unsigned-int-applicationversion-char-contenttopicname-char-encoding)
#[allow(clippy::not_unsafe_ptr_arg_deref)] pub fn waku_create_content_topic(
pub async fn waku_create_content_topic(
ctx: &WakuNodeContext,
application_name: &str, application_name: &str,
application_version: u32, application_version: &str,
content_topic_name: &str, content_topic_name: &str,
encoding: Encoding, encoding: Encoding,
) -> Result<WakuContentTopic> { ) -> WakuContentTopic {
let application_name = CString::new(application_name) let application_name_ptr = CString::new(application_name)
.expect("Application name should always transform to CString"); .expect("Application name should always transform to CString")
let content_topic_name = .into_raw();
CString::new(content_topic_name).expect("Content topic should always transform to CString"); let application_version_ptr = CString::new(application_version)
let encoding = .expect("Application version should always transform to CString")
CString::new(encoding.to_string()).expect("Encoding should always transform to CString"); .into_raw();
let content_topic_name_ptr = CString::new(content_topic_name)
.expect("Content topic should always transform to CString")
.into_raw();
let encoding_ptr = CString::new(encoding.to_string())
.expect("Encoding should always transform to CString")
.into_raw();
handle_ffi_call!( let mut result: String = Default::default();
waku_sys::waku_content_topic, let result_cb = |v: &str| result = v.to_string();
handle_response, let code = unsafe {
ctx.get_ptr(), let mut closure = result_cb;
application_name.as_ptr(), let cb = get_trampoline(&closure);
application_version, let out = waku_sys::waku_content_topic(
content_topic_name.as_ptr(), application_name_ptr,
encoding.as_ptr() application_version_ptr,
) content_topic_name_ptr,
encoding_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(application_name_ptr));
drop(CString::from_raw(application_version_ptr));
drop(CString::from_raw(content_topic_name_ptr));
drop(CString::from_raw(encoding_ptr));
out
};
handle_response::<WakuContentTopic>(code, &result)
.expect("&str from result should always be extracted")
}
/// Default pubsub topic used for exchanging waku messages defined in [RFC 10](https://rfc.vac.dev/spec/10/)
pub fn waku_default_pubsub_topic() -> WakuPubSubTopic {
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
waku_sys::waku_default_pubsub_topic(cb, &mut closure as *mut _ as *mut c_void)
};
handle_response(code, &result).expect("&str from result should always be extracted")
}
/// Get the list of subscribed pubsub topics in Waku Relay.
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_relay_topics)
pub fn waku_relay_topics() -> Result<Vec<String>> {
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
waku_sys::waku_relay_topics(cb, &mut closure as *mut _ as *mut c_void)
};
handle_json_response(code, &result)
} }
/// Publish a message using Waku Relay /// Publish a message using Waku Relay
/// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_relay_publishchar-messagejson-char-pubsubtopic-int-timeoutms) /// As per the [specification](https://rfc.vac.dev/spec/36/#extern-char-waku_relay_publishchar-messagejson-char-pubsubtopic-int-timeoutms)
pub async fn waku_relay_publish_message( pub fn waku_relay_publish_message(
ctx: &WakuNodeContext,
message: &WakuMessage, message: &WakuMessage,
pubsub_topic: &PubsubTopic, pubsub_topic: Option<WakuPubSubTopic>,
timeout: Option<Duration>, timeout: Option<Duration>,
) -> Result<MessageHash> { ) -> Result<MessageId> {
let message = CString::new( let pubsub_topic = pubsub_topic
.unwrap_or_else(waku_default_pubsub_topic)
.to_string();
let message_ptr = CString::new(
serde_json::to_string(&message) serde_json::to_string(&message)
.expect("WakuMessages should always be able to success serializing"), .expect("WakuMessages should always be able to success serializing"),
) )
.expect("CString should build properly from the serialized waku message"); .expect("CString should build properly from the serialized waku message")
.into_raw();
let pubsub_topic_ptr = CString::new(pubsub_topic)
.expect("CString should build properly from pubsub topic")
.into_raw();
let pubsub_topic = CString::new(String::from(pubsub_topic)) let mut result: String = Default::default();
.expect("CString should build properly from pubsub topic"); let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_relay_publish(
message_ptr,
pubsub_topic_ptr,
timeout
.map(|duration| {
duration
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32")
})
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
handle_ffi_call!( drop(CString::from_raw(message_ptr));
waku_sys::waku_relay_publish, drop(CString::from_raw(pubsub_topic_ptr));
handle_response,
ctx.get_ptr(), out
pubsub_topic.as_ptr(), };
message.as_ptr(),
timeout handle_response(code, &result)
.map(|duration| {
duration
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a u32")
})
.unwrap_or(0)
)
} }
pub async fn waku_relay_subscribe(ctx: &WakuNodeContext, pubsub_topic: &PubsubTopic) -> Result<()> { pub fn waku_enough_peers(pubsub_topic: Option<WakuPubSubTopic>) -> Result<bool> {
let pubsub_topic = CString::new(String::from(pubsub_topic)) let pubsub_topic = pubsub_topic
.expect("CString should build properly from pubsub topic"); .unwrap_or_else(waku_default_pubsub_topic)
.to_string();
handle_ffi_call!( let pubsub_topic_ptr = CString::new(pubsub_topic)
waku_sys::waku_relay_subscribe, .expect("CString should build properly from pubsub topic")
handle_no_response, .into_raw();
ctx.get_ptr(),
pubsub_topic.as_ptr() let mut result: String = Default::default();
) let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_relay_enough_peers(
pubsub_topic_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(pubsub_topic_ptr));
out
};
handle_response(code, &result)
} }
pub async fn waku_relay_unsubscribe( pub fn waku_relay_subscribe(content_filter: &ContentFilter) -> Result<()> {
ctx: &WakuNodeContext, let content_filter_ptr = CString::new(
pubsub_topic: &PubsubTopic, serde_json::to_string(content_filter)
) -> Result<()> { .expect("ContentFilter should always succeed to serialize"),
let pubsub_topic = CString::new(String::from(pubsub_topic))
.expect("CString should build properly from pubsub topic");
handle_ffi_call!(
waku_sys::waku_relay_unsubscribe,
handle_no_response,
ctx.get_ptr(),
pubsub_topic.as_ptr()
) )
.expect("ContentFilter should always be able to be serialized")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_relay_subscribe(
content_filter_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(content_filter_ptr));
out
};
handle_no_response(code, &error)
}
pub fn waku_relay_unsubscribe(content_filter: &ContentFilter) -> Result<()> {
let content_filter_ptr = CString::new(
serde_json::to_string(content_filter)
.expect("ContentFilter should always succeed to serialize"),
)
.expect("ContentFilter should always be able to be serialized")
.into_raw();
let mut error: String = Default::default();
let error_cb = |v: &str| error = v.to_string();
let code = unsafe {
let mut closure = error_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_relay_subscribe(
content_filter_ptr,
cb,
&mut closure as *mut _ as *mut c_void,
);
drop(CString::from_raw(content_filter_ptr));
out
};
handle_no_response(code, &error)
} }
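
As a usage sketch of the v0.5.0 relay path above: create a content topic, wrap a payload in a WakuMessage the same way the test suite later in this diff does, and publish it on the default pubsub topic (application and topic names are illustrative):

// Sketch: build a message and publish it over relay (v0.5.0 API, default pubsub topic).
use std::time::SystemTime;
use waku_bindings::{
    waku_create_content_topic, Encoding, MessageId, Running, WakuMessage, WakuNodeHandle,
};

fn publish_hello(node: &WakuNodeHandle<Running>) -> Result<MessageId, String> {
    let content_topic = waku_create_content_topic("example-app", "1", "greetings", Encoding::Proto);
    let message = WakuMessage::new(
        "hello from the bindings",
        content_topic,
        1,
        SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .unwrap()
            .as_millis()
            .try_into()
            .unwrap(),
        Vec::new(),
        false,
    );
    // None derives/uses the default pubsub topic; a None timeout maps to 0 (no timeout).
    node.relay_publish_message(&message, None, None)
}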

View File

@ -1,176 +1,84 @@
//! Waku store protocol related methods //! Waku [store](https://rfc.vac.dev/spec/36/#waku-store) handling methods
// std // std
use std::ffi::CString; use std::ffi::CString;
use uuid::Uuid; use std::time::Duration;
// crates // crates
use tokio::time::Duration; use libc::*;
// internal // internal
use crate::general::libwaku_response::{handle_response, LibwakuResponse}; use crate::general::{PeerId, Result, StoreQuery, StoreResponse};
use crate::general::time::get_now_in_nanosecs; use crate::utils::{get_trampoline, handle_json_response};
use crate::general::waku_decode::WakuDecode;
use crate::general::{
contenttopic::WakuContentTopic, messagehash::MessageHash, pubsubtopic::PubsubTopic, Result,
WakuStoreRespMessage,
};
use crate::handle_ffi_call;
use crate::node::context::WakuNodeContext;
use multiaddr::Multiaddr;
use serde::{Deserialize, Serialize};
// #[derive(Clone, Serialize, Deserialize, Debug)] /// Retrieves historical messages on specific content topics. This method may be called with [`PagingOptions`](`crate::general::PagingOptions`),
// #[serde(rename_all = "camelCase")] /// to retrieve historical messages on a per-page basis. If the request included [`PagingOptions`](`crate::general::PagingOptions`),
// pub struct PagingOptions { /// the node must return messages on a per-page basis and include [`PagingOptions`](`crate::general::PagingOptions`) in the response.
// pub page_size: usize, /// These [`PagingOptions`](`crate::general::PagingOptions`) must contain a cursor pointing to the Index from which a new page can be requested
// pub cursor: Option<MessageHash>, pub fn waku_store_query(
// pub forward: bool, query: &StoreQuery,
// } peer_id: &PeerId,
timeout: Option<Duration>,
/// Criteria used to retrieve historical messages
#[derive(Clone, Serialize, Debug)]
pub struct StoreQueryRequest {
/// if true, the store-response will include the full message content. If false,
/// the store-response will only include a list of message hashes.
#[serde(rename = "request_id")]
request_id: String,
#[serde(rename = "include_data")]
include_data: bool,
#[serde(rename = "pubsub_topic", skip_serializing_if = "Option::is_none")]
pubsub_topic: Option<PubsubTopic>,
#[serde(rename = "content_topics")]
content_topics: Vec<WakuContentTopic>,
#[serde(rename = "time_start", skip_serializing_if = "Option::is_none")]
time_start: Option<u64>,
#[serde(rename = "time_end", skip_serializing_if = "Option::is_none")]
time_end: Option<u64>,
#[serde(rename = "message_hashes", skip_serializing_if = "Option::is_none")]
message_hashes: Option<Vec<MessageHash>>,
#[serde(rename = "pagination_cursor", skip_serializing_if = "Option::is_none")]
pagination_cursor: Option<MessageHash>, // Message hash (key) from where to start query (exclusive)
#[serde(rename = "pagination_forward")]
pagination_forward: bool,
#[serde(rename = "pagination_limit", skip_serializing_if = "Option::is_none")]
pagination_limit: Option<u64>,
}
impl StoreQueryRequest {
pub fn new() -> Self {
StoreQueryRequest {
request_id: Uuid::new_v4().to_string(),
include_data: true,
pubsub_topic: None,
content_topics: Vec::new(),
time_start: Some(get_now_in_nanosecs()),
time_end: Some(get_now_in_nanosecs()),
message_hashes: None,
pagination_cursor: None,
pagination_forward: true,
pagination_limit: Some(25),
}
}
pub fn with_include_data(mut self, include_data: bool) -> Self {
self.include_data = include_data;
self
}
pub fn with_pubsub_topic(mut self, pubsub_topic: Option<PubsubTopic>) -> Self {
self.pubsub_topic = pubsub_topic;
self
}
pub fn with_content_topics(mut self, content_topics: Vec<WakuContentTopic>) -> Self {
self.content_topics = content_topics;
self
}
pub fn with_time_start(mut self, time_start: Option<u64>) -> Self {
self.time_start = time_start;
self
}
pub fn with_time_end(mut self, time_end: Option<u64>) -> Self {
self.time_end = time_end;
self
}
#[allow(dead_code)]
pub fn with_message_hashes(mut self, message_hashes: Vec<MessageHash>) -> Self {
self.message_hashes = Some(message_hashes);
self
}
pub fn with_pagination_cursor(mut self, pagination_cursor: Option<MessageHash>) -> Self {
self.pagination_cursor = pagination_cursor;
self
}
pub fn with_pagination_forward(mut self, pagination_forward: bool) -> Self {
self.pagination_forward = pagination_forward;
self
}
}
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct StoreWakuMessageResponse {
pub message_hash: MessageHash,
pub message: Option<WakuStoreRespMessage>, // None if include_data == false
pub pubsub_topic: String,
}
#[derive(Clone, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct StoreResponse {
#[allow(unused)]
pub request_id: String,
#[serde(skip_serializing_if = "Option::is_none")]
#[allow(unused)]
pub status_code: u32,
#[allow(unused)]
pub status_desc: String,
/// Array of retrieved historical messages in [`WakuMessage`] format
// #[serde(default)]
pub messages: Vec<StoreWakuMessageResponse>,
/// Paging information in [`PagingOptions`] format from which to resume further historical queries
#[serde(skip_serializing_if = "Option::is_none")]
pub pagination_cursor: Option<MessageHash>,
}
// Implement WakuDecode for StoreResponse
impl WakuDecode for StoreResponse {
fn decode(input: &str) -> Result<Self> {
Ok(serde_json::from_str(input).expect("could not parse store resp"))
}
}
pub async fn waku_store_query(
ctx: &WakuNodeContext,
query: StoreQueryRequest,
peer_addr: &str,
timeout_millis: Option<Duration>,
) -> Result<StoreResponse> { ) -> Result<StoreResponse> {
let json_query = CString::new( let query_ptr = CString::new(
serde_json::to_string(&query).expect("StoreQuery should always be able to be serialized"), serde_json::to_string(query).expect("StoreQuery should always be able to be serialized"),
) )
.expect("CString should build properly from the serialized filter subscription"); .expect("CString should build properly from the serialized filter subscription")
.into_raw();
let peer_id_ptr = CString::new(peer_id.clone())
.expect("CString should build properly from peer id")
.into_raw();
peer_addr let mut result: String = Default::default();
.parse::<Multiaddr>() let result_cb = |v: &str| result = v.to_string();
.expect("correct multiaddress in store query"); let code = unsafe {
let peer_addr = CString::new(peer_addr).expect("peer_addr CString should be created"); let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out = waku_sys::waku_store_query(
query_ptr,
peer_id_ptr,
timeout
.map(|timeout| {
timeout
.as_millis()
.try_into()
.expect("Duration as milliseconds should fit in a i32")
})
.unwrap_or(0),
cb,
&mut closure as *mut _ as *mut c_void,
);
let timeout_millis = timeout_millis.unwrap_or(Duration::from_secs(10)); drop(CString::from_raw(query_ptr));
drop(CString::from_raw(peer_id_ptr));
handle_ffi_call!( out
waku_sys::waku_store_query, };
handle_response,
ctx.get_ptr(), handle_json_response(code, &result)
json_query.as_ptr(), }
peer_addr.as_ptr(),
timeout_millis.as_millis() as i32 /// Retrieves locally stored historical messages on specific content topics from the local archive system. This method may be called with [`PagingOptions`](`crate::general::PagingOptions`),
) /// to retrieve historical messages on a per-page basis. If the request included [`PagingOptions`](`crate::general::PagingOptions`),
/// the node must return messages on a per-page basis and include [`PagingOptions`](`crate::general::PagingOptions`) in the response.
/// These [`PagingOptions`](`crate::general::PagingOptions`) must contain a cursor pointing to the Index from which a new page can be requested
pub fn waku_local_store_query(query: &StoreQuery) -> Result<StoreResponse> {
let query_ptr = CString::new(
serde_json::to_string(query).expect("StoreQuery should always be able to be serialized"),
)
.expect("CString should build properly from the serialized filter subscription")
.into_raw();
let mut result: String = Default::default();
let result_cb = |v: &str| result = v.to_string();
let code = unsafe {
let mut closure = result_cb;
let cb = get_trampoline(&closure);
let out =
waku_sys::waku_store_local_query(query_ptr, cb, &mut closure as *mut _ as *mut c_void);
drop(CString::from_raw(query_ptr));
out
};
handle_json_response(code, &result)
} }
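
The master-side request type above is built through its `with_*` methods; here is a sketch of assembling a hash-only query for roughly the last hour (topic values are illustrative, and the sketch assumes `StoreQueryRequest`, `PubsubTopic` and `WakuContentTopic` are in scope as defined in this diff):

// Sketch: a master-side store query that asks only for message hashes
// from the last hour on one pubsub topic / content topic pair.
fn hashes_only_query(content_topic: WakuContentTopic, now_ns: u64) -> StoreQueryRequest {
    StoreQueryRequest::new()
        .with_pubsub_topic(Some(PubsubTopic::new("example-pubsub-topic")))
        .with_content_topics(vec![content_topic])
        .with_include_data(false)                          // hashes only, no payloads
        .with_time_start(Some(now_ns - 3_600_000_000_000)) // one hour ago, in nanoseconds
        .with_time_end(Some(now_ns))
        .with_pagination_forward(true)
}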

View File

@ -0,0 +1,72 @@
use crate::general::Result;
use core::str::FromStr;
use serde::de::DeserializeOwned;
use std::ffi::CStr;
use waku_sys::WakuCallBack;
use waku_sys::{RET_ERR, RET_MISSING_CALLBACK, RET_OK};
pub fn decode<T: DeserializeOwned>(input: &str) -> Result<T> {
serde_json::from_str(input)
.map_err(|err| format!("could not deserialize waku response: {}", err))
}
unsafe extern "C" fn trampoline<F>(
_ret_code: ::std::os::raw::c_int,
data: *const ::std::os::raw::c_char,
user_data: *mut ::std::os::raw::c_void,
) where
F: FnMut(&str),
{
let user_data = &mut *(user_data as *mut F);
let response = if data.is_null() {
""
} else {
unsafe { CStr::from_ptr(data) }
.to_str()
.map_err(|err| {
format!(
"could not retrieve response from pointer returned by waku: {}",
err
)
})
.expect("could not retrieve response")
};
user_data(response);
}
pub fn get_trampoline<F>(_closure: &F) -> WakuCallBack
where
F: FnMut(&str),
{
Some(trampoline::<F>)
}
pub fn handle_no_response(code: i32, error: &str) -> Result<()> {
match code {
RET_OK => Ok(()),
RET_ERR => Err(format!("waku error: {}", error)),
RET_MISSING_CALLBACK => Err("missing callback".to_string()),
_ => Err(format!("undefined return code {}", code)),
}
}
pub fn handle_json_response<F: DeserializeOwned>(code: i32, result: &str) -> Result<F> {
match code {
RET_OK => decode(result),
RET_ERR => Err(format!("waku error: {}", result)),
RET_MISSING_CALLBACK => Err("missing callback".to_string()),
_ => Err(format!("undefined return code {}", code)),
}
}
pub fn handle_response<F: FromStr>(code: i32, result: &str) -> Result<F> {
match code {
RET_OK => result
.parse()
.map_err(|_| format!("could not parse value: {}", result)),
RET_ERR => Err(format!("waku error: {}", result)),
RET_MISSING_CALLBACK => Err("missing callback".to_string()),
_ => Err(format!("undefined return code {}", code)),
}
}
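
The three handlers above differ only in how they interpret the string captured by the callback: `handle_no_response` looks at the return code alone, `handle_response` parses the payload with `FromStr`, and `handle_json_response` deserializes it with serde. A small sketch, assuming these helpers and `waku_sys::RET_OK` are in scope (the payload values are made up for illustration):

// Sketch: what each response handler expects from the captured callback output.
fn handler_examples() {
    // Return code only; the error string is ignored on success.
    assert!(handle_no_response(waku_sys::RET_OK, "").is_ok());

    // FromStr-based: the payload must parse into the requested type.
    let n: u32 = handle_response(waku_sys::RET_OK, "42").unwrap();
    assert_eq!(n, 42);

    // serde-based: the payload must be valid JSON for the requested type.
    let topics: Vec<String> = handle_json_response(waku_sys::RET_OK, r#"["a","b"]"#).unwrap();
    assert_eq!(topics, vec!["a".to_string(), "b".to_string()]);
}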

View File

@ -1,154 +1,223 @@
use aes_gcm::{Aes256Gcm, KeyInit};
use multiaddr::Multiaddr; use multiaddr::Multiaddr;
use regex::Regex; use rand::thread_rng;
use secp256k1::SecretKey; use secp256k1::SecretKey;
use serial_test::serial; use serial_test::serial;
use std::net::IpAddr;
use std::str::FromStr; use std::str::FromStr;
use std::sync::{Arc, Mutex}; use std::time::{Duration, SystemTime};
use std::time::Duration;
use std::{collections::HashSet, str::from_utf8}; use std::{collections::HashSet, str::from_utf8};
use tokio::sync::mpsc::{self, Sender};
use tokio::time; use tokio::time;
use tokio::time::sleep;
use waku_bindings::node::PubsubTopic;
use waku_bindings::{ use waku_bindings::{
waku_new, Encoding, Initialized, MessageHash, WakuContentTopic, WakuEvent, WakuMessage, waku_default_pubsub_topic, waku_new, waku_set_event_callback, ContentFilter, Encoding, Event,
WakuNodeConfig, WakuNodeHandle, GossipSubParams, Key, MessageId, ProtocolId, Running, WakuContentTopic, WakuLogLevel,
WakuMessage, WakuNodeConfig, WakuNodeHandle, WakuPubSubTopic,
}; };
use waku_bindings::{LibwakuResponse, Running};
const ECHO_TIMEOUT: u64 = 1000;
const ECHO_MESSAGE: &str = "Hi from 🦀!";
const TEST_PUBSUBTOPIC: &str = "test";
async fn try_publish_relay_messages( const ECHO_TIMEOUT: u64 = 10;
const ECHO_MESSAGE: &str = "Hi from 🦀!";
const NODES: &[&str] = &[
"/dns4/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/30303/p2p/16Uiu2HAkvWiyFsgRhuJEb9JfjYxEkoHLgnUQmr1N5mKWnYjxYRVm",
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/30303/p2p/16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ",
"/dns4/node-01.gc-us-central1-a.wakuv2.test.statusim.net/tcp/30303/p2p/16Uiu2HAmJb2e28qLXxT5kZxVUUoJt72EMzNGXB47Rxx5hw3q4YjS"
];
fn try_publish_relay_messages(
node: &WakuNodeHandle<Running>, node: &WakuNodeHandle<Running>,
msg: &WakuMessage, msg: &WakuMessage,
) -> Result<HashSet<MessageHash>, String> { ) -> Result<HashSet<MessageId>, String> {
Ok(HashSet::from([node Ok(HashSet::from(
.relay_publish_message(msg, &PubsubTopic::new(TEST_PUBSUBTOPIC), None) [node.relay_publish_message(msg, None, None)?],
.await?])) ))
}
fn try_publish_lightpush_messages(
node: &WakuNodeHandle<Running>,
msg: &WakuMessage,
) -> Result<HashSet<MessageId>, String> {
let peer_id = node
.peers()
.unwrap()
.iter()
.map(|peer| peer.peer_id())
.find(|id| id.as_str() != node.peer_id().unwrap().as_str())
.unwrap()
.clone();
Ok(HashSet::from([
node.lightpush_publish(msg, None, peer_id, None)?
]))
}
#[derive(Debug)]
struct Response {
id: MessageId,
payload: Vec<u8>,
}
fn set_callback(tx: Sender<Response>, sk: SecretKey, ssk: Key<Aes256Gcm>) {
waku_set_event_callback(move |signal| {
if let Event::WakuMessage(message) = signal.event() {
let id = message.message_id();
let message = message.waku_message();
let payload = if let Ok(message) = message
.try_decode_asymmetric(&sk)
.map_err(|e| println!("{e}"))
{
message.data().to_vec()
} else if let Ok(message) = message
.try_decode_symmetric(&ssk)
.map_err(|e| println!("{e}"))
{
message.data().to_vec()
} else {
message.payload().to_vec()
};
futures::executor::block_on(tx.send(Response {
id: id.to_string(),
payload,
}))
.expect("send response to the receiver");
}
});
} }
async fn test_echo_messages( async fn test_echo_messages(
node1: WakuNodeHandle<Initialized>, node: &WakuNodeHandle<Running>,
node2: WakuNodeHandle<Initialized>,
content: &'static str, content: &'static str,
content_topic: WakuContentTopic, content_topic: WakuContentTopic,
) -> Result<(), String> { sk: SecretKey,
// setting a naïve event handler to avoid ERR messages appearing in logs ssk: Key<Aes256Gcm>,
node1 ) {
.set_event_callback(|_| {}) let message = WakuMessage::new(
.expect("set event call back working"); content,
content_topic,
1,
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_millis()
.try_into()
.unwrap(),
Vec::new(),
false,
);
let rx_waku_message: Arc<Mutex<WakuMessage>> = Arc::new(Mutex::new(WakuMessage::default())); let (tx, mut rx) = mpsc::channel(1);
set_callback(tx, sk, ssk);
let rx_waku_message_cloned = rx_waku_message.clone(); let mut ids = try_publish_relay_messages(node, &message).expect("send relay messages");
let closure = move |response| {
if let LibwakuResponse::Success(v) = response {
let event: WakuEvent = serde_json::from_str(v.unwrap().as_str())
.expect("Parsing event to succeed test_echo_messages");
match event { ids.extend(try_publish_lightpush_messages(node, &message).expect("send lightpush messages"));
WakuEvent::WakuMessage(evt) => {
if let Ok(mut msg_lock) = rx_waku_message_cloned.lock() { while let Some(res) = rx.recv().await {
*msg_lock = evt.waku_message; if ids.take(&res.id).is_some() {
} let msg = from_utf8(&res.payload).expect("should be valid message");
} assert_eq!(content, msg);
WakuEvent::RelayTopicHealthChange(_evt) => {
// dbg!("Relay topic change evt", evt);
}
WakuEvent::ConnectionChange(_evt) => {
// dbg!("Conn change evt", evt);
}
WakuEvent::Unrecognized(err) => panic!("Unrecognized waku event: {:?}", err),
_ => panic!("event case not expected"),
};
} }
};
println!("Before setting event callback"); if ids.is_empty() {
break;
node2
.set_event_callback(closure)
.expect("set event call back working"); // Set the event callback with the closure
let node1 = node1.start().await?;
let node2 = node2.start().await?;
node1
.relay_subscribe(&PubsubTopic::new(TEST_PUBSUBTOPIC))
.await
.unwrap();
node2
.relay_subscribe(&PubsubTopic::new(TEST_PUBSUBTOPIC))
.await
.unwrap();
sleep(Duration::from_secs(5)).await;
// Interconnect nodes
// Replace all matches with 127.0.0.1 to avoid issues with NAT or firewalls.
let addresses1 = node1.listen_addresses().await.unwrap();
let addresses1 = &addresses1[0].to_string();
let re = Regex::new(r"\b(?:\d{1,3}\.){3}\d{1,3}\b").unwrap();
let addresses1 = re.replace_all(addresses1, "127.0.0.1").to_string();
let addresses1 = addresses1.parse::<Multiaddr>().expect("parse multiaddress");
println!("Connecting node1 to node2: {}", addresses1);
node2.connect(&addresses1, None).await.unwrap();
// Wait for mesh to form
sleep(Duration::from_secs(3)).await;
dbg!("Before publish");
let message = WakuMessage::new(content, content_topic, 1, Vec::new(), false);
let _ids = try_publish_relay_messages(&node1, &message)
.await
.expect("send relay messages");
// Wait for the msg to arrive
let rx_waku_message_cloned = rx_waku_message.clone();
for _ in 0..50 {
let message_received = if let Ok(msg) = rx_waku_message_cloned.lock() {
// dbg!("The waku message value is: {:?}", msg);
let payload = msg.payload.to_vec();
let payload_str = from_utf8(&payload).expect("should be valid message");
payload_str == ECHO_MESSAGE
} else {
false
};
if message_received {
node1.stop().await?;
node2.stop().await?;
return Ok(());
} }
sleep(Duration::from_millis(100)).await;
} }
let node1 = node1.stop().await?;
let node2 = node2.stop().await?;
node1.waku_destroy().await?;
node2.waku_destroy().await?;
Err("Unexpected test ending".to_string())
} }
#[ignore]
#[tokio::test]
#[serial]
async fn discv5_echo() -> Result<(), String> {
let config = WakuNodeConfig {
host: IpAddr::from_str("0.0.0.0").ok(),
log_level: Some(WakuLogLevel::Error),
discv5: Some(true),
discv5_udp_port: Some(9000),
discv5_bootstrap_nodes: Vec::new(),
..Default::default()
};
let node = waku_new(Some(config))?;
let node = node.start()?;
println!("Node peer id: {}", node.peer_id()?);
for node_address in NODES {
let address: Multiaddr = node_address.parse().unwrap();
let peer_id = node.add_peer(&address, ProtocolId::Relay)?;
node.connect_peer_with_id(&peer_id, None)?;
}
assert!(node.peers()?.len() >= NODES.len());
assert!(node.peer_count()? >= NODES.len());
assert!(node.relay_enough_peers(None)?);
let sk = SecretKey::new(&mut thread_rng());
let ssk = Aes256Gcm::generate_key(&mut thread_rng());
// Subscribe to default channel.
let content_filter = ContentFilter::new(Some(waku_default_pubsub_topic()), vec![]);
node.relay_subscribe(&content_filter)?;
let content_topic = WakuContentTopic::new("toychat", "2", "huilong", Encoding::Proto);
let topics = node.relay_topics()?;
let default_topic = waku_default_pubsub_topic();
assert!(topics.len() == 1);
let topic: WakuPubSubTopic = topics[0].parse().unwrap();
assert!(topic == default_topic);
let sleep = time::sleep(Duration::from_secs(ECHO_TIMEOUT));
tokio::pin!(sleep);
// Send and receive messages. Waits until all messages received.
let got_all = tokio::select! {
_ = sleep => false,
_ = test_echo_messages(&node, ECHO_MESSAGE, content_topic, sk, ssk) => true,
};
assert!(got_all);
for node_data in node.peers()? {
if node_data.peer_id() != &node.peer_id()? {
node.disconnect_peer_with_id(node_data.peer_id())?;
}
}
node.stop()?;
Ok(())
}
#[ignore]
#[tokio::test] #[tokio::test]
#[serial] #[serial]
async fn default_echo() -> Result<(), String> { async fn default_echo() -> Result<(), String> {
println!("Test default_echo"); let config = WakuNodeConfig {
let node1 = waku_new(Some(WakuNodeConfig { log_level: Some(WakuLogLevel::Error),
tcp_port: Some(60010),
..Default::default() ..Default::default()
})) };
.await?;
let node2 = waku_new(Some(WakuNodeConfig {
tcp_port: Some(60020),
..Default::default()
}))
.await?;
let node = waku_new(Some(config))?;
let node = node.start()?;
println!("Node peer id: {}", node.peer_id()?);
for node_address in NODES {
let address: Multiaddr = node_address.parse().unwrap();
let peer_id = node.add_peer(&address, ProtocolId::Relay)?;
node.connect_peer_with_id(&peer_id, None)?;
}
assert!(node.peers()?.len() >= NODES.len());
assert!(node.peer_count()? >= NODES.len());
assert!(node.relay_enough_peers(None)?);
let sk = SecretKey::new(&mut thread_rng());
let ssk = Aes256Gcm::generate_key(&mut thread_rng());
// subscribe to default channel
let content_filter = ContentFilter::new(Some(waku_default_pubsub_topic()), vec![]);
node.relay_subscribe(&content_filter)?;
let content_topic = WakuContentTopic::new("toychat", "2", "huilong", Encoding::Proto); let content_topic = WakuContentTopic::new("toychat", "2", "huilong", Encoding::Proto);
let sleep = time::sleep(Duration::from_secs(ECHO_TIMEOUT)); let sleep = time::sleep(Duration::from_secs(ECHO_TIMEOUT));
@ -157,34 +226,106 @@ async fn default_echo() -> Result<(), String> {
// Send and receive messages. Waits until all messages received. // Send and receive messages. Waits until all messages received.
let got_all = tokio::select! { let got_all = tokio::select! {
_ = sleep => false, _ = sleep => false,
_ = test_echo_messages(node1, node2, ECHO_MESSAGE, content_topic) => true, _ = test_echo_messages(&node, ECHO_MESSAGE, content_topic, sk, ssk) => true,
}; };
assert!(got_all); assert!(got_all);
for node_data in node.peers()? {
if node_data.peer_id() != &node.peer_id()? {
node.disconnect_peer_with_id(node_data.peer_id())?;
}
}
node.stop()?;
Ok(()) Ok(())
} }
#[tokio::test] #[test]
#[serial] #[serial]
async fn node_restart() { fn gossipsub_config() -> Result<(), String> {
let params = GossipSubParams {
d: Some(6),
dlo: Some(3),
dhi: Some(12),
dscore: Some(10),
dout: Some(8),
history_length: Some(500),
history_gossip: Some(3),
dlazy: Some(12),
gossip_factor: Some(0.25),
gossip_retransmission: Some(4),
heartbeat_initial_delay_ms: Some(500),
heartbeat_interval_seconds: Some(60),
slow_heartbeat_warning: Some(0.5),
fanout_ttl_seconds: Some(60),
prune_peers: Some(3),
prune_backoff_seconds: Some(900),
unsubscribe_backoff_seconds: Some(60),
connectors: Some(3),
max_pending_connections: Some(50),
connection_timeout_seconds: Some(15),
direct_connect_ticks: Some(5),
direct_connect_initial_delay_seconds: Some(5),
opportunistic_graft_ticks: Some(8),
opportunistic_graft_peers: Some(2),
graft_flood_threshold_seconds: Some(120),
max_ihave_length: Some(32),
max_ihave_messages: Some(8),
iwant_followup_time_seconds: Some(120),
seen_messages_ttl_seconds: Some(120),
};
let config = WakuNodeConfig { let config = WakuNodeConfig {
node_key: Some( gossipsub_params: params.into(),
SecretKey::from_str("05f381866cc21f6c1e2e80e07fa732008e36d942dce3206ad6dcd6793c98d609") log_level: Some(WakuLogLevel::Error),
.unwrap(),
),
..Default::default() ..Default::default()
}; };
let node = waku_new(Some(config))?;
let node = node.start()?;
node.stop()?;
Ok(())
}
#[test]
#[serial]
fn loglevel_error() -> Result<(), String> {
let config = WakuNodeConfig {
log_level: Some(WakuLogLevel::Error),
..Default::default()
};
let node = waku_new(Some(config))?;
let node = node.start()?;
node.stop()?;
Ok(())
}
#[test]
#[serial]
fn loglevel_info() -> Result<(), String> {
let config = WakuNodeConfig {
log_level: Some(WakuLogLevel::Info),
..Default::default()
};
let node = waku_new(Some(config))?;
let node = node.start()?;
node.stop()?;
Ok(())
}
#[test]
#[serial]
fn node_restart() {
let config = WakuNodeConfig::default();
for _ in 0..3 { for _ in 0..3 {
let node = waku_new(config.clone().into()) let node = waku_new(config.clone().into()).expect("default config should be valid");
.await let node = node.start().expect("node should start with valid config");
.expect("default config should be valid");
let node = node assert!(node.peer_id().is_ok());
.start() node.stop().expect("node should stop");
.await
.expect("node should start with valid config");
let node = node.stop().await.expect("node should stop");
node.waku_destroy().await.expect("free resources");
} }
} }
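For reference, a condensed sketch of the relay publish flow these tests exercise on the master-side async API; the calls are taken from the test code above, but exact signatures may differ in the published crate:

// A sketch only: condensed from default_echo / test_echo_messages above.
use waku_bindings::node::PubsubTopic;
use waku_bindings::{waku_new, Encoding, WakuContentTopic, WakuMessage, WakuNodeConfig};

async fn publish_once() -> Result<(), String> {
    let node = waku_new(Some(WakuNodeConfig {
        tcp_port: Some(60010),
        ..Default::default()
    }))
    .await?;
    let node = node.start().await?;
    node.relay_subscribe(&PubsubTopic::new("test")).await?;

    let content_topic = WakuContentTopic::new("toychat", "2", "huilong", Encoding::Proto);
    let message = WakuMessage::new("Hi from 🦀!", content_topic, 1, Vec::new(), false);
    node.relay_publish_message(&message, &PubsubTopic::new("test"), None)
        .await?;

    let node = node.stop().await?;
    node.waku_destroy().await?;
    Ok(())
}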

View File

@ -1,15 +1,13 @@
[package] [package]
name = "waku-sys" name = "waku-sys"
version = "1.0.0" version = "0.5.0"
edition = "2021" edition = "2021"
authors = [ authors = [
"Daniel Sanchez Quiros <danielsq@status.im>", "Daniel Sanchez Quiros <danielsq@status.im>"
"Richard Ramos <richard@waku.org>",
"Ivan Folgueira Bande <ivansete@status.im>"
] ]
description = "Waku networking library generated bindings" description = "Waku networking library generated bindings"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
repository = "https://github.com/logos-messaging/logos-messaging-rust-bindings" repository = "https://github.com/waku-org/waku-rust-bindings"
keywords = ["waku", "peer-to-peer", "libp2p", "networking"] keywords = ["waku", "peer-to-peer", "libp2p", "networking"]
categories = ["network-programming"] categories = ["network-programming"]
@ -18,12 +16,14 @@ exclude = [
"vendor/docs/*", "vendor/docs/*",
"vendor/coverage/*", "vendor/coverage/*",
"vendor/pkg/*", "vendor/pkg/*",
"vendor/scripts/*",
"vendor/tests/*", "vendor/tests/*",
"vendor/ci/*", "vendor/ci/*",
"vendor/cmd/*", "vendor/cmd/*",
"**/*.md", "**/*.md",
"**/*.lock", "**/*.lock",
"**/*.nix", "**/*.nix",
"**/Makefile",
"**/Dockerfile", "**/Dockerfile",
] ]
@ -35,4 +35,3 @@ crate-type = ["rlib"]
[build-dependencies] [build-dependencies]
bindgen = "0.64" bindgen = "0.64"
cc = "1.0.73"

View File

@ -1,9 +1,9 @@
# Waku rust bindgen bindings # Waku rust bindgen bindings
[<img alt="github" src="https://img.shields.io/badge/github-Github-red?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/logos-messaging/logos-messaging-rust-bindings) [<img alt="github" src="https://img.shields.io/badge/github-Github-red?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/waku-org/waku-rust-bindings)
[<img alt="crates.io" src="https://img.shields.io/crates/v/waku-bindings.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/waku-sys) [<img alt="crates.io" src="https://img.shields.io/crates/v/waku-bindings.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/waku-sys)
[<img alt="docs.rs" src="https://img.shields.io/badge/doc/waku-bindings-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/waku-sys) [<img alt="docs.rs" src="https://img.shields.io/badge/doc/waku-bindings-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/waku-sys)
[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/logos-messaging/logos-messaging-rust-bindings/main.yml?branch=master" height="20">](https://github.com/logos-messaging/logos-messaging-rust-bindings/actions/workflows/main.yml?query=branch%3Amaster) [<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/waku-org/waku-rust-bindings/main.yml?branch=master" height="20">](https://github.com/waku-org/waku-rust-bindings/actions/workflows/main.yml?query=branch%3Amaster)
Rust layer on top of [`go-waku`](https://github.com/status-im/go-waku) [c ffi bindings](https://github.com/status-im/go-waku/blob/v0.2.2/library/README.md). Rust layer on top of [`go-waku`](https://github.com/status-im/go-waku) [c ffi bindings](https://github.com/status-im/go-waku/blob/v0.2.2/library/README.md).

View File

@ -1,123 +1,86 @@
use std::env; use std::env;
use std::env::set_current_dir; use std::env::set_current_dir;
use std::fs;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::Command; use std::process::Command;
extern crate cc; fn get_go_bin() -> String {
if cfg!(target_family = "unix") {
let output = String::from_utf8(
Command::new("/usr/bin/which")
.arg("go")
.output()
.map_err(|e| println!("cargo:warning=Couldn't find `which` command: {e}"))
.expect("`which` command not found")
.stdout,
)
.expect("which output couldnt be parsed");
fn submodules_init(project_dir: &Path) { if output.is_empty() {
let mark_file_path = ".submodules-initialized"; println!("cargo:warning=Couldn't find go binary installed, please ensure that it is installed and/or withing the system paths");
panic!("Couldn't find `go` binary installed");
// Check if the mark file exists
if !Path::new(mark_file_path).exists() {
// If mark file doesn't exist, initialize submodule
if Command::new("git")
.args(["submodule", "init"])
.status()
.expect("Failed to execute 'git submodule init'")
.success()
&& Command::new("git")
.args(["submodule", "update", "--recursive"])
.status()
.expect("Failed to execute 'git submodule update --recursive'")
.success()
{
// Now, inside the nwaku folder, run 'make update' to fetch nwaku's vendored dependencies
let nwaku_path = project_dir.join("vendor");
set_current_dir(nwaku_path).expect("Moving to vendor dir");
if Command::new("make")
.args(["update"])
.status()
.expect("Failed to execute 'make update'")
.success()
{
std::fs::File::create(mark_file_path).expect("Failed to create mark file");
} else {
panic!("Failed to run 'make update' within nwaku folder.");
}
set_current_dir(project_dir).expect("Going back to project dir");
println!("Git submodules initialized and updated successfully.");
} else {
panic!("Failed to initialize or update git submodules.");
} }
output.trim().to_string()
} else if cfg!(target_family = "windows") {
"go".into()
} else { } else {
println!("Mark file '{mark_file_path}' exists. Skipping git submodule initialization."); panic!("OS not supported!");
} }
} }
fn build_nwaku_lib(project_dir: &Path) { fn build_go_waku_lib(go_bin: &str, project_dir: &Path) {
let nwaku_path = project_dir.join("vendor"); // Build go-waku static lib
set_current_dir(nwaku_path).expect("Moving to vendor dir"); // build command taken from waku make file:
// https://github.com/status-im/go-waku/blob/eafbc4c01f94f3096c3201fb1e44f17f907b3068/Makefile#L115
let out_dir: PathBuf = env::var_os("OUT_DIR").unwrap().into();
let vendor_path = project_dir.join("vendor");
set_current_dir(vendor_path).expect("Moving to vendor dir");
let mut cmd = Command::new(go_bin);
cmd.env("CGO_ENABLED", "1")
.arg("build")
.arg("-buildmode=c-archive")
.arg("-tags=gowaku_no_rln")
.arg("-o")
.arg(out_dir.join("libgowaku.a"))
.arg("./library/c");
// Setting `GOCACHE=/tmp/` for crates.io job that builds documentation
// when a crate is being published or updated.
if std::env::var("DOCS_RS").is_ok() {
cmd.env("GOCACHE", "/tmp/");
}
let mut cmd = Command::new("make");
cmd.arg("libwaku").arg("STATIC=1");
cmd.status() cmd.status()
.map_err(|e| println!("cargo:warning=make build failed due to: {e}")) .map_err(|e| println!("cargo:warning=go build failed due to: {e}"))
.unwrap(); .unwrap();
set_current_dir(project_dir).expect("Going back to project dir"); set_current_dir(project_dir).expect("Going back to project dir");
} }
fn generate_bindgen_code(project_dir: &Path) { fn patch_gowaku_lib() {
let nwaku_path = project_dir.join("vendor"); // Replacing cgo_utils.h as it is only needed when compiling go-waku bindings
let header_path = nwaku_path.join("library/libwaku.h"); let lib_dir: PathBuf = env::var_os("OUT_DIR").unwrap().into();
let file_path = lib_dir.join("libgowaku.h");
let data = fs::read_to_string(&file_path).expect("Unable to read file");
let new_data = data.replace("#include <cgo_utils.h>", "");
fs::write(&file_path, new_data).expect("Unable to write file");
}
cc::Build::new() fn generate_bindgen_code() {
.object( let lib_dir: PathBuf = env::var_os("OUT_DIR").unwrap().into();
nwaku_path
.join("vendor/nim-libbacktrace/libbacktrace_wrapper.o")
.display()
.to_string(),
)
.compile("libbacktrace_wrapper");
println!("cargo:rerun-if-changed={}", header_path.display()); println!("cargo:rustc-link-search={}", lib_dir.display());
println!( println!("cargo:rustc-link-lib=static=gowaku");
"cargo:rustc-link-search={}", println!("cargo:rerun-if-changed=libgowaku.h");
nwaku_path.join("build").display()
);
println!("cargo:rustc-link-lib=static=waku");
println!( patch_gowaku_lib();
"cargo:rustc-link-search={}",
nwaku_path
.join("vendor/nim-nat-traversal/vendor/miniupnp/miniupnpc/build")
.display()
);
println!("cargo:rustc-link-lib=static=miniupnpc");
println!(
"cargo:rustc-link-search={}",
nwaku_path
.join("vendor/nim-nat-traversal/vendor/libnatpmp-upstream")
.display()
);
println!("cargo:rustc-link-lib=static=natpmp");
println!("cargo:rustc-link-lib=dl");
println!("cargo:rustc-link-lib=m");
println!(
"cargo:rustc-link-search=native={}",
nwaku_path
.join("vendor/nim-libbacktrace/install/usr/lib")
.display()
);
println!("cargo:rustc-link-lib=static=backtrace");
cc::Build::new()
.file("src/cmd.c") // Compile the C file
.compile("cmditems"); // Compile it as a library
println!("cargo:rustc-link-lib=static=cmditems");
// Generate waku bindings with bindgen // Generate waku bindings with bindgen
let bindings = bindgen::Builder::default() let bindings = bindgen::Builder::default()
// The input header we would like to generate // The input header we would like to generate
// bindings for. // bindings for.
.header(format!("{}", header_path.display())) .header(format!("{}/{}", lib_dir.display(), "libgowaku.h"))
// Tell cargo to invalidate the built crate whenever any of the // Tell cargo to invalidate the built crate whenever any of the
// included header files changed. // included header files changed.
.parse_callbacks(Box::new(bindgen::CargoCallbacks)) .parse_callbacks(Box::new(bindgen::CargoCallbacks))
@ -135,9 +98,10 @@ fn generate_bindgen_code(project_dir: &Path) {
#[cfg(not(doc))] #[cfg(not(doc))]
fn main() { fn main() {
let go_bin = get_go_bin();
let project_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); let project_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
submodules_init(&project_dir); build_go_waku_lib(&go_bin, &project_dir);
build_nwaku_lib(&project_dir); generate_bindgen_code();
generate_bindgen_code(&project_dir);
} }
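The generated bindings are conventionally re-exported from the crate root; a sketch assuming the bindgen output is written to $OUT_DIR/bindings.rs (the usual pattern, but that step is elided in the hunk above):

// waku-sys/src/lib.rs (assumed layout; not part of this diff)
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]

include!(concat!(env!("OUT_DIR"), "/bindings.rs"));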

View File

@ -1,13 +0,0 @@
/*
This file is needed to avoid errors like the following when linking the waku-sys lib crate:
<<undefined reference to `cmdCount'>>
and
<<undefined reference to `cmdLine'>>
*/
#include <stdio.h>
int cmdCount = 0;
char** cmdLine = NULL;

@ -1 +1 @@
Subproject commit 4117449b9af6c0304a6115dd4bc0d1d745159685 Subproject commit e340337d64622d22cb94a969255efe4e36637df0