mirror of https://github.com/waku-org/js-waku.git
feat!: protocols filter peers as per configured shard (#1756)
* merge: master
* fix: tests
* update: interfaces
* rm: comments
* metadata: store peerIdStr instead of peerId
* chore(utils): move fast-check to dev deps
* fix: allow autosharding nodes to get peers (#1785)
* fix: merge
* fix: build
* fix: failing tests from master merge

---------

Co-authored-by: Arseniy Klempner <arseniyk@status.im>
parent: bb680e49f7
commit: 477c2a5918
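In short: every protocol class (Filter, LightPush, Store, Metadata) now receives the node's creation options through BaseProtocol, and peer selection goes through getConnectedPeersForProtocolAndShard, so a node created with a shardInfo only selects peers on the same cluster and shard. The sketch below is adapted from the new getConnectedPeersForProtocolAndShard tests further down in this diff; it is an illustrative usage sketch, not separate documentation.

    import { waitForRemotePeer } from "@waku/core";
    import { createLightNode, Protocols, type ShardInfo } from "@waku/sdk";

    // Static sharding: the node will only select peers on cluster 1, shard 1.
    const shardInfo: ShardInfo = { clusterId: 1, shards: [1] };

    const waku = await createLightNode({ shardInfo });
    await waku.start();

    // Dial or discover a service node first, as the tests below do.
    // With sharding enabled, this also waits for the metadata handshake that
    // confirms the remote peer is on a compatible shard.
    await waitForRemotePeer(waku, [Protocols.LightPush]);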
@@ -28010,6 +28010,7 @@
         "@rollup/plugin-node-resolve": "^15.2.3",
         "@waku/build-utils": "*",
         "cspell": "^7.3.2",
+        "fast-check": "^3.14.0",
         "npm-run-all": "^4.1.5",
         "rollup": "^4.9.5"
       },

@@ -32135,6 +32136,7 @@
         "chai": "^4.3.10",
         "cspell": "^7.3.2",
         "debug": "^4.3.4",
+        "fast-check": "^3.14.0",
         "npm-run-all": "^4.1.5",
         "rollup": "^4.9.5",
         "uint8arrays": "^4.0.4"
@@ -14,8 +14,7 @@ export * as waku_filter from "./lib/filter/index.js";
 export { wakuFilter, FilterCodecs } from "./lib/filter/index.js";
 
 export * as waku_light_push from "./lib/light_push/index.js";
-export { LightPushCodec } from "./lib/light_push/index.js";
-export { wakuLightPush } from "./lib/light_push/index.js";
+export { LightPushCodec, wakuLightPush } from "./lib/light_push/index.js";
 
 export * as waku_store from "./lib/store/index.js";
@@ -9,7 +9,7 @@ import type {
 import { DefaultPubsubTopic } from "@waku/interfaces";
 import { Logger, shardInfoToPubsubTopics } from "@waku/utils";
 import {
-  getConnectedPeersForProtocol,
+  getConnectedPeersForProtocolAndShard,
   getPeersForProtocol,
   sortPeersByLatency
 } from "@waku/utils/libp2p";

@@ -25,12 +25,16 @@ export class BaseProtocol implements IBaseProtocol {
   public readonly addLibp2pEventListener: Libp2p["addEventListener"];
   public readonly removeLibp2pEventListener: Libp2p["removeEventListener"];
   protected streamManager: StreamManager;
+  protected pubsubTopics: PubsubTopic[];
 
   constructor(
     public multicodec: string,
     private components: Libp2pComponents,
-    private log: Logger
+    private log: Logger,
+    private options?: ProtocolCreateOptions
   ) {
+    this.pubsubTopics = this.initializePubsubTopic(options);
+
     this.addLibp2pEventListener = components.events.addEventListener.bind(
       components.events
     );

@@ -59,10 +63,19 @@ export class BaseProtocol implements IBaseProtocol {
    * the class protocol. Waku may or may not be currently connected to these
    * peers.
    */
-  public async peers(): Promise<Peer[]> {
+  public async allPeers(): Promise<Peer[]> {
     return getPeersForProtocol(this.peerStore, [this.multicodec]);
   }
 
+  public async connectedPeers(): Promise<Peer[]> {
+    const peers = await this.allPeers();
+    return peers.filter((peer) => {
+      return (
+        this.components.connectionManager.getConnections(peer.id).length > 0
+      );
+    });
+  }
+
   /**
    * Retrieves a list of connected peers that support the protocol. The list is sorted by latency.
    *

@@ -83,16 +96,18 @@ export class BaseProtocol implements IBaseProtocol {
       numPeers: 0
     }
   ): Promise<Peer[]> {
-    // Retrieve all connected peers that support the protocol
-    const allPeersForProtocol = await getConnectedPeersForProtocol(
+    // Retrieve all connected peers that support the protocol & shard (if configured)
+    const connectedPeersForProtocolAndShard =
+      await getConnectedPeersForProtocolAndShard(
       this.components.connectionManager.getConnections(),
       this.peerStore,
-      [this.multicodec]
+      [this.multicodec],
+      this.options?.shardInfo
     );
 
     // Filter the peers based on discovery & number of peers requested
-    const filteredPeers = await filterPeersByDiscovery(
-      allPeersForProtocol,
+    const filteredPeers = filterPeersByDiscovery(
+      connectedPeersForProtocolAndShard,
       numPeers,
       maxBootstrapPeers
     );

@@ -112,7 +127,9 @@ export class BaseProtocol implements IBaseProtocol {
     return sortedFilteredPeers;
   }
 
-  initializePubsubTopic(options?: ProtocolCreateOptions): PubsubTopic[] {
+  private initializePubsubTopic(
+    options?: ProtocolCreateOptions
+  ): PubsubTopic[] {
     return (
       options?.pubsubTopics ??
       (options?.shardInfo
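The body of getConnectedPeersForProtocolAndShard itself is not part of this diff; only its call site above and the fact that a peer's shard info is persisted in the peer store as metadata.shardInfo (see the metadata changes below) are shown. A minimal sketch of what such a helper could look like, assuming decodeRelayShard returns a ShardInfo and handling only the static-sharding form (not the autosharding/content-topic form); peers that did not advertise shard info are kept here, which is an assumption, not confirmed behaviour:

    import type { Connection, Peer, PeerStore } from "@libp2p/interface";
    import type { ShardInfo } from "@waku/interfaces";
    import { decodeRelayShard } from "@waku/utils";

    // Sketch: keep only connected peers that support one of the protocols and,
    // when shardInfo is configured, overlap with at least one of our shards.
    export async function getConnectedPeersForProtocolAndShardSketch(
      connections: Connection[],
      peerStore: PeerStore,
      protocols: string[],
      shardInfo?: ShardInfo
    ): Promise<Peer[]> {
      const connectedPeers = await Promise.all(
        connections.map(async (conn) => {
          try {
            return await peerStore.get(conn.remotePeer);
          } catch {
            return undefined; // peer may have been pruned from the store
          }
        })
      );

      return connectedPeers.filter((peer): peer is Peer => {
        if (!peer) return false;
        if (!peer.protocols.some((p) => protocols.includes(p))) return false;
        if (!shardInfo) return true; // no shard configured: accept any peer

        const encoded = peer.metadata.get("shardInfo");
        if (!encoded) return true; // assumption: keep peers without advertised shards

        const peerShards = decodeRelayShard(encoded);
        return (
          peerShards.clusterId === shardInfo.clusterId &&
          peerShards.shards.some((s) => shardInfo.shards.includes(s))
        );
      });
    }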
@@ -1,6 +1,5 @@
 import type { Peer, PeerId, PeerInfo, PeerStore } from "@libp2p/interface";
 import { CustomEvent, TypedEventEmitter } from "@libp2p/interface";
-import { decodeRelayShard } from "@waku/enr";
 import {
   ConnectionManagerOptions,
   EConnectionStateEvents,

@@ -15,7 +14,7 @@ import {
   ShardInfo
 } from "@waku/interfaces";
 import { Libp2p, Tags } from "@waku/interfaces";
-import { shardInfoToPubsubTopics } from "@waku/utils";
+import { decodeRelayShard, shardInfoToPubsubTopics } from "@waku/utils";
 import { Logger } from "@waku/utils";
 
 import { KeepAliveManager } from "./keep_alive_manager.js";

@@ -377,6 +376,8 @@ export class ConnectionManager
       },
       "peer:connect": (evt: CustomEvent<PeerId>): void => {
         void (async () => {
+          log.info(`Connected to peer ${evt.detail.toString()}`);
+
           const peerId = evt.detail;
 
           this.keepAliveManager.start(
@@ -258,7 +258,6 @@ class Subscription {
 }
 
 class Filter extends BaseProtocol implements IReceiver {
-  private readonly pubsubTopics: PubsubTopic[] = [];
   private activeSubscriptions = new Map<string, Subscription>();
   private readonly NUM_PEERS_PROTOCOL = 1;
 

@@ -279,9 +278,7 @@ class Filter extends BaseProtocol implements IReceiver {
   }
 
   constructor(libp2p: Libp2p, options?: ProtocolCreateOptions) {
-    super(FilterCodecs.SUBSCRIBE, libp2p.components, log);
-
-    this.pubsubTopics = this.initializePubsubTopic(options);
+    super(FilterCodecs.SUBSCRIBE, libp2p.components, log, options);
 
     libp2p.handle(FilterCodecs.PUSH, this.onRequest.bind(this)).catch((e) => {
       log.error("Failed to register ", FilterCodecs.PUSH, e);

@@ -300,8 +297,6 @@ class Filter extends BaseProtocol implements IReceiver {
 
     ensurePubsubTopicIsConfigured(pubsubTopic, this.pubsubTopics);
 
-    //TODO: get a relevant peer for the topic/shard
-    // https://github.com/waku-org/js-waku/pull/1586#discussion_r1336428230
     const peer = (
       await this.getPeers({
         maxBootstrapPeers: 1,
@@ -27,7 +27,7 @@ describe("filterPeersByDiscovery function", function () {
       }
     ] as unknown as Peer[];
 
-    const result = await filterPeersByDiscovery(mockPeers, 0, 10);
+    const result = filterPeersByDiscovery(mockPeers, 0, 10);
     expect(result.length).to.deep.equal(mockPeers.length);
   });
 

@@ -56,7 +56,7 @@ describe("filterPeersByDiscovery function", function () {
       }
     ] as unknown as Peer[];
 
-    const result = await filterPeersByDiscovery(mockPeers, 0, 0);
+    const result = filterPeersByDiscovery(mockPeers, 0, 0);
 
     // result should have no bootstrap peers, and a total of 2 peers
     expect(result.length).to.equal(2);

@@ -95,7 +95,7 @@ describe("filterPeersByDiscovery function", function () {
       }
     ] as unknown as Peer[];
 
-    const result = await filterPeersByDiscovery(mockPeers, 0, 1);
+    const result = filterPeersByDiscovery(mockPeers, 0, 1);
 
    // result should have 1 bootstrap peers, and a total of 4 peers
     expect(result.length).to.equal(4);

@@ -134,7 +134,7 @@ describe("filterPeersByDiscovery function", function () {
       }
     ] as unknown as Peer[];
 
-    const result = await filterPeersByDiscovery(mockPeers, 5, 2);
+    const result = filterPeersByDiscovery(mockPeers, 5, 2);
 
     // check that result has at least 2 bootstrap peers and no more than 5 peers
     expect(result.length).to.be.at.least(2);

@@ -10,13 +10,13 @@ import { Tags } from "@waku/interfaces";
  * @param peers - The list of peers to filter from.
  * @param numPeers - The total number of peers to retrieve. If 0, all peers are returned, irrespective of `maxBootstrapPeers`.
  * @param maxBootstrapPeers - The maximum number of bootstrap peers to retrieve.
- * @returns A Promise that resolves to an array of peers based on the specified criteria.
+ * @returns An array of peers based on the specified criteria.
  */
-export async function filterPeersByDiscovery(
+export function filterPeersByDiscovery(
   peers: Peer[],
   numPeers: number,
   maxBootstrapPeers: number
-): Promise<Peer[]> {
+): Peer[] {
   // Collect the bootstrap peers up to the specified maximum
   let bootstrapPeers = peers
     .filter((peer) => peer.tags.has(Tags.BOOTSTRAP))
@@ -5,7 +5,6 @@ import {
   IMessage,
   Libp2p,
   ProtocolCreateOptions,
-  PubsubTopic,
   SendError,
   SendResult
 } from "@waku/interfaces";

@@ -43,12 +42,10 @@ type PreparePushMessageResult =
 * Implements the [Waku v2 Light Push protocol](https://rfc.vac.dev/spec/19/).
 */
 class LightPush extends BaseProtocol implements ILightPush {
-  private readonly pubsubTopics: PubsubTopic[];
   private readonly NUM_PEERS_PROTOCOL = 1;
 
   constructor(libp2p: Libp2p, options?: ProtocolCreateOptions) {
-    super(LightPushCodec, libp2p.components, log);
-    this.pubsubTopics = this.initializePubsubTopic(options);
+    super(LightPushCodec, libp2p.components, log, options);
   }
 
   private async preparePushMessage(

@@ -107,7 +104,6 @@ class LightPush extends BaseProtocol implements ILightPush {
     };
   }
 
-    //TODO: get a relevant peer for the topic/shard
     const peers = await this.getPeers({
       maxBootstrapPeers: 1,
       numPeers: this.NUM_PEERS_PROTOCOL
@@ -1,14 +1,14 @@
 import type { PeerId } from "@libp2p/interface";
 import { IncomingStreamData } from "@libp2p/interface";
-import { encodeRelayShard } from "@waku/enr";
 import type {
   IMetadata,
   Libp2pComponents,
+  PeerIdStr,
   ShardInfo,
   ShardingParams
 } from "@waku/interfaces";
 import { proto_metadata } from "@waku/proto";
-import { Logger } from "@waku/utils";
+import { encodeRelayShard, Logger } from "@waku/utils";
 import all from "it-all";
 import * as lp from "it-length-prefixed";
 import { pipe } from "it-pipe";

@@ -20,13 +20,16 @@ const log = new Logger("metadata");
 
 export const MetadataCodec = "/vac/waku/metadata/1.0.0";
 
-class Metadata extends BaseProtocol {
-  private readonly shardInfo: ShardingParams;
+class Metadata extends BaseProtocol implements IMetadata {
   private libp2pComponents: Libp2pComponents;
-  constructor(shardInfo: ShardingParams, libp2p: Libp2pComponents) {
-    super(MetadataCodec, libp2p.components, log);
+  handshakesConfirmed: Set<PeerIdStr> = new Set();
+
+  constructor(
+    public shardInfo: ShardingParams,
+    libp2p: Libp2pComponents
+  ) {
+    super(MetadataCodec, libp2p.components, log, shardInfo && { shardInfo });
     this.libp2pComponents = libp2p;
-    this.shardInfo = shardInfo;
     void libp2p.registrar.handle(MetadataCodec, (streamData) => {
       void this.onRequest(streamData);
     });

@@ -53,12 +56,10 @@ class Metadata extends BaseProtocol {
       const remoteShardInfoResponse =
         this.decodeMetadataResponse(encodedResponse);
 
-      // add or update the shardInfo to peer store
-      await this.libp2pComponents.peerStore.merge(connection.remotePeer, {
-        metadata: {
-          shardInfo: encodeRelayShard(remoteShardInfoResponse)
-        }
-      });
+      await this.savePeerShardInfo(
+        connection.remotePeer,
+        remoteShardInfoResponse
+      );
     } catch (error) {
       log.error("Error handling metadata request", error);
     }

@@ -87,9 +88,19 @@ class Metadata extends BaseProtocol {
 
     const decodedResponse = this.decodeMetadataResponse(encodedResponse);
 
+    await this.savePeerShardInfo(peerId, decodedResponse);
+
     return decodedResponse;
   }
 
+  public async confirmOrAttemptHandshake(peerId: PeerId): Promise<void> {
+    if (this.handshakesConfirmed.has(peerId.toString())) return;
+
+    await this.query(peerId);
+
+    return;
+  }
+
   private decodeMetadataResponse(encodedResponse: Uint8ArrayList[]): ShardInfo {
     const bytes = new Uint8ArrayList();
 

@@ -104,6 +115,20 @@ class Metadata extends BaseProtocol {
 
     return response;
   }
+
+  private async savePeerShardInfo(
+    peerId: PeerId,
+    shardInfo: ShardInfo
+  ): Promise<void> {
+    // add or update the shardInfo to peer store
+    await this.libp2pComponents.peerStore.merge(peerId, {
+      metadata: {
+        shardInfo: encodeRelayShard(shardInfo)
+      }
+    });
+
+    this.handshakesConfirmed.add(peerId.toString());
+  }
 }
 
 export function wakuMetadata(
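The net effect of the metadata changes above: a successful query() now persists the remote peer's shard info in the peer store and records the peer in handshakesConfirmed, so confirmOrAttemptHandshake becomes a cheap no-op after the first round trip. A hedged usage sketch, assuming the metadata service is only mounted when the node was created with a shardInfo (see the wait_for_remote_peer changes below):

    import type { LightNode } from "@waku/sdk";

    // Sketch: confirm the metadata handshake for every currently connected peer.
    async function confirmShardHandshakes(waku: LightNode): Promise<void> {
      const metadata = waku.libp2p.services.metadata;
      if (!metadata) return; // sharding not enabled on this node

      for (const connection of waku.libp2p.getConnections()) {
        // First call performs the /vac/waku/metadata/1.0.0 query and caches the
        // result; later calls for the same peer return immediately.
        await metadata.confirmOrAttemptHandshake(connection.remotePeer);
      }
    }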
@@ -6,8 +6,7 @@ import {
   IDecoder,
   IStore,
   Libp2p,
-  ProtocolCreateOptions,
-  PubsubTopic
+  ProtocolCreateOptions
 } from "@waku/interfaces";
 import { proto_store as proto } from "@waku/proto";
 import { ensurePubsubTopicIsConfigured, isDefined } from "@waku/utils";

@@ -74,12 +73,10 @@ export interface QueryOptions {
 * The Waku Store protocol can be used to retrieved historical messages.
 */
 class Store extends BaseProtocol implements IStore {
-  private readonly pubsubTopics: PubsubTopic[];
   private readonly NUM_PEERS_PROTOCOL = 1;
 
   constructor(libp2p: Libp2p, options?: ProtocolCreateOptions) {
-    super(StoreCodec, libp2p.components, log);
-    this.pubsubTopics = this.initializePubsubTopic(options);
+    super(StoreCodec, libp2p.components, log, options);
   }
 
   /**
@@ -1,9 +1,8 @@
 import type { IdentifyResult } from "@libp2p/interface";
-import type { IBaseProtocol, IRelay, Waku } from "@waku/interfaces";
+import type { IBaseProtocol, IMetadata, IRelay, Waku } from "@waku/interfaces";
 import { Protocols } from "@waku/interfaces";
 import { Logger } from "@waku/utils";
 import { pEvent } from "p-event";
 
 const log = new Logger("wait-for-remote-peer");
 
 /**

@@ -32,6 +31,11 @@ export async function waitForRemotePeer(
 ): Promise<void> {
   protocols = protocols ?? getEnabledProtocols(waku);
 
+  const isShardingEnabled = waku.shardInfo !== undefined;
+  const metadataService = isShardingEnabled
+    ? waku.libp2p.services.metadata
+    : undefined;
+
   if (!waku.isStarted()) return Promise.reject("Waku node is not started");
 
   const promises = [];

@@ -45,19 +49,19 @@ export async function waitForRemotePeer(
   if (protocols.includes(Protocols.Store)) {
     if (!waku.store)
       throw new Error("Cannot wait for Store peer: protocol not mounted");
-    promises.push(waitForConnectedPeer(waku.store));
+    promises.push(waitForConnectedPeer(waku.store, metadataService));
   }
 
   if (protocols.includes(Protocols.LightPush)) {
     if (!waku.lightPush)
       throw new Error("Cannot wait for LightPush peer: protocol not mounted");
-    promises.push(waitForConnectedPeer(waku.lightPush));
+    promises.push(waitForConnectedPeer(waku.lightPush, metadataService));
   }
 
   if (protocols.includes(Protocols.Filter)) {
     if (!waku.filter)
       throw new Error("Cannot wait for Filter peer: protocol not mounted");
-    promises.push(waitForConnectedPeer(waku.filter));
+    promises.push(waitForConnectedPeer(waku.filter, metadataService));
   }
 
   if (timeoutMs) {

@@ -73,21 +77,62 @@ export async function waitForRemotePeer(
 
 /**
  * Wait for a peer with the given protocol to be connected.
+ * If sharding is enabled on the node, it will also wait for the peer to be confirmed by the metadata service.
  */
-async function waitForConnectedPeer(protocol: IBaseProtocol): Promise<void> {
+async function waitForConnectedPeer(
+  protocol: IBaseProtocol,
+  metadataService?: IMetadata
+): Promise<void> {
   const codec = protocol.multicodec;
-  const peers = await protocol.peers();
+  const peers = await protocol.connectedPeers();
 
   if (peers.length) {
+    if (!metadataService) {
       log.info(`${codec} peer found: `, peers[0].id.toString());
       return;
     }
 
+    // once a peer is connected, we need to confirm the metadata handshake with at least one of those peers if sharding is enabled
+    try {
+      await Promise.any(
+        peers.map((peer) => metadataService.confirmOrAttemptHandshake(peer.id))
+      );
+      return;
+    } catch (e) {
+      if ((e as any).code === "ERR_CONNECTION_BEING_CLOSED")
+        log.error(
+          `Connection with the peer was closed and possibly because it's on a different shard. Error: ${e}`
+        );
+
+      log.error(`Error waiting for handshake confirmation: ${e}`);
+    }
+  }
+
+  log.info(`Waiting for ${codec} peer`);
+
+  // else we'll just wait for the next peer to connect
   await new Promise<void>((resolve) => {
     const cb = (evt: CustomEvent<IdentifyResult>): void => {
       if (evt.detail?.protocols?.includes(codec)) {
+        if (metadataService) {
+          metadataService
+            .confirmOrAttemptHandshake(evt.detail.peerId)
+            .then(() => {
               protocol.removeLibp2pEventListener("peer:identify", cb);
               resolve();
+            })
+            .catch((e) => {
+              if (e.code === "ERR_CONNECTION_BEING_CLOSED")
+                log.error(
+                  `Connection with the peer was closed and possibly because it's on a different shard. Error: ${e}`
+                );
+
+              log.error(`Error waiting for handshake confirmation: ${e}`);
+            });
+        } else {
+          protocol.removeLibp2pEventListener("peer:identify", cb);
+          resolve();
+        }
       }
     };
     protocol.addLibp2pEventListener("peer:identify", cb);
@@ -57,7 +57,7 @@ export class WakuNode implements Waku {
     options: WakuOptions,
     pubsubTopics: PubsubTopic[] = [],
     libp2p: Libp2p,
-    pubsubShardInfo?: ShardingParams,
+    private pubsubShardInfo?: ShardingParams,
     store?: (libp2p: Libp2p) => IStore,
     lightPush?: (libp2p: Libp2p) => ILightPush,
     filter?: (libp2p: Libp2p) => IFilter,

@@ -110,6 +110,10 @@ export class WakuNode implements Waku {
     );
   }
 
+  get shardInfo(): ShardingParams | undefined {
+    return this.pubsubShardInfo;
+  }
+
   /**
    * Dials to the provided peer.
    *
@@ -6,14 +6,13 @@ import {
 } from "@libp2p/interface";
 import { peerDiscoverySymbol as symbol } from "@libp2p/interface";
 import type { PeerInfo } from "@libp2p/interface";
-import { encodeRelayShard } from "@waku/enr";
 import type {
   DnsDiscOptions,
   DnsDiscoveryComponents,
   IEnr,
   NodeCapabilityCount
 } from "@waku/interfaces";
-import { Logger } from "@waku/utils";
+import { encodeRelayShard, Logger } from "@waku/utils";
 
 import {
   DEFAULT_BOOTSTRAP_TAG_NAME,

@@ -393,7 +393,6 @@ describe("ENR", function () {
 
   it("should properly create peer info with all multiaddrs", () => {
     const peerInfo = enr.peerInfo!;
-    console.log(peerInfo);
     expect(peerInfo.id.toString()).to.equal(peerId.toString());
     expect(peerInfo.multiaddrs.length).to.equal(5);
     expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(

@@ -5,4 +5,3 @@ export * from "./enr.js";
 export * from "./peer_id.js";
 export * from "./waku2_codec.js";
 export * from "./crypto.js";
-export * from "./relay_shard_codec.js";
@@ -10,11 +10,11 @@ import type {
   ShardInfo,
   Waku2
 } from "@waku/interfaces";
+import { decodeRelayShard } from "@waku/utils";
 import { bytesToUtf8 } from "@waku/utils/bytes";
 
 import { ERR_INVALID_ID } from "./constants.js";
 import { decodeMultiaddrs, encodeMultiaddrs } from "./multiaddrs_codec.js";
-import { decodeRelayShard } from "./relay_shard_codec.js";
 import { decodeWaku2, encodeWaku2 } from "./waku2_codec.js";
 
 export class RawEnr extends Map<ENRKey, ENRValue> {

@@ -1,8 +1,11 @@
 import type { PeerId } from "@libp2p/interface";
 
 import type { ShardInfo } from "./enr.js";
-import type { IBaseProtocol } from "./protocols.js";
+import type { IBaseProtocol, ShardingParams } from "./protocols.js";
 
-export interface IMetadata extends IBaseProtocol {
+// IMetadata always has shardInfo defined while it is optionally undefined in IBaseProtocol
+export interface IMetadata extends Omit<IBaseProtocol, "shardInfo"> {
+  shardInfo: ShardingParams;
+  confirmOrAttemptHandshake(peerId: PeerId): Promise<void>;
   query(peerId: PeerId): Promise<ShardInfo | undefined>;
 }

@@ -15,9 +15,11 @@ export enum Protocols {
 }
 
 export interface IBaseProtocol {
+  shardInfo?: ShardInfo;
   multicodec: string;
   peerStore: PeerStore;
-  peers: () => Promise<Peer[]>;
+  allPeers: () => Promise<Peer[]>;
+  connectedPeers: () => Promise<Peer[]>;
   addLibp2pEventListener: Libp2p["addEventListener"];
   removeLibp2pEventListener: Libp2p["removeEventListener"];
 }
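For consumers of IBaseProtocol, the old peers() method is gone: allPeers() returns every stored peer that supports the codec, while connectedPeers() restricts that to peers with an active connection. A hedged migration sketch:

    import type { IBaseProtocol } from "@waku/interfaces";

    async function inspectPeers(protocol: IBaseProtocol): Promise<void> {
      // Before this change this was `protocol.peers()`.
      const known = await protocol.allPeers(); // every stored peer supporting the codec
      const live = await protocol.connectedPeers(); // only peers with an open connection
      console.log(`${known.length} known, ${live.length} connected`);
    }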
@@ -5,7 +5,7 @@ import { IConnectionManager } from "./connection_manager.js";
 import type { IFilter } from "./filter.js";
 import type { Libp2p } from "./libp2p.js";
 import type { ILightPush } from "./light_push.js";
-import { Protocols } from "./protocols.js";
+import { Protocols, ShardingParams } from "./protocols.js";
 import type { IRelay } from "./relay.js";
 import type { IStore } from "./store.js";
 

@@ -16,6 +16,8 @@ export interface Waku {
   filter?: IFilter;
   lightPush?: ILightPush;
 
+  shardInfo?: ShardingParams;
+
   connectionManager: IConnectionManager;
 
   dial(peer: PeerId | Multiaddr, protocols?: Protocols[]): Promise<Stream>;

@@ -7,9 +7,8 @@ import type {
   PeerId,
   PeerInfo
 } from "@libp2p/interface";
-import { encodeRelayShard } from "@waku/enr";
 import { Libp2pComponents, Tags } from "@waku/interfaces";
-import { Logger } from "@waku/utils";
+import { encodeRelayShard, Logger } from "@waku/utils";
 
 import { PeerExchangeCodec, WakuPeerExchange } from "./waku_peer_exchange.js";
 
@@ -55,7 +55,8 @@ export async function runNodes(
       filter: true,
       lightpush: true,
       relay: true,
-      pubsubTopic: pubsubTopics
+      pubsubTopic: pubsubTopics,
+      ...(shardInfo && { clusterId: shardInfo.clusterId })
     },
     { retries: 3 }
   );
@@ -1,16 +1,403 @@
 import type { Connection, Peer, PeerStore } from "@libp2p/interface";
 import { createSecp256k1PeerId } from "@libp2p/peer-id-factory";
+import { LightPushCodec, waitForRemotePeer } from "@waku/core";
 import {
+  ContentTopicInfo,
   createLightNode,
   Libp2pComponents,
   type LightNode,
+  Protocols,
+  ShardInfo,
   Tags,
   utf8ToBytes
 } from "@waku/sdk";
+import { shardInfoToPubsubTopics } from "@waku/utils";
+import { getConnectedPeersForProtocolAndShard } from "@waku/utils/libp2p";
 import { expect } from "chai";
 import fc from "fast-check";
 import Sinon from "sinon";
 
+import { makeLogFileName, ServiceNode, tearDownNodes } from "../src/index.js";
+
+describe("getConnectedPeersForProtocolAndShard", function () {
+  let waku: LightNode;
+  let serviceNode1: ServiceNode;
+  let serviceNode2: ServiceNode;
+  const contentTopic = "/test/2/waku-light-push/utf8";
+
+  this.beforeEach(async function () {
+    this.timeout(15000);
+    serviceNode1 = new ServiceNode(makeLogFileName(this) + "1");
+    serviceNode2 = new ServiceNode(makeLogFileName(this) + "2");
+  });
+
+  afterEach(async function () {
+    this.timeout(15000);
+    await tearDownNodes([serviceNode1, serviceNode2], waku);
+  });
+
+  it("same cluster, same shard: nodes connect", async function () {
+    this.timeout(15000);
+
+    const shardInfo: ShardInfo = {
+      clusterId: 1,
+      shards: [1]
+    };
+
+    await serviceNode1.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo),
+      lightpush: true,
+      relay: true
+    });
+
+    const serviceNodeMa = await serviceNode1.getMultiaddrWithId();
+
+    waku = await createLightNode({ shardInfo });
+    await waku.start();
+    await waku.libp2p.dialProtocol(serviceNodeMa, LightPushCodec);
+    await waitForRemotePeer(waku, [Protocols.LightPush]);
+    const peers = await getConnectedPeersForProtocolAndShard(
+      waku.libp2p.getConnections(),
+      waku.libp2p.peerStore,
+      waku.libp2p.getProtocols(),
+      shardInfo
+    );
+    expect(peers.length).to.be.greaterThan(0);
+  });
+
+  it("same cluster, different shard: nodes connect", async function () {
+    this.timeout(15000);
+
+    const shardInfo: ShardInfo = {
+      clusterId: 1,
+      shards: [1]
+    };
+
+    const shardInfoServiceNode: ShardInfo = {
+      clusterId: 1,
+      shards: [2]
+    };
+
+    await serviceNode1.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfoServiceNode.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfoServiceNode),
+      lightpush: true,
+      relay: true
+    });
+
+    const serviceNodeMa = await serviceNode1.getMultiaddrWithId();
+
+    waku = await createLightNode({ shardInfo });
+    await waku.libp2p.dialProtocol(serviceNodeMa, LightPushCodec);
+    await waku.start();
+    await waitForRemotePeer(waku, [Protocols.LightPush]);
+
+    const peers = await getConnectedPeersForProtocolAndShard(
+      waku.libp2p.getConnections(),
+      waku.libp2p.peerStore,
+      waku.libp2p.getProtocols(),
+      shardInfo
+    );
+    expect(peers.length).to.be.greaterThan(0);
+  });
+
+  it("different cluster, same shard: nodes don't connect", async function () {
+    this.timeout(15000);
+
+    const shardInfo1: ShardInfo = {
+      clusterId: 1,
+      shards: [1]
+    };
+
+    const shardInfo2: ShardInfo = {
+      clusterId: 2,
+      shards: [1]
+    };
+
+    // we start one node in a separate cluster
+    await serviceNode1.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo1.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo1),
+      lightpush: true,
+      relay: true
+    });
+
+    // and another node in the same cluster cluster as our node
+    await serviceNode2.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo2.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo2),
+      lightpush: true,
+      relay: true
+    });
+
+    const serviceNode1Ma = await serviceNode1.getMultiaddrWithId();
+    const serviceNode2Ma = await serviceNode2.getMultiaddrWithId();
+
+    waku = await createLightNode({ shardInfo: shardInfo2 });
+    await waku.libp2p.dialProtocol(serviceNode1Ma, LightPushCodec);
+    await waku.libp2p.dialProtocol(serviceNode2Ma, LightPushCodec);
+
+    await waku.start();
+    await waitForRemotePeer(waku, [Protocols.LightPush]);
+
+    const peers = await getConnectedPeersForProtocolAndShard(
+      waku.libp2p.getConnections(),
+      waku.libp2p.peerStore,
+      waku.libp2p.getProtocols(),
+      shardInfo2
+    );
+    expect(peers.length).to.be.equal(1);
+  });
+
+  it("different cluster, different shard: nodes don't connect", async function () {
+    this.timeout(15000);
+
+    const shardInfo1: ShardInfo = {
+      clusterId: 1,
+      shards: [1]
+    };
+
+    const shardInfo2: ShardInfo = {
+      clusterId: 2,
+      shards: [2]
+    };
+
+    // we start one node in a separate cluster
+    await serviceNode1.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo1.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo1),
+      lightpush: true,
+      relay: true
+    });
+
+    // and another node in the same cluster cluster as our node
+    const serviceNode2 = new ServiceNode(makeLogFileName(this) + "2");
+    await serviceNode2.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo2.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo2),
+      lightpush: true,
+      relay: true
+    });
+
+    const serviceNodeMa1 = await serviceNode1.getMultiaddrWithId();
+    const serviceNodeMa2 = await serviceNode2.getMultiaddrWithId();
+
+    waku = await createLightNode({ shardInfo: shardInfo2 });
+    await waku.libp2p.dialProtocol(serviceNodeMa1, LightPushCodec);
+    await waku.libp2p.dialProtocol(serviceNodeMa2, LightPushCodec);
+    await waku.start();
+    await waitForRemotePeer(waku, [Protocols.LightPush]);
+
+    const peers = await getConnectedPeersForProtocolAndShard(
+      waku.libp2p.getConnections(),
+      waku.libp2p.peerStore,
+      waku.libp2p.getProtocols(),
+      shardInfo2
+    );
+    expect(peers.length).to.be.equal(1);
+  });
+
+  it("same cluster, same shard: nodes connect (autosharding)", async function () {
+    this.timeout(15000);
+
+    const shardInfo: ContentTopicInfo = {
+      clusterId: 1,
+      contentTopics: [contentTopic]
+    };
+
+    await serviceNode1.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo),
+      lightpush: true,
+      relay: true
+    });
+
+    const serviceNodeMa = await serviceNode1.getMultiaddrWithId();
+
+    waku = await createLightNode({ shardInfo });
+    await waku.start();
+    await waku.libp2p.dialProtocol(serviceNodeMa, LightPushCodec);
+    await waitForRemotePeer(waku, [Protocols.LightPush]);
+    const peers = await getConnectedPeersForProtocolAndShard(
+      waku.libp2p.getConnections(),
+      waku.libp2p.peerStore,
+      waku.libp2p.getProtocols(),
+      shardInfo
+    );
+    expect(peers.length).to.be.greaterThan(0);
+  });
+
+  it("same cluster, different shard: nodes connect (autosharding)", async function () {
+    this.timeout(15000);
+
+    const shardInfo1: ContentTopicInfo = {
+      clusterId: 1,
+      contentTopics: [contentTopic]
+    };
+
+    const shardInfo2: ContentTopicInfo = {
+      clusterId: 1,
+      contentTopics: ["/test/5/waku-light-push/utf8"]
+    };
+
+    // Separate shard
+    await serviceNode1.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo1.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo1),
+      lightpush: true,
+      relay: true
+    });
+
+    // Same shard
+    await serviceNode2.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo2.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo2),
+      lightpush: true,
+      relay: true
+    });
+
+    const serviceNode1Ma = await serviceNode1.getMultiaddrWithId();
+    const serviceNode2Ma = await serviceNode2.getMultiaddrWithId();
+
+    waku = await createLightNode({ shardInfo: shardInfo2 });
+    await waku.libp2p.dialProtocol(serviceNode1Ma, LightPushCodec);
+    await waku.libp2p.dialProtocol(serviceNode2Ma, LightPushCodec);
+
+    await waku.start();
+    await waitForRemotePeer(waku, [Protocols.LightPush]);
+
+    const peers = await getConnectedPeersForProtocolAndShard(
+      waku.libp2p.getConnections(),
+      waku.libp2p.peerStore,
+      waku.libp2p.getProtocols(),
+      shardInfo2
+    );
+    expect(peers.length).to.be.equal(1);
+  });
+
+  it("different cluster, same shard: nodes don't connect (autosharding)", async function () {
+    this.timeout(15000);
+
+    const shardInfo1: ContentTopicInfo = {
+      clusterId: 1,
+      contentTopics: [contentTopic]
+    };
+
+    const shardInfo2: ContentTopicInfo = {
+      clusterId: 2,
+      contentTopics: [contentTopic]
+    };
+
+    // we start one node in a separate cluster
+    await serviceNode1.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo1.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo1),
+      lightpush: true,
+      relay: true
+    });
+
+    // and another node in the same cluster cluster as our node
+    await serviceNode2.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo2.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo2),
+      lightpush: true,
+      relay: true
+    });
+
+    const serviceNode1Ma = await serviceNode1.getMultiaddrWithId();
+    const serviceNode2Ma = await serviceNode2.getMultiaddrWithId();
+
+    waku = await createLightNode({ shardInfo: shardInfo2 });
+    await waku.libp2p.dialProtocol(serviceNode1Ma, LightPushCodec);
+    await waku.libp2p.dialProtocol(serviceNode2Ma, LightPushCodec);
+
+    await waku.start();
+    await waitForRemotePeer(waku, [Protocols.LightPush]);
+
+    const peers = await getConnectedPeersForProtocolAndShard(
+      waku.libp2p.getConnections(),
+      waku.libp2p.peerStore,
+      waku.libp2p.getProtocols(),
+      shardInfo2
+    );
+    expect(peers.length).to.be.equal(1);
+  });
+
+  it("different cluster, different shard: nodes don't connect (autosharding)", async function () {
+    this.timeout(15000);
+
+    const shardInfo1: ContentTopicInfo = {
+      clusterId: 1,
+      contentTopics: [contentTopic]
+    };
+
+    const shardInfo2: ContentTopicInfo = {
+      clusterId: 2,
+      contentTopics: ["/test/5/waku-light-push/utf8"]
+    };
+
+    // we start one node in a separate cluster
+    await serviceNode1.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo1.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo1),
+      lightpush: true,
+      relay: true
+    });
+
+    // and another node in the same cluster cluster as our node
+    const serviceNode2 = new ServiceNode(makeLogFileName(this) + "2");
+    await serviceNode2.start({
+      discv5Discovery: true,
+      peerExchange: true,
+      clusterId: shardInfo2.clusterId,
+      pubsubTopic: shardInfoToPubsubTopics(shardInfo2),
+      lightpush: true,
+      relay: true
+    });
+
+    const serviceNodeMa1 = await serviceNode1.getMultiaddrWithId();
+    const serviceNodeMa2 = await serviceNode2.getMultiaddrWithId();
+
+    waku = await createLightNode({ shardInfo: shardInfo2 });
+    await waku.libp2p.dialProtocol(serviceNodeMa1, LightPushCodec);
+    await waku.libp2p.dialProtocol(serviceNodeMa2, LightPushCodec);
+    await waku.start();
+    await waitForRemotePeer(waku, [Protocols.LightPush]);
+
+    const peers = await getConnectedPeersForProtocolAndShard(
+      waku.libp2p.getConnections(),
+      waku.libp2p.peerStore,
+      waku.libp2p.getProtocols(),
+      shardInfo2
+    );
+    expect(peers.length).to.be.equal(1);
+  });
+});
 describe("getPeers", function () {
   let peerStore: PeerStore;
   let connectionManager: Libp2pComponents["connectionManager"];
@@ -33,10 +33,7 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
     clusterId: 3,
     shard: 1
   });
-  const customPubsubTopic2 = singleShardInfoToPubsubTopic({
-    clusterId: 3,
-    shard: 2
-  });
   const shardInfo: ShardInfo = { clusterId: 3, shards: [1, 2] };
   const singleShardInfo1: SingleShardInfo = { clusterId: 3, shard: 1 };
   const singleShardInfo2: SingleShardInfo = { clusterId: 3, shard: 2 };

@@ -57,7 +54,10 @@
     this.timeout(15000);
     [nwaku, waku] = await runNodes(
       this,
-      [customPubsubTopic1, customPubsubTopic2],
+      [
+        singleShardInfoToPubsubTopic(singleShardInfo1),
+        singleShardInfoToPubsubTopic(singleShardInfo2)
+      ],
       shardInfo
     );
     messageCollector = new MessageCollector(nwaku);

@@ -107,7 +107,7 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
 
     expect(
       await messageCollector2.waitForMessages(1, {
-        pubsubTopic: customPubsubTopic2
+        pubsubTopic: singleShardInfoToPubsubTopic(singleShardInfo2)
       })
     ).to.eq(true);
 

@@ -130,9 +130,12 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
       filter: true,
       lightpush: true,
       relay: true,
-      pubsubTopic: [customPubsubTopic2]
+      pubsubTopic: [singleShardInfoToPubsubTopic(singleShardInfo2)],
+      clusterId: singleShardInfo2.clusterId
     });
-    await nwaku2.ensureSubscriptions([customPubsubTopic2]);
+    await nwaku2.ensureSubscriptions([
+      singleShardInfoToPubsubTopic(singleShardInfo2)
+    ]);
     await waku.dial(await nwaku2.getMultiaddrWithId());
     await waitForRemotePeer(waku, [Protocols.LightPush]);
 

@@ -147,7 +150,7 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
         pubsubTopic: customPubsubTopic1
       })) ||
       !(await messageCollector2.waitForMessages(1, {
-        pubsubTopic: customPubsubTopic2
+        pubsubTopic: singleShardInfoToPubsubTopic(singleShardInfo2)
       })) ||
       pushResponse1!.recipients[0].toString() ===
         pushResponse2!.recipients[0].toString()

@@ -168,7 +171,7 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
     messageCollector2.verifyReceivedMessage(0, {
       expectedMessageText: "M2",
       expectedContentTopic: customContentTopic2,
-      expectedPubsubTopic: customPubsubTopic2
+      expectedPubsubTopic: singleShardInfoToPubsubTopic(singleShardInfo2)
     });
   });
 });

@@ -286,7 +289,8 @@ describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
       filter: true,
       lightpush: true,
       relay: true,
-      pubsubTopic: [autoshardingPubsubTopic2]
+      pubsubTopic: [autoshardingPubsubTopic2],
+      clusterId: shardInfo.clusterId
     });
     await nwaku2.ensureSubscriptionsAutosharding([customContentTopic2]);
     await waku.dial(await nwaku2.getMultiaddrWithId());

@@ -349,10 +353,6 @@ describe("Waku Light Push (named sharding): Multiple PubsubTopics", function ()
     customContentTopic2,
     clusterId
   );
-  const contentTopicInfo: ContentTopicInfo = {
-    clusterId,
-    contentTopics: [customContentTopic1, customContentTopic2]
-  };
   const customEncoder1 = createEncoder({
     contentTopic: customContentTopic1,
     pubsubTopicShardInfo: {

@@ -368,11 +368,10 @@ describe("Waku Light Push (named sharding): Multiple PubsubTopics", function ()
 
   this.beforeEach(async function () {
     this.timeout(15000);
-    [nwaku, waku] = await runNodes(
-      this,
-      [autoshardingPubsubTopic1, autoshardingPubsubTopic2],
-      contentTopicInfo
-    );
+    [nwaku, waku] = await runNodes(this, [
+      autoshardingPubsubTopic1,
+      autoshardingPubsubTopic2
+    ]);
     messageCollector = new MessageCollector(nwaku);
     nimPeerId = await nwaku.getPeerId();
   });
@@ -24,7 +24,12 @@ export async function runNodes(
 ): Promise<[ServiceNode, LightNode]> {
   const nwaku = new ServiceNode(makeLogFileName(context));
   await nwaku.start(
-    { lightpush: true, relay: true, pubsubTopic: pubsubTopics },
+    {
+      lightpush: true,
+      relay: true,
+      pubsubTopic: pubsubTopics,
+      ...(shardInfo && { clusterId: shardInfo.clusterId })
+    },
     { retries: 3 }
   );
 

@@ -1,7 +1,7 @@
 import { MetadataCodec } from "@waku/core";
-import { decodeRelayShard } from "@waku/enr";
 import type { LightNode, ShardInfo } from "@waku/interfaces";
 import { createLightNode } from "@waku/sdk";
+import { decodeRelayShard } from "@waku/utils";
 import { shardInfoToPubsubTopics } from "@waku/utils";
 import chai, { expect } from "chai";
 import chaiAsPromised from "chai-as-promised";
@@ -1,7 +1,10 @@
 import { createDecoder, waitForRemotePeer } from "@waku/core";
 import type { ContentTopicInfo, IMessage, LightNode } from "@waku/interfaces";
 import { createLightNode, Protocols } from "@waku/sdk";
-import { contentTopicToPubsubTopic } from "@waku/utils";
+import {
+  contentTopicToPubsubTopic,
+  singleShardInfosToShardInfo
+} from "@waku/utils";
 import { expect } from "chai";
 
 import {
@@ -18,6 +21,8 @@ import {
   customDecoder2,
   customShardedPubsubTopic1,
   customShardedPubsubTopic2,
+  customShardInfo1,
+  customShardInfo2,
   processQueriedMessages,
   sendMessages,
   sendMessagesAutosharding,
@@ -39,6 +44,7 @@ describe("Waku Store, custom pubsub topic", function () {
     await nwaku.start({
       store: true,
       pubsubTopic: [customShardedPubsubTopic1, customShardedPubsubTopic2],
+      clusterId: customShardInfo1.clusterId,
       relay: true
     });
     await nwaku.ensureSubscriptions([
@@ -123,6 +129,7 @@ describe("Waku Store, custom pubsub topic", function () {
     await nwaku2.start({
       store: true,
       pubsubTopic: [customShardedPubsubTopic2],
+      clusterId: customShardInfo2.clusterId,
       relay: true
     });
     await nwaku2.ensureSubscriptions([customShardedPubsubTopic2]);
@@ -210,7 +217,8 @@ describe("Waku Store (Autosharding), custom pubsub topic", function () {
     await nwaku.start({
       store: true,
       pubsubTopic: [autoshardingPubsubTopic1, autoshardingPubsubTopic2],
-      relay: true
+      relay: true,
+      clusterId
     });
     await nwaku.ensureSubscriptionsAutosharding([
       customContentTopic1,
@@ -283,7 +291,8 @@ describe("Waku Store (Autosharding), custom pubsub topic", function () {
     await nwaku2.start({
       store: true,
       pubsubTopic: [autoshardingPubsubTopic2],
-      relay: true
+      relay: true,
+      clusterId
     });
     await nwaku2.ensureSubscriptionsAutosharding([customContentTopic2]);
 
@@ -339,11 +348,18 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
 
   beforeEach(async function () {
     this.timeout(15000);
 
+    const shardInfo = singleShardInfosToShardInfo([
+      customShardInfo1,
+      customShardInfo2
+    ]);
+
     nwaku = new ServiceNode(makeLogFileName(this));
     await nwaku.start({
       store: true,
       relay: true,
-      pubsubTopic: [customShardedPubsubTopic1, customShardedPubsubTopic2]
+      pubsubTopic: [customShardedPubsubTopic1, customShardedPubsubTopic2],
+      clusterId: shardInfo.clusterId
     });
     await nwaku.ensureSubscriptions([
       customShardedPubsubTopic1,
@@ -353,10 +369,7 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
     waku = await startAndConnectLightNode(
       nwaku,
       [customShardedPubsubTopic1, customShardedPubsubTopic2],
-      {
-        clusterId: 3,
-        shards: [1, 2]
-      }
+      shardInfo
     );
   });
 
@@ -434,7 +447,8 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
     await nwaku2.start({
       store: true,
       pubsubTopic: [customShardedPubsubTopic2],
-      relay: true
+      relay: true,
+      clusterId: customShardInfo2.clusterId
     });
     await nwaku2.ensureSubscriptions([customShardedPubsubTopic2]);
 
@@ -10,7 +10,8 @@ import {
   LightNode,
   Protocols,
   ShardInfo,
-  ShardingParams
+  ShardingParams,
+  type SingleShardInfo
 } from "@waku/interfaces";
 import { createLightNode } from "@waku/sdk";
 import { Logger, singleShardInfoToPubsubTopic } from "@waku/utils";
@@ -23,14 +24,13 @@ export const log = new Logger("test:store");
 export const TestContentTopic = "/test/1/waku-store/utf8";
 export const TestEncoder = createEncoder({ contentTopic: TestContentTopic });
 export const TestDecoder = createDecoder(TestContentTopic);
-export const customShardedPubsubTopic1 = singleShardInfoToPubsubTopic({
-  clusterId: 3,
-  shard: 1
-});
-export const customShardedPubsubTopic2 = singleShardInfoToPubsubTopic({
-  clusterId: 3,
-  shard: 2
-});
+export const customShardInfo1: SingleShardInfo = { clusterId: 3, shard: 1 };
+export const customShardedPubsubTopic1 =
+  singleShardInfoToPubsubTopic(customShardInfo1);
+export const customShardInfo2: SingleShardInfo = { clusterId: 3, shard: 2 };
+export const customShardedPubsubTopic2 =
+  singleShardInfoToPubsubTopic(customShardInfo2);
 export const shardInfo1: ShardInfo = { clusterId: 3, shards: [1] };
 export const customContentTopic1 = "/test/2/waku-store/utf8";
 export const customContentTopic2 = "/test/3/waku-store/utf8";
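For reference, a short sketch of what these constants resolve to, given the singleShardInfoToPubsubTopic implementation shown further below (topic strings follow the /waku/2/rs/<clusterId>/<shard> static-sharding form):

import { singleShardInfoToPubsubTopic } from "@waku/utils";

// Same values as customShardInfo1 / customShardInfo2 above.
const topic1 = singleShardInfoToPubsubTopic({ clusterId: 3, shard: 1 });
const topic2 = singleShardInfoToPubsubTopic({ clusterId: 3, shard: 2 });
// topic1 === "/waku/2/rs/3/1"
// topic2 === "/waku/2/rs/3/2"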
@@ -115,7 +115,9 @@ describe("Wait for remote peer", function () {
     await delay(1000);
     await waitForRemotePeer(waku2, [Protocols.Store]);
 
-    const peers = (await waku2.store.peers()).map((peer) => peer.id.toString());
+    const peers = (await waku2.store.connectedPeers()).map((peer) =>
+      peer.id.toString()
+    );
     const nimPeerId = multiAddrWithId.getPeerId();
 
     expect(nimPeerId).to.not.be.undefined;
@@ -142,7 +144,9 @@ describe("Wait for remote peer", function () {
     await waku2.dial(multiAddrWithId);
     await waitPromise;
 
-    const peers = (await waku2.store.peers()).map((peer) => peer.id.toString());
+    const peers = (await waku2.store.connectedPeers()).map((peer) =>
+      peer.id.toString()
+    );
 
     const nimPeerId = multiAddrWithId.getPeerId();
 
@@ -168,7 +172,7 @@ describe("Wait for remote peer", function () {
     await waku2.dial(multiAddrWithId);
     await waitForRemotePeer(waku2, [Protocols.LightPush]);
 
-    const peers = (await waku2.lightPush.peers()).map((peer) =>
+    const peers = (await waku2.lightPush.connectedPeers()).map((peer) =>
       peer.id.toString()
     );
 
@@ -196,7 +200,7 @@ describe("Wait for remote peer", function () {
     await waku2.dial(multiAddrWithId);
     await waitForRemotePeer(waku2, [Protocols.Filter]);
 
-    const peers = (await waku2.filter.peers()).map((peer) =>
+    const peers = (await waku2.filter.connectedPeers()).map((peer) =>
       peer.id.toString()
     );
 
@@ -228,14 +232,14 @@ describe("Wait for remote peer", function () {
       Protocols.LightPush
     ]);
 
-    const filterPeers = (await waku2.filter.peers()).map((peer) =>
+    const filterPeers = (await waku2.filter.connectedPeers()).map((peer) =>
       peer.id.toString()
     );
-    const storePeers = (await waku2.store.peers()).map((peer) =>
+    const storePeers = (await waku2.store.connectedPeers()).map((peer) =>
       peer.id.toString()
     );
-    const lightPushPeers = (await waku2.lightPush.peers()).map((peer) =>
-      peer.id.toString()
+    const lightPushPeers = (await waku2.lightPush.connectedPeers()).map(
+      (peer) => peer.id.toString()
     );
 
     const nimPeerId = multiAddrWithId.getPeerId();
@@ -80,6 +80,7 @@
     "@waku/build-utils": "*",
     "cspell": "^7.3.2",
     "npm-run-all": "^4.1.5",
+    "fast-check": "^3.14.0",
     "rollup": "^4.9.5"
   },
   "files": [
@@ -5,6 +5,7 @@ export * from "./to_async_iterator.js";
 export * from "./is_size_valid.js";
 export * from "./sharding.js";
 export * from "./push_or_init_map.js";
+export * from "./relay_shard_codec.js";
 
 export function removeItemFromArray(arr: unknown[], value: unknown): unknown[] {
   const index = arr.indexOf(value);
@@ -2,6 +2,7 @@ import { sha256 } from "@noble/hashes/sha256";
 import {
   DefaultPubsubTopic,
   PubsubTopic,
+  ShardInfo,
   ShardingParams,
   SingleShardInfo
 } from "@waku/interfaces";
@@ -17,11 +18,32 @@ export const singleShardInfoToPubsubTopic = (
   return `/waku/2/rs/${shardInfo.clusterId}/${shardInfo.shard}`;
 };
 
+export const singleShardInfosToShardInfo = (
+  singleShardInfos: SingleShardInfo[]
+): ShardInfo => {
+  if (singleShardInfos.length === 0) throw new Error("Invalid shard");
+
+  const clusterIds = singleShardInfos.map((shardInfo) => shardInfo.clusterId);
+  if (new Set(clusterIds).size !== 1) {
+    throw new Error("Passed shard infos have different clusterIds");
+  }
+
+  const shards = singleShardInfos
+    .map((shardInfo) => shardInfo.shard)
+    .filter((shard): shard is number => shard !== undefined);
+
+  return {
+    clusterId: singleShardInfos[0].clusterId,
+    shards
+  };
+};
+
 export const shardInfoToPubsubTopics = (
   shardInfo: ShardingParams
 ): PubsubTopic[] => {
   if (shardInfo.clusterId === undefined)
     throw new Error("Cluster ID must be specified");
 
   if ("contentTopics" in shardInfo) {
     // Autosharding: explicitly defined content topics
     return Array.from(
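A usage sketch of the new helper; the input values are illustrative:

import { singleShardInfosToShardInfo } from "@waku/utils";

// Merges single-shard descriptors that share one cluster into a ShardInfo.
const shardInfo = singleShardInfosToShardInfo([
  { clusterId: 3, shard: 1 },
  { clusterId: 3, shard: 2 }
]);
// shardInfo => { clusterId: 3, shards: [1, 2] }

// An empty array throws "Invalid shard", and mixing cluster ids throws
// "Passed shard infos have different clusterIds".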
@@ -1,6 +1,8 @@
 import type { Connection, Peer, PeerStore } from "@libp2p/interface";
+import { ShardingParams } from "@waku/interfaces";
 
 import { bytesToUtf8 } from "../bytes/index.js";
+import { decodeRelayShard } from "../common/relay_shard_codec.js";
 
 /**
  * Returns a pseudo-random peer that supports the given protocol.
@@ -68,10 +70,11 @@ export async function getPeersForProtocol(
   return peers;
 }
 
-export async function getConnectedPeersForProtocol(
+export async function getConnectedPeersForProtocolAndShard(
   connections: Connection[],
   peerStore: PeerStore,
-  protocols: string[]
+  protocols: string[],
+  shardInfo?: ShardingParams
 ): Promise<Peer[]> {
   const openConnections = connections.filter(
     (connection) => connection.status === "open"
@@ -79,10 +82,24 @@ export async function getConnectedPeersForProtocol(
 
   const peerPromises = openConnections.map(async (connection) => {
     const peer = await peerStore.get(connection.remotePeer);
-    const supportsProtocol = peer.protocols.some((protocol) =>
-      protocols.includes(protocol)
+    const supportsProtocol = protocols.some((protocol) =>
+      peer.protocols.includes(protocol)
     );
-    return supportsProtocol ? peer : null;
+
+    if (supportsProtocol) {
+      if (shardInfo) {
+        const encodedPeerShardInfo = peer.metadata.get("shardInfo");
+        const peerShardInfo =
+          encodedPeerShardInfo && decodeRelayShard(encodedPeerShardInfo);
+
+        if (peerShardInfo && shardInfo.clusterId === peerShardInfo.clusterId) {
+          return peer;
+        }
+      } else {
+        return peer;
+      }
+    }
+    return null;
   });
 
   const peersWithNulls = await Promise.all(peerPromises);
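A usage sketch of the shard-aware peer filter above. The libp2p accessors and the protocol codec string are assumptions for illustration; as the hunk shows, a peer is kept when it supports one of the protocols and, if a shard configuration is given, when the "shardInfo" entry in its metadata decodes to the same cluster id.

// Sketch only: `libp2p` is assumed to be a started libp2p instance and the
// codec string is illustrative.
const shardInfo = { clusterId: 3, shards: [1, 2] };
const peers = await getConnectedPeersForProtocolAndShard(
  libp2p.getConnections(),
  libp2p.peerStore,
  ["/vac/waku/lightpush/2.0.0-beta1"],
  shardInfo
);
// `peers` now contains only connected peers that both speak the protocol and
// advertise clusterId 3 in their stored shard metadata.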