chore: update dependencies (#1460)

* update to latest prettier

* fix with prettier: added trailing comma

* remove deps from test package, add sinon types, fix type hack in a test, update eslint prettier config

* update typescript eslint plugins

* update package-locks
This commit is contained in:
Sasha 2023-08-11 15:14:02 +02:00 committed by GitHub
parent 7227657d69
commit 51f9261a16
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
104 changed files with 18699 additions and 26776 deletions

View File

@ -52,13 +52,15 @@
"no-console": "error",
"@typescript-eslint/no-floating-promises": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/no-misused-promises": "error"
"@typescript-eslint/no-misused-promises": "error",
"@typescript-eslint/no-explicit-any": "warn"
},
"overrides": [
{
"files": ["*.spec.ts", "**/test_utils/*.ts", "*.js", "*.cjs"],
"rules": {
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-explicit-any": "off",
"no-console": "off"
}
},

30068
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -41,21 +41,21 @@
},
"devDependencies": {
"@size-limit/preset-big-lib": "^8.2.4",
"gh-pages": "^5.0.0",
"husky": "^8.0.3",
"lint-staged": "^13.2.2",
"size-limit": "^8.1.2",
"typedoc": "^0.23.26",
"typedoc-plugin-resolve-crossmodule-references": "^0.3.3",
"@typescript-eslint/eslint-plugin": "^5.57.0",
"@typescript-eslint/parser": "^5.62.0",
"@typescript-eslint/eslint-plugin": "^6.2.1",
"@typescript-eslint/parser": "^6.0.0",
"eslint": "^8.41.0",
"eslint-config-prettier": "^8.6.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-eslint-comments": "^3.2.0",
"eslint-plugin-functional": "^5.0.4",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-prettier": "^4.2.1",
"prettier": "^2.8.8"
"eslint-plugin-prettier": "^5.0.0",
"gh-pages": "^5.0.0",
"husky": "^8.0.3",
"lint-staged": "^13.2.2",
"prettier": "^3.0.0",
"size-limit": "^8.1.2",
"typedoc": "^0.23.26",
"typedoc-plugin-resolve-crossmodule-references": "^0.3.3"
},
"lint-staged": {
"*.{ts,js}": [

View File

@ -110,7 +110,6 @@
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"process": "^0.11.10",
"puppeteer": "^20.4.0",
"rollup": "^3.21.3",

View File

@ -17,12 +17,15 @@ export class BaseProtocol implements IBaseProtocol {
public readonly addLibp2pEventListener: Libp2p["addEventListener"];
public readonly removeLibp2pEventListener: Libp2p["removeEventListener"];
constructor(public multicodec: string, private components: Libp2pComponents) {
constructor(
public multicodec: string,
private components: Libp2pComponents,
) {
this.addLibp2pEventListener = components.events.addEventListener.bind(
components.events
components.events,
);
this.removeLibp2pEventListener = components.events.removeEventListener.bind(
components.events
components.events,
);
}
@ -43,13 +46,13 @@ export class BaseProtocol implements IBaseProtocol {
const { peer } = await selectPeerForProtocol(
this.peerStore,
[this.multicodec],
peerId
peerId,
);
return peer;
}
protected async newStream(peer: Peer): Promise<Stream> {
const connections = this.components.connectionManager.getConnections(
peer.id
peer.id,
);
const connection = selectConnection(connections);
if (!connection) {

View File

@ -41,7 +41,7 @@ export class ConnectionManager
libp2p: Libp2p,
keepAliveOptions: KeepAliveOptions,
relay?: IRelay,
options?: ConnectionManagerOptions
options?: ConnectionManagerOptions,
): ConnectionManager {
let instance = ConnectionManager.instances.get(peerId);
if (!instance) {
@ -49,7 +49,7 @@ export class ConnectionManager
libp2p,
keepAliveOptions,
relay,
options
options,
);
ConnectionManager.instances.set(peerId, instance);
}
@ -105,7 +105,7 @@ export class ConnectionManager
libp2p: Libp2p,
keepAliveOptions: KeepAliveOptions,
relay?: IRelay,
options?: Partial<ConnectionManagerOptions>
options?: Partial<ConnectionManagerOptions>,
) {
super();
this.libp2p = libp2p;
@ -126,7 +126,7 @@ export class ConnectionManager
// which means that before the ConnectionManager is initialized, some peers may have been discovered
// we will dial the peers in peerStore ONCE before we start to listen to the `peer:discovery` events within the ConnectionManager
this.dialPeerStorePeers().catch((error) =>
log(`Unexpected error while dialing peer store peers`, error)
log(`Unexpected error while dialing peer store peers`, error),
);
}
@ -159,15 +159,15 @@ export class ConnectionManager
this.keepAliveManager.stopAll();
this.libp2p.removeEventListener(
"peer:connect",
this.onEventHandlers["peer:connect"]
this.onEventHandlers["peer:connect"],
);
this.libp2p.removeEventListener(
"peer:disconnect",
this.onEventHandlers["peer:disconnect"]
this.onEventHandlers["peer:disconnect"],
);
this.libp2p.removeEventListener(
"peer:discovery",
this.onEventHandlers["peer:discovery"]
this.onEventHandlers["peer:discovery"],
);
}
@ -198,7 +198,7 @@ export class ConnectionManager
log(
`Error dialing peer ${peerId.toString()} - ${
(error as any).message
}`
}`,
);
}
this.dialErrorsForPeer.set(peerId.toString(), error);
@ -225,14 +225,14 @@ export class ConnectionManager
}
log(
`Deleting undialable peer ${peerId.toString()} from peer store. Error: ${errorMessage}`
`Deleting undialable peer ${peerId.toString()} from peer store. Error: ${errorMessage}`,
);
this.dialErrorsForPeer.delete(peerId.toString());
await this.libp2p.peerStore.delete(peerId);
} catch (error) {
throw new Error(
`Error deleting undialable peer ${peerId.toString()} from peer store - ${error}`
`Error deleting undialable peer ${peerId.toString()} from peer store - ${error}`,
);
}
}
@ -245,7 +245,7 @@ export class ConnectionManager
log(`Dropped connection with peer ${peerId.toString()}`);
} catch (error) {
log(
`Error dropping connection with peer ${peerId.toString()} - ${error}`
`Error dropping connection with peer ${peerId.toString()} - ${error}`,
);
}
}
@ -266,14 +266,14 @@ export class ConnectionManager
private startPeerDiscoveryListener(): void {
this.libp2p.addEventListener(
"peer:discovery",
this.onEventHandlers["peer:discovery"]
this.onEventHandlers["peer:discovery"],
);
}
private startPeerConnectionListener(): void {
this.libp2p.addEventListener(
"peer:connect",
this.onEventHandlers["peer:connect"]
this.onEventHandlers["peer:connect"],
);
}
@ -292,7 +292,7 @@ export class ConnectionManager
*/
this.libp2p.addEventListener(
"peer:disconnect",
this.onEventHandlers["peer:disconnect"]
this.onEventHandlers["peer:disconnect"],
);
}
@ -315,7 +315,7 @@ export class ConnectionManager
const { id: peerId } = evt.detail;
const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes(
Tags.BOOTSTRAP
Tags.BOOTSTRAP,
);
this.dispatchEvent(
@ -325,8 +325,8 @@ export class ConnectionManager
: EPeersByDiscoveryEvents.PEER_DISCOVERY_PEER_EXCHANGE,
{
detail: peerId,
}
)
},
),
);
try {
@ -343,7 +343,7 @@ export class ConnectionManager
this.keepAliveManager.start(peerId, this.libp2p.services.ping);
const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes(
Tags.BOOTSTRAP
Tags.BOOTSTRAP,
);
if (isBootstrap) {
@ -362,8 +362,8 @@ export class ConnectionManager
EPeersByDiscoveryEvents.PEER_CONNECT_BOOTSTRAP,
{
detail: peerId,
}
)
},
),
);
}
} else {
@ -372,8 +372,8 @@ export class ConnectionManager
EPeersByDiscoveryEvents.PEER_CONNECT_PEER_EXCHANGE,
{
detail: peerId,
}
)
},
),
);
}
})();

View File

@ -35,7 +35,7 @@ export class FilterSubscribeRpc {
static createSubscribeRequest(
pubsubTopic: string,
contentTopics: string[]
contentTopics: string[],
): FilterSubscribeRpc {
return new FilterSubscribeRpc({
requestId: uuid(),
@ -48,7 +48,7 @@ export class FilterSubscribeRpc {
static createUnsubscribeRequest(
pubsubTopic: string,
contentTopics: string[]
contentTopics: string[],
): FilterSubscribeRpc {
return new FilterSubscribeRpc({
requestId: uuid(),

View File

@ -59,7 +59,7 @@ class Subscription {
constructor(
pubSubTopic: PubSubTopic,
remotePeer: Peer,
newStream: (peer: Peer) => Promise<Stream>
newStream: (peer: Peer) => Promise<Stream>,
) {
this.peer = remotePeer;
this.pubSubTopic = pubSubTopic;
@ -69,7 +69,7 @@ class Subscription {
async subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>
callback: Callback<T>,
): Promise<void> {
const decodersArray = Array.isArray(decoders) ? decoders : [decoders];
const decodersGroupedByCT = groupByContentTopic(decodersArray);
@ -79,7 +79,7 @@ class Subscription {
const request = FilterSubscribeRpc.createSubscribeRequest(
this.pubSubTopic,
contentTopics
contentTopics,
);
try {
@ -88,7 +88,7 @@ class Subscription {
lp.encode,
stream,
lp.decode,
async (source) => await all(source)
async (source) => await all(source),
);
const { statusCode, requestId, statusDesc } =
@ -96,7 +96,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) {
throw new Error(
`Filter subscribe request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
`Filter subscribe request ${requestId} failed with status code ${statusCode}: ${statusDesc}`,
);
}
@ -104,7 +104,7 @@ class Subscription {
"Subscribed to peer ",
this.peer.id.toString(),
"for content topics",
contentTopics
contentTopics,
);
} catch (e) {
throw new Error(
@ -113,7 +113,7 @@ class Subscription {
" for content topics: " +
contentTopics +
": " +
e
e,
);
}
@ -138,7 +138,7 @@ class Subscription {
const stream = await this.newStream(this.peer);
const unsubscribeRequest = FilterSubscribeRpc.createUnsubscribeRequest(
this.pubSubTopic,
contentTopics
contentTopics,
);
try {
@ -163,7 +163,7 @@ class Subscription {
lp.encode,
stream,
lp.decode,
async (source) => await all(source)
async (source) => await all(source),
);
const { statusCode, requestId, statusDesc } =
@ -171,7 +171,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) {
throw new Error(
`Filter ping request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
`Filter ping request ${requestId} failed with status code ${statusCode}: ${statusDesc}`,
);
}
@ -186,7 +186,7 @@ class Subscription {
const stream = await this.newStream(this.peer);
const request = FilterSubscribeRpc.createUnsubscribeAllRequest(
this.pubSubTopic
this.pubSubTopic,
);
try {
@ -195,7 +195,7 @@ class Subscription {
lp.encode,
stream,
lp.decode,
async (source) => await all(source)
async (source) => await all(source),
);
const { statusCode, requestId, statusDesc } =
@ -203,7 +203,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) {
throw new Error(
`Filter unsubscribe all request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
`Filter unsubscribe all request ${requestId} failed with status code ${statusCode}: ${statusDesc}`,
);
}
@ -231,7 +231,7 @@ class Filter extends BaseProtocol implements IReceiver {
private getActiveSubscription(
pubSubTopic: PubSubTopic,
peerIdStr: PeerIdStr
peerIdStr: PeerIdStr,
): Subscription | undefined {
return this.activeSubscriptions.get(`${pubSubTopic}_${peerIdStr}`);
}
@ -239,7 +239,7 @@ class Filter extends BaseProtocol implements IReceiver {
private setActiveSubscription(
pubSubTopic: PubSubTopic,
peerIdStr: PeerIdStr,
subscription: Subscription
subscription: Subscription,
): Subscription {
this.activeSubscriptions.set(`${pubSubTopic}_${peerIdStr}`, subscription);
return subscription;
@ -259,7 +259,7 @@ class Filter extends BaseProtocol implements IReceiver {
async createSubscription(
pubSubTopic?: string,
peerId?: PeerId
peerId?: PeerId,
): Promise<Subscription> {
const _pubSubTopic =
pubSubTopic ?? this.options.pubSubTopic ?? DefaultPubSubTopic;
@ -271,7 +271,7 @@ class Filter extends BaseProtocol implements IReceiver {
this.setActiveSubscription(
_pubSubTopic,
peer.id.toString(),
new Subscription(_pubSubTopic, peer, this.newStream.bind(this, peer))
new Subscription(_pubSubTopic, peer, this.newStream.bind(this, peer)),
);
return subscription;
@ -279,7 +279,7 @@ class Filter extends BaseProtocol implements IReceiver {
public toSubscriptionIterator<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
opts?: ProtocolOptions | undefined
opts?: ProtocolOptions | undefined,
): Promise<IAsyncIterator<T>> {
return toAsyncIterator(this, decoders, opts);
}
@ -302,7 +302,7 @@ class Filter extends BaseProtocol implements IReceiver {
async subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>,
opts?: ProtocolOptions
opts?: ProtocolOptions,
): Promise<Unsubscribe> {
const subscription = await this.createSubscription(undefined, opts?.peerId);
@ -310,8 +310,8 @@ class Filter extends BaseProtocol implements IReceiver {
const contentTopics = Array.from(
groupByContentTopic(
Array.isArray(decoders) ? decoders : [decoders]
).keys()
Array.isArray(decoders) ? decoders : [decoders],
).keys(),
);
return async () => {
@ -341,7 +341,7 @@ class Filter extends BaseProtocol implements IReceiver {
const peerIdStr = streamData.connection.remotePeer.toString();
const subscription = this.getActiveSubscription(
pubsubTopic,
peerIdStr
peerIdStr,
);
if (!subscription) {
@ -357,7 +357,7 @@ class Filter extends BaseProtocol implements IReceiver {
},
(e) => {
log("Error with receiving pipe", e);
}
},
);
} catch (e) {
log("Error decoding message", e);
@ -366,7 +366,7 @@ class Filter extends BaseProtocol implements IReceiver {
}
export function wakuFilter(
init: Partial<ProtocolCreateOptions> = {}
init: Partial<ProtocolCreateOptions> = {},
): (libp2p: Libp2p) => IFilter {
return (libp2p: Libp2p) => new Filter(libp2p, init);
}
@ -374,7 +374,7 @@ export function wakuFilter(
async function pushMessage<T extends IDecodedMessage>(
subscriptionCallback: SubscriptionCallback<T>,
pubSubTopic: PubSubTopic,
message: WakuMessage
message: WakuMessage,
): Promise<void> {
const { decoders, callback } = subscriptionCallback;
@ -388,7 +388,7 @@ async function pushMessage<T extends IDecodedMessage>(
const decodePromises = decoders.map((dec) =>
dec
.fromProtoObj(pubSubTopic, message as IProtoMessage)
.then((decoded) => decoded || Promise.reject("Decoding failed"))
.then((decoded) => decoded || Promise.reject("Decoding failed")),
);
const decodedMessage = await Promise.any(decodePromises);

View File

@ -41,7 +41,7 @@ class LightPush extends BaseProtocol implements ILightPush {
async send(
encoder: IEncoder,
message: IMessage,
opts?: ProtocolOptions
opts?: ProtocolOptions,
): Promise<SendResult> {
const { pubSubTopic = DefaultPubSubTopic } = this.options;
@ -74,7 +74,7 @@ class LightPush extends BaseProtocol implements ILightPush {
lp.encode,
stream,
lp.decode,
async (source) => await all(source)
async (source) => await all(source),
);
try {
const bytes = new Uint8ArrayList();
@ -106,7 +106,7 @@ class LightPush extends BaseProtocol implements ILightPush {
}
export function wakuLightPush(
init: Partial<ProtocolCreateOptions> = {}
init: Partial<ProtocolCreateOptions> = {},
): (libp2p: Libp2p) => ILightPush {
return (libp2p: Libp2p) => new LightPush(libp2p, init);
}

View File

@ -7,7 +7,7 @@ export class PushRpc {
static createRequest(
message: proto.WakuMessage,
pubSubTopic: string
pubSubTopic: string,
): PushRpc {
return new PushRpc({
requestId: uuid(),

View File

@ -20,7 +20,7 @@ describe("Waku Message version 0", function () {
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubSubTopic,
protoResult!
protoResult!,
)) as DecodedMessage;
expect(result.contentTopic).to.eq(contentTopic);
@ -29,8 +29,8 @@ describe("Waku Message version 0", function () {
expect(result.ephemeral).to.be.false;
expect(result.payload).to.deep.eq(payload);
expect(result.timestamp).to.not.be.undefined;
}
)
},
),
);
});
@ -50,12 +50,12 @@ describe("Waku Message version 0", function () {
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubSubTopic,
protoResult!
protoResult!,
)) as DecodedMessage;
expect(result.ephemeral).to.be.true;
}
)
},
),
);
});
@ -69,7 +69,7 @@ describe("Waku Message version 0", function () {
// Encode the length of the payload
// Not a relevant real life example
const metaSetter = (
msg: IProtoMessage & { meta: undefined }
msg: IProtoMessage & { meta: undefined },
): Uint8Array => {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
@ -87,7 +87,7 @@ describe("Waku Message version 0", function () {
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubSubTopic,
protoResult!
protoResult!,
)) as DecodedMessage;
const expectedMeta = metaSetter({
@ -101,8 +101,8 @@ describe("Waku Message version 0", function () {
});
expect(result.meta).to.deep.eq(expectedMeta);
}
)
},
),
);
});
});

View File

@ -18,7 +18,10 @@ export const Version = 0;
export { proto };
export class DecodedMessage implements IDecodedMessage {
constructor(public pubSubTopic: string, protected proto: proto.WakuMessage) {}
constructor(
public pubSubTopic: string,
protected proto: proto.WakuMessage,
) {}
get ephemeral(): boolean {
return Boolean(this.proto.ephemeral);
@ -70,7 +73,7 @@ export class Encoder implements IEncoder {
constructor(
public contentTopic: string,
public ephemeral: boolean = false,
public metaSetter?: IMetaSetter
public metaSetter?: IMetaSetter,
) {
if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified");
@ -143,7 +146,7 @@ export class Decoder implements IDecoder<DecodedMessage> {
async fromProtoObj(
pubSubTopic: string,
proto: IProtoMessage
proto: IProtoMessage,
): Promise<DecodedMessage | undefined> {
// https://rfc.vac.dev/spec/14/
// > If omitted, the value SHOULD be interpreted as version 0.
@ -152,7 +155,7 @@ export class Decoder implements IDecoder<DecodedMessage> {
"Failed to decode due to incorrect version, expected:",
Version,
", actual:",
proto.version
proto.version,
);
return Promise.resolve(undefined);
}

View File

@ -19,7 +19,7 @@ export enum Fleet {
*/
export function getPredefinedBootstrapNodes(
fleet: Fleet = Fleet.Prod,
wantedNumber: number = DefaultWantedNumber
wantedNumber: number = DefaultWantedNumber,
): string[] {
if (wantedNumber <= 0) {
return [];

View File

@ -1,7 +1,7 @@
export function pushOrInitMapSet<K, V>(
map: Map<K, Set<V>>,
key: K,
newValue: V
newValue: V,
): void {
let arr = map.get(key);
if (typeof arr === "undefined") {

View File

@ -80,7 +80,7 @@ export class HistoryRpc {
}
function directionToProto(
pageDirection: PageDirection
pageDirection: PageDirection,
): proto.PagingInfo.Direction {
switch (pageDirection) {
case PageDirection.BACKWARD:

View File

@ -106,7 +106,7 @@ class Store extends BaseProtocol implements IStore {
async queryOrderedCallback<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: (message: T) => Promise<void | boolean> | boolean | void,
options?: QueryOptions
options?: QueryOptions,
): Promise<void> {
let abort = false;
for await (const promises of this.queryGenerator(decoders, options)) {
@ -129,7 +129,7 @@ class Store extends BaseProtocol implements IStore {
if (msg && !abort) {
abort = Boolean(await callback(msg));
}
})
}),
);
}
}
@ -155,9 +155,9 @@ class Store extends BaseProtocol implements IStore {
async queryCallbackOnPromise<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: (
message: Promise<T | undefined>
message: Promise<T | undefined>,
) => Promise<void | boolean> | boolean | void,
options?: QueryOptions
options?: QueryOptions,
): Promise<void> {
let abort = false;
let promises: Promise<void>[] = [];
@ -192,7 +192,7 @@ class Store extends BaseProtocol implements IStore {
*/
async *queryGenerator<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
options?: QueryOptions
options?: QueryOptions,
): AsyncGenerator<Promise<T | undefined>[]> {
const { pubSubTopic = DefaultPubSubTopic } = this.options;
@ -207,7 +207,7 @@ class Store extends BaseProtocol implements IStore {
decoders.forEach((dec) => {
if (decodersAsMap.has(dec.contentTopic)) {
throw new Error(
"API does not support different decoder per content topic"
"API does not support different decoder per content topic",
);
}
decodersAsMap.set(dec.contentTopic, dec);
@ -222,7 +222,7 @@ class Store extends BaseProtocol implements IStore {
pageSize: DefaultPageSize,
},
options,
{ contentTopics, startTime, endTime }
{ contentTopics, startTime, endTime },
);
log("Querying history with the following options", {
@ -236,7 +236,7 @@ class Store extends BaseProtocol implements IStore {
this.newStream.bind(this, peer),
queryOpts,
decodersAsMap,
options?.cursor
options?.cursor,
)) {
yield messages;
}
@ -247,14 +247,14 @@ async function* paginate<T extends IDecodedMessage>(
streamFactory: () => Promise<Stream>,
queryOpts: Params,
decoders: Map<string, IDecoder<T>>,
cursor?: Cursor
cursor?: Cursor,
): AsyncGenerator<Promise<T | undefined>[]> {
if (
queryOpts.contentTopics.toString() !==
Array.from(decoders.keys()).toString()
) {
throw new Error(
"Internal error, the decoders should match the query's content topics"
"Internal error, the decoders should match the query's content topics",
);
}
@ -267,7 +267,7 @@ async function* paginate<T extends IDecodedMessage>(
log(
"Querying store peer",
`for (${queryOpts.pubSubTopic})`,
queryOpts.contentTopics
queryOpts.contentTopics,
);
const stream = await streamFactory();
@ -277,7 +277,7 @@ async function* paginate<T extends IDecodedMessage>(
lp.encode,
stream,
lp.decode,
async (source) => await all(source)
async (source) => await all(source),
);
const bytes = new Uint8ArrayList();
@ -300,7 +300,7 @@ async function* paginate<T extends IDecodedMessage>(
if (!response.messages || !response.messages.length) {
log(
"Stopping pagination due to store `response.messages` field missing or empty"
"Stopping pagination due to store `response.messages` field missing or empty",
);
break;
}
@ -314,7 +314,7 @@ async function* paginate<T extends IDecodedMessage>(
if (decoder) {
return decoder.fromProtoObj(
queryOpts.pubSubTopic,
toProtoMessage(protoMsg)
toProtoMessage(protoMsg),
);
}
}
@ -326,7 +326,7 @@ async function* paginate<T extends IDecodedMessage>(
// If the server does not return cursor then there is an issue,
// Need to abort, or we end up in an infinite loop
log(
"Stopping pagination due to `response.pagingInfo.cursor` missing from store response"
"Stopping pagination due to `response.pagingInfo.cursor` missing from store response",
);
break;
}
@ -348,7 +348,7 @@ async function* paginate<T extends IDecodedMessage>(
export async function createCursor(
message: IDecodedMessage,
pubsubTopic: string = DefaultPubSubTopic
pubsubTopic: string = DefaultPubSubTopic,
): Promise<Cursor> {
if (
!message ||
@ -374,7 +374,7 @@ export async function createCursor(
}
export function wakuStore(
init: Partial<ProtocolCreateOptions> = {}
init: Partial<ProtocolCreateOptions> = {},
): (libp2p: Libp2p) => IStore {
return (libp2p: Libp2p) => new Store(libp2p, init);
}

View File

@ -28,7 +28,7 @@ const log = debug("waku:wait-for-remote-peer");
export async function waitForRemotePeer(
waku: Waku,
protocols?: Protocols[],
timeoutMs?: number
timeoutMs?: number,
): Promise<void> {
protocols = protocols ?? getEnabledProtocols(waku);
@ -64,7 +64,7 @@ export async function waitForRemotePeer(
await rejectOnTimeout(
Promise.all(promises),
timeoutMs,
"Timed out waiting for a remote peer."
"Timed out waiting for a remote peer.",
);
} else {
await Promise.all(promises);
@ -114,7 +114,7 @@ const awaitTimeout = (ms: number, rejectReason: string): Promise<void> =>
async function rejectOnTimeout<T>(
promise: Promise<T>,
timeoutMs: number,
rejectReason: string
rejectReason: string,
): Promise<void> {
await Promise.race([promise, awaitTimeout(timeoutMs, rejectReason)]);
}

View File

@ -56,7 +56,7 @@ export class WakuNode implements Waku {
store?: (libp2p: Libp2p) => IStore,
lightPush?: (libp2p: Libp2p) => ILightPush,
filter?: (libp2p: Libp2p) => IFilter,
relay?: (libp2p: Libp2p) => IRelay
relay?: (libp2p: Libp2p) => IRelay,
) {
this.libp2p = libp2p;
@ -86,14 +86,14 @@ export class WakuNode implements Waku {
peerId,
libp2p,
{ pingKeepAlive, relayKeepAlive },
this.relay
this.relay,
);
log(
"Waku node created",
peerId,
`relay: ${!!this.relay}, store: ${!!this.store}, light push: ${!!this
.lightPush}, filter: ${!!this.filter}`
.lightPush}, filter: ${!!this.filter}`,
);
}
@ -105,7 +105,7 @@ export class WakuNode implements Waku {
*/
async dial(
peer: PeerId | MultiaddrInput,
protocols?: Protocols[]
protocols?: Protocols[],
): Promise<Stream> {
const _protocols = protocols ?? [];
const peerId = mapToPeerIdOrMultiaddr(peer);
@ -121,11 +121,11 @@ export class WakuNode implements Waku {
if (_protocols.includes(Protocols.Relay)) {
if (this.relay) {
this.relay.gossipSub.multicodecs.forEach((codec: string) =>
codecs.push(codec)
codecs.push(codec),
);
} else {
log(
"Relay codec not included in dial codec: protocol not mounted locally"
"Relay codec not included in dial codec: protocol not mounted locally",
);
}
}
@ -134,7 +134,7 @@ export class WakuNode implements Waku {
codecs.push(this.store.multicodec);
} else {
log(
"Store codec not included in dial codec: protocol not mounted locally"
"Store codec not included in dial codec: protocol not mounted locally",
);
}
}
@ -143,7 +143,7 @@ export class WakuNode implements Waku {
codecs.push(this.lightPush.multicodec);
} else {
log(
"Light Push codec not included in dial codec: protocol not mounted locally"
"Light Push codec not included in dial codec: protocol not mounted locally",
);
}
}
@ -152,7 +152,7 @@ export class WakuNode implements Waku {
codecs.push(this.filter.multicodec);
} else {
log(
"Filter codec not included in dial codec: protocol not mounted locally"
"Filter codec not included in dial codec: protocol not mounted locally",
);
}
}
@ -191,7 +191,7 @@ export class WakuNode implements Waku {
}
}
function mapToPeerIdOrMultiaddr(
peerId: PeerId | MultiaddrInput
peerId: PeerId | MultiaddrInput,
): PeerId | Multiaddr {
return isPeerId(peerId) ? peerId : multiaddr(peerId);
}

View File

@ -80,7 +80,6 @@
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"rollup": "^3.21.3",
"ts-loader": "^9.4.2",
"typescript": "^5.0.4"

View File

@ -174,7 +174,7 @@ describe("DNS Node Discovery w/ capabilities", () => {
expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx",
);
});
@ -189,7 +189,7 @@ describe("DNS Node Discovery w/ capabilities", () => {
expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F",
);
});
@ -203,7 +203,7 @@ describe("DNS Node Discovery w/ capabilities", () => {
expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd",
);
});
@ -224,10 +224,10 @@ describe("DNS Node Discovery w/ capabilities", () => {
expect(peers.length).to.eq(2);
const peerIds = peers.map((p) => p.peerId?.toString());
expect(peerIds).to.contain(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F",
);
expect(peerIds).to.contain(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx",
);
});
@ -250,13 +250,13 @@ describe("DNS Node Discovery w/ capabilities", () => {
expect(peers.length).to.eq(3);
const peerIds = peers.map((p) => p.peerId?.toString());
expect(peerIds).to.contain(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F",
);
expect(peerIds).to.contain(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx",
);
expect(peerIds).to.contain(
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd",
);
});
});

View File

@ -34,7 +34,7 @@ export class DnsNodeDiscovery {
private readonly _errorTolerance: number = 10;
public static async dnsOverHttp(
dnsClient?: DnsClient
dnsClient?: DnsClient,
): Promise<DnsNodeDiscovery> {
if (!dnsClient) {
dnsClient = await DnsOverHttps.create();
@ -51,7 +51,7 @@ export class DnsNodeDiscovery {
*/
async getPeers(
enrTreeUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
): Promise<IEnr[]> {
const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
@ -64,7 +64,7 @@ export class DnsNodeDiscovery {
const peers = await fetchNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount,
this._errorTolerance,
() => this._search(domain, context)
() => this._search(domain, context),
);
log(
"retrieved peers: ",
@ -73,7 +73,7 @@ export class DnsNodeDiscovery {
id: peer.peerId?.toString(),
multiaddrs: peer.multiaddrs?.map((ma) => ma.toString()),
};
})
}),
);
return peers;
}
@ -88,7 +88,7 @@ export class DnsNodeDiscovery {
*/
async *getNextPeer(
enrTreeUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
): AsyncGenerator<IEnr> {
const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
@ -101,7 +101,7 @@ export class DnsNodeDiscovery {
for await (const peer of yieldNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount,
this._errorTolerance,
() => this._search(domain, context)
() => this._search(domain, context),
)) {
yield peer;
}
@ -113,7 +113,7 @@ export class DnsNodeDiscovery {
*/
private async _search(
subdomain: string,
context: SearchContext
context: SearchContext,
): Promise<ENR | null> {
try {
const entry = await this._getTXTRecord(subdomain, context);
@ -139,7 +139,7 @@ export class DnsNodeDiscovery {
}
} catch (error) {
log(
`Failed to search DNS tree ${entryType} at subdomain ${subdomain}: ${error}`
`Failed to search DNS tree ${entryType} at subdomain ${subdomain}: ${error}`,
);
return null;
}
@ -157,7 +157,7 @@ export class DnsNodeDiscovery {
*/
private async _getTXTRecord(
subdomain: string,
context: SearchContext
context: SearchContext,
): Promise<string> {
if (this._DNSTreeCache[subdomain]) {
return this._DNSTreeCache[subdomain];

View File

@ -18,7 +18,7 @@ export class DnsOverHttps implements DnsClient {
*/
public static async create(
endpoints?: Endpoint[],
retries?: number
retries?: number,
): Promise<DnsOverHttps> {
const _endpoints = endpoints ?? (await wellknown.endpoints("doh"));
@ -27,7 +27,7 @@ export class DnsOverHttps implements DnsClient {
private constructor(
private endpoints: Endpoint[],
private retries: number = 3
private retries: number = 3,
) {}
/**
@ -47,7 +47,7 @@ export class DnsOverHttps implements DnsClient {
{
endpoints: this.endpoints,
retries: this.retries,
}
},
);
answers = res.answers;
} catch (error) {

View File

@ -19,7 +19,7 @@ describe("ENRTree", () => {
} catch (err: unknown) {
const e = err as Error;
expect(e.toString()).includes(
"ENRTree root entry must start with 'enrtree-root:'"
"ENRTree root entry must start with 'enrtree-root:'",
);
}
});
@ -56,7 +56,7 @@ describe("ENRTree", () => {
} catch (err: unknown) {
const e = err as Error;
expect(e.toString()).includes(
"ENRTree tree entry must start with 'enrtree:'"
"ENRTree tree entry must start with 'enrtree:'",
);
}
});
@ -88,7 +88,7 @@ describe("ENRTree", () => {
} catch (err: unknown) {
const e = err as Error;
expect(e.toString()).includes(
"ENRTree branch entry must start with 'enrtree-branch:'"
"ENRTree branch entry must start with 'enrtree-branch:'",
);
}
});

View File

@ -29,7 +29,7 @@ export class ENRTree {
static parseAndVerifyRoot(root: string, publicKey: string): string {
if (!root.startsWith(this.ROOT_PREFIX))
throw new Error(
`ENRTree root entry must start with '${this.ROOT_PREFIX}'`
`ENRTree root entry must start with '${this.ROOT_PREFIX}'`,
);
const rootValues = ENRTree.parseRootValues(root);
@ -43,13 +43,13 @@ export class ENRTree {
const signedComponentBuffer = utf8ToBytes(signedComponent);
const signatureBuffer = fromString(rootValues.signature, "base64url").slice(
0,
64
64,
);
const isVerified = verifySignature(
signatureBuffer,
keccak256(signedComponentBuffer),
new Uint8Array(decodedPublicKey)
new Uint8Array(decodedPublicKey),
);
if (!isVerified) throw new Error("Unable to verify ENRTree root signature");
@ -59,7 +59,7 @@ export class ENRTree {
static parseRootValues(txt: string): ENRRootValues {
const matches = txt.match(
/^enrtree-root:v1 e=([^ ]+) l=([^ ]+) seq=(\d+) sig=([^ ]+)$/
/^enrtree-root:v1 e=([^ ]+) l=([^ ]+) seq=(\d+) sig=([^ ]+)$/,
);
if (!Array.isArray(matches))
@ -89,7 +89,7 @@ export class ENRTree {
static parseTree(tree: string): ENRTreeValues {
if (!tree.startsWith(this.TREE_PREFIX))
throw new Error(
`ENRTree tree entry must start with '${this.TREE_PREFIX}'`
`ENRTree tree entry must start with '${this.TREE_PREFIX}'`,
);
const matches = tree.match(/^enrtree:\/\/([^@]+)@(.+)$/);
@ -115,7 +115,7 @@ export class ENRTree {
static parseBranch(branch: string): string[] {
if (!branch.startsWith(this.BRANCH_PREFIX))
throw new Error(
`ENRTree branch entry must start with '${this.BRANCH_PREFIX}'`
`ENRTree branch entry must start with '${this.BRANCH_PREFIX}'`,
);
return branch.split(this.BRANCH_PREFIX)[1].split(",");

View File

@ -15,7 +15,7 @@ async function createEnr(waku2: Waku2): Promise<ENR> {
multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
),
];
@ -39,7 +39,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled(
{ relay: 1 },
0,
getNode
getNode,
);
expect(res.length).to.eq(1);
@ -62,7 +62,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled(
{ store: 1 },
1,
getNode
getNode,
);
expect(res.length).to.eq(1);
@ -91,7 +91,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled(
{ store: 1, relay: 2 },
1,
getNode
getNode,
);
expect(res.length).to.eq(3);
@ -108,7 +108,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled(
{ filter: 1, relay: 1 },
5,
getNode
getNode,
);
expect(res.length).to.eq(1);

View File

@ -13,7 +13,7 @@ const log = debug("waku:discovery:fetch_nodes");
export async function fetchNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
errorTolerance: number,
getNode: () => Promise<IEnr | null>
getNode: () => Promise<IEnr | null>,
): Promise<IEnr[]> {
const wanted = {
relay: wantedNodeCapabilityCount.relay ?? 0,
@ -64,7 +64,7 @@ export async function fetchNodesUntilCapabilitiesFulfilled(
export async function* yieldNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
errorTolerance: number,
getNode: () => Promise<IEnr | null>
getNode: () => Promise<IEnr | null>,
): AsyncGenerator<IEnr> {
const wanted = {
relay: wantedNodeCapabilityCount.relay ?? 0,
@ -108,7 +108,7 @@ export async function* yieldNodesUntilCapabilitiesFulfilled(
function isSatisfied(
wanted: NodeCapabilityCount,
actual: NodeCapabilityCount
actual: NodeCapabilityCount,
): boolean {
return (
actual.relay >= wanted.relay &&
@ -146,7 +146,7 @@ function addCapabilities(node: Waku2, total: NodeCapabilityCount): void {
function helpsSatisfyCapabilities(
node: Waku2,
wanted: NodeCapabilityCount,
actual: NodeCapabilityCount
actual: NodeCapabilityCount,
): boolean {
if (isSatisfied(wanted, actual)) {
throw "Internal Error: Waku2 wanted capabilities are already fulfilled";
@ -168,7 +168,7 @@ function helpsSatisfyCapabilities(
*/
function missingCapabilities(
wanted: NodeCapabilityCount,
actual: NodeCapabilityCount
actual: NodeCapabilityCount,
): Waku2 {
return {
relay: actual.relay < wanted.relay,

View File

@ -93,7 +93,7 @@ export class PeerDiscoveryDns
this.nextPeer = dns.getNextPeer.bind(
dns,
enrUrls,
wantedNodeCapabilityCount
wantedNodeCapabilityCount,
);
}
@ -135,7 +135,7 @@ export class PeerDiscoveryDns
if (isPeerChanged) {
this.dispatchEvent(
new CustomEvent<PeerInfo>("peer", { detail: peerInfo })
new CustomEvent<PeerInfo>("peer", { detail: peerInfo }),
);
}
}
@ -159,7 +159,7 @@ export class PeerDiscoveryDns
export function wakuDnsDiscovery(
enrUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
): (components: DnsDiscoveryComponents) => PeerDiscoveryDns {
return (components: DnsDiscoveryComponents) =>
new PeerDiscoveryDns(components, { enrUrls, wantedNodeCapabilityCount });

View File

@ -79,7 +79,6 @@
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"process": "^0.11.10",
"puppeteer": "^20.4.0",
"rollup": "^3.21.3",

View File

@ -9,7 +9,7 @@ import { getPublicKeyFromPeerId } from "./peer_id.js";
export class EnrCreator {
static fromPublicKey(
publicKey: Uint8Array,
kvs: Record<ENRKey, ENRValue> = {}
kvs: Record<ENRKey, ENRValue> = {},
): Promise<ENR> {
// EIP-778 specifies that the key must be in compressed format, 33 bytes
if (publicKey.length !== 33) {
@ -24,7 +24,7 @@ export class EnrCreator {
static async fromPeerId(
peerId: PeerId,
kvs: Record<ENRKey, ENRValue> = {}
kvs: Record<ENRKey, ENRValue> = {},
): Promise<ENR> {
switch (peerId.type) {
case "secp256k1":

View File

@ -12,7 +12,7 @@ import sha3 from "js-sha3";
*/
export async function sign(
message: Uint8Array,
privateKey: Uint8Array
privateKey: Uint8Array,
): Promise<Uint8Array> {
const [signature, recoveryId] = await secp.sign(message, privateKey, {
recovered: true,
@ -20,7 +20,7 @@ export async function sign(
});
return concat(
[signature, new Uint8Array([recoveryId])],
signature.length + 1
signature.length + 1,
);
}
@ -42,7 +42,7 @@ export function compressPublicKey(publicKey: Uint8Array): Uint8Array {
export function verifySignature(
signature: Uint8Array,
message: Uint8Array | string,
publicKey: Uint8Array
publicKey: Uint8Array,
): boolean {
try {
const _signature = secp.Signature.fromCompact(signature.slice(0, 64));

View File

@ -10,7 +10,7 @@ export class EnrDecoder {
static fromString(encoded: string): Promise<ENR> {
if (!encoded.startsWith(ENR.RECORD_PREFIX)) {
throw new Error(
`"string encoded ENR must start with '${ENR.RECORD_PREFIX}'`
`"string encoded ENR must start with '${ENR.RECORD_PREFIX}'`,
);
}
return EnrDecoder.fromRLP(fromString(encoded.slice(4), "base64url"));
@ -64,7 +64,7 @@ function checkValues(values: Uint8Array[]): {
}
if (!seq || Array.isArray(seq)) {
throw new Error(
"Decoded ENR invalid sequence number: must be a byte array"
"Decoded ENR invalid sequence number: must be a byte array",
);
}
@ -75,7 +75,7 @@ function checkSignature(
seq: Uint8Array,
kvs: Uint8Array[],
enr: ENR,
signature: Uint8Array
signature: Uint8Array,
): void {
const rlpEncodedBytes = hexToBytes(RLP.encode([seq, ...kvs]));
if (!enr.verify(rlpEncodedBytes, signature)) {

View File

@ -9,7 +9,7 @@ import { ENR } from "./enr.js";
export class EnrEncoder {
static async toValues(
enr: ENR,
privateKey?: Uint8Array
privateKey?: Uint8Array,
): Promise<(ENRKey | ENRValue | number[])[]> {
// sort keys and flatten into [k, v, k, v, ...]
const content: Array<ENRKey | ENRValue | number[]> = Array.from(enr.keys())
@ -20,7 +20,7 @@ export class EnrEncoder {
content.unshift(new Uint8Array([Number(enr.seq)]));
if (privateKey) {
content.unshift(
await enr.sign(hexToBytes(RLP.encode(content)), privateKey)
await enr.sign(hexToBytes(RLP.encode(content)), privateKey),
);
} else {
if (!enr.signature) {
@ -33,7 +33,7 @@ export class EnrEncoder {
static async toBytes(enr: ENR, privateKey?: Uint8Array): Promise<Uint8Array> {
const encoded = hexToBytes(
RLP.encode(await EnrEncoder.toValues(enr, privateKey))
RLP.encode(await EnrEncoder.toValues(enr, privateKey)),
);
if (encoded.length >= MAX_RECORD_SIZE) {
throw new Error("ENR must be less than 300 bytes");

View File

@ -29,7 +29,7 @@ describe("ENR", function () {
multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
),
];
@ -53,13 +53,13 @@ describe("ENR", function () {
expect(enr2.multiaddrs!.length).to.be.equal(3);
const multiaddrsAsStr = enr2.multiaddrs!.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include(
"/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"
"/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss",
);
expect(multiaddrsAsStr).to.include(
"/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"
"/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss",
);
expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
);
expect(enr2.waku2).to.deep.equal({
relay: true,
@ -87,13 +87,13 @@ describe("ENR", function () {
expect(enr.multiaddrs!.length).to.be.equal(3);
const multiaddrsAsStr = enr.multiaddrs!.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include(
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss",
);
expect(multiaddrsAsStr).to.include(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss",
);
expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
);
});
@ -107,7 +107,7 @@ describe("ENR", function () {
expect(enr.ip).to.be.equal("134.209.139.210");
expect(enr.publicKey).to.not.be.undefined;
expect(enr.peerId?.toString()).to.be.equal(
"16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ"
"16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ",
);
});
@ -159,7 +159,7 @@ describe("ENR", function () {
const enr = await ENR.create(
{ id: utf8ToBytes("v3") },
BigInt(0),
new Uint8Array()
new Uint8Array(),
);
enr.verify(new Uint8Array(), new Uint8Array());
assert.fail("Expect error here");
@ -174,7 +174,7 @@ describe("ENR", function () {
const enr = await ENR.create(
{ id: utf8ToBytes("v4") },
BigInt(0),
new Uint8Array()
new Uint8Array(),
);
enr.verify(new Uint8Array(), new Uint8Array());
assert.fail("Expect error here");
@ -200,7 +200,7 @@ describe("ENR", function () {
beforeEach(async function () {
const seq = BigInt(1);
privateKey = hexToBytes(
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291"
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291",
);
record = await EnrCreator.fromPublicKey(secp.getPublicKey(privateKey));
record.setLocationMultiaddr(multiaddr("/ip4/127.0.0.1/udp/30303"));
@ -210,7 +210,7 @@ describe("ENR", function () {
it("should properly compute the node id", () => {
expect(record.nodeId).to.equal(
"a448f24c6d18e575453db13171562b71999873db5b286df957af199ec94617f7"
"a448f24c6d18e575453db13171562b71999873db5b286df957af199ec94617f7",
);
});
@ -245,7 +245,7 @@ describe("ENR", function () {
beforeEach(async () => {
privateKey = hexToBytes(
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291"
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291",
);
record = await EnrCreator.fromPublicKey(secp.getPublicKey(privateKey));
});
@ -262,14 +262,14 @@ describe("ENR", function () {
record.set("udp", tuples0[1][1]);
// and get the multiaddr
expect(
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString()
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString(),
).to.equal(multi0.toString());
// set the multiaddr
const multi1 = multiaddr("/ip4/0.0.0.0/udp/30300");
record.setLocationMultiaddr(multi1);
// and get the multiaddr
expect(
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString()
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString(),
).to.equal(multi1.toString());
// and get the underlying records
const tuples1 = multi1.tuples();
@ -290,14 +290,14 @@ describe("ENR", function () {
record.set("tcp", tuples0[1][1]);
// and get the multiaddr
expect(
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString()
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString(),
).to.equal(multi0.toString());
// set the multiaddr
const multi1 = multiaddr("/ip4/0.0.0.0/tcp/30300");
record.setLocationMultiaddr(multi1);
// and get the multiaddr
expect(
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString()
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString(),
).to.equal(multi1.toString());
// and get the underlying records
const tuples1 = multi1.tuples();
@ -312,7 +312,7 @@ describe("ENR", function () {
const tcp = 8080;
const udp = 8080;
const wsMultiaddr = multiaddr(
"/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss"
"/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss",
);
let peerId: PeerId;
let enr: ENR;
@ -331,43 +331,43 @@ describe("ENR", function () {
it("should properly create location multiaddrs - udp4", () => {
expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP4)
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP4),
).to.deep.equal(multiaddr(`/ip4/${ip4}/udp/${udp}`));
});
it("should properly create location multiaddrs - tcp4", () => {
expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP4)
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP4),
).to.deep.equal(multiaddr(`/ip4/${ip4}/tcp/${tcp}`));
});
it("should properly create location multiaddrs - udp6", () => {
expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP6)
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP6),
).to.deep.equal(multiaddr(`/ip6/${ip6}/udp/${udp}`));
});
it("should properly create location multiaddrs - tcp6", () => {
expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP6)
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP6),
).to.deep.equal(multiaddr(`/ip6/${ip6}/tcp/${tcp}`));
});
it("should properly create location multiaddrs - udp", () => {
// default to ip4
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/udp/${udp}`)
multiaddr(`/ip4/${ip4}/udp/${udp}`),
);
// if ip6 is set, use it
enr.ip = undefined;
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip6/${ip6}/udp/${udp}`)
multiaddr(`/ip6/${ip6}/udp/${udp}`),
);
// if ip6 does not exist, use ip4
enr.ip6 = undefined;
enr.ip = ip4;
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/udp/${udp}`)
multiaddr(`/ip4/${ip4}/udp/${udp}`),
);
enr.ip6 = ip6;
});
@ -375,18 +375,18 @@ describe("ENR", function () {
it("should properly create location multiaddrs - tcp", () => {
// default to ip4
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`)
multiaddr(`/ip4/${ip4}/tcp/${tcp}`),
);
// if ip6 is set, use it
enr.ip = undefined;
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip6/${ip6}/tcp/${tcp}`)
multiaddr(`/ip6/${ip6}/tcp/${tcp}`),
);
// if ip6 does not exist, use ip4
enr.ip6 = undefined;
enr.ip = ip4;
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`)
multiaddr(`/ip4/${ip4}/tcp/${tcp}`),
);
enr.ip6 = ip6;
});
@ -397,19 +397,19 @@ describe("ENR", function () {
expect(peerInfo.id.toString()).to.equal(peerId.toString());
expect(peerInfo.multiaddrs.length).to.equal(5);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`).toString()
multiaddr(`/ip4/${ip4}/tcp/${tcp}`).toString(),
);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip6/${ip6}/tcp/${tcp}`).toString()
multiaddr(`/ip6/${ip6}/tcp/${tcp}`).toString(),
);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip4/${ip4}/udp/${udp}`).toString()
multiaddr(`/ip4/${ip4}/udp/${udp}`).toString(),
);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip6/${ip6}/udp/${udp}`).toString()
multiaddr(`/ip6/${ip6}/udp/${udp}`).toString(),
);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
wsMultiaddr.toString()
wsMultiaddr.toString(),
);
});
});

View File

@ -37,7 +37,7 @@ export class ENR extends RawEnr implements IEnr {
static async create(
kvs: Record<ENRKey, ENRValue> = {},
seq: SequenceNumber = BigInt(1),
signature?: Uint8Array
signature?: Uint8Array,
): Promise<ENR> {
const enr = new ENR(kvs, seq, signature);
try {
@ -61,7 +61,7 @@ export class ENR extends RawEnr implements IEnr {
}
}
getLocationMultiaddr: (
protocol: TransportProtocol | TransportProtocolPerIpVersion
protocol: TransportProtocol | TransportProtocolPerIpVersion,
) => Multiaddr | undefined = locationMultiaddrFromEnrFields.bind({}, this);
setLocationMultiaddr(multiaddr: Multiaddr): void {
@ -93,7 +93,7 @@ export class ENR extends RawEnr implements IEnr {
for (const protocol of Object.values(TransportProtocolPerIpVersion)) {
const ma = this.getLocationMultiaddr(
protocol as TransportProtocolPerIpVersion
protocol as TransportProtocolPerIpVersion,
);
if (ma) multiaddrs.push(ma);
}
@ -122,7 +122,7 @@ export class ENR extends RawEnr implements IEnr {
* @param protocol
*/
getFullMultiaddr(
protocol: TransportProtocol | TransportProtocolPerIpVersion
protocol: TransportProtocol | TransportProtocolPerIpVersion,
): Multiaddr | undefined {
if (this.peerId) {
const locationMultiaddr = this.getLocationMultiaddr(protocol);

View File

@ -5,7 +5,7 @@ import { multiaddrFromFields } from "./multiaddr_from_fields.js";
export function locationMultiaddrFromEnrFields(
enr: IEnr,
protocol: "udp" | "udp4" | "udp6" | "tcp" | "tcp4" | "tcp6"
protocol: "udp" | "udp4" | "udp6" | "tcp" | "tcp4" | "tcp6",
): Multiaddr | undefined {
switch (protocol) {
case "udp":
@ -42,6 +42,6 @@ export function locationMultiaddrFromEnrFields(
isIpv6 ? "ip6" : "ip4",
protoName,
ipVal,
protoVal
protoVal,
);
}

View File

@ -6,12 +6,12 @@ export function multiaddrFromFields(
ipFamily: string,
protocol: string,
ipBytes: Uint8Array,
protocolBytes: Uint8Array
protocolBytes: Uint8Array,
): Multiaddr {
let ma = multiaddr("/" + ipFamily + "/" + convertToString(ipFamily, ipBytes));
ma = ma.encapsulate(
multiaddr("/" + protocol + "/" + convertToString(protocol, protocolBytes))
multiaddr("/" + protocol + "/" + convertToString(protocol, protocolBytes)),
);
return ma;

View File

@ -8,10 +8,10 @@ describe("ENR multiaddrs codec", function () {
const multiaddrs = [
multiaddr("/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss",
),
multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
),
];
@ -20,13 +20,13 @@ describe("ENR multiaddrs codec", function () {
const multiaddrsAsStr = result.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include(
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss",
);
expect(multiaddrsAsStr).to.include(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss",
);
expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss",
);
});
});

View File

@ -12,7 +12,7 @@ export function decodeMultiaddrs(bytes: Uint8Array): Multiaddr[] {
const sizeDataView = new DataView(
bytes.buffer,
index,
MULTIADDR_LENGTH_SIZE
MULTIADDR_LENGTH_SIZE,
);
const size = sizeDataView.getUint16(0);
index += MULTIADDR_LENGTH_SIZE;
@ -28,7 +28,7 @@ export function decodeMultiaddrs(bytes: Uint8Array): Multiaddr[] {
export function encodeMultiaddrs(multiaddrs: Multiaddr[]): Uint8Array {
const totalLength = multiaddrs.reduce(
(acc, ma) => acc + MULTIADDR_LENGTH_SIZE + ma.bytes.length,
0
0,
);
const bytes = new Uint8Array(totalLength);
const dataView = new DataView(bytes.buffer);

View File

@ -4,7 +4,7 @@ import type { PeerId } from "@libp2p/interface-peer-id";
import { peerIdFromKeys } from "@libp2p/peer-id";
export function createPeerIdFromPublicKey(
publicKey: Uint8Array
publicKey: Uint8Array,
): Promise<PeerId> {
const _publicKey = new supportedKeys.secp256k1.Secp256k1PublicKey(publicKey);
return peerIdFromKeys(_publicKey.bytes, undefined);
@ -20,7 +20,7 @@ export function getPublicKeyFromPeerId(peerId: PeerId): Uint8Array {
// Only used in tests
export async function getPrivateKeyFromPeerId(
peerId: PeerId
peerId: PeerId,
): Promise<Uint8Array> {
if (peerId.type !== "secp256k1") {
throw new Error("Unsupported peer id type");

View File

@ -17,7 +17,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
protected constructor(
kvs: Record<ENRKey, ENRValue> = {},
seq: SequenceNumber = BigInt(1),
signature?: Uint8Array
signature?: Uint8Array,
) {
super(Object.entries(kvs));
this.seq = seq;
@ -147,7 +147,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
this,
"waku2",
waku2,
(w) => new Uint8Array([encodeWaku2(w)])
(w) => new Uint8Array([encodeWaku2(w)]),
);
}
}
@ -155,7 +155,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
function getStringValue(
map: Map<ENRKey, ENRValue>,
key: ENRKey,
proto: string
proto: string,
): string | undefined {
const raw = map.get(key);
if (!raw) return;
@ -165,7 +165,7 @@ function getStringValue(
function getNumberAsStringValue(
map: Map<ENRKey, ENRValue>,
key: ENRKey,
proto: string
proto: string,
): number | undefined {
const raw = map.get(key);
if (!raw) return;
@ -176,7 +176,7 @@ function setStringValue(
map: Map<ENRKey, ENRValue>,
key: ENRKey,
proto: string,
value: string | undefined
value: string | undefined,
): void {
deleteUndefined(map, key, value, convertToBytes.bind({}, proto));
}
@ -185,7 +185,7 @@ function setNumberAsStringValue(
map: Map<ENRKey, ENRValue>,
key: ENRKey,
proto: string,
value: number | undefined
value: number | undefined,
): void {
setStringValue(map, key, proto, value?.toString(10));
}
@ -194,7 +194,7 @@ function deleteUndefined<K, V, W>(
map: Map<K, W>,
key: K,
value: V | undefined,
transform: (v: V) => W
transform: (v: V) => W,
): void {
if (value !== undefined) {
map.set(key, transform(value));

View File

@ -5,7 +5,7 @@ import { bytesToHex } from "@waku/utils/bytes";
import { keccak256 } from "./crypto.js";
export async function sign(
privKey: Uint8Array,
msg: Uint8Array
msg: Uint8Array,
): Promise<Uint8Array> {
return secp.sign(keccak256(msg), privKey, {
der: false,

View File

@ -58,7 +58,6 @@
"@multiformats/multiaddr": "^12.0.0",
"cspell": "^6.31.1",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"typescript": "^5.0.4",
"libp2p": "^0.45.9"
},

View File

@ -12,7 +12,7 @@ export type ContentFilter = {
export interface IFilterSubscription {
subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>
callback: Callback<T>,
): Promise<void>;
unsubscribe(contentTopics: ContentTopic[]): Promise<void>;
@ -26,6 +26,6 @@ export type IFilter = IReceiver &
IBaseProtocol & {
createSubscription(
pubSubTopic?: string,
peerId?: PeerId
peerId?: PeerId,
): Promise<IFilterSubscription>;
};

View File

@ -73,6 +73,6 @@ export interface IDecoder<T extends IDecodedMessage> {
fromWireToProtoObj: (bytes: Uint8Array) => Promise<IProtoMessage | undefined>;
fromProtoObj: (
pubSubTopic: string,
proto: IProtoMessage
proto: IProtoMessage,
) => Promise<T | undefined>;
}

View File

@ -62,7 +62,7 @@ export type ProtocolOptions = {
};
export type Callback<T extends IDecodedMessage> = (
msg: T
msg: T,
) => void | Promise<void>;
export enum SendError {

View File

@ -9,11 +9,11 @@ export type ActiveSubscriptions = Map<PubSubTopic, ContentTopic[]>;
export interface IReceiver {
toSubscriptionIterator: <T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
opts?: ProtocolOptions
opts?: ProtocolOptions,
) => Promise<IAsyncIterator<T>>;
subscribe: <T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>,
opts?: ProtocolOptions
opts?: ProtocolOptions,
) => Unsubscribe | Promise<Unsubscribe>;
}

View File

@ -5,6 +5,6 @@ export interface ISender {
send: (
encoder: IEncoder,
message: IMessage,
opts?: ProtocolOptions
opts?: ProtocolOptions,
) => Promise<SendResult>;
}

View File

@ -49,17 +49,17 @@ export interface IStore extends IBaseProtocol {
queryOrderedCallback: <T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: (message: T) => Promise<void | boolean> | boolean | void,
options?: StoreQueryOptions
options?: StoreQueryOptions,
) => Promise<void>;
queryCallbackOnPromise: <T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: (
message: Promise<T | undefined>
message: Promise<T | undefined>,
) => Promise<void | boolean> | boolean | void,
options?: StoreQueryOptions
options?: StoreQueryOptions,
) => Promise<void>;
queryGenerator: <T extends IDecodedMessage>(
decoders: IDecoder<T>[],
options?: StoreQueryOptions
options?: StoreQueryOptions,
) => AsyncGenerator<Promise<T | undefined>[]>;
}

View File

@ -101,7 +101,6 @@
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"process": "^0.11.10",
"puppeteer": "^20.4.0",
"rollup": "^3.21.3",

View File

@ -13,7 +13,7 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
const counters = new Uint8Array([ctr >> 24, ctr >> 16, ctr >> 8, ctr]);
const countersSecret = concat(
[counters, secret],
counters.length + secret.length
counters.length + secret.length,
);
const willBeHashResult = sha256(countersSecret);
willBeResult = willBeResult.then((result) =>
@ -21,9 +21,9 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
const _hashResult = new Uint8Array(hashResult);
return concat(
[result, _hashResult],
result.length + _hashResult.length
result.length + _hashResult.length,
);
})
}),
);
written += 32;
ctr += 1;
@ -34,7 +34,7 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
function aesCtrEncrypt(
counter: Uint8Array,
key: ArrayBufferLike,
data: ArrayBufferLike
data: ArrayBufferLike,
): Promise<Uint8Array> {
return getSubtle()
.importKey("raw", key, "AES-CTR", false, ["encrypt"])
@ -42,8 +42,8 @@ function aesCtrEncrypt(
getSubtle().encrypt(
{ name: "AES-CTR", counter: counter, length: 128 },
cryptoKey,
data
)
data,
),
)
.then((bytes) => new Uint8Array(bytes));
}
@ -51,7 +51,7 @@ function aesCtrEncrypt(
function aesCtrDecrypt(
counter: Uint8Array,
key: ArrayBufferLike,
data: ArrayBufferLike
data: ArrayBufferLike,
): Promise<Uint8Array> {
return getSubtle()
.importKey("raw", key, "AES-CTR", false, ["decrypt"])
@ -59,15 +59,15 @@ function aesCtrDecrypt(
getSubtle().decrypt(
{ name: "AES-CTR", counter: counter, length: 128 },
cryptoKey,
data
)
data,
),
)
.then((bytes) => new Uint8Array(bytes));
}
function hmacSha256Sign(
key: ArrayBufferLike,
msg: ArrayBufferLike
msg: ArrayBufferLike,
): PromiseLike<Uint8Array> {
const algorithm = { name: "HMAC", hash: { name: "SHA-256" } };
return getSubtle()
@ -79,12 +79,12 @@ function hmacSha256Sign(
function hmacSha256Verify(
key: ArrayBufferLike,
msg: ArrayBufferLike,
sig: ArrayBufferLike
sig: ArrayBufferLike,
): Promise<boolean> {
const algorithm = { name: "HMAC", hash: { name: "SHA-256" } };
const _key = getSubtle().importKey("raw", key, algorithm, false, ["verify"]);
return _key.then((cryptoKey) =>
getSubtle().verify(algorithm, cryptoKey, sig, msg)
getSubtle().verify(algorithm, cryptoKey, sig, msg),
);
}
@ -99,11 +99,11 @@ function hmacSha256Verify(
function derive(privateKeyA: Uint8Array, publicKeyB: Uint8Array): Uint8Array {
if (privateKeyA.length !== 32) {
throw new Error(
`Bad private key, it should be 32 bytes but it's actually ${privateKeyA.length} bytes long`
`Bad private key, it should be 32 bytes but it's actually ${privateKeyA.length} bytes long`,
);
} else if (publicKeyB.length !== 65) {
throw new Error(
`Bad public key, it should be 65 bytes but it's actually ${publicKeyB.length} bytes long`
`Bad public key, it should be 65 bytes but it's actually ${publicKeyB.length} bytes long`,
);
} else if (publicKeyB[0] !== 4) {
throw new Error("Bad public key, a valid public key would begin with 4");
@ -123,7 +123,7 @@ function derive(privateKeyA: Uint8Array, publicKeyB: Uint8Array): Uint8Array {
*/
export async function encrypt(
publicKeyTo: Uint8Array,
msg: Uint8Array
msg: Uint8Array,
): Promise<Uint8Array> {
const ephemPrivateKey = randomBytes(32);
@ -143,7 +143,7 @@ export async function encrypt(
return concat(
[ephemPublicKey, ivCipherText, hmac],
ephemPublicKey.length + ivCipherText.length + hmac.length
ephemPublicKey.length + ivCipherText.length + hmac.length,
);
}
@ -159,15 +159,15 @@ const metaLength = 1 + 64 + 16 + 32;
*/
export async function decrypt(
privateKey: Uint8Array,
encrypted: Uint8Array
encrypted: Uint8Array,
): Promise<Uint8Array> {
if (encrypted.length <= metaLength) {
throw new Error(
`Invalid Ciphertext. Data is too small. It should ba at least ${metaLength} bytes`
`Invalid Ciphertext. Data is too small. It should ba at least ${metaLength} bytes`,
);
} else if (encrypted[0] !== 4) {
throw new Error(
`Not a valid ciphertext. It should begin with 4 but actually begin with ${encrypted[0]}`
`Not a valid ciphertext. It should begin with 4 but actually begin with ${encrypted[0]}`,
);
} else {
// deserialize
@ -182,7 +182,7 @@ export async function decrypt(
const px = derive(privateKey, ephemPublicKey);
const hash = await kdf(px, 32);
const [encryptionKey, macKey] = await sha256(hash.slice(16)).then(
(macKey) => [hash.slice(0, 16), macKey]
(macKey) => [hash.slice(0, 16), macKey],
);
if (!(await hmacSha256Verify(macKey, cipherAndIv, msgMac))) {

View File

@ -19,7 +19,7 @@ export function getSubtle(): SubtleCrypto {
return crypto.node.webcrypto.subtle;
} else {
throw new Error(
"The environment doesn't have Crypto Subtle API (if in the browser, be sure to use to be in a secure context, ie, https)"
"The environment doesn't have Crypto Subtle API (if in the browser, be sure to use to be in a secure context, ie, https)",
);
}
}
@ -59,7 +59,7 @@ export const getPublicKey = secp.getPublicKey;
*/
export async function sign(
message: Uint8Array,
privateKey: Uint8Array
privateKey: Uint8Array,
): Promise<Uint8Array> {
const [signature, recoveryId] = await secp.sign(message, privateKey, {
recovered: true,
@ -67,7 +67,7 @@ export async function sign(
});
return concat(
[signature, new Uint8Array([recoveryId])],
signature.length + 1
signature.length + 1,
);
}

View File

@ -5,12 +5,12 @@ import { getSubtle, randomBytes } from "./index.js";
export async function encrypt(
iv: Uint8Array,
key: Uint8Array,
clearText: Uint8Array
clearText: Uint8Array,
): Promise<Uint8Array> {
return getSubtle()
.importKey("raw", key, Symmetric.algorithm, false, ["encrypt"])
.then((cryptoKey) =>
getSubtle().encrypt({ iv, ...Symmetric.algorithm }, cryptoKey, clearText)
getSubtle().encrypt({ iv, ...Symmetric.algorithm }, cryptoKey, clearText),
)
.then((cipher) => new Uint8Array(cipher));
}
@ -18,12 +18,16 @@ export async function encrypt(
export async function decrypt(
iv: Uint8Array,
key: Uint8Array,
cipherText: Uint8Array
cipherText: Uint8Array,
): Promise<Uint8Array> {
return getSubtle()
.importKey("raw", key, Symmetric.algorithm, false, ["decrypt"])
.then((cryptoKey) =>
getSubtle().decrypt({ iv, ...Symmetric.algorithm }, cryptoKey, cipherText)
getSubtle().decrypt(
{ iv, ...Symmetric.algorithm },
cryptoKey,
cipherText,
),
)
.then((clear) => new Uint8Array(clear));
}

View File

@ -15,7 +15,7 @@ export class DecodedMessage
proto: proto.WakuMessage,
decodedPayload: Uint8Array,
public signature?: Uint8Array,
public signaturePublicKey?: Uint8Array
public signaturePublicKey?: Uint8Array,
) {
super(pubSubTopic, proto);
this._decodedPayload = decodedPayload;

View File

@ -34,8 +34,8 @@ describe("Ecies Encryption", function () {
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined;
expect(result.signaturePublicKey).to.be.undefined;
}
)
},
),
);
});
@ -54,7 +54,7 @@ describe("Ecies Encryption", function () {
contentTopic,
payload,
alicePrivateKey,
bobPrivateKey
bobPrivateKey,
) => {
const alicePublicKey = getPublicKey(alicePrivateKey);
const bobPublicKey = getPublicKey(bobPrivateKey);
@ -78,8 +78,8 @@ describe("Ecies Encryption", function () {
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined;
expect(result.signaturePublicKey).to.deep.eq(alicePublicKey);
}
)
},
),
);
});
@ -93,7 +93,7 @@ describe("Ecies Encryption", function () {
async (pubSubTopic, contentTopic, payload, privateKey) => {
const publicKey = getPublicKey(privateKey);
const metaSetter = (
msg: IProtoMessage & { meta: undefined }
msg: IProtoMessage & { meta: undefined },
): Uint8Array => {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
@ -125,8 +125,8 @@ describe("Ecies Encryption", function () {
});
expect(result.meta).to.deep.equal(expectedMeta);
}
)
},
),
);
});
});

View File

@ -36,7 +36,7 @@ class Encoder implements IEncoder {
private publicKey: Uint8Array,
private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false,
public metaSetter?: IMetaSetter
public metaSetter?: IMetaSetter,
) {
if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified");
@ -106,18 +106,21 @@ export function createEncoder({
publicKey,
sigPrivKey,
ephemeral,
metaSetter
metaSetter,
);
}
class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
constructor(contentTopic: string, private privateKey: Uint8Array) {
constructor(
contentTopic: string,
private privateKey: Uint8Array,
) {
super(contentTopic);
}
async fromProtoObj(
pubSubTopic: string,
protoMessage: IProtoMessage
protoMessage: IProtoMessage,
): Promise<DecodedMessage | undefined> {
const cipherPayload = protoMessage.payload;
@ -126,7 +129,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
"Failed to decrypt due to incorrect version, expected:",
Version,
", actual:",
protoMessage.version
protoMessage.version,
);
return;
}
@ -138,7 +141,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
} catch (e) {
log(
`Failed to decrypt message using asymmetric decryption for contentTopic: ${this.contentTopic}`,
e
e,
);
return;
}
@ -161,7 +164,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
protoMessage,
res.payload,
res.sig?.signature,
res.sig?.publicKey
res.sig?.publicKey,
);
}
}
@ -181,7 +184,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*/
export function createDecoder(
contentTopic: string,
privateKey: Uint8Array
privateKey: Uint8Array,
): Decoder {
return new Decoder(contentTopic, privateKey);
}

View File

@ -32,8 +32,8 @@ describe("Symmetric Encryption", function () {
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined;
expect(result.signaturePublicKey).to.be.undefined;
}
)
},
),
);
});
@ -67,8 +67,8 @@ describe("Symmetric Encryption", function () {
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined;
expect(result.signaturePublicKey).to.deep.eq(sigPubKey);
}
)
},
),
);
});
@ -81,7 +81,7 @@ describe("Symmetric Encryption", function () {
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
async (pubSubTopic, contentTopic, payload, symKey) => {
const metaSetter = (
msg: IProtoMessage & { meta: undefined }
msg: IProtoMessage & { meta: undefined },
): Uint8Array => {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
@ -113,8 +113,8 @@ describe("Symmetric Encryption", function () {
});
expect(result.meta).to.deep.equal(expectedMeta);
}
)
},
),
);
});
});

View File

@ -31,7 +31,7 @@ class Encoder implements IEncoder {
private symKey: Uint8Array,
private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false,
public metaSetter?: IMetaSetter
public metaSetter?: IMetaSetter,
) {
if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified");
@ -101,13 +101,16 @@ export function createEncoder({
}
class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
constructor(contentTopic: string, private symKey: Uint8Array) {
constructor(
contentTopic: string,
private symKey: Uint8Array,
) {
super(contentTopic);
}
async fromProtoObj(
pubSubTopic: string,
protoMessage: IProtoMessage
protoMessage: IProtoMessage,
): Promise<DecodedMessage | undefined> {
const cipherPayload = protoMessage.payload;
@ -116,7 +119,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
"Failed to decrypt due to incorrect version, expected:",
Version,
", actual:",
protoMessage.version
protoMessage.version,
);
return;
}
@ -128,7 +131,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
} catch (e) {
log(
`Failed to decrypt message using asymmetric decryption for contentTopic: ${this.contentTopic}`,
e
e,
);
return;
}
@ -151,7 +154,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
protoMessage,
res.payload,
res.sig?.signature,
res.sig?.publicKey
res.sig?.publicKey,
);
}
}
@ -171,7 +174,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*/
export function createDecoder(
contentTopic: string,
symKey: Uint8Array
symKey: Uint8Array,
): Decoder {
return new Decoder(contentTopic, symKey);
}

View File

@ -24,8 +24,8 @@ describe("Waku Payload", () => {
const res = await decryptAsymmetric(enc, privKey);
expect(res).deep.equal(message);
}
)
},
),
);
});
@ -39,8 +39,8 @@ describe("Waku Payload", () => {
const res = await decryptSymmetric(enc, key);
expect(res).deep.equal(message);
}
)
},
),
);
});
@ -52,9 +52,9 @@ describe("Waku Payload", () => {
expect(res?.payload).deep.equal(
message,
"Payload was not encrypted then decrypted correctly"
"Payload was not encrypted then decrypted correctly",
);
})
}),
);
});
@ -71,14 +71,14 @@ describe("Waku Payload", () => {
expect(res?.payload).deep.equal(
message,
"Payload was not encrypted then decrypted correctly"
"Payload was not encrypted then decrypted correctly",
);
expect(res?.sig?.publicKey).deep.equal(
sigPubKey,
"signature Public key was not recovered from encrypted then decrypted signature"
"signature Public key was not recovered from encrypted then decrypted signature",
);
}
)
},
),
);
});
});

View File

@ -21,7 +21,7 @@ function getSizeOfPayloadSizeField(message: Uint8Array): number {
function getPayloadSize(
message: Uint8Array,
sizeOfPayloadSizeField: number
sizeOfPayloadSizeField: number,
): number {
let payloadSizeBytes = message.slice(1, 1 + sizeOfPayloadSizeField);
// int 32 == 4 bytes
@ -29,7 +29,7 @@ function getPayloadSize(
// If less than 4 bytes pad right (Little Endian).
payloadSizeBytes = concat(
[payloadSizeBytes, new Uint8Array(4 - sizeOfPayloadSizeField)],
4
4,
);
}
const payloadSizeDataView = new DataView(payloadSizeBytes.buffer);
@ -50,7 +50,7 @@ function isMessageSigned(message: Uint8Array): boolean {
*/
export async function encryptAsymmetric(
data: Uint8Array,
publicKey: Uint8Array | string
publicKey: Uint8Array | string,
): Promise<Uint8Array> {
return ecies.encrypt(hexToBytes(publicKey), data);
}
@ -63,7 +63,7 @@ export async function encryptAsymmetric(
*/
export async function decryptAsymmetric(
payload: Uint8Array,
privKey: Uint8Array
privKey: Uint8Array,
): Promise<Uint8Array> {
return ecies.decrypt(privKey, payload);
}
@ -79,7 +79,7 @@ export async function decryptAsymmetric(
*/
export async function encryptSymmetric(
data: Uint8Array,
key: Uint8Array | string
key: Uint8Array | string,
): Promise<Uint8Array> {
const iv = symmetric.generateIv();
@ -99,7 +99,7 @@ export async function encryptSymmetric(
*/
export async function decryptSymmetric(
payload: Uint8Array,
key: Uint8Array | string
key: Uint8Array | string,
): Promise<Uint8Array> {
const ivStart = payload.length - Symmetric.ivSize;
const cipher = payload.slice(0, ivStart);
@ -135,7 +135,7 @@ function computeSizeOfPayloadSizeField(payload: Uint8Array): number {
function validateDataIntegrity(
value: Uint8Array,
expectedSize: number
expectedSize: number,
): boolean {
if (value.length !== expectedSize) {
return false;
@ -157,7 +157,7 @@ function getHash(message: Uint8Array, isSigned: boolean): Uint8Array {
function ecRecoverPubKey(
messageHash: Uint8Array,
signature: Uint8Array
signature: Uint8Array,
): Uint8Array | undefined {
const recoveryDataView = new DataView(signature.slice(64).buffer);
const recovery = recoveryDataView.getUint8(0);
@ -175,7 +175,7 @@ function ecRecoverPubKey(
*/
export async function preCipher(
messagePayload: Uint8Array,
sigPrivKey?: Uint8Array
sigPrivKey?: Uint8Array,
): Promise<Uint8Array> {
let envelope = new Uint8Array([0]); // No flags
envelope = addPayloadSizeField(envelope, messagePayload);
@ -216,7 +216,7 @@ export async function preCipher(
* @internal
*/
export function postCipher(
message: Uint8Array
message: Uint8Array,
): { payload: Uint8Array; sig?: Signature } | undefined {
const sizeOfPayloadSizeField = getSizeOfPayloadSizeField(message);
if (sizeOfPayloadSizeField === 0) return;

View File

@ -33,12 +33,10 @@
"build:esm": "tsc",
"build:bundle": "rollup --config rollup.config.js",
"fix": "run-s fix:*",
"fix:prettier": "prettier . --write",
"fix:lint": "eslint src *.js --fix",
"check": "run-s check:*",
"check:tsc": "tsc -p tsconfig.dev.json",
"check:lint": "eslint src *.js",
"check:prettier": "prettier . --list-different",
"check:spelling": "cspell \"{README.md,src/**/*.ts}\"",
"test": "run-s test:*",
"test:node": "TS_NODE_PROJECT=./tsconfig.dev.json mocha",
@ -75,7 +73,6 @@
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.4",
"process": "^0.11.10",
"puppeteer": "^20.4.0",
"rollup": "^3.15.0",

View File

@ -8,7 +8,7 @@ import { concat, utf8ToBytes } from "@waku/utils/bytes";
*/
export function messageHash(
pubsubTopic: string,
message: IProtoMessage
message: IProtoMessage,
): Uint8Array {
const pubsubTopicBytes = utf8ToBytes(pubsubTopic);
const contentTopicBytes = utf8ToBytes(message.contentTopic);

View File

@ -74,7 +74,6 @@
"chai": "^4.3.7",
"cspell": "^6.31.1",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"rollup": "^3.21.3",
"ts-loader": "^9.4.2",
"typescript": "^5.0.4",

View File

@ -37,7 +37,7 @@ export class WakuPeerExchange extends BaseProtocol implements IPeerExchange {
* Make a peer exchange query to a peer
*/
async query(
params: PeerExchangeQueryParams
params: PeerExchangeQueryParams,
): Promise<PeerInfo[] | undefined> {
const { numPeers } = params;
@ -54,7 +54,7 @@ export class WakuPeerExchange extends BaseProtocol implements IPeerExchange {
lp.encode,
stream,
lp.decode,
async (source) => await all(source)
async (source) => await all(source),
);
try {
@ -76,7 +76,7 @@ export class WakuPeerExchange extends BaseProtocol implements IPeerExchange {
.filter(isDefined)
.map(async (enr) => {
return { ENR: await EnrDecoder.fromRLP(enr) };
})
}),
);
} catch (err) {
log("Failed to decode push reply", err);
@ -90,7 +90,7 @@ export class WakuPeerExchange extends BaseProtocol implements IPeerExchange {
* @returns A function that creates a new peer exchange protocol
*/
export function wakuPeerExchange(): (
components: Libp2pComponents
components: Libp2pComponents,
) => WakuPeerExchange {
return (components: Libp2pComponents) => new WakuPeerExchange(components);
}

View File

@ -61,7 +61,7 @@ export class PeerExchangeDiscovery
private queryAttempts: Map<string, number> = new Map();
private readonly handleDiscoveredPeer = (
event: CustomEvent<PeerUpdate>
event: CustomEvent<PeerUpdate>,
): void => {
const {
peer: { protocols, id: peerId },
@ -74,7 +74,7 @@ export class PeerExchangeDiscovery
this.queryingPeers.add(peerId.toString());
this.startRecurringQueries(peerId).catch((error) =>
log(`Error querying peer ${error}`)
log(`Error querying peer ${error}`),
);
};
@ -99,7 +99,7 @@ export class PeerExchangeDiscovery
// might be better to use "peer:identify" or "peer:update"
this.components.events.addEventListener(
"peer:update",
this.handleDiscoveredPeer
this.handleDiscoveredPeer,
);
}
@ -113,7 +113,7 @@ export class PeerExchangeDiscovery
this.queryingPeers.clear();
this.components.events.removeEventListener(
"peer:update",
this.handleDiscoveredPeer
this.handleDiscoveredPeer,
);
}
@ -126,7 +126,7 @@ export class PeerExchangeDiscovery
}
private readonly startRecurringQueries = async (
peerId: PeerId
peerId: PeerId,
): Promise<void> => {
const peerIdStr = peerId.toString();
const {
@ -137,7 +137,7 @@ export class PeerExchangeDiscovery
log(
`Querying peer: ${peerIdStr} (attempt ${
this.queryAttempts.get(peerIdStr) ?? 1
})`
})`,
);
await this.query(peerId);
@ -204,7 +204,7 @@ export class PeerExchangeDiscovery
protocols: [],
multiaddrs: peerInfo.multiaddrs,
},
})
}),
);
}
}
@ -217,7 +217,7 @@ export class PeerExchangeDiscovery
}
export function wakuPeerExchangeDiscovery(): (
components: Libp2pComponents
components: Libp2pComponents,
) => PeerExchangeDiscovery {
return (components: Libp2pComponents) =>
new PeerExchangeDiscovery(components);

View File

@ -53,7 +53,6 @@
"@waku/build-utils": "*",
"cspell": "^6.31.1",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"protons": "^7.0.2",
"rollup": "^3.21.3",
"typescript": "^5.0.4",

View File

@ -60,7 +60,7 @@ export namespace FilterRequest {
}
return obj;
}
},
);
}
@ -130,8 +130,8 @@ export namespace FilterRequest {
obj.contentFilters.push(
FilterRequest.ContentFilter.codec().decode(
reader,
reader.uint32()
)
reader.uint32(),
),
);
break;
default:
@ -141,7 +141,7 @@ export namespace FilterRequest {
}
return obj;
}
},
);
}
@ -196,7 +196,7 @@ export namespace MessagePush {
switch (tag >>> 3) {
case 1:
obj.messages.push(
WakuMessage.codec().decode(reader, reader.uint32())
WakuMessage.codec().decode(reader, reader.uint32()),
);
break;
default:
@ -206,7 +206,7 @@ export namespace MessagePush {
}
return obj;
}
},
);
}
@ -275,7 +275,7 @@ export namespace FilterRpc {
case 2:
obj.request = FilterRequest.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 3:
@ -288,7 +288,7 @@ export namespace FilterRpc {
}
return obj;
}
},
);
}
@ -409,7 +409,7 @@ export namespace RateLimitProof {
}
return obj;
}
},
);
}
@ -515,7 +515,7 @@ export namespace WakuMessage {
case 21:
obj.rateLimitProof = RateLimitProof.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 31:
@ -528,7 +528,7 @@ export namespace WakuMessage {
}
return obj;
}
},
);
}

View File

@ -63,7 +63,7 @@ export namespace FilterSubscribeRequest {
w.uint32(16);
FilterSubscribeRequest.FilterSubscribeType.codec().encode(
obj.filterSubscribeType,
w
w,
);
}
@ -102,7 +102,7 @@ export namespace FilterSubscribeRequest {
case 2:
obj.filterSubscribeType =
FilterSubscribeRequest.FilterSubscribeType.codec().decode(
reader
reader,
);
break;
case 10:
@ -118,7 +118,7 @@ export namespace FilterSubscribeRequest {
}
return obj;
}
},
);
}
@ -130,7 +130,7 @@ export namespace FilterSubscribeRequest {
};
export const decode = (
buf: Uint8Array | Uint8ArrayList
buf: Uint8Array | Uint8ArrayList,
): FilterSubscribeRequest => {
return decodeMessage(buf, FilterSubscribeRequest.codec());
};
@ -200,7 +200,7 @@ export namespace FilterSubscribeResponse {
}
return obj;
}
},
);
}
@ -212,7 +212,7 @@ export namespace FilterSubscribeResponse {
};
export const decode = (
buf: Uint8Array | Uint8ArrayList
buf: Uint8Array | Uint8ArrayList,
): FilterSubscribeResponse => {
return decodeMessage(buf, FilterSubscribeResponse.codec());
};
@ -260,7 +260,7 @@ export namespace MessagePush {
case 1:
obj.wakuMessage = WakuMessage.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 2:
@ -273,7 +273,7 @@ export namespace MessagePush {
}
return obj;
}
},
);
}
@ -394,7 +394,7 @@ export namespace RateLimitProof {
}
return obj;
}
},
);
}
@ -500,7 +500,7 @@ export namespace WakuMessage {
case 21:
obj.rateLimitProof = RateLimitProof.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 31:
@ -513,7 +513,7 @@ export namespace WakuMessage {
}
return obj;
}
},
);
}

View File

@ -55,7 +55,7 @@ export namespace PushRequest {
case 2:
obj.message = WakuMessage.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
default:
@ -65,7 +65,7 @@ export namespace PushRequest {
}
return obj;
}
},
);
}
@ -135,7 +135,7 @@ export namespace PushResponse {
}
return obj;
}
},
);
}
@ -204,13 +204,13 @@ export namespace PushRpc {
case 2:
obj.request = PushRequest.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 3:
obj.response = PushResponse.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
default:
@ -220,7 +220,7 @@ export namespace PushRpc {
}
return obj;
}
},
);
}
@ -341,7 +341,7 @@ export namespace RateLimitProof {
}
return obj;
}
},
);
}
@ -447,7 +447,7 @@ export namespace WakuMessage {
case 21:
obj.rateLimitProof = RateLimitProof.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 31:
@ -460,7 +460,7 @@ export namespace WakuMessage {
}
return obj;
}
},
);
}

View File

@ -113,7 +113,7 @@ export namespace RateLimitProof {
}
return obj;
}
},
);
}
@ -219,7 +219,7 @@ export namespace WakuMessage {
case 21:
obj.rateLimitProof = RateLimitProof.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 31:
@ -232,7 +232,7 @@ export namespace WakuMessage {
}
return obj;
}
},
);
}

View File

@ -51,7 +51,7 @@ export namespace PeerInfo {
}
return obj;
}
},
);
}
@ -110,7 +110,7 @@ export namespace PeerExchangeQuery {
}
return obj;
}
},
);
}
@ -122,7 +122,7 @@ export namespace PeerExchangeQuery {
};
export const decode = (
buf: Uint8Array | Uint8ArrayList
buf: Uint8Array | Uint8ArrayList,
): PeerExchangeQuery => {
return decodeMessage(buf, PeerExchangeQuery.codec());
};
@ -167,7 +167,7 @@ export namespace PeerExchangeResponse {
switch (tag >>> 3) {
case 1:
obj.peerInfos.push(
PeerInfo.codec().decode(reader, reader.uint32())
PeerInfo.codec().decode(reader, reader.uint32()),
);
break;
default:
@ -177,7 +177,7 @@ export namespace PeerExchangeResponse {
}
return obj;
}
},
);
}
@ -189,7 +189,7 @@ export namespace PeerExchangeResponse {
};
export const decode = (
buf: Uint8Array | Uint8ArrayList
buf: Uint8Array | Uint8ArrayList,
): PeerExchangeResponse => {
return decodeMessage(buf, PeerExchangeResponse.codec());
};
@ -237,13 +237,13 @@ export namespace PeerExchangeRPC {
case 1:
obj.query = PeerExchangeQuery.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 2:
obj.response = PeerExchangeResponse.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
default:
@ -253,7 +253,7 @@ export namespace PeerExchangeRPC {
}
return obj;
}
},
);
}

View File

@ -88,7 +88,7 @@ export namespace Index {
}
return obj;
}
},
);
}
@ -181,7 +181,7 @@ export namespace PagingInfo {
}
return obj;
}
},
);
}
@ -242,7 +242,7 @@ export namespace ContentFilter {
}
return obj;
}
},
);
}
@ -324,13 +324,13 @@ export namespace HistoryQuery {
break;
case 3:
obj.contentFilters.push(
ContentFilter.codec().decode(reader, reader.uint32())
ContentFilter.codec().decode(reader, reader.uint32()),
);
break;
case 4:
obj.pagingInfo = PagingInfo.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 5:
@ -346,7 +346,7 @@ export namespace HistoryQuery {
}
return obj;
}
},
);
}
@ -430,13 +430,13 @@ export namespace HistoryResponse {
switch (tag >>> 3) {
case 2:
obj.messages.push(
WakuMessage.codec().decode(reader, reader.uint32())
WakuMessage.codec().decode(reader, reader.uint32()),
);
break;
case 3:
obj.pagingInfo = PagingInfo.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 4:
@ -449,7 +449,7 @@ export namespace HistoryResponse {
}
return obj;
}
},
);
}
@ -518,13 +518,13 @@ export namespace HistoryRpc {
case 2:
obj.query = HistoryQuery.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 3:
obj.response = HistoryResponse.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
default:
@ -534,7 +534,7 @@ export namespace HistoryRpc {
}
return obj;
}
},
);
}
@ -655,7 +655,7 @@ export namespace RateLimitProof {
}
return obj;
}
},
);
}
@ -761,7 +761,7 @@ export namespace WakuMessage {
case 21:
obj.rateLimitProof = RateLimitProof.codec().decode(
reader,
reader.uint32()
reader.uint32(),
);
break;
case 31:
@ -774,7 +774,7 @@ export namespace WakuMessage {
}
return obj;
}
},
);
}

View File

@ -53,7 +53,7 @@ export namespace TopicOnlyMessage {
}
return obj;
}
},
);
}
@ -65,7 +65,7 @@ export namespace TopicOnlyMessage {
};
export const decode = (
buf: Uint8Array | Uint8ArrayList
buf: Uint8Array | Uint8ArrayList,
): TopicOnlyMessage => {
return decodeMessage(buf, TopicOnlyMessage.codec());
};

File diff suppressed because it is too large Load Diff

View File

@ -61,7 +61,7 @@ class Relay implements IRelay {
constructor(libp2p: Libp2p, options?: Partial<RelayCreateOptions>) {
if (!this.isRelayPubSub(libp2p.services.pubsub)) {
throw Error(
`Failed to initialize Relay. libp2p.pubsub does not support ${Relay.multicodec}`
`Failed to initialize Relay. libp2p.pubsub does not support ${Relay.multicodec}`,
);
}
@ -125,7 +125,7 @@ class Relay implements IRelay {
*/
public subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>
callback: Callback<T>,
): () => void {
const contentTopicToObservers = Array.isArray(decoders)
? toObservers(decoders, callback)
@ -147,7 +147,7 @@ class Relay implements IRelay {
const nextObservers = leftMinusJoin(
currentObservers,
observersToRemove
observersToRemove,
);
if (nextObservers.size) {
@ -161,7 +161,7 @@ class Relay implements IRelay {
public toSubscriptionIterator<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
opts?: ProtocolOptions | undefined
opts?: ProtocolOptions | undefined,
): Promise<IAsyncIterator<T>> {
return toAsyncIterator(this, decoders, opts);
}
@ -178,7 +178,7 @@ class Relay implements IRelay {
private async processIncomingMessage<T extends IDecodedMessage>(
pubSubTopic: string,
bytes: Uint8Array
bytes: Uint8Array,
): Promise<void> {
const topicOnlyMsg = await this.defaultDecoder.fromWireToProtoObj(bytes);
if (!topicOnlyMsg || !topicOnlyMsg.contentTopic) {
@ -199,7 +199,7 @@ class Relay implements IRelay {
const protoMsg = await decoder.fromWireToProtoObj(bytes);
if (!protoMsg) {
log(
"Internal error: message previously decoded failed on 2nd pass."
"Internal error: message previously decoded failed on 2nd pass.",
);
return;
}
@ -213,7 +213,7 @@ class Relay implements IRelay {
log("Error while decoding message:", error);
}
})();
})
}),
);
}
@ -231,9 +231,9 @@ class Relay implements IRelay {
this.processIncomingMessage(
event.detail.msg.topic,
event.detail.msg.data
event.detail.msg.data,
).catch((e) => log("Failed to process incoming message", e));
}
},
);
this.gossipSub.topicValidators.set(pubSubTopic, messageValidator);
@ -246,13 +246,13 @@ class Relay implements IRelay {
}
export function wakuRelay(
init: Partial<ProtocolCreateOptions> = {}
init: Partial<ProtocolCreateOptions> = {},
): (libp2p: Libp2p) => IRelay {
return (libp2p: Libp2p) => new Relay(libp2p, init);
}
export function wakuGossipSub(
init: Partial<RelayCreateOptions> = {}
init: Partial<RelayCreateOptions> = {},
): (components: GossipSubComponents) => GossipSub {
return (components: GossipSubComponents) => {
init = {
@ -270,10 +270,10 @@ export function wakuGossipSub(
function toObservers<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: Callback<T>
callback: Callback<T>,
): Map<ContentTopic, Set<Observer<T>>> {
const contentTopicToDecoders = Array.from(
groupByContentTopic(decoders).entries()
groupByContentTopic(decoders).entries(),
);
const contentTopicToObserversEntries = contentTopicToDecoders.map(
@ -286,10 +286,10 @@ function toObservers<T extends IDecodedMessage>(
({
decoder,
callback,
} as Observer<T>)
)
}) as Observer<T>,
),
),
] as [ContentTopic, Set<Observer<T>>]
] as [ContentTopic, Set<Observer<T>>],
);
return new Map(contentTopicToObserversEntries);

View File

@ -29,8 +29,8 @@ describe("Message Validator", () => {
const result = messageValidator(peerId, message);
expect(result).to.eq(TopicValidatorResult.Accept);
}
)
},
),
);
});
@ -51,8 +51,8 @@ describe("Message Validator", () => {
const result = messageValidator(peerId, message);
expect(result).to.eq(TopicValidatorResult.Reject);
}
)
},
),
);
});
});

View File

@ -8,7 +8,7 @@ const log = debug("waku:relay");
export function messageValidator(
peer: PeerId,
message: Message
message: Message,
): TopicValidatorResult {
const startTime = performance.now();
log(`validating message from ${peer} received on ${message.topic}`);

View File

@ -17,7 +17,7 @@ export class TopicOnlyMessage implements IDecodedMessage {
constructor(
public pubSubTopic: string,
private proto: ProtoTopicOnlyMessage
private proto: ProtoTopicOnlyMessage,
) {}
get contentTopic(): string {
@ -44,7 +44,7 @@ export class TopicOnlyDecoder implements IDecoder<TopicOnlyMessage> {
async fromProtoObj(
pubSubTopic: string,
proto: IProtoMessage
proto: IProtoMessage,
): Promise<TopicOnlyMessage | undefined> {
return new TopicOnlyMessage(pubSubTopic, proto);
}

View File

@ -81,7 +81,6 @@
"cspell": "^6.31.1",
"interface-datastore": "^7.0.4",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"rollup": "^3.21.3",
"typescript": "^5.0.4"
},

View File

@ -40,7 +40,7 @@ export { Libp2pComponents };
* Uses Waku Filter V2 by default.
*/
export async function createLightNode(
options?: ProtocolCreateOptions & WakuOptions
options?: ProtocolCreateOptions & WakuOptions,
): Promise<LightNode> {
const libp2pOptions = options?.libp2p ?? {};
const peerDiscovery = libp2pOptions.peerDiscovery ?? [];
@ -52,7 +52,7 @@ export async function createLightNode(
const libp2p = await defaultLibp2p(
undefined,
libp2pOptions,
options?.userAgent
options?.userAgent,
);
const store = wakuStore(options);
@ -64,7 +64,7 @@ export async function createLightNode(
libp2p,
store,
lightPush,
filter
filter,
) as LightNode;
}
@ -73,7 +73,7 @@ export async function createLightNode(
* enabling some privacy preserving properties.
*/
export async function createRelayNode(
options?: ProtocolCreateOptions & WakuOptions & Partial<RelayCreateOptions>
options?: ProtocolCreateOptions & WakuOptions & Partial<RelayCreateOptions>,
): Promise<RelayNode> {
const libp2pOptions = options?.libp2p ?? {};
const peerDiscovery = libp2pOptions.peerDiscovery ?? [];
@ -85,7 +85,7 @@ export async function createRelayNode(
const libp2p = await defaultLibp2p(
wakuGossipSub(options),
libp2pOptions,
options?.userAgent
options?.userAgent,
);
const relay = wakuRelay(options);
@ -96,7 +96,7 @@ export async function createRelayNode(
undefined,
undefined,
undefined,
relay
relay,
) as RelayNode;
}
@ -114,7 +114,7 @@ export async function createRelayNode(
* @internal
*/
export async function createFullNode(
options?: ProtocolCreateOptions & WakuOptions & Partial<RelayCreateOptions>
options?: ProtocolCreateOptions & WakuOptions & Partial<RelayCreateOptions>,
): Promise<FullNode> {
const libp2pOptions = options?.libp2p ?? {};
const peerDiscovery = libp2pOptions.peerDiscovery ?? [];
@ -126,7 +126,7 @@ export async function createFullNode(
const libp2p = await defaultLibp2p(
wakuGossipSub(options),
libp2pOptions,
options?.userAgent
options?.userAgent,
);
const store = wakuStore(options);
@ -140,12 +140,12 @@ export async function createFullNode(
store,
lightPush,
filter,
relay
relay,
) as FullNode;
}
export function defaultPeerDiscovery(): (
components: Libp2pComponents
components: Libp2pComponents,
) => PeerDiscovery {
return wakuDnsDiscovery([enrTree["PROD"]], DEFAULT_NODE_REQUIREMENTS);
}
@ -157,7 +157,7 @@ type PubsubService = {
export async function defaultLibp2p(
wakuGossipSub?: PubsubService["pubsub"],
options?: Partial<Libp2pOptions>,
userAgent?: string
userAgent?: string,
): Promise<Libp2p> {
const pubsubService: PubsubService = wakuGossipSub
? { pubsub: wakuGossipSub }

View File

@ -66,22 +66,20 @@
"@libp2p/bootstrap": "^8.0.0",
"@libp2p/interface-peer-discovery-compliance-tests": "^2.0.8",
"@libp2p/interface-peer-id": "^2.0.2",
"@types/sinon": "^10.0.16",
"@types/chai": "^4.3.4",
"@types/dockerode": "^3.3.17",
"@types/mocha": "^10.0.1",
"@types/tail": "^2.2.1",
"@typescript-eslint/eslint-plugin": "^5.57.0",
"@typescript-eslint/parser": "^5.62.0",
"@waku/sdk": "*",
"@waku/dns-discovery": "*",
"@waku/message-encryption": "*",
"@waku/peer-exchange": "*",
"@waku/sdk": "*",
"chai": "^4.3.7",
"cspell": "^6.31.1",
"debug": "^4.3.4",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"typescript": "^5.0.4",
"interface-datastore": "^8.2.3",
"libp2p": "^0.45.9",

View File

@ -12,7 +12,7 @@ export const NOISE_KEY_1 = new Uint8Array(
b.push(1);
}
return b;
})()
})(),
);
export const NOISE_KEY_2 = new Uint8Array(
@ -22,7 +22,7 @@ export const NOISE_KEY_2 = new Uint8Array(
b.push(2);
}
return b;
})()
})(),
);
export const NOISE_KEY_3 = new Uint8Array(
@ -32,5 +32,5 @@ export const NOISE_KEY_3 = new Uint8Array(
b.push(3);
}
return b;
})()
})(),
);

View File

@ -14,7 +14,7 @@ import { waitForFile } from "./async_fs.js";
export default async function waitForLine(
filepath: string,
logLine: string,
timeout: number
timeout: number,
): Promise<void> {
await pTimeout(waitForFile(filepath), { milliseconds: timeout });

View File

@ -37,12 +37,12 @@ export default class Dockerode {
}
private static async createNetwork(
networkName: string = NETWORK_NAME
networkName: string = NETWORK_NAME,
): Promise<Docker.Network> {
const docker = new Docker();
const networks = await docker.listNetworks();
const existingNetwork = networks.find(
(network) => network.Name === networkName
(network) => network.Name === networkName,
);
let network: Docker.Network;
@ -92,7 +92,7 @@ export default class Dockerode {
ports: number[],
args: Args,
logPath: string,
wakuServiceNodeParams?: string
wakuServiceNodeParams?: string,
): Promise<Docker.Container> {
const [rpcPort, tcpPort, websocketPort, discv5UdpPort] = ports;
@ -150,7 +150,7 @@ export default class Dockerode {
if (stream) {
stream.pipe(logStream);
}
}
},
);
this.containerId = container.id;
@ -164,7 +164,7 @@ export default class Dockerode {
log(
`Shutting down container ID ${
this.containerId
} at ${new Date().toLocaleTimeString()}`
} at ${new Date().toLocaleTimeString()}`,
);
await this.container.stop();

View File

@ -136,7 +136,7 @@ export class NimGoNode {
...(isGoWaku && { minRelayPeersToPublish: 0, legacyFilter }),
},
{ rpcAddress: "0.0.0.0" },
_args
_args,
);
process.env.WAKUNODE2_STORE_MESSAGE_DB_URL = "";
@ -149,7 +149,7 @@ export class NimGoNode {
ports,
mergedArgs,
this.logPath,
WAKU_SERVICE_NODE_PARAMS
WAKU_SERVICE_NODE_PARAMS,
);
try {
@ -191,7 +191,7 @@ export class NimGoNode {
async sendMessage(
message: MessageRpcQuery,
pubSubTopic: string = DefaultPubSubTopic
pubSubTopic: string = DefaultPubSubTopic,
): Promise<boolean> {
this.checkProcess();
@ -206,13 +206,13 @@ export class NimGoNode {
}
async messages(
pubsubTopic: string = DefaultPubSubTopic
pubsubTopic: string = DefaultPubSubTopic,
): Promise<MessageRpcResponse[]> {
this.checkProcess();
const msgs = await this.rpcCall<MessageRpcResponse[]>(
"get_waku_v2_relay_v1_messages",
[pubsubTopic]
[pubsubTopic],
);
return msgs.filter(isDefined);
@ -239,7 +239,7 @@ export class NimGoNode {
async postAsymmetricMessage(
message: MessageRpcQuery,
publicKey: Uint8Array,
pubSubTopic?: string
pubSubTopic?: string,
): Promise<boolean> {
this.checkProcess();
@ -256,7 +256,7 @@ export class NimGoNode {
async getAsymmetricMessages(
privateKey: Uint8Array,
pubSubTopic?: string
pubSubTopic?: string,
): Promise<MessageRpcResponse[]> {
this.checkProcess();
@ -265,7 +265,7 @@ export class NimGoNode {
[
pubSubTopic ? pubSubTopic : DefaultPubSubTopic,
"0x" + bytesToHex(privateKey),
]
],
);
}
@ -274,14 +274,14 @@ export class NimGoNode {
return this.rpcCall<string>(
"get_waku_v2_private_v1_symmetric_key",
[]
[],
).then(hexToBytes);
}
async postSymmetricMessage(
message: MessageRpcQuery,
symKey: Uint8Array,
pubSubTopic?: string
pubSubTopic?: string,
): Promise<boolean> {
this.checkProcess();
@ -298,7 +298,7 @@ export class NimGoNode {
async getSymmetricMessages(
symKey: Uint8Array,
pubSubTopic?: string
pubSubTopic?: string,
): Promise<MessageRpcResponse[]> {
this.checkProcess();
@ -307,7 +307,7 @@ export class NimGoNode {
[
pubSubTopic ? pubSubTopic : DefaultPubSubTopic,
"0x" + bytesToHex(symKey),
]
],
);
}
@ -323,7 +323,7 @@ export class NimGoNode {
const peerId = await this.getPeerId();
this.multiaddrWithId = multiaddr(
`/ip4/127.0.0.1/tcp/${this.websocketPort}/ws/p2p/${peerId.toString()}`
`/ip4/127.0.0.1/tcp/${this.websocketPort}/ws/p2p/${peerId.toString()}`,
);
return this.multiaddrWithId;
}
@ -350,7 +350,7 @@ export class NimGoNode {
private async rpcCall<T>(
method: string,
params: Array<string | number | unknown>
params: Array<string | number | unknown>,
): Promise<T> {
log("RPC Query: ", method, params);
const res = await fetch(this.rpcUrl, {

View File

@ -29,7 +29,7 @@ async function main() {
],
{
stdio: "inherit",
}
},
);
mocha.on("error", (error) => {

View File

@ -42,12 +42,12 @@ describe("ConnectionManager", function () {
EPeersByDiscoveryEvents.PEER_DISCOVERY_BOOTSTRAP,
({ detail: receivedPeerId }) => {
resolve(receivedPeerId.toString() === peerIdBootstrap.toString());
}
},
);
});
waku.libp2p.dispatchEvent(
new CustomEvent("peer", { detail: await createSecp256k1PeerId() })
new CustomEvent("peer", { detail: await createSecp256k1PeerId() }),
);
expect(await peerDiscoveryBootstrap).to.eq(true);
@ -70,12 +70,12 @@ describe("ConnectionManager", function () {
EPeersByDiscoveryEvents.PEER_DISCOVERY_PEER_EXCHANGE,
({ detail: receivedPeerId }) => {
resolve(receivedPeerId.toString() === peerIdPx.toString());
}
},
);
});
waku.libp2p.dispatchEvent(
new CustomEvent("peer", { detail: peerIdPx })
new CustomEvent("peer", { detail: peerIdPx }),
);
expect(await peerDiscoveryPeerExchange).to.eq(true);
@ -102,12 +102,12 @@ describe("ConnectionManager", function () {
EPeersByDiscoveryEvents.PEER_CONNECT_BOOTSTRAP,
({ detail: receivedPeerId }) => {
resolve(receivedPeerId.toString() === peerIdBootstrap.toString());
}
},
);
});
waku.libp2p.dispatchEvent(
new CustomEvent("peer:connect", { detail: peerIdBootstrap })
new CustomEvent("peer:connect", { detail: peerIdBootstrap }),
);
expect(await peerConnectedBootstrap).to.eq(true);
@ -129,12 +129,12 @@ describe("ConnectionManager", function () {
EPeersByDiscoveryEvents.PEER_CONNECT_PEER_EXCHANGE,
({ detail: receivedPeerId }) => {
resolve(receivedPeerId.toString() === peerIdPx.toString());
}
},
);
});
waku.libp2p.dispatchEvent(
new CustomEvent("peer:connect", { detail: peerIdPx })
new CustomEvent("peer:connect", { detail: peerIdPx }),
);
expect(await peerConnectedPeerExchange).to.eq(true);
@ -163,7 +163,7 @@ describe("ConnectionManager", function () {
beforeEach(function () {
attemptDialSpy = sinon.spy(
waku.connectionManager as any,
"attemptDial"
"attemptDial",
);
});
@ -177,7 +177,7 @@ describe("ConnectionManager", function () {
const totalPeerIds = 5;
for (let i = 1; i <= totalPeerIds; i++) {
waku.libp2p.dispatchEvent(
new CustomEvent("peer:discovery", { detail: `peer-id-${i}` })
new CustomEvent("peer:discovery", { detail: `peer-id-${i}` }),
);
}
@ -186,7 +186,7 @@ describe("ConnectionManager", function () {
expect(attemptDialSpy.callCount).to.equal(
totalPeerIds,
"attemptDial should be called once for each peer:discovery event"
"attemptDial should be called once for each peer:discovery event",
);
});
});
@ -195,11 +195,11 @@ describe("ConnectionManager", function () {
beforeEach(function () {
getConnectionsStub = sinon.stub(
(waku.connectionManager as any).libp2p,
"getConnections"
"getConnections",
);
getTagNamesForPeerStub = sinon.stub(
waku.connectionManager as any,
"getTagNamesForPeer"
"getTagNamesForPeer",
);
dialPeerStub = sinon.stub(waku.connectionManager as any, "dialPeer");
});
@ -224,7 +224,7 @@ describe("ConnectionManager", function () {
// emit a peer:discovery event
waku.libp2p.dispatchEvent(
new CustomEvent("peer:discovery", { detail: bootstrapPeer })
new CustomEvent("peer:discovery", { detail: bootstrapPeer }),
);
// wait for the async function calls within attemptDial to finish
@ -233,7 +233,7 @@ describe("ConnectionManager", function () {
// check that dialPeer was called once
expect(dialPeerStub.callCount).to.equal(
1,
"dialPeer should be called for bootstrap peers"
"dialPeer should be called for bootstrap peers",
);
});
@ -248,7 +248,7 @@ describe("ConnectionManager", function () {
// emit first peer:discovery event
waku.libp2p.dispatchEvent(
new CustomEvent("peer:discovery", { detail: "bootstrap-peer" })
new CustomEvent("peer:discovery", { detail: "bootstrap-peer" }),
);
await delay(500);
@ -262,14 +262,14 @@ describe("ConnectionManager", function () {
waku.libp2p.dispatchEvent(
new CustomEvent("peer:discovery", {
detail: await createSecp256k1PeerId(),
})
}),
);
}
// check that dialPeer was called only once
expect(dialPeerStub.callCount).to.equal(
1,
"dialPeer should not be called more than once for bootstrap peers"
"dialPeer should not be called more than once for bootstrap peers",
);
});
});
@ -288,7 +288,7 @@ describe("ConnectionManager", function () {
// emit a peer:discovery event
waku.libp2p.dispatchEvent(
new CustomEvent("peer:discovery", { detail: pxPeer })
new CustomEvent("peer:discovery", { detail: pxPeer }),
);
// wait for the async function calls within attemptDial to finish
@ -297,7 +297,7 @@ describe("ConnectionManager", function () {
// check that dialPeer was called once
expect(dialPeerStub.callCount).to.equal(
1,
"dialPeer should be called for peers with PEER_EXCHANGE tags"
"dialPeer should be called for peers with PEER_EXCHANGE tags",
);
});
@ -316,7 +316,7 @@ describe("ConnectionManager", function () {
waku.libp2p.dispatchEvent(
new CustomEvent("peer:discovery", {
detail: await createSecp256k1PeerId(),
})
}),
);
await delay(500);
}

View File

@ -12,7 +12,6 @@ import { Libp2pComponents } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { expect } from "chai";
import { MemoryDatastore } from "datastore-core/memory";
import { Datastore } from "interface-datastore";
import { delay } from "../src/delay.js";
@ -27,7 +26,7 @@ describe("DNS Discovery: Compliance Test", function () {
peerStore: new PersistentPeerStore({
events: new EventEmitter(),
peerId: await createSecp256k1PeerId(),
datastore: new MemoryDatastore() as any as Datastore,
datastore: new MemoryDatastore(),
}),
} as unknown as Libp2pComponents;

View File

@ -197,10 +197,10 @@ describe("Waku Message Ephemeral field", () => {
}
const normalMsg = messages.find(
(msg) => bytesToUtf8(msg.payload) === normalTxt
(msg) => bytesToUtf8(msg.payload) === normalTxt,
);
const ephemeralMsg = messages.find(
(msg) => bytesToUtf8(msg.payload) === ephemeralTxt
(msg) => bytesToUtf8(msg.payload) === ephemeralTxt,
);
expect(normalMsg).to.not.be.undefined;
@ -246,10 +246,10 @@ describe("Waku Message Ephemeral field", () => {
}
const normalMsg = messages.find(
(msg) => bytesToUtf8(msg.payload) === normalTxt
(msg) => bytesToUtf8(msg.payload) === normalTxt,
);
const ephemeralMsg = messages.find(
(msg) => bytesToUtf8(msg.payload) === ephemeralTxt
(msg) => bytesToUtf8(msg.payload) === ephemeralTxt,
);
expect(normalMsg).to.not.be.undefined;
@ -296,10 +296,10 @@ describe("Waku Message Ephemeral field", () => {
}
const normalMsg = messages.find(
(msg) => bytesToUtf8(msg.payload) === normalTxt
(msg) => bytesToUtf8(msg.payload) === normalTxt,
);
const ephemeralMsg = messages.find(
(msg) => bytesToUtf8(msg.payload) === ephemeralTxt
(msg) => bytesToUtf8(msg.payload) === ephemeralTxt,
);
expect(normalMsg).to.not.be.undefined;

View File

@ -25,7 +25,7 @@ const TestEncoder = createEncoder({
async function runNodes(
context: Mocha.Context,
pubSubTopic?: string
pubSubTopic?: string,
): Promise<[NimGoNode, LightNode]> {
const nwakuOptional = pubSubTopic ? { topic: pubSubTopic } : {};
const nwaku = new NimGoNode(makeLogFileName(context));
@ -143,7 +143,7 @@ describe("Waku Light Push [node only] - custom pubsub topic", () => {
{ payload: utf8ToBytes(messageText) },
{
peerId: nimPeerId,
}
},
);
log("Ack received", pushResponse);
expect(pushResponse.recipients[0].toString()).to.eq(nimPeerId.toString());

View File

@ -74,7 +74,7 @@ describe("Peer Exchange", () => {
const doesPeerIdExistInResponse =
peerInfos.find(
({ ENR }) => ENR?.peerInfo?.id.toString() === nwaku1PeerId.toString()
({ ENR }) => ENR?.peerInfo?.id.toString() === nwaku1PeerId.toString(),
) !== undefined;
expect(doesPeerIdExistInResponse).to.be.equal(true);

View File

@ -41,7 +41,7 @@ describe("Peer Exchange", () => {
waku.libp2p.addEventListener("peer:discovery", (evt) => {
const peerId = evt.detail.id.toString();
const isBootstrapNode = predefinedNodes.find((n) =>
n.includes(peerId)
n.includes(peerId),
);
if (!isBootstrapNode) {
resolve(true);

View File

@ -59,7 +59,7 @@ describe("Waku Relay [node only]", () => {
log("Starting JS Waku instances");
[waku1, waku2] = await Promise.all([
createRelayNode({ staticNoiseKey: NOISE_KEY_1 }).then((waku) =>
waku.start().then(() => waku)
waku.start().then(() => waku),
),
createRelayNode({
staticNoiseKey: NOISE_KEY_2,
@ -121,7 +121,7 @@ describe("Waku Relay [node only]", () => {
const receivedMsgPromise: Promise<DecodedMessage> = new Promise(
(resolve) => {
void waku2.relay.subscribe([TestDecoder], resolve);
}
},
);
await waku1.relay.send(TestEncoder, message);
@ -131,7 +131,7 @@ describe("Waku Relay [node only]", () => {
expect(receivedMsg.contentTopic).to.eq(TestContentTopic);
expect(bytesToUtf8(receivedMsg.payload)).to.eq(messageText);
expect(receivedMsg.timestamp?.valueOf()).to.eq(
messageTimestamp.valueOf()
messageTimestamp.valueOf(),
);
});
@ -240,11 +240,11 @@ describe("Waku Relay [node only]", () => {
(resolve, reject) => {
const deleteObserver = waku2.relay.subscribe(
[createDecoder(contentTopic)],
reject
reject,
) as () => void;
deleteObserver();
setTimeout(resolve, 500);
}
},
);
await waku1.relay.send(createEncoder({ contentTopic }), {
payload: utf8ToBytes(messageText),
@ -311,7 +311,7 @@ describe("Waku Relay [node only]", () => {
const waku2ReceivedMsgPromise: Promise<DecodedMessage> = new Promise(
(resolve) => {
void waku2.relay.subscribe([TestDecoder], resolve);
}
},
);
// The promise **fails** if we receive a message on the default
@ -320,7 +320,7 @@ describe("Waku Relay [node only]", () => {
(resolve, reject) => {
void waku3.relay.subscribe([TestDecoder], reject);
setTimeout(resolve, 1000);
}
},
);
await waku1.relay.send(TestEncoder, {
@ -368,9 +368,9 @@ describe("Waku Relay [node only]", () => {
void waku2.relay.subscribe([TestDecoder], () =>
resolve({
payload: new Uint8Array([]),
} as DecodedMessage)
} as DecodedMessage),
);
}
},
);
let sendResult = await waku1.relay.send(TestEncoder, {
@ -430,7 +430,7 @@ describe("Waku Relay [node only]", () => {
const nimPeerId = await nwaku.getPeerId();
expect(subscribers.map((p) => p.toString())).to.contain(
nimPeerId.toString()
nimPeerId.toString(),
);
});
@ -461,16 +461,16 @@ describe("Waku Relay [node only]", () => {
const receivedMsgPromise: Promise<DecodedMessage> = new Promise(
(resolve) => {
void waku.relay.subscribe<DecodedMessage>(TestDecoder, (msg) =>
resolve(msg)
resolve(msg),
);
}
},
);
await nwaku.sendMessage(
NimGoNode.toMessageRpcQuery({
contentTopic: TestContentTopic,
payload: utf8ToBytes(messageText),
})
}),
);
const receivedMsg = await receivedMsgPromise;
@ -533,7 +533,7 @@ describe("Waku Relay [node only]", () => {
const waku2ReceivedMsgPromise: Promise<DecodedMessage> = new Promise(
(resolve) => {
void waku2.relay.subscribe(TestDecoder, resolve);
}
},
);
await waku1.relay.send(TestEncoder, message);

View File

@ -64,8 +64,8 @@ describe("Waku Store", () => {
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
})
)
}),
),
).to.be.true;
}
@ -134,8 +134,8 @@ describe("Waku Store", () => {
NimGoNode.toMessageRpcQuery({
payload: utf8ToBytes(`Message ${i}`),
contentTopic: TestContentTopic,
})
)
}),
),
).to.be.true;
}
@ -176,7 +176,7 @@ describe("Waku Store", () => {
expect(messages.length).be.eq(totalMsgs);
expect(bytesToUtf8(testMessage.payload)).to.be.eq(
bytesToUtf8(messages[cursorIndex + 1].payload)
bytesToUtf8(messages[cursorIndex + 1].payload),
);
});
@ -191,8 +191,8 @@ describe("Waku Store", () => {
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
})
)
}),
),
).to.be.true;
}
@ -211,7 +211,7 @@ describe("Waku Store", () => {
if (msg) {
messages.push(msg);
}
}
},
);
expect(messages?.length).eq(totalMsgs);
@ -232,8 +232,8 @@ describe("Waku Store", () => {
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
})
)
}),
),
).to.be.true;
}
@ -255,7 +255,7 @@ describe("Waku Store", () => {
}
return messages.length >= desiredMsgs;
},
{ pageSize: 7 }
{ pageSize: 7 },
);
expect(messages?.length).eq(desiredMsgs);
@ -271,8 +271,8 @@ describe("Waku Store", () => {
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
})
)
}),
),
).to.be.true;
await delay(1); // to ensure each timestamp is unique.
}
@ -292,7 +292,7 @@ describe("Waku Store", () => {
},
{
pageDirection: PageDirection.FORWARD,
}
},
);
expect(messages?.length).eq(totalMsgs);
@ -310,8 +310,8 @@ describe("Waku Store", () => {
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
})
)
}),
),
).to.be.true;
await delay(1); // to ensure each timestamp is unique.
}
@ -331,7 +331,7 @@ describe("Waku Store", () => {
},
{
pageDirection: PageDirection.BACKWARD,
}
},
);
messages = messages.reverse();
@ -476,8 +476,8 @@ describe("Waku Store", () => {
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
timestamp: messageTimestamps[i],
})
)
}),
),
).to.be.true;
}
@ -501,7 +501,7 @@ describe("Waku Store", () => {
{
peerId: nwakuPeerId,
timeFilter: { startTime, endTime: message1Timestamp },
}
},
);
const bothMessages: IMessage[] = [];
@ -516,7 +516,7 @@ describe("Waku Store", () => {
startTime,
endTime,
},
}
},
);
expect(firstMessages?.length).eq(1);
@ -537,8 +537,8 @@ describe("Waku Store", () => {
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
})
)
}),
),
).to.be.true;
await delay(1); // to ensure each timestamp is unique.
}
@ -558,7 +558,7 @@ describe("Waku Store", () => {
messages.push(msg);
return messages.length >= desiredMsgs;
},
{ pageSize: 7 }
{ pageSize: 7 },
);
expect(messages?.length).eq(desiredMsgs);
@ -597,8 +597,8 @@ describe("Waku Store, custom pubsub topic", () => {
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
}),
customPubSubTopic
)
customPubSubTopic,
),
).to.be.true;
}

View File

@ -53,7 +53,7 @@ describe("Util: toAsyncIterator: Filter", () => {
waku.filter,
TestDecoder,
{},
{ timeoutMs: 1000 }
{ timeoutMs: 1000 },
);
await waku.lightPush.send(TestEncoder, sent);
@ -70,7 +70,7 @@ describe("Util: toAsyncIterator: Filter", () => {
waku.filter,
TestDecoder,
{},
{ timeoutMs: 1000 }
{ timeoutMs: 1000 },
);
await waku.lightPush.send(TestEncoder, {
@ -93,7 +93,7 @@ describe("Util: toAsyncIterator: Filter", () => {
waku.filter,
TestDecoder,
{},
{ timeoutMs: 1000 }
{ timeoutMs: 1000 },
);
await waku.lightPush.send(TestEncoder, {

View File

@ -88,7 +88,7 @@ describe("Wait for remote peer", function () {
(reason) => {
expect(reason).to.eq("Timed out waiting for a remote peer.");
done();
}
},
);
})
.catch((e) => done(e));
@ -167,7 +167,7 @@ describe("Wait for remote peer", function () {
await waitForRemotePeer(waku2, [Protocols.LightPush]);
const peers = (await waku2.lightPush.peers()).map((peer) =>
peer.id.toString()
peer.id.toString(),
);
const nimPeerId = multiAddrWithId.getPeerId();
@ -195,7 +195,7 @@ describe("Wait for remote peer", function () {
await waitForRemotePeer(waku2, [Protocols.Filter]);
const peers = (await waku2.filter.peers()).map((peer) =>
peer.id.toString()
peer.id.toString(),
);
const nimPeerId = multiAddrWithId.getPeerId();
@ -227,13 +227,13 @@ describe("Wait for remote peer", function () {
]);
const filterPeers = (await waku2.filter.peers()).map((peer) =>
peer.id.toString()
peer.id.toString(),
);
const storePeers = (await waku2.store.peers()).map((peer) =>
peer.id.toString()
peer.id.toString(),
);
const lightPushPeers = (await waku2.lightPush.peers()).map((peer) =>
peer.id.toString()
peer.id.toString(),
);
const nimPeerId = multiAddrWithId.getPeerId();

View File

@ -34,7 +34,7 @@ describe("Use static and several ENR trees for bootstrap", function () {
bootstrap({ list: [multiAddrWithId.toString()] }),
wakuDnsDiscovery(
[enrTree["PROD"], enrTree["TEST"]],
NODE_REQUIREMENTS
NODE_REQUIREMENTS,
),
],
},
@ -48,8 +48,8 @@ describe("Use static and several ENR trees for bootstrap", function () {
// should also have the bootstrap peer
expect(
peersDiscovered.find(
(p) => p.id.toString() === multiAddrWithId.getPeerId()?.toString()
)
(p) => p.id.toString() === multiAddrWithId.getPeerId()?.toString(),
),
).to.not.be.undefined;
});
});

View File

@ -131,7 +131,7 @@ describe("Decryption Keys", () => {
this.timeout(5000);
[waku1, waku2] = await Promise.all([
createRelayNode({ staticNoiseKey: NOISE_KEY_1 }).then((waku) =>
waku.start().then(() => waku)
waku.start().then(() => waku),
),
createRelayNode({
staticNoiseKey: NOISE_KEY_2,
@ -175,7 +175,7 @@ describe("Decryption Keys", () => {
const receivedMsgPromise: Promise<DecodedMessage> = new Promise(
(resolve) => {
void waku2.relay.subscribe([decoder], resolve);
}
},
);
await waku1.relay.send(encoder, message);
@ -225,10 +225,10 @@ describe("User Agent", () => {
]);
expect(bytesToUtf8(waku1PeerInfo.metadata.get("AgentVersion")!)).to.eq(
waku1UserAgent
waku1UserAgent,
);
expect(bytesToUtf8(waku2PeerInfo.metadata.get("AgentVersion")!)).to.eq(
DefaultUserAgent
DefaultUserAgent,
);
});
});

File diff suppressed because it is too large Load Diff

View File

@ -79,7 +79,6 @@
"@waku/interfaces": "0.0.17",
"cspell": "^6.31.1",
"npm-run-all": "^4.1.5",
"prettier": "^2.8.8",
"rollup": "^3.21.3",
"typescript": "^5.0.4"
},

View File

@ -35,7 +35,7 @@ export const utf8ToBytes = (s: string): Uint8Array => fromString(s, "utf8");
*/
export function concat(
byteArrays: Uint8Array[],
totalLength?: number
totalLength?: number,
): Uint8Array {
const len =
totalLength ?? byteArrays.reduce((acc, curr) => acc + curr.length, 0);

Some files were not shown because too many files have changed in this diff Show More