mirror of https://github.com/waku-org/js-waku.git
Support ENR waku2 field
parent d30a918a44
commit a5219efc14
@@ -1,7 +1,7 @@
import debug from "debug";
import { Multiaddr } from "multiaddr";

-import { DnsNodeDiscovery } from "./dns";
+import { DnsNodeDiscovery, NodeCapabilityCount } from "./dns";

import { getPredefinedBootstrapNodes, getPseudoRandomSubset } from "./index";
@@ -10,11 +10,12 @@ const dbg = debug("waku:discovery:bootstrap");
/**
 * Setup discovery method used to bootstrap.
 *
- * Only one method is used. `default`, `peers`, `getPeers` and `enr` options are mutually exclusive.
+ * Only one method is used. [[default]], [[peers]], [[getPeers]] and [[enrUrl]] options are mutually exclusive.
 */
export interface BootstrapOptions {
  /**
   * The maximum of peers to connect to as part of the bootstrap process.
   * This only applies if [[peers]] or [[getPeers]] is used.
   *
   * @default [[Bootstrap.DefaultMaxPeers]]
   */
@@ -39,12 +40,21 @@ export interface BootstrapOptions {
  /**
   * An EIP-1459 ENR Tree URL. For example:
   * "enrtree://AOFTICU2XWDULNLZGRMQS4RIZPAZEHYMV4FYHAPW563HNRAOERP7C@test.nodes.vac.dev"
   *
   * [[wantedNodeCapabilityCount]] MUST be passed when using this option.
   */
  enrUrl?: string;
  /**
   * Specifies what node capabilities (protocol) must be returned.
   * This only applies when [[enrUrl]] is passed (EIP-1459 DNS Discovery).
   */
  wantedNodeCapabilityCount?: Partial<NodeCapabilityCount>;
}

/**
 * Parse options and expose function to return bootstrap peer addresses.
 *
 * @throws if an invalid combination of options is passed, see [[BootstrapOptions]] for details.
 */
export class Bootstrap {
  public static DefaultMaxPeers = 1;
@@ -91,13 +101,16 @@ export class Bootstrap {
      ).map((node) => new Multiaddr(node));
    };
  } else if (opts.enrUrl) {
+   const wantedNodeCapabilityCount = opts.wantedNodeCapabilityCount;
+   if (!wantedNodeCapabilityCount)
+     throw "`wantedNodeCapabilityCount` must be defined when using `enrUrl`";
    const enrUrl = opts.enrUrl;
    dbg("Use provided EIP-1459 ENR Tree URL.");

    const dns = DnsNodeDiscovery.dnsOverHttp();

    this.getBootstrapPeers = async (): Promise<Multiaddr[]> => {
-     const enrs = await dns.getPeers(maxPeers, [enrUrl]);
+     const enrs = await dns.getPeers([enrUrl], wantedNodeCapabilityCount);
      dbg(`Found ${enrs.length} peers`);
      return enrs.map((enr) => enr.getFullMultiaddrs()).flat();
    };
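
Taken together, the bootstrap changes above wire DNS discovery into BootstrapOptions. A minimal usage sketch, not part of the diff itself — the enrtree URL is the test one quoted in the option docs, and the exact import path may differ in your setup:

import { Bootstrap } from "js-waku";

const bootstrap = new Bootstrap({
  enrUrl:
    "enrtree://AOFTICU2XWDULNLZGRMQS4RIZPAZEHYMV4FYHAPW563HNRAOERP7C@test.nodes.vac.dev",
  // Must accompany enrUrl; otherwise the constructor throws (see above).
  wantedNodeCapabilityCount: { relay: 3, store: 1 },
});

// Resolves to dialable multiaddrs extracted from the discovered ENRs.
const addrs = await bootstrap.getBootstrapPeers();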
@@ -73,43 +73,16 @@ describe("DNS Node Discovery", () => {

  it("retrieves a single peer", async function () {
    mockDns.addRes(`${rootDomain}.${host}`, [singleBranch]);
-   mockDns.addRes(`${branchDomainA}.${host}`, [mockData.enrA]);
+   mockDns.addRes(`${branchDomainA}.${host}`, [mockData.enrWithWaku2Relay]);

    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-   const peers = await dnsNodeDiscovery.getPeers(1, [mockData.enrTree]);
+   const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+     relay: 1,
+   });

    expect(peers.length).to.eq(1);
    expect(peers[0].ip).to.eq("45.77.40.127");
    expect(peers[0].tcp).to.eq(30303);
  });

-  it("retrieves all peers (2) when maxQuantity larger than DNS tree size", async function () {
-    mockDns.addRes(`${rootDomain}.${host}`, [doubleBranch]);
-    mockDns.addRes(`${branchDomainA}.${host}`, [mockData.enrA]);
-    mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrB]);
-
-    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-    const peers = await dnsNodeDiscovery.getPeers(50, [mockData.enrTree]);
-
-    expect(peers.length).to.eq(2);
-    expect(peers[0].ip).to.not.eq(peers[1].ip);
-  });
-
-  it("retrieves all peers (3) when branch entries are composed of multiple strings", async function () {
-    mockDns.addRes(`${rootDomain}.${host}`, multiComponentBranch);
-    mockDns.addRes(`${branchDomainA}.${host}`, [mockData.enr]);
-    mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrA]);
-    mockDns.addRes(`${partialBranchA}${partialBranchB}.${host}`, [
-      mockData.enrB,
-    ]);
-
-    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-    const peers = await dnsNodeDiscovery.getPeers(50, [mockData.enrTree]);
-
-    expect(peers.length).to.eq(3);
-    expect(peers[0].ip).to.not.eq(peers[1].ip);
-    expect(peers[0].ip).to.not.eq(peers[2].ip);
-    expect(peers[1].ip).to.not.eq(peers[2].ip);
-    expect(peers[0].ip).to.eq("192.168.178.251");
-    expect(peers[0].tcp).to.eq(8002);
-  });
-
  it("it tolerates circular branch references", async function () {
@@ -119,7 +92,9 @@ describe("DNS Node Discovery", () => {
    mockDns.addRes(`${branchDomainA}.${host}`, [singleBranch]);

    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-   const peers = await dnsNodeDiscovery.getPeers(1, [mockData.enrTree]);
+   const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+     relay: 1,
+   });

    expect(peers.length).to.eq(0);
  });
@@ -131,14 +106,16 @@ describe("DNS Node Discovery", () => {
    mockDns.addRes(`${branchDomainA}.${host}`, []);

    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-   let peers = await dnsNodeDiscovery.getPeers(1, [mockData.enrTree]);
+   let peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+     relay: 1,
+   });

    expect(peers.length).to.eq(0);

    // No TXT records case
    mockDns.addRes(`${branchDomainA}.${host}`, []);

-   peers = await dnsNodeDiscovery.getPeers(1, [mockData.enrTree]);
+   peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { relay: 1 });
    expect(peers.length).to.eq(0);
  });
@@ -147,35 +124,148 @@ describe("DNS Node Discovery", () => {
    mockDns.addThrow(`${branchDomainC}.${host}`);

    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-   const peers = await dnsNodeDiscovery.getPeers(1, [mockData.enrTree]);
+   const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+     relay: 1,
+   });
    expect(peers.length).to.eq(0);
  });

  it("ignores unrecognized TXT record formats", async function () {
    mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrBranchBadPrefix]);
    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-   const peers = await dnsNodeDiscovery.getPeers(1, [mockData.enrTree]);
+   const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+     relay: 1,
+   });
    expect(peers.length).to.eq(0);
  });

  it("caches peers it previously fetched", async function () {
    mockDns.addRes(`${rootDomain}.${host}`, [errorBranchB]);
-   mockDns.addRes(`${branchDomainD}.${host}`, [mockData.enrA]);
+   mockDns.addRes(`${branchDomainD}.${host}`, [mockData.enrWithWaku2Relay]);

    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-   const peersA = await dnsNodeDiscovery.getPeers(1, [mockData.enrTree]);
+   const peersA = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+     relay: 1,
+   });
    expect(peersA.length).to.eq(1);

    // Specify that a subsequent network call retrieving the same peer should throw.
    // This test passes only if the peer is fetched from cache
    mockDns.addThrow(`${branchDomainD}.${host}`);

-   const peersB = await dnsNodeDiscovery.getPeers(1, [mockData.enrTree]);
+   const peersB = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+     relay: 1,
+   });
    expect(peersB.length).to.eq(1);
    expect(peersA[0].ip).to.eq(peersB[0].ip);
  });
});

describe("DNS Node Discovery w/ capabilities", () => {
|
||||
let mockDns: MockDNS;
|
||||
|
||||
beforeEach(() => {
|
||||
mockDns = new MockDNS();
|
||||
mockDns.addRes(host, [mockData.enrRoot]);
|
||||
});
|
||||
|
||||
it("should only return 1 node with relay capability", async () => {
|
||||
mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrWithWaku2Relay]);
|
||||
|
||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||
relay: 1,
|
||||
});
|
||||
|
||||
expect(peers.length).to.eq(1);
|
||||
expect(peers[0].peerId?.toB58String()).to.eq(
|
||||
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
|
||||
);
|
||||
});
|
||||
|
||||
it("should only return 1 node with relay and store capability", async () => {
|
||||
mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrWithWaku2RelayStore]);
|
||||
|
||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||
store: 1,
|
||||
relay: 1,
|
||||
});
|
||||
|
||||
expect(peers.length).to.eq(1);
|
||||
expect(peers[0].peerId?.toB58String()).to.eq(
|
||||
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
|
||||
);
|
||||
});
|
||||
|
||||
it("should only return 1 node with store capability", async () => {
|
||||
mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrWithWaku2Store]);
|
||||
|
||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||
store: 1,
|
||||
});
|
||||
|
||||
expect(peers.length).to.eq(1);
|
||||
expect(peers[0].peerId?.toB58String()).to.eq(
|
||||
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
|
||||
);
|
||||
});
|
||||
|
||||
it("retrieves all peers (2) when cannot fulfill all requirements", async () => {
|
||||
mockDns.addRes(`${rootDomain}.${host}`, [doubleBranch]);
|
||||
mockDns.addRes(`${branchDomainA}.${host}`, [
|
||||
mockData.enrWithWaku2RelayStore,
|
||||
]);
|
||||
mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]);
|
||||
|
||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||
store: 1,
|
||||
relay: 2,
|
||||
filter: 1,
|
||||
});
|
||||
|
||||
expect(peers.length).to.eq(2);
|
||||
const peerIds = peers.map((p) => p.peerId?.toB58String());
|
||||
expect(peerIds).to.contain(
|
||||
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
|
||||
);
|
||||
expect(peerIds).to.contain(
|
||||
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
|
||||
);
|
||||
});
|
||||
|
||||
it("retrieves all peers (3) when branch entries are composed of multiple strings", async function () {
|
||||
mockDns.addRes(`${rootDomain}.${host}`, multiComponentBranch);
|
||||
mockDns.addRes(`${branchDomainA}.${host}`, [
|
||||
mockData.enrWithWaku2RelayStore,
|
||||
]);
|
||||
mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]);
|
||||
mockDns.addRes(`${partialBranchA}${partialBranchB}.${host}`, [
|
||||
mockData.enrWithWaku2Store,
|
||||
]);
|
||||
|
||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||
store: 2,
|
||||
relay: 2,
|
||||
});
|
||||
|
||||
expect(peers.length).to.eq(3);
|
||||
const peerIds = peers.map((p) => p.peerId?.toB58String());
|
||||
expect(peerIds).to.contain(
|
||||
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
|
||||
);
|
||||
expect(peerIds).to.contain(
|
||||
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
|
||||
);
|
||||
expect(peerIds).to.contain(
|
||||
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("DNS Node Discovery [live data]", function () {
|
||||
const publicKey = "AOFTICU2XWDULNLZGRMQS4RIZPAZEHYMV4FYHAPW563HNRAOERP7C";
|
||||
const fqdn = "test.waku.nodes.status.im";
|
||||
|
@@ -192,7 +282,12 @@ describe("DNS Node Discovery [live data]", function () {
    this.timeout(10000);
    // Google's dns server address. Needs to be set explicitly to run in CI
    const dnsNodeDiscovery = DnsNodeDiscovery.dnsOverHttp();
-   const peers = await dnsNodeDiscovery.getPeers(maxQuantity, [enrTree]);
+   const peers = await dnsNodeDiscovery.getPeers([enrTree], {
+     relay: maxQuantity,
+     store: maxQuantity,
+     filter: maxQuantity,
+     lightPush: maxQuantity,
+   });

    expect(peers.length).to.eq(maxQuantity);
@@ -6,6 +6,7 @@ import { ENR } from "../enr";

import { DnsOverHttps, Endpoints } from "./dns_over_https";
import { ENRTree } from "./enrtree";
+import fetchNodesUntilCapabilitiesFulfilled from "./fetch_nodes";

const dbg = debug("waku:discovery:dns");
@@ -19,6 +20,13 @@ export interface DnsClient {
  resolveTXT: (domain: string) => Promise<string[]>;
}

+export interface NodeCapabilityCount {
+  relay: number;
+  store: number;
+  filter: number;
+  lightPush: number;
+}
+
export class DnsNodeDiscovery {
  private readonly dns: DnsClient;
  private readonly _DNSTreeCache: { [key: string]: string };
@@ -31,38 +39,29 @@ export class DnsNodeDiscovery {

  /**
   * Returns a list of verified peers listed in an EIP-1459 DNS tree. Method may
-  * return fewer peers than requested if `maxQuantity` is larger than the number
-  * of ENR records or the number of errors/duplicate peers encountered by randomized
-  * search exceeds `maxQuantity` plus the `errorTolerance` factor.
+  * return fewer peers than requested if [[wantedNodeCapabilityCount]] requires
+  * a larger quantity of peers than available or the number of errors/duplicate
+  * peers encountered by randomized search exceeds the sum of the fields of
+  * [[wantedNodeCapabilityCount]] plus the [[_errorTolerance]] factor.
   */
-  async getPeers(maxQuantity: number, enrTreeUrls: string[]): Promise<ENR[]> {
-    let totalSearches = 0;
-    const peers: ENR[] = [];
-
+  async getPeers(
+    enrTreeUrls: string[],
+    wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
+  ): Promise<ENR[]> {
    const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
    const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
+   const context: SearchContext = {
+     domain,
+     publicKey,
+     visits: {},
+   };

-   while (
-     peers.length < maxQuantity &&
-     totalSearches < maxQuantity + this._errorTolerance
-   ) {
-     const context: SearchContext = {
-       domain,
-       publicKey,
-       visits: {},
-     };
-
-     const peer = await this._search(domain, context);
-
-     if (peer && isNewPeer(peer, peers)) {
-       peers.push(peer);
-       dbg(
-         `got new peer candidate from DNS address=${peer.nodeId}@${peer.ip}`
-       );
-     }
-
-     totalSearches++;
-   }
+   const peers = await fetchNodesUntilCapabilitiesFulfilled(
+     wantedNodeCapabilityCount,
+     this._errorTolerance,
+     () => this._search(domain, context)
+   );
+   dbg("retrieved peers: ", peers);
    return peers;
  }
@@ -189,20 +188,3 @@ function selectRandomPath(branches: string[], context: SearchContext): string {

  return branches[index];
}
-
-/**
- * @returns false if candidate peer already exists in the
- * current collection of peers based on the node id value;
- * true otherwise.
- */
-function isNewPeer(peer: ENR | null, peers: ENR[]): boolean {
-  if (!peer || !peer.nodeId) return false;
-
-  for (const existingPeer of peers) {
-    if (peer.nodeId === existingPeer.nodeId) {
-      return false;
-    }
-  }
-
-  return true;
-}
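
With the reworked signature, callers state capability requirements instead of a peer count. A sketch of a live call, not part of the diff — the enrtree URL below is assembled from the publicKey and fqdn used by the live-data test above:

const enrTree =
  "enrtree://AOFTICU2XWDULNLZGRMQS4RIZPAZEHYMV4FYHAPW563HNRAOERP7C@test.waku.nodes.status.im";

const dns = DnsNodeDiscovery.dnsOverHttp();
// Search until one relay node and one store node are found,
// or the error tolerance is exhausted.
const enrs = await dns.getPeers([enrTree], { relay: 1, store: 1 });
enrs.forEach((enr) => console.log(enr.peerId?.toB58String(), enr.waku2));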
@@ -0,0 +1,122 @@
+import { expect } from "chai";
+import { Multiaddr } from "multiaddr";
+import PeerId from "peer-id";
+
+import { ENR, Waku2 } from "../enr";
+
+import fetchNodesUntilCapabilitiesFulfilled from "./fetch_nodes";
+
+async function createEnr(waku2: Waku2): Promise<ENR> {
+  const peerId = await PeerId.create({ keyType: "secp256k1" });
+  const enr = ENR.createFromPeerId(peerId);
+  enr.setLocationMultiaddr(new Multiaddr("/ip4/18.223.219.100/udp/9000"));
+  enr.multiaddrs = [
+    new Multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"),
+    new Multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"),
+    new Multiaddr(
+      "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
+    ),
+  ];
+
+  enr.waku2 = waku2;
+  return enr;
+}
+
+const Waku2None = {
+  relay: false,
+  store: false,
+  filter: false,
+  lightPush: false,
+};
+
+describe("Fetch nodes until capabilities are fulfilled", function () {
+  it("1 Relay, 1 fetch", async function () {
+    const relayNode = await createEnr({ ...Waku2None, relay: true });
+
+    const getNode = (): Promise<ENR> => Promise.resolve(relayNode);
+
+    const res = await fetchNodesUntilCapabilitiesFulfilled(
+      { relay: 1 },
+      0,
+      getNode
+    );
+
+    expect(res.length).to.eq(1);
+    expect(res[0].peerId!.toB58String()).to.eq(relayNode.peerId?.toB58String());
+  });
+
+  it("1 Store, 2 fetches", async function () {
+    const relayNode = await createEnr({ ...Waku2None, relay: true });
+    const storeNode = await createEnr({ ...Waku2None, store: true });
+
+    const retrievedNodes = [relayNode, storeNode];
+
+    let fetchCount = 0;
+    const getNode = (): Promise<ENR> => {
+      const node = retrievedNodes[fetchCount];
+      fetchCount++;
+      return Promise.resolve(node);
+    };
+
+    const res = await fetchNodesUntilCapabilitiesFulfilled(
+      { store: 1 },
+      1,
+      getNode
+    );
+
+    expect(res.length).to.eq(1);
+    expect(res[0].peerId!.toB58String()).to.eq(storeNode.peerId?.toB58String());
+  });
+
+  it("1 Store, 2 relays, 2 fetches", async function () {
+    const relayNode1 = await createEnr({ ...Waku2None, relay: true });
+    const relayNode2 = await createEnr({ ...Waku2None, relay: true });
+    const relayNode3 = await createEnr({ ...Waku2None, relay: true });
+    const relayStoreNode = await createEnr({
+      ...Waku2None,
+      relay: true,
+      store: true,
+    });
+
+    const retrievedNodes = [relayNode1, relayNode2, relayNode3, relayStoreNode];
+
+    let fetchCount = 0;
+    const getNode = (): Promise<ENR> => {
+      const node = retrievedNodes[fetchCount];
+      fetchCount++;
+      return Promise.resolve(node);
+    };
+
+    const res = await fetchNodesUntilCapabilitiesFulfilled(
+      { store: 1, relay: 2 },
+      1,
+      getNode
+    );
+
+    expect(res.length).to.eq(3);
+    expect(res[0].peerId!.toB58String()).to.eq(
+      relayNode1.peerId?.toB58String()
+    );
+    expect(res[1].peerId!.toB58String()).to.eq(
+      relayNode2.peerId?.toB58String()
+    );
+    expect(res[2].peerId!.toB58String()).to.eq(
+      relayStoreNode.peerId?.toB58String()
+    );
+  });
+
+  it("1 Relay, 1 Filter, gives up", async function () {
+    const relayNode = await createEnr({ ...Waku2None, relay: true });
+
+    const getNode = (): Promise<ENR> => Promise.resolve(relayNode);
+
+    const res = await fetchNodesUntilCapabilitiesFulfilled(
+      { filter: 1, relay: 1 },
+      5,
+      getNode
+    );
+
+    expect(res.length).to.eq(1);
+    expect(res[0].peerId!.toB58String()).to.eq(relayNode.peerId?.toB58String());
+  });
+});
@@ -0,0 +1,130 @@
+import { debug } from "debug";
+
+import { ENR, Waku2 } from "../enr";
+
+import { NodeCapabilityCount } from "./dns";
+
+const dbg = debug("waku:discovery:fetch_nodes");
+
+/**
+ * Fetch nodes using passed [[getNode]] until all wanted capabilities are
+ * fulfilled or the number of [[getNode]] calls exceeds the sum of
+ * [[wantedNodeCapabilityCount]] plus [[errorTolerance]].
+ */
+export default async function fetchNodesUntilCapabilitiesFulfilled(
+  wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
+  errorTolerance: number,
+  getNode: () => Promise<ENR | null>
+): Promise<ENR[]> {
+  const wanted = {
+    relay: wantedNodeCapabilityCount.relay ?? 0,
+    store: wantedNodeCapabilityCount.store ?? 0,
+    filter: wantedNodeCapabilityCount.filter ?? 0,
+    lightPush: wantedNodeCapabilityCount.lightPush ?? 0,
+  };
+
+  const maxSearches =
+    wanted.relay + wanted.store + wanted.filter + wanted.lightPush;
+
+  const actual = {
+    relay: 0,
+    store: 0,
+    filter: 0,
+    lightPush: 0,
+  };
+
+  let totalSearches = 0;
+  const peers: ENR[] = [];
+
+  while (
+    !isSatisfied(wanted, actual) &&
+    totalSearches < maxSearches + errorTolerance
+  ) {
+    const peer = await getNode();
+    if (peer && isNewPeer(peer, peers)) {
+      // ENRs without a waku2 key are ignored.
+      if (peer.waku2) {
+        if (helpsSatisfyCapabilities(peer.waku2, wanted, actual)) {
+          addCapabilities(peer.waku2, actual);
+          peers.push(peer);
+        }
+      }
+      dbg(`got new peer candidate from DNS address=${peer.nodeId}@${peer.ip}`);
+    }
+
+    totalSearches++;
+  }
+  return peers;
+}
+
+function isSatisfied(
+  wanted: NodeCapabilityCount,
+  actual: NodeCapabilityCount
+): boolean {
+  return (
+    actual.relay >= wanted.relay &&
+    actual.store >= wanted.store &&
+    actual.filter >= wanted.filter &&
+    actual.lightPush >= wanted.lightPush
+  );
+}
+
+function isNewPeer(peer: ENR, peers: ENR[]): boolean {
+  if (!peer.nodeId) return false;
+
+  for (const existingPeer of peers) {
+    if (peer.nodeId === existingPeer.nodeId) {
+      return false;
+    }
+  }
+
+  return true;
+}
+
+function addCapabilities(node: Waku2, total: NodeCapabilityCount): void {
+  if (node.relay) total.relay += 1;
+  if (node.store) total.store += 1;
+  if (node.filter) total.filter += 1;
+  if (node.lightPush) total.lightPush += 1;
+}
+
+/**
+ * Checks if the proposed ENR [[node]] helps satisfy the [[wanted]] capabilities,
+ * considering the [[actual]] capabilities of nodes retrieved so far.
+ *
+ * @throws If the function is called when the wanted capabilities are already fulfilled.
+ */
+function helpsSatisfyCapabilities(
+  node: Waku2,
+  wanted: NodeCapabilityCount,
+  actual: NodeCapabilityCount
+): boolean {
+  if (isSatisfied(wanted, actual)) {
+    throw "Internal Error: Waku2 wanted capabilities are already fulfilled";
+  }
+
+  const missing = missingCapabilities(wanted, actual);
+
+  return (
+    (missing.relay && node.relay) ||
+    (missing.store && node.store) ||
+    (missing.filter && node.filter) ||
+    (missing.lightPush && node.lightPush)
+  );
+}
+
+/**
+ * Return a [[Waku2]] Object for which capabilities are set to true if they are
+ * [[wanted]] yet missing from [[actual]].
+ */
+function missingCapabilities(
+  wanted: NodeCapabilityCount,
+  actual: NodeCapabilityCount
+): Waku2 {
+  return {
+    relay: actual.relay < wanted.relay,
+    store: actual.store < wanted.store,
+    filter: actual.filter < wanted.filter,
+    lightPush: actual.lightPush < wanted.lightPush,
+  };
+}
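
The termination rule above caps the number of fetches at the sum of the wanted counts plus errorTolerance. A small sketch, not part of the diff, showing the bound with a getNode that always misses:

let calls = 0;
const getNode = (): Promise<null> => {
  calls++;
  return Promise.resolve(null); // simulate a DNS miss
};

// Wanted counts sum to 2 and errorTolerance is 3, so at most 5 calls are made.
const res = await fetchNodesUntilCapabilitiesFulfilled(
  { relay: 1, store: 1 },
  3,
  getNode
);
console.log(res.length, calls); // 0 5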
@@ -3,7 +3,7 @@ import { shuffle } from "libp2p-gossipsub/src/utils";
export { getPredefinedBootstrapNodes } from "./predefined";
export * as predefined from "./predefined";
export { Bootstrap, BootstrapOptions } from "./bootstrap";
export { DnsClient, DnsNodeDiscovery, SearchContext } from "./dns";
export * as dns from "./dns";
export { Endpoints, DnsOverHttps } from "./dns_over_https";
export { ENRTree, ENRTreeValues, ENRRootValues } from "./enrtree";
@@ -1,18 +1,17 @@
{
  "dns": {
    "publicKey": "AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE",
    "enr": "enr:-Je4QA1w6JNgH44256YxSTujRYIIy-oeCzL3tIvCIIHEZ_HgWbbFlrtfghWaGKQA9PH2INlnOGiKAU66hhVEoocrZdo0g2V0aMfGhOAp6ZGAgmlkgnY0gmlwhChxb4eJc2VjcDI1NmsxoQMla1-eA4bdHAeDEGv_z115bE16iA4GxcbGd-OlmKnSpYN0Y3CCdl-DdWRwgnZf",
    "enrA": "enr:-Jq4QAopXcF_SSfOwl_AmLdrMUnHQO1Rx-XV4gYeySSK32PTbQ8volkh3IQy1ag1Gkl6O-C5rjskj3EyDi8XVzck4PMVg2V0aMrJhKALwySDbxWAgmlkgnY0gmlwhC1NKH-Jc2VjcDI1NmsxoQO5wMEjJLtqT-h6zhef0xsO-SW-pcQD-yuNqCr3GTEZFoN0Y3CCdl-DdWRwgnZf",
    "enrB": "enr:-Je4QAFx_6rFjCxCLPUbxIA_KS7FhCYeTU6fXmbj1V08f8DPCUAB9bLoY2Yy7q2hIEby7Yf6e_v7gbofloB1oTnjqeYDg2V0aMfGhOAp6ZGAgmlkgnY0gmlwhLxf-D2Jc2VjcDI1NmsxoQOou7vgUXL96E5CzBsCE6N1GSMqlAACtUxRiNpq6vnB6IN0Y3CCdl-DdWRwgnZf",
    "enrRoot": "enrtree-root:v1 e=JORXBYVVM7AEKETX5DGXW44EAY l=FDXN3SN67NA5DKA4J2GOK7BVQI seq=1839 sig=Ma7yIqW2gj59dY8F6plfL7dfotaBPz285mu_XZK1e5VRzNrnf0pCAfacu4fBLuE7jMX-nDbqCM1sFiWWLq8WogE",
    "enrBranch": "enrtree-branch:D2SNLTAGWNQ34NTQTPHNZDECFU,67BLTJEU5R2D5S3B4QKJSBRFCY,A2HDMZBB4JIU53VTEGC4TG6P4A",
    "enrTree": "enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@nodes.example.org",
    "enrBadPrefix": "enrabc:-Je4QA1w6JNgH44256YxSTujRYIIy-oeCzL3tIvCIIHEZ_HgWbbFlrtfghWaGKQA9PH2INlnOGiKAU66hhVEoocrZdo0g2V0aMfGhOAp6ZGAgmlkgnY0gmlwhChxb4eJc2VjcDI1NmsxoQMla1-eA4bdHAeDEGv_z115bE16iA4GxcbGd-OlmKnSpYN0Y3CCdl-DdWRwgnZf",
    "enrRootBadPrefix": "enrtree:v1 e=JORXBYVVM7AEKETX5DGXW44EAY l=FDXN3SN67NA5DKA4J2GOK7BVQI seq=1839 sig=Ma7yIqW2gj59dY8F6plfL7dfotaBPz285mu_XZK1e5VRzNrnf0pCAfacu4fBLuE7jMX-nDbqCM1sFiWWLq8WogE",
    "enrBranchBadPrefix": "Z64M,JOECK7UUYUFVX24QGXYLR3UHDU,RR6SC4GUZBKLFA2WO4IUY6YGEE,EQRME5EAOS7AJHHLDDZNDYT7GI,JXHUMLDSGKU6UQWYFMNCFYQFHQ,4SNDLPNM3CBG2KLBMRSTHWFNP4,WEEEFCKUXOGU4QPKCRBBEHQLEY,CPXM5AOSTICZ3TODJFQACGBWMU,7U26GD37NS6DV72PDAURZI4WUY,MYLQIGMR5GTKPPBMXIINZ2ALGU",
    "enrTreeBadPrefix": "entree-branch://AM5FCQLWIZX2QFPNJAP7VUERCCRNGRHWZG3YYHIUV7BVDQ5FDPRT2@nodes.example.org",
    "enrRootBadSig": "enrtree-root:v1 e=JORXBYVVM7AEKETX5DGXW44EAY l=FDXN3SN67NA5DKA4J2GOK7BVQI seq=1839 sig=Aa7yIqW2gj59dY8F6plfL7dfotaBPz285mu_XZK1e5VRzNrnf0pCAfacu4fBLuE7jMX-nDbqCM1sFiWWLq8WogE",
    "enrRootMalformed": "enrtree-root:v1 e=FDXN3SN67NA5DKA4J2GOK7BVQI seq=1839 sig=Ma7yIqW2gj59dY8F6plfL7dfotaBPz285mu_XZK1e5VRzNrnf0pCAfacu4fBLuE7jMX-nDbqCM1sFiWWLq8WogE",
-   "enrTreeMalformed": "enrtree://AM5FCQLWIZX2QFPNJAP7VUERCCRNGRHWZG3YYHIUV7BVDQ5FDPRT2nodes.example.org"
+   "enrTreeMalformed": "enrtree://AM5FCQLWIZX2QFPNJAP7VUERCCRNGRHWZG3YYHIUV7BVDQ5FDPRT2nodes.example.org",
+   "enrWithWaku2Relay": "enr:-KO4QANhx2beNpDw8LfX8hdyoNocMD5WCBY28EjKA50DyJv9QDxE5IWafE_O_nhbA31gmasx3WjpH517do3GqmzQvmaAgmlkgnY0gmlwhMCosvuKbXVsdGlhZGRyc4wACgTAqLL7Bh9C3QOJc2VjcDI1NmsxoQOmp7OJvC-UX4KeIJDW1CQQvhSA33OWgRrKccOcTG7dYYN0Y3CCH0KFd2FrdTIB",
+   "enrWithWaku2Store": "enr:-Iu4QMTV1vSz_hVXxnEL-Kyz-OmaXhlXYfbtfcT58oR1xfIZc8Zx-03VijJs6p-BY_hMWEhmmSa58xm-Uj8xc2utdSgBgmlkgnY0gmlwhAAAAACJc2VjcDI1NmsxoQIJIvZxmUAqxvruSndgZqyiVBHXo3D52SnKRa4tj6vejoN0Y3CC6mCFd2FrdTIC",
+   "enrWithWaku2RelayStore": "enr:-KO4QF5qdqu8SRMEqihxFVO1NYraC3QnZJeZnlKwsdgcRa0lTYMqP8_0lvg7ZjZILA4qxZraDnKkCtYpCPtcLerxH_iAgmlkgnY0gmlwhMCosvuKbXVsdGlhZGRyc4wACgTAqLL7Bh9C3QOJc2VjcDI1NmsxoQJmCOWSVb9E3ekrQ5Q9SnFa-KRLBs8yvBCjKgRdLT9I7IN0Y3CCH0KFd2FrdTID"
  }
}
@@ -0,0 +1,109 @@
+import { expect } from "chai";
+
+import { makeLogFileName, NOISE_KEY_1, Nwaku } from "../../test_utils";
+import { Protocols, Waku } from "../waku";
+
+import { ENR } from "./enr";
+
+describe("ENR Interop: nwaku", function () {
+  let waku: Waku;
+  let nwaku: Nwaku;
+
+  afterEach(async function () {
+    !!nwaku && nwaku.stop();
+    !!waku && waku.stop().catch((e) => console.log("Waku failed to stop", e));
+  });
+
+  it("Relay", async function () {
+    this.timeout(20_000);
+    nwaku = new Nwaku(makeLogFileName(this));
+    await nwaku.start({
+      relay: true,
+      store: false,
+      filter: false,
+      lightpush: false,
+    });
+    const multiAddrWithId = await nwaku.getMultiaddrWithId();
+
+    waku = await Waku.create({
+      staticNoiseKey: NOISE_KEY_1,
+    });
+    await waku.dial(multiAddrWithId);
+    await waku.waitForRemotePeer([Protocols.Relay]);
+
+    const nwakuInfo = await nwaku.info();
+    const nimPeerId = await nwaku.getPeerId();
+
+    expect(nwakuInfo.enrUri).to.not.be.undefined;
+    const dec = ENR.decodeTxt(nwakuInfo.enrUri ?? "");
+    expect(dec.peerId?.toB58String()).to.eq(nimPeerId.toB58String());
+    expect(dec.waku2).to.deep.eq({
+      relay: true,
+      store: false,
+      filter: false,
+      lightPush: false,
+    });
+  });
+
+  it("Relay + Store", async function () {
+    this.timeout(20_000);
+    nwaku = new Nwaku(makeLogFileName(this));
+    await nwaku.start({
+      relay: true,
+      store: true,
+      filter: false,
+      lightpush: false,
+    });
+    const multiAddrWithId = await nwaku.getMultiaddrWithId();
+
+    waku = await Waku.create({
+      staticNoiseKey: NOISE_KEY_1,
+    });
+    await waku.dial(multiAddrWithId);
+    await waku.waitForRemotePeer([Protocols.Relay]);
+
+    const nwakuInfo = await nwaku.info();
+    const nimPeerId = await nwaku.getPeerId();
+
+    expect(nwakuInfo.enrUri).to.not.be.undefined;
+    const dec = ENR.decodeTxt(nwakuInfo.enrUri ?? "");
+    expect(dec.peerId?.toB58String()).to.eq(nimPeerId.toB58String());
+    expect(dec.waku2).to.deep.eq({
+      relay: true,
+      store: true,
+      filter: false,
+      lightPush: false,
+    });
+  });
+
+  it("All", async function () {
+    this.timeout(20_000);
+    nwaku = new Nwaku(makeLogFileName(this));
+    await nwaku.start({
+      relay: true,
+      store: true,
+      filter: true,
+      lightpush: true,
+    });
+    const multiAddrWithId = await nwaku.getMultiaddrWithId();
+
+    waku = await Waku.create({
+      staticNoiseKey: NOISE_KEY_1,
+    });
+    await waku.dial(multiAddrWithId);
+    await waku.waitForRemotePeer([Protocols.Relay]);
+
+    const nwakuInfo = await nwaku.info();
+    const nimPeerId = await nwaku.getPeerId();
+
+    expect(nwakuInfo.enrUri).to.not.be.undefined;
+    const dec = ENR.decodeTxt(nwakuInfo.enrUri ?? "");
+    expect(dec.peerId?.toB58String()).to.eq(nimPeerId.toB58String());
+    expect(dec.waku2).to.deep.eq({
+      relay: true,
+      store: true,
+      filter: true,
+      lightPush: true,
+    });
+  });
+});
@@ -6,7 +6,8 @@ import { bytesToHex, hexToBytes, utf8ToBytes } from "../utils";

import { ERR_INVALID_ID } from "./constants";
import { ENR } from "./enr";
-import { createKeypairFromPeerId } from "./keypair";
+import { createKeypairFromPeerId, IKeypair } from "./keypair";
+import { Waku2 } from "./waku2_codec";

import { v4 } from "./index";
@@ -19,18 +20,26 @@ describe("ENR", function () {
    enr.setLocationMultiaddr(new Multiaddr("/ip4/18.223.219.100/udp/9000"));
    enr.multiaddrs = [
      new Multiaddr(
-       "/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"
+       "/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"
      ),
      new Multiaddr(
-       "/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
+       "/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"
      ),
      new Multiaddr(
        "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
      ),
    ];
-   const txt = enr.encodeTxt(keypair.privateKey);

+   enr.waku2 = {
+     relay: true,
+     store: false,
+     filter: true,
+     lightPush: false,
+   };
+
+   const txt = enr.encodeTxt(keypair.privateKey);
    const enr2 = ENR.decodeTxt(txt);

    if (!enr.signature) throw "enr.signature is undefined";
    if (!enr2.signature) throw "enr.signature is undefined";
@@ -41,14 +50,20 @@ describe("ENR", function () {
    expect(enr2.multiaddrs!.length).to.be.equal(3);
    const multiaddrsAsStr = enr2.multiaddrs!.map((ma) => ma.toString());
    expect(multiaddrsAsStr).to.include(
-     "/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"
+     "/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"
    );
    expect(multiaddrsAsStr).to.include(
-     "/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
+     "/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"
    );
    expect(multiaddrsAsStr).to.include(
      "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
    );
+   expect(enr2.waku2).to.deep.equal({
+     relay: true,
+     store: false,
+     filter: true,
+     lightPush: false,
+   });
  });

  it("should decode valid enr successfully", () => {
@@ -363,4 +378,117 @@ describe("ENR", function () {
      enr.ip6 = ip6;
    });
  });
+
+  describe("waku2 key round trip", async () => {
+    let peerId;
+    let enr: ENR;
+    let waku2Protocols: Waku2;
+    let keypair: IKeypair;
+
+    beforeEach(async function () {
+      peerId = await PeerId.create({ keyType: "secp256k1" });
+      enr = ENR.createFromPeerId(peerId);
+      keypair = createKeypairFromPeerId(peerId);
+      waku2Protocols = {
+        relay: false,
+        store: false,
+        filter: false,
+        lightPush: false,
+      };
+    });
+
+    it("should set field with all protocols disabled", () => {
+      enr.waku2 = waku2Protocols;
+
+      const txt = enr.encodeTxt(keypair.privateKey);
+      const decoded = ENR.decodeTxt(txt).waku2!;
+
+      expect(decoded.relay).to.equal(false);
+      expect(decoded.store).to.equal(false);
+      expect(decoded.filter).to.equal(false);
+      expect(decoded.lightPush).to.equal(false);
+    });
+
+    it("should set field with all protocols enabled", () => {
+      waku2Protocols.relay = true;
+      waku2Protocols.store = true;
+      waku2Protocols.filter = true;
+      waku2Protocols.lightPush = true;
+
+      enr.waku2 = waku2Protocols;
+      const txt = enr.encodeTxt(keypair.privateKey);
+      const decoded = ENR.decodeTxt(txt).waku2!;
+
+      expect(decoded.relay).to.equal(true);
+      expect(decoded.store).to.equal(true);
+      expect(decoded.filter).to.equal(true);
+      expect(decoded.lightPush).to.equal(true);
+    });
+
+    it("should set field with only RELAY enabled", () => {
+      waku2Protocols.relay = true;
+
+      enr.waku2 = waku2Protocols;
+      const txt = enr.encodeTxt(keypair.privateKey);
+      const decoded = ENR.decodeTxt(txt).waku2!;
+
+      expect(decoded.relay).to.equal(true);
+      expect(decoded.store).to.equal(false);
+      expect(decoded.filter).to.equal(false);
+      expect(decoded.lightPush).to.equal(false);
+    });
+
+    it("should set field with only STORE enabled", () => {
+      waku2Protocols.store = true;
+
+      enr.waku2 = waku2Protocols;
+      const txt = enr.encodeTxt(keypair.privateKey);
+      const decoded = ENR.decodeTxt(txt).waku2!;
+
+      expect(decoded.relay).to.equal(false);
+      expect(decoded.store).to.equal(true);
+      expect(decoded.filter).to.equal(false);
+      expect(decoded.lightPush).to.equal(false);
+    });
+
+    it("should set field with only FILTER enabled", () => {
+      waku2Protocols.filter = true;
+
+      enr.waku2 = waku2Protocols;
+      const txt = enr.encodeTxt(keypair.privateKey);
+      const decoded = ENR.decodeTxt(txt).waku2!;
+
+      expect(decoded.relay).to.equal(false);
+      expect(decoded.store).to.equal(false);
+      expect(decoded.filter).to.equal(true);
+      expect(decoded.lightPush).to.equal(false);
+    });
+
+    it("should set field with only LIGHTPUSH enabled", () => {
+      waku2Protocols.lightPush = true;
+
+      enr.waku2 = waku2Protocols;
+      const txt = enr.encodeTxt(keypair.privateKey);
+      const decoded = ENR.decodeTxt(txt).waku2!;
+
+      expect(decoded.relay).to.equal(false);
+      expect(decoded.store).to.equal(false);
+      expect(decoded.filter).to.equal(false);
+      expect(decoded.lightPush).to.equal(true);
+    });
+  });
+
+  describe("Waku2 key: decode", () => {
+    it("Relay + Store", function () {
+      const txt =
+        "enr:-Iu4QADPfXNCM6iYyte0pIdbMirIw_AsKR7J1DeJBysXDWz4DZvyjgIwpMt-sXTVUzLJdE9FaStVy2ZKtHUVQAH61-KAgmlkgnY0gmlwhMCosvuJc2VjcDI1NmsxoQI0OCNtPJtBayNgvFvKp-0YyCozcvE1rqm_V1W51nHVv4N0Y3CC6mCFd2FrdTIH";
+
+      const decoded = ENR.decodeTxt(txt).waku2!;
+
+      expect(decoded.relay).to.equal(true);
+      expect(decoded.store).to.equal(true);
+      expect(decoded.filter).to.equal(true);
+      expect(decoded.lightPush).to.equal(false);
+    });
+  });
});
@@ -22,6 +22,7 @@ import {
import { decodeMultiaddrs, encodeMultiaddrs } from "./multiaddrs_codec";
import { ENRKey, ENRValue, NodeId, SequenceNumber } from "./types";
import * as v4 from "./v4";
+import { decodeWaku2, encodeWaku2, Waku2 } from "./waku2_codec";

const dbg = debug("waku:enr");
@@ -420,6 +421,28 @@ export class ENR extends Map<ENRKey, ENRValue> {
    return [];
  }

+  /**
+   * Get the `waku2` field from ENR.
+   */
+  get waku2(): Waku2 | undefined {
+    const raw = this.get("waku2");
+    if (raw) return decodeWaku2(raw[0]);
+
+    return;
+  }
+
+  /**
+   * Set the `waku2` field on the ENR.
+   */
+  set waku2(waku2: Waku2 | undefined) {
+    if (waku2 === undefined) {
+      this.delete("waku2");
+    } else {
+      const byte = encodeWaku2(waku2);
+      this.set("waku2", new Uint8Array([byte]));
+    }
+  }
+
  verify(data: Uint8Array, signature: Uint8Array): boolean {
    if (!this.get("id") || this.id !== "v4") {
      throw new Error(ERR_INVALID_ID);
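
A compact round trip with the new accessors, assembled from pieces shown elsewhere in this diff (a sketch, not part of the change itself):

const peerId = await PeerId.create({ keyType: "secp256k1" });
const enr = ENR.createFromPeerId(peerId);
const keypair = createKeypairFromPeerId(peerId);

// The setter packs the four flags into a single byte stored under "waku2".
enr.waku2 = { relay: true, store: true, filter: false, lightPush: false };

const txt = enr.encodeTxt(keypair.privateKey);
console.log(ENR.decodeTxt(txt).waku2); // { relay: true, store: true, filter: false, lightPush: false }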
@@ -5,3 +5,4 @@ export * from "./enr";
export * from "./types";
export * from "./create";
export * from "./keypair";
+export * from "./waku2_codec";
@@ -0,0 +1,166 @@
+import { expect } from "chai";
+
+import { decodeWaku2, encodeWaku2, Waku2 } from "./waku2_codec";
+
+const waku2FieldEncodings = {
+  relay: 1,
+  store: 2,
+  filter: 4,
+  lightPush: 8,
+  allTrue: 15,
+  allFalse: 0,
+  relayAndFilterTrue: 5,
+  storeAndLightPushTrue: 10,
+};
+
+describe("ENR waku2 codec", function () {
+  let protocols: Waku2;
+
+  beforeEach(function () {
+    protocols = {
+      relay: false,
+      store: false,
+      filter: false,
+      lightPush: false,
+    };
+  });
+
+  context("Encoding", function () {
+    it("should be able to encode the field with only RELAY set to true", () => {
+      protocols.relay = true;
+
+      const byte = encodeWaku2(protocols);
+
+      expect(byte).to.eq(waku2FieldEncodings.relay);
+    });
+
+    it("should be able to encode the field with only STORE set to true", () => {
+      protocols.store = true;
+
+      const byte = encodeWaku2(protocols);
+
+      expect(byte).to.eq(waku2FieldEncodings.store);
+    });
+
+    it("should be able to encode the field with only FILTER set to true", () => {
+      protocols.filter = true;
+
+      const byte = encodeWaku2(protocols);
+
+      expect(byte).to.eq(waku2FieldEncodings.filter);
+    });
+
+    it("should be able to encode the field with only LIGHTPUSH set to true", () => {
+      protocols.lightPush = true;
+
+      const byte = encodeWaku2(protocols);
+
+      expect(byte).to.eq(waku2FieldEncodings.lightPush);
+    });
+
+    it("should be able to encode the field with ALL protocols set to true", () => {
+      protocols.relay = true;
+      protocols.store = true;
+      protocols.filter = true;
+      protocols.lightPush = true;
+
+      const byte = encodeWaku2(protocols);
+
+      expect(byte).to.eq(waku2FieldEncodings.allTrue);
+    });
+
+    it("should be able to encode the field with ALL protocols set to false", () => {
+      const byte = encodeWaku2(protocols);
+
+      expect(byte).to.eq(waku2FieldEncodings.allFalse);
+    });
+
+    it("should be able to encode the field with RELAY and FILTER protocols set to true", () => {
+      protocols.relay = true;
+      protocols.filter = true;
+
+      const byte = encodeWaku2(protocols);
+
+      expect(byte).to.eq(waku2FieldEncodings.relayAndFilterTrue);
+    });
+
+    it("should be able to encode the field with STORE and LIGHTPUSH protocols set to true", () => {
+      protocols.store = true;
+      protocols.lightPush = true;
+
+      const byte = encodeWaku2(protocols);
+
+      expect(byte).to.eq(waku2FieldEncodings.storeAndLightPushTrue);
+    });
+  });
+
+  context("Decoding", function () {
+    it("should be able to decode the field with only RELAY set to true", () => {
+      const byte = waku2FieldEncodings.relay;
+      const result = decodeWaku2(byte);
+
+      expect(result.relay).to.be.true;
+    });
+
+    it("should be able to decode the field with only FILTER set to true", () => {
+      const byte = waku2FieldEncodings.filter;
+      const result = decodeWaku2(byte);
+
+      expect(result.filter).to.be.true;
+    });
+
+    it("should be able to decode the field with only STORE set to true", () => {
+      const byte = waku2FieldEncodings.store;
+      const result = decodeWaku2(byte);
+
+      expect(result.store).to.be.true;
+    });
+
+    it("should be able to decode the field with only LIGHTPUSH set to true", () => {
+      const byte = waku2FieldEncodings.lightPush;
+      const result = decodeWaku2(byte);
+
+      expect(result.lightPush).to.be.true;
+    });
+
+    it("should be able to decode the field with ALL protocols set to true", () => {
+      const byte = waku2FieldEncodings.allTrue;
+      const result = decodeWaku2(byte);
+
+      expect(result.relay).to.be.true;
+      expect(result.store).to.be.true;
+      expect(result.filter).to.be.true;
+      expect(result.lightPush).to.be.true;
+    });
+
+    it("should be able to decode the field with ALL protocols set to false", () => {
+      const byte = waku2FieldEncodings.allFalse;
+      const result = decodeWaku2(byte);
+
+      expect(result.relay).to.be.false;
+      expect(result.store).to.be.false;
+      expect(result.filter).to.be.false;
+      expect(result.lightPush).to.be.false;
+    });
+
+    it("should be able to decode the field with RELAY and FILTER protocols set to true", () => {
+      const byte = waku2FieldEncodings.relayAndFilterTrue;
+      const result = decodeWaku2(byte);
+
+      expect(result.relay).to.be.true;
+      expect(result.store).to.be.false;
+      expect(result.filter).to.be.true;
+      expect(result.lightPush).to.be.false;
+    });
+
+    it("should be able to decode the field with STORE and LIGHTPUSH protocols set to true", () => {
+      const byte = waku2FieldEncodings.storeAndLightPushTrue;
+      const result = decodeWaku2(byte);
+
+      expect(result.relay).to.be.false;
+      expect(result.store).to.be.true;
+      expect(result.filter).to.be.false;
+      expect(result.lightPush).to.be.true;
+    });
+  });
+});
@@ -0,0 +1,39 @@
+export interface Waku2 {
+  relay: boolean;
+  store: boolean;
+  filter: boolean;
+  lightPush: boolean;
+}
+
+export function encodeWaku2(protocols: Waku2): number {
+  let byte = 0;
+
+  if (protocols.lightPush) byte += 1;
+  byte = byte << 1;
+  if (protocols.filter) byte += 1;
+  byte = byte << 1;
+  if (protocols.store) byte += 1;
+  byte = byte << 1;
+  if (protocols.relay) byte += 1;
+
+  return byte;
+}
+
+export function decodeWaku2(byte: number): Waku2 {
+  const waku2 = {
+    relay: false,
+    store: false,
+    filter: false,
+    lightPush: false,
+  };
+
+  if (byte % 2) waku2.relay = true;
+  byte = byte >> 1;
+  if (byte % 2) waku2.store = true;
+  byte = byte >> 1;
+  if (byte % 2) waku2.filter = true;
+  byte = byte >> 1;
+  if (byte % 2) waku2.lightPush = true;
+
+  return waku2;
+}
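
The codec packs the flags into the low nibble of a single byte: relay is bit 0, store bit 1, filter bit 2, lightPush bit 3. That is why the spec file above expects relay+filter to encode to 5 (0b0101) and store+lightPush to 10 (0b1010). For example:

encodeWaku2({ relay: true, store: false, filter: true, lightPush: false }); // 5
decodeWaku2(10); // { relay: false, store: true, filter: false, lightPush: true }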
@@ -38,6 +38,8 @@ export interface Args {
  logLevel?: LogLevel;
  persistMessages?: boolean;
  lightpush?: boolean;
  filter?: boolean;
  store?: boolean;
  topics?: string;
  rpcPrivate?: boolean;
  websocketSupport?: boolean;
@@ -427,4 +429,5 @@ export function bytesToHex(buffer: Uint8Array): string {
interface RpcInfoResponse {
  // multiaddrs including peer id.
  listenAddresses: string[];
+  enrUri?: string;
}