Upgrade to latest libp2p versions

And a few other packages
Franck Royer 2021-06-21 16:37:31 +10:00
parent dbab0cc582
commit 6cb92dd4b9
No known key found for this signature in database
GPG Key ID: A82ED75A8DFC50A4
10 changed files with 16816 additions and 6410 deletions


@@ -14,6 +14,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Changed
 - **Breaking**: Auto select peer if none provided for store and light push protocols.
+- Upgrade to `libp2p@0.31.7` and `libp2p-gossipsub@0.10.0` to avoid `TextEncoder` errors in ReactJS tests.
 ## [0.7.0] - 2021-06-15

package-lock.json (generated): 23147 changed lines; file diff suppressed because it is too large.


@@ -59,8 +59,8 @@
     "debug": "^4.3.1",
     "it-concat": "^2.0.0",
     "it-length-prefixed": "^5.0.2",
-    "libp2p": "^0.31.0",
-    "libp2p-gossipsub": "^0.9.0",
+    "libp2p": "^0.31.7",
+    "libp2p-gossipsub": "^0.10.0",
     "libp2p-mplex": "^0.10.3",
     "libp2p-noise": "^3.0.0",
     "libp2p-tcp": "^0.15.4",


@@ -71,7 +71,7 @@ export class Waku {
     lightPush: WakuLightPush
   ) {
     this.libp2p = libp2p;
-    this.relay = (libp2p.pubsub as unknown) as WakuRelay;
+    this.relay = libp2p.pubsub as unknown as WakuRelay;
     this.store = store;
     this.lightPush = lightPush;
     this.keepAliveTimers = {};
@@ -154,9 +154,7 @@ export class Waku {
    *
    * @param peer The peer to dial
    */
-  async dial(
-    peer: PeerId | Multiaddr | string
-  ): Promise<{
+  async dial(peer: PeerId | Multiaddr | string): Promise<{
     stream: import('libp2p-interfaces/src/stream-muxer/types').MuxedStream;
     protocol: string;
   }> {
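The change to dial above is formatting only; the signature still accepts a PeerId, a Multiaddr, or a string. A minimal usage sketch of that signature follows; the import path and the peer address are assumptions for illustration, not part of this commit.

import { Waku } from 'js-waku'; // assumed entry point, for illustration only

// Hypothetical caller of the `dial` signature shown above; the address is a placeholder.
async function connectToPeer(waku: Waku): Promise<void> {
  const { protocol } = await waku.dial(
    '/ip4/127.0.0.1/tcp/60000/p2p/16Uiu2HAmPlaceholderPeerId'
  );
  console.log('Negotiated protocol:', protocol);
}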


@@ -58,12 +58,10 @@ describe('Waku Relay', () => {
   });

   it('Subscribe', async function () {
-    const subscribers1 = waku1.libp2p.pubsub.getSubscribers(
-      DefaultPubsubTopic
-    );
-    const subscribers2 = waku2.libp2p.pubsub.getSubscribers(
-      DefaultPubsubTopic
-    );
+    const subscribers1 =
+      waku1.libp2p.pubsub.getSubscribers(DefaultPubsubTopic);
+    const subscribers2 =
+      waku2.libp2p.pubsub.getSubscribers(DefaultPubsubTopic);

     expect(subscribers1).to.contain(waku2.libp2p.peerId.toB58String());
     expect(subscribers2).to.contain(waku1.libp2p.peerId.toB58String());
@@ -267,9 +265,8 @@ describe('Waku Relay', () => {
   it('nim subscribes to js', async function () {
     const nimPeerId = await nimWaku.getPeerId();

-    const subscribers = waku.libp2p.pubsub.getSubscribers(
-      DefaultPubsubTopic
-    );
+    const subscribers =
+      waku.libp2p.pubsub.getSubscribers(DefaultPubsubTopic);

     expect(subscribers).to.contain(nimPeerId.toB58String());
   });


@@ -456,9 +456,8 @@ export class WakuRelay extends Gossipsub {
       const peerId = PeerId.createFromB58String(p);
       px.push({
         peerID: peerId.toBytes(),
-        signedPeerRecord: this._libp2p.peerStore.addressBook.getRawEnvelope(
-          peerId
-        ),
+        signedPeerRecord:
+          this._libp2p.peerStore.addressBook.getRawEnvelope(peerId),
       });
     });
   }


@@ -132,8 +132,8 @@ const btoa: (bin: string) => string =
   ((bin) => globalThis.Buffer.from(bin, 'binary').toString('base64'));
 function base64FromBytes(arr: Uint8Array): string {
   const bin: string[] = [];
-  for (let i = 0; i < arr.byteLength; ++i) {
-    bin.push(String.fromCharCode(arr[i]));
+  for (const byte of arr) {
+    bin.push(String.fromCharCode(byte));
   }
   return btoa(bin.join(''));
 }
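This hunk, repeated in two more files below, only swaps an index loop for for...of; iterating a Uint8Array yields each byte as a number, so the output is unchanged. A self-contained sketch of the same pattern, for illustration only (not part of this commit):

// Standalone copy of the base64FromBytes pattern above.
const btoa: (bin: string) => string =
  globalThis.btoa ||
  ((bin) => globalThis.Buffer.from(bin, 'binary').toString('base64'));

function base64FromBytes(arr: Uint8Array): string {
  const bin: string[] = [];
  // for...of over a Uint8Array iterates byte values (numbers), same as arr[i].
  for (const byte of arr) {
    bin.push(String.fromCharCode(byte));
  }
  return btoa(bin.join(''));
}

console.log(base64FromBytes(Uint8Array.from([0x68, 0x69]))); // "aGk=" ("hi")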


@@ -150,8 +150,8 @@ const btoa: (bin: string) => string =
   ((bin) => globalThis.Buffer.from(bin, 'binary').toString('base64'));
 function base64FromBytes(arr: Uint8Array): string {
   const bin: string[] = [];
-  for (let i = 0; i < arr.byteLength; ++i) {
-    bin.push(String.fromCharCode(arr[i]));
+  for (const byte of arr) {
+    bin.push(String.fromCharCode(byte));
   }
   return btoa(bin.join(''));
 }


@@ -660,8 +660,8 @@ const btoa: (bin: string) => string =
   ((bin) => globalThis.Buffer.from(bin, 'binary').toString('base64'));
 function base64FromBytes(arr: Uint8Array): string {
   const bin: string[] = [];
-  for (let i = 0; i < arr.byteLength; ++i) {
-    bin.push(String.fromCharCode(arr[i]));
+  for (const byte of arr) {
+    bin.push(String.fromCharCode(byte));
   }
   return btoa(bin.join(''));
 }


@@ -30,30 +30,8 @@ it('Convert utf-8 string to hex', function () {
 it('Convert buffer to hex', function () {
   const buf = Uint8Array.from([
-    0x54,
-    0x68,
-    0x69,
-    0x73,
-    0x20,
-    0x69,
-    0x73,
-    0x20,
-    0x61,
-    0x6e,
-    0x20,
-    0x75,
-    0x74,
-    0x66,
-    0x2d,
-    0x38,
-    0x20,
-    0x73,
-    0x74,
-    0x72,
-    0x69,
-    0x6e,
-    0x67,
-    0x2e,
+    0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, 0x6e, 0x20, 0x75,
+    0x74, 0x66, 0x2d, 0x38, 0x20, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x2e,
   ]);
   const expected = '5468697320697320616e207574662d3820737472696e672e';
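As a sanity check of the test vector above (illustrative only, not part of the diff): the bytes decode to the UTF-8 string "This is an utf-8 string.", whose hex encoding matches the expected constant.

// Decodes the test bytes and re-derives the expected hex string.
const bytes = Uint8Array.from([
  0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, 0x6e, 0x20, 0x75,
  0x74, 0x66, 0x2d, 0x38, 0x20, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x2e,
]);
console.log(new TextDecoder().decode(bytes)); // "This is an utf-8 string."
console.log(Array.from(bytes, (b) => b.toString(16).padStart(2, '0')).join(''));
// "5468697320697320616e207574662d3820737472696e672e"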