feat(major): drop simple-get (#443)

BREAKING CHANGE: drop simple-get

* perf: drop simple-get

* feat: undici agent and socks

* fix: undici as dev dependency

* feat: require user passed proxy objects for http and ws

* chore: include undici for tests
This commit is contained in:
Cas_ 2023-10-31 10:51:04 +01:00 committed by GitHub
parent e14738bd84
commit bce64e155d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 241 additions and 229 deletions

View File

@@ -55,16 +55,16 @@ npm install bittorrent-tracker
To connect to a tracker, just do this: To connect to a tracker, just do this:
```js ```js
var Client = require('bittorrent-tracker') import Client from 'bittorrent-tracker'
var requiredOpts = { const requiredOpts = {
infoHash: new Buffer('012345678901234567890'), // hex string or Buffer infoHash: new Buffer('012345678901234567890'), // hex string or Buffer
peerId: new Buffer('01234567890123456789'), // hex string or Buffer peerId: new Buffer('01234567890123456789'), // hex string or Buffer
announce: [], // list of tracker server urls announce: [], // list of tracker server urls
port: 6881 // torrent client port, (in browser, optional) port: 6881 // torrent client port, (in browser, optional)
} }
var optionalOpts = { const optionalOpts = {
// RTCPeerConnection config object (only used in browser) // RTCPeerConnection config object (only used in browser)
rtcConfig: {}, rtcConfig: {},
// User-Agent header for http requests // User-Agent header for http requests
@@ -81,47 +81,24 @@ var optionalOpts = {
customParam: 'blah' // custom parameters supported customParam: 'blah' // custom parameters supported
} }
}, },
// Proxy config object // Proxy options (used to proxy requests in node)
proxyOpts: { proxyOpts: {
// Socks proxy options (used to proxy requests in node) // For WSS trackers this is always a http.Agent
socksProxy: { // For UDP trackers this is an object of options for the Socks Connection
// Configuration from socks module (https://github.com/JoshGlazebrook/socks) // For HTTP trackers this is either an undici Agent if using Node16 or later, or http.Agent if using versions prior to Node 16, ex:
proxy: { // import Socks from 'socks'
// IP Address of Proxy (Required) // proxyOpts.socksProxy = new Socks.Agent(optionsObject, isHttps)
ipaddress: "1.2.3.4", // or if using Node 16 or later
// TCP Port of Proxy (Required) // import { socksDispatcher } from 'fetch-socks'
port: 1080, // proxyOpts.socksProxy = socksDispatcher(optionsObject)
// Proxy Type [4, 5] (Required) socksProxy: new SocksProxy(socksOptionsObject),
// Note: 4 works for both 4 and 4a. // Populated with socksProxy if it's provided
// Type 4 does not support UDP association relay httpAgent: new http.Agent(agentOptionsObject),
type: 5, httpsAgent: new https.Agent(agentOptionsObject)
// SOCKS 4 Specific:
// UserId used when making a SOCKS 4/4a request. (Optional)
userid: "someuserid",
// SOCKS 5 Specific:
// Authentication used for SOCKS 5 (when it's required) (Optional)
authentication: {
username: "Josh",
password: "somepassword"
}
},
// Amount of time to wait for a connection to be established. (Optional)
// - defaults to 10000ms (10 seconds)
timeout: 10000
},
// NodeJS HTTP agents (used to proxy HTTP and Websocket requests in node)
// Populated with Socks.Agent if socksProxy is provided
httpAgent: {},
httpsAgent: {}
}, },
} }
var client = new Client(requiredOpts) const client = new Client(requiredOpts)
client.on('error', function (err) { client.on('error', function (err) {
// fatal client error! // fatal client error!
@@ -182,7 +159,7 @@ client.on('scrape', function (data) {
To start a BitTorrent tracker server to track swarms of peers: To start a BitTorrent tracker server to track swarms of peers:
```js ```js
const Server = require('bittorrent-tracker').Server import { Server } from 'bittorrent-tracker'
const server = new Server({ const server = new Server({
udp: true, // enable udp server? [default=true] udp: true, // enable udp server? [default=true]
@@ -289,7 +266,7 @@ The http server will handle requests for the following paths: `/announce`, `/scr
Scraping multiple torrent info is possible with a static `Client.scrape` method: Scraping multiple torrent info is possible with a static `Client.scrape` method:
```js ```js
var Client = require('bittorrent-tracker') import Client from 'bittorrent-tracker'
Client.scrape({ announce: announceUrl, infoHash: [ infoHash1, infoHash2 ]}, function (err, results) { Client.scrape({ announce: announceUrl, infoHash: [ infoHash1, infoHash2 ]}, function (err, results) {
results[infoHash1].announce results[infoHash1].announce
results[infoHash1].infoHash results[infoHash1].infoHash

View File

@@ -1,9 +1,7 @@
import arrayRemove from 'unordered-array-remove' import arrayRemove from 'unordered-array-remove'
import bencode from 'bencode' import bencode from 'bencode'
import clone from 'clone'
import Debug from 'debug' import Debug from 'debug'
import get from 'simple-get' import fetch from 'cross-fetch-ponyfill'
import Socks from 'socks'
import { bin2hex, hex2bin, arr2text, text2arr, arr2hex } from 'uint8-util' import { bin2hex, hex2bin, arr2text, text2arr, arr2hex } from 'uint8-util'
import common from '../common.js' import common from '../common.js'
@@ -13,6 +11,14 @@ import compact2string from 'compact2string'
const debug = Debug('bittorrent-tracker:http-tracker') const debug = Debug('bittorrent-tracker:http-tracker')
const HTTP_SCRAPE_SUPPORT = /\/(announce)[^/]*$/ const HTTP_SCRAPE_SUPPORT = /\/(announce)[^/]*$/
function abortTimeout (ms) {
const controller = new AbortController()
setTimeout(() => {
controller.abort()
}, ms).unref?.()
return controller
}
/** /**
* HTTP torrent tracker client (for an individual tracker) * HTTP torrent tracker client (for an individual tracker)
* *
@@ -112,70 +118,72 @@ class HTTPTracker extends Tracker {
} }
} }
_request (requestUrl, params, cb) { async _request (requestUrl, params, cb) {
const self = this
const parsedUrl = new URL(requestUrl + (requestUrl.indexOf('?') === -1 ? '?' : '&') + common.querystringStringify(params)) const parsedUrl = new URL(requestUrl + (requestUrl.indexOf('?') === -1 ? '?' : '&') + common.querystringStringify(params))
let agent let agent
if (this.client._proxyOpts) { if (this.client._proxyOpts) {
agent = parsedUrl.protocol === 'https:' ? this.client._proxyOpts.httpsAgent : this.client._proxyOpts.httpAgent agent = parsedUrl.protocol === 'https:' ? this.client._proxyOpts.httpsAgent : this.client._proxyOpts.httpAgent
if (!agent && this.client._proxyOpts.socksProxy) { if (!agent && this.client._proxyOpts.socksProxy) {
agent = new Socks.Agent(clone(this.client._proxyOpts.socksProxy), (parsedUrl.protocol === 'https:')) agent = this.client._proxyOpts.socksProxy
} }
} }
const cleanup = () => {
if (!controller.signal.aborted) {
arrayRemove(this.cleanupFns, this.cleanupFns.indexOf(cleanup))
controller.abort()
controller = null
}
if (this.maybeDestroyCleanup) this.maybeDestroyCleanup()
}
this.cleanupFns.push(cleanup) this.cleanupFns.push(cleanup)
let request = get.concat({ let res
url: parsedUrl.toString(), let controller = abortTimeout(common.REQUEST_TIMEOUT)
agent, try {
timeout: common.REQUEST_TIMEOUT, res = await fetch(parsedUrl.toString(), {
headers: { agent,
'user-agent': this.client._userAgent || '' signal: controller.signal,
} dispatcher: agent,
}, onResponse) headers: {
'user-agent': this.client._userAgent || ''
function cleanup () { }
if (request) { })
arrayRemove(self.cleanupFns, self.cleanupFns.indexOf(cleanup)) } catch (err) {
request.abort()
request = null
}
if (self.maybeDestroyCleanup) self.maybeDestroyCleanup()
}
function onResponse (err, res, data) {
cleanup()
if (self.destroyed) return
if (err) return cb(err) if (err) return cb(err)
if (res.statusCode !== 200) {
return cb(new Error(`Non-200 response code ${res.statusCode} from ${self.announceUrl}`))
}
if (!data || data.length === 0) {
return cb(new Error(`Invalid tracker response from${self.announceUrl}`))
}
try {
data = bencode.decode(data)
} catch (err) {
return cb(new Error(`Error decoding tracker response: ${err.message}`))
}
const failure = data['failure reason'] && arr2text(data['failure reason'])
if (failure) {
debug(`failure from ${requestUrl} (${failure})`)
return cb(new Error(failure))
}
const warning = data['warning message'] && arr2text(data['warning message'])
if (warning) {
debug(`warning from ${requestUrl} (${warning})`)
self.client.emit('warning', new Error(warning))
}
debug(`response from ${requestUrl}`)
cb(null, data)
} }
let data = new Uint8Array(await res.arrayBuffer())
cleanup()
if (this.destroyed) return
if (res.status !== 200) {
return cb(new Error(`Non-200 response code ${res.statusCode} from ${this.announceUrl}`))
}
if (!data || data.length === 0) {
return cb(new Error(`Invalid tracker response from${this.announceUrl}`))
}
try {
data = bencode.decode(data)
} catch (err) {
return cb(new Error(`Error decoding tracker response: ${err.message}`))
}
const failure = data['failure reason'] && arr2text(data['failure reason'])
if (failure) {
debug(`failure from ${requestUrl} (${failure})`)
return cb(new Error(failure))
}
const warning = data['warning message'] && arr2text(data['warning message'])
if (warning) {
debug(`warning from ${requestUrl} (${warning})`)
this.client.emit('warning', new Error(warning))
}
debug(`response from ${requestUrl}`)
cb(null, data)
} }
_onAnnounceResponse (data) { _onAnnounceResponse (data) {

View File

@@ -1,8 +1,6 @@
import clone from 'clone'
import Debug from 'debug' import Debug from 'debug'
import Peer from '@thaunknown/simple-peer/lite.js' import Peer from '@thaunknown/simple-peer/lite.js'
import Socket from '@thaunknown/simple-websocket' import Socket from '@thaunknown/simple-websocket'
import Socks from 'socks'
import { arr2text, arr2hex, hex2bin, bin2hex, randomBytes } from 'uint8-util' import { arr2text, arr2hex, hex2bin, bin2hex, randomBytes } from 'uint8-util'
import common from '../common.js' import common from '../common.js'
@@ -185,7 +183,7 @@ class WebSocketTracker extends Tracker {
if (this.client._proxyOpts) { if (this.client._proxyOpts) {
agent = parsedUrl.protocol === 'wss:' ? this.client._proxyOpts.httpsAgent : this.client._proxyOpts.httpAgent agent = parsedUrl.protocol === 'wss:' ? this.client._proxyOpts.httpsAgent : this.client._proxyOpts.httpAgent
if (!agent && this.client._proxyOpts.socksProxy) { if (!agent && this.client._proxyOpts.socksProxy) {
agent = new Socks.Agent(clone(this.client._proxyOpts.socksProxy), (parsedUrl.protocol === 'wss:')) agent = this.client._proxyOpts.socksProxy
} }
} }
this.socket = socketPool[this.announceUrl] = new Socket({ url: this.announceUrl, agent }) this.socket = socketPool[this.announceUrl] = new Socket({ url: this.announceUrl, agent })

View File

@@ -34,6 +34,7 @@
"chrome-dgram": "^3.0.6", "chrome-dgram": "^3.0.6",
"clone": "^2.0.0", "clone": "^2.0.0",
"compact2string": "^1.4.1", "compact2string": "^1.4.1",
"cross-fetch-ponyfill": "^1.0.1",
"debug": "^4.1.1", "debug": "^4.1.1",
"ip": "^1.1.5", "ip": "^1.1.5",
"lru": "^3.1.0", "lru": "^3.1.0",
@@ -43,7 +44,6 @@
"random-iterate": "^1.0.1", "random-iterate": "^1.0.1",
"run-parallel": "^1.2.0", "run-parallel": "^1.2.0",
"run-series": "^1.1.9", "run-series": "^1.1.9",
"simple-get": "^4.0.0",
"socks": "^2.0.0", "socks": "^2.0.0",
"string2compact": "^2.0.0", "string2compact": "^2.0.0",
"uint8-util": "^2.1.9", "uint8-util": "^2.1.9",
@@ -57,6 +57,7 @@
"semantic-release": "21.1.2", "semantic-release": "21.1.2",
"standard": "*", "standard": "*",
"tape": "5.7.2", "tape": "5.7.2",
"undici": "^5.27.0",
"webtorrent-fixtures": "2.0.2", "webtorrent-fixtures": "2.0.2",
"wrtc": "0.4.7" "wrtc": "0.4.7"
}, },

View File

@@ -4,6 +4,7 @@ import http from 'http'
import fixtures from 'webtorrent-fixtures' import fixtures from 'webtorrent-fixtures'
import net from 'net' import net from 'net'
import test from 'tape' import test from 'tape'
import undici from 'undici'
const peerId1 = Buffer.from('01234567890123456789') const peerId1 = Buffer.from('01234567890123456789')
const peerId2 = Buffer.from('12345678901234567890') const peerId2 = Buffer.from('12345678901234567890')
@@ -572,12 +573,29 @@ function testClientStartHttpAgent (t, serverType) {
t.plan(5) t.plan(5)
common.createServer(t, serverType, function (server, announceUrl) { common.createServer(t, serverType, function (server, announceUrl) {
const agent = new http.Agent() let agent
let agentUsed = false if (global.fetch && serverType !== 'ws') {
agent.createConnection = function (opts, fn) { const connector = undici.buildConnector({ rejectUnauthorized: false })
agentUsed = true agent = new undici.Agent({
return net.createConnection(opts, fn) connect (opts, cb) {
agentUsed = true
connector(opts, (err, socket) => {
if (err) {
cb(err, null)
} else {
cb(null, socket)
}
})
}
})
} else {
agent = new http.Agent()
agent.createConnection = function (opts, fn) {
agentUsed = true
return net.createConnection(opts, fn)
}
} }
let agentUsed = false
const client = new Client({ const client = new Client({
infoHash: fixtures.leaves.parsedTorrent.infoHash, infoHash: fixtures.leaves.parsedTorrent.infoHash,
announce: announceUrl, announce: announceUrl,

View File

@@ -3,7 +3,7 @@ import Client from '../index.js'
import common from './common.js' import common from './common.js'
import commonLib from '../lib/common.js' import commonLib from '../lib/common.js'
import fixtures from 'webtorrent-fixtures' import fixtures from 'webtorrent-fixtures'
import get from 'simple-get' import fetch from 'cross-fetch-ponyfill'
import test from 'tape' import test from 'tape'
import { hex2bin } from 'uint8-util' import { hex2bin } from 'uint8-util'
@@ -151,44 +151,47 @@ test('udp: MULTI scrape using Client.scrape static method', t => {
}) })
test('server: multiple info_hash scrape (manual http request)', t => { test('server: multiple info_hash scrape (manual http request)', t => {
t.plan(13) t.plan(12)
const binaryInfoHash1 = hex2bin(fixtures.leaves.parsedTorrent.infoHash) const binaryInfoHash1 = hex2bin(fixtures.leaves.parsedTorrent.infoHash)
const binaryInfoHash2 = hex2bin(fixtures.alice.parsedTorrent.infoHash) const binaryInfoHash2 = hex2bin(fixtures.alice.parsedTorrent.infoHash)
common.createServer(t, 'http', (server, announceUrl) => { common.createServer(t, 'http', async (server, announceUrl) => {
const scrapeUrl = announceUrl.replace('/announce', '/scrape') const scrapeUrl = announceUrl.replace('/announce', '/scrape')
const url = `${scrapeUrl}?${commonLib.querystringStringify({ const url = `${scrapeUrl}?${commonLib.querystringStringify({
info_hash: [binaryInfoHash1, binaryInfoHash2] info_hash: [binaryInfoHash1, binaryInfoHash2]
})}` })}`
let res
get.concat(url, (err, res, data) => { try {
res = await fetch(url)
} catch (err) {
t.error(err) t.error(err)
}
let data = Buffer.from(await res.arrayBuffer())
t.equal(res.statusCode, 200) t.equal(res.status, 200)
data = bencode.decode(data) data = bencode.decode(data)
t.ok(data.files) t.ok(data.files)
t.equal(Object.keys(data.files).length, 2) t.equal(Object.keys(data.files).length, 2)
t.ok(data.files[binaryInfoHash1]) t.ok(data.files[binaryInfoHash1])
t.equal(typeof data.files[binaryInfoHash1].complete, 'number') t.equal(typeof data.files[binaryInfoHash1].complete, 'number')
t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number') t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number')
t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number') t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number')
t.ok(data.files[binaryInfoHash2]) t.ok(data.files[binaryInfoHash2])
t.equal(typeof data.files[binaryInfoHash2].complete, 'number') t.equal(typeof data.files[binaryInfoHash2].complete, 'number')
t.equal(typeof data.files[binaryInfoHash2].incomplete, 'number') t.equal(typeof data.files[binaryInfoHash2].incomplete, 'number')
t.equal(typeof data.files[binaryInfoHash2].downloaded, 'number') t.equal(typeof data.files[binaryInfoHash2].downloaded, 'number')
server.close(() => { t.pass('server closed') }) server.close(() => { t.pass('server closed') })
})
}) })
}) })
test('server: all info_hash scrape (manual http request)', t => { test('server: all info_hash scrape (manual http request)', t => {
t.plan(10) t.plan(9)
const binaryInfoHash = hex2bin(fixtures.leaves.parsedTorrent.infoHash) const binaryInfoHash = hex2bin(fixtures.leaves.parsedTorrent.infoHash)
@@ -207,24 +210,28 @@ test('server: all info_hash scrape (manual http request)', t => {
client.start() client.start()
server.once('start', () => { server.once('start', async () => {
// now do a scrape of everything by omitting the info_hash param // now do a scrape of everything by omitting the info_hash param
get.concat(scrapeUrl, (err, res, data) => { let res
try {
res = await fetch(scrapeUrl)
} catch (err) {
t.error(err) t.error(err)
}
let data = Buffer.from(await res.arrayBuffer())
t.equal(res.statusCode, 200) t.equal(res.status, 200)
data = bencode.decode(data) data = bencode.decode(data)
t.ok(data.files) t.ok(data.files)
t.equal(Object.keys(data.files).length, 1) t.equal(Object.keys(data.files).length, 1)
t.ok(data.files[binaryInfoHash]) t.ok(data.files[binaryInfoHash])
t.equal(typeof data.files[binaryInfoHash].complete, 'number') t.equal(typeof data.files[binaryInfoHash].complete, 'number')
t.equal(typeof data.files[binaryInfoHash].incomplete, 'number') t.equal(typeof data.files[binaryInfoHash].incomplete, 'number')
t.equal(typeof data.files[binaryInfoHash].downloaded, 'number') t.equal(typeof data.files[binaryInfoHash].downloaded, 'number')
client.destroy(() => { t.pass('client destroyed') }) client.destroy(() => { t.pass('client destroyed') })
server.close(() => { t.pass('server closed') }) server.close(() => { t.pass('server closed') })
})
}) })
}) })
}) })

View File

@@ -1,7 +1,7 @@
import Client from '../index.js' import Client from '../index.js'
import commonTest from './common.js' import commonTest from './common.js'
import fixtures from 'webtorrent-fixtures' import fixtures from 'webtorrent-fixtures'
import get from 'simple-get' import fetch from 'cross-fetch-ponyfill'
import test from 'tape' import test from 'tape'
const peerId = Buffer.from('-WW0091-4ea5886ce160') const peerId = Buffer.from('-WW0091-4ea5886ce160')
@@ -30,89 +30,94 @@ function parseHtml (html) {
} }
test('server: get empty stats', t => { test('server: get empty stats', t => {
t.plan(11) t.plan(10)
commonTest.createServer(t, 'http', (server, announceUrl) => { commonTest.createServer(t, 'http', async (server, announceUrl) => {
const url = announceUrl.replace('/announce', '/stats') const url = announceUrl.replace('/announce', '/stats')
get.concat(url, (err, res, data) => { let res
try {
res = await fetch(url)
} catch (err) {
t.error(err) t.error(err)
}
const data = Buffer.from(await res.arrayBuffer())
const stats = parseHtml(data.toString()) const stats = parseHtml(data.toString())
t.equal(res.statusCode, 200) t.equal(res.status, 200)
t.equal(stats.torrents, 0) t.equal(stats.torrents, 0)
t.equal(stats.activeTorrents, 0) t.equal(stats.activeTorrents, 0)
t.equal(stats.peersAll, 0) t.equal(stats.peersAll, 0)
t.equal(stats.peersSeederOnly, 0) t.equal(stats.peersSeederOnly, 0)
t.equal(stats.peersLeecherOnly, 0) t.equal(stats.peersLeecherOnly, 0)
t.equal(stats.peersSeederAndLeecher, 0) t.equal(stats.peersSeederAndLeecher, 0)
t.equal(stats.peersIPv4, 0) t.equal(stats.peersIPv4, 0)
t.equal(stats.peersIPv6, 0) t.equal(stats.peersIPv6, 0)
server.close(() => { t.pass('server closed') }) server.close(() => { t.pass('server closed') })
})
}) })
}) })
test('server: get empty stats with json header', t => { test('server: get empty stats with json header', t => {
t.plan(11) t.plan(10)
commonTest.createServer(t, 'http', (server, announceUrl) => { commonTest.createServer(t, 'http', async (server, announceUrl) => {
const opts = { const opts = {
url: announceUrl.replace('/announce', '/stats'), url: announceUrl.replace('/announce', '/stats'),
headers: { headers: {
accept: 'application/json' accept: 'application/json'
}, }
json: true
} }
let res
get.concat(opts, (err, res, stats) => { try {
res = await fetch(announceUrl.replace('/announce', '/stats'), opts)
} catch (err) {
t.error(err) t.error(err)
}
const stats = await res.json()
t.equal(res.statusCode, 200) t.equal(res.status, 200)
t.equal(stats.torrents, 0) t.equal(stats.torrents, 0)
t.equal(stats.activeTorrents, 0) t.equal(stats.activeTorrents, 0)
t.equal(stats.peersAll, 0) t.equal(stats.peersAll, 0)
t.equal(stats.peersSeederOnly, 0) t.equal(stats.peersSeederOnly, 0)
t.equal(stats.peersLeecherOnly, 0) t.equal(stats.peersLeecherOnly, 0)
t.equal(stats.peersSeederAndLeecher, 0) t.equal(stats.peersSeederAndLeecher, 0)
t.equal(stats.peersIPv4, 0) t.equal(stats.peersIPv4, 0)
t.equal(stats.peersIPv6, 0) t.equal(stats.peersIPv6, 0)
server.close(() => { t.pass('server closed') }) server.close(() => { t.pass('server closed') })
})
}) })
}) })
test('server: get empty stats on stats.json', t => { test('server: get empty stats on stats.json', t => {
t.plan(11) t.plan(10)
commonTest.createServer(t, 'http', (server, announceUrl) => { commonTest.createServer(t, 'http', async (server, announceUrl) => {
const opts = { let res
url: announceUrl.replace('/announce', '/stats.json'), try {
json: true res = await fetch(announceUrl.replace('/announce', '/stats.json'))
} } catch (err) {
get.concat(opts, (err, res, stats) => {
t.error(err) t.error(err)
}
const stats = await res.json()
t.equal(res.statusCode, 200) t.equal(res.status, 200)
t.equal(stats.torrents, 0) t.equal(stats.torrents, 0)
t.equal(stats.activeTorrents, 0) t.equal(stats.activeTorrents, 0)
t.equal(stats.peersAll, 0) t.equal(stats.peersAll, 0)
t.equal(stats.peersSeederOnly, 0) t.equal(stats.peersSeederOnly, 0)
t.equal(stats.peersLeecherOnly, 0) t.equal(stats.peersLeecherOnly, 0)
t.equal(stats.peersSeederAndLeecher, 0) t.equal(stats.peersSeederAndLeecher, 0)
t.equal(stats.peersIPv4, 0) t.equal(stats.peersIPv4, 0)
t.equal(stats.peersIPv6, 0) t.equal(stats.peersIPv6, 0)
server.close(() => { t.pass('server closed') }) server.close(() => { t.pass('server closed') })
})
}) })
}) })
test('server: get leecher stats.json', t => { test('server: get leecher stats.json', t => {
t.plan(11) t.plan(10)
commonTest.createServer(t, 'http', (server, announceUrl) => { commonTest.createServer(t, 'http', (server, announceUrl) => {
// announce a torrent to the tracker // announce a torrent to the tracker
@@ -127,33 +132,32 @@ test('server: get leecher stats.json', t => {
client.start() client.start()
server.once('start', () => { server.once('start', async () => {
const opts = { let res
url: announceUrl.replace('/announce', '/stats.json'), try {
json: true res = await fetch(announceUrl.replace('/announce', '/stats.json'))
} } catch (err) {
get.concat(opts, (err, res, stats) => {
t.error(err) t.error(err)
}
const stats = await res.json()
t.equal(res.statusCode, 200) t.equal(res.status, 200)
t.equal(stats.torrents, 1) t.equal(stats.torrents, 1)
t.equal(stats.activeTorrents, 1) t.equal(stats.activeTorrents, 1)
t.equal(stats.peersAll, 1) t.equal(stats.peersAll, 1)
t.equal(stats.peersSeederOnly, 0) t.equal(stats.peersSeederOnly, 0)
t.equal(stats.peersLeecherOnly, 1) t.equal(stats.peersLeecherOnly, 1)
t.equal(stats.peersSeederAndLeecher, 0) t.equal(stats.peersSeederAndLeecher, 0)
t.equal(stats.clients.WebTorrent['0.91'], 1) t.equal(stats.clients.WebTorrent['0.91'], 1)
client.destroy(() => { t.pass('client destroyed') }) client.destroy(() => { t.pass('client destroyed') })
server.close(() => { t.pass('server closed') }) server.close(() => { t.pass('server closed') })
})
}) })
}) })
}) })
test('server: get leecher stats.json (unknown peerId)', t => { test('server: get leecher stats.json (unknown peerId)', t => {
t.plan(11) t.plan(10)
commonTest.createServer(t, 'http', (server, announceUrl) => { commonTest.createServer(t, 'http', (server, announceUrl) => {
// announce a torrent to the tracker // announce a torrent to the tracker
@@ -168,27 +172,26 @@ test('server: get leecher stats.json (unknown peerId)', t => {
client.start() client.start()
server.once('start', () => { server.once('start', async () => {
const opts = { let res
url: announceUrl.replace('/announce', '/stats.json'), try {
json: true res = await fetch(announceUrl.replace('/announce', '/stats.json'))
} } catch (err) {
get.concat(opts, (err, res, stats) => {
t.error(err) t.error(err)
}
const stats = await res.json()
t.equal(res.statusCode, 200) t.equal(res.status, 200)
t.equal(stats.torrents, 1) t.equal(stats.torrents, 1)
t.equal(stats.activeTorrents, 1) t.equal(stats.activeTorrents, 1)
t.equal(stats.peersAll, 1) t.equal(stats.peersAll, 1)
t.equal(stats.peersSeederOnly, 0) t.equal(stats.peersSeederOnly, 0)
t.equal(stats.peersLeecherOnly, 1) t.equal(stats.peersLeecherOnly, 1)
t.equal(stats.peersSeederAndLeecher, 0) t.equal(stats.peersSeederAndLeecher, 0)
t.equal(stats.clients.unknown['01234567'], 1) t.equal(stats.clients.unknown['01234567'], 1)
client.destroy(() => { t.pass('client destroyed') }) client.destroy(() => { t.pass('client destroyed') })
server.close(() => { t.pass('server closed') }) server.close(() => { t.pass('server closed') })
})
}) })
}) })
}) })