// bittorrent-tracker/lib/client/http-tracker.js

import arrayRemove from 'unordered-array-remove'
import bencode from 'bencode'
import Debug from 'debug'
import fetch from 'cross-fetch-ponyfill'
import { bin2hex, hex2bin, arr2text, text2arr, arr2hex } from 'uint8-util'
import common from '../common.js'
import Tracker from './tracker.js'
import compact2string from 'compact2string'
const debug = Debug('bittorrent-tracker:http-tracker')
2018-10-03 12:44:11 +00:00
// Matches a trailing '/announce...' path segment; its presence means the
// tracker's scrape url can be derived by swapping 'announce' for 'scrape'
const HTTP_SCRAPE_SUPPORT = /\/(announce)[^/]*$/
/**
 * Create an AbortController whose signal aborts after `ms` milliseconds.
 *
 * @param {number} ms timeout in milliseconds
 * @returns {AbortController}
 */
function abortTimeout (ms) {
  const controller = new AbortController()
  const timer = setTimeout(() => controller.abort(), ms)
  // In Node, don't let this timer keep the process alive; `unref` does not
  // exist on browser timers, hence the optional call.
  timer.unref?.()
  return controller
}
/**
 * HTTP torrent tracker client (for an individual tracker)
 *
 * @param {Client} client parent bittorrent tracker client
 * @param {string} announceUrl announce url of tracker
 */
class HTTPTracker extends Tracker {
  /**
   * HTTP torrent tracker client (for an individual tracker).
   *
   * @param {Client} client parent bittorrent tracker client
   * @param {string} announceUrl announce url of tracker
   */
  constructor (client, announceUrl) {
    super(client, announceUrl)
    debug('new http tracker %s', announceUrl)

    // Determine scrape url (if http tracker supports it): replace the trailing
    // '/announce' segment with '/scrape', preserving any suffix (e.g. a key).
    this.scrapeUrl = null
    const match = this.announceUrl.match(HTTP_SCRAPE_SUPPORT)
    if (match) {
      const pre = this.announceUrl.slice(0, match.index)
      const post = this.announceUrl.slice(match.index + '/announce'.length)
      this.scrapeUrl = `${pre}/scrape${post}`
    }

    this.cleanupFns = [] // abort callbacks for in-flight requests
    this.maybeDestroyCleanup = null // set while destroy() waits on pending requests
  }

  /**
   * Send an announce request to the tracker.
   *
   * @param {Object} opts announce options (event, numwant, uploaded, ...)
   */
  announce (opts) {
    if (this.destroyed) return

    const params = Object.assign({}, opts, {
      compact: (opts.compact == null) ? 1 : opts.compact,
      info_hash: this.client._infoHashBinary,
      peer_id: this.client._peerIdBinary,
      port: this.client._port
    })
    if (this._trackerId) params.trackerid = this._trackerId

    this._request(this.announceUrl, params, (err, data) => {
      if (err) return this.client.emit('warning', err)
      this._onAnnounceResponse(data)
    })
  }

  /**
   * Send a scrape request to the tracker.
   *
   * @param {Object} opts options; `infoHash` may be a hex string or an array
   *   of hex strings (falls back to the client's info hash when absent)
   */
  scrape (opts) {
    if (this.destroyed) return

    if (!this.scrapeUrl) {
      this.client.emit('error', new Error(`scrape not supported ${this.announceUrl}`))
      return
    }

    const infoHashes = (Array.isArray(opts.infoHash) && opts.infoHash.length > 0)
      ? opts.infoHash.map(infoHash => hex2bin(infoHash))
      : (opts.infoHash && hex2bin(opts.infoHash)) || this.client._infoHashBinary
    const params = {
      info_hash: infoHashes
    }
    this._request(this.scrapeUrl, params, (err, data) => {
      if (err) return this.client.emit('warning', err)
      this._onScrapeResponse(data)
    })
  }

  /**
   * Destroy the tracker, waiting briefly for pending requests before
   * force-aborting them.
   *
   * @param {function(Error=)} [cb] called once cleanup is complete
   *   (fix: defaulted so destroy() without a callback no longer throws)
   */
  destroy (cb = () => {}) {
    const self = this
    if (this.destroyed) return cb(null)
    this.destroyed = true
    clearInterval(this.interval)

    // Declared before the early return below: destroyCleanup() reads `timeout`
    let timeout

    // If there are no pending requests, destroy immediately.
    if (this.cleanupFns.length === 0) return destroyCleanup()

    // Otherwise, wait a short time for pending requests to complete, then force
    // destroy them.
    timeout = setTimeout(destroyCleanup, common.DESTROY_TIMEOUT)

    // But, if all pending requests complete before the timeout fires, do cleanup
    // right away.
    this.maybeDestroyCleanup = () => {
      if (this.cleanupFns.length === 0) destroyCleanup()
    }

    function destroyCleanup () {
      if (timeout) {
        clearTimeout(timeout)
        timeout = null
      }
      self.maybeDestroyCleanup = null
      self.cleanupFns.slice(0).forEach(cleanup => {
        cleanup()
      })
      self.cleanupFns = []
      cb(null)
    }
  }

  /**
   * Perform an HTTP GET against the tracker and bencode-decode the response.
   *
   * @param {string} requestUrl announce or scrape url
   * @param {Object} params querystring parameters
   * @param {function(Error, Object=)} cb called with the decoded response
   */
  async _request (requestUrl, params, cb) {
    const parsedUrl = new URL(requestUrl + (requestUrl.indexOf('?') === -1 ? '?' : '&') + common.querystringStringify(params))
    let agent
    if (this.client._proxyOpts) {
      agent = parsedUrl.protocol === 'https:' ? this.client._proxyOpts.httpsAgent : this.client._proxyOpts.httpAgent
      if (!agent && this.client._proxyOpts.socksProxy) {
        agent = this.client._proxyOpts.socksProxy
      }
    }

    // Declare `controller` before `cleanup` so the closure never hits the
    // temporal dead zone (previously `cleanup` was pushed before `controller`
    // was declared).
    let controller = abortTimeout(common.REQUEST_TIMEOUT)
    const cleanup = () => {
      // Remove this request from the pending list even if the timeout already
      // aborted the signal (previously that path leaked the entry), release
      // the controller, and let a waiting destroy() finish early.
      const idx = this.cleanupFns.indexOf(cleanup)
      if (idx !== -1) arrayRemove(this.cleanupFns, idx)
      if (controller) {
        if (!controller.signal.aborted) controller.abort()
        controller = null
      }
      if (this.maybeDestroyCleanup) this.maybeDestroyCleanup()
    }
    this.cleanupFns.push(cleanup)

    let res
    let data
    try {
      res = await fetch(parsedUrl.toString(), {
        agent,
        signal: controller.signal,
        // `dispatcher` covers undici-style options; `agent` covers node-fetch
        dispatcher: agent,
        headers: {
          'user-agent': this.client._userAgent || ''
        }
      })
      data = new Uint8Array(await res.arrayBuffer())
    } catch (err) {
      // Fix: previously the cleanup callback leaked on request failure, which
      // made destroy() wait out its full DESTROY_TIMEOUT.
      cleanup()
      return cb(err)
    }
    cleanup()
    if (this.destroyed) return

    if (res.status !== 200) {
      return cb(new Error(`Non-200 response code ${res.status} from ${this.announceUrl}`))
    }
    if (!data || data.length === 0) {
      // Fix: message previously read "response from<url>" (missing space)
      return cb(new Error(`Invalid tracker response from ${this.announceUrl}`))
    }

    try {
      data = bencode.decode(data)
    } catch (err) {
      return cb(new Error(`Error decoding tracker response: ${err.message}`))
    }
    const failure = data['failure reason'] && arr2text(data['failure reason'])
    if (failure) {
      debug(`failure from ${requestUrl} (${failure})`)
      return cb(new Error(failure))
    }

    const warning = data['warning message'] && arr2text(data['warning message'])
    if (warning) {
      debug(`warning from ${requestUrl} (${warning})`)
      this.client.emit('warning', new Error(warning))
    }
    debug(`response from ${requestUrl}`)
    cb(null, data)
  }

  /**
   * Handle a decoded announce response: update the interval, remember the
   * tracker id, and emit 'update' plus one 'peer' event per peer.
   *
   * @param {Object} data bencode-decoded announce response
   */
  _onAnnounceResponse (data) {
    const interval = data.interval || data['min interval']
    if (interval) this.setInterval(interval * 1000)

    const trackerId = data['tracker id']
    if (trackerId) {
      // If absent, do not discard previous trackerId value
      this._trackerId = trackerId
    }
    const response = Object.assign({}, data, {
      announce: this.announceUrl,
      infoHash: bin2hex(data.info_hash || String(data.info_hash))
    })
    this.client.emit('update', response)

    let addrs
    if (ArrayBuffer.isView(data.peers)) {
      // tracker returned compact response
      try {
        addrs = compact2string.multi(Buffer.from(data.peers))
      } catch (err) {
        return this.client.emit('warning', err)
      }
      addrs.forEach(addr => {
        this.client.emit('peer', addr)
      })
    } else if (Array.isArray(data.peers)) {
      // tracker returned normal response
      data.peers.forEach(peer => {
        this.client.emit('peer', `${peer.ip}:${peer.port}`)
      })
    }
    if (ArrayBuffer.isView(data.peers6)) {
      // tracker returned compact response
      try {
        addrs = compact2string.multi6(Buffer.from(data.peers6))
      } catch (err) {
        return this.client.emit('warning', err)
      }
      addrs.forEach(addr => {
        this.client.emit('peer', addr)
      })
    } else if (Array.isArray(data.peers6)) {
      // tracker returned normal response
      data.peers6.forEach(peer => {
        const ip = /^\[/.test(peer.ip) || !/:/.test(peer.ip)
          ? peer.ip /* ipv6 w/ brackets or domain name */
          : `[${peer.ip}]` /* ipv6 without brackets */
        this.client.emit('peer', `${ip}:${peer.port}`)
      })
    }
  }

  /**
   * Handle a decoded scrape response: emit one 'scrape' event per info hash.
   *
   * @param {Object} data bencode-decoded scrape response
   */
  _onScrapeResponse (data) {
    // NOTE: the unofficial spec says to use the 'files' key, 'host' has been
    // seen in practice
    data = data.files || data.host || {}

    const keys = Object.keys(data)
    if (keys.length === 0) {
      this.client.emit('warning', new Error('invalid scrape response'))
      return
    }

    keys.forEach(_infoHash => {
      // TODO: optionally handle data.flags.min_request_interval
      // (separate from announce interval)
      // Keys that are not 20 bytes are treated as raw byte strings; 20-byte
      // keys are binary info hashes
      const infoHash = _infoHash.length !== 20 ? arr2hex(text2arr(_infoHash)) : bin2hex(_infoHash)
      const response = Object.assign(data[_infoHash], {
        announce: this.announceUrl,
        infoHash
      })
      this.client.emit('scrape', response)
    })
  }
}
// Default announce interval — presumably consumed by the Tracker base class
// when the tracker response omits one (not visible in this file; confirm)
HTTPTracker.prototype.DEFAULT_ANNOUNCE_INTERVAL = 30 * 60 * 1000 // 30 minutes
export default HTTPTracker