http tracker: support multiple info_hash scrapes

Feross Aboukhadijeh 2014-07-10 21:00:55 -07:00
parent 0a51e59bd0
commit 2c34583c5f


@@ -114,23 +114,22 @@ Server.prototype._onHttpRequest = function (req, res) {
   var s = req.url.split('?')
   var params = querystring.parse(s[1])
 
-  // TODO: support multiple info_hash parameters as a concatenation of individual requests
-  var infoHash = params.info_hash && bytewiseDecodeURIComponent(params.info_hash).toString('hex')
-
-  if (!infoHash) return error('missing info_hash')
-  if (infoHash.length !== 40) return error('invalid info_hash')
-
-  if (s[0] === '/announce' || s[0] === '/') {
+  if (s[0] === '/announce') {
+    var infoHash = typeof params.info_hash === 'string' &&
+      bytewiseDecodeURIComponent(params.info_hash).toString('hex')
+    var port = Number(params.port)
+    var peerId = typeof params.peer_id === 'string' &&
+      bytewiseDecodeURIComponent(params.peer_id).toString('utf8')
+
+    if (!infoHash) return error('invalid info_hash')
+    if (infoHash.length !== 40) return error('invalid info_hash')
+    if (!port) return error('invalid port')
+    if (!peerId) return error('invalid peer_id')
+
     var ip = self._trustProxy
       ? req.headers['x-forwarded-for'] || req.connection.remoteAddress
       : req.connection.remoteAddress.replace(REMOVE_IPV6_RE, '') // force ipv4
-    var port = Number(params.port)
     var addr = ip + ':' + port
-    var peerId = params.peer_id && bytewiseDecodeURIComponent(params.peer_id).toString('utf8')
-
-    if (!port) return error('missing port')
-    if (!peerId) return error('missing peer_id')
 
     var swarm = self._getSwarm(infoHash)
     var peer = swarm.peers[addr]
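
Not part of the diff above: a minimal sketch of why the new typeof === 'string' guards in the announce branch matter. Node's querystring module parses a repeated query parameter into an array, so an announce that repeats info_hash or peer_id is now rejected instead of being decoded as if it were a single value.

var querystring = require('querystring')

// Single occurrence: parsed as a string, so the typeof check passes.
var single = querystring.parse('info_hash=aaaa&port=6881&peer_id=bbbb')
console.log(typeof single.info_hash) // 'string'

// Repeated occurrence: parsed as an array, so the typeof check fails
// and the handler answers with error('invalid info_hash').
var repeated = querystring.parse('info_hash=aaaa&info_hash=bbbb&port=6881')
console.log(Array.isArray(repeated.info_hash)) // true
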
@@ -219,18 +218,34 @@ Server.prototype._onHttpRequest = function (req, res) {
     res.end(bencode.encode(response))
   } else if (s[0] === '/scrape') { // unofficial scrape message
-    var swarm = self._getSwarm(infoHash)
-    var response = { files : { } }
+    if (typeof params.info_hash === 'string') {
+      params.info_hash = [ params.info_hash ]
+    }
+    if (!Array.isArray(params.info_hash)) return error('invalid info_hash')
 
-    response.files[params.info_hash] = {
-      complete: swarm.complete,
-      incomplete: swarm.incomplete,
-      downloaded: swarm.complete, // TODO: this only provides a lower-bound
+    var response = {
+      files: {},
       flags: {
         min_request_interval: self._intervalMs
       }
     }
 
+    params.info_hash.some(function (infoHash) {
+      var infoHashHex = bytewiseDecodeURIComponent(infoHash).toString('hex')
+      if (infoHashHex.length !== 40) {
+        error('invalid info_hash')
+        return true // early return
+      }
+
+      var swarm = self._getSwarm(infoHashHex)
+
+      response.files[infoHash] = {
+        complete: swarm.complete,
+        incomplete: swarm.incomplete,
+        downloaded: swarm.complete // TODO: this only provides a lower-bound
+      }
+    })
+
     res.end(bencode.encode(response))
   }
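
For reference, a client-side sketch of exercising the new multi-hash scrape path (not part of the commit; the tracker address http://localhost:8000 and the two hex hashes are made-up examples). Each 20-byte info-hash is percent-encoded byte by byte and the info_hash parameter is repeated; the bencoded response then carries one files entry per requested hash, plus the shared top-level flags.

var http = require('http')
var bencode = require('bencode')

var hashes = [
  '0123456789abcdef0123456789abcdef01234567',
  '89abcdef0123456789abcdef0123456789abcdef'
]

// Percent-encode every byte of each hash, then repeat the info_hash parameter.
var qs = hashes.map(function (hex) {
  var buf = new Buffer(hex, 'hex')
  var encoded = ''
  for (var i = 0; i < buf.length; i++) {
    encoded += '%' + ('0' + buf[i].toString(16)).slice(-2)
  }
  return 'info_hash=' + encoded
}).join('&')

http.get('http://localhost:8000/scrape?' + qs, function (res) {
  var chunks = []
  res.on('data', function (chunk) { chunks.push(chunk) })
  res.on('end', function () {
    var response = bencode.decode(Buffer.concat(chunks))
    // One entry per requested hash, each with complete/incomplete/downloaded,
    // plus the shared flags.min_request_interval.
    console.log(Object.keys(response.files).length) // 2
  })
})
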