diff --git a/client.js b/client.js
index 0a605f3..f0c4546 100644
--- a/client.js
+++ b/client.js
@@ -138,7 +138,7 @@ Tracker.prototype.start = function (opts) {
   opts.event = 'started'
   debug('sent `start` to ' + self._announceUrl)
 
-  self._request(opts)
+  self._announce(opts)
   self.setInterval(self._intervalMs) // start announcing on intervals
 }
 
@@ -148,7 +148,7 @@ Tracker.prototype.stop = function (opts) {
   opts.event = 'stopped'
   debug('sent `stop` to ' + self._announceUrl)
 
-  self._request(opts)
+  self._announce(opts)
   self.setInterval(0) // stop announcing on intervals
 }
 
@@ -159,7 +159,7 @@ Tracker.prototype.complete = function (opts) {
   opts.downloaded = opts.downloaded || self.torrentLength || 0
   debug('sent `complete` to ' + self._announceUrl)
 
-  self._request(opts)
+  self._announce(opts)
 }
 
 Tracker.prototype.update = function (opts) {
@@ -167,10 +167,34 @@ Tracker.prototype.update = function (opts) {
   opts = opts || {}
   debug('sent `update` to ' + self._announceUrl)
 
-  self._request(opts)
+  self._announce(opts)
 }
 
-Tracker.prototype.scrape = function (opts) {
+/**
+ * Send an announce request to the tracker.
+ * @param {Object} opts
+ * @param {number=} opts.uploaded
+ * @param {number=} opts.downloaded
+ * @param {number=} opts.left (if not set, calculated automatically)
+ */
+Tracker.prototype._announce = function (opts) {
+  var self = this
+  opts = extend({
+    uploaded: 0, // default, user should provide real value
+    downloaded: 0 // default, user should provide real value
+  }, opts)
+
+  if (self.client.torrentLength != null && opts.left == null) {
+    opts.left = self.client.torrentLength - (opts.downloaded || 0)
+  }
+
+  self._requestImpl(self._announceUrl, opts)
+}
+
+/**
+ * Send a scrape request to the tracker.
+ */
+Tracker.prototype.scrape = function () {
   var self = this
 
   if (!self._scrapeUrl) {
@@ -189,12 +213,7 @@ Tracker.prototype.scrape = function (opts) {
   }
 
   debug('sent `scrape` to ' + self._announceUrl)
-
-  opts = extend({
-    info_hash: common.bytewiseEncodeURIComponent(self.client._infoHash)
-  }, opts)
-
-  self._requestImpl(self._scrapeUrl, opts)
+  self._requestImpl(self._scrapeUrl)
 }
 
 Tracker.prototype.setInterval = function (intervalMs) {
@@ -207,35 +226,28 @@ Tracker.prototype.setInterval = function (intervalMs) {
   }
 }
 
-/**
- * Send an announce request to the tracker
- */
-Tracker.prototype._request = function (opts) {
-  var self = this
-  opts = extend({
-    info_hash: common.bytewiseEncodeURIComponent(self.client._infoHash),
-    peer_id: common.bytewiseEncodeURIComponent(self.client._peerId),
-    port: self.client._port,
-    compact: 1,
-    numwant: self.client._numWant,
-    uploaded: 0, // default, user should provide real value
-    downloaded: 0 // default, user should provide real value
-  }, opts)
-
-  if (self.client.torrentLength !== undefined) {
-    opts.left = self.client.torrentLength - (opts.downloaded || 0)
-  }
-
-  if (self._trackerId) {
-    opts.trackerid = self._trackerId
-  }
-
-  self._requestImpl(self._announceUrl, opts)
-}
-
 Tracker.prototype._requestHttp = function (requestUrl, opts) {
   var self = this
-  var fullUrl = requestUrl + '?' + querystring.stringify(opts)
+
+  if (isScrapeUrl(requestUrl)) {
+    opts = extend({
+      info_hash: self.client._infoHash.toString('binary')
+    }, opts)
+  } else {
+    opts = extend({
+      info_hash: self.client._infoHash.toString('binary'),
+      peer_id: self.client._peerId.toString('binary'),
+      port: self.client._port,
+      compact: 1,
+      numwant: self.client._numWant
+    }, opts)
+
+    if (self._trackerId) {
+      opts.trackerid = self._trackerId
+    }
+  }
+
+  var fullUrl = requestUrl + '?' + common.querystringStringify(opts)
   var req = http.get(fullUrl, function (res) {
     if (res.statusCode !== 200) {
@@ -255,6 +267,7 @@ Tracker.prototype._requestHttp = function (requestUrl, opts) {
 
 Tracker.prototype._requestUdp = function (requestUrl, opts) {
   var self = this
+  opts = opts || {}
   var parsedUrl = url.parse(requestUrl)
   var socket = dgram.createSocket('udp4')
   var transactionId = new Buffer(hat(32), 'hex')
@@ -294,9 +307,8 @@ Tracker.prototype._requestUdp = function (requestUrl, opts) {
       return error('invalid udp handshake')
     }
 
-    var scrapeStr = 'scrape'
-    if (requestUrl.substr(requestUrl.lastIndexOf('/') + 1, scrapeStr.length) === scrapeStr) {
-      scrape(msg.slice(8, 16), opts)
+    if (isScrapeUrl(requestUrl)) {
+      scrape(msg.slice(8, 16))
     } else {
       announce(msg.slice(8, 16), opts)
     }
@@ -395,7 +407,7 @@ Tracker.prototype._requestUdp = function (requestUrl, opts) {
     ]))
   }
 
-  function scrape (connectionId, opts) {
+  function scrape (connectionId) {
     genTransactionId()
 
     send(Buffer.concat([
@@ -477,10 +489,6 @@ Tracker.prototype._handleResponse = function (requestUrl, data) {
   }
 }
 
-//
-// HELPERS
-//
-
 function toUInt16 (n) {
   var buf = new Buffer(2)
   buf.writeUInt16BE(n, 0)
@@ -499,3 +507,7 @@ function toUInt64 (n) {
   }
   return Buffer.concat([common.toUInt32(0), common.toUInt32(n)])
 }
+
+function isScrapeUrl (u) {
+  return u.substr(u.lastIndexOf('/') + 1, 'scrape'.length) === 'scrape'
+}
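The client.js change splits the old `_request` method, which mixed announce-only querystring fields with URL building, into `_announce` (which owns the uploaded/downloaded defaults and derives `left` from `client.torrentLength` when the caller does not pass it) and `_requestHttp`/`_requestUdp`, which attach either the full announce parameter set or just `info_hash` depending on `isScrapeUrl`. A quick sanity check of that routing helper; the function is copied verbatim from the bottom of client.js, while the URLs are made up for illustration:

function isScrapeUrl (u) {
  return u.substr(u.lastIndexOf('/') + 1, 'scrape'.length) === 'scrape'
}

console.log(isScrapeUrl('http://tracker.example.com/announce')) // false -> peer_id, port, compact, numwant added
console.log(isScrapeUrl('http://tracker.example.com/scrape'))   // true  -> only info_hash is sent
console.log(isScrapeUrl('udp://tracker.example.com:80'))        // false -> pathless UDP URLs are treated as announce

For the `left` calculation: with `client.torrentLength` at, say, 5000 and `opts.downloaded` at 1500, `_announce` fills in `left: 3500` before handing the options to `_requestImpl`.
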
diff --git a/lib/common.js b/lib/common.js
index a281f83..ccf1bd4 100644
--- a/lib/common.js
+++ b/lib/common.js
@@ -1,7 +1,13 @@
 /**
- * Functions and constants needed by both the tracker client and server.
+ * Functions/constants needed by both the client and server.
  */
 
+var querystring = require('querystring')
+
+exports.CONNECTION_ID = Buffer.concat([ toUInt32(0x417), toUInt32(0x27101980) ])
+exports.ACTIONS = { CONNECT: 0, ANNOUNCE: 1, SCRAPE: 2, ERROR: 3 }
+exports.EVENTS = { update: 0, completed: 1, started: 2, stopped: 3 }
+
 function toUInt32 (n) {
   var buf = new Buffer(4)
   buf.writeUInt32BE(n, 0)
@@ -9,14 +15,34 @@ function toUInt32 (n) {
 }
 exports.toUInt32 = toUInt32
 
-exports.CONNECTION_ID = Buffer.concat([ toUInt32(0x417), toUInt32(0x27101980) ])
-exports.ACTIONS = { CONNECT: 0, ANNOUNCE: 1, SCRAPE: 2, ERROR: 3 }
-exports.EVENTS = { update: 0, completed: 1, started: 2, stopped: 3 }
-
-exports.bytewiseDecodeURIComponent = function (str) {
-  return new Buffer(decodeURIComponent(str), 'binary')
+exports.binaryToUtf8 = function (str) {
+  return new Buffer(str, 'binary').toString('utf8')
 }
 
-exports.bytewiseEncodeURIComponent = function (buf) {
-  return encodeURIComponent(buf.toString('binary'))
+/**
+ * `querystring.parse` using `unescape` instead of decodeURIComponent, since bittorrent
+ * clients send non-UTF8 querystrings
+ * @param {string} q
+ * @return {Object}
+ */
+exports.querystringParse = function (q) {
+  var saved = querystring.unescape
+  querystring.unescape = unescape // global
+  var ret = querystring.parse(q)
+  querystring.unescape = saved
+  return ret
+}
+
+/**
+ * `querystring.stringify` using `escape` instead of encodeURIComponent, since bittorrent
+ * clients send non-UTF8 querystrings
+ * @param {Object} obj
+ * @return {string}
+ */
+exports.querystringStringify = function (obj) {
+  var saved = querystring.escape
+  querystring.escape = escape // global
+  var ret = querystring.stringify(obj)
+  querystring.escape = saved
+  return ret
 }
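The new lib/common.js helpers temporarily swap the module-level `querystring.escape`/`querystring.unescape` hooks (which Node's `querystring.stringify`/`parse` call internally) for the legacy `escape`/`unescape` globals, then restore them. The reason is that `info_hash` and `peer_id` are raw 20-byte values carried as 'binary' strings, and the default encodeURIComponent/decodeURIComponent pair treats them as UTF-16 text. A minimal sketch of the difference, assuming the module is loaded from the package root (the hash value is just the one used in the tests):

var common = require('./lib/common')

var binaryInfoHash = new Buffer('aaa67059ed6bd08362da625b3ae77f6f4a075aaa', 'hex').toString('binary')

encodeURIComponent(binaryInfoHash.charAt(0)) // => '%C2%AA' (byte 0xAA re-encoded as two UTF-8 bytes)
escape(binaryInfoHash.charAt(0))             // => '%AA'   (one escape per byte, what trackers expect)

var qs = common.querystringStringify({ info_hash: binaryInfoHash })
common.querystringParse(qs).info_hash === binaryInfoHash // => true, byte-for-byte round trip

Because the swap is undone before each helper returns and Node runs this synchronously on one thread, other `querystring` callers never observe the changed hooks.
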
diff --git a/server.js b/server.js
index e5fa6be..64e8c05 100644
--- a/server.js
+++ b/server.js
@@ -122,14 +122,11 @@ Server.prototype._onHttpRequest = function (req, res) {
   var self = this
   var warning
   var s = req.url.split('?')
-  var params = querystring.parse(s[1])
-
+  var params = common.querystringParse(s[1])
   if (s[0] === '/announce') {
-    var infoHash = typeof params.info_hash === 'string' &&
-      common.bytewiseDecodeURIComponent(params.info_hash).toString('binary')
+    var infoHash = typeof params.info_hash === 'string' && params.info_hash
+    var peerId = typeof params.peer_id === 'string' && common.binaryToUtf8(params.peer_id)
     var port = Number(params.port)
-    var peerId = typeof params.peer_id === 'string' &&
-      common.bytewiseDecodeURIComponent(params.peer_id).toString('utf8')
 
     if (!infoHash) return error('invalid info_hash')
     if (infoHash.length !== 20) return error('invalid info_hash')
@@ -252,7 +249,6 @@ Server.prototype._onHttpRequest = function (req, res) {
     }
 
     params.info_hash.some(function (infoHash) {
-      infoHash = common.bytewiseDecodeURIComponent(infoHash).toString('binary')
       if (infoHash.length !== 20) {
         error('invalid info_hash')
         return true // early return
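On the server side, `common.querystringParse` means `params.info_hash` already arrives as the raw 20-character 'binary' string, so the extra `bytewiseDecodeURIComponent` pass goes away and the value is used directly as the swarm key; `peer_id` only gets `binaryToUtf8` for readability. A small sketch with a made-up announce querystring, again assuming the module is loaded from the package root:

var common = require('./lib/common')

var binaryInfoHash = new Buffer('aaa67059ed6bd08362da625b3ae77f6f4a075aaa', 'hex').toString('binary')
var qs = 'info_hash=' + escape(binaryInfoHash) + '&port=6881'

// decodeURIComponent('%AA') throws URIError: a lone 0xAA byte is not valid
// UTF-8, which is why the parse side needs the byte-wise unescape.
var params = common.querystringParse(qs)
params.info_hash.length // => 20, usable directly as the swarm key
Number(params.port)     // => 6881
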
diff --git a/test/scrape.js b/test/scrape.js
index cce5a22..2c01aee 100644
--- a/test/scrape.js
+++ b/test/scrape.js
@@ -10,23 +10,19 @@ var querystring = require('querystring')
 var Server = require('../').Server
 var test = require('tape')
 
+function hexToBinary (str) {
+  return new Buffer(str, 'hex').toString('binary')
+}
+
 var infoHash1 = 'aaa67059ed6bd08362da625b3ae77f6f4a075aaa'
-var encodedInfoHash1 = common.bytewiseEncodeURIComponent(
-  new Buffer(infoHash1, 'hex')
-)
-var binaryinfoHash1 = new Buffer(infoHash1, 'hex').toString('binary')
+var binaryInfoHash1 = hexToBinary(infoHash1)
 var infoHash2 = 'bbb67059ed6bd08362da625b3ae77f6f4a075bbb'
-var encodedInfoHash2 = common.bytewiseEncodeURIComponent(
-  new Buffer(infoHash2, 'hex')
-)
-var binaryinfoHash2 = new Buffer(infoHash2, 'hex').toString('binary')
+var binaryInfoHash2 = hexToBinary(infoHash2)
 
 var bitlove = fs.readFileSync(__dirname + '/torrents/bitlove-intro.torrent')
 var parsedBitlove = parseTorrent(bitlove)
-var encodedBitlove = common.bytewiseEncodeURIComponent(
-  new Buffer(parsedBitlove.infoHash, 'hex')
-)
-var binaryBitlove = new Buffer(parsedBitlove.infoHash, 'hex').toString('binary')
+var binaryBitlove = hexToBinary(parsedBitlove.infoHash)
+
 var peerId = new Buffer('01234567890123456789')
 
 test('server: single info_hash scrape', function (t) {
@@ -44,8 +40,8 @@ test('server: single info_hash scrape', function (t) {
   var scrapeUrl = 'http://127.0.0.1:' + port + '/scrape'
 
   server.once('listening', function () {
-    var url = scrapeUrl + '?' + querystring.stringify({
-      info_hash: encodedInfoHash1
+    var url = scrapeUrl + '?' + common.querystringStringify({
+      info_hash: binaryInfoHash1
     })
     http.get(url, function (res) {
       t.equal(res.statusCode, 200)
@@ -53,10 +49,10 @@ test('server: single info_hash scrape', function (t) {
         data = bencode.decode(data)
         t.ok(data.files)
         t.equal(Object.keys(data.files).length, 1)
-        t.ok(data.files[binaryinfoHash1])
-        t.equal(typeof data.files[binaryinfoHash1].complete, 'number')
-        t.equal(typeof data.files[binaryinfoHash1].incomplete, 'number')
-        t.equal(typeof data.files[binaryinfoHash1].downloaded, 'number')
+        t.ok(data.files[binaryInfoHash1])
+        t.equal(typeof data.files[binaryInfoHash1].complete, 'number')
+        t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number')
+        t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number')
 
         server.close(function () {
           t.end()
@@ -84,8 +80,8 @@ test('server: multiple info_hash scrape', function (t) {
   var scrapeUrl = 'http://127.0.0.1:' + port + '/scrape'
 
   server.once('listening', function () {
-    var url = scrapeUrl + '?' + querystring.stringify({
-      info_hash: [ encodedInfoHash1, encodedInfoHash2 ]
+    var url = scrapeUrl + '?' + common.querystringStringify({
+      info_hash: [ binaryInfoHash1, binaryInfoHash2 ]
    })
     http.get(url, function (res) {
       t.equal(res.statusCode, 200)
@@ -94,15 +90,15 @@ test('server: multiple info_hash scrape', function (t) {
         t.ok(data.files)
         t.equal(Object.keys(data.files).length, 2)
 
-        t.ok(data.files[binaryinfoHash1])
-        t.equal(typeof data.files[binaryinfoHash1].complete, 'number')
-        t.equal(typeof data.files[binaryinfoHash1].incomplete, 'number')
-        t.equal(typeof data.files[binaryinfoHash1].downloaded, 'number')
+        t.ok(data.files[binaryInfoHash1])
+        t.equal(typeof data.files[binaryInfoHash1].complete, 'number')
+        t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number')
+        t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number')
 
-        t.ok(data.files[binaryinfoHash2])
-        t.equal(typeof data.files[binaryinfoHash2].complete, 'number')
-        t.equal(typeof data.files[binaryinfoHash2].incomplete, 'number')
-        t.equal(typeof data.files[binaryinfoHash2].downloaded, 'number')
+        t.ok(data.files[binaryInfoHash2])
+        t.equal(typeof data.files[binaryInfoHash2].complete, 'number')
+        t.equal(typeof data.files[binaryInfoHash2].incomplete, 'number')
+        t.equal(typeof data.files[binaryInfoHash2].downloaded, 'number')
 
         server.close(function () {
           t.end()
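The test changes follow from the above: scrape URLs are now built with `common.querystringStringify` and the raw binary hashes, matching what the client sends, and the assertions index `data.files` with the binary form because the bencoded scrape response is keyed by the raw 20-byte info_hash rather than its hex spelling. A minimal sketch of that keying; the counter values are illustrative, not what the server necessarily returns:

function hexToBinary (str) {
  return new Buffer(str, 'hex').toString('binary')
}

var key = hexToBinary('aaa67059ed6bd08362da625b3ae77f6f4a075aaa')
key.length // => 20

// After `data = bencode.decode(data)`, a passing response looks roughly like:
// data.files[key] => { complete: 0, incomplete: 0, downloaded: 0 }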