Fix serious client+server encoding error (fix #32)

Feross Aboukhadijeh 2014-07-12 18:44:41 -07:00
parent ddbd10e552
commit be875c40af
4 changed files with 120 additions and 90 deletions
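Background for this fix: `info_hash` and `peer_id` are raw 20-byte values, not UTF-8 text. The old client and server pushed them through `encodeURIComponent`/`decodeURIComponent`, which re-encodes every byte >= 0x80 as a multi-byte UTF-8 percent sequence and therefore corrupts the values in announce/scrape querystrings. The new `common.querystringStringify`/`querystringParse` helpers below swap in the byte-wise `escape`/`unescape` functions instead. A minimal sketch of the difference (illustrative only, not code from this commit):

  var byte = new Buffer([0xac]).toString('binary') // a single raw byte, 0xAC

  encodeURIComponent(byte) // '%C2%AC' -- re-encoded as UTF-8, two bytes on the wire
  escape(byte)             // '%AC'    -- byte-wise, what BitTorrent trackers expect
  unescape('%AC')          // back to the single 0xAC byte, so the round trip is lossless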

View File

@@ -138,7 +138,7 @@ Tracker.prototype.start = function (opts) {
   opts.event = 'started'
   debug('sent `start` to ' + self._announceUrl)
-  self._request(opts)
+  self._announce(opts)
   self.setInterval(self._intervalMs) // start announcing on intervals
 }

@@ -148,7 +148,7 @@ Tracker.prototype.stop = function (opts) {
   opts.event = 'stopped'
   debug('sent `stop` to ' + self._announceUrl)
-  self._request(opts)
+  self._announce(opts)
   self.setInterval(0) // stop announcing on intervals
 }

@@ -159,7 +159,7 @@ Tracker.prototype.complete = function (opts) {
   opts.downloaded = opts.downloaded || self.torrentLength || 0
   debug('sent `complete` to ' + self._announceUrl)
-  self._request(opts)
+  self._announce(opts)
 }

 Tracker.prototype.update = function (opts) {

@@ -167,10 +167,34 @@ Tracker.prototype.update = function (opts) {
   opts = opts || {}
   debug('sent `update` to ' + self._announceUrl)
-  self._request(opts)
+  self._announce(opts)
 }

-Tracker.prototype.scrape = function (opts) {
+/**
+ * Send an announce request to the tracker.
+ * @param {Object} opts
+ * @param {number=} opts.uploaded
+ * @param {number=} opts.downloaded
+ * @param {number=} opts.left (if not set, calculated automatically)
+ */
+Tracker.prototype._announce = function (opts) {
+  var self = this
+  opts = extend({
+    uploaded: 0, // default, user should provide real value
+    downloaded: 0 // default, user should provide real value
+  }, opts)
+
+  if (self.client.torrentLength != null && opts.left == null) {
+    opts.left = self.client.torrentLength - (opts.downloaded || 0)
+  }
+
+  self._requestImpl(self._announceUrl, opts)
+}
+
+/**
+ * Send a scrape request to the tracker.
+ */
+Tracker.prototype.scrape = function () {
   var self = this

   if (!self._scrapeUrl) {

@@ -189,12 +213,7 @@ Tracker.prototype.scrape = function (opts) {
   }
   debug('sent `scrape` to ' + self._announceUrl)

-  opts = extend({
-    info_hash: common.bytewiseEncodeURIComponent(self.client._infoHash)
-  }, opts)
-
-  self._requestImpl(self._scrapeUrl, opts)
+  self._requestImpl(self._scrapeUrl)
 }

 Tracker.prototype.setInterval = function (intervalMs) {

@@ -207,35 +226,28 @@ Tracker.prototype.setInterval = function (intervalMs) {
   }
 }

-/**
- * Send an announce request to the tracker
- */
-Tracker.prototype._request = function (opts) {
+Tracker.prototype._requestHttp = function (requestUrl, opts) {
   var self = this
-  opts = extend({
-    info_hash: common.bytewiseEncodeURIComponent(self.client._infoHash),
-    peer_id: common.bytewiseEncodeURIComponent(self.client._peerId),
-    port: self.client._port,
-    compact: 1,
-    numwant: self.client._numWant,
-    uploaded: 0, // default, user should provide real value
-    downloaded: 0 // default, user should provide real value
-  }, opts)
-
-  if (self.client.torrentLength !== undefined) {
-    opts.left = self.client.torrentLength - (opts.downloaded || 0)
-  }
-
-  if (self._trackerId) {
-    opts.trackerid = self._trackerId
-  }
-
-  self._requestImpl(self._announceUrl, opts)
-}
-
-Tracker.prototype._requestHttp = function (requestUrl, opts) {
-  var self = this
-  var fullUrl = requestUrl + '?' + querystring.stringify(opts)
+  if (isScrapeUrl(requestUrl)) {
+    opts = extend({
+      info_hash: self.client._infoHash.toString('binary')
+    }, opts)
+  } else {
+    opts = extend({
+      info_hash: self.client._infoHash.toString('binary'),
+      peer_id: self.client._peerId.toString('binary'),
+      port: self.client._port,
+      compact: 1,
+      numwant: self.client._numWant
+    }, opts)
+
+    if (self._trackerId) {
+      opts.trackerid = self._trackerId
+    }
+  }
+
+  var fullUrl = requestUrl + '?' + common.querystringStringify(opts)

   var req = http.get(fullUrl, function (res) {
     if (res.statusCode !== 200) {

@@ -255,6 +267,7 @@ Tracker.prototype._requestHttp = function (requestUrl, opts) {
 Tracker.prototype._requestUdp = function (requestUrl, opts) {
   var self = this
+  opts = opts || {}
   var parsedUrl = url.parse(requestUrl)
   var socket = dgram.createSocket('udp4')
   var transactionId = new Buffer(hat(32), 'hex')

@@ -294,9 +307,8 @@ Tracker.prototype._requestUdp = function (requestUrl, opts) {
       return error('invalid udp handshake')
     }

-    var scrapeStr = 'scrape'
-    if (requestUrl.substr(requestUrl.lastIndexOf('/') + 1, scrapeStr.length) === scrapeStr) {
-      scrape(msg.slice(8, 16), opts)
+    if (isScrapeUrl(requestUrl)) {
+      scrape(msg.slice(8, 16))
     } else {
       announce(msg.slice(8, 16), opts)
     }

@@ -395,7 +407,7 @@ Tracker.prototype._requestUdp = function (requestUrl, opts) {
     ]))
   }

-  function scrape (connectionId, opts) {
+  function scrape (connectionId) {
     genTransactionId()

     send(Buffer.concat([

@@ -477,10 +489,6 @@ Tracker.prototype._handleResponse = function (requestUrl, data) {
   }
 }

-//
-// HELPERS
-//
-
 function toUInt16 (n) {
   var buf = new Buffer(2)
   buf.writeUInt16BE(n, 0)

@@ -499,3 +507,7 @@ function toUInt64 (n) {
   }
   return Buffer.concat([common.toUInt32(0), common.toUInt32(n)])
 }
+
+function isScrapeUrl (u) {
+  return u.substr(u.lastIndexOf('/') + 1, 'scrape'.length) === 'scrape'
+}

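Side note (illustrative, with made-up tracker URLs): the new `isScrapeUrl` helper above keys off the last path segment, which is what lets `_requestHttp` and `_requestUdp` share one code path for both request types.

  isScrapeUrl('http://tracker.example.com/announce') // false -> full announce params (peer_id, port, numwant, ...)
  isScrapeUrl('http://tracker.example.com/scrape')   // true  -> scrape params only need info_hash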
View File

@@ -1,7 +1,13 @@
 /**
- * Functions and constants needed by both the tracker client and server.
+ * Functions/constants needed by both the client and server.
  */

+var querystring = require('querystring')
+
+exports.CONNECTION_ID = Buffer.concat([ toUInt32(0x417), toUInt32(0x27101980) ])
+exports.ACTIONS = { CONNECT: 0, ANNOUNCE: 1, SCRAPE: 2, ERROR: 3 }
+exports.EVENTS = { update: 0, completed: 1, started: 2, stopped: 3 }
+
 function toUInt32 (n) {
   var buf = new Buffer(4)
   buf.writeUInt32BE(n, 0)

@@ -9,14 +15,34 @@ function toUInt32 (n) {
 }
 exports.toUInt32 = toUInt32

-exports.CONNECTION_ID = Buffer.concat([ toUInt32(0x417), toUInt32(0x27101980) ])
-exports.ACTIONS = { CONNECT: 0, ANNOUNCE: 1, SCRAPE: 2, ERROR: 3 }
-exports.EVENTS = { update: 0, completed: 1, started: 2, stopped: 3 }
-
-exports.bytewiseDecodeURIComponent = function (str) {
-  return new Buffer(decodeURIComponent(str), 'binary')
+exports.binaryToUtf8 = function (str) {
+  return new Buffer(str, 'binary').toString('utf8')
 }

-exports.bytewiseEncodeURIComponent = function (buf) {
-  return encodeURIComponent(buf.toString('binary'))
+/**
+ * `querystring.parse` using `unescape` instead of decodeURIComponent, since bittorrent
+ * clients send non-UTF8 querystrings
+ * @param {string} q
+ * @return {Object}
+ */
+exports.querystringParse = function (q) {
+  var saved = querystring.unescape
+  querystring.unescape = unescape // global
+  var ret = querystring.parse(q)
+  querystring.unescape = saved
+  return ret
+}
+
+/**
+ * `querystring.stringify` using `escape` instead of encodeURIComponent, since bittorrent
+ * clients send non-UTF8 querystrings
+ * @param {Object} obj
+ * @return {string}
+ */
+exports.querystringStringify = function (obj) {
+  var saved = querystring.escape
+  querystring.escape = escape // global
+  var ret = querystring.stringify(obj)
+  querystring.escape = saved
+  return ret
 }

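The two helpers above are the core of the fix: they temporarily swap `querystring`'s escape/unescape hooks so percent-encoding happens byte-for-byte. A rough usage sketch (the require path is an assumption, adjust to wherever common.js lives; the hash is the one used in the tests below):

  var common = require('./lib/common') // path assumed
  var infoHash = new Buffer('aaa67059ed6bd08362da625b3ae77f6f4a075aaa', 'hex')

  var qs = common.querystringStringify({ info_hash: infoHash.toString('binary') })
  var params = common.querystringParse(qs)

  // the receiving side gets the same 20 raw bytes back
  new Buffer(params.info_hash, 'binary').toString('hex') // -> 'aaa67059ed6bd08362da625b3ae77f6f4a075aaa'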
View File

@@ -122,14 +122,11 @@ Server.prototype._onHttpRequest = function (req, res) {
   var self = this
   var warning
   var s = req.url.split('?')
-  var params = querystring.parse(s[1])
+  var params = common.querystringParse(s[1])

   if (s[0] === '/announce') {
-    var infoHash = typeof params.info_hash === 'string' &&
-      common.bytewiseDecodeURIComponent(params.info_hash).toString('binary')
+    var infoHash = typeof params.info_hash === 'string' && params.info_hash
+    var peerId = typeof params.peer_id === 'string' && common.binaryToUtf8(params.peer_id)
     var port = Number(params.port)
-    var peerId = typeof params.peer_id === 'string' &&
-      common.bytewiseDecodeURIComponent(params.peer_id).toString('utf8')

     if (!infoHash) return error('invalid info_hash')
     if (infoHash.length !== 20) return error('invalid info_hash')

@@ -252,7 +249,6 @@ Server.prototype._onHttpRequest = function (req, res) {
     }

     params.info_hash.some(function (infoHash) {
-      infoHash = common.bytewiseDecodeURIComponent(infoHash).toString('binary')
       if (infoHash.length !== 20) {
         error('invalid info_hash')
         return true // early return

View File

@@ -10,23 +10,19 @@ var querystring = require('querystring')
 var Server = require('../').Server
 var test = require('tape')

+function hexToBinary (str) {
+  return new Buffer(str, 'hex').toString('binary')
+}
+
 var infoHash1 = 'aaa67059ed6bd08362da625b3ae77f6f4a075aaa'
-var encodedInfoHash1 = common.bytewiseEncodeURIComponent(
-  new Buffer(infoHash1, 'hex')
-)
-var binaryinfoHash1 = new Buffer(infoHash1, 'hex').toString('binary')
+var binaryInfoHash1 = hexToBinary(infoHash1)
 var infoHash2 = 'bbb67059ed6bd08362da625b3ae77f6f4a075bbb'
-var encodedInfoHash2 = common.bytewiseEncodeURIComponent(
-  new Buffer(infoHash2, 'hex')
-)
-var binaryinfoHash2 = new Buffer(infoHash2, 'hex').toString('binary')
+var binaryInfoHash2 = hexToBinary(infoHash2)

 var bitlove = fs.readFileSync(__dirname + '/torrents/bitlove-intro.torrent')
 var parsedBitlove = parseTorrent(bitlove)
-var encodedBitlove = common.bytewiseEncodeURIComponent(
-  new Buffer(parsedBitlove.infoHash, 'hex')
-)
-var binaryBitlove = new Buffer(parsedBitlove.infoHash, 'hex').toString('binary')
+var binaryBitlove = hexToBinary(parsedBitlove.infoHash)
 var peerId = new Buffer('01234567890123456789')

 test('server: single info_hash scrape', function (t) {

@@ -44,8 +40,8 @@ test('server: single info_hash scrape', function (t) {
   var scrapeUrl = 'http://127.0.0.1:' + port + '/scrape'

   server.once('listening', function () {
-    var url = scrapeUrl + '?' + querystring.stringify({
-      info_hash: encodedInfoHash1
+    var url = scrapeUrl + '?' + common.querystringStringify({
+      info_hash: binaryInfoHash1
     })
     http.get(url, function (res) {
       t.equal(res.statusCode, 200)

@@ -53,10 +49,10 @@ test('server: single info_hash scrape', function (t) {
       data = bencode.decode(data)
       t.ok(data.files)
       t.equal(Object.keys(data.files).length, 1)
-      t.ok(data.files[binaryinfoHash1])
-      t.equal(typeof data.files[binaryinfoHash1].complete, 'number')
-      t.equal(typeof data.files[binaryinfoHash1].incomplete, 'number')
-      t.equal(typeof data.files[binaryinfoHash1].downloaded, 'number')
+      t.ok(data.files[binaryInfoHash1])
+      t.equal(typeof data.files[binaryInfoHash1].complete, 'number')
+      t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number')
+      t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number')

       server.close(function () {
         t.end()

@@ -84,8 +80,8 @@ test('server: multiple info_hash scrape', function (t) {
   var scrapeUrl = 'http://127.0.0.1:' + port + '/scrape'

   server.once('listening', function () {
-    var url = scrapeUrl + '?' + querystring.stringify({
-      info_hash: [ encodedInfoHash1, encodedInfoHash2 ]
+    var url = scrapeUrl + '?' + common.querystringStringify({
+      info_hash: [ binaryInfoHash1, binaryInfoHash2 ]
     })
     http.get(url, function (res) {
       t.equal(res.statusCode, 200)

@@ -94,15 +90,15 @@ test('server: multiple info_hash scrape', function (t) {
       t.ok(data.files)
      t.equal(Object.keys(data.files).length, 2)

-      t.ok(data.files[binaryinfoHash1])
-      t.equal(typeof data.files[binaryinfoHash1].complete, 'number')
-      t.equal(typeof data.files[binaryinfoHash1].incomplete, 'number')
-      t.equal(typeof data.files[binaryinfoHash1].downloaded, 'number')
-      t.ok(data.files[binaryinfoHash2])
-      t.equal(typeof data.files[binaryinfoHash2].complete, 'number')
-      t.equal(typeof data.files[binaryinfoHash2].incomplete, 'number')
-      t.equal(typeof data.files[binaryinfoHash2].downloaded, 'number')
+      t.ok(data.files[binaryInfoHash1])
+      t.equal(typeof data.files[binaryInfoHash1].complete, 'number')
+      t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number')
+      t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number')
+      t.ok(data.files[binaryInfoHash2])
+      t.equal(typeof data.files[binaryInfoHash2].complete, 'number')
+      t.equal(typeof data.files[binaryInfoHash2].incomplete, 'number')
+      t.equal(typeof data.files[binaryInfoHash2].downloaded, 'number')

       server.close(function () {
         t.end()