// Tests for the tracker's scrape support: through a Client instance, through
// the static Client.scrape helper, and via raw HTTP requests to /scrape.
var bencode = require('bencode')
var Client = require('../')
var commonLib = require('../lib/common')
var commonTest = require('./common')
var extend = require('xtend')
var fixtures = require('webtorrent-fixtures')
var get = require('simple-get')
var test = require('tape')

// 20-byte peer ID, as the BitTorrent wire protocol expects
var peerId = new Buffer('01234567890123456789')
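
// Scrape a single torrent through a Client instance and assert on the
// 'scrape' event payload. serverType is 'http' or 'udp'.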
function testSingle (t, serverType) {
  var parsedTorrent = extend(fixtures.leaves.parsedTorrent)

  commonTest.createServer(t, serverType, function (server, announceUrl) {
    parsedTorrent.announce = [ announceUrl ]
    var client = new Client(peerId, 6881, parsedTorrent)

    client.on('error', function (err) {
      t.error(err)
    })
    client.on('warning', function (err) {
      t.error(err)
    })

    client.scrape()

    client.on('scrape', function (data) {
      t.equal(data.announce, announceUrl)
      t.equal(data.infoHash, parsedTorrent.infoHash)
      t.equal(typeof data.complete, 'number')
      t.equal(typeof data.incomplete, 'number')
      t.equal(typeof data.downloaded, 'number')
      client.destroy()
      server.close(function () {
        t.end()
      })
    })
  })
}

test('http: single info_hash scrape', function (t) {
  testSingle(t, 'http')
})

test('udp: single info_hash scrape', function (t) {
  testSingle(t, 'udp')
})
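
// The same checks through the static Client.scrape helper, which needs no
// long-lived Client instance.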
function clientScrapeStatic (t, serverType) {
  commonTest.createServer(t, serverType, function (server, announceUrl) {
    Client.scrape(announceUrl, fixtures.leaves.parsedTorrent.infoHash, function (err, data) {
      t.error(err)
      t.equal(data.announce, announceUrl)
      t.equal(data.infoHash, fixtures.leaves.parsedTorrent.infoHash)
      t.equal(typeof data.complete, 'number')
      t.equal(typeof data.incomplete, 'number')
      t.equal(typeof data.downloaded, 'number')
      server.close(function () {
        t.end()
      })
    })
  })
}

test('http: scrape using Client.scrape static method', function (t) {
  clientScrapeStatic(t, 'http')
})

test('udp: scrape using Client.scrape static method', function (t) {
  clientScrapeStatic(t, 'udp')
})
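
// Client.scrape also accepts an array of info hashes; results then come back
// as an object keyed by info hash.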
function clientScrapeMulti (t, serverType) {
  var infoHash1 = fixtures.leaves.parsedTorrent.infoHash
  var infoHash2 = fixtures.alice.parsedTorrent.infoHash

  commonTest.createServer(t, serverType, function (server, announceUrl) {
    Client.scrape(announceUrl, [ infoHash1, infoHash2 ], function (err, results) {
      t.error(err)

      t.equal(results[infoHash1].announce, announceUrl)
      t.equal(results[infoHash1].infoHash, infoHash1)
      t.equal(typeof results[infoHash1].complete, 'number')
      t.equal(typeof results[infoHash1].incomplete, 'number')
      t.equal(typeof results[infoHash1].downloaded, 'number')

      t.equal(results[infoHash2].announce, announceUrl)
      t.equal(results[infoHash2].infoHash, infoHash2)
      t.equal(typeof results[infoHash2].complete, 'number')
      t.equal(typeof results[infoHash2].incomplete, 'number')
      t.equal(typeof results[infoHash2].downloaded, 'number')

      server.close(function () {
        t.end()
      })
    })
  })
}

test('http: MULTI scrape using Client.scrape static method', function (t) {
  clientScrapeMulti(t, 'http')
})

test('udp: MULTI scrape using Client.scrape static method', function (t) {
  clientScrapeMulti(t, 'udp')
})
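
// Bypass the client and hit the tracker's HTTP scrape endpoint directly; the
// response body is a bencoded dictionary with per-torrent stats under 'files'.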
test('server: multiple info_hash scrape (manual http request)', function (t) {
  t.plan(13)

  // info_hash values go on the wire as binary strings, not hex
  var binaryInfoHash1 = commonLib.hexToBinary(fixtures.leaves.parsedTorrent.infoHash)
  var binaryInfoHash2 = commonLib.hexToBinary(fixtures.alice.parsedTorrent.infoHash)

  commonTest.createServer(t, 'http', function (server, announceUrl) {
    // by convention, the scrape route sits alongside the announce route
    var scrapeUrl = announceUrl.replace('/announce', '/scrape')

    var url = scrapeUrl + '?' + commonLib.querystringStringify({
      info_hash: [ binaryInfoHash1, binaryInfoHash2 ]
    })

    get.concat(url, function (err, res, data) {
      t.error(err)

      t.equal(res.statusCode, 200)

      data = bencode.decode(data)
      t.ok(data.files)
      t.equal(Object.keys(data.files).length, 2)

      t.ok(data.files[binaryInfoHash1])
      t.equal(typeof data.files[binaryInfoHash1].complete, 'number')
      t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number')
      t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number')

      t.ok(data.files[binaryInfoHash2])
      t.equal(typeof data.files[binaryInfoHash2].complete, 'number')
      t.equal(typeof data.files[binaryInfoHash2].incomplete, 'number')
      t.equal(typeof data.files[binaryInfoHash2].downloaded, 'number')

      server.close(function () { t.pass('server closed') })
    })
  })
})

test('server: all info_hash scrape (manual http request)', function (t) {
  t.plan(10)

  var parsedTorrent = extend(fixtures.leaves.parsedTorrent)
  var binaryInfoHash = commonLib.hexToBinary(parsedTorrent.infoHash)

  commonTest.createServer(t, 'http', function (server, announceUrl) {
    var scrapeUrl = announceUrl.replace('/announce', '/scrape')

    parsedTorrent.announce = [ announceUrl ]

    // announce a torrent to the tracker
    var client = new Client(peerId, 6881, parsedTorrent)
    client.on('error', function (err) { t.error(err) })
    client.on('warning', function (err) { t.error(err) })

    client.start()

    server.once('start', function () {
      // now do a scrape of everything by omitting the info_hash param
      get.concat(scrapeUrl, function (err, res, data) {
        t.error(err)

        t.equal(res.statusCode, 200)
        data = bencode.decode(data)
        t.ok(data.files)
        t.equal(Object.keys(data.files).length, 1)

        t.ok(data.files[binaryInfoHash])
        t.equal(typeof data.files[binaryInfoHash].complete, 'number')
        t.equal(typeof data.files[binaryInfoHash].incomplete, 'number')
        t.equal(typeof data.files[binaryInfoHash].downloaded, 'number')

        client.destroy(function () { t.pass('client destroyed') })
        server.close(function () { t.pass('server closed') })
      })
    })
  })
})