Mirror of https://github.com/BobbyWibowo/lolisafe.git, synced 2025-01-18 17:21:33 +00:00
feat: fetch() with AbortController for timeout

To be used with URL uploads. Please consult the comments in the source files.

This adds a new dependency: abort-controller.
parent 234fd0c0a5
commit 93dc820368
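The idea in this commit: race each node-fetch call against a timer that aborts it through an AbortController, so the timeout covers the whole request regardless of redirects. A minimal standalone sketch of that pattern (names such as fetchWithTimeout are illustrative only, not taken from the codebase):

const fetch = require('node-fetch')
const AbortController = require('abort-controller')

// Abort the request once `ms` milliseconds have elapsed, redirects included.
const fetchWithTimeout = (url, ms, options = {}) => {
  const controller = new AbortController()
  const timer = setTimeout(() => controller.abort(), ms)
  return fetch(url, { ...options, signal: controller.signal })
    .finally(() => clearTimeout(timer))
}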
@@ -1,6 +1,5 @@
 const blake3 = require('blake3')
 const contentDisposition = require('content-disposition')
-const fetch = require('node-fetch')
 const fs = require('fs')
 const path = require('path')
 const randomstring = require('randomstring')
@@ -31,13 +30,23 @@ const fileIdentifierLengthChangeable = !config.uploads.fileIdentifierLength.forc
   typeof config.uploads.fileIdentifierLength.min === 'number' &&
   typeof config.uploads.fileIdentifierLength.max === 'number'

+// Regular file uploads
 const maxSize = parseInt(config.uploads.maxSize)
 const maxSizeBytes = maxSize * 1e6

+// URL uploads
 const urlMaxSize = parseInt(config.uploads.urlMaxSize)
 const urlMaxSizeBytes = urlMaxSize * 1e6

+// Max files allowed in a single multiform POST request
 const maxFilesPerUpload = 20

+// URL uploads timeout for fetch() instances
+// Please be aware that uWebSockets.js has a hard-coded timeout of 10s of no activity,
+// so letting fetch() run for more than 10s may cause connection to uploaders to drop early,
+// thus preventing lolisafe from responding to uploaders about their URL uploads.
+const urlFetchTimeout = 10 * 1000 // 10 seconds
+
 const chunkedUploads = config.uploads.chunkSize &&
   typeof config.uploads.chunkSize === 'object' &&
   config.uploads.chunkSize.default
@@ -565,12 +574,13 @@ self.actuallyUploadUrls = async (req, res, data = {}) => {
    }

    // Try to determine size early via Content-Length header,
-    // but continue anyway if it isn't a valid number
+    // but continue anyway if it isn't a valid number (some servers don't provide them)
+    const headStart = Date.now()
    try {
-      const head = await fetch(url, {
+      const head = await utils.fetch(url, {
        method: 'HEAD',
        size: urlMaxSizeBytes, // limit max response body size
-        timeout: 10 * 1000 // 10 seconds
+        timeout: urlFetchTimeout
      })

      if (head.status === 200) {
@@ -600,10 +610,13 @@ self.actuallyUploadUrls = async (req, res, data = {}) => {
      writeStream = fs.createWriteStream(file.path)
      hashStream = enableHashing && blake3.createHash()

-      const fetchFile = await fetch(url, {
+      // Reduce GET timeout by time already spent for HEAD request
+      const _timeout = urlFetchTimeout - (Date.now() - headStart)
+
+      const fetchFile = await utils.fetch(url, {
        method: 'GET',
        size: urlMaxSizeBytes, // limit max response body size
-        timeout: 10 * 1000 // 10 seconds
+        timeout: _timeout
      })
        .then(res => new Promise((resolve, reject) => {
          if (res.status !== 200) {
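The HEAD and GET requests above share a single urlFetchTimeout budget: the controller notes when the HEAD request started and hands whatever time is left to the GET request. A rough illustration with made-up timings:

// Illustration only (hypothetical numbers): with a 10 s budget,
// a HEAD request that takes ~2 s leaves the GET request ~8 s.
const urlFetchTimeout = 10 * 1000
const headStart = Date.now()
// ... HEAD request runs here for ~2000 ms ...
const remaining = urlFetchTimeout - (Date.now() - headStart) // ~8000 ms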
@@ -709,6 +722,8 @@ self.actuallyUploadUrls = async (req, res, data = {}) => {
      ]
      if (suppress.some(t => t.test(errorString))) {
        throw new ClientError(errorString)
+      } else if (errorString.startsWith('AbortError:')) {
+        throw new ClientError('Fetch timed out. Try again?')
      } else {
        throw error
      }
@@ -1,4 +1,5 @@
 const { promisify } = require('util')
+const AbortController = require('abort-controller')
 const fastq = require('fastq')
 const fetch = require('node-fetch')
 const ffmpeg = require('fluent-ffmpeg')
@@ -198,6 +199,32 @@ const statsData = {
   }
 }

+// This helper function initiates fetch() with AbortController
+// signal controller to handle per-instance global timeout.
+// node-fetch's built-in timeout option resets on every redirect,
+// and thus not reliable in certain cases.
+self.fetch = (url, options = {}) => {
+  if (options.timeout === undefined) {
+    return fetch(url, options)
+  }
+
+  // Init AbortController
+  const abortController = new AbortController()
+  const timeout = setTimeout(() => {
+    abortController.abort()
+  }, options.timeout)
+
+  // Clean up options object
+  options.signal = abortController.signal
+  delete options.timeout
+
+  // Return instance with an attached Promise.finally() handler to clear timeout
+  return fetch(url, options)
+    .finally(() => {
+      clearTimeout(timeout)
+    })
+}
+
 const cloudflareAuth = config.cloudflare && config.cloudflare.zoneId &&
   (config.cloudflare.apiToken || config.cloudflare.userServiceKey ||
   (config.cloudflare.apiKey && config.cloudflare.email))
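Taken together with the controller changes above, a caller would presumably use the new helper roughly like this (a usage sketch; ClientError, urlMaxSizeBytes and urlFetchTimeout are assumed to be in scope as in the controller hunks above):

// Usage sketch: utils.fetch() behaves like node-fetch, except that a `timeout`
// option is enforced globally via AbortController instead of node-fetch's
// built-in per-connection timer.
const downloadUrl = async url => {
  try {
    return await utils.fetch(url, {
      method: 'GET',
      size: urlMaxSizeBytes, // node-fetch option: cap the response body size
      timeout: urlFetchTimeout
    })
  } catch (error) {
    // An aborted request rejects with an AbortError, which the upload
    // controller surfaces to the uploader as a ClientError.
    if (String(error).startsWith('AbortError:')) {
      throw new ClientError('Fetch timed out. Try again?')
    }
    throw error
  }
}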
@@ -34,6 +34,7 @@
     "full-upgrade": "rm -f ./yarn.lock && yarn"
   },
   "dependencies": {
+    "abort-controller": "~3.0.0",
     "bcrypt": "~5.0.1",
     "better-sqlite3": "~7.6.2",
     "blake3": "~2.1.7",
yarn.lock (12 additions)
@@ -458,6 +458,13 @@ abbrev@1:
   resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8"
   integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==

+abort-controller@~3.0.0:
+  version "3.0.0"
+  resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392"
+  integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==
+  dependencies:
+    event-target-shim "^5.0.0"
+
 accepts@^1.3.7:
   version "1.3.8"
   resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e"
@@ -2168,6 +2175,11 @@ event-emitter@^0.3.5:
     d "1"
     es5-ext "~0.10.14"

+event-target-shim@^5.0.0:
+  version "5.0.1"
+  resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789"
+  integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==
+
 expand-brackets@^2.1.4:
   version "2.1.4"
   resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622"