From b7600ec3fbb3715ab623cd3794bb216b6bda7d42 Mon Sep 17 00:00:00 2001 From: Bobby Wibowo Date: Thu, 11 Apr 2019 22:27:45 +0700 Subject: [PATCH] Restored DuckDuckGo's proxy for URL uploads Yes. This gets rid of HEAD request prior to downloading the URL. We will no longer check for Content-Length header, instead we will forcibly limit maximum download size for the download stream to the configured value. So assuming someone tries to download a bigger file, it will still try to download up to the configured size, but then fail. This will also speed up the general download process since sending HEAD request delayed the whole operation. --- config.sample.js | 42 +++++++++++++-------------------- controllers/uploadController.js | 24 ++++--------------- 2 files changed, 22 insertions(+), 44 deletions(-) diff --git a/config.sample.js b/config.sample.js index 9320566..4a30a14 100644 --- a/config.sample.js +++ b/config.sample.js @@ -139,41 +139,33 @@ module.exports = { Example: https://images.weserv.nl/?url={url-noprot} will become: - https://images.weserv.nl/?url=example.com/assets/image.png + https://images.weserv.nl/?url=example.com%2Fassets%2Fimage.png */ - urlProxy: 'https://images.weserv.nl/?url={url-noprot}', + urlProxy: 'https://proxy.duckduckgo.com/iu/?u={url}', /* - Disclaimer message that will be printed underneath the URL uploads form. - Supports HTML. Be safe though. + Disclaimer message that will be printed underneath the URL uploads form. + Supports HTML. Be safe though. */ - urlDisclaimerMessage: 'URL uploads are being proxied and compressed by images.weserv.nl. By using this feature, you agree to their Privacy Policy.', + urlDisclaimerMessage: 'URL uploads are being proxied by DuckDuckGo. The proxy can only process direct links, and generally it can only proxy images.', /* - Filter mode for URL uploads. - Can be 'blacklist', 'whitelist', or 'inherit'. - 'inherit' => inherit primary extensions filter (extensionsFilter option). 
- The rest are paired with urlExtensionsFilter option below and should be self-explanatory. - When this is not set to any of the 3 values, this will fallback to 'inherit'. + Filter mode for URL uploads. + Can be 'blacklist', 'whitelist', or 'inherit'. + 'inherit' => inherit primary extensions filter (extensionsFilter option). + The rest are paired with urlExtensionsFilter option below and should be self-explanatory. + When this is not set to any of the 3 values, this will fallback to 'inherit'. */ - urlExtensionsFilterMode: 'whitelist', + urlExtensionsFilterMode: 'inherit', /* - Mainly intended for URL proxies that only support certain extensions. - This will parse the extensions from the URLs, so URLs that do not end with - the file's extensions will always be rejected. - Queries and segments in the URLs will be bypassed. - NOTE: Can not be empty when using either 'blacklist' or 'whitelist' mode. + Mainly intended for URL proxies that only support certain extensions. + This will parse the extensions from the URLs, so URLs that do not end with + the file's extensions will always be rejected. + Queries and segments in the URLs will be bypassed. + NOTE: Can not be empty when using either 'blacklist' or 'whitelist' mode. */ - urlExtensionsFilter: [ - '.gif', - '.jpg', - '.jpeg', - '.png', - '.bmp', - '.xbm', - '.webp' - ], + urlExtensionsFilter: [], /* Scan files using ClamAV through clamd. 
diff --git a/controllers/uploadController.js b/controllers/uploadController.js index 061d0ab..8fcb4d8 100644 --- a/controllers/uploadController.js +++ b/controllers/uploadController.js @@ -264,26 +264,12 @@ uploadsController.actuallyUploadByUrl = async (req, res, user, albumid) => { .replace(/{url-noprot}/g, encodeURIComponent(url.replace(/^https?:\/\//, ''))) try { - const fetchHead = await fetch(url, { method: 'HEAD' }) - if (fetchHead.status !== 200) - return erred(`${fetchHead.status} ${fetchHead.statusText}`) - - const headers = fetchHead.headers - const size = parseInt(headers.get('content-length')) - if (isNaN(size)) - return erred('URLs with missing Content-Length HTTP header are not supported.') - - if (size > urlMaxSizeBytes) - return erred('File too large.') - - if (config.filterEmptyFile && size === 0) - return erred('Empty files are not allowed.') - - // Limit max response body size with the size reported by Content-Length - const fetchFile = await fetch(url, { size }) + // Limit max response body size with maximum allowed size + const fetchFile = await fetch(url, { size: urlMaxSizeBytes }) if (fetchFile.status !== 200) - return erred(`${fetchHead.status} ${fetchHead.statusText}`) + return erred(`${fetchFile.status} ${fetchFile.statusText}`) + const headers = fetchFile.headers const file = await fetchFile.buffer() const length = uploadsController.getFileNameLength(req) @@ -297,7 +283,7 @@ uploadsController.actuallyUploadByUrl = async (req, res, user, albumid) => { filename: name, originalname: original, mimetype: headers.get('content-type').split(';')[0] || '', - size, + size: file.byteLength, albumid }