2020-05-28 19:52:58 +00:00
|
|
|
const blake3 = require('blake3')
|
2022-07-29 03:15:11 +00:00
|
|
|
const contentDisposition = require('content-disposition')
|
2018-01-23 20:06:30 +00:00
|
|
|
const fs = require('fs')
|
2022-09-24 02:54:49 +00:00
|
|
|
const parseDuration = require('parse-duration')
|
2018-09-01 20:37:26 +00:00
|
|
|
const path = require('path')
|
Updates (very important to read)
Client-side CSS & JS files will now be processed with Gulp.
Gulp tasks are configured in gulpfile.js file.
CSS files will be optimized with postcss-preset-env, which will
auto-add vendor prefixes and convert any parts necessary for browsers
compatibility.
Afterwards they will be minified with cssnano.
JS files will be optimized with bublé,
likewise for browsers compatibility.
Afterwards they will be minified with terser.
Unprocessed CSS & JS files will now be located at src directory, while
the processed results will be located at dist directory.
Due to bublé, the JS files should now be compatible up to IE 11
at the minimum.
Previously the safe would not work in IE 11 due to extensive usage of
template literals.
Due to that as well, JS files in src directory will now extensively use
arrow functions for my personal comfort (as they will be converted too).
The server will use the processed files at dist directory by default.
If you want to rebuild the files by your own, you can run "yarn build".
Gulp is a development dependency, so make sure you have installed all
development dependencies (i.e. NOT using "yarn install --production").
---
yarn lint -> gulp lint
yarn build -> gulp default
yarn watch -> gulp watch
yarn develop -> env NODE_ENV=development yarn watch
---
Fixed not being able to demote staff into normal users.
/api/token/verify will no longer respond with 401 HTTP error code,
unless an error occurred (which will be 500 HTTP error code).
Fixed /nojs route not displaying file's original name when a duplicate
is found on the server.
Removed is-breeze CSS class name, in favor of Bulma's is-info.
Removed custom styling from auth page, in favor of global styling.
Removed all usage of style HTML attribute in favor of CSS classes.
Renamed js/s/ to js/misc/.
Use loading spinners on dashboard's sidebar menus.
Disable all other sidebar menus when something is loading.
Changed title HTML attribute of disabled control buttons in
uploads & users list.
Hid checkboxes and WIP controls from users list.
Better error messages handling.
Especially homepage will now support CF's HTTP error codes.
Updated various icons.
Also, added fontello config file at public/libs/fontello/config.json.
This should let you edit them more easily with fontello.
Use Gatsby icon for my blog's link in homepage's footer.
A bunch of other improvements here & there.
2019-09-15 06:20:11 +00:00
|
|
|
const randomstring = require('randomstring')
|
2020-04-18 19:52:11 +00:00
|
|
|
const searchQuery = require('search-query-parser')
|
2022-09-21 01:03:28 +00:00
|
|
|
const auth = require('./authController')
|
2019-09-08 01:56:29 +00:00
|
|
|
const paths = require('./pathsController')
|
2018-10-13 11:06:58 +00:00
|
|
|
const perms = require('./permissionController')
|
2018-04-13 16:20:57 +00:00
|
|
|
const utils = require('./utilsController')
|
2021-01-08 02:44:04 +00:00
|
|
|
const ClientError = require('./utils/ClientError')
|
|
|
|
const ServerError = require('./utils/ServerError')
|
Updates (very important to read)
Client-side CSS & JS files will now be processed with Gulp.
Gulp tasks are configured in gulpfile.js file.
CSS files will be optimized with postcss-preset-env, which will
auto-add vendor prefixes and convert any parts necessary for browsers
compatibility.
Afterwards they will be minified with cssnano.
JS files will be optimized with bublé,
likewise for browsers compatibility.
Afterwards they will be minified with terser.
Unprocessed CSS & JS files will now be located at src directory, while
the processed results will be located at dist directory.
Due to bublé, the JS files should now be compatible up to IE 11
at the minimum.
Previously the safe would not work in IE 11 due to extensive usage of
template literals.
Due to that as well, JS files in src directory will now extensively use
arrow functions for my personal comfort (as they will be converted too).
The server will use the processed files at dist directory by default.
If you want to rebuild the files by your own, you can run "yarn build".
Gulp is a development dependency, so make sure you have installed all
development dependencies (i.e. NOT using "yarn install --production").
---
yarn lint -> gulp lint
yarn build -> gulp default
yarn watch -> gulp watch
yarn develop -> env NODE_ENV=development yarn watch
---
Fixed not being able to demote staff into normal users.
/api/token/verify will no longer respond with 401 HTTP error code,
unless an error occurred (which will be 500 HTTP error code).
Fixed /nojs route not displaying file's original name when a duplicate
is found on the server.
Removed is-breeze CSS class name, in favor of Bulma's is-info.
Removed custom styling from auth page, in favor of global styling.
Removed all usage of style HTML attribute in favor of CSS classes.
Renamed js/s/ to js/misc/.
Use loading spinners on dashboard's sidebar menus.
Disable all other sidebar menus when something is loading.
Changed title HTML attribute of disabled control buttons in
uploads & users list.
Hid checkboxes and WIP controls from users list.
Better error messages handling.
Especially homepage will now support CF's HTTP error codes.
Updated various icons.
Also, added fontello config file at public/libs/fontello/config.json.
This should let you edit them more easily with fontello.
Use Gatsby icon for my blog's link in homepage's footer.
A bunch of other improvements here & there.
2019-09-15 06:20:11 +00:00
|
|
|
const config = require('./../config')
|
|
|
|
const logger = require('./../logger')
|
2017-01-13 07:34:21 +00:00
|
|
|
|
2022-07-29 02:14:55 +00:00
|
|
|
/** Deprecated config options */

// Warn loudly at startup when the old cache-based identifier option is still set,
// pointing operators at its replacement. Does not abort the process.
if (config.uploads.cacheFileIdentifiers) {
  const messages = [
    'Config option "uploads.cacheFileIdentifiers" is DEPRECATED.',
    'There is now only "uploads.queryDatabaseForIdentifierMatch" for a similar behavior.'
  ]
  for (const message of messages) {
    logger.error(message)
  }
}
|
|
|
|
|
2020-09-26 22:18:42 +00:00
|
|
|
// Module-local controller object; handler functions are attached to it below
// and it is (presumably) exported at the end of the file — confirm against full file.
const self = {
  onHold: new Set() // temporarily held random upload identifiers
}
|
2017-01-13 07:34:21 +00:00
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** Preferences */

// Identifier length used when config provides no usable value.
const fileIdentifierLengthFallback = 32
// Clients may request a custom identifier length only when not forced
// and both min/max bounds are configured as numbers.
const fileIdentifierLengthChangeable = !config.uploads.fileIdentifierLength.force &&
  typeof config.uploads.fileIdentifierLength.min === 'number' &&
  typeof config.uploads.fileIdentifierLength.max === 'number'

// Regular file uploads
// NOTE: config.uploads.maxSize is expressed in MB; converted to bytes below.
const maxSize = parseInt(config.uploads.maxSize)
const maxSizeBytes = maxSize * 1e6

// URL uploads
const urlMaxSize = parseInt(config.uploads.urlMaxSize)
const urlMaxSizeBytes = urlMaxSize * 1e6

// Max files allowed in a single multiform POST request
const maxFilesPerUpload = 20

// URL uploads timeout for fetch() instances
// Please be aware that uWebSockets.js has a hard-coded timeout of 10s of no activity,
// so letting fetch() run for more than 10s may cause connection to uploaders to drop early,
// thus preventing lolisafe from responding to uploaders about their URL uploads.
const urlFetchTimeout = 10 * 1000 // 10 seconds

// Truthy (the default chunk size value) only when chunked uploads are configured.
const chunkedUploads = config.uploads.chunkSize &&
  typeof config.uploads.chunkSize === 'object' &&
  config.uploads.chunkSize.default
// Idle timeout before an unfinished chunked upload is cleaned up (default 30 min).
const chunkedUploadsTimeout = config.uploads.chunkSize.timeout || 1800000
// In-progress chunked uploads, keyed by IP-scoped UUID (see initChunks()).
const chunksData = {}
// Hard-coded min chunk size of 1 MB (e.g. 50 MB = max 50 chunks)
const maxChunksCount = maxSize
// Use fs.copyFile() instead of fs.rename() if chunks dir is NOT inside uploads dir
const chunksCopyFile = !paths.chunks.startsWith(paths.uploads)

// Truthy when an extensions filter list is configured (regular / URL uploads).
const extensionsFilter = Array.isArray(config.extensionsFilter) &&
  config.extensionsFilter.length
const urlExtensionsFilter = Array.isArray(config.uploads.urlExtensionsFilter) &&
  config.uploads.urlExtensionsFilter.length

// Only disable hashing if explicitly disabled in config file
const enableHashing = config.uploads.hash === undefined
  ? true
  : Boolean(config.uploads.hash)

const queryDatabaseForIdentifierMatch = config.uploads.queryDatabaseForIdentifierMatch ||
  config.uploads.queryDbForFileCollisions // old config name for identical behavior

// Dashboard page size, clamped to 1..100 (falls back to 25 without a dashboard config).
const uploadsPerPage = config.dashboard
  ? Math.max(Math.min(config.dashboard.uploadsPerPage || 0, 100), 1)
  : 25
2022-04-16 13:44:11 +00:00
|
|
|
/** Chunks helper class & function **/

// Per-upload bookkeeping for a chunked upload session.
// Holds the on-disk location of the partial file, the shared write/hash
// streams, and an inactivity timer that triggers cleanup.
class ChunksData {
  constructor (uuid) {
    this.uuid = uuid
    this.root = path.join(paths.chunks, this.uuid)
    this.filename = 'tmp'
    this.path = path.join(this.root, this.filename)
    this.chunks = 0
    this.writeStream = null
    this.hashStream = null
    // Immediately mark this chunked upload as currently processing
    this.processing = true
  }

  // Invoked when the inactivity timer fires: discard this session's chunks.
  onTimeout () {
    self.cleanUpChunks(this.uuid)
  }

  // (Re)arm the inactivity timer; any previously armed timer is cancelled first.
  setTimeout (delay) {
    this.clearTimeout()
    this._timeout = setTimeout(this.onTimeout.bind(this), delay)
  }

  // Cancel the inactivity timer if one is armed.
  clearTimeout () {
    if (!this._timeout) return
    clearTimeout(this._timeout)
  }
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
// Look up (or lazily create) the ChunksData session for the given UUID,
// ensure its directory and streams exist, and re-arm its inactivity timeout.
// Throws ClientError when the same session is still processing a prior chunk.
const initChunks = async uuid => {
  let data = chunksData[uuid]
  if (data === undefined) {
    data = new ChunksData(uuid)
    chunksData[uuid] = data

    // Probe for the session directory; tolerate only a missing-directory error
    const exist = await paths.access(data.root)
      .catch(err => {
        // Re-throw anything that is not an ENOENT (missing directory) error
        if (err.code !== 'ENOENT') throw err
        return false
      })
    if (!exist) {
      await paths.mkdir(data.root)
    }

    // Init write & hasher streams
    data.writeStream = fs.createWriteStream(data.path, { flags: 'a' })
    data.hashStream = enableHashing && blake3.createHash()
  } else if (data.processing) {
    // Wait for the first spawned init tasks
    throw new ClientError('Previous chunk upload is still being processed. Parallel chunked uploads is not supported.')
  }

  // Reset timeout
  data.setTimeout(chunkedUploadsTimeout)
  return data
}
|
2017-01-13 07:34:21 +00:00
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** Helper functions */
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
// Decide whether a file extension is rejected by the configured filter.
// Returns true when the upload must be refused, false otherwise.
self.isExtensionFiltered = extname => {
  // Extension-less files may be rejected outright via config
  if (!extname && config.filterNoExtension) return true

  // Apply the configured blacklist/whitelist, when one exists
  if (extname && extensionsFilter) {
    const listed = config.extensionsFilter.includes(extname.toLowerCase())
    const isWhitelist = config.extensionsFilterMode === 'whitelist'
    // Blacklist rejects listed extensions; whitelist rejects unlisted ones —
    // i.e. reject exactly when membership disagrees with the mode
    if (isWhitelist !== listed) return true
  }

  return false
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
// Parse a client-requested file identifier length.
// Returns the requested length when custom lengths are allowed and the value
// falls within the configured min/max bounds; otherwise returns the configured
// default (or the hard-coded fallback when nothing is configured).
self.parseFileIdentifierLength = fileLength => {
  // Feature not configured at all: use the module-wide fallback
  if (!config.uploads.fileIdentifierLength) return fileIdentifierLengthFallback

  // Explicit radix (the original omitted it and relied on implicit base
  // detection); Number.isNaN avoids the coercing global isNaN
  const parsed = parseInt(fileLength, 10)
  if (Number.isNaN(parsed) ||
    !fileIdentifierLengthChangeable ||
    parsed < config.uploads.fileIdentifierLength.min ||
    parsed > config.uploads.fileIdentifierLength.max) {
    return config.uploads.fileIdentifierLength.default || fileIdentifierLengthFallback
  } else {
    return parsed
  }
}
|
|
|
|
|
2022-07-29 02:14:55 +00:00
|
|
|
// Allocate a random upload identifier that is not currently in use.
// Depending on config, uniqueness is checked either against the database
// (plus an in-memory on-hold Set to guard concurrent requests) or against
// physical files on disk. Retries up to utils.idMaxTries times, then throws
// ServerError.
// @param {number} length - identifier length to generate
// @param {string} extension - file extension (including dot), used for the disk check
// @param {object} res - optional Response; held identifiers are released on its 'finish'
self.getUniqueUploadIdentifier = async (length, extension = '', res) => {
  for (let i = 0; i < utils.idMaxTries; i++) {
    const identifier = randomstring.generate(length)

    if (queryDatabaseForIdentifierMatch) {
      // If must query database for identifiers matches
      if (self.onHold.has(identifier)) {
        logger.debug(`Identifier ${identifier} is currently held by another upload (${i + 1}/${utils.idMaxTries}).`)
        continue
      }

      // Put token on-hold (wait for it to be inserted to DB)
      self.onHold.add(identifier)

      const file = await utils.db.table('files')
        .whereRaw('?? like ?', ['name', `${identifier}.%`])
        .select('id')
        .first()
      if (file) {
        self.onHold.delete(identifier)
        logger.debug(`Identifier ${identifier} is already in use (${i + 1}/${utils.idMaxTries}).`)
        continue
      }

      if (utils.devmode) {
        logger.debug(`upload.onHold: ${utils.inspect(self.onHold)}`)
      }

      // Unhold identifier once the Response has been sent
      if (res) {
        if (!res.locals.identifiers) {
          res.locals.identifiers = []
          res.once('finish', () => { self.unholdUploadIdentifiers(res) })
        }
        res.locals.identifiers.push(identifier)
      }
    } else {
      // Otherwise, check for physical files' full name matches
      try {
        const name = identifier + extension
        await paths.access(path.join(paths.uploads, name))
        logger.debug(`${name} is already in use (${i + 1}/${utils.idMaxTries}).`)
        continue
      } catch (error) {
        // Re-throw non-ENOENT error
        // FIX: this previously used bitwise '&' ("error & error.code !== 'ENOENT'"),
        // which coerced the Error object to NaN and made the condition always
        // falsy, silently swallowing every filesystem error
        if (error && error.code !== 'ENOENT') throw error
      }
    }

    // Return the random identifier only
    return identifier
  }

  throw new ServerError('Failed to allocate a unique name for the upload. Try again?')
}
|
|
|
|
|
2022-07-29 02:14:55 +00:00
|
|
|
// Release every identifier that was put on hold for this Response by
// self.getUniqueUploadIdentifier(), then drop the tracking array.
// No-op when the Response holds no identifiers.
self.unholdUploadIdentifiers = res => {
  const held = res.locals.identifiers
  if (!held) return

  held.forEach(identifier => {
    self.onHold.delete(identifier)

    if (utils.devmode) {
      logger.debug(`upload.onHold: ${utils.inspect(self.onHold)} -> ${utils.inspect(identifier)}`)
    }
  })

  delete res.locals.identifiers
}
|
|
|
|
|
2022-05-06 19:17:31 +00:00
|
|
|
// Validate the requested retention period ("age") for this user's group.
// Returns null when retention is disabled, the group default when no valid
// age was supplied, or the parsed age when the group permits it.
// Throws ClientError (403) when the user/group is not eligible.
self.assertRetentionPeriod = (user, age) => {
  if (!utils.retentions.enabled) return null

  // _ is special key for non-registered users (no auth requests)
  const group = user ? perms.group(user) : '_'
  const periods = group ? utils.retentions.periods[group] : null
  if (!periods) {
    throw new ClientError('You are not eligible for any file retention periods.', { statusCode: 403 })
  }

  let parsed = parseFloat(age)
  // NOTE: intentionally compares the raw "age" against 0 (string coercion),
  // matching the original behavior for negative inputs
  if (Number.isNaN(parsed) || age < 0) {
    parsed = utils.retentions.default[group]
  } else if (!periods.includes(parsed)) {
    throw new ClientError('You are not eligible for the specified file retention period.', { statusCode: 403 })
  }

  // A zero/falsy period means "permanent"; only allowed when the group lists 0
  if (!parsed && !periods.includes(0)) {
    throw new ClientError('Permanent uploads are not permitted.', { statusCode: 403 })
  }

  return parsed
}
|
|
|
|
|
2019-11-29 13:42:53 +00:00
|
|
|
// Resolve whether metadata tags should be stripped for this upload.
// Returns false when the feature is disabled; the configured default when
// forced or when the client sent no preference; otherwise the client's choice.
self.parseStripTags = stripTags => {
  const options = config.uploads.stripTags
  if (!options) return false

  return (options.force || stripTags === undefined)
    ? options.default
    : Boolean(parseInt(stripTags))
}
|
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
/** File uploads */
|
2022-04-16 13:44:11 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Entry point for all upload requests. Validates the request's Content-Type
// (multipart for file uploads, JSON for URL uploads; skipped for /nojs),
// enforces the private-upload group, resolves album id and retention age,
// then dispatches to the matching handler.
self.upload = async (req, res) => {
  // Assert Request type (skip for POST /nojs requests)
  let isMultipart = req.locals.nojs
  let isJson
  if (!req.locals.nojs) {
    // Multipart for regular uploads, JSON for URL uploads
    isMultipart = req.is('multipart/form-data')
    isJson = req.is('application/json')
    if (!isMultipart && !isJson) {
      throw new ClientError('Request Content-Type must be either multipart/form-data or application/json.')
    }
  }

  if (config.privateUploadGroup) {
    const permitted = req.locals.user && perms.is(req.locals.user, config.privateUploadGroup)
    if (!permitted) {
      throw new ClientError(config.privateUploadCustomResponse || 'Your usergroup is not permitted to upload new files.', { statusCode: 403 })
    }
  }

  // Album id may come from a header or a path parameter
  let albumid = parseInt(req.headers.albumid || (req.path_parameters && req.path_parameters.albumid))
  if (isNaN(albumid)) albumid = null

  const age = self.assertRetentionPeriod(req.locals.user, req.headers.age)

  if (isMultipart) {
    return self.actuallyUpload(req, res, { albumid, age })
  }

  // Parse POST body, then treat as URL upload
  req.body = await req.json()
  return self.actuallyUploadUrls(req, res, { albumid, age })
}
|
2022-07-12 06:07:13 +00:00
|
|
|
|
2022-08-04 14:59:06 +00:00
|
|
|
// Handle a multipart file upload (regular or chunked).
// Streams each incoming file field to disk (or appends to a chunked upload's
// shared streams), optionally hashing with BLAKE3 along the way, then either
// acknowledges the chunk or runs scanning/tag-stripping/DB storage and sends
// the final upload response.
// @param {object} req - Request; req.body and req.files are (re)initialized here
// @param {object} res - Response
// @param {object} data - { albumid, age } resolved by self.upload()
// @throws {ClientError} on invalid fields, filtered extensions, empty files, etc.
// @throws {ServerError} when a file's write Promise never settled
self.actuallyUpload = async (req, res, data = {}) => {
  // Init empty Request.body and Request.files
  req.body = {}
  req.files = []

  // Failure path helper: re-open every touched chunked-upload session so that
  // subsequent chunks (or a retry) are not locked out by processing=true.
  const unfreezeChunksData = async () => {
    req.files.forEach(file => {
      if (!file.chunksData) return
      file.chunksData.processing = false
    })
  }

  // Failure path helper: release held identifiers and delete partial files.
  const cleanUpFiles = async () => {
    // Unhold identifiers generated via self.getUniqueUploadIdentifier()
    self.unholdUploadIdentifiers(res)

    // Unlink temp files
    return Promise.all(req.files.map(async file => {
      if (!file.filename) return
      return utils.unlinkFile(file.filename).catch(logger.error)
    }))
  }

  await req.multipart({
    // https://github.com/mscdex/busboy/tree/v1.6.0#exports
    // This would otherwise defaults to latin1
    defParamCharset: 'utf8',
    limits: {
      fileSize: maxSizeBytes,
      // Maximum number of non-file fields.
      // Dropzone.js will add 6 extra fields for chunked uploads.
      // We don't use them for anything else.
      fields: 6,
      // Maximum number of file fields.
      // Chunked uploads still need to provide ONLY 1 file field.
      // Otherwise, only one of the files will end up being properly stored,
      // and that will also be as a chunk.
      files: maxFilesPerUpload
    }
  }, async field => {
    // Keep non-files fields in Request.body
    // Since fields get processed in sequence depending on the order at which they were defined,
    // chunked uploads data must be set before the files[] field which contain the actual file
    if (field.truncated) {
      // Re-map Dropzone chunked uploads keys so people can manually use the API without prepending 'dz'
      let name = field.name
      if (name.startsWith('dz')) {
        name = name.replace(/^dz/, '')
      }

      req.body[name] = field.value
      return
    }

    // Process files immediately and push into Request.files array
    if (field.file) {
      if (field.name !== 'files[]') {
        throw new ClientError(`Unexpected file-type field: ${field.name}`)
      }

      // Push immediately as we will only be adding props into the file object down the line
      const file = {
        albumid: data.albumid,
        age: data.age,
        originalname: field.file.name || '',
        mimetype: field.mime_type || '',
        // A chunk upload is signalled by uuid+chunkindex fields arriving
        // before this file field (see the field-ordering note above)
        isChunk: req.body.uuid !== undefined &&
          req.body.chunkindex !== undefined
      }
      req.files.push(file)

      if (file.isChunk) {
        if (!chunkedUploads) {
          throw new ClientError('Chunked uploads are disabled at the moment.')
        } else if (req.files.length > 1) {
          throw new ClientError('Chunked uploads may only be uploaded 1 chunk at a time.')
        }
      }

      file.extname = utils.extname(file.originalname)
      if (self.isExtensionFiltered(file.extname)) {
        throw new ClientError(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
      }

      if (file.isChunk) {
        // Re-map UUID property to IP-specific UUID
        const uuid = `${req.ip}_${req.body.uuid}`
        // Calling initChunks() will also reset the chunked uploads' timeout
        file.chunksData = await initChunks(uuid)
        file.filename = file.chunksData.filename
        file.path = file.chunksData.path
      } else {
        // Regular upload: allocate a fresh unique identifier for the file
        const length = self.parseFileIdentifierLength(req.headers.filelength)
        const identifier = await self.getUniqueUploadIdentifier(length, file.extname, res)
        file.filename = identifier + file.extname
        file.path = path.join(paths.uploads, file.filename)
      }

      const readStream = field.file.stream
      let writeStream
      let hashStream
      let _reject

      // Write the file into disk, and supply required props into file object
      await new Promise((resolve, reject) => {
        // Keep reference to Promise's reject function to allow unlistening events
        _reject = reject

        // Ensure this Promise's status can be asserted later
        const _resolve = () => {
          file.promised = true
          return resolve()
        }

        readStream.once('error', _reject)

        // Chunk uploads reuse the session's persistent streams; regular
        // uploads get fresh per-file streams
        if (file.isChunk) {
          writeStream = file.chunksData.writeStream
          hashStream = file.chunksData.hashStream
        } else {
          writeStream = fs.createWriteStream(file.path)
          hashStream = enableHashing && blake3.createHash()
        }

        // Re-init stream errors listeners for this Request
        writeStream.once('error', _reject)

        if (hashStream) {
          hashStream.once('error', _reject)
          // Ensure readStream will only be resumed later down the line by readStream.pipe()
          readStream.pause()
          readStream.on('data', data => {
            // .dispose() will destroy this internal component,
            // so use it as an indicator of whether the hashStream has been .dispose()'d
            if (hashStream.hash.hash) {
              hashStream.update(data)
            }
          })
        }

        if (file.isChunk) {
          // We listen for readStream's end event
          readStream.once('end', () => _resolve())
        } else {
          // We immediately listen for writeStream's finish event
          writeStream.once('finish', () => {
            file.size = writeStream.bytesWritten
            if (hashStream && hashStream.hash.hash) {
              const hash = hashStream.digest('hex')
              // Empty files get an empty hash string rather than the
              // BLAKE3 hash of zero bytes
              file.hash = file.size === 0
                ? ''
                : hash
            }
            return _resolve()
          })
        }

        // Pipe readStream to writeStream
        // Do not end writeStream when readStream finishes if it's a chunk upload
        readStream
          .pipe(writeStream, { end: !file.isChunk })
      }).catch(error => {
        // Dispose of unfinished write & hasher streams
        if (writeStream && !writeStream.destroyed) {
          writeStream.destroy()
        }
        if (hashStream && hashStream.hash.hash) {
          hashStream.dispose()
        }

        // Re-throw error
        throw error
      }).finally(() => {
        if (!file.isChunk) return
        // Unlisten streams' error event for this Request if it's a chunk upload
        utils.unlistenEmitters([writeStream, hashStream], 'error', _reject)
      })

      // file.size is not populated if a chunk upload, so ignore
      if (config.filterEmptyFile && !file.isChunk && file.size === 0) {
        throw new ClientError('Empty files are not allowed.')
      }
    }
  }).catch(error => {
    // Clean up temp files and held identifiers (do not wait)
    cleanUpFiles()
    unfreezeChunksData()

    // Response.multipart() itself may throw string errors
    if (typeof error === 'string') {
      throw new ClientError(error)
    } else {
      throw error
    }
  })

  if (!req.files.length) {
    throw new ClientError('No files.')
  }

  // If for some reason Request.multipart() resolves before a file's Promise
  // Typically caused by something hanging up longer than
  // uWebSockets.js' internal security timeout (10 seconds)
  if (req.files.some(file => file.promised !== true)) {
    // Clean up temp files and held identifiers (do not wait)
    cleanUpFiles()
    unfreezeChunksData()

    throw new ServerError()
  }

  // If chunked uploads is enabled and the uploaded file is a chunk, then just say that it was a success
  // NOTE: We loop through Request.files for clarity,
  // but we will actually have already rejected the Request
  // if it has more than 1 file while being a chunk upload
  if (req.files.some(file => file.chunksData)) {
    req.files.forEach(file => {
      file.chunksData.chunks++
      // Mark as ready to accept more chunk uploads or to finalize
      file.chunksData.processing = false
    })
    return res.json({ success: true })
  }

  // If POST /nojs requests, additionally attempt to parse token from form input
  if (req.locals.nojs) {
    await new Promise((resolve, reject) => {
      auth.optionalUser(req, res, error => {
        if (error) return reject(error)
        return resolve()
      }, {
        token: req.body.token
      })
    })
  }

  const filesData = req.files

  // Virus-scan the finished files when a scanner instance is configured
  if (utils.scan.instance) {
    const scanResult = await self.scanFiles(req, filesData)
    if (scanResult) {
      throw new ClientError(scanResult)
    }
  }

  await self.stripTags(req, filesData)

  const result = await self.storeFilesToDb(req, res, filesData)
  return self.sendUploadResponse(req, res, result)
}
|
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** URL uploads */
|
|
|
|
|
2022-08-04 14:59:06 +00:00
|
|
|
/**
 * Handle upload-by-URLs requests: download each remote URL to a temporary
 * file on disk, validate its size and extension, then persist the results
 * to the database and send the standard upload response.
 * @param {object} req - request; reads req.body.urls and req.headers.filelength
 * @param {object} res - response; eventually receives the upload JSON payload
 * @param {object} [data] - per-request props applied to every file (albumid, age)
 * @returns {Promise} resolves with self.sendUploadResponse()'s result
 * @throws {ClientError} when the feature is disabled, input is malformed,
 *   a file is empty/oversized, or its extension is filtered
 * @throws {ServerError} when the remote server responds with a non-200 status
 */
self.actuallyUploadUrls = async (req, res, data = {}) => {
  if (!config.uploads.urlMaxSize) {
    throw new ClientError('Upload by URLs is disabled at the moment.', { statusCode: 403 })
  }

  const urls = req.body.urls
  if (!urls || !(urls instanceof Array)) {
    throw new ClientError('Missing "urls" property (array).')
  }

  if (urls.length > maxFilesPerUpload) {
    throw new ClientError(`Maximum ${maxFilesPerUpload} URLs at a time.`)
  }

  // Shared size guard: rejects empty files (when configured) and anything
  // above the URL-upload size ceiling. isContentLength only changes the
  // error message wording (header-reported vs. actually-written size).
  const assertSize = (size, isContentLength = false) => {
    if (config.filterEmptyFile && size === 0) {
      throw new ClientError('Empty files are not allowed.')
    } else if (size > urlMaxSizeBytes) {
      if (isContentLength) {
        throw new ClientError(`File too large. Content-Length header reports file is bigger than ${urlMaxSize} MB.`)
      } else {
        throw new ClientError(`File too large. File is bigger than ${urlMaxSize} MB.`)
      }
    }
  }

  const filesData = []

  await Promise.all(urls.map(async url => {
    // Push immediately as we will only be adding props into the file object down the line
    const file = {
      url,
      albumid: data.albumid,
      age: data.age
    }
    filesData.push(file)

    // Optionally route the fetch through a configured proxy URL template
    if (config.uploads.urlProxy) {
      url = config.uploads.urlProxy
        .replace(/{url}/g, encodeURIComponent(url))
        .replace(/{url-noprot}/g, encodeURIComponent(url.replace(/^https?:\/\//, '')))
    }

    // Try to determine size early via Content-Length header,
    // but continue anyway if it isn't a valid number (some servers don't provide them)
    const headStart = Date.now()
    try {
      const head = await utils.fetch(url, {
        method: 'HEAD',
        size: urlMaxSizeBytes, // limit max response body size
        timeout: urlFetchTimeout
      })

      if (head.status === 200) {
        const contentLength = parseInt(head.headers.get('content-length'))
        if (!Number.isNaN(contentLength)) {
          assertSize(contentLength, true)
        }
      }
    } catch (ex) {
      // Re-throw only if ClientError, otherwise ignore
      // (HEAD failures are best-effort; the GET below is authoritative)
      if (ex instanceof ClientError) {
        throw ex
      }
    }

    const length = self.parseFileIdentifierLength(req.headers.filelength)
    const identifier = await self.getUniqueUploadIdentifier(length, '.tmp', res)

    // Temporarily store to disk as a .tmp file
    file.filename = identifier + '.tmp'
    file.path = path.join(paths.uploads, file.filename)

    // Declared outside the promise chain so the catch handler below
    // can dispose of them if the download fails midway
    let writeStream
    let hashStream

    return Promise.resolve().then(async () => {
      writeStream = fs.createWriteStream(file.path)
      hashStream = enableHashing && blake3.createHash()

      // Reduce GET timeout by time already spent for HEAD request
      const _timeout = urlFetchTimeout - (Date.now() - headStart)

      // Skip early if HEAD fetch took too long
      if (_timeout <= 0) {
        throw new ClientError('Fetch timed out. Try again?')
      }

      const fetchFile = await utils.fetch(url, {
        method: 'GET',
        size: urlMaxSizeBytes, // limit max response body size
        timeout: _timeout
      })
        .then(res => new Promise((resolve, reject) => {
          // Non-200 responses skip streaming; status handled after this block
          if (res.status !== 200) {
            return resolve(res)
          }

          writeStream.once('error', reject)
          res.body.once('error', reject)

          // Hash the body as it streams so no second disk read is needed
          if (hashStream) {
            hashStream.once('error', reject)
            res.body.on('data', d => hashStream.update(d))
          }

          res.body.pipe(writeStream)
          writeStream.once('finish', () => resolve(res))
        }))

      if (fetchFile.status !== 200) {
        throw new ServerError(`${fetchFile.status} ${fetchFile.statusText}`)
      }

      // Re-test size via actual bytes written to physical file
      assertSize(writeStream.bytesWritten)

      // Try to determine filename from Content-Disposition header if available
      const contentDispositionHeader = fetchFile.headers.get('content-disposition')
      if (contentDispositionHeader) {
        const parsed = contentDisposition.parse(contentDispositionHeader)
        if (parsed && parsed.parameters) {
          file.originalname = parsed.parameters.filename
        }
      }

      // Fall back to the URL path's basename, stripped of query/fragment
      if (!file.originalname) {
        file.originalname = path.basename(url).split(/[?#]/)[0]
      }

      file.extname = utils.extname(file.originalname)

      // Extensions filter (URL uploads may have their own filter list/mode,
      // otherwise fall back to the global extension filter)
      let filtered = false
      if (urlExtensionsFilter && ['blacklist', 'whitelist'].includes(config.uploads.urlExtensionsFilterMode)) {
        const match = config.uploads.urlExtensionsFilter.includes(file.extname.toLowerCase())
        const whitelist = config.uploads.urlExtensionsFilterMode === 'whitelist'
        filtered = ((!whitelist && match) || (whitelist && !match))
      } else {
        filtered = self.isExtensionFiltered(file.extname)
      }

      if (filtered) {
        throw new ClientError(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
      }

      // Generate a new filename with actual extname
      // Also generate a new random identifier if required
      const _identifier = queryDatabaseForIdentifierMatch
        ? identifier
        : await self.getUniqueUploadIdentifier(length, file.extname, res)
      const _name = _identifier + file.extname

      // Move .tmp file to the new filename
      const destination = path.join(paths.uploads, _name)
      await paths.rename(file.path, destination)

      // Then update the props with renewed information
      file.filename = _name
      file.path = destination

      // Finalize other file props
      const contentType = fetchFile.headers.get('content-type')
      file.mimetype = contentType ? contentType.split(';')[0] : 'application/octet-stream'
      file.size = writeStream.bytesWritten
      file.hash = hashStream
        ? hashStream.digest('hex')
        : null
    }).catch(err => {
      // Dispose of unfinished write & hasher streams
      if (writeStream && !writeStream.destroyed) {
        writeStream.destroy()
      }
      // hashStream.hash.hash probes blake3's internal handle to see whether
      // the hasher is still live (not yet digested/disposed)
      if (hashStream && hashStream.hash.hash) {
        hashStream.dispose()
      }

      // Re-throw errors
      throw err
    })
  })).catch(async error => {
    // Unlink temp files (do not wait)
    if (filesData.length) {
      Promise.all(filesData.map(async file => {
        if (!file.filename) return
        return utils.unlinkFile(file.filename).catch(logger.error)
      }))
    }

    // Re-throw suppressed errors as ClientError, otherwise as-is
    const errorString = error.toString()
    const suppress = [
      / network timeout at:/,
      / over limit:/
    ]
    if (suppress.some(t => t.test(errorString))) {
      throw new ClientError(errorString)
    } else if (errorString.startsWith('AbortError:')) {
      throw new ClientError('Fetch timed out. Try again?')
    } else {
      throw error
    }
  })

  // Optional virus scan before anything is committed to the database
  if (utils.scan.instance) {
    const scanResult = await self.scanFiles(req, filesData)
    if (scanResult) {
      throw new ClientError(scanResult)
    }
  }

  const result = await self.storeFilesToDb(req, res, filesData)
  return self.sendUploadResponse(req, res, result)
}
|
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** Chunk uploads */
|
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
/**
 * Entry point for finalizing chunked uploads.
 * Validates the request shape, namespaces each chunk UUID by the uploader's
 * IP, then delegates to self.actuallyFinishChunks(), cleaning up any
 * leftover chunk data if that delegation fails.
 * @param {object} req - request; reads req.body.files and req.ip
 * @param {object} res - response; passed through to the finalizer
 * @returns {Promise} resolves with the standard upload response
 * @throws {ClientError} when chunked uploads are disabled or input is malformed
 */
self.finishChunks = async (req, res) => {
  if (!chunkedUploads) {
    throw new ClientError('Chunked upload is disabled.', { statusCode: 403 })
  }

  const files = req.body.files
  if (!Array.isArray(files) || !files.length) {
    throw new ClientError('Bad request.')
  }

  // Prefix each UUID with the uploader's IP so clients cannot reference
  // chunk data belonging to other uploaders
  for (const file of files) {
    file.uuid = `${req.ip}_${file.uuid}`
  }

  return self.actuallyFinishChunks(req, res, files)
    .catch(error => {
      // Fire-and-forget cleanup of any chunk data still held in memory/disk
      Promise.all(files.map(async file => {
        if (file.uuid && chunksData[file.uuid]) {
          return self.cleanUpChunks(file.uuid).catch(logger.error)
        }
      }))
      // Propagate the original failure to the caller
      throw error
    })
}
|
|
|
|
|
2022-08-04 14:59:06 +00:00
|
|
|
/**
 * Finalize one or more chunked uploads: conclude the per-upload write/hash
 * streams, validate counts and sizes, move the assembled tmp file into the
 * uploads directory, then scan/strip/store like a regular upload.
 * @param {object} req - request; req.locals.user is consulted for retention
 * @param {object} res - response; forwarded to identifier generation and the final response
 * @param {object[]} files - client-reported chunk descriptors (uuid already IP-prefixed by the caller)
 * @returns {Promise} resolves with self.sendUploadResponse()'s result
 * @throws {ClientError} on unknown/expired UUID, in-flight chunk, bad counts,
 *   filtered extension, or size mismatches
 */
self.actuallyFinishChunks = async (req, res, files) => {
  const filesData = []
  await Promise.all(files.map(async file => {
    if (!file.uuid || typeof chunksData[file.uuid] === 'undefined') {
      throw new ClientError('Invalid file UUID, or chunks data had already timed out. Try again?')
    }

    if (chunksData[file.uuid].processing) {
      throw new ClientError('Previous chunk upload is still being processed. Try again?')
    }

    // Suspend timeout
    // If the chunk errors out there, it will be immediately cleaned up anyway
    chunksData[file.uuid].clearTimeout()

    // Conclude write and hasher streams
    // NOTE(review): bytesWritten is read right after end(); this assumes all
    // chunk data was already flushed by prior chunk requests — confirm
    chunksData[file.uuid].writeStream.end()
    const bytesWritten = chunksData[file.uuid].writeStream.bytesWritten
    const hash = chunksData[file.uuid].hashStream
      ? chunksData[file.uuid].hashStream.digest('hex')
      : null

    // A "chunked" upload of fewer than 2 chunks is invalid by definition
    if (chunksData[file.uuid].chunks < 2 || chunksData[file.uuid].chunks > maxChunksCount) {
      throw new ClientError('Invalid chunks count.')
    }

    const extname = typeof file.original === 'string' ? utils.extname(file.original) : ''
    if (self.isExtensionFiltered(extname)) {
      throw new ClientError(`${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)
    }

    const age = self.assertRetentionPeriod(req.locals.user, file.age)

    // Prefer client-reported total size if present, but require it to match
    // the number of bytes actually written by the chunk stream
    let size = file.size
    if (size === undefined) {
      size = bytesWritten
    } else if (size !== bytesWritten) {
      // If client reports actual total size, confirm match
      throw new ClientError(`Written bytes (${bytesWritten}) does not match actual size reported by client (${size}).`)
    }

    if (config.filterEmptyFile && size === 0) {
      throw new ClientError('Empty files are not allowed.')
    } else if (size > maxSizeBytes) {
      throw new ClientError(`File too large. Chunks are bigger than ${maxSize} MB.`)
    }

    const tmpfile = path.join(chunksData[file.uuid].root, chunksData[file.uuid].filename)

    // Double-check file size
    const lstat = await paths.lstat(tmpfile)
    if (lstat.size !== size) {
      throw new ClientError(`Resulting physical file size (${lstat.size}) does not match expected size (${size}).`)
    }

    // Generate name
    const length = self.parseFileIdentifierLength(file.filelength)
    const identifier = await self.getUniqueUploadIdentifier(length, extname, res)
    const name = identifier + extname

    // Move tmp file to final destination
    // For fs.copyFile(), tmpfile will eventually be unlinked by self.cleanUpChunks()
    const destination = path.join(paths.uploads, name)
    if (chunksCopyFile) {
      await paths.copyFile(tmpfile, destination)
    } else {
      await paths.rename(tmpfile, destination)
    }

    // Continue even when encountering errors
    await self.cleanUpChunks(file.uuid).catch(logger.error)

    // Non-numeric album ids are normalized to null (no album)
    let albumid = parseInt(file.albumid)
    if (isNaN(albumid)) {
      albumid = null
    }

    filesData.push({
      filename: name,
      originalname: file.original || '',
      extname,
      mimetype: file.type || '',
      path: destination,
      size,
      hash,
      albumid,
      age
    })
  }))

  // Optional virus scan of the assembled files
  if (utils.scan.instance) {
    const scanResult = await self.scanFiles(req, filesData)
    if (scanResult) {
      throw new ClientError(scanResult)
    }
  }

  await self.stripTags(req, filesData)

  const result = await self.storeFilesToDb(req, res, filesData)
  return self.sendUploadResponse(req, res, result)
}
|
|
|
|
|
2022-05-08 05:01:18 +00:00
|
|
|
/**
 * Dispose of all resources held for a chunked upload: live streams,
 * the temporary file, its directory, and the in-memory bookkeeping entry.
 * @param {string} uuid - IP-prefixed chunk upload identifier (must exist in chunksData)
 * @returns {Promise<void>}
 */
self.cleanUpChunks = async uuid => {
  const entry = chunksData[uuid]

  // Tear down the write stream if it was never concluded
  if (entry.writeStream && !entry.writeStream.destroyed) {
    entry.writeStream.destroy()
  }
  // entry.hashStream.hash.hash probes blake3's internal handle:
  // truthy means the hasher is still live and must be disposed
  if (entry.hashStream && entry.hashStream.hash.hash) {
    entry.hashStream.dispose()
  }

  // Unlink the partial tmp file; a missing file (ENOENT) is not an error
  await paths.unlink(path.join(entry.root, entry.filename))
    .catch(error => {
      if (error.code !== 'ENOENT') logger.error(error)
    })

  // Remove the per-upload directory
  await paths.rmdir(entry.root)

  // Forget the in-memory bookkeeping entry
  delete chunksData[uuid]
}
|
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** Virus scanning (ClamAV) */
|
|
|
|
|
2022-08-02 09:19:57 +00:00
|
|
|
/**
 * Decide whether a user's uploads may skip virus scanning based on their
 * permission group.
 * @param {object|null} user - authenticated user, or falsy for anonymous uploads
 * @param {string|string[]} filenames - filename(s) being considered (used for logging only)
 * @returns {boolean} true when the user's group grants a scan bypass
 *
 * Fix: the original logged "Skipped, uploaded by ..." BEFORE evaluating
 * perms.is(), so the debug log claimed a bypass even for users whose group
 * did not actually grant one. The message is now emitted only when the
 * bypass really applies.
 */
self.assertScanUserBypass = (user, filenames) => {
  if (!user || !utils.scan.groupBypass) {
    return false
  }

  if (!Array.isArray(filenames)) {
    filenames = [filenames]
  }

  const bypassed = perms.is(user, utils.scan.groupBypass)
  if (bypassed) {
    logger.debug(`[ClamAV]: ${filenames.join(', ')}: Skipped, uploaded by ${user.username} (${utils.scan.groupBypass})`)
  }
  return bypassed
}
|
|
|
|
|
2022-08-02 09:19:57 +00:00
|
|
|
/**
 * Decide whether a single file may skip virus scanning, based on either
 * a whitelisted extension or a size above the configured scan ceiling.
 * @param {object} data - file descriptor; requires .filename, optionally .extname and .size
 * @returns {boolean} true when scanning should be skipped for this file
 */
self.assertScanFileBypass = data => {
  if (typeof data !== 'object' || !data.filename) {
    return false
  }

  const extname = data.extname || utils.extname(data.filename)

  // Whitelisted extensions never get scanned
  const whitelisted = utils.scan.whitelistExtensions &&
    utils.scan.whitelistExtensions.includes(extname)
  if (whitelisted) {
    logger.debug(`[ClamAV]: ${data.filename}: Skipped, extension whitelisted`)
    return true
  }

  // Files beyond the configured max scan size are skipped as well
  const oversized = utils.scan.maxSize &&
    data.size !== undefined &&
    data.size > utils.scan.maxSize
  if (oversized) {
    logger.debug(`[ClamAV]: ${data.filename}: Skipped, size ${data.size} > ${utils.scan.maxSize}`)
    return true
  }

  return false
}
|
|
|
|
|
2022-08-04 14:59:06 +00:00
|
|
|
/**
 * Scan uploaded files with ClamAV, honoring per-user and per-file bypasses.
 * On any positive/failed result, the temp files are unlinked (fire-and-forget).
 * @param {object} req - request; req.locals.user is checked for a group bypass
 * @param {object[]} filesData - file descriptors with .filename and .path
 * @returns {Promise<string|false|undefined>} a human-readable error message when
 *   a threat was found / a file could not be scanned / ClamAV errored;
 *   falsy when everything is clean or scanning was bypassed
 */
self.scanFiles = async (req, filesData) => {
  const filenames = filesData.map(file => file.filename)
  // Users in the bypass group skip scanning entirely
  if (self.assertScanUserBypass(req.locals.user, filenames)) {
    return false
  }

  const foundThreats = []
  const unableToScan = []
  const result = await Promise.all(filesData.map(async file => {
    // Per-file bypass (whitelisted extension or oversize)
    if (self.assertScanFileBypass(file)) return

    logger.debug(`[ClamAV]: ${file.filename}: Scanning\u2026`)
    const response = await utils.scan.instance.isInfected(file.path)
    if (response.isInfected) {
      logger.log(`[ClamAV]: ${file.filename}: ${response.viruses.join(', ')}`)
      foundThreats.push(...response.viruses)
    } else if (response.isInfected === null) {
      // isInfected === null means the scanner could not determine a result
      logger.log(`[ClamAV]: ${file.filename}: Unable to scan`)
      unableToScan.push(file.filename)
    } else {
      logger.debug(`[ClamAV]: ${file.filename}: File is clean`)
    }
  })).then(() => {
    // Summarize into a single user-facing message (first item + "and more")
    if (foundThreats.length) {
      const more = foundThreats.length > 1
      return `Threat${more ? 's' : ''} detected: ${foundThreats[0]}${more ? ', and more' : ''}.`
    } else if (unableToScan.length) {
      const more = unableToScan.length > 1
      return `Unable to scan: ${unableToScan[0]}${more ? ', and more' : ''}.`
    }
  }).catch(error => {
    logger.error(`[ClamAV]: ${filenames.join(', ')}: ${error.toString()}`)
    return 'An unexpected error occurred with ClamAV, please contact the site owner.'
  })

  if (result) {
    // Unlink temp files (do not wait)
    Promise.all(filesData.map(async file =>
      utils.unlinkFile(file.filename).catch(logger.error)
    ))
  }

  return result
}
|
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** Strip tags (EXIF, etc.) */
|
|
|
|
|
2022-07-14 09:35:06 +00:00
|
|
|
/**
 * Strip metadata tags (EXIF, etc.) from uploaded files when the client
 * requested it via the striptags header. On failure, the temp files are
 * unlinked (fire-and-forget) and the error is re-thrown.
 * @param {object} req - request; reads req.headers.striptags
 * @param {object[]} filesData - file descriptors with .filename and .extname
 * @returns {Promise<void>}
 */
self.stripTags = async (req, filesData) => {
  // No-op unless the client explicitly opted in
  if (!self.parseStripTags(req.headers.striptags)) return

  try {
    const jobs = filesData.map(file =>
      utils.stripTags(file.filename, file.extname)
    )
    await Promise.all(jobs)
  } catch (error) {
    // Unlink temp files without waiting for the result
    Promise.all(filesData.map(file =>
      utils.unlinkFile(file.filename).catch(logger.error)
    ))

    // Surface the failure to the caller
    throw error
  }
}
|
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** Database functions */
|
|
|
|
|
2022-08-04 14:59:06 +00:00
|
|
|
/**
 * Persist uploaded files' metadata into the database.
 * When hashing is enabled, de-duplicates per-owner by (hash, size) and
 * unlinks the fresh physical file in favor of the existing record.
 * Validates album ownership, inserts new rows in a transaction, and bumps
 * the affected albums' editedAt timestamps.
 * @param {object} req - request; req.locals.user scopes ownership, req.ip may be stored
 * @param {object} res - response (unused here; kept for interface symmetry)
 * @param {object[]} filesData - finalized file descriptors
 * @returns {Promise<object[]>} newly inserted rows followed by matched duplicates
 */
self.storeFilesToDb = async (req, res, filesData) => {
  const files = []
  const exists = []
  const albumids = []

  await Promise.all(filesData.map(async file => {
    if (enableHashing) {
      // Check if the file exists by checking its hash and size
      // (scoped to the same owner: this user's rows, or NULL-owner rows for anonymous)
      const dbFile = await utils.db.table('files')
        .where(function () {
          if (req.locals.user) {
            this.where('userid', req.locals.user.id)
          } else {
            this.whereNull('userid')
          }
        })
        .where({
          hash: file.hash,
          size: String(file.size)
        })
        // Select expirydate to display expiration date of existing files as well
        .select('name', 'expirydate')
        .first()

      if (dbFile) {
        // Continue even when encountering errors
        await utils.unlinkFile(file.filename).catch(logger.error)
        logger.debug(`Unlinked ${file.filename} since a duplicate named ${dbFile.name} exists`)

        // If on /nojs route, append original name reported by client,
        // instead of the actual original name from database
        if (req.path === '/nojs') {
          dbFile.original = file.originalname
        }

        exists.push(dbFile)
        return
      }
    }

    const timestamp = Math.floor(Date.now() / 1000)
    const data = {
      name: file.filename,
      original: file.originalname,
      type: file.mimetype,
      size: String(file.size),
      hash: file.hash,
      // Only disable if explicitly set to false in config
      ip: config.uploads.storeIP !== false ? req.ip : null,
      timestamp
    }

    // Ownership and album assignment only apply to authenticated uploads
    if (req.locals.user) {
      data.userid = req.locals.user.id
      data.albumid = file.albumid
      if (data.albumid !== null && !albumids.includes(data.albumid)) {
        albumids.push(data.albumid)
      }
    }

    if (file.age) {
      data.expirydate = data.timestamp + (file.age * 3600) // Hours to seconds
    }

    files.push(data)

    // Generate thumbs, but do not wait
    if (utils.mayGenerateThumb(file.extname)) {
      utils.generateThumbs(file.filename, file.extname, true).catch(logger.error)
    }
  }))

  if (files.length) {
    // albumids should be empty for non-registered users (no auth requests)
    let authorizedIds = []
    if (albumids.length) {
      authorizedIds = await utils.db.table('albums')
        .where({ userid: req.locals.user.id })
        .whereIn('id', albumids)
        .select('id')
        .then(rows => rows.map(row => row.id))

      // Remove albumid if the user does not own the album
      for (const file of files) {
        if (file.albumid !== null && !authorizedIds.includes(file.albumid)) {
          file.albumid = null
        }
      }
    }

    await utils.db.transaction(async trx => {
      // Insert new files to DB
      await trx('files')
        .insert(files)
      utils.invalidateStatsCache('uploads')

      // Update albums' timestamp
      if (authorizedIds.length) {
        await trx('albums')
          .whereIn('id', authorizedIds)
          .update('editedAt', Math.floor(Date.now() / 1000))
        utils.deleteStoredAlbumRenders(authorizedIds)
      }
    })
  }

  return [...files, ...exists]
}
|
2018-04-04 17:38:15 +00:00
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** Final response */

/**
 * Sends the final JSON response for a finished upload request.
 * @param {object} req - Incoming request (used for route path and auth state).
 * @param {object} res - Response object; receives the JSON payload.
 * @param {Array<object>} result - Stored file records to report back.
 * @returns {Promise<object>} The JSON response ({ success, files }).
 */
self.sendUploadResponse = async (req, res, result) => {
  // Build one payload entry per processed file
  const files = result.map(file => {
    const entry = {
      name: file.name,
      url: `${utils.conf.domain ? `${utils.conf.domain}/` : ''}${file.name}`
    }

    // Temporary uploads also report their expiry date
    if (file.expirydate) {
      entry.expirydate = file.expirydate
    }

    // The /nojs route additionally shows the file's original name
    if (req.path === '/nojs') {
      entry.original = file.original
    }

    // Authenticated uploads get a delete URL (intended for ShareX and its derivatives);
    // the homepage uploader will not use this (dashboard is used instead)
    if (req.locals.user) {
      entry.deleteUrl = `${utils.conf.homeDomain || ''}/file/${file.name}?delete`
    }

    return entry
  })

  // Send response
  return res.json({ success: true, files })
}
|
2017-03-17 00:53:29 +00:00
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** Delete uploads */

/**
 * Legacy single-delete API used by lolisafe v3's frontend.
 * Re-maps Request.body into the shape expected by .bulkDelete(), then
 * delegates to it (this fork's frontend calls .bulkDelete() directly).
 * @param {object} req - Incoming request; req.body.id identifies the upload.
 * @param {object} res - Response object, passed through to .bulkDelete().
 * @returns {Promise<object>} Whatever .bulkDelete() responds with.
 */
self.delete = async (req, res) => {
  // Re-map Request.body for .bulkDelete()
  // This is the legacy API used by lolisafe v3's frontend
  // Meanwhile this fork's frontend uses .bulkDelete() straight away
  // Explicit radix + Number.isNaN avoid the coercion pitfalls of the globals
  const id = Number.parseInt(req.body.id, 10)
  req.body = {
    _legacy: true,
    field: 'id',
    // undefined (instead of an empty array) lets .bulkDelete() reject the request
    values: Number.isNaN(id) ? undefined : [id]
  }

  return self.bulkDelete(req, res)
}
|
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
/**
 * Deletes multiple uploads matched by a database column.
 * @param {object} req - req.body.field (column name, defaults to 'id') and
 *   req.body.values (non-empty array of values to match) drive the deletion.
 * @param {object} res - Response object; receives { success, failed }.
 * @returns {Promise<object>} JSON response; `failed` lists values that could not be deleted.
 * @throws {ClientError} When req.body.values is missing, not an array, or empty.
 */
self.bulkDelete = async (req, res) => {
  // Column to match against; primary key by default
  const field = req.body.field || 'id'
  const { values } = req.body

  // A non-empty array of values is mandatory
  if (!Array.isArray(values) || values.length === 0) {
    throw new ClientError('No array of files specified.')
  }

  // Returns the subset of values whose deletion failed
  const failed = await utils.bulkDeleteFromDb(field, values, req.locals.user)

  return res.json({ success: true, failed })
}
|
2017-10-04 00:13:38 +00:00
|
|
|
|
2022-04-16 13:44:11 +00:00
|
|
|
/** List uploads */
|
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
self.list = async (req, res) => {
|
|
|
|
const all = req.headers.all === '1'
|
|
|
|
const filters = req.headers.filters
|
|
|
|
const minoffset = Number(req.headers.minoffset) || 0
|
2022-08-04 14:59:06 +00:00
|
|
|
const ismoderator = perms.is(req.locals.user, 'moderator')
|
|
|
|
if (all && !ismoderator) {
|
|
|
|
return res.status(403).end()
|
|
|
|
}
|
2021-01-08 02:44:04 +00:00
|
|
|
|
2022-07-21 19:03:59 +00:00
|
|
|
const albumid = req.path_parameters && Number(req.path_parameters.albumid)
|
2022-07-10 12:46:25 +00:00
|
|
|
const basedomain = utils.conf.domain
|
|
|
|
|
2022-08-01 08:20:14 +00:00
|
|
|
// Thresholds for regular users (usergroups lower than moderator)
|
2022-07-10 12:46:25 +00:00
|
|
|
const MAX_WILDCARDS_IN_KEY = 2
|
|
|
|
const MAX_TEXT_QUERIES = 3 // non-keyed keywords
|
2022-08-01 08:21:23 +00:00
|
|
|
const MAX_SORT_KEYS = 2
|
2022-07-10 12:46:25 +00:00
|
|
|
const MAX_IS_KEYS = 1
|
|
|
|
|
2022-09-24 02:54:49 +00:00
|
|
|
// Timezone offset
|
|
|
|
let timezoneOffset = 0
|
|
|
|
if (minoffset !== undefined) {
|
|
|
|
timezoneOffset = 60000 * (utils.timezoneOffset - minoffset)
|
|
|
|
}
|
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
const filterObj = {
|
|
|
|
uploaders: [],
|
|
|
|
excludeUploaders: [],
|
|
|
|
queries: {
|
|
|
|
exclude: {}
|
|
|
|
},
|
|
|
|
typeIs: [
|
|
|
|
'image',
|
|
|
|
'video',
|
|
|
|
'audio'
|
|
|
|
],
|
|
|
|
flags: {}
|
|
|
|
}
|
|
|
|
|
|
|
|
const sortObj = {
|
|
|
|
// Cast columns to specific type if they are stored differently
|
|
|
|
casts: {
|
|
|
|
size: 'integer'
|
|
|
|
},
|
|
|
|
// Columns mapping
|
|
|
|
maps: {
|
|
|
|
date: 'timestamp',
|
|
|
|
expiry: 'expirydate',
|
|
|
|
originalname: 'original'
|
|
|
|
},
|
|
|
|
// Columns with which to use SQLite's NULLS LAST option
|
|
|
|
nullsLast: [
|
|
|
|
'userid',
|
2022-08-01 08:20:14 +00:00
|
|
|
'albumid',
|
2022-07-10 12:46:25 +00:00
|
|
|
'expirydate',
|
|
|
|
'ip'
|
|
|
|
],
|
|
|
|
parsed: []
|
|
|
|
}
|
|
|
|
|
|
|
|
// Parse glob wildcards into SQL wildcards
/**
 * Converts a user-supplied glob-style pattern into a SQL LIKE pattern.
 * Literal '%' and '_' are backslash-escaped; unescaped '*' and '?' become
 * '%' and '_' respectively. A pattern with no glob operators is wrapped in
 * '%…%' for a substring match.
 * @param {string} pattern - Raw search pattern from the filter string.
 * @returns {{count: number, escaped: string}} Number of glob operators found
 *   and the resulting SQL LIKE pattern.
 */
function sqlLikeParser (pattern) {
  // Escape literal SQL LIKE operators that are not already backslash-escaped
  const escaped = pattern
    .replace(/(?<!\\)%/g, '\\%')
    .replace(/(?<!\\)_/g, '\\_')

  // Count unescaped glob operators in the raw pattern
  const globs = pattern.match(/(?<!\\)(\*|\?)/g)
  if (!globs || globs.length === 0) {
    // No glob operators: assume a partial (substring) match
    return {
      count: 0,
      escaped: `%${escaped}%`
    }
  }

  // Translate glob operators into their SQL LIKE equivalents
  const translated = escaped
    .replace(/(?<!\\)\*/g, '%')
    .replace(/(?<!\\)\?/g, '_')
  return {
    count: globs.length,
    escaped: translated
  }
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
if (filters) {
|
|
|
|
const keywords = []
|
2020-05-16 15:42:08 +00:00
|
|
|
|
2022-07-21 19:03:59 +00:00
|
|
|
// Only allow filtering by 'albumid' when not listing a specific album's uploads
|
|
|
|
if (isNaN(albumid)) {
|
2022-07-21 18:44:15 +00:00
|
|
|
keywords.push('albumid')
|
|
|
|
}
|
2020-05-02 20:30:50 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Only allow filtering by 'ip' and 'user' keys when listing all uploads
|
2022-07-21 19:03:59 +00:00
|
|
|
if (all) {
|
|
|
|
keywords.push('ip', 'user')
|
|
|
|
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
const ranges = [
|
|
|
|
'date',
|
|
|
|
'expiry'
|
|
|
|
]
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
keywords.push('is', 'sort', 'orderby')
|
|
|
|
filterObj.queries = searchQuery.parse(filters, {
|
|
|
|
keywords,
|
|
|
|
ranges,
|
|
|
|
tokenize: true,
|
|
|
|
alwaysArray: true,
|
|
|
|
offsets: false
|
|
|
|
})
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Accept orderby as alternative for sort
|
|
|
|
if (filterObj.queries.orderby) {
|
|
|
|
if (!filterObj.queries.sort) filterObj.queries.sort = []
|
|
|
|
filterObj.queries.sort.push(...filterObj.queries.orderby)
|
|
|
|
delete filterObj.queries.orderby
|
|
|
|
}
|
2020-05-16 15:07:15 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// For some reason, single value won't be in Array even with 'alwaysArray' option
|
|
|
|
if (typeof filterObj.queries.exclude.text === 'string') {
|
|
|
|
filterObj.queries.exclude.text = [filterObj.queries.exclude.text]
|
|
|
|
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Text (non-keyed keywords) queries
|
|
|
|
let textQueries = 0
|
|
|
|
if (filterObj.queries.text) textQueries += filterObj.queries.text.length
|
|
|
|
if (filterObj.queries.exclude.text) textQueries += filterObj.queries.exclude.text.length
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Regular user threshold check
|
|
|
|
if (!ismoderator && textQueries > MAX_TEXT_QUERIES) {
|
|
|
|
throw new ClientError(`Users are only allowed to use ${MAX_TEXT_QUERIES} non-keyed keyword${MAX_TEXT_QUERIES === 1 ? '' : 's'} at a time.`)
|
|
|
|
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
if (filterObj.queries.text) {
|
|
|
|
for (let i = 0; i < filterObj.queries.text.length; i++) {
|
|
|
|
const result = sqlLikeParser(filterObj.queries.text[i])
|
|
|
|
if (!ismoderator && result.count > MAX_WILDCARDS_IN_KEY) {
|
|
|
|
throw new ClientError(`Users are only allowed to use ${MAX_WILDCARDS_IN_KEY} wildcard${MAX_WILDCARDS_IN_KEY === 1 ? '' : 's'} per key.`)
|
2020-10-30 18:12:09 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
filterObj.queries.text[i] = result.escaped
|
2020-05-02 19:39:24 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
if (filterObj.queries.exclude.text) {
|
|
|
|
for (let i = 0; i < filterObj.queries.exclude.text.length; i++) {
|
|
|
|
const result = sqlLikeParser(filterObj.queries.exclude.text[i])
|
|
|
|
if (!ismoderator && result.count > MAX_WILDCARDS_IN_KEY) {
|
|
|
|
throw new ClientError(`Users are only allowed to use ${MAX_WILDCARDS_IN_KEY} wildcard${MAX_WILDCARDS_IN_KEY === 1 ? '' : 's'} per key.`)
|
2020-10-30 18:12:09 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
filterObj.queries.exclude.text[i] = result.escaped
|
2020-05-02 19:39:24 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
for (const key of keywords) {
|
|
|
|
let queryIndex = -1
|
|
|
|
let excludeIndex = -1
|
2020-04-19 18:19:20 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Make sure keyword arrays only contain unique values
|
|
|
|
if (filterObj.queries[key]) {
|
|
|
|
filterObj.queries[key] = filterObj.queries[key].filter((v, i, a) => a.indexOf(v) === i)
|
|
|
|
queryIndex = filterObj.queries[key].indexOf('-')
|
|
|
|
}
|
|
|
|
if (filterObj.queries.exclude[key]) {
|
|
|
|
filterObj.queries.exclude[key] = filterObj.queries.exclude[key].filter((v, i, a) => a.indexOf(v) === i)
|
|
|
|
excludeIndex = filterObj.queries.exclude[key].indexOf('-')
|
|
|
|
}
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Flag to match NULL values
|
|
|
|
const inQuery = queryIndex !== -1
|
|
|
|
const inExclude = excludeIndex !== -1
|
|
|
|
if (inQuery || inExclude) {
|
|
|
|
// Prioritize exclude keys when both types found
|
|
|
|
filterObj.flags[`${key}Null`] = inExclude ? false : inQuery
|
|
|
|
if (inQuery) {
|
|
|
|
if (filterObj.queries[key].length === 1) {
|
|
|
|
// Delete key to avoid unexpected behavior
|
|
|
|
delete filterObj.queries[key]
|
|
|
|
} else {
|
|
|
|
filterObj.queries[key].splice(queryIndex, 1)
|
2020-10-30 18:12:09 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
|
|
|
if (inExclude) {
|
|
|
|
if (filterObj.queries.exclude[key].length === 1) {
|
|
|
|
// Delete key to avoid unexpected behavior
|
|
|
|
delete filterObj.queries.exclude[key]
|
|
|
|
} else {
|
|
|
|
filterObj.queries.exclude[key].splice(excludeIndex, 1)
|
2020-10-30 18:12:09 +00:00
|
|
|
}
|
|
|
|
}
|
2020-04-18 19:52:11 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-09-24 02:54:49 +00:00
|
|
|
/**
 * Parses a filter date string into a Date, adjusted by the closure's
 * `timezoneOffset` (requester's timezone relative to the server).
 * Accepts either a partial formatted date ([YYYY][/MM][/DD] [HH][:MM][:SS],
 * missing parts default to "now" in the requester's timezone) or a bare
 * Unix timestamp in seconds.
 * @param {string} date - Raw date string from the filter query.
 * @param {boolean} resetMs - Zero out milliseconds for stable comparisons.
 * @returns {Date|null} Parsed Date, or null when the string matches neither form.
 */
const parseDate = (date, resetMs) => {
  // [YYYY][/MM][/DD] [HH][:MM][:SS]
  // e.g. 2020/01/01 00:00:00, 2018/01/01 06, 2019/11, 12:34:00
  const formattedMatch = date.match(/^(\d{4})?(\/\d{2})?(\/\d{2})?\s?(\d{2})?(:\d{2})?(:\d{2})?$/)
  if (formattedMatch) {
    // Start from "now" shifted into the requester's timezone
    const dateObj = new Date(Date.now() + timezoneOffset)

    if (formattedMatch[1] !== undefined) {
      dateObj.setFullYear(Number(formattedMatch[1]), // full year
        formattedMatch[2] !== undefined ? (Number(formattedMatch[2].slice(1)) - 1) : 0, // month, zero-based
        formattedMatch[3] !== undefined ? Number(formattedMatch[3].slice(1)) : 1) // date
    }

    if (formattedMatch[4] !== undefined) {
      dateObj.setHours(Number(formattedMatch[4]), // hours
        formattedMatch[5] !== undefined ? Number(formattedMatch[5].slice(1)) : 0, // minutes
        formattedMatch[6] !== undefined ? Number(formattedMatch[6].slice(1)) : 0) // seconds
    }

    if (resetMs) {
      dateObj.setMilliseconds(0)
    }

    // Calculate timezone differences (shift back to server time)
    return new Date(dateObj.getTime() - timezoneOffset)
  } else if (/^\d+$/.test(date)) {
    // Unix timestamps (always assume seconds resolution)
    // Explicit radix 10 avoids legacy parseInt coercion surprises
    return new Date(Number.parseInt(date, 10) * 1000)
  }

  return null
}
|
|
|
|
|
|
|
|
/**
 * Converts a human-readable relative duration (e.g. "7d", "2 weeks") into a
 * one-sided { from, to } Unix-seconds range anchored at "now", adjusted by the
 * closure's `timezoneOffset`.
 * @param {string} operator - '<' or '>' from the filter query; '<' selects the
 *   "from" side of the range (more recent than the duration ago).
 * @param {string} duration - Duration string understood by parse-duration.
 * @param {boolean} resetMs - Zero out milliseconds for stable comparisons.
 * @param {boolean} [inverse=false] - Flip the range side and sign; intended for
 *   the "expiry" column, as it essentially has to do the opposite.
 * @returns {{from: (number|null), to: (number|null)}|null} Range in Unix
 *   seconds with exactly one side set, or null when the duration is unparsable.
 */
const parseRelativeDuration = (operator, duration, resetMs, inverse = false) => {
  let milliseconds = parseDuration(duration)
  // Type check first, then a non-coercing NaN check (parse-duration may
  // return null or NaN on unparsable input)
  if (typeof milliseconds !== 'number' || Number.isNaN(milliseconds)) {
    return null
  }

  let from = operator === '<'
  if (inverse) {
    // Intended for "expiry" column, as it essentially has to do the opposite
    from = !from
    milliseconds = -milliseconds
  }

  // "Now" in the requester's timezone, shifted back by the duration
  const dateObj = new Date(Date.now() + timezoneOffset - milliseconds)
  if (resetMs) {
    dateObj.setMilliseconds(0)
  }

  const range = { from: null, to: null }
  // Shift back to server time before converting to Unix seconds
  const offsetDateObj = new Date(dateObj.getTime() - timezoneOffset)
  if (from) {
    range.from = Math.floor(offsetDateObj / 1000)
  } else {
    range.to = Math.ceil(offsetDateObj / 1000)
  }
  return range
}
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Parse dates to timestamps
|
|
|
|
for (const range of ranges) {
|
|
|
|
if (filterObj.queries[range]) {
|
|
|
|
if (filterObj.queries[range].from) {
|
2022-09-24 02:54:49 +00:00
|
|
|
const relativeMatch = filterObj.queries[range].from.match(/^(<|>)(.*)$/)
|
|
|
|
if (relativeMatch && relativeMatch[2]) {
|
|
|
|
// Human-readable relative duration
|
|
|
|
filterObj.queries[range] = parseRelativeDuration(relativeMatch[1], relativeMatch[2], true, (range === 'expiry'))
|
|
|
|
continue
|
|
|
|
} else {
|
|
|
|
const parsed = parseDate(filterObj.queries[range].from, true)
|
|
|
|
filterObj.queries[range].from = parsed ? Math.floor(parsed / 1000) : null
|
|
|
|
}
|
2020-04-04 16:36:43 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
if (filterObj.queries[range].to) {
|
2022-09-24 02:54:49 +00:00
|
|
|
const parsed = parseDate(filterObj.queries[range].to, true)
|
2022-07-10 12:46:25 +00:00
|
|
|
filterObj.queries[range].to = parsed ? Math.ceil(parsed / 1000) : null
|
2020-04-18 19:52:11 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Query users table for user IDs
|
|
|
|
if (filterObj.queries.user || filterObj.queries.exclude.user) {
|
|
|
|
const usernames = []
|
|
|
|
if (filterObj.queries.user) {
|
|
|
|
usernames.push(...filterObj.queries.user)
|
|
|
|
}
|
|
|
|
if (filterObj.queries.exclude.user) {
|
|
|
|
usernames.push(...filterObj.queries.exclude.user)
|
|
|
|
}
|
2018-10-09 19:52:41 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
const uploaders = await utils.db.table('users')
|
|
|
|
.whereIn('username', usernames)
|
|
|
|
.select('id', 'username')
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// If no matches, or mismatched results
|
|
|
|
if (!uploaders || (uploaders.length !== usernames.length)) {
|
|
|
|
const notFound = usernames.filter(username => {
|
|
|
|
return !uploaders.find(uploader => uploader.username === username)
|
|
|
|
})
|
|
|
|
if (notFound) {
|
|
|
|
throw new ClientError(`User${notFound.length === 1 ? '' : 's'} not found: ${notFound.join(', ')}.`)
|
2020-10-30 18:12:09 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
for (const uploader of uploaders) {
|
|
|
|
if (filterObj.queries.user && filterObj.queries.user.includes(uploader.username)) {
|
|
|
|
filterObj.uploaders.push(uploader)
|
|
|
|
} else {
|
|
|
|
filterObj.excludeUploaders.push(uploader)
|
2020-10-30 18:12:09 +00:00
|
|
|
}
|
2021-01-08 02:44:04 +00:00
|
|
|
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Delete keys to avoid unexpected behavior
|
|
|
|
delete filterObj.queries.user
|
|
|
|
delete filterObj.queries.exclude.user
|
|
|
|
}
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Parse sort keys
|
|
|
|
if (filterObj.queries.sort) {
|
|
|
|
const allowed = [
|
|
|
|
'expirydate',
|
|
|
|
'id',
|
|
|
|
'name',
|
|
|
|
'original',
|
|
|
|
'size',
|
|
|
|
'timestamp'
|
|
|
|
]
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-07-21 19:03:59 +00:00
|
|
|
// Only allow sorting by 'albumid' when not listing a specific album's uploads
|
|
|
|
if (isNaN(albumid)) {
|
2022-07-21 18:44:15 +00:00
|
|
|
allowed.push('albumid')
|
|
|
|
}
|
2020-04-18 19:52:11 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Only allow sorting by 'ip' and 'userid' columns when listing all uploads
|
2022-07-21 18:44:15 +00:00
|
|
|
if (all) {
|
|
|
|
allowed.push('ip', 'userid')
|
|
|
|
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
for (const obQuery of filterObj.queries.sort) {
|
|
|
|
const tmp = obQuery.toLowerCase().split(':')
|
|
|
|
const column = sortObj.maps[tmp[0]] || tmp[0]
|
|
|
|
|
|
|
|
if (!allowed.includes(column)) {
|
|
|
|
// Alert users if using disallowed/missing columns
|
|
|
|
throw new ClientError(`Column "${column}" cannot be used for sorting.\n\nTry the following instead:\n${allowed.join(', ')}`)
|
2020-05-02 21:32:45 +00:00
|
|
|
}
|
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
sortObj.parsed.push({
|
|
|
|
column,
|
2022-09-24 01:09:27 +00:00
|
|
|
order: (tmp[1] && /^d/i.test(tmp[1])) ? 'desc' : 'asc',
|
2022-07-10 12:46:25 +00:00
|
|
|
clause: sortObj.nullsLast.includes(column) ? 'nulls last' : '',
|
|
|
|
cast: sortObj.casts[column] || null
|
|
|
|
})
|
2021-01-08 02:44:04 +00:00
|
|
|
}
|
2020-05-02 21:32:45 +00:00
|
|
|
|
2021-01-08 02:44:04 +00:00
|
|
|
// Regular user threshold check
|
2022-07-10 12:46:25 +00:00
|
|
|
if (!ismoderator && sortObj.parsed.length > MAX_SORT_KEYS) {
|
|
|
|
throw new ClientError(`Users are only allowed to use ${MAX_SORT_KEYS} sort key${MAX_SORT_KEYS === 1 ? '' : 's'} at a time.`)
|
2021-01-08 02:44:04 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
|
|
|
|
// Delete key to avoid unexpected behavior
|
|
|
|
delete filterObj.queries.sort
|
2020-10-30 18:12:09 +00:00
|
|
|
}
|
2020-04-12 09:30:33 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Parse is keys
|
|
|
|
let isKeys = 0
|
|
|
|
let isLast
|
|
|
|
if (filterObj.queries.is || filterObj.queries.exclude.is) {
|
|
|
|
for (const type of filterObj.typeIs) {
|
|
|
|
const inQuery = filterObj.queries.is && filterObj.queries.is.includes(type)
|
|
|
|
const inExclude = filterObj.queries.exclude.is && filterObj.queries.exclude.is.includes(type)
|
|
|
|
|
|
|
|
// Prioritize exclude keys when both types found
|
|
|
|
if (inQuery || inExclude) {
|
|
|
|
filterObj.flags[`is${type}`] = inExclude ? false : inQuery
|
|
|
|
if (isLast !== undefined && isLast !== filterObj.flags[`is${type}`]) {
|
|
|
|
throw new ClientError('Cannot mix inclusion and exclusion type-is keys.')
|
|
|
|
}
|
|
|
|
isKeys++
|
|
|
|
isLast = filterObj.flags[`is${type}`]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Delete keys to avoid unexpected behavior
|
|
|
|
delete filterObj.queries.is
|
|
|
|
delete filterObj.queries.exclude.is
|
|
|
|
}
|
|
|
|
|
|
|
|
// Regular user threshold check
|
|
|
|
if (!ismoderator && isKeys > MAX_IS_KEYS) {
|
|
|
|
throw new ClientError(`Users are only allowed to use ${MAX_IS_KEYS} type-is key${MAX_IS_KEYS === 1 ? '' : 's'} at a time.`)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Knex query-builder callback: `this` is rebound to the builder by knex, so
// this must stay a `function` (not an arrow). Reads `all`, `req`, `albumid`,
// `filterObj`, and `utils` from the enclosing self.list closure.
// NOTE(review): or/and grouping order is load-bearing for the generated SQL —
// do not reorder the sub-clauses.
function filter () {
  // If listing all uploads
  if (all) {
    this.where(function () {
      // Filter uploads matching any of the supplied 'user' keys and/or NULL flag
      // Prioritze exclude keys when both types found
      this.orWhere(function () {
        if (filterObj.excludeUploaders.length) {
          this.whereNotIn('userid', filterObj.excludeUploaders.map(v => v.id))
        } else if (filterObj.uploaders.length) {
          this.orWhereIn('userid', filterObj.uploaders.map(v => v.id))
        }
        // Such overbearing logic for NULL values, smh...
        // Include NULL userids when: excluding users (unless NULL explicitly
        // excluded), or including users with the NULL flag set, or the NULL
        // flag set with no user filters at all
        if ((filterObj.excludeUploaders.length && filterObj.flags.userNull !== false) ||
          (filterObj.uploaders.length && filterObj.flags.userNull) ||
          (!filterObj.excludeUploaders.length && !filterObj.uploaders.length && filterObj.flags.userNull)) {
          this.orWhereNull('userid')
        } else if (filterObj.flags.userNull === false) {
          this.whereNotNull('userid')
        }
      })

      // Filter uploads matching any of the supplied 'ip' keys and/or NULL flag
      // Same prioritization logic as above
      this.orWhere(function () {
        if (filterObj.queries.exclude.ip) {
          this.whereNotIn('ip', filterObj.queries.exclude.ip)
        } else if (filterObj.queries.ip) {
          this.orWhereIn('ip', filterObj.queries.ip)
        }
        // ... (same NULL-flag decision table as 'userid' above)
        if ((filterObj.queries.exclude.ip && filterObj.flags.ipNull !== false) ||
          (filterObj.queries.ip && filterObj.flags.ipNull) ||
          (!filterObj.queries.exclude.ip && !filterObj.queries.ip && filterObj.flags.ipNull)) {
          this.orWhereNull('ip')
        } else if (filterObj.flags.ipNull === false) {
          this.whereNotNull('ip')
        }
      })
    })
  } else {
    // If not listing all uploads, list user's uploads
    this.where('userid', req.locals.user.id)
  }

  // Then, refine using any of the supplied 'albumid' keys and/or NULL flag
  // Same prioritization logic as 'userid' and 'ip' above
  // (albumid is NaN when not listing a specific album's uploads)
  if (isNaN(albumid)) {
    this.andWhere(function () {
      if (filterObj.queries.exclude.albumid) {
        this.whereNotIn('albumid', filterObj.queries.exclude.albumid)
      } else if (filterObj.queries.albumid) {
        this.orWhereIn('albumid', filterObj.queries.albumid)
      }
      // ... (same NULL-flag decision table as 'userid' above)
      if ((filterObj.queries.exclude.albumid && filterObj.flags.albumidNull !== false) ||
        (filterObj.queries.albumid && filterObj.flags.albumidNull) ||
        (!filterObj.queries.exclude.albumid && !filterObj.queries.albumid && filterObj.flags.albumidNull)) {
        this.orWhereNull('albumid')
      } else if (filterObj.flags.albumidNull === false) {
        this.whereNotNull('albumid')
      }
    })
  } else if (!all) {
    // If not listing all uploads, list uploads from user's album
    this.andWhere('albumid', req.path_parameters.albumid)
  }

  // Then, refine using the supplied 'date' ranges
  // (from/to are Unix seconds or null after range parsing above)
  this.andWhere(function () {
    if (!filterObj.queries.date ||
      (!filterObj.queries.date.from && !filterObj.queries.date.to)) {
      return
    }
    if (typeof filterObj.queries.date.from === 'number') {
      if (typeof filterObj.queries.date.to === 'number') {
        this.andWhereBetween('timestamp', [filterObj.queries.date.from, filterObj.queries.date.to])
      } else {
        this.andWhere('timestamp', '>=', filterObj.queries.date.from)
      }
    } else {
      this.andWhere('timestamp', '<=', filterObj.queries.date.to)
    }
  })

  // Then, refine using the supplied 'expiry' ranges
  // (same structure as the 'date' refinement, against 'expirydate')
  this.andWhere(function () {
    if (!filterObj.queries.expiry ||
      (!filterObj.queries.expiry.from && !filterObj.queries.expiry.to)) {
      return
    }
    if (typeof filterObj.queries.expiry.from === 'number') {
      if (typeof filterObj.queries.expiry.to === 'number') {
        this.andWhereBetween('expirydate', [filterObj.queries.expiry.from, filterObj.queries.expiry.to])
      } else {
        this.andWhere('expirydate', '>=', filterObj.queries.expiry.from)
      }
    } else {
      this.andWhere('expirydate', '<=', filterObj.queries.expiry.to)
    }
  })

  // Then, refine using type-is flags
  // (matches file extensions from utils.<type>Exts against the stored name)
  this.andWhere(function () {
    for (const type of filterObj.typeIs) {
      let func
      let operator
      if (filterObj.flags[`is${type}`] === true) {
        func = 'orWhere'
        operator = 'like'
      } else if (filterObj.flags[`is${type}`] === false) {
        func = 'andWhere'
        operator = 'not like'
      }

      if (func) {
        for (const pattern of utils[`${type}Exts`].map(ext => `%${ext}`)) {
          this[func]('name', operator, pattern)
        }
      }
    }
  })

  // Then, refine using the supplied keywords against their file names
  // (patterns are pre-escaped LIKE patterns; '\' is the escape character)
  this.andWhere(function () {
    if (!filterObj.queries.text) return
    for (const pattern of filterObj.queries.text) {
      this.orWhereRaw('?? like ? escape ?', ['name', pattern, '\\'])
      this.orWhereRaw('?? like ? escape ?', ['original', pattern, '\\'])
    }
  })

  // Finally, refine using the supplied exclusions against their file names
  this.andWhere(function () {
    if (!filterObj.queries.exclude.text) return
    for (const pattern of filterObj.queries.exclude.text) {
      this.andWhereRaw('?? not like ? escape ?', ['name', pattern, '\\'])
      this.andWhereRaw('?? not like ? escape ?', ['original', pattern, '\\'])
    }
  })
}
|
|
|
|
|
2022-07-31 08:51:32 +00:00
|
|
|
// Base result object
|
|
|
|
const result = { success: true, files: [], uploadsPerPage, count: 0, basedomain }
|
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Query uploads count for pagination
|
2022-07-31 08:51:32 +00:00
|
|
|
result.count = await utils.db.table('files')
|
2022-07-10 12:46:25 +00:00
|
|
|
.where(filter)
|
|
|
|
.count('id as count')
|
|
|
|
.then(rows => rows[0].count)
|
2022-07-31 08:51:32 +00:00
|
|
|
if (!result.count) {
|
|
|
|
return res.json(result)
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
|
|
|
|
2022-07-21 19:03:59 +00:00
|
|
|
let offset = req.path_parameters && Number(req.path_parameters.page)
|
2022-07-31 08:51:32 +00:00
|
|
|
if (isNaN(offset)) {
|
|
|
|
offset = 0
|
|
|
|
} else if (offset < 0) {
|
|
|
|
offset = Math.max(0, Math.ceil(result.count / uploadsPerPage) + offset)
|
|
|
|
}
|
2019-01-01 19:39:08 +00:00
|
|
|
|
2022-08-07 23:08:40 +00:00
|
|
|
// Database columns to query
|
2022-07-10 12:46:25 +00:00
|
|
|
const columns = ['id', 'name', 'original', 'userid', 'size', 'timestamp']
|
2022-08-07 23:08:40 +00:00
|
|
|
|
2022-08-01 08:20:14 +00:00
|
|
|
if (utils.retentions.enabled) {
|
|
|
|
columns.push('expirydate')
|
|
|
|
}
|
2022-08-07 23:08:40 +00:00
|
|
|
|
|
|
|
const filterByAlbums = filterObj.queries.albumid ||
|
2022-08-01 08:20:14 +00:00
|
|
|
filterObj.queries.exclude.albumid ||
|
2022-08-07 23:08:40 +00:00
|
|
|
filterObj.flags.albumidNull !== undefined
|
|
|
|
|
|
|
|
// If not listing all uploads, OR specifically filtering by album IDs
|
|
|
|
if (!all || filterByAlbums) {
|
2022-08-01 08:20:14 +00:00
|
|
|
columns.push('albumid')
|
|
|
|
}
|
2019-09-08 01:56:29 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Only select IPs if we are listing all uploads
|
2022-08-01 08:20:14 +00:00
|
|
|
if (all) {
|
|
|
|
columns.push('ip')
|
|
|
|
}
|
2019-06-04 00:57:37 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Build raw query for order by (sorting) operation
|
|
|
|
let orderByRaw
|
|
|
|
if (sortObj.parsed.length) {
|
|
|
|
orderByRaw = sortObj.parsed.map(sort => {
|
|
|
|
// Use Knex.raw() to sanitize user inputs
|
|
|
|
if (sort.cast) {
|
|
|
|
return utils.db.raw(`cast (?? as ${sort.cast}) ${sort.order} ${sort.clause}`.trim(), sort.column)
|
|
|
|
} else {
|
|
|
|
return utils.db.raw(`?? ${sort.order} ${sort.clause}`.trim(), sort.column)
|
|
|
|
}
|
|
|
|
}).join(', ')
|
|
|
|
} else {
|
|
|
|
orderByRaw = '`id` desc'
|
|
|
|
}
|
2020-05-02 19:39:24 +00:00
|
|
|
|
2022-07-31 08:51:32 +00:00
|
|
|
result.files = await utils.db.table('files')
|
2022-07-10 12:46:25 +00:00
|
|
|
.where(filter)
|
|
|
|
.orderByRaw(orderByRaw)
|
2022-07-31 08:51:32 +00:00
|
|
|
.limit(uploadsPerPage)
|
|
|
|
.offset(uploadsPerPage * offset)
|
2022-07-10 12:46:25 +00:00
|
|
|
.select(columns)
|
2018-01-23 20:06:30 +00:00
|
|
|
|
2022-07-31 08:51:32 +00:00
|
|
|
if (!result.files.length) {
|
|
|
|
return res.json(result)
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
2019-06-18 18:48:30 +00:00
|
|
|
|
2022-07-31 08:51:32 +00:00
|
|
|
for (const file of result.files) {
|
2022-07-10 12:46:25 +00:00
|
|
|
file.extname = utils.extname(file.name)
|
|
|
|
if (utils.mayGenerateThumb(file.extname)) {
|
|
|
|
file.thumb = `thumbs/${file.name.slice(0, -file.extname.length)}.png`
|
2020-04-12 09:30:33 +00:00
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
2018-01-23 20:06:30 +00:00
|
|
|
|
2022-07-31 08:51:32 +00:00
|
|
|
result.albums = {}
|
|
|
|
|
2022-08-07 23:08:40 +00:00
|
|
|
// If not listing all uploads, OR specifically filtering by album IDs
|
|
|
|
if (!all || filterByAlbums) {
|
2022-07-31 08:51:32 +00:00
|
|
|
const albumids = result.files
|
2022-07-10 12:46:25 +00:00
|
|
|
.map(file => file.albumid)
|
2022-07-31 08:51:32 +00:00
|
|
|
.filter(utils.filterUniquifySqlArray)
|
|
|
|
|
|
|
|
result.albums = await utils.db.table('albums')
|
2022-08-07 23:08:40 +00:00
|
|
|
.where(function () {
|
|
|
|
this.whereIn('id', albumids)
|
|
|
|
|
|
|
|
// Only include data of disabled albums if listing all uploads
|
|
|
|
// and filtering by album IDs
|
|
|
|
if (!all) {
|
|
|
|
this.andWhere('enabled', 1)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.select('id', 'name', 'enabled')
|
2022-07-10 12:46:25 +00:00
|
|
|
.then(rows => {
|
|
|
|
// Build Object indexed by their IDs
|
|
|
|
const obj = {}
|
|
|
|
for (const row of rows) {
|
|
|
|
obj[row.id] = row.name
|
|
|
|
}
|
|
|
|
return obj
|
|
|
|
})
|
2022-08-07 23:08:40 +00:00
|
|
|
|
|
|
|
// If filtering by album IDs,
|
|
|
|
// then filter out uploads with missing albums data (assume disabled/deleted)
|
|
|
|
if (filterByAlbums) {
|
|
|
|
result.files = result.files.filter(file => result.albums[file.albumid] !== undefined)
|
|
|
|
}
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
2020-04-12 09:30:33 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// If we are not listing all uploads, send response
|
|
|
|
if (!all) {
|
2022-07-31 08:51:32 +00:00
|
|
|
return res.json(result)
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
2022-08-07 23:08:40 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Otherwise proceed to querying usernames
|
|
|
|
let usersTable = filterObj.uploaders
|
|
|
|
if (!usersTable.length) {
|
2022-07-31 08:51:32 +00:00
|
|
|
const userids = result.files
|
2022-07-10 12:46:25 +00:00
|
|
|
.map(file => file.userid)
|
2022-07-31 08:51:32 +00:00
|
|
|
.filter(utils.filterUniquifySqlArray)
|
2019-06-17 19:48:42 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// If there are no uploads attached to a registered user, send response
|
|
|
|
if (!userids.length) {
|
2022-07-31 08:51:32 +00:00
|
|
|
return res.json(result)
|
2020-04-12 09:30:33 +00:00
|
|
|
}
|
2019-06-17 19:48:42 +00:00
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
// Query usernames of user IDs from currently selected files
|
|
|
|
usersTable = await utils.db.table('users')
|
|
|
|
.whereIn('id', userids)
|
|
|
|
.select('id', 'username')
|
|
|
|
}
|
2018-01-23 20:06:30 +00:00
|
|
|
|
2022-07-31 08:51:32 +00:00
|
|
|
result.users = {}
|
|
|
|
|
2022-07-10 12:46:25 +00:00
|
|
|
for (const user of usersTable) {
|
2022-07-31 08:51:32 +00:00
|
|
|
result.users[user.id] = user.username
|
2022-07-10 12:46:25 +00:00
|
|
|
}
|
|
|
|
|
2022-07-31 08:51:32 +00:00
|
|
|
return res.json(result)
|
2018-01-23 20:06:30 +00:00
|
|
|
}
|
|
|
|
|
2022-06-28 04:57:56 +00:00
|
|
|
/** Get file info */

/**
 * Responds with a single upload's database row, looked up by its file name.
 * Non-moderators may only query uploads they own; throws ClientError when
 * no identifier is supplied or when no visible match exists (404).
 */
self.get = async (req, res) => {
  const isModerator = perms.is(req.locals.user, 'moderator')

  const identifier = req.path_parameters && req.path_parameters.identifier
  if (identifier === undefined) {
    throw new ClientError('No identifier provided.')
  }

  // Build the lookup query, then restrict it to the requester's own
  // uploads unless they are a moderator (who may fetch any file's info).
  const query = utils.db.table('files')
    .where('name', identifier)
  if (!isModerator) {
    query.where('userid', req.locals.user.id)
  }

  const file = await query.first()
  if (!file) {
    throw new ClientError('File not found.', { statusCode: 404 })
  }

  return res.json({ success: true, file })
}
|
|
|
|
|
2019-09-08 01:56:29 +00:00
|
|
|
// Expose all route handlers attached to `self` as this module's API
module.exports = self
|