const blake3 = require('blake3')
const fetch = require('node-fetch')
const fs = require('fs')
const multer = require('multer')
const path = require('path')
const randomstring = require('randomstring')
const searchQuery = require('search-query-parser')
const multerStorage = require('./multerStorageController')
const paths = require('./pathsController')
const perms = require('./permissionController')
const utils = require('./utilsController')
const config = require('./../config')
const logger = require('./../logger')
const db = require('knex')(config.database)

const self = {}

const fileIdentifierLengthFallback = 32
const fileIdentifierLengthChangeable = !config.uploads.fileIdentifierLength.force &&
  typeof config.uploads.fileIdentifierLength.min === 'number' &&
  typeof config.uploads.fileIdentifierLength.max === 'number'

const maxSize = parseInt(config.uploads.maxSize)
const maxSizeBytes = maxSize * 1e6
const urlMaxSizeBytes = parseInt(config.uploads.urlMaxSize) * 1e6

const maxFilesPerUpload = 20

const chunkedUploads = config.uploads.chunkSize &&
  typeof config.uploads.chunkSize === 'object' &&
  config.uploads.chunkSize.default
const chunkedUploadsTimeout = config.uploads.chunkSize.timeout || 1800000
const chunksData = {}
// Hard-coded min chunk size of 1 MB (e.g. 50 MB = max 50 chunks)
const maxChunksCount = maxSize

const extensionsFilter = Array.isArray(config.extensionsFilter) &&
  config.extensionsFilter.length
const urlExtensionsFilter = Array.isArray(config.uploads.urlExtensionsFilter) &&
  config.uploads.urlExtensionsFilter.length
const temporaryUploads = Array.isArray(config.uploads.temporaryUploadAges) &&
  config.uploads.temporaryUploadAges.length

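// Bookkeeping for an in-progress chunked upload. Chunks share a single "tmp"
// file inside a per-UUID directory (the actual writes and incremental hashing
// are assumed to happen in multerStorageController, which consumes the stream
// and hasher fields), and an inactivity timeout reaps abandoned uploads.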
class ChunksData {
  constructor (uuid, root) {
    this.uuid = uuid
    this.root = root
    this.filename = 'tmp'
    this.chunks = 0
    this.stream = null
    this.hasher = null
  }

  onTimeout () {
    if (this.stream && !this.stream.writableEnded)
      this.stream.end()
    if (this.hasher)
      this.hasher.dispose()
    self.cleanUpChunks(this.uuid, true)
  }

  setTimeout (delay) {
    this.clearTimeout()
    this._timeout = setTimeout(this.onTimeout.bind(this), delay)
  }

  clearTimeout () {
    if (this._timeout)
      clearTimeout(this._timeout)
  }
}

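// Get (or lazily create) the ChunksData entry and directory for an upload
// UUID, re-arming its inactivity timeout on every call.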
const initChunks = async uuid => {
  if (chunksData[uuid] === undefined) {
    const root = path.join(paths.chunks, uuid)
    try {
      await paths.access(root)
    } catch (err) {
      // Re-throw error
      if (err && err.code !== 'ENOENT')
        throw err
      await paths.mkdir(root)
    }
    chunksData[uuid] = new ChunksData(uuid, root)
  }
  chunksData[uuid].setTimeout(chunkedUploadsTimeout)
  return chunksData[uuid]
}

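// Shared multer instance. The custom storage engine (./multerStorageController)
// is handed a destination/filename pair per file: chunk pieces all map to their
// ChunksData tmp file, while regular uploads get a fresh random name in
// paths.uploads.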
const executeMulter = multer({
  // Guide: https://github.com/expressjs/multer#limits
  limits: {
    fileSize: maxSizeBytes,
    // Maximum number of non-file fields.
    // Dropzone.js will add 6 extra fields for chunked uploads.
    // We don't use them for anything else.
    fields: 6,
    // Maximum number of file fields.
    // Chunked uploads still need to provide only 1 file field.
    // Otherwise, only one of the files will end up being properly stored,
    // and that will also be as a chunk.
    files: maxFilesPerUpload
  },
  fileFilter (req, file, cb) {
    file.extname = utils.extname(file.originalname)
    if (self.isExtensionFiltered(file.extname))
      return cb(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`)

    // Re-map Dropzone keys so people can manually use the API without prepending 'dz'
    for (const key in req.body) {
      if (!/^dz/.test(key)) continue
      req.body[key.replace(/^dz/, '')] = req.body[key]
      delete req.body[key]
    }

    if (req.body.chunkindex !== undefined && !chunkedUploads)
      return cb('Chunked uploads are disabled at the moment.')
    else
      return cb(null, true)
  },
  storage: multerStorage({
    destination (req, file, cb) {
      // Is file a chunk!?
      file._isChunk = chunkedUploads && req.body.uuid !== undefined && req.body.chunkindex !== undefined

      if (file._isChunk)
        initChunks(req.body.uuid)
          .then(chunksData => {
            file._chunksData = chunksData
            cb(null, chunksData.root)
          })
          .catch(error => {
            logger.error(error)
            return cb('Could not process the chunked upload. Try again?')
          })
      else
        return cb(null, paths.uploads)
    },

    filename (req, file, cb) {
      if (file._isChunk) {
        return cb(null, chunksData[req.body.uuid].filename)
      } else {
        const length = self.parseFileIdentifierLength(req.headers.filelength)
        return self.getUniqueRandomName(length, file.extname)
          .then(name => cb(null, name))
          .catch(error => cb(error))
      }
    }
  })
}).array('files[]')

self.isExtensionFiltered = extname => {
  // If empty extension needs to be filtered
  if (!extname && config.filterNoExtension)
    return true

  // If there are extensions that have to be filtered
  if (extname && extensionsFilter) {
    const match = config.extensionsFilter.some(extension => extname === extension.toLowerCase())
    const whitelist = config.extensionsFilterMode === 'whitelist'
    if ((!whitelist && match) || (whitelist && !match))
      return true
  }

  return false
}

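// Clamp a client-requested identifier length to the configured min/max;
// fall back to the config default (or 32) when invalid or not changeable.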
self.parseFileIdentifierLength = fileLength => {
  if (!config.uploads.fileIdentifierLength)
    return fileIdentifierLengthFallback

  const parsed = parseInt(fileLength)
  if (isNaN(parsed) ||
    !fileIdentifierLengthChangeable ||
    parsed < config.uploads.fileIdentifierLength.min ||
    parsed > config.uploads.fileIdentifierLength.max)
    return config.uploads.fileIdentifierLength.default || fileIdentifierLengthFallback
  else
    return parsed
}

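// Generate a random file name, retrying up to utils.idMaxTries times on
// collision. Collisions are detected via the in-memory identifier cache when
// enabled, otherwise via a filesystem access() probe.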
self.getUniqueRandomName = async (length, extension) => {
  for (let i = 0; i < utils.idMaxTries; i++) {
    const identifier = randomstring.generate(length)
    const name = identifier + extension
    if (config.uploads.cacheFileIdentifiers) {
      if (utils.idSet.has(identifier)) {
        logger.log(`Identifier ${identifier} is already in use (${i + 1}/${utils.idMaxTries}).`)
        continue
      }
      utils.idSet.add(identifier)
      // logger.log(`Added ${identifier} to identifiers cache`)
    } else {
      try {
        await paths.access(path.join(paths.uploads, name))
        logger.log(`${name} is already in use (${i + 1}/${utils.idMaxTries}).`)
        continue
      } catch (error) {
        // Re-throw non-ENOENT errors; ENOENT means the name is free
        if (error && error.code !== 'ENOENT')
          throw error
      }
    }
    return name
  }

  throw 'Sorry, we could not allocate a unique random name. Try again?'
}

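// Resolve a requested upload age (hours) against the configured whitelist:
// missing -> first configured age, unlisted -> null.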
self.parseUploadAge = age => {
  if (age === undefined || age === null)
    return config.uploads.temporaryUploadAges[0]
  const parsed = parseFloat(age)
  if (config.uploads.temporaryUploadAges.includes(parsed))
    return parsed
  else
    return null
}

self.parseStripTags = stripTags => {
  if (!config.uploads.stripTags)
    return false

  if (config.uploads.stripTags.force || stripTags === undefined)
    return config.uploads.stripTags.default

  return Boolean(parseInt(stripTags))
}

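// Entry point for uploads. Authenticates the request (mandatory when
// config.private), parses album/age headers, then delegates to the file or
// URL handler.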
self.upload = async (req, res, next) => {
  let user
  if (config.private === true) {
    user = await utils.authorize(req, res)
    if (!user) return
  } else if (req.headers.token) {
    user = await db.table('users')
      .where('token', req.headers.token)
      .first()
    if (user && (user.enabled === false || user.enabled === 0))
      return res.json({ success: false, description: 'This account has been disabled.' })
  }

  let albumid = parseInt(req.headers.albumid || req.params.albumid)
  if (isNaN(albumid))
    albumid = null

  let age = null
  if (temporaryUploads) {
    age = self.parseUploadAge(req.headers.age)
    if (!age && !config.uploads.temporaryUploadAges.includes(0))
      return res.json({ success: false, description: 'Permanent uploads are not permitted.' })
  }

  try {
    const func = req.body.urls ? self.actuallyUploadUrls : self.actuallyUploadFiles
    await func(req, res, user, albumid, age)
  } catch (error) {
    const isError = error instanceof Error
    if (isError) logger.error(error)
    return res.status(400).json({
      success: false,
      description: isError ? error.toString() : error
    })
  }
}

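// Direct file uploads: run multer, short-circuit for chunk pieces, then push
// whole files through empty-file filtering, ClamAV, tag stripping and the DB.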
self.actuallyUploadFiles = async (req, res, user, albumid, age) => {
  const error = await new Promise(resolve => {
    return executeMulter(req, res, err => resolve(err))
  })

  if (error) {
    const suppress = [
      'LIMIT_FILE_SIZE',
      'LIMIT_UNEXPECTED_FILE'
    ]
    if (suppress.includes(error.code))
      throw error.toString()
    else
      throw error
  }

  if (!req.files || !req.files.length)
    throw 'No files.'

  // If chunked uploads is enabled and the uploaded file is a chunk, then just say that it was a success
  const uuid = req.body.uuid
  if (chunkedUploads && chunksData[uuid] !== undefined) {
    req.files.forEach(file => {
      chunksData[uuid].chunks++
    })
    return res.json({ success: true })
  }

  const infoMap = req.files.map(file => {
    file.albumid = albumid
    file.age = age
    return {
      path: path.join(paths.uploads, file.filename),
      data: file
    }
  })

  if (config.filterEmptyFile && infoMap.some(file => file.data.size === 0)) {
    // Unlink all files when at least one file is an empty file
    // Should continue even when encountering errors
    await Promise.all(infoMap.map(info =>
      utils.unlinkFile(info.data.filename).catch(logger.error)
    ))

    throw 'Empty files are not allowed.'
  }

  if (utils.clamd.scanner) {
    const scanResult = await self.scanFiles(req, user, infoMap)
    if (scanResult) throw scanResult
  }

  await self.stripTags(req, infoMap)

  const result = await self.storeFilesToDb(req, res, user, infoMap)
  await self.sendUploadResponse(req, res, user, result)
}

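// Sideload remote URLs: each response body is streamed to disk while being
// hashed with BLAKE3, then goes through the same scan/store pipeline.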
self.actuallyUploadUrls = async (req, res, user, albumid, age) => {
  if (!config.uploads.urlMaxSize)
    throw 'Upload by URLs is disabled at the moment.'

  const urls = req.body.urls
  if (!urls || !(urls instanceof Array))
    throw 'Missing "urls" property (array).'

  if (urls.length > maxFilesPerUpload)
    throw `Maximum ${maxFilesPerUpload} URLs at a time.`

  const downloaded = []
  const infoMap = []
  try {
    await Promise.all(urls.map(async url => {
      const original = path.basename(url).split(/[?#]/)[0]
      const extname = utils.extname(original)

      // Extensions filter
      let filtered = false
      if (['blacklist', 'whitelist'].includes(config.uploads.urlExtensionsFilterMode))
        if (urlExtensionsFilter) {
          const match = config.uploads.urlExtensionsFilter.some(extension => extname === extension.toLowerCase())
          const whitelist = config.uploads.urlExtensionsFilterMode === 'whitelist'
          filtered = ((!whitelist && match) || (whitelist && !match))
        } else {
          throw 'Invalid extensions filter, please contact the site owner.'
        }
      else
        filtered = self.isExtensionFiltered(extname)

      if (filtered)
        throw `${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`

      if (config.uploads.urlProxy)
        url = config.uploads.urlProxy
          .replace(/{url}/g, encodeURIComponent(url))
          .replace(/{url-noprot}/g, encodeURIComponent(url.replace(/^https?:\/\//, '')))

      const length = self.parseFileIdentifierLength(req.headers.filelength)
      const name = await self.getUniqueRandomName(length, extname)

      const destination = path.join(paths.uploads, name)
      const outStream = fs.createWriteStream(destination)
      const hash = blake3.createHash()

      // Push to array early, so regardless of its progress it will be deleted on errors
      downloaded.push(destination)

      // Limit max response body size with maximum allowed size
      const fetchFile = await fetch(url, { size: urlMaxSizeBytes })
        .then(res => new Promise((resolve, reject) => {
          if (res.status === 200) {
            const onerror = error => {
              hash.dispose()
              reject(error)
            }
            outStream.on('error', onerror)
            res.body.on('error', onerror)
            res.body.on('data', d => hash.update(d))

            res.body.pipe(outStream)
            outStream.on('finish', () => resolve(res))
          } else {
            resolve(res)
          }
        }))

      if (fetchFile.status !== 200)
        throw `${fetchFile.status} ${fetchFile.statusText}`

      infoMap.push({
        path: destination,
        data: {
          filename: name,
          originalname: original,
          extname,
          mimetype: fetchFile.headers.get('content-type').split(';')[0] || '',
          size: outStream.bytesWritten,
          hash: hash.digest('hex'),
          albumid,
          age
        }
      })
    }))

    // If no errors encountered, clear cache of downloaded files
    downloaded.length = 0

    if (utils.clamd.scanner) {
      const scanResult = await self.scanFiles(req, user, infoMap)
      if (scanResult) throw scanResult
    }

    const result = await self.storeFilesToDb(req, res, user, infoMap)
    await self.sendUploadResponse(req, res, user, result)
  } catch (error) {
    // Unlink all downloaded files when at least one file threw an error from the for-loop
    // Should continue even when encountering errors
    if (downloaded.length)
      await Promise.all(downloaded.map(file =>
        utils.unlinkFile(file).catch(logger.error)
      ))

    const errorString = error.toString()
    const suppress = [
      / over limit:/
    ]
    if (!suppress.some(t => t.test(errorString)))
      throw error
    else
      throw errorString
  }
}

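// HTTP wrapper around actuallyFinishChunks with the same auth flow as
// self.upload.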
self.finishChunks = async (req, res, next) => {
  if (!chunkedUploads)
    return res.json({ success: false, description: 'Chunked upload is disabled at the moment.' })

  let user
  if (config.private === true) {
    user = await utils.authorize(req, res)
    if (!user) return
  } else if (req.headers.token) {
    user = await db.table('users')
      .where('token', req.headers.token)
      .first()
    if (user && (user.enabled === false || user.enabled === 0))
      return res.json({ success: false, description: 'This account has been disabled.' })
  }

  try {
    await self.actuallyFinishChunks(req, res, user)
  } catch (error) {
    const isError = error instanceof Error
    if (isError) logger.error(error)
    return res.status(400).json({
      success: false,
      description: isError ? error.toString() : error
    })
  }
}

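// Finalize chunked uploads: close each upload's stream, validate chunk count
// and size, move the assembled tmp file into paths.uploads, then reuse the
// regular scan/store pipeline.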
self.actuallyFinishChunks = async (req, res, user) => {
  const check = file => typeof file.uuid !== 'string' ||
    !chunksData[file.uuid] ||
    chunksData[file.uuid].chunks < 2

  const files = req.body.files
  if (!Array.isArray(files) || !files.length || files.some(check))
    throw 'An unexpected error occurred.'

  const infoMap = []
  try {
    await Promise.all(files.map(async file => {
      // Close stream
      chunksData[file.uuid].stream.end()

      if (chunksData[file.uuid].chunks > maxChunksCount)
        throw 'Too many chunks.'

      file.extname = typeof file.original === 'string' ? utils.extname(file.original) : ''
      if (self.isExtensionFiltered(file.extname))
        throw `${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`

      if (temporaryUploads) {
        file.age = self.parseUploadAge(file.age)
        if (!file.age && !config.uploads.temporaryUploadAges.includes(0))
          throw 'Permanent uploads are not permitted.'
      }

      file.size = chunksData[file.uuid].stream.bytesWritten
      if (config.filterEmptyFile && file.size === 0)
        throw 'Empty files are not allowed.'
      else if (file.size > maxSizeBytes)
        throw `File too large. Chunks are bigger than ${maxSize} MB.`

      // Double-check file size
      const tmpfile = path.join(chunksData[file.uuid].root, chunksData[file.uuid].filename)
      const lstat = await paths.lstat(tmpfile)
      if (lstat.size !== file.size)
        throw `File size mismatched (${lstat.size} vs. ${file.size}).`

      // Generate name
      const length = self.parseFileIdentifierLength(file.filelength)
      const name = await self.getUniqueRandomName(length, file.extname)

      // Move tmp file to final destination
      const destination = path.join(paths.uploads, name)
      await paths.rename(tmpfile, destination)
      const hash = chunksData[file.uuid].hasher.digest('hex')

      // Continue even when encountering errors
      await self.cleanUpChunks(file.uuid).catch(logger.error)

      let albumid = parseInt(file.albumid)
      if (isNaN(albumid))
        albumid = null

      const data = {
        filename: name,
        originalname: file.original || '',
        extname: file.extname,
        mimetype: file.type || '',
        size: file.size,
        hash,
        albumid,
        age: file.age
      }

      infoMap.push({ path: destination, data })
    }))

    if (utils.clamd.scanner) {
      const scanResult = await self.scanFiles(req, user, infoMap)
      if (scanResult) throw scanResult
    }

    await self.stripTags(req, infoMap)

    const result = await self.storeFilesToDb(req, res, user, infoMap)
    await self.sendUploadResponse(req, res, user, result)
  } catch (error) {
    // Dispose unfinished hasher and clean up leftover chunks
    // Should continue even when encountering errors
    await Promise.all(files.map(file => {
      // eslint-disable-next-line curly
      if (chunksData[file.uuid] !== undefined) {
        try {
          if (chunksData[file.uuid].hasher)
            chunksData[file.uuid].hasher.dispose()
        } catch (error) {}
        self.cleanUpChunks(file.uuid).catch(logger.error)
      }
    }))

    // Re-throw error
    throw error
  }
}

self.cleanUpChunks = async (uuid, onTimeout) => {
  // Remove tmp file
  await paths.unlink(path.join(chunksData[uuid].root, chunksData[uuid].filename))
    .catch(error => {
      if (error.code !== 'ENOENT')
        logger.error(error)
    })
  // Remove UUID dir
  await paths.rmdir(chunksData[uuid].root)
  // Delete cached chunks data
  if (!onTimeout) chunksData[uuid].clearTimeout()
  delete chunksData[uuid]
}

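// Scan uploads with ClamAV. Resolves to a falsy value when clean, skipped or
// bypassed, otherwise to an error string; offending files are unlinked.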
self.scanFiles = async (req, user, infoMap) => {
  // eslint-disable-next-line curly
  if (user && utils.clamd.groupBypass && perms.is(user, utils.clamd.groupBypass)) {
    // logger.log(`[ClamAV]: Skipping ${infoMap.length} file(s), ${utils.clamd.groupBypass} group bypass`)
    return false
  }

  const foundThreats = []
  const results = await Promise.all(infoMap.map(async info => {
    if (utils.clamd.whitelistExtensions && utils.clamd.whitelistExtensions.includes(info.data.extname))
      return // logger.log(`[ClamAV]: Skipping ${info.data.filename}, extension whitelisted`)

    if (utils.clamd.maxSize && info.data.size > utils.clamd.maxSize)
      return // logger.log(`[ClamAV]: Skipping ${info.data.filename}, size ${info.data.size} > ${utils.clamd.maxSize}`)

    const reply = await utils.clamd.scanner.scanFile(info.path, utils.clamd.timeout, utils.clamd.chunkSize)
    if (!reply.includes('OK') || reply.includes('FOUND')) {
      // eslint-disable-next-line no-control-regex
      const foundThreat = reply.replace(/^stream: /, '').replace(/ FOUND\u0000$/, '')
      logger.log(`[ClamAV]: ${info.data.filename}: ${foundThreat} FOUND.`)
      foundThreats.push(foundThreat)
    }
  })).then(() => {
    if (foundThreats.length)
      return `Threat found: ${foundThreats[0]}${foundThreats.length > 1 ? ', and more' : ''}.`
  }).catch(error => {
    logger.error(`[ClamAV]: ${error.toString()}`)
    return 'An unexpected error occurred with ClamAV, please contact the site owner.'
  })

  if (results)
    // Unlink all files when at least one threat is found OR any errors occurred
    // Should continue even when encountering errors
    await Promise.all(infoMap.map(info =>
      utils.unlinkFile(info.data.filename).catch(logger.error)
    ))

  return results
}

self.stripTags = async (req, infoMap) => {
  if (!self.parseStripTags(req.headers.striptags))
    return

  try {
    await Promise.all(infoMap.map(info =>
      utils.stripTags(info.data.filename, info.data.extname)
    ))
  } catch (error) {
    // Unlink all files when at least one of them fails to be stripped
    // Should continue even when encountering errors
    await Promise.all(infoMap.map(info =>
      utils.unlinkFile(info.data.filename).catch(logger.error)
    ))

    // Re-throw error
    throw error
  }
}

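// Deduplicate against existing rows by (userid, hash, size), bulk-insert the
// new files, and refresh the timestamps of any albums they were added to.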
self.storeFilesToDb = async (req, res, user, infoMap) => {
  const files = []
  const exists = []
  const albumids = []

  await Promise.all(infoMap.map(async info => {
    // Check if the file exists by checking its hash and size
    const dbFile = await db.table('files')
      .where(function () {
        if (user === undefined)
          this.whereNull('userid')
        else
          this.where('userid', user.id)
      })
      .where({
        hash: info.data.hash,
        size: info.data.size
      })
      // Select expirydate to display expiration date of existing files as well
      .select('name', 'expirydate')
      .first()

    if (dbFile) {
      // Continue even when encountering errors
      await utils.unlinkFile(info.data.filename).catch(logger.error)
      // logger.log(`Unlinked ${info.data.filename} since a duplicate named ${dbFile.name} exists`)

      // If on /nojs route, append original file name reported by client
      if (req.path === '/nojs')
        dbFile.original = info.data.originalname

      exists.push(dbFile)
      return
    }

    const timestamp = Math.floor(Date.now() / 1000)
    const data = {
      name: info.data.filename,
      original: info.data.originalname,
      type: info.data.mimetype,
      size: info.data.size,
      hash: info.data.hash,
      // Only disable if explicitly set to false in config
      ip: config.uploads.storeIP !== false ? req.ip : null,
      timestamp
    }

    if (user) {
      data.userid = user.id
      data.albumid = info.data.albumid
      if (data.albumid !== null && !albumids.includes(data.albumid))
        albumids.push(data.albumid)
    }

    if (info.data.age)
      data.expirydate = data.timestamp + (info.data.age * 3600) // Hours to seconds

    files.push(data)

    // Generate thumbs, but do not wait
    if (utils.mayGenerateThumb(info.data.extname))
      utils.generateThumbs(info.data.filename, info.data.extname, true).catch(logger.error)
  }))

  if (files.length) {
    let authorizedIds = []
    if (albumids.length) {
      authorizedIds = await db.table('albums')
        .where({ userid: user.id })
        .whereIn('id', albumids)
        .select('id')
        .then(rows => rows.map(row => row.id))

      // Remove albumid if the user does not own the album
      for (const file of files)
        if (file.albumid !== null && !authorizedIds.includes(file.albumid))
          file.albumid = null
    }

    // Insert new files to DB
    await db.table('files').insert(files)
    utils.invalidateStatsCache('uploads')

    // Update albums' timestamp
    if (authorizedIds.length) {
      await db.table('albums')
        .whereIn('id', authorizedIds)
        .update('editedAt', Math.floor(Date.now() / 1000))
      utils.invalidateAlbumsCache(authorizedIds)
    }
  }

  return files.concat(exists)
}

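// Shape the JSON response: name and public URL per file, plus expirydate and
// (on /nojs) the original file name when applicable.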
self.sendUploadResponse = async (req, res, user, result) => {
  // Send response
  res.json({
    success: true,
    files: result.map(file => {
      const map = {
        name: file.name,
        url: `${config.domain}/${file.name}`
      }

      // If a temporary upload, add expiry date
      if (file.expirydate)
        map.expirydate = file.expirydate

      // If on /nojs route, add original name
      if (req.path === '/nojs')
        map.original = file.original

      // If uploaded by user, add delete URL (intended for ShareX and its derivatives)
      // Homepage uploader will not use this (use dashboard instead)
      // REVISION: I wasn't aware ShareX wouldn't do a basic GET request to this API,
      // which I hoped would then use the token header in the downloadable ShareX config file.
      // At its current state, this isn't really usable.
      /*
      if (user)
        map.deleteUrl = `${config.homeDomain}/api/upload/delete/${file.name}`
      */

      return map
    })
  })
}

self.delete = async (req, res) => {
  // Map /api/delete requests to /api/bulkdelete
  let body
  if (req.method === 'POST') {
    // Original lolisafe API (this fork uses /api/bulkdelete immediately)
    const id = parseInt(req.body.id)
    body = {
      field: 'id',
      values: isNaN(id) ? undefined : [id]
    }
  } /* else if (req.method === 'GET') {
    // ShareX-compatible API (or other clients that require basic GET-based API)
    const name = req.params.name
    body = {
      field: 'name',
      values: name ? [name] : undefined
    }
  } */

  req.body = body
  return self.bulkDelete(req, res)
}

self.bulkDelete = async (req, res) => {
  const user = await utils.authorize(req, res)
  if (!user) return

  const field = req.body.field || 'id'
  const values = req.body.values

  if (!Array.isArray(values) || !values.length)
    return res.json({ success: false, description: 'No array of files specified.' })

  try {
    const failed = await utils.bulkDeleteFromDb(field, values, user)
    return res.json({ success: true, failed })
  } catch (error) {
    logger.error(error)
    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
  }
}

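// List uploads for the dashboard. Filters are parsed with search-query-parser
// (keyed keywords, date/expiry ranges, sorting), with stricter thresholds for
// non-moderators.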
self.list = async (req, res) => {
  const user = await utils.authorize(req, res)
  if (!user) return

  const all = req.headers.all === '1'
  const filters = req.headers.filters
  const minoffset = Number(req.headers.minoffset) || 0
  const ismoderator = perms.is(user, 'moderator')
  if (all && !ismoderator)
    return res.status(403).end()

  const basedomain = config.domain

  // Thresholds for regular users
  const MAX_WILDCARDS_IN_KEY = 2
  const MAX_TEXT_QUERIES = 3 // non-keyed keywords
  const MAX_SORT_KEYS = 1
  const MAX_IS_KEYS = 1

  const filterObj = {
    uploaders: [],
    excludeUploaders: [],
    queries: {
      exclude: {}
    },
    typeIs: [
      'image',
      'video'
    ],
    flags: {}
  }

  const sortObj = {
    // Cast columns to specific type if they are stored differently
    casts: {
      size: 'integer'
    },
    // Columns mapping
    maps: {
      date: 'timestamp',
      expiry: 'expirydate',
      originalname: 'original'
    },
    // Columns with which to use SQLite's NULLS LAST option
    nullsLast: [
      'userid',
      'expirydate',
      'ip'
    ],
    parsed: []
  }

  // Parse glob wildcards into SQL wildcards
  function sqlLikeParser (pattern) {
    // Escape SQL operators
    const escaped = pattern
      .replace(/(?<!\\)%/g, '\\%')
      .replace(/(?<!\\)_/g, '\\_')

    // Look for any glob operators
    const match = pattern.match(/(?<!\\)(\*|\?)/g)
    if (match && match.length)
      return {
        count: match.length,
        // Replace glob operators with their SQL equivalents
        escaped: escaped
          .replace(/(?<!\\)\*/g, '%')
          .replace(/(?<!\\)\?/g, '_')
      }
    else
      return {
        count: 0,
        // Assume partial match
        escaped: `%${escaped}%`
      }
  }

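  // e.g. sqlLikeParser('file*name?') -> { count: 2, escaped: 'file%name_' }
  //      sqlLikeParser('readme')     -> { count: 0, escaped: '%readme%' }
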
  if (filters) {
    let keywords = []

    if (req.params.id === undefined)
      keywords = keywords.concat([
        'albumid'
      ])

    // Only allow filtering by 'ip' and 'user' keys when listing all uploads
    if (all)
      keywords = keywords.concat([
        'ip',
        'user'
      ])

    const ranges = [
      'date',
      'expiry'
    ]

    filterObj.queries = searchQuery.parse(filters, {
      keywords: keywords.concat([
        'is',
        'sort',
        'orderby'
      ]),
      ranges,
      tokenize: true,
      alwaysArray: true,
      offsets: false
    })

2020-05-16 15:07:15 +00:00
|
|
|
// Accept orderby as alternative for sort
|
|
|
|
if (filterObj.queries.orderby) {
|
|
|
|
if (!filterObj.queries.sort) filterObj.queries.sort = []
|
|
|
|
filterObj.queries.sort = filterObj.queries.sort.concat(filterObj.queries.orderby)
|
|
|
|
delete filterObj.queries.orderby
|
|
|
|
}
|
|
|
|
|
2020-05-02 19:39:24 +00:00
|
|
|
// For some reason, single value won't be in Array even with 'alwaysArray' option
|
|
|
|
if (typeof filterObj.queries.exclude.text === 'string')
|
|
|
|
filterObj.queries.exclude.text = [filterObj.queries.exclude.text]
|
|
|
|
|
2020-05-02 21:32:45 +00:00
|
|
|
// Text (non-keyed keywords) queries
|
2020-05-02 19:39:24 +00:00
|
|
|
let textQueries = 0
|
|
|
|
if (filterObj.queries.text) textQueries += filterObj.queries.text.length
|
|
|
|
if (filterObj.queries.exclude.text) textQueries += filterObj.queries.exclude.text.length
|
|
|
|
|
|
|
|
// Regular user threshold check
|
|
|
|
if (!ismoderator && textQueries > MAX_TEXT_QUERIES)
|
|
|
|
return res.json({
|
|
|
|
success: false,
|
|
|
|
description: `Users are only allowed to use ${MAX_TEXT_QUERIES} non-keyed keyword${MAX_TEXT_QUERIES === 1 ? '' : 's'} at a time.`
|
|
|
|
})
|
|
|
|
|
|
|
|
if (filterObj.queries.text)
|
|
|
|
for (let i = 0; i < filterObj.queries.text.length; i++) {
|
|
|
|
const result = sqlLikeParser(filterObj.queries.text[i])
|
2020-05-02 21:32:45 +00:00
|
|
|
if (!ismoderator && result.count > MAX_WILDCARDS_IN_KEY)
|
2020-05-02 19:39:24 +00:00
|
|
|
return res.json({
|
|
|
|
success: false,
|
|
|
|
description: `Users are only allowed to use ${MAX_WILDCARDS_IN_KEY} wildcard${MAX_WILDCARDS_IN_KEY === 1 ? '' : 's'} per key.`
|
|
|
|
})
|
|
|
|
filterObj.queries.text[i] = result.escaped
|
|
|
|
}
|
|
|
|
|
2020-05-02 21:32:45 +00:00
|
|
|
if (filterObj.queries.exclude.text)
|
2020-05-02 19:39:24 +00:00
|
|
|
for (let i = 0; i < filterObj.queries.exclude.text.length; i++) {
|
|
|
|
const result = sqlLikeParser(filterObj.queries.exclude.text[i])
|
2020-05-02 21:32:45 +00:00
|
|
|
if (!ismoderator && result.count > MAX_WILDCARDS_IN_KEY)
|
2020-05-02 19:39:24 +00:00
|
|
|
return res.json({
|
|
|
|
success: false,
|
|
|
|
description: `Users are only allowed to use ${MAX_WILDCARDS_IN_KEY} wildcard${MAX_WILDCARDS_IN_KEY === 1 ? '' : 's'} per key.`
|
|
|
|
})
|
|
|
|
filterObj.queries.exclude.text[i] = result.escaped
|
|
|
|
}

    for (const key of keywords) {
      let queryIndex = -1
      let excludeIndex = -1

      // Make sure keyword arrays only contain unique values
      if (filterObj.queries[key]) {
        filterObj.queries[key] = filterObj.queries[key].filter((v, i, a) => a.indexOf(v) === i)
        queryIndex = filterObj.queries[key].indexOf('-')
      }
      if (filterObj.queries.exclude[key]) {
        filterObj.queries.exclude[key] = filterObj.queries.exclude[key].filter((v, i, a) => a.indexOf(v) === i)
        excludeIndex = filterObj.queries.exclude[key].indexOf('-')
      }

      // Flag to match NULL values
      const inQuery = queryIndex !== -1
      const inExclude = excludeIndex !== -1
      if (inQuery || inExclude) {
        // Prioritize exclude keys when both types found
        filterObj.flags[`${key}Null`] = inExclude ? false : inQuery
        if (inQuery)
          if (filterObj.queries[key].length === 1)
            // Delete key to avoid unexpected behavior
            delete filterObj.queries[key]
          else
            filterObj.queries[key].splice(queryIndex, 1)
        if (inExclude)
          if (filterObj.queries.exclude[key].length === 1)
            // Delete key to avoid unexpected behavior
            delete filterObj.queries.exclude[key]
          else
            filterObj.queries.exclude[key].splice(excludeIndex, 1)
      }
    }
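
    // e.g. the filter `albumid:-` sets filterObj.flags.albumidNull = true,
    // and deletes filterObj.queries.albumid since '-' was its only value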

    const parseDate = (date, minoffset, resetMs) => {
      // [YYYY][/MM][/DD] [HH][:MM][:SS]
      // e.g. 2020/01/01 00:00:00, 2018/01/01 06, 2019/11, 12:34:00
      const match = date.match(/^(\d{4})?(\/\d{2})?(\/\d{2})?\s?(\d{2})?(:\d{2})?(:\d{2})?$/)

      if (match) {
        let offset = 0
        if (minoffset !== undefined)
          offset = 60000 * (utils.timezoneOffset - minoffset)

        const dateObj = new Date(Date.now() + offset)

        if (match[1] !== undefined)
          dateObj.setFullYear(Number(match[1]), // full year
            match[2] !== undefined ? (Number(match[2].slice(1)) - 1) : 0, // month, zero-based
            match[3] !== undefined ? Number(match[3].slice(1)) : 1) // date

        if (match[4] !== undefined)
          dateObj.setHours(Number(match[4]), // hours
            match[5] !== undefined ? Number(match[5].slice(1)) : 0, // minutes
            match[6] !== undefined ? Number(match[6].slice(1)) : 0) // seconds

        if (resetMs)
          dateObj.setMilliseconds(0)

        // Calculate timezone differences
        return new Date(dateObj.getTime() - offset)
      } else {
        return null
      }
    }
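
    // e.g. parseDate('2020/01/01 00:00', undefined, true) => local 2020-01-01 00:00:00.000
    //      parseDate('not a date') => null
    // Note: omitted fields fall back to the current date/time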

    // Parse dates to timestamps
    for (const range of ranges)
      if (filterObj.queries[range]) {
        if (filterObj.queries[range].from) {
          const parsed = parseDate(filterObj.queries[range].from, minoffset, true)
          filterObj.queries[range].from = parsed ? Math.floor(parsed / 1000) : null
        }
        if (filterObj.queries[range].to) {
          const parsed = parseDate(filterObj.queries[range].to, minoffset, true)
          filterObj.queries[range].to = parsed ? Math.ceil(parsed / 1000) : null
        }
      }
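
    // e.g. a parsed Date of 2020-01-01T00:00:00Z becomes 1577836800
    // ('from' rounds down, 'to' rounds up to whole seconds)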

    // Query the users table for user IDs
    if (filterObj.queries.user || filterObj.queries.exclude.user) {
      const usernames = []
        .concat(filterObj.queries.user || [])
        .concat(filterObj.queries.exclude.user || [])

      const uploaders = await db.table('users')
        .whereIn('username', usernames)
        .select('id', 'username')

      // If no matches, or mismatched results
      if (!uploaders || (uploaders.length !== usernames.length)) {
        const notFound = usernames.filter(username => {
          return !uploaders.find(uploader => uploader.username === username)
        })
        if (notFound.length)
          return res.json({
            success: false,
            description: `User${notFound.length === 1 ? '' : 's'} not found: ${notFound.join(', ')}.`
          })
      }

      for (const uploader of uploaders)
        if (filterObj.queries.user && filterObj.queries.user.includes(uploader.username))
          filterObj.uploaders.push(uploader)
        else
          filterObj.excludeUploaders.push(uploader)

      // Delete keys to avoid unexpected behavior
      delete filterObj.queries.user
      delete filterObj.queries.exclude.user
    }

    // Parse sort keys
    if (filterObj.queries.sort) {
      let allowed = [
        'expirydate',
        'id',
        'name',
        'original',
        'size',
        'timestamp'
      ]

      // Only allow sorting by 'albumid' when not listing an album's uploads
      if (req.params.id === undefined)
        allowed = allowed.concat([
          'albumid'
        ])

      // Only allow sorting by 'ip' and 'userid' columns when listing all uploads
      if (all)
        allowed = allowed.concat([
          'ip',
          'userid'
        ])

      for (const obQuery of filterObj.queries.sort) {
        const tmp = obQuery.toLowerCase().split(':')
        const column = sortObj.maps[tmp[0]] || tmp[0]

        if (!allowed.includes(column))
          // Alert users upon using disallowed/missing columns
          return res.json({ success: false, description: `Column \`${column}\` cannot be used for sorting.\n\nTry the following instead:\n${allowed.join(', ')}` })

        sortObj.parsed.push({
          column,
          order: (tmp[1] && /^d/.test(tmp[1])) ? 'desc' : 'asc',
          clause: sortObj.nullsLast.includes(column) ? 'nulls last' : '',
          cast: sortObj.casts[column] || null
        })
      }

      // Regular user threshold check
      if (!ismoderator && sortObj.parsed.length > MAX_SORT_KEYS)
        return res.json({
          success: false,
          description: `Users are only allowed to use ${MAX_SORT_KEYS} sort key${MAX_SORT_KEYS === 1 ? '' : 's'} at a time.`
        })

      // Delete key to avoid unexpected behavior
      delete filterObj.queries.sort
    }
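
    // e.g. `sort:size:d` splits into ['size', 'd'] and parses to
    // { column: 'size', order: 'desc', ... }; any direction not
    // starting with 'd' falls back to 'asc'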

    // Parse is keys
    let isKeys = 0
    let isLast
    if (filterObj.queries.is || filterObj.queries.exclude.is) {
      for (const type of filterObj.typeIs) {
        const inQuery = filterObj.queries.is && filterObj.queries.is.includes(type)
        const inExclude = filterObj.queries.exclude.is && filterObj.queries.exclude.is.includes(type)

        // Prioritize exclude keys when both types found
        if (inQuery || inExclude) {
          filterObj.flags[`is${type}`] = inExclude ? false : inQuery
          if (isLast !== undefined && isLast !== filterObj.flags[`is${type}`])
            return res.json({
              success: false,
              description: 'Cannot mix inclusion and exclusion type-is keys.'
            })
          isKeys++
          isLast = filterObj.flags[`is${type}`]
        }
      }

      // Delete keys to avoid unexpected behavior
      delete filterObj.queries.is
      delete filterObj.queries.exclude.is
    }

    // Regular user threshold check
    if (!ismoderator && isKeys > MAX_IS_KEYS)
      return res.json({
        success: false,
        description: `Users are only allowed to use ${MAX_IS_KEYS} type-is key${MAX_IS_KEYS === 1 ? '' : 's'} at a time.`
      })
  }
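
  // e.g. assuming 'image' is among filterObj.typeIs, `is:image` sets
  // filterObj.flags.isimage = true, while `-is:image` sets it to false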

  function filter () {
    // If listing all uploads
    if (all)
      this.where(function () {
        // Filter uploads matching any of the supplied 'user' keys and/or NULL flag
        // Prioritize exclude keys when both types found
        this.orWhere(function () {
          if (filterObj.excludeUploaders.length)
            this.orWhereNotIn('userid', filterObj.excludeUploaders.map(v => v.id))
          else if (filterObj.uploaders.length)
            this.orWhereIn('userid', filterObj.uploaders.map(v => v.id))
          // Such overbearing logic for NULL values, smh...
          if ((filterObj.excludeUploaders.length && filterObj.flags.userNull !== false) ||
            (filterObj.uploaders.length && filterObj.flags.userNull) ||
            (!filterObj.excludeUploaders.length && !filterObj.uploaders.length && filterObj.flags.userNull))
            this.orWhereNull('userid')
          else if (filterObj.flags.userNull === false)
            this.orWhereNotNull('userid')
        })

        // Filter uploads matching any of the supplied 'ip' keys and/or NULL flag
        // Same prioritization logic as above
        this.orWhere(function () {
          if (filterObj.queries.exclude.ip)
            this.orWhereNotIn('ip', filterObj.queries.exclude.ip)
          else if (filterObj.queries.ip)
            this.orWhereIn('ip', filterObj.queries.ip)
          // ...
          if ((filterObj.queries.exclude.ip && filterObj.flags.ipNull !== false) ||
            (filterObj.queries.ip && filterObj.flags.ipNull) ||
            (!filterObj.queries.exclude.ip && !filterObj.queries.ip && filterObj.flags.ipNull))
            this.orWhereNull('ip')
          else if (filterObj.flags.ipNull === false)
            this.orWhereNotNull('ip')
        })
      })
    else
      // If not listing all uploads, list user's uploads
      this.where('userid', user.id)

    // Then, refine using any of the supplied 'albumid' keys and/or NULL flag
    // Same prioritization logic as 'userid' and 'ip' above
    if (req.params.id === undefined)
      this.andWhere(function () {
        if (filterObj.queries.exclude.albumid)
          this.orWhereNotIn('albumid', filterObj.queries.exclude.albumid)
        else if (filterObj.queries.albumid)
          this.orWhereIn('albumid', filterObj.queries.albumid)
        // ...
        if ((filterObj.queries.exclude.albumid && filterObj.flags.albumidNull !== false) ||
          (filterObj.queries.albumid && filterObj.flags.albumidNull) ||
          (!filterObj.queries.exclude.albumid && !filterObj.queries.albumid && filterObj.flags.albumidNull))
          this.orWhereNull('albumid')
        else if (filterObj.flags.albumidNull === false)
          this.orWhereNotNull('albumid')
      })
    else if (!all)
      // If not listing all uploads, list uploads from user's album
      this.andWhere('albumid', req.params.id)

    // Then, refine using the supplied 'date' ranges
    this.andWhere(function () {
      if (!filterObj.queries.date || (!filterObj.queries.date.from && !filterObj.queries.date.to)) return
      if (typeof filterObj.queries.date.from === 'number')
        if (typeof filterObj.queries.date.to === 'number')
          this.andWhereBetween('timestamp', [filterObj.queries.date.from, filterObj.queries.date.to])
        else
          this.andWhere('timestamp', '>=', filterObj.queries.date.from)
      else
        this.andWhere('timestamp', '<=', filterObj.queries.date.to)
    })

    // Then, refine using the supplied 'expiry' ranges
    this.andWhere(function () {
      if (!filterObj.queries.expiry || (!filterObj.queries.expiry.from && !filterObj.queries.expiry.to)) return
      if (typeof filterObj.queries.expiry.from === 'number')
        if (typeof filterObj.queries.expiry.to === 'number')
          this.andWhereBetween('expirydate', [filterObj.queries.expiry.from, filterObj.queries.expiry.to])
        else
          this.andWhere('expirydate', '>=', filterObj.queries.expiry.from)
      else
        this.andWhere('expirydate', '<=', filterObj.queries.expiry.to)
    })

    // Then, refine using type-is flags
    this.andWhere(function () {
      for (const type of filterObj.typeIs) {
        let func
        let operator
        if (filterObj.flags[`is${type}`] === true) {
          func = 'orWhere'
          operator = 'like'
        } else if (filterObj.flags[`is${type}`] === false) {
          func = 'andWhere'
          operator = 'not like'
        }

        if (func)
          for (const pattern of utils[`${type}Exts`].map(ext => `%${ext}`))
            this[func]('name', operator, pattern)
      }
    })

    // Then, refine using the supplied keywords against their file names
    this.andWhere(function () {
      if (!filterObj.queries.text) return
      for (const pattern of filterObj.queries.text) {
        this.orWhereRaw('?? like ? escape ?', ['name', pattern, '\\'])
        this.orWhereRaw('?? like ? escape ?', ['original', pattern, '\\'])
      }
    })
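
    // e.g. for a single escaped pattern '%cat%', this should produce roughly:
    //   and (`name` like '%cat%' escape '\' or `original` like '%cat%' escape '\')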

    // Finally, refine using the supplied exclusions against their file names
    this.andWhere(function () {
      if (!filterObj.queries.exclude.text) return
      for (const pattern of filterObj.queries.exclude.text) {
        this.andWhereRaw('?? not like ? escape ?', ['name', pattern, '\\'])
        this.andWhereRaw('?? not like ? escape ?', ['original', pattern, '\\'])
      }
    })
  }

  try {
    // Query uploads count for pagination
    const count = await db.table('files')
      .where(filter)
      .count('id as count')
      .then(rows => rows[0].count)
    if (!count)
      return res.json({ success: true, files: [], count })

    let offset = Number(req.params.page)
    if (isNaN(offset)) offset = 0
    else if (offset < 0) offset = Math.max(0, Math.ceil(count / 25) + offset)
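
    // e.g. with count = 60 (3 pages), page = -1 resolves to
    // Math.max(0, Math.ceil(60 / 25) - 1) = 2, i.e. the last page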

    const columns = ['id', 'name', 'original', 'userid', 'size', 'timestamp']
    if (temporaryUploads)
      columns.push('expirydate')
    if (!all || filterObj.queries.albumid || filterObj.queries.exclude.albumid ||
      filterObj.flags.albumidNull !== undefined)
      columns.push('albumid')

    // Only select IPs if we are listing all uploads
    if (all)
      columns.push('ip')

    // Build raw query for order by (sorting) operation
    let orderByRaw
    if (sortObj.parsed.length)
      orderByRaw = sortObj.parsed.map(sort => {
        // Use Knex.raw() to sanitize user inputs
        if (sort.cast)
          return db.raw(`cast (?? as ${sort.cast}) ${sort.order} ${sort.clause}`.trim(), sort.column)
        else
          return db.raw(`?? ${sort.order} ${sort.clause}`.trim(), sort.column)
      }).join(', ')
    else
      orderByRaw = '`id` desc'
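
    // e.g. a parsed sort of { column: 'size', order: 'desc', clause: '', cast: null }
    // should yield roughly the raw fragment: `size` desc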

    const files = await db.table('files')
      .where(filter)
      .orderByRaw(orderByRaw)
      .limit(25)
      .offset(25 * offset)
      .select(columns)

    if (!files.length)
      return res.json({ success: true, files, count, basedomain })

    for (const file of files) {
      file.extname = utils.extname(file.name)
      if (utils.mayGenerateThumb(file.extname))
        file.thumb = `thumbs/${file.name.slice(0, -file.extname.length)}.png`
    }
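
    // e.g. a file named 'abcd.jpg' (extname '.jpg') gets thumb = 'thumbs/abcd.png'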

    // If we queried albumid, query album names
    let albums = {}
    if (columns.includes('albumid')) {
      const albumids = files
        .map(file => file.albumid)
        .filter((v, i, a) => {
          return v !== null && v !== undefined && v !== '' && a.indexOf(v) === i
        })
      albums = await db.table('albums')
        .whereIn('id', albumids)
        .where('enabled', 1)
        .select('id', 'name')
        .then(rows => {
          // Build Object indexed by their IDs
          const obj = {}
          for (const row of rows)
            obj[row.id] = row.name
          return obj
        })
    }

    // If we are not listing all uploads, send response
    if (!all)
      return res.json({ success: true, files, count, albums, basedomain })

    // Otherwise proceed to querying usernames
    let usersTable = filterObj.uploaders
    if (!usersTable.length) {
      const userids = files
        .map(file => file.userid)
        .filter((v, i, a) => {
          return v !== null && v !== undefined && v !== '' && a.indexOf(v) === i
        })

      // If there are no uploads attached to a registered user, send response
      if (userids.length === 0)
        return res.json({ success: true, files, count, albums, basedomain })

      // Query usernames of user IDs from currently selected files
      usersTable = await db.table('users')
        .whereIn('id', userids)
        .select('id', 'username')
    }

    const users = {}
    for (const user of usersTable)
      users[user.id] = user.username

    return res.json({ success: true, files, count, users, albums, basedomain })
  } catch (error) {
    logger.error(error)
    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
  }
}

module.exports = self