From 02e2e402c3718f3085e66aa7e650cbf7732c1dd3 Mon Sep 17 00:00:00 2001
From: Bobby Wibowo
Date: Sun, 8 Sep 2019 08:56:29 +0700
Subject: [PATCH] !!! MASSIVE OVERHAUL !!!

As the title says, this commit is a massive overhaul. I've
rewritten/restructured almost everything in the controller scripts.
Because of that, there's a considerable possibility that I've broken
something somewhere.

Notable changes:

Added temporary uploads.

Removed the file name length changer from the dashboard, in favor of an
equivalent in the homepage config tab. This allows non-registered users
to also set file name length.

A bunch of other undocumented stuff. I don't know, I'm too tired to
remember them all.
---
 .eslintrc.json | 4 +-
 config.sample.js | 39 +-
 controllers/albumsController.js | 754 ++++++++++---------
 controllers/authController.js | 372 +++++-----
 controllers/pathsController.js | 79 ++
 controllers/permissionController.js | 32 +-
 controllers/tokenController.js | 123 ++--
 controllers/uploadController.js | 1041 ++++++++++++++-------------
 controllers/utilsController.js | 729 ++++++++++---------
 database/db.js | 2 +-
 database/migration.js | 48 +-
 logger.js | 16 +-
 lolisafe.js | 231 +++---
 package.json | 11 +-
 public/css/home.css | 6 +
 public/css/style.css | 11 +-
 public/css/sweetalert.css | 11 +-
 public/js/auth.js | 11 +-
 public/js/dashboard.js | 296 ++++----
 public/js/home.js | 269 +++++--
 public/js/s/utils.js | 20 +-
 public/safe.fiery.me.sxcu | 5 +-
 routes/album.js | 18 +-
 routes/api.js | 4 +-
 routes/nojs.js | 6 +-
 scripts/{cfpurge.js => cf-purge.js} | 18 +-
 scripts/clean-up.js | 78 ++
 scripts/delete-expired.js | 46 ++
 scripts/thumbs.js | 118 ++-
 views/_globals.njk | 2 +-
 views/dashboard.njk | 3 -
 views/faq.njk | 34 +-
 views/home.njk | 39 +-
 yarn.lock | 128 ++--
 34 files changed, 2528 insertions(+), 2076 deletions(-)
 create mode 100644 controllers/pathsController.js
 rename scripts/{cfpurge.js => cf-purge.js} (81%)
 create mode 100644 scripts/clean-up.js
 create mode 100644 scripts/delete-expired.js

diff --git a/.eslintrc.json b/.eslintrc.json
index 77fca09..54f42d8 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -15,6 +15,8 @@
       "multi",
       "consistent"
     ],
+    "no-throw-literal": 0,
+    "no-var": "error",
     "prefer-const": [
       "error",
       {
@@ -30,6 +32,6 @@
       "error",
       "single"
     ],
-    "no-var": "error"
+    "standard/no-callback-literal": 0
   }
 }
diff --git a/config.sample.js b/config.sample.js
index ffbf438..e381321 100644
--- a/config.sample.js
+++ b/config.sample.js
@@ -196,6 +196,38 @@ module.exports = {
   */
   urlExtensionsFilter: [],
 
+  /*
+    An array of allowed ages for uploads (in hours).
+
+    The default age will be the value at the very top of the array.
+    If the array is populated but does not contain a zero value,
+    permanent uploads will be rejected.
+    This only applies to new files uploaded after enabling the option.
+
+    If the array is empty or set to a falsy value, the temporary uploads
+    feature will be disabled, and all uploads will be permanent (original behavior).
+
+    When the temporary uploads feature is disabled, any existing temporary uploads
+    will never be automatically deleted, since the safe will not start the
+    periodic check-up task.
+  */
+  temporaryUploadAges: [
+    0, // permanent
+    1 / 60 * 15, // 15 minutes
+    1 / 60 * 30, // 30 minutes
+    1, // 1 hour
+    6, // 6 hours
+    12, // 12 hours
+    24, // 24 hours (1 day)
+    168 // 168 hours (7 days)
+  ],
+
+  /*
+    Interval of the periodic check-up task for temporary uploads (in milliseconds).
+    NOTE: Set to a falsy value if you prefer to use your own external script.
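+
+    For example, a laxer 5-minute schedule would look like the line below
+    (a sketch only; any millisecond value works here):
+    temporaryUploadsInterval: 5 * 60000 // 5 minutes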
+ */ + temporaryUploadsInterval: 1 * 60000, // 1 minute + /* Scan files using ClamAV through clamd. */ @@ -248,10 +280,9 @@ module.exports = { may not be used by more than a single file (e.i. if "abcd.jpg" already exists, a new PNG file may not be named as "abcd.png"). - If this is enabled, the safe will then attempt to read file list of the uploads directory - during first launch, parse the names, then cache the identifiers into memory. - Its downside is that it will use a bit more memory, generally a few MBs increase - on a safe with over >10k uploads. + If this is enabled, the safe will query files from the database during first launch, + parse their names, then cache the identifiers into memory. + Its downside is that it will use a bit more memory. If this is disabled, collision check will become less strict. As in, the same identifier may be used by multiple different extensions (e.i. if "abcd.jpg" diff --git a/controllers/albumsController.js b/controllers/albumsController.js index aa37cc6..dd8db8d 100644 --- a/controllers/albumsController.js +++ b/controllers/albumsController.js @@ -4,40 +4,68 @@ const EventEmitter = require('events') const fs = require('fs') const logger = require('./../logger') const path = require('path') +const paths = require('./pathsController') const randomstring = require('randomstring') const utils = require('./utilsController') const Zip = require('jszip') -const albumsController = {} +const self = { + onHold: new Set() +} -const maxTries = config.uploads.maxTries || 1 const homeDomain = config.homeDomain || config.domain -const uploadsDir = path.resolve(config.uploads.folder) -const zipsDir = path.join(uploadsDir, 'zips') -const zipMaxTotalSize = config.cloudflare.zipMaxTotalSize -const zipMaxTotalSizeBytes = parseInt(config.cloudflare.zipMaxTotalSize) * 1000000 + +const zipMaxTotalSize = parseInt(config.cloudflare.zipMaxTotalSize) +const zipMaxTotalSizeBytes = config.cloudflare.zipMaxTotalSize * 1000000 const zipOptions = config.uploads.jsZipOptions // Force 'type' option to 'nodebuffer' zipOptions.type = 'nodebuffer' // Apply fallbacks for missing config values -if (zipOptions.streamFiles === undefined) zipOptions.streamFiles = true -if (zipOptions.compression === undefined) zipOptions.compression = 'DEFLATE' +if (zipOptions.streamFiles === undefined) + zipOptions.streamFiles = true +if (zipOptions.compression === undefined) + zipOptions.compression = 'DEFLATE' if (zipOptions.compressionOptions === undefined || zipOptions.compressionOptions.level === undefined) zipOptions.compressionOptions = { level: 1 } -albumsController.zipEmitters = new Map() +self.zipEmitters = new Map() class ZipEmitter extends EventEmitter { constructor (identifier) { super() this.identifier = identifier - this.once('done', () => albumsController.zipEmitters.delete(this.identifier)) + this.once('done', () => self.zipEmitters.delete(this.identifier)) } } -albumsController.list = async (req, res, next) => { +self.getUniqueRandomName = async () => { + for (let i = 0; i < utils.idMaxTries; i++) { + const identifier = randomstring.generate(config.uploads.albumIdentifierLength) + if (self.onHold.has(identifier)) + continue + + // Put token on-hold (wait for it to be inserted to DB) + self.onHold.add(identifier) + + const album = await db.table('albums') + .where('identifier', identifier) + .select('id') + .first() + if (album) { + self.onHold.delete(identifier) + logger.log(`Album with identifier ${identifier} already exists (${i + 1}/${utils.idMaxTries}).`) + continue + } + + return 
identifier + } + + throw 'Sorry, we could not allocate a unique random identifier. Try again?' +} + +self.list = async (req, res, next) => { const user = await utils.authorize(req, res) if (!user) return @@ -55,88 +83,77 @@ albumsController.list = async (req, res, next) => { if (req.params.sidebar !== undefined) return res.json({ success: true, albums }) - const ids = [] + const albumids = {} for (const album of albums) { album.download = album.download !== 0 album.public = album.public !== 0 - - ids.push(album.id) + album.files = 0 + // Map by IDs + albumids[album.id] = album } const files = await db.table('files') - .whereIn('albumid', ids) + .whereIn('albumid', Object.keys(albumids)) .select('albumid') - const albumsCount = {} - for (const id of ids) albumsCount[id] = 0 - for (const file of files) albumsCount[file.albumid] += 1 - for (const album of albums) album.files = albumsCount[album.id] + // Increment files count + for (const file of files) + if (albumids[file.albumid]) + albumids[file.albumid].files++ return res.json({ success: true, albums, homeDomain }) } -albumsController.create = async (req, res, next) => { +self.create = async (req, res, next) => { const user = await utils.authorize(req, res) if (!user) return - const name = utils.escape(req.body.name) - if (name === undefined || name === '') + const name = typeof req.body.name === 'string' + ? utils.escape(req.body.name.trim()) + : '' + + if (!name) return res.json({ success: false, description: 'No album name specified.' }) - const album = await db.table('albums') - .where({ + try { + const album = await db.table('albums') + .where({ + name, + enabled: 1, + userid: user.id + }) + .first() + + if (album) + return res.json({ success: false, description: 'There is already an album with that name.' }) + + const identifier = await self.getUniqueRandomName() + + const ids = await db.table('albums').insert({ name, enabled: 1, - userid: user.id + userid: user.id, + identifier, + timestamp: Math.floor(Date.now() / 1000), + editedAt: 0, + zipGeneratedAt: 0, + download: (req.body.download === false || req.body.download === 0) ? 0 : 1, + public: (req.body.public === false || req.body.public === 0) ? 0 : 1, + description: typeof req.body.description === 'string' + ? utils.escape(req.body.description.trim()) + : '' }) - .first() + utils.invalidateStatsCache('albums') + self.onHold.delete(identifier) - if (album) - return res.json({ success: false, description: 'There\'s already an album with that name.' }) - - const identifier = await albumsController.getUniqueRandomName() - .catch(error => { - res.json({ success: false, description: error.toString() }) - }) - if (!identifier) return - - const ids = await db.table('albums').insert({ - name, - enabled: 1, - userid: user.id, - identifier, - timestamp: Math.floor(Date.now() / 1000), - editedAt: 0, - zipGeneratedAt: 0, - download: (req.body.download === false || req.body.download === 0) ? 0 : 1, - public: (req.body.public === false || req.body.public === 0) ? 0 : 1, - description: utils.escape(req.body.description) || '' - }) - utils.invalidateStatsCache('albums') - - return res.json({ success: true, id: ids[0] }) + return res.json({ success: true, id: ids[0] }) + } catch (error) { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' 
}) + } } -albumsController.getUniqueRandomName = () => { - return new Promise((resolve, reject) => { - const select = i => { - const identifier = randomstring.generate(config.uploads.albumIdentifierLength) - db.table('albums') - .where('identifier', identifier) - .then(rows => { - if (!rows || !rows.length) return resolve(identifier) - logger.log(`An album with identifier ${identifier} already exists (${++i}/${maxTries}).`) - if (i < maxTries) return select(i) - // eslint-disable-next-line prefer-promise-reject-errors - return reject('Sorry, we could not allocate a unique random identifier. Try again?') - }) - } - // Get us a unique random identifier - select(0) - }) -} - -albumsController.delete = async (req, res, next) => { +self.delete = async (req, res, next) => { const user = await utils.authorize(req, res) if (!user) return @@ -145,94 +162,31 @@ albumsController.delete = async (req, res, next) => { if (id === undefined || id === '') return res.json({ success: false, description: 'No album specified.' }) - let failed = [] - if (purge) { - const files = await db.table('files') + try { + if (purge) { + const files = await db.table('files') + .where({ + albumid: id, + userid: user.id + }) + + if (files.length) { + const ids = files.map(file => file.id) + const failed = await utils.bulkDeleteFromDb('id', ids, user) + if (failed.length) + return res.json({ success: false, failed }) + } + } + + await db.table('albums') .where({ - albumid: id, + id, userid: user.id }) + .update('enabled', 0) + utils.invalidateStatsCache('albums') - if (files.length) { - const ids = files.map(file => file.id) - failed = await utils.bulkDeleteFiles('id', ids, user) - - if (failed.length === ids.length) - return res.json({ success: false, description: 'Could not delete any of the files associated with the album.' }) - } - } - - await db.table('albums') - .where({ - id, - userid: user.id - }) - .update('enabled', 0) - utils.invalidateStatsCache('albums') - - const identifier = await db.table('albums') - .select('identifier') - .where({ - id, - userid: user.id - }) - .first() - .then(row => row.identifier) - - // Unlink zip archive of the album if it exists - const zipPath = path.join(zipsDir, `${identifier}.zip`) - fs.unlink(zipPath, error => { - if (error && error.code !== 'ENOENT') { - logger.error(error) - return res.json({ success: false, description: error.toString(), failed }) - } - res.json({ success: true, failed }) - }) -} - -albumsController.edit = async (req, res, next) => { - const user = await utils.authorize(req, res) - if (!user) return - - const id = parseInt(req.body.id) - if (isNaN(id)) - return res.json({ success: false, description: 'No album specified.' }) - - const name = utils.escape(req.body.name) - if (name === undefined || name === '') - return res.json({ success: false, description: 'No name specified.' }) - - const album = await db.table('albums') - .where({ - id, - userid: user.id, - enabled: 1 - }) - .first() - - if (!album) - return res.json({ success: false, description: 'Could not get album with the specified ID.' }) - else if (album.id !== id) - return res.json({ success: false, description: 'Name already in use.' }) - else if (req._old && (album.id === id)) - // Old rename API - return res.json({ success: false, description: 'You did not specify a new name.' 
}) - - await db.table('albums') - .where({ - id, - userid: user.id - }) - .update({ - name, - download: Boolean(req.body.download), - public: Boolean(req.body.public), - description: utils.escape(req.body.description) || '' - }) - utils.invalidateStatsCache('albums') - - if (req.body.requestLink) { - const oldIdentifier = await db.table('albums') + const identifier = await db.table('albums') .select('identifier') .where({ id, @@ -241,84 +195,158 @@ albumsController.edit = async (req, res, next) => { .first() .then(row => row.identifier) - const identifier = await albumsController.getUniqueRandomName() - .catch(error => { - res.json({ success: false, description: error.toString() }) + await paths.unlink(path.join(paths.zips, `${identifier}.zip`)) + } catch (error) { + if (error && error.code !== 'ENOENT') { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' }) + } + } + + return res.json({ success: true }) +} + +self.edit = async (req, res, next) => { + const user = await utils.authorize(req, res) + if (!user) return + + const id = parseInt(req.body.id) + if (isNaN(id)) + return res.json({ success: false, description: 'No album specified.' }) + + const name = typeof req.body.name === 'string' + ? utils.escape(req.body.name.trim()) + : '' + + if (!name) + return res.json({ success: false, description: 'No name specified.' }) + + try { + const album = await db.table('albums') + .where({ + id, + userid: user.id, + enabled: 1 }) - if (!identifier) return + .first() + + if (!album) + return res.json({ success: false, description: 'Could not get album with the specified ID.' }) + else if (album.id !== id) + return res.json({ success: false, description: 'Name already in use.' }) + else if (req._old && (album.id === id)) + // Old rename API + return res.json({ success: false, description: 'You did not specify a new name.' }) await db.table('albums') .where({ id, userid: user.id }) - .update('identifier', identifier) + .update({ + name, + download: Boolean(req.body.download), + public: Boolean(req.body.public), + description: typeof req.body.description === 'string' + ? 
utils.escape(req.body.description.trim()) + : '' + }) + utils.invalidateStatsCache('albums') + + if (!req.body.requestLink) + return res.json({ success: true, name }) + + const oldIdentifier = album.identifier + const newIdentifier = await self.getUniqueRandomName() + + await db.table('albums') + .where({ + id, + userid: user.id + }) + .update('identifier', newIdentifier) + utils.invalidateStatsCache('albums') + self.onHold.delete(newIdentifier) // Rename zip archive of the album if it exists - const zipPath = path.join(zipsDir, `${oldIdentifier}.zip`) - return fs.access(zipPath, error => { - if (error) return res.json({ success: true, identifier }) - fs.rename(zipPath, path.join(zipsDir, `${identifier}.zip`), error => { - if (!error) return res.json({ success: true, identifier }) - logger.error(error) - res.json({ success: false, description: error.toString() }) - }) - }) - } + try { + const oldZip = path.join(paths.zips, `${oldIdentifier}.zip`) + // await paths.access(oldZip) + const newZip = path.join(paths.zips, `${newIdentifier}.zip`) + await paths.rename(oldZip, newZip) + } catch (err) { + // Re-throw error + if (err.code !== 'ENOENT') + throw err + } - return res.json({ success: true, name }) + return res.json({ + success: true, + identifier: newIdentifier + }) + } catch (error) { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' }) + } } -albumsController.rename = async (req, res, next) => { +self.rename = async (req, res, next) => { req._old = true req.body = { name: req.body.name } - return albumsController.edit(req, res, next) + return self.edit(req, res, next) } -albumsController.get = async (req, res, next) => { - // TODO: Something, can't remember... +self.get = async (req, res, next) => { const identifier = req.params.identifier if (identifier === undefined) return res.status(401).json({ success: false, description: 'No identifier provided.' }) - const album = await db.table('albums') - .where({ - identifier, - enabled: 1 + try { + const album = await db.table('albums') + .where({ + identifier, + enabled: 1 + }) + .first() + + if (!album) + return res.json({ + success: false, + description: 'Album not found.' + }) + else if (album.public === 0) + return res.status(403).json({ + success: false, + description: 'This album is not available for public.' + }) + + const title = album.name + const files = await db.table('files') + .select('name') + .where('albumid', album.id) + .orderBy('id', 'DESC') + + for (const file of files) { + file.file = `${config.domain}/${file.name}` + + const extname = utils.extname(file.name) + if (utils.mayGenerateThumb(extname)) + file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png` + } + + return res.json({ + success: true, + title, + count: files.length, + files }) - .first() - - if (!album) - return res.json({ success: false, description: 'Album not found.' }) - else if (album.public === 0) - return res.status(401).json({ - success: false, - description: 'This album is not available for public.' 
-    })
-
-  const title = album.name
-  const files = await db.table('files')
-    .select('name')
-    .where('albumid', album.id)
-    .orderBy('id', 'DESC')
-
-  for (const file of files) {
-    file.file = `${config.domain}/${file.name}`
-
-    const extname = utils.extname(file.name)
-    if (utils.mayGenerateThumb(extname))
-      file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
   }
-
-  return res.json({
-    success: true,
-    title,
-    count: files.length,
-    files
-  })
 }
 
-albumsController.generateZip = async (req, res, next) => {
+self.generateZip = async (req, res, next) => {
   const versionString = parseInt(req.query.v)
   const download = (filePath, fileName) => {
     const headers = {}
@@ -337,160 +365,178 @@ albumsController.generateZip = async (req, res, next) => {
   })
 
   if (!config.uploads.generateZips)
-    return res.status(401).json({ success: false, description: 'Zip generation disabled.' })
-
-  const album = await db.table('albums')
-    .where({
-      identifier,
-      enabled: 1
+    return res.status(401).json({
+      success: false,
+      description: 'Zip generation disabled.'
     })
-    .first()
 
-  if (!album)
-    return res.json({ success: false, description: 'Album not found.' })
-  else if (album.download === 0)
-    return res.json({ success: false, description: 'Download for this album is disabled.' })
-
-  if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
-    return res.redirect(`${album.identifier}?v=${album.editedAt}`)
-
-  if (album.zipGeneratedAt > album.editedAt) {
-    const filePath = path.join(zipsDir, `${identifier}.zip`)
-    const exists = await new Promise(resolve => fs.access(filePath, error => resolve(!error)))
-    if (exists) {
-      const fileName = `${album.name}.zip`
-      return download(filePath, fileName)
-    }
-  }
-
-  if (albumsController.zipEmitters.has(identifier)) {
-    logger.log(`Waiting previous zip task for album: ${identifier}.`)
-    return albumsController.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
-      if (filePath && fileName)
-        download(filePath, fileName)
-      else if (json)
-        res.json(json)
-    })
-  }
-
-  albumsController.zipEmitters.set(identifier, new ZipEmitter(identifier))
-
-  logger.log(`Starting zip task for album: ${identifier}.`)
-  const files = await db.table('files')
-    .select('name', 'size')
-    .where('albumid', album.id)
-  if (files.length === 0) {
-    logger.log(`Finished zip task for album: ${identifier} (no files).`)
-    const json = { success: false, description: 'There are no files in the album.' 
} - albumsController.zipEmitters.get(identifier).emit('done', null, null, json) - return res.json(json) - } - - if (zipMaxTotalSize) { - const totalSizeBytes = files.reduce((accumulator, file) => accumulator + parseInt(file.size), 0) - if (totalSizeBytes > zipMaxTotalSizeBytes) { - logger.log(`Finished zip task for album: ${identifier} (size exceeds).`) - const json = { - success: false, - description: `Total size of all files in the album exceeds the configured limit (${zipMaxTotalSize}).` - } - albumsController.zipEmitters.get(identifier).emit('done', null, null, json) - return res.json(json) - } - } - - const zipPath = path.join(zipsDir, `${album.identifier}.zip`) - const archive = new Zip() - - let iteration = 0 - for (const file of files) - fs.readFile(path.join(uploadsDir, file.name), (error, data) => { - if (error) - logger.error(error) - else - archive.file(file.name, data) - - iteration++ - if (iteration === files.length) - archive - .generateNodeStream(zipOptions) - .pipe(fs.createWriteStream(zipPath)) - .on('finish', async () => { - logger.log(`Finished zip task for album: ${identifier} (success).`) - await db.table('albums') - .where('id', album.id) - .update('zipGeneratedAt', Math.floor(Date.now() / 1000)) - - const filePath = path.join(zipsDir, `${identifier}.zip`) - const fileName = `${album.name}.zip` - - albumsController.zipEmitters.get(identifier).emit('done', filePath, fileName) - utils.invalidateStatsCache('albums') - return download(filePath, fileName) - }) - }) -} - -albumsController.addFiles = async (req, res, next) => { - const user = await utils.authorize(req, res) - if (!user) return - - const ids = req.body.ids - if (!ids || !ids.length) - return res.json({ success: false, description: 'No files specified.' }) - - let albumid = req.body.albumid - if (typeof albumid !== 'number') albumid = parseInt(albumid) - if (isNaN(albumid) || (albumid < 0)) albumid = null - - const albumids = [] - - if (albumid !== null) { + try { const album = await db.table('albums') - .where('id', albumid) - .where(function () { - if (user.username !== 'root') - this.where('userid', user.id) + .where({ + identifier, + enabled: 1 }) .first() if (!album) - return res.json({ success: false, description: 'Album doesn\'t exist or it doesn\'t belong to the user.' }) + return res.json({ success: false, description: 'Album not found.' }) + else if (album.download === 0) + return res.json({ success: false, description: 'Download for this album is disabled.' 
})
 
-    albumids.push(albumid)
+    if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
+      return res.redirect(`${album.identifier}?v=${album.editedAt}`)
+
+    if (album.zipGeneratedAt > album.editedAt) {
+      const filePath = path.join(paths.zips, `${identifier}.zip`)
+      const exists = await new Promise(resolve => fs.access(filePath, error => resolve(!error)))
+      if (exists) {
+        const fileName = `${album.name}.zip`
+        return download(filePath, fileName)
+      }
+    }
+
+    if (self.zipEmitters.has(identifier)) {
+      logger.log(`Waiting previous zip task for album: ${identifier}.`)
+      return self.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
+        if (filePath && fileName)
+          download(filePath, fileName)
+        else if (json)
+          res.json(json)
+      })
+    }
+
+    self.zipEmitters.set(identifier, new ZipEmitter(identifier))
+
+    logger.log(`Starting zip task for album: ${identifier}.`)
+
+    const files = await db.table('files')
+      .select('name', 'size')
+      .where('albumid', album.id)
+    if (files.length === 0) {
+      logger.log(`Finished zip task for album: ${identifier} (no files).`)
+      const json = {
+        success: false,
+        description: 'There are no files in the album.'
+      }
+      self.zipEmitters.get(identifier).emit('done', null, null, json)
+      return res.json(json)
+    }
+
+    if (zipMaxTotalSize) {
+      const totalSizeBytes = files.reduce((accumulator, file) => accumulator + parseInt(file.size), 0)
+      if (totalSizeBytes > zipMaxTotalSizeBytes) {
+        logger.log(`Finished zip task for album: ${identifier} (size exceeds).`)
+        const json = {
+          success: false,
+          description: `Total size of all files in the album exceeds the configured limit (${zipMaxTotalSize} MB).`
+        }
+        self.zipEmitters.get(identifier).emit('done', null, null, json)
+        return res.json(json)
+      }
+    }
+
+    const zipPath = path.join(paths.zips, `${album.identifier}.zip`)
+    const archive = new Zip()
+
+    try {
+      for (const file of files) {
+        const data = await paths.readFile(path.join(paths.uploads, file.name))
+        archive.file(file.name, data)
+      }
+      await new Promise((resolve, reject) => {
+        archive.generateNodeStream(zipOptions)
+          .pipe(fs.createWriteStream(zipPath))
+          .on('error', error => reject(error))
+          .on('finish', () => resolve())
+      })
+    } catch (error) {
+      logger.error(error)
+      return res.status(500).json({
+        success: false,
+        description: error.toString()
+      })
+    }
+
+    logger.log(`Finished zip task for album: ${identifier} (success).`)
+
+    await db.table('albums')
+      .where('id', album.id)
+      .update('zipGeneratedAt', Math.floor(Date.now() / 1000))
+    utils.invalidateStatsCache('albums')
+
+    const filePath = path.join(paths.zips, `${identifier}.zip`)
+    const fileName = `${album.name}.zip`
+
+    self.zipEmitters.get(identifier).emit('done', filePath, fileName)
+    return download(filePath, fileName)
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
   }
-
-  const files = await db.table('files')
-    .whereIn('id', ids)
-    .where(function () {
-      if (user.username !== 'root')
-        this.where('userid', user.id)
-    })
-
-  const failed = ids.filter(id => !files.find(file => file.id === id))
-
-  const updateDb = await db.table('files')
-    .whereIn('id', files.map(file => file.id))
-    .update('albumid', albumid)
-    .catch(logger.error)
-
-  if (!updateDb)
-    return res.json({
-      success: false,
-      description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 
'to' : 'from'} the album.` - }) - - files.forEach(file => { - if (file.albumid && !albumids.includes(file.albumid)) - albumids.push(file.albumid) - }) - - await db.table('albums') - .whereIn('id', albumids) - .update('editedAt', Math.floor(Date.now() / 1000)) - .catch(logger.error) - - return res.json({ success: true, failed }) } -module.exports = albumsController +self.addFiles = async (req, res, next) => { + const user = await utils.authorize(req, res) + if (!user) return + + const ids = req.body.ids + if (!Array.isArray(ids) || !ids.length) + return res.json({ success: false, description: 'No files specified.' }) + + let albumid = parseInt(req.body.albumid) + if (isNaN(albumid) || albumid < 0) albumid = null + + let failed = [] + const albumids = [] + try { + if (albumid !== null) { + const album = await db.table('albums') + .where('id', albumid) + .where(function () { + if (user.username !== 'root') + this.where('userid', user.id) + }) + .first() + + if (!album) + return res.json({ + success: false, + description: 'Album does not exist or it does not belong to the user.' + }) + + albumids.push(albumid) + } + + const files = await db.table('files') + .whereIn('id', ids) + .where('userid', user.id) + + failed = ids.filter(id => !files.find(file => file.id === id)) + + await db.table('files') + .whereIn('id', files.map(file => file.id)) + .update('albumid', albumid) + + files.forEach(file => { + if (file.albumid && !albumids.includes(file.albumid)) + albumids.push(file.albumid) + }) + + await db.table('albums') + .whereIn('id', albumids) + .update('editedAt', Math.floor(Date.now() / 1000)) + + return res.json({ success: true, failed }) + } catch (error) { + logger.error(error) + if (failed.length === ids.length) + return res.json({ + success: false, + description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.` + }) + else + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' }) + } +} + +module.exports = self diff --git a/controllers/authController.js b/controllers/authController.js index 0f5007a..da9073b 100644 --- a/controllers/authController.js +++ b/controllers/authController.js @@ -1,3 +1,4 @@ +const { promisify } = require('util') const bcrypt = require('bcrypt') const config = require('./../config') const db = require('knex')(config.database) @@ -7,160 +8,119 @@ const randomstring = require('randomstring') const tokens = require('./tokenController') const utils = require('./utilsController') -const authController = {} - -authController.verify = async (req, res, next) => { - let username = req.body.username - let password = req.body.password - - if (username === undefined) - return res.json({ success: false, description: 'No username provided.' }) - if (password === undefined) - return res.json({ success: false, description: 'No password provided.' }) - - username = username.trim() - password = password.trim() - - const user = await db.table('users').where('username', username).first() - if (!user) - return res.json({ success: false, description: 'Username does not exist.' }) - - if (user.enabled === false || user.enabled === 0) - return res.json({ success: false, description: 'This account has been disabled.' }) - - bcrypt.compare(password, user.password, (error, result) => { - if (error) { - logger.error(error) - return res.json({ success: false, description: 'There was an error.' 
}) - } - if (result === false) return res.json({ success: false, description: 'Wrong password.' }) - return res.json({ success: true, token: user.token }) - }) +const self = { + compare: promisify(bcrypt.compare), + hash: promisify(bcrypt.hash) } -authController.register = async (req, res, next) => { - if (config.enableUserAccounts === false) - return res.json({ success: false, description: 'Register is disabled at the moment.' }) - - let username = req.body.username - let password = req.body.password - - if (username === undefined) +self.verify = async (req, res, next) => { + const username = typeof req.body.username === 'string' + ? req.body.username.trim() + : '' + if (!username) return res.json({ success: false, description: 'No username provided.' }) - if (password === undefined) + + const password = typeof req.body.password === 'string' + ? req.body.password.trim() + : '' + if (!password) return res.json({ success: false, description: 'No password provided.' }) - username = username.trim() - password = password.trim() + try { + const user = await db.table('users') + .where('username', username) + .first() + if (!user) + return res.json({ success: false, description: 'Username does not exist.' }) + + if (user.enabled === false || user.enabled === 0) + return res.json({ success: false, description: 'This account has been disabled.' }) + + const result = await self.compare(password, user.password) + if (result === false) + return res.json({ success: false, description: 'Wrong password.' }) + else + return res.json({ success: true, token: user.token }) + } catch (error) { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' }) + } +} + +self.register = async (req, res, next) => { + if (config.enableUserAccounts === false) + return res.json({ success: false, description: 'Registration is currently disabled.' }) + + const username = typeof req.body.username === 'string' + ? req.body.username.trim() + : '' if (username.length < 4 || username.length > 32) return res.json({ success: false, description: 'Username must have 4-32 characters.' }) + const password = typeof req.body.password === 'string' + ? req.body.password.trim() + : '' if (password.length < 6 || password.length > 64) return res.json({ success: false, description: 'Password must have 6-64 characters.' }) - const user = await db.table('users').where('username', username).first() - if (user) - return res.json({ success: false, description: 'Username already exists.' }) + try { + const user = await db.table('users') + .where('username', username) + .first() - bcrypt.hash(password, 10, async (error, hash) => { - if (error) { - logger.error(error) - return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' }) - } + if (user) + return res.json({ success: false, description: 'Username already exists.' }) + + const hash = await self.hash(password, 10) const token = await tokens.generateUniqueToken() if (!token) - return res.json({ success: false, description: 'Error generating unique token (╯°□°)╯︵ ┻━┻.' }) - - await db.table('users').insert({ - username, - password: hash, - token, - enabled: 1, - permission: perms.permissions.user - }) + return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' 
})
 
+    await db.table('users')
+      .insert({
+        username,
+        password: hash,
+        token,
+        enabled: 1,
+        permission: perms.permissions.user
+      })
     utils.invalidateStatsCache('users')
+    tokens.onHold.delete(token)
+
     return res.json({ success: true, token })
-  })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-authController.changePassword = async (req, res, next) => {
+self.changePassword = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
-  const password = req.body.password
-  if (password === undefined)
-    return res.json({ success: false, description: 'No password provided.' })
-
+  const password = typeof req.body.password === 'string'
+    ? req.body.password.trim()
+    : ''
   if (password.length < 6 || password.length > 64)
     return res.json({ success: false, description: 'Password must have 6-64 characters.' })
 
-  bcrypt.hash(password, 10, async (error, hash) => {
-    if (error) {
-      logger.error(error)
-      return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' })
-    }
+  try {
+    const hash = await self.hash(password, 10)
 
     await db.table('users')
       .where('id', user.id)
      .update('password', hash)
 
     return res.json({ success: true })
-  })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-authController.getFileLengthConfig = async (req, res, next) => {
-  const user = await utils.authorize(req, res)
-  if (!user) return
-  return res.json({
-    success: true,
-    fileLength: user.fileLength,
-    config: config.uploads.fileLength
-  })
-}
-
-authController.changeFileLength = async (req, res, next) => {
-  if (config.uploads.fileLength.userChangeable === false)
-    return res.json({
-      success: false,
-      description: 'Changing file name length is disabled at the moment.'
-    })
-
-  const user = await utils.authorize(req, res)
-  if (!user) return
-
-  const fileLength = parseInt(req.body.fileLength)
-  if (fileLength === undefined)
-    return res.json({
-      success: false,
-      description: 'No file name length provided.'
-    })
-
-  if (isNaN(fileLength))
-    return res.json({
-      success: false,
-      description: 'File name length is not a valid number.'
-    })
-
-  if (fileLength < config.uploads.fileLength.min || fileLength > config.uploads.fileLength.max)
-    return res.json({
-      success: false,
-      description: `File name length must be ${config.uploads.fileLength.min} to ${config.uploads.fileLength.max} characters.`
-    })
-
-  if (fileLength === user.fileLength)
-    return res.json({ success: true })
-
-  await db.table('users')
-    .where('id', user.id)
-    .update('fileLength', fileLength)
-
-  return res.json({ success: true })
-}
-
-authController.editUser = async (req, res, next) => {
+self.editUser = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
@@ -168,67 +128,61 @@ authController.editUser = async (req, res, next) => {
   if (isNaN(id))
     return res.json({ success: false, description: 'No user specified.' })
 
-  const target = await db.table('users')
-    .where('id', id)
-    .first()
+  try {
+    const target = await db.table('users')
+      .where('id', id)
+      .first()
 
-  if (!target)
-    return res.json({ success: false, description: 'Could not get user with the specified ID.' })
-  else if (!perms.higher(user, target))
-    return res.json({ success: false, description: 'The user is in the same or higher group as you.' 
}) - else if (target.username === 'root') - return res.json({ success: false, description: 'Root user may not be edited.' }) + if (!target) + return res.json({ success: false, description: 'Could not get user with the specified ID.' }) + else if (!perms.higher(user, target)) + return res.json({ success: false, description: 'The user is in the same or higher group as you.' }) + else if (target.username === 'root') + return res.json({ success: false, description: 'Root user may not be edited.' }) - const update = {} + const update = {} - if (req.body.username !== undefined) { - update.username = `${req.body.username}` - if (update.username.length < 4 || update.username.length > 32) - return res.json({ success: false, description: 'Username must have 4-32 characters.' }) - } + if (req.body.username !== undefined) { + update.username = String(req.body.username).trim() + if (update.username.length < 4 || update.username.length > 32) + return res.json({ success: false, description: 'Username must have 4-32 characters.' }) + } - if (req.body.enabled !== undefined) - update.enabled = Boolean(req.body.enabled) + if (req.body.enabled !== undefined) + update.enabled = Boolean(req.body.enabled) - if (req.body.group !== undefined) { - update.permission = perms.permissions[req.body.group] || target.permission - if (typeof update.permission !== 'number' || update.permission < 0) - update.permission = target.permission - } + if (req.body.group !== undefined) { + update.permission = perms.permissions[req.body.group] || target.permission + if (typeof update.permission !== 'number' || update.permission < 0) + update.permission = target.permission + } - await db.table('users') - .where('id', id) - .update(update) - utils.invalidateStatsCache('users') - - if (!req.body.resetPassword) - return res.json({ success: true, update }) - - const password = randomstring.generate(16) - bcrypt.hash(password, 10, async (error, hash) => { - if (error) { - logger.error(error) - return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' }) + let password + if (req.body.resetPassword) { + password = randomstring.generate(16) + update.password = await self.hash(password, 10) } await db.table('users') .where('id', id) - .update('password', hash) + .update(update) + utils.invalidateStatsCache('users') - return res.json({ success: true, update, password }) - }) -} - -authController.disableUser = async (req, res, next) => { - const body = { - id: req.body.id, - enabled: false + const response = { success: true, update } + if (password) response.password = password + return res.json(response) + } catch (error) { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' 
}) } - req.body = body - return authController.editUser(req, res, next) } -authController.listUsers = async (req, res, next) => { +self.disableUser = async (req, res, next) => { + req.body = { id: req.body.id, enabled: false } + return self.editUser(req, res, next) +} + +self.listUsers = async (req, res, next) => { const user = await utils.authorize(req, res) if (!user) return @@ -236,53 +190,55 @@ authController.listUsers = async (req, res, next) => { if (!isadmin) return res.status(403).end() - const count = await db.table('users') - .count('id as count') - .then(rows => rows[0].count) - if (!count) - return res.json({ success: true, users: [], count }) + try { + const count = await db.table('users') + .count('id as count') + .then(rows => rows[0].count) + if (!count) + return res.json({ success: true, users: [], count }) - let offset = req.params.page - if (offset === undefined) offset = 0 + let offset = req.params.page + if (offset === undefined) offset = 0 - const users = await db.table('users') - .limit(25) - .offset(25 * offset) - .select('id', 'username', 'enabled', 'fileLength', 'permission') + const users = await db.table('users') + .limit(25) + .offset(25 * offset) + .select('id', 'username', 'enabled', 'permission') - const userids = [] + const userids = [] - for (const user of users) { - user.groups = perms.mapPermissions(user) - delete user.permission + for (const user of users) { + user.groups = perms.mapPermissions(user) + delete user.permission - userids.push(user.id) - user.uploadsCount = 0 - user.diskUsage = 0 + userids.push(user.id) + user.uploadsCount = 0 + user.diskUsage = 0 + } + + const maps = {} + const uploads = await db.table('files') + .whereIn('userid', userids) + + for (const upload of uploads) { + if (maps[upload.userid] === undefined) + maps[upload.userid] = { count: 0, size: 0 } + + maps[upload.userid].count++ + maps[upload.userid].size += parseInt(upload.size) + } + + for (const user of users) { + if (!maps[user.id]) continue + user.uploadsCount = maps[user.id].count + user.diskUsage = maps[user.id].size + } + + return res.json({ success: true, users, count }) + } catch (error) { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' 
}) } - - const maps = {} - const uploads = await db.table('files').whereIn('userid', userids) - - for (const upload of uploads) { - // This is the fastest method that I can think of - if (maps[upload.userid] === undefined) - maps[upload.userid] = { - count: 0, - size: 0 - } - - maps[upload.userid].count++ - maps[upload.userid].size += parseInt(upload.size) - } - - for (const user of users) { - if (!maps[user.id]) continue - user.uploadsCount = maps[user.id].count - user.diskUsage = maps[user.id].size - } - - return res.json({ success: true, users, count }) } -module.exports = authController +module.exports = self diff --git a/controllers/pathsController.js b/controllers/pathsController.js new file mode 100644 index 0000000..cd0eb75 --- /dev/null +++ b/controllers/pathsController.js @@ -0,0 +1,79 @@ +const { promisify } = require('util') +const config = require('./../config') +const fs = require('fs') +const logger = require('./../logger') +const path = require('path') + +const self = {} + +// Promisify these fs functions +const fsFuncs = [ + 'access', + 'lstat', + 'mkdir', + 'readdir', + 'readFile', + 'rename', + 'rmdir', + 'symlink', + 'unlink' +] + +for (const fsFunc of fsFuncs) + self[fsFunc] = promisify(fs[fsFunc]) + +self.uploads = path.resolve(config.uploads.folder) +self.chunks = path.join(self.uploads, 'chunks') +self.thumbs = path.join(self.uploads, 'thumbs') +self.zips = path.join(self.uploads, 'zips') + +self.thumbPlaceholder = path.resolve(config.uploads.generateThumbs.placeholder || 'public/images/unavailable.png') + +self.logs = path.resolve(config.logsFolder) + +self.customPages = path.resolve('pages/custom') +self.public = path.resolve('public') + +self.errorRoot = path.resolve(config.errorPages.rootDir) + +const verify = [ + self.uploads, + self.chunks, + self.thumbs, + self.zips, + self.logs, + self.customPages +] + +self.init = async () => { + try { + for (const p of verify) + try { + await self.access(p) + } catch (err) { + if (err.code !== 'ENOENT') { + logger.error(err) + } else { + const mkdir = await self.mkdir(p) + if (mkdir) + logger.log(`Created directory: ${p}`) + } + } + + // Purge chunks directory + const uuidDirs = await self.readdir(self.chunks) + for (const uuid of uuidDirs) { + const root = path.join(self.chunks, uuid) + const chunks = await self.readdir(root) + for (const chunk of chunks) + await self.unlink(path.join(root, chunk)) + await self.rmdir(root) + } + + self.verified = true + } catch (error) { + logger.error(error) + } +} + +module.exports = self diff --git a/controllers/permissionController.js b/controllers/permissionController.js index 23cd1b5..8f9f21b 100644 --- a/controllers/permissionController.js +++ b/controllers/permissionController.js @@ -1,32 +1,34 @@ -const permissionController = {} +const self = {} -permissionController.permissions = { - user: 0, // upload & delete own files, create & delete albums - moderator: 50, // delete other user's files - admin: 80, // manage users (disable accounts) & create moderators - superadmin: 100 // create admins - // groups will inherit permissions from groups which have lower value +self.permissions = { + user: 0, // Upload & delete own files, create & delete albums + moderator: 50, // Delete other user's files + admin: 80, // Manage users (disable accounts) & create moderators + superadmin: 100 // Create admins + // Groups will inherit permissions from groups which have lower value } -permissionController.is = (user, group) => { +self.is = (user, group) => { // root bypass - if (user.username === 
'root') return true + if (user.username === 'root') + return true + const permission = user.permission || 0 - return permission >= permissionController.permissions[group] + return permission >= self.permissions[group] } -permissionController.higher = (user, target) => { +self.higher = (user, target) => { const userPermission = user.permission || 0 const targetPermission = target.permission || 0 return userPermission > targetPermission } -permissionController.mapPermissions = user => { +self.mapPermissions = user => { const map = {} - Object.keys(permissionController.permissions).forEach(group => { - map[group] = permissionController.is(user, group) + Object.keys(self.permissions).forEach(group => { + map[group] = self.is(user, group) }) return map } -module.exports = permissionController +module.exports = self diff --git a/controllers/tokenController.js b/controllers/tokenController.js index 18d5f8c..cdadf5b 100644 --- a/controllers/tokenController.js +++ b/controllers/tokenController.js @@ -1,74 +1,99 @@ const config = require('./../config') const db = require('knex')(config.database) +const logger = require('./../logger') const perms = require('./permissionController') const randomstring = require('randomstring') const utils = require('./utilsController') -const TOKEN_LENGTH = 64 -const UNIQUE_TOKEN_MAX_TRIES = 3 +const self = { + tokenLength: 64, + tokenMaxTries: 3, + onHold: new Set() +} -const tokenController = {} +self.generateUniqueToken = async () => { + for (let i = 0; i < self.tokenMaxTries; i++) { + const token = randomstring.generate(self.tokenLength) + if (self.onHold.has(token)) + continue -tokenController.generateUniqueToken = () => { - return new Promise(resolve => { - const query = async i => { - const token = randomstring.generate(TOKEN_LENGTH) - const user = await db.table('users').where('token', token).first().catch(() => undefined) - if (user === undefined) return resolve(token) - if (++i < UNIQUE_TOKEN_MAX_TRIES) return query(i) - resolve(null) + // Put token on-hold (wait for it to be inserted to DB) + self.onHold.add(token) + + const user = await db.table('users') + .where('token', token) + .select('id') + .first() + if (user) { + self.onHold.delete(token) + continue } - query(0) - }) + + return token + } + + return null } -tokenController.verify = async (req, res, next) => { - const token = req.body.token - if (token === undefined) - return res.status(401).json({ - success: false, - description: 'No token provided.' - }) +self.verify = async (req, res, next) => { + const token = typeof req.body.token === 'string' + ? req.body.token.trim() + : '' - const user = await db.table('users').where('token', token).first() - if (!user) - return res.status(401).json({ - success: false, - description: 'Invalid token.' - }) + if (!token) + return res.status(401).json({ success: false, description: 'No token provided.' }) - return res.json({ - success: true, - username: user.username, - permissions: perms.mapPermissions(user) - }) + try { + const user = await db.table('users') + .where('token', token) + .select('username', 'permission') + .first() + + if (!user) + return res.status(401).json({ success: false, description: 'Invalid token.' }) + + return res.json({ + success: true, + username: user.username, + permissions: perms.mapPermissions(user) + }) + } catch (error) { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' 
}) + } } -tokenController.list = async (req, res, next) => { +self.list = async (req, res, next) => { const user = await utils.authorize(req, res) if (!user) return - return res.json({ - success: true, - token: user.token - }) + return res.json({ success: true, token: user.token }) } -tokenController.change = async (req, res, next) => { +self.change = async (req, res, next) => { const user = await utils.authorize(req, res) if (!user) return - const newtoken = await tokenController.generateUniqueToken() - if (!newtoken) - return res.json({ success: false, description: 'Error generating unique token (╯°□°)╯︵ ┻━┻.' }) + const newToken = await self.generateUniqueToken() + if (!newToken) + return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' }) - await db.table('users').where('token', user.token).update({ - token: newtoken, - timestamp: Math.floor(Date.now() / 1000) - }) - return res.json({ - success: true, - token: newtoken - }) + try { + await db.table('users') + .where('token', user.token) + .update({ + token: newToken, + timestamp: Math.floor(Date.now() / 1000) + }) + self.onHold.delete(newToken) + + return res.json({ + success: true, + token: newToken + }) + } catch (error) { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' }) + } } -module.exports = tokenController +module.exports = self diff --git a/controllers/uploadController.js b/controllers/uploadController.js index c4c6c1b..1ad846e 100644 --- a/controllers/uploadController.js +++ b/controllers/uploadController.js @@ -6,65 +6,58 @@ const fs = require('fs') const logger = require('./../logger') const multer = require('multer') const path = require('path') +const paths = require('./pathsController') const perms = require('./permissionController') const randomstring = require('randomstring') const utils = require('./utilsController') -const uploadsController = {} +const self = {} -const maxTries = config.uploads.maxTries || 1 -const uploadsDir = path.join(__dirname, '..', config.uploads.folder) -const chunkedUploads = Boolean(config.uploads.chunkSize) -const chunksDir = path.join(uploadsDir, 'chunks') -const maxSize = config.uploads.maxSize -const maxSizeBytes = parseInt(maxSize) * 1e6 +const fileIdentifierLengthFallback = 32 +const fileIdentifierLengthChangeable = !config.uploads.fileIdentifierLength.force && + typeof config.uploads.fileIdentifierLength.min === 'number' && + typeof config.uploads.fileIdentifierLength.max === 'number' + +const maxSize = parseInt(config.uploads.maxSize) +const maxSizeBytes = maxSize * 1e6 const urlMaxSizeBytes = parseInt(config.uploads.urlMaxSize) * 1e6 -const storage = multer.diskStorage({ - destination (req, file, cb) { - // If chunked uploads is disabled or the uploaded file is not a chunk - if (!chunkedUploads || (req.body.uuid === undefined && req.body.chunkindex === undefined)) - return cb(null, uploadsDir) +const chunkedUploads = Boolean(config.uploads.chunkSize) +const chunksData = {} +// Hard-coded min chunk size of 1 MB (e.i. 50 MB = max 50 chunks) +const maxChunksCount = maxSize - const uuidDir = path.join(chunksDir, req.body.uuid) - fs.access(uuidDir, error => { - if (!error) return cb(null, uuidDir) - fs.mkdir(uuidDir, error => { - if (!error) return cb(null, uuidDir) - logger.error(error) - // eslint-disable-next-line standard/no-callback-literal - return cb('Could not process the chunked upload. 
Try again?') - }) - }) - }, - filename (req, file, cb) { - // If chunked uploads is disabled or the uploaded file is not a chunk - if (!chunkedUploads || (req.body.uuid === undefined && req.body.chunkindex === undefined)) { - const extension = utils.extname(file.originalname) - const length = uploadsController.getFileNameLength(req) - return uploadsController.getUniqueRandomName(length, extension, req.app.get('uploads-set')) - .then(name => cb(null, name)) - .catch(error => cb(error)) +const extensionsFilter = Array.isArray(config.extensionsFilter) && + config.extensionsFilter.length +const urlExtensionsFilter = Array.isArray(config.uploads.urlExtensionsFilter) && + config.uploads.urlExtensionsFilter.length +const temporaryUploads = Array.isArray(config.uploads.temporaryUploadAges) && + config.uploads.temporaryUploadAges.length + +const initChunks = async uuid => { + if (chunksData[uuid] === undefined) { + const root = path.join(paths.chunks, uuid) + try { + await paths.access(root) + } catch (err) { + // Re-throw error + if (err && err.code !== 'ENOENT') + throw err + await paths.mkdir(root) } - - // index.extension (e.i. 0, 1, ..., n - will prepend zeros depending on the amount of chunks) - const digits = req.body.totalchunkcount !== undefined ? `${req.body.totalchunkcount - 1}`.length : 1 - const zeros = new Array(digits + 1).join('0') - const name = (zeros + req.body.chunkindex).slice(-digits) - return cb(null, name) + chunksData[uuid] = { root, chunks: [], size: 0 } } -}) + return chunksData[uuid].root +} -const upload = multer({ - storage, +const executeMulter = multer({ limits: { fileSize: maxSizeBytes }, fileFilter (req, file, cb) { - const extname = utils.extname(file.originalname) - if (uploadsController.isExtensionFiltered(extname)) - // eslint-disable-next-line standard/no-callback-literal - return cb(`${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`) + file.extname = utils.extname(file.originalname) + if (self.isExtensionFiltered(file.extname)) + return cb(`${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted.`) // Re-map Dropzone keys so people can manually use the API without prepending 'dz' for (const key in req.body) { @@ -73,248 +66,297 @@ const upload = multer({ delete req.body[key] } - if (req.body.chunkindex) { - if (!chunkedUploads) - // eslint-disable-next-line standard/no-callback-literal - return cb('Chunked uploads are disabled at the moment.') + if (req.body.chunkindex && !chunkedUploads) + return cb('Chunked uploads are disabled at the moment.') + else + return cb(null, true) + }, + storage: multer.diskStorage({ + destination (req, file, cb) { + // If chunked uploads is disabled or the uploaded file is not a chunk + if (!chunkedUploads || (req.body.uuid === undefined && req.body.chunkindex === undefined)) + return cb(null, paths.uploads) - const totalfilesize = parseInt(req.body.totalfilesize) - if (!isNaN(totalfilesize)) { - if (config.filterEmptyFile && totalfilesize === 0) - // eslint-disable-next-line standard/no-callback-literal - return cb('Empty files are not allowed.') - if (totalfilesize > maxSizeBytes) - // eslint-disable-next-line standard/no-callback-literal - return cb('Chunk error occurred. Total file size is larger than the maximum file size.') + initChunks(req.body.uuid) + .then(uuidDir => cb(null, uuidDir)) + .catch(error => { + logger.error(error) + return cb('Could not process the chunked upload. 
Try again?')
+        })
+    },

+    filename (req, file, cb) {
+      // If chunked uploads is disabled or the uploaded file is not a chunk
+      if (!chunkedUploads || (req.body.uuid === undefined && req.body.chunkindex === undefined)) {
+        const length = self.parseFileIdentifierLength(req.headers.filelength)
+        return self.getUniqueRandomName(length, file.extname)
+          .then(name => cb(null, name))
+          .catch(error => cb(error))
      }

-    return cb(null, true)
-  }
+      // index.extension (i.e. 0, 1, ..., n - will prepend zeros depending on the amount of chunks)
+      const digits = req.body.totalchunkcount !== undefined ? `${req.body.totalchunkcount - 1}`.length : 1
+      const zeros = new Array(digits + 1).join('0')
+      const name = (zeros + req.body.chunkindex).slice(-digits)
+      return cb(null, name)
+    }
+  })
}).array('files[]')

-uploadsController.isExtensionFiltered = extname => {
+self.isExtensionFiltered = extname => {
  // If empty extension needs to be filtered
-  if (!extname && config.filterNoExtension) return true
+  if (!extname && config.filterNoExtension)
+    return true
+
  // If there are extensions that have to be filtered
-  if (extname && Array.isArray(config.extensionsFilter) && config.extensionsFilter.length) {
+  if (extname && extensionsFilter) {
    const match = config.extensionsFilter.some(extension => extname === extension.toLowerCase())
    const whitelist = config.extensionsFilterMode === 'whitelist'
-    if ((!whitelist && match) || (whitelist && !match)) return true
+    if ((!whitelist && match) || (whitelist && !match))
+      return true
  }
+
  return false
}

-uploadsController.getFileNameLength = req => {
-  // If the user has a preferred file length, make sure it is within the allowed range
-  if (req.headers.filelength)
-    return Math.min(Math.max(req.headers.filelength, config.uploads.fileLength.min), config.uploads.fileLength.max)
+self.parseFileIdentifierLength = fileLength => {
+  if (!config.uploads.fileIdentifierLength)
+    return fileIdentifierLengthFallback

-  // Let's default it to 32 characters when config key is falsy
-  return config.uploads.fileLength.default || 32
+  const parsed = parseInt(fileLength)
+  if (isNaN(parsed) ||
+    !fileIdentifierLengthChangeable ||
+    parsed < config.uploads.fileIdentifierLength.min ||
+    parsed > config.uploads.fileIdentifierLength.max)
+    return config.uploads.fileIdentifierLength.default || fileIdentifierLengthFallback
+  else
+    return parsed
}

-uploadsController.getUniqueRandomName = (length, extension, set) => {
-  return new Promise((resolve, reject) => {
-    const access = i => {
-      const identifier = randomstring.generate(length)
-      if (config.uploads.cacheFileIdentifiers) {
-        // Check whether the identifier is already used in cache
-        if (set.has(identifier)) {
-          logger.log(`Identifier ${identifier} is already in use (${++i}/${maxTries}).`)
-          if (i < maxTries) return access(i)
-          // eslint-disable-next-line prefer-promise-reject-errors
-          return reject('Sorry, we could not allocate a unique random name. 
Try again?')
-        }
-        set.add(identifier)
-        // logger.log(`Added ${identifier} to identifiers cache`)
-        return resolve(identifier + extension)
-      } else {
-        // Less stricter collision check, as in the same identifier
-        // can be used by multiple different extensions
-        const name = identifier + extension
-        fs.access(path.join(uploadsDir, name), error => {
-          if (error) return resolve(name)
-          logger.log(`A file named ${name} already exists (${++i}/${maxTries}).`)
-          if (i < maxTries) return access(i)
-          // eslint-disable-next-line prefer-promise-reject-errors
-          return reject('Sorry, we could not allocate a unique random name. Try again?')
-        })
+self.getUniqueRandomName = async (length, extension) => {
+  for (let i = 0; i < utils.idMaxTries; i++) {
+    const identifier = randomstring.generate(length)
+    const name = identifier + extension
+    if (config.uploads.cacheFileIdentifiers) {
+      if (utils.idSet.has(identifier)) {
+        logger.log(`Identifier ${identifier} is already in use (${i + 1}/${utils.idMaxTries}).`)
+        continue
+      }
+      utils.idSet.add(identifier)
+      // logger.log(`Added ${identifier} to identifiers cache`)
+    } else {
+      try {
+        await paths.access(path.join(paths.uploads, name))
+        logger.log(`${name} is already in use (${i + 1}/${utils.idMaxTries}).`)
+        continue
+      } catch (error) {
+        // Re-throw error
+        if (error && error.code !== 'ENOENT')
+          throw error
      }
    }
-    access(0)
-  })
+    return name
+  }
+
+  throw 'Sorry, we could not allocate a unique random name. Try again?'
}

-uploadsController.upload = async (req, res, next) => {
+self.parseUploadAge = age => {
+  if (age === undefined || age === null)
+    return config.uploads.temporaryUploadAges[0]
+  const parsed = parseFloat(age)
+  if (config.uploads.temporaryUploadAges.includes(parsed))
+    return parsed
+  else
+    return null
+}
+
+self.upload = async (req, res, next) => {
  let user
  if (config.private === true) {
    user = await utils.authorize(req, res)
    if (!user) return
  } else if (req.headers.token) {
-    user = await db.table('users').where('token', req.headers.token).first()
+    user = await db.table('users')
+      .where('token', req.headers.token)
+      .first()
+    if (user && (user.enabled === false || user.enabled === 0))
+      return res.json({ success: false, description: 'This account has been disabled.' })
  }

-  if (user && (user.enabled === false || user.enabled === 0))
-    return res.json({ success: false, description: 'This account has been disabled.' })
-
-  if (user && user.fileLength && !req.headers.filelength)
-    req.headers.filelength = user.fileLength
-
  let albumid = parseInt(req.headers.albumid || req.params.albumid)
-  if (isNaN(albumid)) albumid = null
+  if (isNaN(albumid))
+    albumid = null

-  if (req.body.urls)
-    return uploadsController.actuallyUploadByUrl(req, res, user, albumid)
-  else
-    return uploadsController.actuallyUpload(req, res, user, albumid)
-}
+  let age = null
+  if (temporaryUploads) {
+    age = self.parseUploadAge(req.headers.age)
+    if (!age && !config.uploads.temporaryUploadAges.includes(0))
+      return res.json({ success: false, description: 'Permanent uploads are prohibited.' })
+  }

-uploadsController.actuallyUpload = async (req, res, user, albumid) => {
-  const erred = error => {
+  try {
+    const func = req.body.urls ? self.actuallyUploadUrls : self.actuallyUploadFiles
+    await func(req, res, user, albumid, age)
+  } catch (error) {
    const isError = error instanceof Error
    if (isError) logger.error(error)
-    res.status(400).json({
+    return res.status(400).json({
      success: false,
      description: isError ? 
error.toString() : error }) } +} - upload(req, res, async error => { - if (error) { - // Suppress error logging for errors with these codes - const suppress = [ - 'LIMIT_FILE_SIZE', - 'LIMIT_UNEXPECTED_FILE' - ] - if (error.code && suppress.includes(error.code)) return erred(error.toString()) - return erred(error) - } - - if (!req.files || !req.files.length) return erred('No files.') - - // If chunked uploads is enabled and the uploaded file is a chunk, then just say that it was a success - if (chunkedUploads && req.body.uuid) return res.json({ success: true }) - - const infoMap = req.files.map(file => { - file.albumid = albumid - return { - path: path.join(__dirname, '..', config.uploads.folder, file.filename), - data: file - } - }) - - if (config.filterEmptyFile && infoMap.some(file => file.data.size === 0)) { - infoMap.forEach(file => { - utils.deleteFile(file.data.filename, req.app.get('uploads-set')).catch(logger.error) - }) - return erred('Empty files are not allowed.') - } - - if (config.uploads.scan && config.uploads.scan.enabled) { - const scan = await uploadsController.scanFiles(req, infoMap) - if (scan) return erred(scan) - } - - const result = await uploadsController.formatInfoMap(req, res, user, infoMap) - .catch(erred) - if (!result) return - - uploadsController.processFilesForDisplay(req, res, result.files, result.existingFiles) +self.actuallyUploadFiles = async (req, res, user, albumid, age) => { + const error = await new Promise(resolve => { + return executeMulter(req, res, err => resolve(err)) }) -} -uploadsController.actuallyUploadByUrl = async (req, res, user, albumid) => { - const erred = error => { - const isError = error instanceof Error - if (isError) logger.error(error) - res.status(400).json({ - success: false, - description: isError ? error.toString() : error - }) + if (error) { + const suppress = [ + 'LIMIT_FILE_SIZE', + 'LIMIT_UNEXPECTED_FILE' + ] + if (suppress.includes(error.code)) + throw error.toString() + else + throw error } - if (!config.uploads.urlMaxSize) return erred('Upload by URLs is disabled at the moment.') + if (!req.files || !req.files.length) + throw 'No files.' + + // If chunked uploads is enabled and the uploaded file is a chunk, then just say that it was a success + const uuid = req.body.uuid + if (chunkedUploads && chunksData[uuid] !== undefined) { + req.files.forEach(file => { + chunksData[uuid].chunks.push(file.filename) + chunksData[uuid].size += file.size + }) + return res.json({ success: true }) + } + + const infoMap = req.files.map(file => { + file.albumid = albumid + file.age = age + return { + path: path.join(paths.uploads, file.filename), + data: file + } + }) + + if (config.filterEmptyFile && infoMap.some(file => file.data.size === 0)) { + // Unlink all files when at least one file is an empty file + for (const info of infoMap) + // Continue even when encountering errors + await utils.unlinkFile(info.data.filename).catch(logger.error) + + throw 'Empty files are not allowed.' + } + + if (utils.clamd.scanner) { + const scanResult = await self.scanFiles(req, infoMap) + if (scanResult) throw scanResult + } + + const result = await self.storeFilesToDb(req, res, user, infoMap) + await self.sendUploadResponse(req, res, result) +} + +self.actuallyUploadUrls = async (req, res, user, albumid, age) => { + if (!config.uploads.urlMaxSize) + throw 'Upload by URLs is disabled at the moment.' 
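
+  // The request body is expected to look like the following (inferred from
+  // the checks below; the client payload is not shown elsewhere in this diff):
+  //   { "urls": ["https://example.com/a.png", "https://example.com/b.jpg"] }
+  // Each URL is fetched with node-fetch, written into the uploads directory,
+  // then run through the same filter/scan/database pipeline as file uploads.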
const urls = req.body.urls - if (!urls || !(urls instanceof Array)) return erred('Missing "urls" property (Array).') + if (!urls || !(urls instanceof Array)) + throw 'Missing "urls" property (array).' - let iteration = 0 + const downloaded = [] const infoMap = [] - for (let url of urls) { - const original = path.basename(url).split(/[?#]/)[0] - const extname = utils.extname(original) + try { + for (let url of urls) { + const original = path.basename(url).split(/[?#]/)[0] + const extname = utils.extname(original) - // Extensions filter - let filtered = false - if (['blacklist', 'whitelist'].includes(config.uploads.urlExtensionsFilterMode)) - if (Array.isArray(config.uploads.urlExtensionsFilter) && config.uploads.urlExtensionsFilter.length) { - const match = config.uploads.urlExtensionsFilter.some(extension => extname === extension.toLowerCase()) - const whitelist = config.uploads.urlExtensionsFilterMode === 'whitelist' - filtered = ((!whitelist && match) || (whitelist && !match)) - } else { - return erred('config.uploads.urlExtensionsFilter is not an array or is an empty array, please contact site owner.') - } - else filtered = uploadsController.isExtensionFiltered(extname) + // Extensions filter + let filtered = false + if (['blacklist', 'whitelist'].includes(config.uploads.urlExtensionsFilterMode)) + if (urlExtensionsFilter) { + const match = config.uploads.urlExtensionsFilter.some(extension => extname === extension.toLowerCase()) + const whitelist = config.uploads.urlExtensionsFilterMode === 'whitelist' + filtered = ((!whitelist && match) || (whitelist && !match)) + } else { + throw 'Invalid extensions filter, please contact the site owner.' + } + else + filtered = self.isExtensionFiltered(extname) - if (filtered) - return erred(`${extname ? `${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted due to security reasons.`) + if (filtered) + throw `${extname ? 
`${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted due to security reasons.`

-      if (config.uploads.urlProxy)
-        url = config.uploads.urlProxy
-          .replace(/{url}/g, encodeURIComponent(url))
-          .replace(/{url-noprot}/g, encodeURIComponent(url.replace(/^https?:\/\//, '')))
+      if (config.uploads.urlProxy)
+        url = config.uploads.urlProxy
+          .replace(/{url}/g, encodeURIComponent(url))
+          .replace(/{url-noprot}/g, encodeURIComponent(url.replace(/^https?:\/\//, '')))

-    try {
      // Limit max response body size with maximum allowed size
      const fetchFile = await fetch(url, { size: urlMaxSizeBytes })
      if (fetchFile.status !== 200)
-        return erred(`${fetchFile.status} ${fetchFile.statusText}`)
+        throw `${fetchFile.status} ${fetchFile.statusText}`

      const headers = fetchFile.headers
      const file = await fetchFile.buffer()

-      const length = uploadsController.getFileNameLength(req)
-      const name = await uploadsController.getUniqueRandomName(length, extname, req.app.get('uploads-set'))
+      const length = self.parseFileIdentifierLength(req.headers.filelength)
+      const name = await self.getUniqueRandomName(length, extname)

-      const destination = path.join(uploadsDir, name)
-      fs.writeFile(destination, file, async error => {
-        if (error) return erred(error)
+      const destination = path.join(paths.uploads, name)
+      await new Promise((resolve, reject) => {
+        fs.writeFile(destination, file, error => {
+          if (error) return reject(error)
+          return resolve()
+        })
+      })
+      downloaded.push(destination)

-        const data = {
+      infoMap.push({
+        path: destination,
+        data: {
          filename: name,
          originalname: original,
+          extname,
          mimetype: headers.get('content-type').split(';')[0] || '',
          size: file.byteLength,
-          albumid
-        }
-
-        infoMap.push({
-          path: destination,
-          data
-        })
-
-        iteration++
-        if (iteration === urls.length) {
-          if (config.uploads.scan && config.uploads.scan.enabled) {
-            const scan = await uploadsController.scanFiles(req, infoMap)
-            if (scan) return erred(scan)
-          }
-
-          const result = await uploadsController.formatInfoMap(req, res, user, infoMap)
-            .catch(erred)
-          if (!result) return
-
-          uploadsController.processFilesForDisplay(req, res, result.files, result.existingFiles)
+          albumid,
+          age
        }
      })
-    } catch (error) {
-      erred(error)
    }
+
+    // If no errors were found, clear the cache of downloaded files
+    downloaded.length = 0
+
+    if (utils.clamd.scanner) {
+      const scanResult = await self.scanFiles(req, infoMap)
+      if (scanResult) throw scanResult
+    }
+
+    const result = await self.storeFilesToDb(req, res, user, infoMap)
+    await self.sendUploadResponse(req, res, result)
+  } catch (error) {
+    // Unlink all downloaded files when at least one file threw an error from the for-loop
+    if (downloaded.length)
+      for (const file of downloaded)
+        // Continue even when encountering errors
+        await utils.unlinkFile(file).catch(logger.error)

+    // Re-throw error
+    throw error
+  }
}

-uploadsController.finishChunks = async (req, res, next) => {
+self.finishChunks = async (req, res, next) => {
  if (!chunkedUploads)
    return res.json({ success: false, description: 'Chunked upload is disabled at the moment.' 
}) @@ -323,362 +365,325 @@ uploadsController.finishChunks = async (req, res, next) => { user = await utils.authorize(req, res) if (!user) return } else if (req.headers.token) { - user = await db.table('users').where('token', req.headers.token).first() + user = await db.table('users') + .where('token', req.headers.token) + .first() + if (user && (user.enabled === false || user.enabled === 0)) + return res.json({ success: false, description: 'This account has been disabled.' }) } - if (user && (user.enabled === false || user.enabled === 0)) - return res.json({ success: false, description: 'This account has been disabled.' }) - - if (user && user.fileLength && !req.headers.filelength) - req.headers.filelength = user.fileLength - - let albumid = parseInt(req.headers.albumid || req.params.albumid) - if (isNaN(albumid)) albumid = null - - return uploadsController.actuallyFinishChunks(req, res, user, albumid) -} - -uploadsController.actuallyFinishChunks = async (req, res, user, albumid) => { - const erred = error => { + try { + await self.actuallyFinishChunks(req, res, user) + } catch (error) { const isError = error instanceof Error if (isError) logger.error(error) - res.status(400).json({ + return res.status(400).json({ success: false, description: isError ? error.toString() : error }) } +} + +self.actuallyFinishChunks = async (req, res, user) => { + const check = file => typeof file.uuid !== 'string' || + !chunksData[file.uuid] || + chunksData[file.uuid].chunks.length < 2 const files = req.body.files - if (!files || !(files instanceof Array) || !files.length) return erred('Invalid "files" property (Array).') + if (!Array.isArray(files) || !files.length || files.some(check)) + throw 'An unexpected error occurred.' - let iteration = 0 const infoMap = [] - for (const file of files) { - if (!file.uuid || typeof file.uuid !== 'string') return erred('Invalid "uuid" property (string).') - if (typeof file.count !== 'number' || file.count < 1) return erred('Invalid "count" property (number).') + try { + for (const file of files) { + if (chunksData[file.uuid].chunks.length > maxChunksCount) + throw 'Too many chunks.' - const uuidDir = path.join(chunksDir, file.uuid) - fs.readdir(uuidDir, async (error, chunkNames) => { - if (error) { - if (error.code === 'ENOENT') return erred('UUID is not being used.') - return erred(error) - } - if (file.count < chunkNames.length) return erred('Chunks count mismatch.') + file.extname = typeof file.original === 'string' ? utils.extname(file.original) : '' + if (self.isExtensionFiltered(file.extname)) + throw `${file.extname ? `${file.extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted due to security reasons.` - const extname = typeof file.original === 'string' ? utils.extname(file.original) : '' - if (uploadsController.isExtensionFiltered(extname)) - return erred(`${extname ? 
`${extname.substr(1).toUpperCase()} files` : 'Files with no extension'} are not permitted due to security reasons.`)

-      const length = uploadsController.getFileNameLength(req)
-      const name = await uploadsController.getUniqueRandomName(length, extname, req.app.get('uploads-set'))
-        .catch(erred)
-      if (!name) return
-
-      const destination = path.join(uploadsDir, name)
-
-      // Sort chunk names
-      chunkNames.sort()
-
-      // Get total chunks size
-      const chunksTotalSize = await uploadsController.getTotalSize(uuidDir, chunkNames)
-        .catch(erred)
-      if (typeof chunksTotalSize !== 'number') return
-
-      const isEmpty = config.filterEmptyFile && (chunksTotalSize === 0)
-      const isBigger = chunksTotalSize > maxSizeBytes
-      if (isEmpty || isBigger) {
-        // Delete all chunks and remove chunks dir
-        const chunksCleaned = await uploadsController.cleanUpChunks(uuidDir, chunkNames)
-          .catch(erred)
-        if (!chunksCleaned) return
-
-        if (isEmpty)
-          return erred('Empty files are not allowed.')
-        else
-          return erred(`Total chunks size is bigger than ${maxSize}.`)
+      if (temporaryUploads) {
+        file.age = self.parseUploadAge(file.age)
+        if (!file.age && !config.uploads.temporaryUploadAges.includes(0))
+          throw 'Permanent uploads are prohibited.'
      }

-      // Append all chunks
-      const destFileStream = fs.createWriteStream(destination, { flags: 'a' })
-      const chunksAppended = await uploadsController.appendToStream(destFileStream, uuidDir, chunkNames)
-        .catch(erred)
-      if (!chunksAppended) return
+      file.size = chunksData[file.uuid].size
+      if (config.filterEmptyFile && file.size === 0)
+        throw 'Empty files are not allowed.'
+      else if (file.size > maxSizeBytes)
+        throw `File too large. Total size of chunks exceeds ${maxSize} MB.`

-      // Delete all chunks and remove chunks dir
-      const chunksCleaned = await uploadsController.cleanUpChunks(uuidDir, chunkNames)
-        .catch(erred)
-      if (!chunksCleaned) return
+      // Generate name
+      const length = self.parseFileIdentifierLength(file.filelength)
+      const name = await self.getUniqueRandomName(length, file.extname)
+
+      // Combine chunks
+      const destination = path.join(paths.uploads, name)
+      await self.combineChunks(destination, file.uuid)
+
+      // Continue even when encountering errors
+      await self.cleanUpChunks(file.uuid).catch(logger.error)
+
+      // Double-check file size
+      const lstat = await paths.lstat(destination)
+      if (lstat.size !== file.size)
+        throw 'Size of the combined file does not match the total size of its chunks.'
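+      // (chunksData[file.uuid].size is accumulated from the individual chunk
+      // sizes as they arrive in actuallyUploadFiles, so a mismatch here means
+      // a chunk was lost or truncated on disk before being combined.)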
+
+      let albumid = parseInt(file.albumid)
+      if (isNaN(albumid))
+        albumid = null

      const data = {
        filename: name,
        originalname: file.original || '',
+        extname: file.extname,
        mimetype: file.type || '',
-        size: file.size || 0
+        size: file.size,
+        albumid,
+        age: file.age
      }

-      data.albumid = parseInt(file.albumid)
-      if (isNaN(data.albumid)) data.albumid = albumid
+      infoMap.push({ path: destination, data })
+    }

-      infoMap.push({
-        path: destination,
-        data
-      })
+    if (utils.clamd.scanner) {
+      const scanResult = await self.scanFiles(req, infoMap)
+      if (scanResult) throw scanResult
+    }

-      iteration++
-      if (iteration === files.length) {
-        if (config.uploads.scan && config.uploads.scan.enabled) {
-          const scan = await uploadsController.scanFiles(req, infoMap)
-          if (scan) return erred(scan)
-        }
-
-        const result = await uploadsController.formatInfoMap(req, res, user, infoMap)
-          .catch(erred)
-        if (!result) return
-
-        uploadsController.processFilesForDisplay(req, res, result.files, result.existingFiles)
-      }
-    })
+    const result = await self.storeFilesToDb(req, res, user, infoMap)
+    await self.sendUploadResponse(req, res, result)
+  } catch (error) {
+    // Clean up leftover chunks
+    for (const file of files)
+      if (chunksData[file.uuid] !== undefined)
+        // Continue even when encountering errors
+        await self.cleanUpChunks(file.uuid).catch(logger.error)
+    throw error
  }
}

-uploadsController.getTotalSize = (uuidDir, chunkNames) => {
-  return new Promise((resolve, reject) => {
-    let size = 0
-    const stat = i => {
-      if (i === chunkNames.length) return resolve(size)
-      fs.stat(path.join(uuidDir, chunkNames[i]), (error, stats) => {
-        if (error) return reject(error)
-        size += stats.size
-        stat(i + 1)
+self.combineChunks = async (destination, uuid) => {
+  let errorObj
+  const writeStream = fs.createWriteStream(destination, { flags: 'a' })
+
+  try {
+    chunksData[uuid].chunks.sort()
+    for (const chunk of chunksData[uuid].chunks)
+      await new Promise((resolve, reject) => {
+        fs.createReadStream(path.join(chunksData[uuid].root, chunk))
+          .on('error', error => reject(error))
+          .on('end', () => resolve())
+          .pipe(writeStream, { end: false })
      })
+  } catch (error) {
+    errorObj = error
+  }
+
+  // Close stream
+  writeStream.end()
+
+  // Re-throw error
+  if (errorObj)
+    throw errorObj
+}
-    }
-    stat(0)
-  })
-}

-uploadsController.appendToStream = (destFileStream, uuidDr, chunkNames) => {
-  return new Promise((resolve, reject) => {
-    const append = i => {
-      if (i === chunkNames.length) {
-        destFileStream.end()
-        return resolve(true)
-      }
-      fs.createReadStream(path.join(uuidDr, chunkNames[i]))
-        .on('end', () => {
-          append(i + 1)
-        })
-        .on('error', error => {
-          logger.error(error)
-          destFileStream.end()
-          return reject(error)
-        })
-        .pipe(destFileStream, { end: false })
+self.cleanUpChunks = async (uuid) => {
+  // Unlink chunks
+  for (const chunk of chunksData[uuid].chunks)
+    await paths.unlink(path.join(chunksData[uuid].root, chunk))
+  // Remove UUID dir
+  await paths.rmdir(chunksData[uuid].root)
+  // Delete cached data
+  delete chunksData[uuid]
+}
-    }
-    append(0)
-  })
-}

+self.scanFiles = async (req, infoMap) => {
+  let foundThreat
+  let lastIteration
+  let errorString
+  for (let i = 0; i < infoMap.length; i++) {
+    let reply
+    try {
+      reply = await utils.clamd.scanner.scanFile(infoMap[i].path, utils.clamd.timeout, utils.clamd.chunkSize)
+    } catch (error) {
+      logger.error(`[ClamAV]: ${error.toString()}.`)
+      errorString = `[ClamAV]: ${error.code !== undefined ? 
`${error.code}, p` : 'P'}lease contact the site owner.`
+      break
    }

+    if (!reply.includes('OK') || reply.includes('FOUND')) {
+      // eslint-disable-next-line no-control-regex
+      foundThreat = reply.replace(/^stream: /, '').replace(/ FOUND\u0000$/, '')
+      logger.log(`[ClamAV]: ${infoMap[i].data.filename}: ${foundThreat} FOUND.`)
+      lastIteration = i === infoMap.length - 1
+      break
+    }
+  }

+  if (!foundThreat && !errorString)
+    return false

+  // Unlink all files when at least one threat is found
+  for (const info of infoMap)
+    // Continue even when encountering errors
+    await utils.unlinkFile(info.data.filename).catch(logger.error)

+  return errorString ||
+    `Threat found: ${foundThreat}${lastIteration ? '' : ', and maybe more'}.`
+}

-uploadsController.cleanUpChunks = async (uuidDir, chunkNames) => {
-  await Promise.all(chunkNames.map(chunkName =>
-    new Promise((resolve, reject) => {
-      const chunkPath = path.join(uuidDir, chunkName)
-      fs.unlink(chunkPath, error => {
-        if (error && error.code !== 'ENOENT')
-          return reject(error)
-        resolve()
-      })
+self.storeFilesToDb = async (req, res, user, infoMap) => {
+  const files = []
+  const exists = []
+  const albumids = []
+  for (const info of infoMap) {
+    // Create hash of the file
+    const hash = await new Promise((resolve, reject) => {
+      const result = crypto.createHash('md5')
+      fs.createReadStream(info.path)
+        .on('error', error => reject(error))
+        .on('end', () => resolve(result.digest('hex')))
+        .on('data', data => result.update(data, 'utf8'))
    })
-  ))
-  return new Promise((resolve, reject) => {
-    fs.rmdir(uuidDir, error => {
-      if (error) return reject(error)
-      resolve(true)
-    })
-  })
-}

-uploadsController.formatInfoMap = (req, res, user, infoMap) => {
-  return new Promise(resolve => {
-    let iteration = 0
-    const files = []
-    const existingFiles = []
-    const albumsAuthorized = {}
-
-    for (const info of infoMap) {
-      // Check if the file exists by checking hash and size
-      const hash = crypto.createHash('md5')
-      const stream = fs.createReadStream(info.path)
-
-      stream.on('data', data => {
-        hash.update(data, 'utf8')
+    // Check if the file exists by checking its hash and size
+    const dbFile = await db.table('files')
+      .where(function () {
+        if (user === undefined)
+          this.whereNull('userid')
+        else
+          this.where('userid', user.id)
      })
-
-      stream.on('end', async () => {
-        const fileHash = hash.digest('hex')
-        const dbFile = await db.table('files')
-          .where(function () {
-            if (user === undefined)
-              this.whereNull('userid')
-            else
-              this.where('userid', user.id)
-          })
-          .where({
-            hash: fileHash,
-            size: info.data.size
-          })
-          .first()
+      .where({
+        hash,
+        size: info.data.size
+      })
+      // Select expirydate to display expiration date of existing files as well
+      .select('name', 'expirydate')
+      .first()

+    if (dbFile) {
+      // Continue even when encountering errors
+      await utils.unlinkFile(info.data.filename).catch(logger.error)
+      // logger.log(`Unlinked ${info.data.filename} since a duplicate named ${dbFile.name} exists`)
+      exists.push(dbFile)
+      continue
+    }

-        if (!dbFile) {
-          if (info.data.albumid && albumsAuthorized[info.data.albumid] === undefined) {
-            const authorized = await db.table('albums')
-              .where({
-                id: info.data.albumid,
-                userid: user.id
-              })
-              .first()
-            albumsAuthorized[info.data.albumid] = Boolean(authorized)
-          }
-
-          files.push({
-            name: info.data.filename,
-            original: info.data.originalname,
-            type: info.data.mimetype,
-            size: info.data.size,
-            hash: fileHash,
-            ip: 
config.uploads.storeIP !== false ? req.ip : null, // only disable if explicitly set to false - albumid: albumsAuthorized[info.data.albumid] ? info.data.albumid : null, - userid: user !== undefined ? user.id : null, - timestamp: Math.floor(Date.now() / 1000) - }) - utils.invalidateStatsCache('uploads') - } else { - utils.deleteFile(info.data.filename, req.app.get('uploads-set')).catch(logger.error) - existingFiles.push(dbFile) - } - - iteration++ - if (iteration === infoMap.length) - resolve({ files, existingFiles }) + .where({ + hash, + size: info.data.size }) + // Select expirydate to display expiration date of existing files as well + .select('name', 'expirydate') + .first() + + if (dbFile) { + // Continue even when encountering errors + await utils.unlinkFile(info.data.filename).catch(logger.error) + // logger.log(`Unlinked ${info.data.filename} since a duplicate named ${dbFile.name} exists`) + exists.push(dbFile) + continue } - }) -} -uploadsController.scanFiles = (req, infoMap) => { - return new Promise((resolve, reject) => { - const scanner = req.app.get('clam-scanner') - const timeout = config.uploads.scan.timeout || 5000 - const chunkSize = config.uploads.scan.chunkSize || 64 * 1024 - let iteration = 0 - for (const info of infoMap) - scanner.scanFile(info.path, timeout, chunkSize).then(reply => { - iteration++ - const lastIteration = iteration === infoMap.length - if (!reply.includes('OK') || reply.includes('FOUND')) { - // eslint-disable-next-line no-control-regex - const virus = reply.replace(/^stream: /, '').replace(/ FOUND\u0000$/, '') - logger.log(`[ClamAV]: ${info.data.filename}: ${virus} FOUND.`) - return resolve({ virus, lastIteration }) - } - if (lastIteration) resolve(null) - }).catch(reject) - }).then(result => { - if (!result) return false - // If there is at least one dirty file, then delete all files - const set = req.app.get('uploads-set') - infoMap.forEach(info => { - utils.deleteFile(info.data.filename).catch(logger.error) - if (set) { - const identifier = info.data.filename.split('.')[0] - set.delete(identifier) - // logger.log(`Removed ${identifier} from identifiers cache (formatInfoMap)`) - } - }) - // Unfortunately, we will only be returning name of the first virus - // even if the current session was made up by multiple virus types - return `Threat found: ${result.virus}${result.lastIteration ? '' : ', and maybe more'}.` - }).catch(error => { - logger.error(`[ClamAV]: ${error.toString()}.`) - return `[ClamAV]: ${error.code !== undefined ? `${error.code}, p` : 'P'}lease contact the site owner.` - }) -} + const timestamp = Math.floor(Date.now() / 1000) + const data = { + name: info.data.filename, + original: info.data.originalname, + type: info.data.mimetype, + size: info.data.size, + hash, + // Only disable if explicitly set to false in config + ip: config.uploads.storeIP !== false ? 
req.ip : null,
+      timestamp
+    }

-uploadsController.processFilesForDisplay = async (req, res, files, existingFiles) => {
-  const responseFiles = []
+    if (user) {
+      data.userid = user.id
+      data.albumid = info.data.albumid
+      if (data.albumid !== null && !albumids.includes(data.albumid))
+        albumids.push(data.albumid)
+    }
+
+    if (info.data.age)
+      data.expirydate = data.timestamp + (info.data.age * 3600) // Hours to seconds
+
+    files.push(data)
+
+    // Generate thumbs, but do not wait
+    if (utils.mayGenerateThumb(info.data.extname))
+      utils.generateThumbs(info.data.filename, info.data.extname).catch(logger.error)
+  }

  if (files.length) {
+    let authorizedIds = []
+    if (albumids.length) {
+      authorizedIds = await db.table('albums')
+        .where({ userid: user.id })
+        .whereIn('id', albumids)
+        .select('id')
+        .then(rows => rows.map(row => row.id))
+
+      // Remove albumid if the user does not own the album
+      for (const file of files)
+        if (file.albumid !== null && !authorizedIds.includes(file.albumid))
+          file.albumid = null
+    }
+
+    // Insert new files to DB
    await db.table('files').insert(files)
+    utils.invalidateStatsCache('uploads')

-    for (const file of files)
-      responseFiles.push(file)
+    // Update albums' timestamp
+    if (authorizedIds.length)
+      await db.table('albums')
+        .whereIn('id', authorizedIds)
+        .update('editedAt', Math.floor(Date.now() / 1000))
  }

-  if (existingFiles.length)
-    for (const file of existingFiles)
-      responseFiles.push(file)
-
-  // We send response first before generating thumbnails and updating album timestamps
-  const nojs = req.path === '/nojs'
-  res.json({
-    success: true,
-    files: responseFiles.map(file => {
-      const result = {
-        name: file.name,
-        size: file.size,
-        url: `${config.domain}/${file.name}`
-      }
-      // Add original name if it's /nojs route
-      if (nojs) result.original = file.original
-      return result
-    })
-  })
-
-  const albumids = []
-  for (const file of files) {
-    if (file.albumid && !albumids.includes(file.albumid))
-      albumids.push(file.albumid)
-
-    if (utils.mayGenerateThumb(utils.extname(file.name)))
-      utils.generateThumbs(file.name)
-  }
-
-  if (albumids.length)
-    db.table('albums')
-      .whereIn('id', albumids)
-      .update('editedAt', Math.floor(Date.now() / 1000))
-      .catch(logger.error)
+  return files.concat(exists)
}

-uploadsController.delete = async (req, res) => {
+self.sendUploadResponse = async (req, res, result) => {
+  // Send response
+  res.json({
+    success: true,
+    files: result.map(file => {
+      const map = {
+        name: file.name,
+        url: `${config.domain}/${file.name}`
+      }
+
+      // Add expiry date if a temporary upload
+      if (file.expirydate)
+        map.expirydate = file.expirydate
+
+      // Add original name if on /nojs route
+      if (req.path === '/nojs')
+        map.original = file.original
+
+      return map
+    })
+  })
+}
+
+self.delete = async (req, res) => {
+  // Map /delete requests to /bulkdelete route
  const id = parseInt(req.body.id)
  const body = {
    field: 'id',
    values: isNaN(id) ? undefined : [id]
  }
  req.body = body
-  return uploadsController.bulkDelete(req, res)
+  return self.bulkDelete(req, res)
}

-uploadsController.bulkDelete = async (req, res) => {
+self.bulkDelete = async (req, res) => {
  const user = await utils.authorize(req, res)
  if (!user) return

  const field = req.body.field || 'id'
  const values = req.body.values
-  if (!values || !Array.isArray(values) || !values.length)
+  if (!Array.isArray(values) || !values.length)
    return res.json({ success: false, description: 'No array of files specified.' 
}) - const failed = await utils.bulkDeleteFiles(field, values, user, req.app.get('uploads-set')) - utils.invalidateStatsCache('uploads') - if (failed.length < values.length) + try { + const failed = await utils.bulkDeleteFromDb(field, values, user) return res.json({ success: true, failed }) - - return res.json({ success: false, description: 'Could not delete any files.' }) + } catch (error) { + logger.error(error) + return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' }) + } } -uploadsController.list = async (req, res) => { +self.list = async (req, res) => { const user = await utils.authorize(req, res) if (!user) return @@ -686,7 +691,8 @@ uploadsController.list = async (req, res) => { const all = req.headers.all === '1' const filters = req.headers.filters const ismoderator = perms.is(user, 'moderator') - if ((all || filters) && !ismoderator) return res.status(403).end() + if ((all || filters) && !ismoderator) + return res.status(403).end() const basedomain = config.domain @@ -770,12 +776,17 @@ uploadsController.list = async (req, res) => { .where(filter) .count('id as count') .then(rows => rows[0].count) - if (!count) return res.json({ success: true, files: [], count }) + if (!count) + return res.json({ success: true, files: [], count }) let offset = req.params.page if (offset === undefined) offset = 0 - const columns = ['id', 'timestamp', 'name', 'userid', 'size'] + const columns = ['id', 'name', 'userid', 'size', 'timestamp'] + + if (temporaryUploads) + columns.push('expirydate') + // Only select IPs if we are listing all uploads columns.push(all ? 'ip' : 'albumid') @@ -786,7 +797,8 @@ uploadsController.list = async (req, res) => { .offset(25 * offset) .select(columns) - if (!files.length) return res.json({ success: true, files, count, basedomain }) + if (!files.length) + return res.json({ success: true, files, count, basedomain }) for (const file of files) { file.extname = utils.extname(file.name) @@ -810,13 +822,15 @@ uploadsController.list = async (req, res) => { .then(rows => { // Build Object indexed by their IDs const obj = {} - for (const row of rows) obj[row.id] = row.name + for (const row of rows) + obj[row.id] = row.name return obj }) } // If we are not listing all uploads, send response - if (!all) return res.json({ success: true, files, count, albums, basedomain }) + if (!all) + return res.json({ success: true, files, count, albums, basedomain }) // Otherwise proceed to querying usernames let _users = _filters.uploaders @@ -828,7 +842,8 @@ uploadsController.list = async (req, res) => { }) // If there are no uploads attached to a registered user, send response - if (userids.length === 0) return res.json({ success: true, files, count, basedomain }) + if (userids.length === 0) + return res.json({ success: true, files, count, basedomain }) // Query usernames of user IDs from currently selected files _users = await db.table('users') @@ -843,4 +858,4 @@ uploadsController.list = async (req, res) => { return res.json({ success: true, files, count, users, basedomain }) } -module.exports = uploadsController +module.exports = self diff --git a/controllers/utilsController.js b/controllers/utilsController.js index d388806..6c335ee 100644 --- a/controllers/utilsController.js +++ b/controllers/utilsController.js @@ -1,21 +1,37 @@ -const { spawn } = require('child_process') +const { promisify } = require('util') const config = require('./../config') const db = require('knex')(config.database) const fetch = require('node-fetch') const ffmpeg = 
require('fluent-ffmpeg') const fs = require('fs') const logger = require('./../logger') -const os = require('os') const path = require('path') +const paths = require('./pathsController') const perms = require('./permissionController') const sharp = require('sharp') +const si = require('systeminformation') -const utilsController = {} -const _stats = { +const self = { + clamd: { + scanner: null, + timeout: config.uploads.scan.timeout || 5000, + chunkSize: config.uploads.scan.chunkSize || 64 * 1024 + }, + gitHash: null, + idSet: null, + + idMaxTries: config.uploads.maxTries || 1, + + imageExts: ['.webp', '.jpg', '.jpeg', '.gif', '.png', '.tiff', '.tif', '.svg'], + videoExts: ['.webm', '.mp4', '.wmv', '.avi', '.mov', '.mkv'], + + ffprobe: promisify(ffmpeg.ffprobe) +} + +const statsCache = { system: { cache: null, - generating: false, - generatedAt: 0 + generating: false }, albums: { cache: null, @@ -37,23 +53,17 @@ const _stats = { } } -const uploadsDir = path.resolve(config.uploads.folder) -const thumbsDir = path.join(uploadsDir, 'thumbs') -const thumbPlaceholder = path.resolve(config.uploads.generateThumbs.placeholder || 'public/images/unavailable.png') const cloudflareAuth = config.cloudflare.apiKey && config.cloudflare.email && config.cloudflare.zoneId -utilsController.imageExtensions = ['.webp', '.jpg', '.jpeg', '.gif', '.png', '.tiff', '.tif', '.svg'] -utilsController.videoExtensions = ['.webm', '.mp4', '.wmv', '.avi', '.mov', '.mkv'] - -utilsController.mayGenerateThumb = extname => { - return (config.uploads.generateThumbs.image && utilsController.imageExtensions.includes(extname)) || - (config.uploads.generateThumbs.video && utilsController.videoExtensions.includes(extname)) +self.mayGenerateThumb = extname => { + return (config.uploads.generateThumbs.image && self.imageExts.includes(extname)) || + (config.uploads.generateThumbs.video && self.videoExts.includes(extname)) } -// expand if necessary (must be lower case); for now only preserves some known tarballs -utilsController.preserves = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz'] +// Expand if necessary (must be lower case); for now only preserves some known tarballs +const extPreserves = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz'] -utilsController.extname = filename => { +self.extname = filename => { // Always return blank string if the filename does not seem to have a valid extension // Files such as .DS_Store (anything that starts with a dot, without any extension after) will still be accepted if (!/\../.test(filename)) return '' @@ -69,9 +79,9 @@ utilsController.extname = filename => { } // check against extensions that must be preserved - for (let i = 0; i < utilsController.preserves.length; i++) - if (lower.endsWith(utilsController.preserves[i])) { - extname = utilsController.preserves[i] + for (const extPreserve of extPreserves) + if (lower.endsWith(extPreserve)) { + extname = extPreserve break } @@ -81,18 +91,20 @@ utilsController.extname = filename => { return extname + multi } -utilsController.escape = string => { +self.escape = (string) => { // MIT License // Copyright(c) 2012-2013 TJ Holowaychuk // Copyright(c) 2015 Andreas Lubbe // Copyright(c) 2015 Tiancheng "Timothy" Gu - if (!string) return string + if (!string) + return string - const str = '' + string + const str = String(string) const match = /["'&<>]/.exec(str) - if (!match) return str + if (!match) + return str let escape let html = '' @@ -132,258 +144,266 @@ utilsController.escape = string => { : html } 
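
+// Usage pattern relied on throughout these controllers (see self.stats
+// below, or self.upload in uploadController.js): callers bail out when
+// authorize() returns undefined, because an error response has already
+// been sent. A minimal sketch:
+//   const user = await self.authorize(req, res)
+//   if (!user) return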
-utilsController.authorize = async (req, res) => {
+self.authorize = async (req, res) => {
+  // TODO: Improve usage of this function by the other APIs
  const token = req.headers.token
  if (token === undefined) {
    res.status(401).json({ success: false, description: 'No token provided.' })
    return
  }

-  const user = await db.table('users').where('token', token).first()
-  if (user) {
-    if (user.enabled === false || user.enabled === 0) {
-      res.json({ success: false, description: 'This account has been disabled.' })
-      return
+  try {
+    const user = await db.table('users')
+      .where('token', token)
+      .first()
+    if (user) {
+      if (user.enabled === false || user.enabled === 0) {
+        res.json({ success: false, description: 'This account has been disabled.' })
+        return
+      }
+      return user
+    }
+
+    res.status(401).json({ success: false, description: 'Invalid token.' })
+  } catch (error) {
+    logger.error(error)
+    res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
+}
+
+self.generateThumbs = async (name, extname, force) => {
+  const thumbname = path.join(paths.thumbs, name.slice(0, -extname.length) + '.png')
+
+  try {
+    // Check if thumbnail already exists
+    try {
+      const lstat = await paths.lstat(thumbname)
+      if (lstat.isSymbolicLink())
+        // Unlink if symlink (should be symlink to the placeholder)
+        await paths.unlink(thumbname)
+      else if (!force)
+        // Continue only if it does not exist, unless forced to
+        return true
+    } catch (error) {
+      // Re-throw error
+      if (error.code !== 'ENOENT')
+        throw error
+    }
+
+    // Full path to input file
+    const input = path.join(paths.uploads, name)
+
+    // If image extension
+    if (self.imageExts.includes(extname)) {
+      const resizeOptions = {
+        width: 200,
+        height: 200,
+        fit: 'contain',
+        background: {
+          r: 0,
+          g: 0,
+          b: 0,
+          alpha: 0
+        }
+      }
+      const image = sharp(input)
+      const metadata = await image.metadata()
+      if (metadata.width > resizeOptions.width || metadata.height > resizeOptions.height) {
+        await image
+          .resize(resizeOptions)
+          .toFile(thumbname)
+      } else if (metadata.width === resizeOptions.width && metadata.height === resizeOptions.height) {
+        await image
+          .toFile(thumbname)
+      } else {
+        const x = resizeOptions.width - metadata.width
+        const y = resizeOptions.height - metadata.height
+        await image
+          .extend({
+            top: Math.floor(y / 2),
+            bottom: Math.ceil(y / 2),
+            left: Math.floor(x / 2),
+            right: Math.ceil(x / 2),
+            background: resizeOptions.background
+          })
+          .toFile(thumbname)
+      }
+    } else if (self.videoExts.includes(extname)) {
+      const metadata = await self.ffprobe(input)
+
+      // Skip files that do not have video streams/channels
+      if (!metadata.streams || !metadata.streams.some(s => s.codec_type === 'video'))
+        throw 'File does not contain any video stream'
+
+      await new Promise((resolve, reject) => {
+        ffmpeg(input)
+          .inputOptions([
+            `-ss ${parseInt(metadata.format.duration) * 20 / 100}`
+          ])
+          .output(thumbname)
+          .outputOptions([
+            '-vframes 1',
+            '-vf scale=200:200:force_original_aspect_ratio=decrease'
+          ])
+          .on('error', async error => {
+            // Try to unlink thumbnail,
+            // since ffmpeg may have created an incomplete thumbnail
+            try {
+              await paths.unlink(thumbname)
+            } catch (err) {
+              if (err && err.code !== 'ENOENT')
+                logger.error(`[${name}]: ${err.toString()}`)
+            }
+            return reject(error)
+          })
+          .on('end', () => resolve(true))
+          .run()
+      })
+    } else {
+      return false
+    }
+  } catch (error) {
+    // Suppress error logging for errors matching these patterns
+    const errorString = error.toString()
+    const suppress = [
+      
/Input file contains unsupported image format/,
+      /Invalid data found when processing input/,
+      /File does not contain any video stream/
+    ]
+
+    if (!suppress.some(t => t.test(errorString)))
+      logger.error(`[${name}]: ${errorString}`)
+
+    try {
+      await paths.symlink(paths.thumbPlaceholder, thumbname)
+      return true
+    } catch (err) {
+      logger.error(err)
+      return false
    }
-    return user
  }

-  res.status(401).json({
-    success: false,
-    description: 'Invalid token.'
-  })
+  return true
}

-utilsController.generateThumbs = (name, force) => {
-  return new Promise(resolve => {
-    const extname = utilsController.extname(name)
-    const thumbname = path.join(thumbsDir, name.slice(0, -extname.length) + '.png')
-    fs.lstat(thumbname, async (error, stats) => {
-      if (error && error.code !== 'ENOENT') {
-        logger.error(error)
-        return resolve(false)
-      }
+self.unlinkFile = async (filename, predb) => {
+  try {
+    await paths.unlink(path.join(paths.uploads, filename))
+  } catch (error) {
+    // Ignore if the file has already been deleted
+    if (error.code !== 'ENOENT')
+      throw error
+  }

-      if (!error && stats.isSymbolicLink()) {
-        // Unlink symlink
-        const unlink = await new Promise(resolve => {
-          fs.unlink(thumbname, error => {
-            if (error) logger.error(error)
-            resolve(!error)
-          })
-        })
-        if (!unlink) return resolve(false)
-      }
+  const identifier = filename.split('.')[0]

-      // Only make thumbnail if it does not exist (ENOENT)
-      if (!error && !force) return resolve(true)
+  // Do not remove from identifiers cache on pre-db-deletion
+  // eslint-disable-next-line curly
+  if (!predb && self.idSet) {
+    self.idSet.delete(identifier)
+    // logger.log(`Removed ${identifier} from identifiers cache (deleteFile)`)
+  }

-      // Full path to input file
-      const input = path.join(__dirname, '..', config.uploads.folder, name)
-
-      new Promise((resolve, reject) => {
-        // If image extension
-        if (utilsController.imageExtensions.includes(extname)) {
-          const resizeOptions = {
-            width: 200,
-            height: 200,
-            fit: 'contain',
-            background: {
-              r: 0,
-              g: 0,
-              b: 0,
-              alpha: 0
-            }
-          }
-          const image = sharp(input)
-          return image
-            .metadata()
-            .then(metadata => {
-              if (metadata.width > resizeOptions.width || metadata.height > resizeOptions.height) {
-                return image
-                  .resize(resizeOptions)
-                  .toFile(thumbname)
-              } else if (metadata.width === resizeOptions.width && metadata.height === resizeOptions.height) {
-                return image
-                  .toFile(thumbname)
-              } else {
-                const x = resizeOptions.width - metadata.width
-                const y = resizeOptions.height - metadata.height
-                return image
-                  .extend({
-                    top: Math.floor(y / 2),
-                    bottom: Math.ceil(y / 2),
-                    left: Math.floor(x / 2),
-                    right: Math.ceil(x / 2),
-                    background: resizeOptions.background
-                  })
-                  .toFile(thumbname)
-              }
-            })
-            .then(() => resolve(true))
-            .catch(reject)
-        }
+  const extname = self.extname(filename)
+  if (self.imageExts.includes(extname) || self.videoExts.includes(extname))
+    try {
+      await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
+    } catch (error) {
+      if (error.code !== 'ENOENT')
+        throw error
+    }
+}

-        // Otherwise video extension
-        ffmpeg.ffprobe(input, (error, metadata) => {
-          if (error) return reject(error)
-
-          // Skip files that do not have video streams/channels
-          if (!metadata.streams || !metadata.streams.some(s => s.codec_type === 'video'))
-            // eslint-disable-next-line prefer-promise-reject-errors
-            return reject('File does not contain any video stream')
-
-          ffmpeg(input)
-            .inputOptions([
-              `-ss ${parseInt(metadata.format.duration) * 20 / 100}`
-            ])
-            .output(thumbname)
-            .outputOptions([
-              '-vframes 1',
-              '-vf scale=200:200:force_original_aspect_ratio=decrease'
-            ])
-            .on('error', error => {
-              // Attempt to unlink thumbnail
-              // Since ffmpeg may have already created an incomplete thumbnail
-              fs.unlink(thumbname, err => {
-                if (err && err.code !== 
'ENOENT')
-                logger.error(`[${name}]: ${err.toString()}`)
-                reject(error)
-              })
-            })
-            .on('end', () => resolve(true))
-            .run()
-        })
-      })
-      .then(resolve)
-      .catch(error => {
-        // Suppress error logging for errors these patterns
-        const errorString = error.toString()
-        const suppress = [
-          /Input file contains unsupported image format/,
-          /Invalid data found when processing input/,
-          /File does not contain any video stream/
-        ]
-        if (!suppress.some(t => t.test(errorString)))
-          logger.error(`[${name}]: ${errorString}`)
-
-        fs.symlink(thumbPlaceholder, thumbname, err => {
-          if (err) logger.error(err)
-          // We return true anyway
-          // if we could make a symlink to the placeholder image
-          resolve(!err)
-        })
-      })
-    })
-  })
}

-utilsController.deleteFile = (filename, set) => {
-  return new Promise((resolve, reject) => {
-    const extname = utilsController.extname(filename)
-    return fs.unlink(path.join(uploadsDir, filename), error => {
-      if (error && error.code !== 'ENOENT') return reject(error)
-      const identifier = filename.split('.')[0]
-      // eslint-disable-next-line curly
-      if (set) {
-        set.delete(identifier)
-        // logger.log(`Removed ${identifier} from identifiers cache (deleteFile)`)
-      }
-      if (utilsController.imageExtensions.includes(extname) || utilsController.videoExtensions.includes(extname)) {
-        const thumb = `${identifier}.png`
-        return fs.unlink(path.join(thumbsDir, thumb), error => {
-          if (error && error.code !== 'ENOENT') return reject(error)
-          resolve(true)
-        })
-      }
-      resolve(true)
-    })
-  })
-}
-
-utilsController.bulkDeleteFiles = async (field, values, user, set) => {
+self.bulkDeleteFromDb = async (field, values, user) => {
  if (!user || !['id', 'name'].includes(field))
    return

  // SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
  // Read more: https://www.sqlite.org/limits.html
  const MAX_VARIABLES_CHUNK_SIZE = 999
  const chunks = []
-  const _values = values.slice() // Make a shallow copy of the array
-  while (_values.length)
-    chunks.push(_values.splice(0, MAX_VARIABLES_CHUNK_SIZE))
+  while (values.length)
+    chunks.push(values.splice(0, MAX_VARIABLES_CHUNK_SIZE))

-  const failed = []
+  let failed = []
  const ismoderator = perms.is(user, 'moderator')
-  await Promise.all(chunks.map((chunk, index) => {
-    const job = async () => {
-      try {
-        const files = await db.table('files')
-          .whereIn(field, chunk)
-          .where(function () {
-            if (!ismoderator)
-              this.where('userid', user.id)
-          })
-        // Push files that could not be found in DB
-        failed.push.apply(failed, chunk.filter(v => !files.find(file => file[field] === v)))
+  try {
+    let unlinkeds = []
+    const albumids = []

-        // Delete all found files physically
-        const deletedFiles = []
-        await Promise.all(files.map(file =>
-          utilsController.deleteFile(file.name)
-            .then(() => deletedFiles.push(file))
-            .catch(error => {
-              failed.push(file[field])
-              logger.error(error)
-            })
-        ))
+    for (let i = 0; i < chunks.length; i++) {
+      const files = await db.table('files')
+        .whereIn(field, chunks[i])
+        .where(function () {
+          if (!ismoderator)
+            this.where('userid', user.id)
+        })

-        if (!deletedFiles.length)
-          return true
+      // Push files that could not be found in db
+      failed = failed.concat(chunks[i].filter(value => !files.find(file => file[field] === value)))

-        // Delete all found files from database
-        const 
deletedFromDb = await db.table('files') - .whereIn('id', deletedFiles.map(file => file.id)) - .del() - - if (set) - deletedFiles.forEach(file => { - const identifier = file.name.split('.')[0] - set.delete(identifier) - // logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFiles)`) - }) - - // Update albums if necessary - if (deletedFromDb) { - const albumids = [] - deletedFiles.forEach(file => { - if (file.albumid && !albumids.includes(file.albumid)) - albumids.push(file.albumid) - }) - await db.table('albums') - .whereIn('id', albumids) - .update('editedAt', Math.floor(Date.now() / 1000)) - .catch(logger.error) + // Unlink all found files + const unlinked = [] + for (const file of files) + try { + await self.unlinkFile(file.name, true) + unlinked.push(file) + } catch (error) { + logger.error(error) + failed.push(file[field]) } - // Purge Cloudflare's cache if necessary - if (config.cloudflare.purgeCache) - utilsController.purgeCloudflareCache(deletedFiles.map(file => file.name), true, true) - .then(results => { - for (const result of results) - if (result.errors.length) - result.errors.forEach(error => logger.error(`[CF]: ${error}`)) - }) - } catch (error) { - logger.error(error) - } + if (!unlinked.length) + continue + + // Delete all unlinked files from db + await db.table('files') + .whereIn('id', unlinked.map(file => file.id)) + .del() + self.invalidateStatsCache('uploads') + + if (self.idSet) + unlinked.forEach(file => { + const identifier = file.name.split('.')[0] + self.idSet.delete(identifier) + // logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFromDb)`) + }) + + // Push album ids + unlinked.forEach(file => { + if (file.albumid && !albumids.includes(file.albumid)) + albumids.push(file.albumid) + }) + + // Push unlinked files + unlinkeds = unlinkeds.concat(unlinked) } - return new Promise(resolve => job().then(() => resolve())) - })) + + if (unlinkeds.length) { + // Update albums if necessary, but do not wait + if (albumids.length) + db.table('albums') + .whereIn('id', albumids) + .update('editedAt', Math.floor(Date.now() / 1000)) + .catch(logger.error) + + // Purge Cloudflare's cache if necessary, but do not wait + if (config.cloudflare.purgeCache) + self.purgeCloudflareCache(unlinkeds.map(file => file.name), true, true) + .then(results => { + for (const result of results) + if (result.errors.length) + result.errors.forEach(error => logger.error(`[CF]: ${error}`)) + }) + } + } catch (error) { + logger.error(error) + } + return failed } -utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => { +self.purgeCloudflareCache = async (names, uploads, thumbs) => { if (!Array.isArray(names) || !names.length || !cloudflareAuth) return [{ success: false, @@ -398,8 +418,8 @@ utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => { names = names.map(name => { if (uploads) { const url = `${domain}/${name}` - const extname = utilsController.extname(name) - if (thumbs && utilsController.mayGenerateThumb(extname)) + const extname = self.extname(name) + if (thumbs && self.mayGenerateThumb(extname)) thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`) return url } else { @@ -411,87 +431,70 @@ utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => { // Split array into multiple arrays with max length of 30 URLs // https://api.cloudflare.com/#zone-purge-files-by-url const MAX_LENGTH = 30 - const files = [] + const chunks = [] while (names.length) - files.push(names.splice(0, MAX_LENGTH)) + 
chunks.push(names.splice(0, MAX_LENGTH)) const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache` const results = [] - await new Promise(resolve => { - const purge = async i => { - const result = { - success: false, - files: files[i], - errors: [] - } - try { - const fetchPurge = await fetch(url, { - method: 'POST', - body: JSON.stringify({ - files: result.files - }), - headers: { - 'Content-Type': 'application/json', - 'X-Auth-Email': config.cloudflare.email, - 'X-Auth-Key': config.cloudflare.apiKey - } - }).then(res => res.json()) - result.success = fetchPurge.success - if (Array.isArray(fetchPurge.errors) && fetchPurge.errors.length) - result.errors = fetchPurge.errors.map(error => `${error.code}: ${error.message}`) - } catch (error) { - result.errors = [error.toString()] - } - - results.push(result) - - if (i < files.length - 1) - purge(i + 1) - else - resolve() + for (const chunk of chunks) { + const result = { + success: false, + files: chunk, + errors: [] } - purge(0) - }) + + try { + const purge = await fetch(url, { + method: 'POST', + body: JSON.stringify({ files: chunk }), + headers: { + 'Content-Type': 'application/json', + 'X-Auth-Email': config.cloudflare.email, + 'X-Auth-Key': config.cloudflare.apiKey + } + }) + const response = await purge.json() + result.success = response.success + if (Array.isArray(response.errors) && response.errors.length) + result.errors = response.errors.map(error => `${error.code}: ${error.message}`) + } catch (error) { + result.errors = [error.toString()] + } + + results.push(result) + } return results } -utilsController.getMemoryUsage = () => { - // For now this is linux-only. Not sure if darwin has this too. - return new Promise((resolve, reject) => { - const prc = spawn('free', ['-b']) - prc.stdout.setEncoding('utf8') - prc.stdout.on('data', data => { - const parsed = {} - const str = data.toString() - const lines = str.split(/\n/g) - for (let i = 0; i < lines.length; i++) { - lines[i] = lines[i].split(/\s+/) - if (i === 0) continue - const id = lines[i][0].toLowerCase().slice(0, -1) - if (!id) continue - if (!parsed[id]) parsed[id] = {} - for (let j = 1; j < lines[i].length; j++) { - const bytes = parseInt(lines[i][j]) - parsed[id][lines[0][j]] = isNaN(bytes) ? 
null : bytes - } - } - resolve(parsed) - }) - prc.on('close', code => { - reject(new Error(`Process exited with code ${code}.`)) - }) - }) +self.bulkDeleteExpired = async (dryrun) => { + const timestamp = Date.now() / 1000 + const field = 'id' + const sudo = { username: 'root' } + + const result = {} + result.expired = await db.table('files') + .where('expirydate', '<=', timestamp) + .select(field) + .then(rows => rows.map(row => row[field])) + + if (!dryrun) { + const values = result.expired.slice() // Make a shallow copy + result.failed = await self.bulkDeleteFromDb(field, values, sudo) + } + + return result } -utilsController.invalidateStatsCache = type => { +self.invalidateStatsCache = type => { if (!['albums', 'users', 'uploads'].includes(type)) return - _stats[type].invalidatedAt = Date.now() + statsCache[type].invalidatedAt = Date.now() } -utilsController.stats = async (req, res, next) => { - const user = await utilsController.authorize(req, res) +self.stats = async (req, res, next) => { + const user = await self.authorize(req, res) if (!user) return const isadmin = perms.is(user, 'admin') @@ -499,48 +502,44 @@ utilsController.stats = async (req, res, next) => { const stats = {} - if (!_stats.system.cache && _stats.system.generating) { + // Re-use caches as long as they are still valid + + if (!statsCache.system.cache && statsCache.system.generating) { stats.system = false - } else if ((Date.now() - _stats.system.generatedAt <= 1000) || _stats.system.generating) { - // Re-use system cache for only 1000ms - stats.system = _stats.system.cache + } else if (statsCache.system.generating) { + stats.system = statsCache.system.cache } else { - _stats.system.generating = true - const platform = os.platform() + statsCache.system.generating = true + + const os = await si.osInfo() + const currentLoad = await si.currentLoad() + const mem = await si.mem() + stats.system = { - platform: `${platform}-${os.arch()}`, - systemMemory: null, - nodeVersion: `${process.versions.node}`, - memoryUsage: process.memoryUsage().rss + platform: `${os.platform} ${os.arch}`, + distro: `${os.distro} ${os.release}`, + kernel: os.kernel, + cpuLoad: `${currentLoad.currentload.toFixed(1)}%`, + cpusLoad: currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '), + systemMemory: { + used: mem.active, + total: mem.total + }, + memoryUsage: process.memoryUsage().rss, + nodeVersion: `${process.versions.node}` } - if (platform === 'linux') { - const memoryUsage = await utilsController.getMemoryUsage() - stats.system.systemMemory = { - used: memoryUsage.mem.used, - total: memoryUsage.mem.total - } - } else { - delete stats.system.systemMemory - } - - if (platform !== 'win32') - stats.system.loadAverage = `${os.loadavg().map(load => load.toFixed(2)).join(', ')}` - // Update cache - _stats.system.cache = stats.system - _stats.system.generatedAt = Date.now() - _stats.system.generating = false + statsCache.system.cache = stats.system + statsCache.system.generating = false } - // Re-use albums, users, and uploads caches as long as they are still valid - - if (!_stats.albums.cache && _stats.albums.generating) { + if (!statsCache.albums.cache && statsCache.albums.generating) { stats.albums = false - } else if ((_stats.albums.invalidatedAt < _stats.albums.generatedAt) || _stats.albums.generating) { - stats.albums = _stats.albums.cache + } else if ((statsCache.albums.invalidatedAt < statsCache.albums.generatedAt) || statsCache.albums.generating) { + stats.albums = statsCache.albums.cache } else { - _stats.albums.generating = 
true + statsCache.albums.generating = true stats.albums = { total: 0, active: 0, @@ -560,7 +559,7 @@ utilsController.stats = async (req, res, next) => { if (album.zipGeneratedAt) identifiers.push(album.identifier) } - const zipsDir = path.join(uploadsDir, 'zips') + const zipsDir = path.join(paths.uploads, 'zips') await Promise.all(identifiers.map(identifier => { return new Promise(resolve => { const filePath = path.join(zipsDir, `${identifier}.zip`) @@ -572,17 +571,17 @@ utilsController.stats = async (req, res, next) => { })) // Update cache - _stats.albums.cache = stats.albums - _stats.albums.generatedAt = Date.now() - _stats.albums.generating = false + statsCache.albums.cache = stats.albums + statsCache.albums.generatedAt = Date.now() + statsCache.albums.generating = false } - if (!_stats.users.cache && _stats.users.generating) { + if (!statsCache.users.cache && statsCache.users.generating) { stats.users = false - } else if ((_stats.users.invalidatedAt < _stats.users.generatedAt) || _stats.users.generating) { - stats.users = _stats.users.cache + } else if ((statsCache.users.invalidatedAt < statsCache.users.generatedAt) || statsCache.users.generating) { + stats.users = statsCache.users.cache } else { - _stats.users.generating = true + statsCache.users.generating = true stats.users = { total: 0, disabled: 0 @@ -609,17 +608,17 @@ utilsController.stats = async (req, res, next) => { } // Update cache - _stats.users.cache = stats.users - _stats.users.generatedAt = Date.now() - _stats.users.generating = false + statsCache.users.cache = stats.users + statsCache.users.generatedAt = Date.now() + statsCache.users.generating = false } - if (!_stats.uploads.cache && _stats.uploads.generating) { + if (!statsCache.uploads.cache && statsCache.uploads.generating) { stats.uploads = false - } else if ((_stats.uploads.invalidatedAt < _stats.uploads.generatedAt) || _stats.uploads.generating) { - stats.uploads = _stats.uploads.cache + } else if ((statsCache.uploads.invalidatedAt < statsCache.uploads.generatedAt) || statsCache.uploads.generating) { + stats.uploads = statsCache.uploads.cache } else { - _stats.uploads.generating = true + statsCache.uploads.generating = true stats.uploads = { total: 0, size: 0, @@ -632,22 +631,22 @@ utilsController.stats = async (req, res, next) => { stats.uploads.total = uploads.length for (const upload of uploads) { stats.uploads.size += parseInt(upload.size) - const extname = utilsController.extname(upload.name) - if (utilsController.imageExtensions.includes(extname)) + const extname = self.extname(upload.name) + if (self.imageExts.includes(extname)) stats.uploads.images++ - else if (utilsController.videoExtensions.includes(extname)) + else if (self.videoExts.includes(extname)) stats.uploads.videos++ else stats.uploads.others++ } // Update cache - _stats.uploads.cache = stats.uploads - _stats.uploads.generatedAt = Date.now() - _stats.uploads.generating = false + statsCache.uploads.cache = stats.uploads + statsCache.uploads.generatedAt = Date.now() + statsCache.uploads.generating = false } return res.json({ success: true, stats }) } -module.exports = utilsController +module.exports = self diff --git a/database/db.js b/database/db.js index 599869d..4967a2d 100644 --- a/database/db.js +++ b/database/db.js @@ -34,6 +34,7 @@ const init = function (db) { table.string('ip') table.integer('albumid') table.integer('timestamp') + table.integer('expirydate') }).then(() => {}) }) @@ -46,7 +47,6 @@ const init = function (db) { table.string('token') table.integer('enabled') 
table.integer('timestamp') - table.integer('fileLength') table.integer('permission') }).then(() => { db.table('users').where({ username: 'root' }).then((user) => { diff --git a/database/migration.js b/database/migration.js index 1e03b3c..f9ca8e7 100644 --- a/database/migration.js +++ b/database/migration.js @@ -3,6 +3,9 @@ const db = require('knex')(config.database) const perms = require('./../controllers/permissionController') const map = { + files: { + expirydate: 'integer' + }, albums: { editedAt: 'integer', zipGeneratedAt: 'integer', @@ -12,26 +15,25 @@ const map = { }, users: { enabled: 'integer', - fileLength: 'integer', permission: 'integer' } } -const migration = {} -migration.start = async () => { - const tables = Object.keys(map) - await Promise.all(tables.map(table => { - const columns = Object.keys(map[table]) - return Promise.all(columns.map(async column => { - if (await db.schema.hasColumn(table, column)) - return // console.log(`SKIP: ${column} => ${table}.`) +;(async () => { + const tableNames = Object.keys(map) + for (const tableName of tableNames) { + const columnNames = Object.keys(map[tableName]) + for (const columnName of columnNames) { + if (await db.schema.hasColumn(tableName, columnName)) + continue - const columnType = map[table][column] - return db.schema.table(table, t => { t[columnType](column) }) - .then(() => console.log(`OK: ${column} (${columnType}) => ${table}.`)) - .catch(console.error) - })) - })) + const columnType = map[tableName][columnName] + await db.schema.table(tableName, table => { + table[columnType](columnName) + }) + console.log(`OK: ${tableName} <- ${columnName} (${columnType})`) + } + } await db.table('users') .where('username', 'root') @@ -39,15 +41,17 @@ migration.start = async () => { .update({ permission: perms.permissions.superadmin }) - .then(rows => { - // NOTE: permissionController.js actually have a hard-coded check for "root" account so that + .then(result => { + // NOTE: permissionController.js actually has a hard-coded check for "root" account so that // it will always have "superadmin" permission regardless of its permission value in database - if (!rows) return console.log('Unable to update root\'s permission into superadmin.') + if (!result) return console.log('Unable to update root\'s permission into superadmin.') console.log(`Updated root's permission to ${perms.permissions.superadmin} (superadmin).`) }) console.log('Migration finished! Now you may start lolisafe normally.') - process.exit(0) -} - -migration.start() +})() + .then(() => process.exit(0)) + .catch(error => { + console.error(error) + process.exit(1) + }) diff --git a/logger.js b/logger.js index 15b4aaf..8ddc418 100644 --- a/logger.js +++ b/logger.js @@ -1,26 +1,26 @@ const { inspect } = require('util') -const logger = {} +const self = {} -logger.clean = item => { +const clean = item => { if (typeof item === 'string') return item const cleaned = inspect(item, { depth: 0 }) return cleaned } -logger.write = (content, options = {}) => { +const write = (content, options = {}) => { const date = new Date().toISOString() .replace(/T/, ' ') .replace(/\..*/, '') const stream = options.error ? 
process.stderr : process.stdout - stream.write(`[${date}]: ${options.prefix || ''}${logger.clean(content)}\n`) + stream.write(`[${date}]: ${options.prefix || ''}${clean(content)}\n`) } -logger.log = logger.write +self.log = write -logger.error = (content, options = {}) => { +self.error = (content, options = {}) => { options.error = true - logger.write(content, options) + write(content, options) } -module.exports = logger +module.exports = self diff --git a/lolisafe.js b/lolisafe.js index 5f7ba05..a7b7d2d 100644 --- a/lolisafe.js +++ b/lolisafe.js @@ -2,10 +2,10 @@ const bodyParser = require('body-parser') const clamd = require('clamdjs') const config = require('./config') const express = require('express') -const fs = require('fs') const helmet = require('helmet') const logger = require('./logger') const nunjucks = require('nunjucks') +const path = require('path') const RateLimit = require('express-rate-limit') const readline = require('readline') const safe = express() @@ -17,6 +17,7 @@ process.on('unhandledRejection', error => { logger.error(error, { prefix: 'Unhandled Rejection (Promise): ' }) }) +const paths = require('./controllers/pathsController') const utils = require('./controllers/utilsController') const album = require('./routes/album') @@ -26,14 +27,6 @@ const nojs = require('./routes/nojs') const db = require('knex')(config.database) require('./database/db.js')(db) -// Check and create missing directories -fs.existsSync('./pages/custom') || fs.mkdirSync('./pages/custom') -fs.existsSync(`./${config.logsFolder}`) || fs.mkdirSync(`./${config.logsFolder}`) -fs.existsSync(`./${config.uploads.folder}`) || fs.mkdirSync(`./${config.uploads.folder}`) -fs.existsSync(`./${config.uploads.folder}/chunks`) || fs.mkdirSync(`./${config.uploads.folder}/chunks`) -fs.existsSync(`./${config.uploads.folder}/thumbs`) || fs.mkdirSync(`./${config.uploads.folder}/thumbs`) -fs.existsSync(`./${config.uploads.folder}/zips`) || fs.mkdirSync(`./${config.uploads.folder}/zips`) - safe.use(helmet()) if (config.trustProxy) safe.set('trust proxy', 1) @@ -57,7 +50,7 @@ if (Array.isArray(config.rateLimits) && config.rateLimits.length) safe.use(bodyParser.urlencoded({ extended: true })) safe.use(bodyParser.json()) -// safe.fiery.me-exclusive cache control +// Cache control (safe.fiery.me) if (config.cacheControl) { const cacheControls = { // max-age: 30 days @@ -79,9 +72,9 @@ if (config.cacheControl) { } if (config.serveFilesWithNode) - safe.use('/', express.static(config.uploads.folder, { setHeaders })) + safe.use('/', express.static(paths.uploads, { setHeaders })) - safe.use('/', express.static('./public', { setHeaders })) + safe.use('/', express.static(paths.public, { setHeaders })) // Do NOT cache these dynamic routes safe.use(['/a', '/api', '/nojs'], (req, res, next) => { @@ -102,112 +95,107 @@ if (config.cacheControl) { }) } else { if (config.serveFilesWithNode) - safe.use('/', express.static(config.uploads.folder)) + safe.use('/', express.static(paths.uploads)) - safe.use('/', express.static('./public')) + safe.use('/', express.static(paths.public)) } safe.use('/', album) safe.use('/', nojs) safe.use('/api', api) -if (!Array.isArray(config.pages) || !config.pages.length) { - logger.error('Config does not haves any frontend pages enabled') - process.exit(1) -} +;(async () => { + try { + // Verify paths, create missing ones, clean up temp ones + await paths.init() -for (const page of config.pages) - if (fs.existsSync(`./pages/custom/${page}.html`)) { - safe.get(`/${page}`, (req, res, next) => 
res.sendFile(`${page}.html`, { - root: './pages/custom/' - })) - } else if (page === 'home') { - safe.get('/', (req, res, next) => res.render('home', { - maxSize: config.uploads.maxSize, - urlMaxSize: config.uploads.urlMaxSize, - urlDisclaimerMessage: config.uploads.urlDisclaimerMessage, - urlExtensionsFilterMode: config.uploads.urlExtensionsFilterMode, - urlExtensionsFilter: config.uploads.urlExtensionsFilter, - gitHash: safe.get('git-hash') - })) - } else if (page === 'faq') { - const fileLength = config.uploads.fileLength - safe.get('/faq', (req, res, next) => res.render('faq', { - whitelist: config.extensionsFilterMode === 'whitelist', - extensionsFilter: config.extensionsFilter, - fileLength, - tooShort: (fileLength.max - fileLength.default) > (fileLength.default - fileLength.min), - noJsMaxSize: parseInt(config.cloudflare.noJsMaxSize) < parseInt(config.uploads.maxSize), - chunkSize: config.uploads.chunkSize - })) - } else { - safe.get(`/${page}`, (req, res, next) => res.render(page)) - } - -safe.use((req, res, next) => { - res.status(404).sendFile(config.errorPages[404], { root: config.errorPages.rootDir }) -}) -safe.use((error, req, res, next) => { - logger.error(error) - res.status(500).sendFile(config.errorPages[500], { root: config.errorPages.rootDir }) -}) - -const start = async () => { - if (config.showGitHash) { - const gitHash = await new Promise((resolve, reject) => { - require('child_process').exec('git rev-parse HEAD', (error, stdout) => { - if (error) return reject(error) - resolve(stdout.replace(/\n$/, '')) - }) - }).catch(logger.error) - if (!gitHash) return - logger.log(`Git commit: ${gitHash}`) - safe.set('git-hash', gitHash) - } - - const scan = config.uploads.scan - if (scan && scan.enabled) { - const createScanner = async () => { - try { - if (!scan.ip || !scan.port) - throw new Error('clamd IP or port is missing') - - const version = await clamd.version(scan.ip, scan.port) - logger.log(`${scan.ip}:${scan.port} ${version}`) - - const scanner = clamd.createScanner(scan.ip, scan.port) - safe.set('clam-scanner', scanner) - return true - } catch (error) { - logger.error(`[ClamAV]: ${error.toString()}`) - return false - } + if (!Array.isArray(config.pages) || !config.pages.length) { + logger.error('Config file does not have any frontend pages enabled') + process.exit(1) } - if (!await createScanner()) return process.exit(1) - } - if (config.uploads.cacheFileIdentifiers) { - // Cache tree of uploads directory - const setSize = await new Promise((resolve, reject) => { - const uploadsDir = `./${config.uploads.folder}` - fs.readdir(uploadsDir, (error, names) => { - if (error) return reject(error) - const set = new Set() - names.forEach(name => set.add(name.split('.')[0])) - safe.set('uploads-set', set) - resolve(set.size) + for (const page of config.pages) { + const customPage = path.join(paths.customPages, `${page}.html`) + if (!await paths.access(customPage).catch(() => true)) + safe.get(`/${page === 'home' ? 
'' : page}`, (req, res, next) => res.sendFile(customPage)) + else if (page === 'home') + safe.get('/', (req, res, next) => res.render('home', { + maxSize: parseInt(config.uploads.maxSize), + urlMaxSize: parseInt(config.uploads.urlMaxSize), + urlDisclaimerMessage: config.uploads.urlDisclaimerMessage, + urlExtensionsFilterMode: config.uploads.urlExtensionsFilterMode, + urlExtensionsFilter: config.uploads.urlExtensionsFilter, + temporaryUploadAges: Array.isArray(config.uploads.temporaryUploadAges) && + config.uploads.temporaryUploadAges.length, + gitHash: utils.gitHash + })) + else if (page === 'faq') + safe.get('/faq', (req, res, next) => res.render('faq', { + whitelist: config.extensionsFilterMode === 'whitelist', + extensionsFilter: config.extensionsFilter, + noJsMaxSize: parseInt(config.cloudflare.noJsMaxSize) < parseInt(config.uploads.maxSize), + chunkSize: parseInt(config.uploads.chunkSize) + })) + else + safe.get(`/${page}`, (req, res, next) => res.render(page)) + } + + // Error pages + safe.use((req, res, next) => { + res.status(404).sendFile(path.join(paths.errorRoot, config.errorPages[404])) + }) + + safe.use((error, req, res, next) => { + logger.error(error) + res.status(500).sendFile(path.join(paths.errorRoot, config.errorPages[500])) + }) + + // Git hash + if (config.showGitHash) { + utils.gitHash = await new Promise((resolve, reject) => { + require('child_process').exec('git rev-parse HEAD', (error, stdout) => { + if (error) return reject(error) + resolve(stdout.replace(/\n$/, '')) + }) }) - }).catch(error => logger.error(error.toString())) - if (!setSize) return process.exit(1) - logger.log(`Cached ${setSize} identifiers in uploads directory`) - } + logger.log(`Git commit: ${utils.gitHash}`) + } + + // Clamd scanner + if (config.uploads.scan && config.uploads.scan.enabled) { + const { ip, port } = config.uploads.scan + const version = await clamd.version(ip, port) + logger.log(`${ip}:${port} ${version}`) + + utils.clamd.scanner = clamd.createScanner(ip, port) + if (!utils.clamd.scanner) + throw 'Could not create clamd scanner' + } + + // Cache file identifiers + if (config.uploads.cacheFileIdentifiers) { + utils.idSet = await db.table('files') + .select('name') + .then(rows => { + return new Set(rows.map(row => row.name.split('.')[0])) + }) + logger.log(`Cached ${utils.idSet.size} file identifiers`) + } + + // Binds Express to port + await new Promise((resolve, reject) => { + try { + safe.listen(config.port, () => resolve()) + } catch (error) { + reject(error) + } + }) - safe.listen(config.port, async () => { logger.log(`lolisafe started on port ${config.port}`) - // safe.fiery.me-exclusive cache control + // Cache control (safe.fiery.me) if (config.cacheControl) { - logger.log('Cache control enabled') + logger.log('Cache control enabled, purging...') const routes = config.pages.concat(['api/check']) const results = await utils.purgeCloudflareCache(routes) let errored = false @@ -224,6 +212,32 @@ const start = async () => { logger.log(`Purged ${succeeded} Cloudflare's cache`) } + // Temporary uploads + if (Array.isArray(config.uploads.temporaryUploadAges) && config.uploads.temporaryUploadAges.length) { + let temporaryUploadsInProgress = false + const temporaryUploadCheck = async () => { + if (temporaryUploadsInProgress) + return + + temporaryUploadsInProgress = true + const result = await utils.bulkDeleteExpired() + + if (result.expired.length) { + let logMessage = `Deleted ${result.expired.length} expired upload(s)` + if (result.failed.length) + logMessage += ` but unable to 
delete ${result.failed.length}` + + logger.log(logMessage) + } + + temporaryUploadsInProgress = false + } + temporaryUploadCheck() + + if (config.uploads.temporaryUploadsInterval) + setInterval(temporaryUploadCheck, config.uploads.temporaryUploadsInterval) + } + // NODE_ENV=development yarn start if (process.env.NODE_ENV === 'development') { // Add readline interface to allow evaluating arbitrary JavaScript from console @@ -242,9 +256,10 @@ const start = async () => { }).on('SIGINT', () => { process.exit(0) }) - logger.log('Development mode enabled (disabled Nunjucks caching & enabled readline interface)') + logger.log('Development mode (disabled nunjucks caching & enabled readline interface)') } - }) -} - -start() + } catch (error) { + logger.error(error) + process.exit(1) + } +})() diff --git a/package.json b/package.json index 47015b7..48db7b5 100644 --- a/package.json +++ b/package.json @@ -18,8 +18,9 @@ "start": "node ./lolisafe.js", "startdev": "env NODE_ENV=development node ./lolisafe.js", "pm2": "pm2 start --name safe ./lolisafe.js", + "cf-purge": "node ./scripts/cf-purge.js", + "delete-expired": "node ./scripts/delete-expired.js", "thumbs": "node ./scripts/thumbs.js", - "cfpurge": "node ./scripts/cfpurge.js", "pull": "git stash; git pull; yarn install --production; git stash pop; echo OK." }, "dependencies": { @@ -29,23 +30,23 @@ "express": "^4.17.1", "express-rate-limit": "^5.0.0", "fluent-ffmpeg": "^2.1.2", - "helmet": "^3.20.1", + "helmet": "^3.21.0", "jszip": "^3.2.2", "knex": "^0.19.3", "multer": "^1.4.2", "node-fetch": "^2.6.0", "nunjucks": "^3.2.0", - "os": "^0.1.1", "randomstring": "^1.1.5", "readline": "^1.3.0", "sharp": "^0.23.0", - "sqlite3": "^4.1.0" + "sqlite3": "^4.1.0", + "systeminformation": "^4.14.8" }, "devDependencies": { "eslint": "^6.3.0", "eslint-config-standard": "^14.1.0", "eslint-plugin-import": "^2.18.2", - "eslint-plugin-node": "^9.2.0", + "eslint-plugin-node": "^10.0.0", "eslint-plugin-promise": "^4.2.1", "eslint-plugin-standard": "^4.0.1" } diff --git a/public/css/home.css b/public/css/home.css index b15777a..455696a 100644 --- a/public/css/home.css +++ b/public/css/home.css @@ -64,15 +64,18 @@ -webkit-transform: scale(0.86); transform: scale(0.86); } + 25% { opacity: 100; } + 67% { -webkit-box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); -webkit-transform: scale(1); transform: scale(1); } + 100% { -webkit-box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); @@ -89,15 +92,18 @@ -webkit-transform: scale(0.86); transform: scale(0.86); } + 25% { opacity: 100; } + 67% { -webkit-box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); -webkit-transform: scale(1); transform: scale(1); } + 100% { -webkit-box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2); diff --git a/public/css/style.css b/public/css/style.css index 0997557..f95c4fa 100644 --- a/public/css/style.css +++ b/public/css/style.css @@ 
-13,6 +13,7 @@ body { 0% { opacity: 0; } + 100% { opacity: 1; } @@ -22,6 +23,7 @@ body { 0% { opacity: 0; } + 100% { opacity: 1; } @@ -39,6 +41,12 @@ hr { background-color: #898b8d; } +code, +.message-body code { + background-color: #222528; + border-radius: 5px; +} + .title { color: #eff0f1; } @@ -127,7 +135,8 @@ hr { } .progress.is-breeze:indeterminate { - background-image: linear-gradient(to right,#60a8dc 30%,#eff0f1 30%); + background-image: -webkit-gradient(linear, left top, right top, color-stop(30%, #60a8dc), color-stop(30%, #eff0f1)); + background-image: linear-gradient(to right, #60a8dc 30%, #eff0f1 30%); } .message { diff --git a/public/css/sweetalert.css b/public/css/sweetalert.css index 7fd3577..2e5bb61 100644 --- a/public/css/sweetalert.css +++ b/public/css/sweetalert.css @@ -31,13 +31,6 @@ color: #bdc3c7; } -.swal-content .is-code { - font-family: 'Courier New', Courier, monospace; - border: 1px dashed #eff0f1; - border-radius: 5px; - margin-top: 5px; -} - .swal-button { background-color: #3794d2; color: #eff0f1; @@ -94,6 +87,7 @@ 0% { border-color: #ffaa60; } + to { border-color: #f67400; } @@ -103,6 +97,7 @@ 0% { border-color: #ffaa60; } + to { border-color: #f67400; } @@ -112,6 +107,7 @@ 0% { background-color: #ffaa60; } + to { background-color: #f67400; } @@ -121,6 +117,7 @@ 0% { background-color: #ffaa60; } + to { background-color: #f67400; } diff --git a/public/js/auth.js b/public/js/auth.js index 377eec5..9ec9c2d 100644 --- a/public/js/auth.js +++ b/public/js/auth.js @@ -14,18 +14,17 @@ const page = { } page.do = function (dest) { - const user = page.user.value - const pass = page.pass.value - + const user = page.user.value.trim() if (!user) return swal('An error occurred!', 'You need to specify a username.', 'error') + const pass = page.pass.value.trim() if (!pass) return swal('An error occurred!', 'You need to specify a password.', 'error') axios.post(`api/${dest}`, { - username: user.trim(), - password: pass.trim() + username: user, + password: pass }).then(function (response) { if (response.data.success === false) return swal(`Unable to ${dest}!`, response.data.description, 'error') @@ -49,7 +48,7 @@ page.verify = function () { window.location = 'dashboard' }).catch(function (error) { - console.log(error) + console.error(error) const description = error.response.data && error.response.data.description ? error.response.data.description : 'There was an error with the request, please check the console for more information.' 
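
Aside: the stats rewrite in controllers/utilsController.js above replaces the
linux-only `free -b` parser with the new systeminformation dependency from
package.json. A minimal standalone sketch of the same three calls, assuming the
module is imported as `si` (that import line sits outside the visible hunks)
and the ^4.x field names (note the all-lowercase `currentload`):

    const si = require('systeminformation')

    async function systemStats () {
      const os = await si.osInfo() // platform, arch, distro, release, kernel
      const currentLoad = await si.currentLoad()
      const mem = await si.mem()
      return {
        platform: `${os.platform} ${os.arch}`,
        distro: `${os.distro} ${os.release}`,
        kernel: os.kernel,
        cpuLoad: `${currentLoad.currentload.toFixed(1)}%`,
        // 'active' is memory in use excluding buffers/cache
        systemMemory: { used: mem.active, total: mem.total }
      }
    }

    systemStats().then(console.log)

This is the shape of the object the endpoint caches into statsCache.system.
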
diff --git a/public/js/dashboard.js b/public/js/dashboard.js index e477b0d..5ca8d21 100644 --- a/public/js/dashboard.js +++ b/public/js/dashboard.js @@ -108,7 +108,7 @@ page.verifyToken = function (token, reloadOnError) { page.permissions = response.data.permissions page.prepareDashboard() }).catch(function (error) { - console.log(error) + console.error(error) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) } @@ -167,11 +167,6 @@ page.prepareDashboard = function () { page.getAlbums() }) - document.querySelector('#itemFileLength').addEventListener('click', function () { - page.setActiveMenu(this) - page.changeFileLength() - }) - document.querySelector('#itemTokens').addEventListener('click', function () { page.setActiveMenu(this) page.changeToken() @@ -216,9 +211,11 @@ page.domClick = function (event) { let element = event.target if (!element) return - // If the clicked element is an icon, delegate event to its A parent; hacky - if (element.tagName === 'I' && element.parentNode.tagName === 'SPAN') element = element.parentNode - if (element.tagName === 'SPAN' && element.parentNode.tagName === 'A') element = element.parentNode + // Delegate click events to their A or BUTTON parents + if (['I'].includes(element.tagName) && ['SPAN'].includes(element.parentNode.tagName)) + element = element.parentNode + if (['SPAN'].includes(element.tagName) && ['A', 'BUTTON'].includes(element.parentNode.tagName)) + element = element.parentNode // Skip elements that have no action data if (!element.dataset || !element.dataset.action) return @@ -325,8 +322,10 @@ page.switchPage = function (action, element) { views.pageNum = parseInt(element.dataset.goto) return func(views, element) case 'jump-to-page': { - const jumpToPage = parseInt(document.querySelector('#jumpToPage').value) - views.pageNum = isNaN(jumpToPage) ? 0 : (jumpToPage - 1) + const jumpToPage = document.querySelector('#jumpToPage') + if (!jumpToPage.checkValidity()) return + const parsed = parseInt(jumpToPage.value) + views.pageNum = isNaN(parsed) ? 0 : (parsed - 1) if (views.pageNum < 0) views.pageNum = 0 return func(views, element) } @@ -340,7 +339,7 @@ page.focusJumpToPage = function () { element.select() } -page.getUploads = function ({ pageNum, album, all, filters } = {}, element) { +page.getUploads = function ({ pageNum, album, all, filters, autoPage } = {}, element) { if (element) page.isLoading(element, true) if ((all || filters) && !page.permissions.moderator) @@ -368,7 +367,15 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) { const files = response.data.files if (pageNum && (files.length === 0)) { if (element) page.isLoading(element, false) - return swal('An error occurred!', `There are no more uploads to populate page ${pageNum + 1}.`, 'error') + if (autoPage) + return page.getUploads({ + pageNum: Math.ceil(response.data.count / 25) - 1, + album, + all, + filters + }, element) + else + return swal('An error occurred!', `There are no more uploads to populate page ${pageNum + 1}.`, 'error') } page.currentView = all ? 'uploadsAll' : 'uploads' @@ -413,7 +420,7 @@ page.getUploads = function ({ pageNum, album, all, filters } = {}, element) {
        [uploads-list template: one -/+ line pair whose markup was stripped during extraction]
` - page.fadeAndScroll() const homeDomain = response.data.homeDomain const table = document.querySelector('#table') @@ -1252,8 +1289,9 @@ page.getAlbums = function () { table.appendChild(tr) } + page.fadeAndScroll() }).catch(function (error) { - console.log(error) + console.error(error) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) } @@ -1315,8 +1353,8 @@ page.editAlbum = function (id) { axios.post('api/albums/edit', { id, - name: document.querySelector('#swalName').value, - description: document.querySelector('#swalDescription').value, + name: document.querySelector('#swalName').value.trim(), + description: document.querySelector('#swalDescription').value.trim(), download: document.querySelector('#swalDownload').checked, public: document.querySelector('#swalPublic').checked, requestLink: document.querySelector('#swalRequestLink').checked @@ -1340,7 +1378,7 @@ page.editAlbum = function (id) { page.getAlbumsSidebar() page.getAlbums() }).catch(function (error) { - console.log(error) + console.error(error) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) }) @@ -1375,6 +1413,8 @@ page.deleteAlbum = function (id) { if (response.data.success === false) if (response.data.description === 'No token provided') { return page.verifyToken(page.token) + } else if (Array.isArray(response.data.failed) && response.data.failed.length) { + return swal('An error occurred!', 'Unable to delete ', 'error') } else { return swal('An error occurred!', response.data.description, 'error') } @@ -1383,7 +1423,7 @@ page.deleteAlbum = function (id) { page.getAlbumsSidebar() page.getAlbums() }).catch(function (error) { - console.log(error) + console.error(error) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) }) @@ -1411,7 +1451,7 @@ page.submitAlbum = function (element) { page.getAlbumsSidebar() page.getAlbums() }).catch(function (error) { - console.log(error) + console.error(error) page.isLoading(element, false) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) @@ -1448,7 +1488,7 @@ page.getAlbumsSidebar = function () { albumsContainer.appendChild(li) } }).catch(function (error) { - console.log(error) + console.error(error) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) } @@ -1458,82 +1498,6 @@ page.getAlbum = function (album) { page.getUploads({ album: album.id }) } -page.changeFileLength = function () { - axios.get('api/filelength/config').then(function (response) { - if (response.data.success === false) - if (response.data.description === 'No token provided') { - return page.verifyToken(page.token) - } else { - return swal('An error occurred!', response.data.description, 'error') - } - - // Shorter vars - const { max, min } = response.data.config - const [chg, def] = [response.data.config.userChangeable, response.data.config.default] - const len = response.data.fileLength - - page.dom.innerHTML = ` -

-        [removed "File name length" form markup, stripped during extraction:
-         a number input (#fileLength) and a set-length button (#setFileLength),
-         with the help text "Default file name length is ${def} characters.
-         ${(chg ? `Range allowed for user is ${min} to ${max} characters.` :
-         'Changing file name length is disabled at the moment.')}"]
- - ` - page.fadeAndScroll() - - document.querySelector('#setFileLength').addEventListener('click', function () { - page.setFileLength(document.querySelector('#fileLength').value, this) - }) - }).catch(function (error) { - console.log(error) - return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') - }) -} - -page.setFileLength = function (fileLength, element) { - page.isLoading(element, true) - - axios.post('api/filelength/change', { fileLength }).then(function (response) { - page.isLoading(element, false) - - if (response.data.success === false) - if (response.data.description === 'No token provided') { - return page.verifyToken(page.token) - } else { - return swal('An error occurred!', response.data.description, 'error') - } - - swal({ - title: 'Woohoo!', - text: 'Your file length was successfully changed.', - icon: 'success' - }).then(function () { - page.changeFileLength() - }) - }).catch(function (error) { - console.log(error) - page.isLoading(element, false) - return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') - }) -} - page.changeToken = function () { axios.get('api/tokens').then(function (response) { if (response.data.success === false) @@ -1566,7 +1530,7 @@ page.changeToken = function () { ` page.fadeAndScroll() }).catch(function (error) { - console.log(error) + console.error(error) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) } @@ -1595,7 +1559,7 @@ page.getNewToken = function (element) { page.changeToken() }) }).catch(function (error) { - console.log(error) + console.error(error) page.isLoading(element, false) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) @@ -1608,13 +1572,13 @@ page.changePassword = function () {
        [change-password template: two -/+ line pairs whose markup was stripped during extraction]
@@ -1664,7 +1628,7 @@ page.sendNewPassword = function (pass, element) { page.changePassword() }) }).catch(function (error) { - console.log(error) + console.error(error) page.isLoading(element, false) return swal('An error occurred!', 'There was an error with the request, please check the console for more information.', 'error') }) @@ -1713,7 +1677,7 @@ page.getUsers = function ({ pageNum } = {}, element) {
        [users-list template: one -/+ line pair whose markup was stripped during extraction]

{% if urlMaxSize !== maxSize -%} - Maximum file size for URL upload is {{ urlMaxSize }}. + Maximum file size per URL is {{ urlMaxSize }}. {%- endif %} {% if urlExtensionsFilter.length and (urlExtensionsFilterMode === 'blacklist') -%} @@ -132,19 +132,36 @@

        [config-tab markup stripped during extraction; visible remnants of this
         hunk: an added {%- if temporaryUploadAges %} ... {%- endif %} block, a
         field whose help text reads "Default is . Max is ." and another whose
         help text reads "Default is ." (the interpolated values were lost with
         the markup)]
@@ -155,6 +172,7 @@ Save & reload
        [one template line added, markup stripped; the help text "This
         configuration will only be used in this browser." survives as
         unchanged context]
@@ -167,13 +185,14 @@
        [two -/+ template line pairs, markup stripped during extraction]
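
Aside: lolisafe.js above only schedules the periodic expiry check when
config.uploads.temporaryUploadsInterval is truthy, and package.json now exposes
a delete-expired script for running the same cleanup out of band. The script's
body is not part of this excerpt; a hypothetical standalone runner needs little
more than utilsController's bulkDeleteExpired (a sketch, assuming it is
launched from the repo root):

    const logger = require('./logger')
    const utils = require('./controllers/utilsController')

    ;(async () => {
      // bulkDeleteExpired(true) would be a dry run that only lists expired
      // ids; called without arguments it also deletes them via bulkDeleteFromDb.
      const result = await utils.bulkDeleteExpired()
      let message = `Deleted ${result.expired.length} expired upload(s)`
      if (result.failed.length)
        message += ` but unable to delete ${result.failed.length}`
      logger.log(message)
      // knex keeps the event loop alive, so exit explicitly
      process.exit(0)
    })()

Invoked as `yarn delete-expired` per the new package.json script entry; the
logic mirrors the temporaryUploadCheck task inside lolisafe.js.
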